1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
48 #include "hard-reg-set.h"
50 #include "insn-config.h"
55 #include "basic-block.h"
58 #include "langhooks.h"
60 /* Commonly used modes. */
62 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
63 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
64 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
65 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
68 /* This is *not* reset after each function. It gives each CODE_LABEL
69 in the entire compilation a unique label number. */
71 static int label_num = 1;
73 /* Highest label number in current function.
74 Zero means use the value of label_num instead.
75 This is nonzero only when belatedly compiling an inline function. */
77 static int last_label_num;
79 /* Value label_num had when set_new_first_and_last_label_number was called.
80 If label_num has not changed since then, last_label_num is valid. */
82 static int base_label_num;
84 /* Nonzero means do not generate NOTEs for source line numbers. */
86 static int no_line_numbers;
88 /* Commonly used rtx's, so that we only need space for one copy.
89 These are initialized once for the entire compilation.
90 All of these are unique; no other rtx-object will be equal to any
93 rtx global_rtl[GR_MAX];
95 /* Commonly used RTL for hard registers. These objects are not necessarily
96 unique, so we allocate them separately from global_rtl. They are
97 initialized once per compilation unit, then copied into regno_reg_rtx
98 at the beginning of each function. */
99 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
102 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
103 record a copy of const[012]_rtx. */
105 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
109 REAL_VALUE_TYPE dconst0;
110 REAL_VALUE_TYPE dconst1;
111 REAL_VALUE_TYPE dconst2;
112 REAL_VALUE_TYPE dconstm1;
114 /* All references to the following fixed hard registers go through
115 these unique rtl objects. On machines where the frame-pointer and
116 arg-pointer are the same register, they use the same unique object.
118 After register allocation, other rtl objects which used to be pseudo-regs
119 may be clobbered to refer to the frame-pointer register.
120 But references that were originally to the frame-pointer can be
121 distinguished from the others because they contain frame_pointer_rtx.
123 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
124 tricky: until register elimination has taken place hard_frame_pointer_rtx
125 should be used if it is being set, and frame_pointer_rtx otherwise. After
126 register elimination hard_frame_pointer_rtx should always be used.
127 On machines where the two registers are the same (most machines), these are the
130 In an inline procedure, the stack and frame pointer rtxs may not be
131 used for anything else. */
132 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
133 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
134 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
135 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
136 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
138 /* This is used to implement __builtin_return_address for some machines.
139 See for instance the MIPS port. */
140 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
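/* A minimal illustrative sketch, not part of the original emit-rtl.c, of the
   rule described in the comment above: before register elimination a stack
   slot reference is built around frame_pointer_rtx, while
   hard_frame_pointer_rtx is reserved for insns that set the frame pointer
   itself.  The function name and the -4 offset are made up for the example.  */
static rtx
example_frame_slot ()
{
  /* (mem:SI (plus (reg frame_pointer) (const_int -4)))  */
  return gen_rtx_MEM (SImode, plus_constant (frame_pointer_rtx, -4));
}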
142 /* We make one copy of (const_int C) where C is in
143 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
144 to save space during the compilation and simplify comparisons of
147 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
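/* A minimal illustrative sketch, not part of the original emit-rtl.c: because
   small constants are kept in const_int_rtx, requesting the same small value
   twice yields the very same rtx, so pointer comparison suffices for them.
   The function name is made up for the example.  */
static int
example_const_int_sharing ()
{
  return GEN_INT (0) == const0_rtx && GEN_INT (1) == const1_rtx;
}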
149 /* A hash table storing CONST_INTs whose absolute value is greater
150 than MAX_SAVED_CONST_INT. */
152 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
153 htab_t const_int_htab;
155 /* A hash table storing memory attribute structures. */
156 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
157 htab_t mem_attrs_htab;
159 /* A hash table storing all CONST_DOUBLEs. */
160 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
161 htab_t const_double_htab;
163 #define first_insn (cfun->emit->x_first_insn)
164 #define last_insn (cfun->emit->x_last_insn)
165 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
166 #define last_linenum (cfun->emit->x_last_linenum)
167 #define last_filename (cfun->emit->x_last_filename)
168 #define first_label_num (cfun->emit->x_first_label_num)
170 static rtx make_jump_insn_raw PARAMS ((rtx));
171 static rtx make_call_insn_raw PARAMS ((rtx));
172 static rtx find_line_note PARAMS ((rtx));
173 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
175 static void unshare_all_rtl_1 PARAMS ((rtx));
176 static void unshare_all_decls PARAMS ((tree));
177 static void reset_used_decls PARAMS ((tree));
178 static void mark_label_nuses PARAMS ((rtx));
179 static hashval_t const_int_htab_hash PARAMS ((const void *));
180 static int const_int_htab_eq PARAMS ((const void *,
182 static hashval_t const_double_htab_hash PARAMS ((const void *));
183 static int const_double_htab_eq PARAMS ((const void *,
185 static rtx lookup_const_double PARAMS ((rtx));
186 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
187 static int mem_attrs_htab_eq PARAMS ((const void *,
189 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
192 static tree component_ref_for_mem_expr PARAMS ((tree));
193 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
195 /* Probability of the conditional branch currently being processed by try_split.
196 Set to -1 otherwise. */
197 int split_branch_probability = -1;
199 /* Returns a hash code for X (which is really a CONST_INT). */
202 const_int_htab_hash (x)
205 return (hashval_t) INTVAL ((struct rtx_def *) x);
208 /* Returns non-zero if the value represented by X (which is really a
209 CONST_INT) is the same as that given by Y (which is really a
213 const_int_htab_eq (x, y)
217 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
220 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
222 const_double_htab_hash (x)
229 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
230 h ^= XWINT (value, i);
234 /* Returns non-zero if the value represented by X (really a CONST_DOUBLE)
235 is the same as that represented by Y (really a CONST_DOUBLE). */
237 const_double_htab_eq (x, y)
241 rtx a = (rtx)x, b = (rtx)y;
244 if (GET_MODE (a) != GET_MODE (b))
246 for (i = 0; i < sizeof(CONST_DOUBLE_FORMAT)-1; i++)
247 if (XWINT (a, i) != XWINT (b, i))
253 /* Returns a hash code for X (which is really a mem_attrs *). */
256 mem_attrs_htab_hash (x)
259 mem_attrs *p = (mem_attrs *) x;
261 return (p->alias ^ (p->align * 1000)
262 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
263 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
267 /* Returns non-zero if the value represented by X (which is really a
268 mem_attrs *) is the same as that given by Y (which is also really a
272 mem_attrs_htab_eq (x, y)
276 mem_attrs *p = (mem_attrs *) x;
277 mem_attrs *q = (mem_attrs *) y;
279 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
280 && p->size == q->size && p->align == q->align);
283 /* Allocate a new mem_attrs structure and insert it into the hash table if
284 one identical to it is not already in the table. We are doing this for
288 get_mem_attrs (alias, expr, offset, size, align, mode)
294 enum machine_mode mode;
299 /* If everything is the default, we can just return zero. */
300 if (alias == 0 && expr == 0 && offset == 0
302 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
303 && (align == BITS_PER_UNIT
305 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
310 attrs.offset = offset;
314 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
317 *slot = ggc_alloc (sizeof (mem_attrs));
318 memcpy (*slot, &attrs, sizeof (mem_attrs));
324 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
325 don't attempt to share with the various global pieces of rtl (such as
326 frame_pointer_rtx). */
329 gen_raw_REG (mode, regno)
330 enum machine_mode mode;
333 rtx x = gen_rtx_raw_REG (mode, regno);
334 ORIGINAL_REGNO (x) = regno;
338 /* There are some RTL codes that require special attention; the generation
339 functions do the raw handling. If you add to this list, modify
340 special_rtx in gengenrtl.c as well. */
343 gen_rtx_CONST_INT (mode, arg)
344 enum machine_mode mode ATTRIBUTE_UNUSED;
349 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
350 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
352 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
353 if (const_true_rtx && arg == STORE_FLAG_VALUE)
354 return const_true_rtx;
357 /* Look up the CONST_INT in the hash table. */
358 slot = htab_find_slot_with_hash (const_int_htab, &arg,
359 (hashval_t) arg, INSERT);
361 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
367 gen_int_mode (c, mode)
369 enum machine_mode mode;
371 return GEN_INT (trunc_int_for_mode (c, mode));
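/* A minimal illustrative sketch, not part of the original emit-rtl.c:
   gen_int_mode truncates C to MODE first, so the QImode constant 0xff comes
   back as (const_int -1), the canonical form for that mode.  The function
   name is made up for the example.  */
static rtx
example_gen_int_mode ()
{
  return gen_int_mode (0xff, QImode);	/* same rtx as GEN_INT (-1) */
}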
374 /* CONST_DOUBLEs might be created from pairs of integers, or from
375 REAL_VALUE_TYPEs. Also, their length is known only at run time,
376 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
378 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
379 hash table. If so, return its counterpart; otherwise add it
380 to the hash table and return it. */
382 lookup_const_double (real)
385 void **slot = htab_find_slot (const_double_htab, real, INSERT);
392 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
393 VALUE in mode MODE. */
395 const_double_from_real_value (value, mode)
396 REAL_VALUE_TYPE value;
397 enum machine_mode mode;
399 rtx real = rtx_alloc (CONST_DOUBLE);
400 PUT_MODE (real, mode);
402 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
404 return lookup_const_double (real);
407 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
408 of ints: I0 is the low-order word and I1 is the high-order word.
409 Do not use this routine for non-integer modes; convert to
410 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
413 immed_double_const (i0, i1, mode)
414 HOST_WIDE_INT i0, i1;
415 enum machine_mode mode;
420 if (mode != VOIDmode)
423 if (GET_MODE_CLASS (mode) != MODE_INT
424 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
427 /* We clear out all bits that don't belong in MODE, unless they and
428 our sign bit are all one. So we get either a reasonable negative
429 value or a reasonable unsigned value for this mode. */
430 width = GET_MODE_BITSIZE (mode);
431 if (width < HOST_BITS_PER_WIDE_INT
432 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
433 != ((HOST_WIDE_INT) (-1) << (width - 1))))
434 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
435 else if (width == HOST_BITS_PER_WIDE_INT
436 && ! (i1 == ~0 && i0 < 0))
438 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
439 /* We cannot represent this value as a constant. */
442 /* If this would be an entire word for the target, but is not for
443 the host, then sign-extend on the host so that the number will
444 look the same way on the host that it would on the target.
446 For example, when building a 64 bit alpha hosted 32 bit sparc
447 targeted compiler, we want the 32 bit unsigned value -1 to be
448 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
449 The latter confuses the sparc backend. */
451 if (width < HOST_BITS_PER_WIDE_INT
452 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
453 i0 |= ((HOST_WIDE_INT) (-1) << width);
455 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
458 ??? Strictly speaking, this is wrong if we create a CONST_INT for
459 a large unsigned constant with the size of MODE being
460 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
461 in a wider mode. In that case we will mis-interpret it as a
464 Unfortunately, the only alternative is to make a CONST_DOUBLE for
465 any constant in any mode if it is an unsigned constant larger
466 than the maximum signed integer in an int on the host. However,
467 doing this will break everyone that always expects to see a
468 CONST_INT for SImode and smaller.
470 We have always been making CONST_INTs in this case, so nothing
471 new is being broken. */
473 if (width <= HOST_BITS_PER_WIDE_INT)
474 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
477 /* If this integer fits in one word, return a CONST_INT. */
478 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
481 /* We use VOIDmode for integers. */
482 value = rtx_alloc (CONST_DOUBLE);
483 PUT_MODE (value, VOIDmode);
485 CONST_DOUBLE_LOW (value) = i0;
486 CONST_DOUBLE_HIGH (value) = i1;
488 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
489 XWINT (value, i) = 0;
491 return lookup_const_double (value);
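/* A minimal illustrative sketch, not part of the original emit-rtl.c: as
   described in the comments above, a value whose significant bits fit in one
   HOST_WIDE_INT comes back as a plain CONST_INT; only a genuinely wider value
   (for instance a DImode constant with a nonzero high word on a host with a
   32-bit HOST_WIDE_INT) becomes a VOIDmode CONST_DOUBLE.  The function name
   is made up for the example.  */
static rtx
example_immed_double_const ()
{
  return immed_double_const (42, 0, DImode);	/* (const_int 42) */
}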
495 gen_rtx_REG (mode, regno)
496 enum machine_mode mode;
499 /* In case the MD file explicitly references the frame pointer, have
500 all such references point to the same frame pointer. This is
501 used during frame pointer elimination to distinguish the explicit
502 references to these registers from pseudos that happened to be
505 If we have eliminated the frame pointer or arg pointer, we will
506 be using it as a normal register, for example as a spill
507 register. In such cases, we might be accessing it in a mode that
508 is not Pmode and therefore cannot use the pre-allocated rtx.
510 Also don't do this when we are making new REGs in reload, since
511 we don't want to get confused with the real pointers. */
513 if (mode == Pmode && !reload_in_progress)
515 if (regno == FRAME_POINTER_REGNUM)
516 return frame_pointer_rtx;
517 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
518 if (regno == HARD_FRAME_POINTER_REGNUM)
519 return hard_frame_pointer_rtx;
521 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
522 if (regno == ARG_POINTER_REGNUM)
523 return arg_pointer_rtx;
525 #ifdef RETURN_ADDRESS_POINTER_REGNUM
526 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
527 return return_address_pointer_rtx;
529 if (regno == PIC_OFFSET_TABLE_REGNUM
530 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
531 return pic_offset_table_rtx;
532 if (regno == STACK_POINTER_REGNUM)
533 return stack_pointer_rtx;
537 /* If the per-function register table has been set up, try to re-use
538 an existing entry in that table to avoid useless generation of RTL.
540 This code is disabled for now until we can fix the various backends
541 which depend on having non-shared hard registers in some cases. Long
542 term we want to re-enable this code as it can significantly cut down
543 on the amount of useless RTL that gets generated. */
547 && regno < FIRST_PSEUDO_REGISTER
548 && reg_raw_mode[regno] == mode)
549 return regno_reg_rtx[regno];
552 return gen_raw_REG (mode, regno);
556 gen_rtx_MEM (mode, addr)
557 enum machine_mode mode;
560 rtx rt = gen_rtx_raw_MEM (mode, addr);
562 /* This field is not cleared by the mere allocation of the rtx, so
570 gen_rtx_SUBREG (mode, reg, offset)
571 enum machine_mode mode;
575 /* This is the most common failure type.
576 Catch it early so we can see who does it. */
577 if ((offset % GET_MODE_SIZE (mode)) != 0)
580 /* This check isn't usable right now because combine will
581 throw arbitrary crap like a CALL into a SUBREG in
582 gen_lowpart_for_combine so we must just eat it. */
584 /* Check for this too. */
585 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
588 return gen_rtx_raw_SUBREG (mode, reg, offset);
591 /* Generate a SUBREG representing the least-significant part of REG if MODE
592 is smaller than mode of REG, otherwise paradoxical SUBREG. */
595 gen_lowpart_SUBREG (mode, reg)
596 enum machine_mode mode;
599 enum machine_mode inmode;
601 inmode = GET_MODE (reg);
602 if (inmode == VOIDmode)
604 return gen_rtx_SUBREG (mode, reg,
605 subreg_lowpart_offset (mode, inmode));
608 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
610 ** This routine generates an RTX of the size specified by
611 ** <code>, which is an RTX code. The RTX structure is initialized
612 ** from the arguments <element1> through <elementn>, which are
613 ** interpreted according to the specific RTX type's format. The
614 ** special machine mode associated with the rtx (if any) is specified
617 ** gen_rtx can be invoked in a way which resembles the lisp-like
618 ** rtx it will generate. For example, the following rtx structure:
620 ** (plus:QI (mem:QI (reg:SI 1))
621 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
623 ** ...would be generated by the following C code:
625 ** gen_rtx (PLUS, QImode,
626 ** gen_rtx (MEM, QImode,
627 ** gen_rtx (REG, SImode, 1)),
628 ** gen_rtx (MEM, QImode,
629 ** gen_rtx (PLUS, SImode,
630 ** gen_rtx (REG, SImode, 2),
631 ** gen_rtx (REG, SImode, 3)))),
636 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
638 int i; /* Array indices... */
639 const char *fmt; /* Current rtx's format... */
640 rtx rt_val; /* RTX to return to caller... */
643 VA_FIXEDARG (p, enum rtx_code, code);
644 VA_FIXEDARG (p, enum machine_mode, mode);
649 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
654 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
655 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
657 rt_val = immed_double_const (arg0, arg1, mode);
662 rt_val = gen_rtx_REG (mode, va_arg (p, int));
666 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
670 rt_val = rtx_alloc (code); /* Allocate the storage space. */
671 rt_val->mode = mode; /* Store the machine mode... */
673 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
674 for (i = 0; i < GET_RTX_LENGTH (code); i++)
678 case '0': /* Unused field. */
681 case 'i': /* An integer? */
682 XINT (rt_val, i) = va_arg (p, int);
685 case 'w': /* A wide integer? */
686 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
689 case 's': /* A string? */
690 XSTR (rt_val, i) = va_arg (p, char *);
693 case 'e': /* An expression? */
694 case 'u': /* An insn? Same except when printing. */
695 XEXP (rt_val, i) = va_arg (p, rtx);
698 case 'E': /* An RTX vector? */
699 XVEC (rt_val, i) = va_arg (p, rtvec);
702 case 'b': /* A bitmap? */
703 XBITMAP (rt_val, i) = va_arg (p, bitmap);
706 case 't': /* A tree? */
707 XTREE (rt_val, i) = va_arg (p, tree);
721 /* gen_rtvec (n, [rt1, ..., rtn])
723 ** This routine creates an rtvec and stores within it the
724 ** pointers to rtx's which are its arguments.
729 gen_rtvec VPARAMS ((int n, ...))
735 VA_FIXEDARG (p, int, n);
738 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
740 vector = (rtx *) alloca (n * sizeof (rtx));
742 for (i = 0; i < n; i++)
743 vector[i] = va_arg (p, rtx);
745 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
749 return gen_rtvec_v (save_n, vector);
753 gen_rtvec_v (n, argp)
761 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
763 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
765 for (i = 0; i < n; i++)
766 rt_val->elem[i] = *argp++;
771 /* Generate a REG rtx for a new pseudo register of mode MODE.
772 This pseudo is assigned the next sequential register number. */
776 enum machine_mode mode;
778 struct function *f = cfun;
781 /* Don't let anything called after initial flow analysis create new
786 if (generating_concat_p
787 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
788 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
790 /* For complex modes, don't make a single pseudo.
791 Instead, make a CONCAT of two pseudos.
792 This allows noncontiguous allocation of the real and imaginary parts,
793 which makes much better code. Besides, allocating DCmode
794 pseudos overstrains reload on some machines like the 386. */
795 rtx realpart, imagpart;
796 int size = GET_MODE_UNIT_SIZE (mode);
797 enum machine_mode partmode
798 = mode_for_size (size * BITS_PER_UNIT,
799 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
800 ? MODE_FLOAT : MODE_INT),
803 realpart = gen_reg_rtx (partmode);
804 imagpart = gen_reg_rtx (partmode);
805 return gen_rtx_CONCAT (mode, realpart, imagpart);
808 /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
809 enough to have an element for this pseudo reg number. */
811 if (reg_rtx_no == f->emit->regno_pointer_align_length)
813 int old_size = f->emit->regno_pointer_align_length;
818 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
819 memset (new + old_size, 0, old_size);
820 f->emit->regno_pointer_align = (unsigned char *) new;
822 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
823 old_size * 2 * sizeof (rtx));
824 memset (new1 + old_size, 0, old_size * sizeof (rtx));
825 regno_reg_rtx = new1;
827 new2 = (tree *) ggc_realloc (f->emit->regno_decl,
828 old_size * 2 * sizeof (tree));
829 memset (new2 + old_size, 0, old_size * sizeof (tree));
830 f->emit->regno_decl = new2;
832 f->emit->regno_pointer_align_length = old_size * 2;
835 val = gen_raw_REG (mode, reg_rtx_no);
836 regno_reg_rtx[reg_rtx_no++] = val;
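/* A minimal illustrative sketch, not part of the original emit-rtl.c: as
   described above, asking for a complex-mode pseudo while generating_concat_p
   is set yields a CONCAT of two independent pseudos rather than one wide
   register.  The function name is made up for the example.  */
static rtx
example_complex_pseudo ()
{
  /* (concat:DC (reg:DF i) (reg:DF j)) when generating_concat_p is set.  */
  return gen_reg_rtx (DCmode);
}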
840 /* Identify REG (which may be a CONCAT) as a user register. */
846 if (GET_CODE (reg) == CONCAT)
848 REG_USERVAR_P (XEXP (reg, 0)) = 1;
849 REG_USERVAR_P (XEXP (reg, 1)) = 1;
851 else if (GET_CODE (reg) == REG)
852 REG_USERVAR_P (reg) = 1;
857 /* Identify REG as a probable pointer register and show its alignment
858 as ALIGN, if nonzero. */
861 mark_reg_pointer (reg, align)
865 if (! REG_POINTER (reg))
867 REG_POINTER (reg) = 1;
870 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
872 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
873 /* We can no longer be sure just how aligned this pointer is.  */
874 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
877 /* Return 1 plus largest pseudo reg number used in the current function. */
885 /* Return 1 + the largest label number used so far in the current function. */
890 if (last_label_num && label_num == base_label_num)
891 return last_label_num;
895 /* Return first label number used in this function (if any were used). */
898 get_first_label_num ()
900 return first_label_num;
903 /* Return the final regno of X, which is a SUBREG of a hard
906 subreg_hard_regno (x, check_mode)
910 enum machine_mode mode = GET_MODE (x);
911 unsigned int byte_offset, base_regno, final_regno;
912 rtx reg = SUBREG_REG (x);
914 /* This is where we attempt to catch illegal subregs
915 created by the compiler. */
916 if (GET_CODE (x) != SUBREG
917 || GET_CODE (reg) != REG)
919 base_regno = REGNO (reg);
920 if (base_regno >= FIRST_PSEUDO_REGISTER)
922 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
925 /* Catch non-congruent offsets too. */
926 byte_offset = SUBREG_BYTE (x);
927 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
930 final_regno = subreg_regno (x);
935 /* Return a value representing some low-order bits of X, where the number
936 of low-order bits is given by MODE. Note that no conversion is done
937 between floating-point and fixed-point values, rather, the bit
938 representation is returned.
940 This function handles the cases in common between gen_lowpart, below,
941 and two variants in cse.c and combine.c. These are the cases that can
942 be safely handled at all points in the compilation.
944 If this is not a case we can handle, return 0. */
947 gen_lowpart_common (mode, x)
948 enum machine_mode mode;
951 int msize = GET_MODE_SIZE (mode);
952 int xsize = GET_MODE_SIZE (GET_MODE (x));
955 if (GET_MODE (x) == mode)
958 /* MODE must occupy no more words than the mode of X. */
959 if (GET_MODE (x) != VOIDmode
960 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
961 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
964 offset = subreg_lowpart_offset (mode, GET_MODE (x));
966 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
967 && (GET_MODE_CLASS (mode) == MODE_INT
968 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
970 /* If we are getting the low-order part of something that has been
971 sign- or zero-extended, we can either just use the object being
972 extended or make a narrower extension. If we want an even smaller
973 piece than the size of the object being extended, call ourselves
976 This case is used mostly by combine and cse. */
978 if (GET_MODE (XEXP (x, 0)) == mode)
980 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
981 return gen_lowpart_common (mode, XEXP (x, 0));
982 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
983 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
985 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
986 || GET_CODE (x) == CONCAT)
987 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
988 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
989 from the low-order part of the constant. */
990 else if ((GET_MODE_CLASS (mode) == MODE_INT
991 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
992 && GET_MODE (x) == VOIDmode
993 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
995 /* If MODE is twice the host word size, X is already the desired
996 representation. Otherwise, if MODE is wider than a word, we can't
997 do this. If MODE is exactly a word, return just one CONST_INT. */
999 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1001 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1003 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1004 return (GET_CODE (x) == CONST_INT ? x
1005 : GEN_INT (CONST_DOUBLE_LOW (x)));
1008 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1009 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1010 : CONST_DOUBLE_LOW (x));
1012 /* Sign extend to HOST_WIDE_INT. */
1013 val = trunc_int_for_mode (val, mode);
1015 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1020 /* The floating-point emulator can handle all conversions between
1021 FP and integer operands. This simplifies reload because it
1022 doesn't have to deal with constructs like (subreg:DI
1023 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1024 /* Single-precision floats are always 32-bits and double-precision
1025 floats are always 64-bits. */
1027 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1028 && GET_MODE_BITSIZE (mode) == 32
1029 && GET_CODE (x) == CONST_INT)
1035 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
1036 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1038 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1039 && GET_MODE_BITSIZE (mode) == 64
1040 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1041 && GET_MODE (x) == VOIDmode)
1045 HOST_WIDE_INT low, high;
1047 if (GET_CODE (x) == CONST_INT)
1050 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1054 low = CONST_DOUBLE_LOW (x);
1055 high = CONST_DOUBLE_HIGH (x);
1058 #if HOST_BITS_PER_WIDE_INT == 32
1059 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1061 if (WORDS_BIG_ENDIAN)
1062 i[0] = high, i[1] = low;
1064 i[0] = low, i[1] = high;
1069 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
1070 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1072 else if ((GET_MODE_CLASS (mode) == MODE_INT
1073 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1074 && GET_CODE (x) == CONST_DOUBLE
1075 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1078 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1079 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1081 /* Convert 'r' into an array of four 32-bit words in target word
1083 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1084 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1087 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1090 i[3 - 3 * endian] = 0;
1093 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1094 i[2 - 2 * endian] = 0;
1095 i[3 - 2 * endian] = 0;
1098 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1099 i[3 - 3 * endian] = 0;
1102 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1107 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1109 #if HOST_BITS_PER_WIDE_INT == 32
1110 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1112 if (HOST_BITS_PER_WIDE_INT != 64)
1115 return immed_double_const ((((unsigned long) i[3 * endian])
1116 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1117 (((unsigned long) i[2 - endian])
1118 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1123 /* Otherwise, we can't do this. */
1127 /* Return the real part (which has mode MODE) of a complex value X.
1128 This always comes at the low address in memory. */
1131 gen_realpart (mode, x)
1132 enum machine_mode mode;
1135 if (WORDS_BIG_ENDIAN
1136 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1138 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1140 ("can't access real part of complex value in hard register");
1141 else if (WORDS_BIG_ENDIAN)
1142 return gen_highpart (mode, x);
1144 return gen_lowpart (mode, x);
1147 /* Return the imaginary part (which has mode MODE) of a complex value X.
1148 This always comes at the high address in memory. */
1151 gen_imagpart (mode, x)
1152 enum machine_mode mode;
1155 if (WORDS_BIG_ENDIAN)
1156 return gen_lowpart (mode, x);
1157 else if (! WORDS_BIG_ENDIAN
1158 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1160 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1162 ("can't access imaginary part of complex value in hard register");
1164 return gen_highpart (mode, x);
1167 /* Return 1 iff X, assumed to be a SUBREG,
1168 refers to the real part of the complex value in its containing reg.
1169 Complex values are always stored with the real part in the first word,
1170 regardless of WORDS_BIG_ENDIAN. */
1173 subreg_realpart_p (x)
1176 if (GET_CODE (x) != SUBREG)
1179 return ((unsigned int) SUBREG_BYTE (x)
1180 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1183 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1184 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1185 least-significant part of X.
1186 MODE specifies how big a part of X to return;
1187 it usually should not be larger than a word.
1188 If X is a MEM whose address is a QUEUED, the value may be so also. */
1191 gen_lowpart (mode, x)
1192 enum machine_mode mode;
1195 rtx result = gen_lowpart_common (mode, x);
1199 else if (GET_CODE (x) == REG)
1201 /* Must be a hard reg that's not valid in MODE. */
1202 result = gen_lowpart_common (mode, copy_to_reg (x));
1207 else if (GET_CODE (x) == MEM)
1209 /* The only additional case we can do is MEM. */
1211 if (WORDS_BIG_ENDIAN)
1212 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1213 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1215 if (BYTES_BIG_ENDIAN)
1216 /* Adjust the address so that the address-after-the-data
1218 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1219 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1221 return adjust_address (x, mode, offset);
1223 else if (GET_CODE (x) == ADDRESSOF)
1224 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
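/* A minimal illustrative sketch, not part of the original emit-rtl.c:
   gen_lowpart on a DImode pseudo yields its low-order SUBREG word, while on a
   CONST_INT it simply truncates the constant to the requested mode.  The
   function name is made up for the example.  */
static rtx
example_gen_lowpart ()
{
  rtx di_reg = gen_reg_rtx (DImode);
  rtx byte ATTRIBUTE_UNUSED = gen_lowpart (QImode, GEN_INT (0x1234));

  /* BYTE above is (const_int 0x34); the result below is
     (subreg:SI (reg:DI n) 0) on a little-endian target, offset 4 on a
     big-endian one.  */
  return gen_lowpart (SImode, di_reg);
}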
1229 /* Like `gen_lowpart', but refer to the most significant part.
1230 This is used to access the imaginary part of a complex number. */
1233 gen_highpart (mode, x)
1234 enum machine_mode mode;
1237 unsigned int msize = GET_MODE_SIZE (mode);
1240 /* This case loses if X is a subreg. To catch bugs early,
1241 complain if an invalid MODE is used even in other cases. */
1242 if (msize > UNITS_PER_WORD
1243 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1246 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1247 subreg_highpart_offset (mode, GET_MODE (x)));
1249 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1250 the target if we have a MEM. gen_highpart must return a valid operand,
1251 emitting code if necessary to do so. */
1252 if (result != NULL_RTX && GET_CODE (result) == MEM)
1253 result = validize_mem (result);
1260 /* Like gen_highpart_mode, but accept mode of EXP operand in case EXP can
1261 be VOIDmode constant. */
1263 gen_highpart_mode (outermode, innermode, exp)
1264 enum machine_mode outermode, innermode;
1267 if (GET_MODE (exp) != VOIDmode)
1269 if (GET_MODE (exp) != innermode)
1271 return gen_highpart (outermode, exp);
1273 return simplify_gen_subreg (outermode, exp, innermode,
1274 subreg_highpart_offset (outermode, innermode));
1277 /* Return offset in bytes to get OUTERMODE low part
1278 of the value in mode INNERMODE stored in memory in target format. */
1281 subreg_lowpart_offset (outermode, innermode)
1282 enum machine_mode outermode, innermode;
1284 unsigned int offset = 0;
1285 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1289 if (WORDS_BIG_ENDIAN)
1290 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1291 if (BYTES_BIG_ENDIAN)
1292 offset += difference % UNITS_PER_WORD;
1298 /* Return offset in bytes to get OUTERMODE high part
1299 of the value in mode INNERMODE stored in memory in target format. */
1301 subreg_highpart_offset (outermode, innermode)
1302 enum machine_mode outermode, innermode;
1304 unsigned int offset = 0;
1305 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1307 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1312 if (! WORDS_BIG_ENDIAN)
1313 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1314 if (! BYTES_BIG_ENDIAN)
1315 offset += difference % UNITS_PER_WORD;
1321 /* Return 1 iff X, assumed to be a SUBREG,
1322 refers to the least significant part of its containing reg.
1323 If X is not a SUBREG, always return 1 (it is its own low part!). */
1326 subreg_lowpart_p (x)
1329 if (GET_CODE (x) != SUBREG)
1331 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1334 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1335 == SUBREG_BYTE (x));
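/* A minimal illustrative sketch, not part of the original emit-rtl.c: the
   offset computed by subreg_lowpart_offset above is 0 for the SImode low part
   of a DImode value on a little-endian target and 4 on a big-endian one, and
   a SUBREG built with exactly that offset satisfies subreg_lowpart_p.  The
   function name is made up for the example.  */
static int
example_subreg_lowpart ()
{
  rtx reg = gen_reg_rtx (DImode);
  rtx sub = gen_rtx_SUBREG (SImode, reg,
			    subreg_lowpart_offset (SImode, DImode));

  return subreg_lowpart_p (sub);	/* always 1 */
}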
1339 /* Helper routine for all the constant cases of operand_subword.
1340 Some places invoke this directly. */
1343 constant_subword (op, offset, mode)
1346 enum machine_mode mode;
1348 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1351 /* If OP is already an integer word, return it. */
1352 if (GET_MODE_CLASS (mode) == MODE_INT
1353 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1356 /* The output is some bits, the width of the target machine's word.
1357 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1359 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1360 && GET_MODE_CLASS (mode) == MODE_FLOAT
1361 && GET_MODE_BITSIZE (mode) == 64
1362 && GET_CODE (op) == CONST_DOUBLE)
1367 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1368 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1370 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1371 which the words are written depends on the word endianness.
1372 ??? This is a potential portability problem and should
1373 be fixed at some point.
1375 We must exercise caution with the sign bit. By definition there
1376 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1377 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1378 So we explicitly mask and sign-extend as necessary. */
1379 if (BITS_PER_WORD == 32)
1382 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1383 return GEN_INT (val);
1385 #if HOST_BITS_PER_WIDE_INT >= 64
1386 else if (BITS_PER_WORD >= 64 && offset == 0)
1388 val = k[! WORDS_BIG_ENDIAN];
1389 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1390 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1391 return GEN_INT (val);
1394 else if (BITS_PER_WORD == 16)
1396 val = k[offset >> 1];
1397 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1399 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1400 return GEN_INT (val);
1405 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1406 && GET_MODE_CLASS (mode) == MODE_FLOAT
1407 && GET_MODE_BITSIZE (mode) > 64
1408 && GET_CODE (op) == CONST_DOUBLE)
1413 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1414 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1416 if (BITS_PER_WORD == 32)
1419 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1420 return GEN_INT (val);
1422 #if HOST_BITS_PER_WIDE_INT >= 64
1423 else if (BITS_PER_WORD >= 64 && offset <= 1)
1425 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1426 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1427 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1428 return GEN_INT (val);
1435 /* Single word float is a little harder, since single- and double-word
1436 values often do not have the same high-order bits. We have already
1437 verified that we want the only defined word of the single-word value. */
1438 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1439 && GET_MODE_BITSIZE (mode) == 32
1440 && GET_CODE (op) == CONST_DOUBLE)
1445 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1446 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1448 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1450 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1452 if (BITS_PER_WORD == 16)
1454 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1456 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1459 return GEN_INT (val);
1462 /* The only remaining cases that we can handle are integers.
1463 Convert to proper endianness now since these cases need it.
1464 At this point, offset == 0 means the low-order word.
1466 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1467 in general. However, if OP is (const_int 0), we can just return
1470 if (op == const0_rtx)
1473 if (GET_MODE_CLASS (mode) != MODE_INT
1474 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1475 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1478 if (WORDS_BIG_ENDIAN)
1479 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1481 /* Find out which word on the host machine this value is in and get
1482 it from the constant. */
1483 val = (offset / size_ratio == 0
1484 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1485 : (GET_CODE (op) == CONST_INT
1486 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1488 /* Get the value we want into the low bits of val. */
1489 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1490 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1492 val = trunc_int_for_mode (val, word_mode);
1494 return GEN_INT (val);
1497 /* Return subword OFFSET of operand OP.
1498 The word number, OFFSET, is interpreted as the word number starting
1499 at the low-order address. OFFSET 0 is the low-order word if not
1500 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1502 If we cannot extract the required word, we return zero. Otherwise,
1503 an rtx corresponding to the requested word will be returned.
1505 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1506 reload has completed, a valid address will always be returned. After
1507 reload, if a valid address cannot be returned, we return zero.
1509 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1510 it is the responsibility of the caller.
1512 MODE is the mode of OP in case it is a CONST_INT.
1514 ??? This is still rather broken for some cases. The problem for the
1515 moment is that all callers of this thing provide no 'goal mode' to
1516 tell us to work with. This exists because all callers were written
1517 in a word based SUBREG world.
1518 Now use of this function can be deprecated by simplify_subreg in most
1523 operand_subword (op, offset, validate_address, mode)
1525 unsigned int offset;
1526 int validate_address;
1527 enum machine_mode mode;
1529 if (mode == VOIDmode)
1530 mode = GET_MODE (op);
1532 if (mode == VOIDmode)
1535 /* If OP is narrower than a word, fail. */
1537 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1540 /* If we want a word outside OP, return zero. */
1542 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1545 /* Form a new MEM at the requested address. */
1546 if (GET_CODE (op) == MEM)
1548 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1550 if (! validate_address)
1553 else if (reload_completed)
1555 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1559 return replace_equiv_address (new, XEXP (new, 0));
1562 /* Rest can be handled by simplify_subreg. */
1563 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
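/* A minimal illustrative sketch, not part of the original emit-rtl.c: word 0
   of a DImode pseudo is the word at the lower address, so on a
   !WORDS_BIG_ENDIAN target this returns the low-order word as a SUBREG.  The
   function name is made up for the example.  */
static rtx
example_operand_subword ()
{
  rtx reg = gen_reg_rtx (DImode);

  return operand_subword (reg, 0, 1, DImode);
}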
1566 /* Similar to `operand_subword', but never return 0. If we can't extract
1567 the required subword, put OP into a register and try again. If that fails,
1568 abort. We always validate the address in this case.
1570 MODE is the mode of OP, in case it is CONST_INT. */
1573 operand_subword_force (op, offset, mode)
1575 unsigned int offset;
1576 enum machine_mode mode;
1578 rtx result = operand_subword (op, offset, 1, mode);
1583 if (mode != BLKmode && mode != VOIDmode)
1585 /* If this is a register which can not be accessed by words, copy it
1586 to a pseudo register. */
1587 if (GET_CODE (op) == REG)
1588 op = copy_to_reg (op);
1590 op = force_reg (mode, op);
1593 result = operand_subword (op, offset, 1, mode);
1600 /* Given a compare instruction, swap the operands.
1601 A test instruction is changed into a compare of 0 against the operand. */
1604 reverse_comparison (insn)
1607 rtx body = PATTERN (insn);
1610 if (GET_CODE (body) == SET)
1611 comp = SET_SRC (body);
1613 comp = SET_SRC (XVECEXP (body, 0, 0));
1615 if (GET_CODE (comp) == COMPARE)
1617 rtx op0 = XEXP (comp, 0);
1618 rtx op1 = XEXP (comp, 1);
1619 XEXP (comp, 0) = op1;
1620 XEXP (comp, 1) = op0;
1624 rtx new = gen_rtx_COMPARE (VOIDmode,
1625 CONST0_RTX (GET_MODE (comp)), comp);
1626 if (GET_CODE (body) == SET)
1627 SET_SRC (body) = new;
1629 SET_SRC (XVECEXP (body, 0, 0)) = new;
1633 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1634 or (2) a component ref of something variable. Represent the latter with
1635 a NULL expression. */
1638 component_ref_for_mem_expr (ref)
1641 tree inner = TREE_OPERAND (ref, 0);
1643 if (TREE_CODE (inner) == COMPONENT_REF)
1644 inner = component_ref_for_mem_expr (inner);
1647 tree placeholder_ptr = 0;
1649 /* Now remove any conversions: they don't change what the underlying
1650 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1651 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1652 || TREE_CODE (inner) == NON_LVALUE_EXPR
1653 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1654 || TREE_CODE (inner) == SAVE_EXPR
1655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1656 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1657 inner = find_placeholder (inner, &placeholder_ptr);
1659 inner = TREE_OPERAND (inner, 0);
1661 if (! DECL_P (inner))
1665 if (inner == TREE_OPERAND (ref, 0))
1668 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1669 TREE_OPERAND (ref, 1));
1672 /* Given REF, a MEM, and T, either the type of X or the expression
1673 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1674 if we are making a new object of this type. */
1677 set_mem_attributes (ref, t, objectp)
1682 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1683 tree expr = MEM_EXPR (ref);
1684 rtx offset = MEM_OFFSET (ref);
1685 rtx size = MEM_SIZE (ref);
1686 unsigned int align = MEM_ALIGN (ref);
1689 /* It can happen that type_for_mode was given a mode for which there
1690 is no language-level type. In which case it returns NULL, which
1695 type = TYPE_P (t) ? t : TREE_TYPE (t);
1697 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1698 wrong answer, as it assumes that DECL_RTL already has the right alias
1699 info. Callers should not set DECL_RTL until after the call to
1700 set_mem_attributes. */
1701 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1704 /* Get the alias set from the expression or type (perhaps using a
1705 front-end routine) and use it. */
1706 alias = get_alias_set (t);
1708 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1709 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1710 RTX_UNCHANGING_P (ref)
1711 |= ((lang_hooks.honor_readonly
1712 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1713 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1715 /* If we are making an object of this type, or if this is a DECL, we know
1716 that it is a scalar if the type is not an aggregate. */
1717 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1718 MEM_SCALAR_P (ref) = 1;
1720 /* We can set the alignment from the type if we are making an object,
1721 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1722 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1723 align = MAX (align, TYPE_ALIGN (type));
1725 /* If the size is known, we can set that. */
1726 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1727 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1729 /* If T is not a type, we may be able to deduce some more information about
1733 maybe_set_unchanging (ref, t);
1734 if (TREE_THIS_VOLATILE (t))
1735 MEM_VOLATILE_P (ref) = 1;
1737 /* Now remove any conversions: they don't change what the underlying
1738 object is. Likewise for SAVE_EXPR. */
1739 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1740 || TREE_CODE (t) == NON_LVALUE_EXPR
1741 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1742 || TREE_CODE (t) == SAVE_EXPR)
1743 t = TREE_OPERAND (t, 0);
1745 /* If this expression can't be addressed (e.g., it contains a reference
1746 to a non-addressable field), show we don't change its alias set. */
1747 if (! can_address_p (t))
1748 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1750 /* If this is a decl, set the attributes of the MEM from it. */
1754 offset = const0_rtx;
1755 size = (DECL_SIZE_UNIT (t)
1756 && host_integerp (DECL_SIZE_UNIT (t), 1)
1757 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1758 align = DECL_ALIGN (t);
1761 /* If this is a constant, we know the alignment. */
1762 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1764 align = TYPE_ALIGN (type);
1765 #ifdef CONSTANT_ALIGNMENT
1766 align = CONSTANT_ALIGNMENT (t, align);
1770 /* If this is a field reference and not a bit-field, record it. */
1771 /* ??? There is some information that can be gleaned from bit-fields,
1772 such as the word offset in the structure that might be modified.
1773 But skip it for now. */
1774 else if (TREE_CODE (t) == COMPONENT_REF
1775 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1777 expr = component_ref_for_mem_expr (t);
1778 offset = const0_rtx;
1779 /* ??? Any reason the field size would be different than
1780 the size we got from the type? */
1783 /* If this is an array reference, look for an outer field reference. */
1784 else if (TREE_CODE (t) == ARRAY_REF)
1786 tree off_tree = size_zero_node;
1791 = fold (build (PLUS_EXPR, sizetype,
1792 fold (build (MULT_EXPR, sizetype,
1793 TREE_OPERAND (t, 1),
1794 TYPE_SIZE_UNIT (TREE_TYPE (t)))),
1796 t = TREE_OPERAND (t, 0);
1798 while (TREE_CODE (t) == ARRAY_REF);
1800 if (TREE_CODE (t) == COMPONENT_REF)
1802 expr = component_ref_for_mem_expr (t);
1803 if (host_integerp (off_tree, 1))
1804 offset = GEN_INT (tree_low_cst (off_tree, 1));
1805 /* ??? Any reason the field size would be different than
1806 the size we got from the type? */
1811 /* Now set the attributes we computed above. */
1813 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1815 /* If this is already known to be a scalar or aggregate, we are done. */
1816 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1819 /* If it is a reference into an aggregate, this is part of an aggregate.
1820 Otherwise we don't know. */
1821 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1822 || TREE_CODE (t) == ARRAY_RANGE_REF
1823 || TREE_CODE (t) == BIT_FIELD_REF)
1824 MEM_IN_STRUCT_P (ref) = 1;
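/* A minimal illustrative sketch, not part of the original emit-rtl.c: a MEM
   freshly built for a declaration picks up its alias set, expression, size
   and alignment from the tree node as described above.  DECL, ADDR and the
   function name are made up for the example.  */
static rtx
example_decl_mem (decl, addr)
     tree decl;
     rtx addr;
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  set_mem_attributes (mem, decl, 1);
  return mem;
}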
1827 /* Set the alias set of MEM to SET. */
1830 set_mem_alias_set (mem, set)
1834 #ifdef ENABLE_CHECKING
1835 /* If the new and old alias sets don't conflict, something is wrong. */
1836 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1840 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1841 MEM_SIZE (mem), MEM_ALIGN (mem),
1845 /* Set the alignment of MEM to ALIGN bits. */
1848 set_mem_align (mem, align)
1852 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1853 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1857 /* Set the expr for MEM to EXPR. */
1860 set_mem_expr (mem, expr)
1865 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1866 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1869 /* Set the offset of MEM to OFFSET. */
1872 set_mem_offset (mem, offset)
1875 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1876 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1880 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1881 and its address changed to ADDR. (VOIDmode means don't change the mode.
1882 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1883 returned memory location is required to be valid. The memory
1884 attributes are not changed. */
1887 change_address_1 (memref, mode, addr, validate)
1889 enum machine_mode mode;
1895 if (GET_CODE (memref) != MEM)
1897 if (mode == VOIDmode)
1898 mode = GET_MODE (memref);
1900 addr = XEXP (memref, 0);
1904 if (reload_in_progress || reload_completed)
1906 if (! memory_address_p (mode, addr))
1910 addr = memory_address (mode, addr);
1913 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1916 new = gen_rtx_MEM (mode, addr);
1917 MEM_COPY_ATTRIBUTES (new, memref);
1921 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1922 way we are changing MEMREF, so we only preserve the alias set. */
1925 change_address (memref, mode, addr)
1927 enum machine_mode mode;
1930 rtx new = change_address_1 (memref, mode, addr, 1);
1931 enum machine_mode mmode = GET_MODE (new);
1934 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1935 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1936 (mmode == BLKmode ? BITS_PER_UNIT
1937 : GET_MODE_ALIGNMENT (mmode)),
1943 /* Return a memory reference like MEMREF, but with its mode changed
1944 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1945 nonzero, the memory address is forced to be valid.
1946 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1947 and caller is responsible for adjusting MEMREF base register. */
1950 adjust_address_1 (memref, mode, offset, validate, adjust)
1952 enum machine_mode mode;
1953 HOST_WIDE_INT offset;
1954 int validate, adjust;
1956 rtx addr = XEXP (memref, 0);
1958 rtx memoffset = MEM_OFFSET (memref);
1960 unsigned int memalign = MEM_ALIGN (memref);
1962 /* ??? Prefer to create garbage instead of creating shared rtl.
1963 This may happen even if offset is non-zero -- consider
1964 (plus (plus reg reg) const_int) -- so do this always. */
1965 addr = copy_rtx (addr);
1969 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1970 object, we can merge it into the LO_SUM. */
1971 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1973 && (unsigned HOST_WIDE_INT) offset
1974 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1975 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1976 plus_constant (XEXP (addr, 1), offset));
1978 addr = plus_constant (addr, offset);
1981 new = change_address_1 (memref, mode, addr, validate);
1983 /* Compute the new values of the memory attributes due to this adjustment.
1984 We add the offsets and update the alignment. */
1986 memoffset = GEN_INT (offset + INTVAL (memoffset));
1988 /* Compute the new alignment by taking the MIN of the alignment and the
1989 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1994 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1996 /* We can compute the size in a number of ways. */
1997 if (GET_MODE (new) != BLKmode)
1998 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1999 else if (MEM_SIZE (memref))
2000 size = plus_constant (MEM_SIZE (memref), -offset);
2002 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2003 memoffset, size, memalign, GET_MODE (new));
2005 /* At some point, we should validate that this offset is within the object,
2006 if all the appropriate values are known. */
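/* A minimal illustrative sketch, not part of the original emit-rtl.c:
   adjust_address, the VALIDATE/ADJUST == 1 wrapper around adjust_address_1,
   narrows an SImode reference to the byte at offset 3 and updates the
   recorded offset, size and alignment accordingly.  MEM and the function
   name are made up for the example.  */
static rtx
example_narrow_to_byte (mem)
     rtx mem;			/* assumed to be an SImode MEM */
{
  return adjust_address (mem, QImode, 3);
}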
2010 /* Return a memory reference like MEMREF, but with its mode changed
2011 to MODE and its address changed to ADDR, which is assumed to be
2012 MEMREF offset by OFFSET bytes. If VALIDATE is
2013 nonzero, the memory address is forced to be valid. */
2016 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2018 enum machine_mode mode;
2020 HOST_WIDE_INT offset;
2023 memref = change_address_1 (memref, VOIDmode, addr, validate);
2024 return adjust_address_1 (memref, mode, offset, validate, 0);
2027 /* Return a memory reference like MEMREF, but whose address is changed by
2028 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2029 known to be in OFFSET (possibly 1). */
2032 offset_address (memref, offset, pow2)
2037 rtx new, addr = XEXP (memref, 0);
2039 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2041 /* At this point we don't know _why_ the address is invalid. It
2042 could have secondary memory references, multiplies or anything.
2044 However, if we did go and rearrange things, we can wind up not
2045 being able to recognize the magic around pic_offset_table_rtx.
2046 This stuff is fragile, and is yet another example of why it is
2047 bad to expose PIC machinery too early. */
2048 if (! memory_address_p (GET_MODE (memref), new)
2049 && GET_CODE (addr) == PLUS
2050 && XEXP (addr, 0) == pic_offset_table_rtx)
2052 addr = force_reg (GET_MODE (addr), addr);
2053 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2056 update_temp_slot_address (XEXP (memref, 0), new);
2057 new = change_address_1 (memref, VOIDmode, new, 1);
2059 /* Update the alignment to reflect the offset. Reset the offset, which
2062 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2063 MIN (MEM_ALIGN (memref),
2064 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2069 /* Return a memory reference like MEMREF, but with its address changed to
2070 ADDR. The caller is asserting that the actual piece of memory pointed
2071 to is the same, just the form of the address is being changed, such as
2072 by putting something into a register. */
2075 replace_equiv_address (memref, addr)
2079 /* change_address_1 copies the memory attribute structure without change
2080 and that's exactly what we want here. */
2081 update_temp_slot_address (XEXP (memref, 0), addr);
2082 return change_address_1 (memref, VOIDmode, addr, 1);
2085 /* Likewise, but the reference is not required to be valid. */
2088 replace_equiv_address_nv (memref, addr)
2092 return change_address_1 (memref, VOIDmode, addr, 0);
2095 /* Return a memory reference like MEMREF, but with its mode widened to
2096 MODE and offset by OFFSET. This would be used by targets that e.g.
2097 cannot issue QImode memory operations and have to use SImode memory
2098 operations plus masking logic. */
2101 widen_memory_access (memref, mode, offset)
2103 enum machine_mode mode;
2104 HOST_WIDE_INT offset;
2106 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2107 tree expr = MEM_EXPR (new);
2108 rtx memoffset = MEM_OFFSET (new);
2109 unsigned int size = GET_MODE_SIZE (mode);
2111 /* If we don't know what offset we were at within the expression, then
2112 we can't know if we've overstepped the bounds. */
2118 if (TREE_CODE (expr) == COMPONENT_REF)
2120 tree field = TREE_OPERAND (expr, 1);
2122 if (! DECL_SIZE_UNIT (field))
2128 /* Is the field at least as large as the access? If so, ok,
2129 otherwise strip back to the containing structure. */
2130 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2131 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2132 && INTVAL (memoffset) >= 0)
2135 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2141 expr = TREE_OPERAND (expr, 0);
2142 memoffset = (GEN_INT (INTVAL (memoffset)
2143 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2144 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2147 /* Similarly for the decl. */
2148 else if (DECL_P (expr)
2149 && DECL_SIZE_UNIT (expr)
2150 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2151 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2152 && (! memoffset || INTVAL (memoffset) >= 0))
2156 /* The widened memory access overflows the expression, which means
2157 that it could alias another expression. Zap it. */
2164 memoffset = NULL_RTX;
2166 /* The widened memory may alias other stuff, so zap the alias set. */
2167 /* ??? Maybe use get_alias_set on any remaining expression. */
2169 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2170 MEM_ALIGN (new), mode);
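/* Illustrative sketch, not part of the original source: a target that can
   only load full words might widen a byte reference and mask the loaded
   value afterwards; the zero offset assumes the byte is the first one in the
   containing word:

	wide = widen_memory_access (byte_mem, SImode, 0);

   BYTE_MEM is a hypothetical QImode MEM; the masking itself is left to the
   caller.  */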
2175 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2182 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2183 NULL, label_num++, NULL, NULL);
2185 LABEL_NUSES (label) = 0;
2186 LABEL_ALTERNATE_NAME (label) = NULL;
2190 /* For procedure integration. */
2192 /* Install new pointers to the first and last insns in the chain.
2193 Also, set cur_insn_uid to one higher than the last in use.
2194 Used for an inline-procedure after copying the insn chain. */
2197 set_new_first_and_last_insn (first, last)
2206 for (insn = first; insn; insn = NEXT_INSN (insn))
2207 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2212 /* Set the range of label numbers found in the current function.
2213 This is used when belatedly compiling an inline function. */
2216 set_new_first_and_last_label_num (first, last)
2219 base_label_num = label_num;
2220 first_label_num = first;
2221 last_label_num = last;
2224 /* Set the last label number found in the current function.
2225 This is used when belatedly compiling an inline function. */
2228 set_new_last_label_num (last)
2231 base_label_num = label_num;
2232 last_label_num = last;
2235 /* Restore all variables describing the current status from the structure *P.
2236 This is used after a nested function. */
2239 restore_emit_status (p)
2240 struct function *p ATTRIBUTE_UNUSED;
2245 /* Go through all the RTL insn bodies and copy any invalid shared
2246 structure. This routine should only be called once. */
2249 unshare_all_rtl (fndecl, insn)
2255 /* Make sure that virtual parameters are not shared. */
2256 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2257 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2259 /* Make sure that virtual stack slots are not shared. */
2260 unshare_all_decls (DECL_INITIAL (fndecl));
2262 /* Unshare just about everything else. */
2263 unshare_all_rtl_1 (insn);
2265 /* Make sure the addresses of stack slots found outside the insn chain
2266 (such as, in DECL_RTL of a variable) are not shared
2267 with the insn chain.
2269 This special care is necessary when the stack slot MEM does not
2270 actually appear in the insn chain. If it does appear, its address
2271 is unshared from all else at that point. */
2272 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2275 /* Go through all the RTL insn bodies and copy any invalid shared
2276 structure, again. This is a fairly expensive thing to do so it
2277 should be done sparingly. */
2280 unshare_all_rtl_again (insn)
2286 for (p = insn; p; p = NEXT_INSN (p))
2289 reset_used_flags (PATTERN (p));
2290 reset_used_flags (REG_NOTES (p));
2291 reset_used_flags (LOG_LINKS (p));
2294 /* Make sure that virtual stack slots are not shared. */
2295 reset_used_decls (DECL_INITIAL (cfun->decl));
2297 /* Make sure that virtual parameters are not shared. */
2298 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2299 reset_used_flags (DECL_RTL (decl));
2301 reset_used_flags (stack_slot_list);
2303 unshare_all_rtl (cfun->decl, insn);
2306 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2307 Assumes the mark bits are cleared at entry. */
2310 unshare_all_rtl_1 (insn)
2313 for (; insn; insn = NEXT_INSN (insn))
2316 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2317 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2318 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2322 /* Go through all virtual stack slots of a function and copy any
2323 shared structure. */
2325 unshare_all_decls (blk)
2330 /* Copy shared decls. */
2331 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2332 if (DECL_RTL_SET_P (t))
2333 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2335 /* Now process sub-blocks. */
2336 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2337 unshare_all_decls (t);
2340 /* Go through all virtual stack slots of a function and mark them as
2343 reset_used_decls (blk)
2349 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2350 if (DECL_RTL_SET_P (t))
2351 reset_used_flags (DECL_RTL (t));
2353 /* Now process sub-blocks. */
2354 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2355 reset_used_decls (t);
2358 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2359 placed in the result directly, rather than being copied. MAY_SHARE is
2360 either a MEM or an EXPR_LIST of MEMs. */
2363 copy_most_rtx (orig, may_share)
2370 const char *format_ptr;
2372 if (orig == may_share
2373 || (GET_CODE (may_share) == EXPR_LIST
2374 && in_expr_list_p (may_share, orig)))
2377 code = GET_CODE (orig);
2395 copy = rtx_alloc (code);
2396 PUT_MODE (copy, GET_MODE (orig));
2397 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2398 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2399 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2400 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2401 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2403 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2405 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2407 switch (*format_ptr++)
2410 XEXP (copy, i) = XEXP (orig, i);
2411 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2412 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2416 XEXP (copy, i) = XEXP (orig, i);
2421 XVEC (copy, i) = XVEC (orig, i);
2422 if (XVEC (orig, i) != NULL)
2424 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2425 for (j = 0; j < XVECLEN (copy, i); j++)
2426 XVECEXP (copy, i, j)
2427 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2432 XWINT (copy, i) = XWINT (orig, i);
2437 XINT (copy, i) = XINT (orig, i);
2441 XTREE (copy, i) = XTREE (orig, i);
2446 XSTR (copy, i) = XSTR (orig, i);
2450 /* Copy this through the wide int field; that's safest. */
2451 X0WINT (copy, i) = X0WINT (orig, i);
2461 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2462 Recursively does the same for subexpressions. */
2465 copy_rtx_if_shared (orig)
2471 const char *format_ptr;
2477 code = GET_CODE (x);
2479 /* These types may be freely shared. */
2493 /* A SCRATCH must be shared because each one represents a distinct value. */
2497 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2498 a LABEL_REF, it isn't sharable. */
2499 if (GET_CODE (XEXP (x, 0)) == PLUS
2500 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2501 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2510 /* The chain of insns is not being copied. */
2514 /* A MEM is allowed to be shared if its address is constant.
2516 We used to allow sharing of MEMs which referenced
2517 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2518 that can lose. instantiate_virtual_regs will not unshare
2519 the MEMs, and combine may change the structure of the address
2520 because it looks safe and profitable in one context, but
2521 in some other context it creates unrecognizable RTL. */
2522 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2531 /* This rtx may not be shared. If it has already been seen,
2532 replace it with a copy of itself. */
2534 if (RTX_FLAG (x, used))
2538 copy = rtx_alloc (code);
2540 (sizeof (*copy) - sizeof (copy->fld)
2541 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2545 RTX_FLAG (x, used) = 1;
2547 /* Now scan the subexpressions recursively.
2548 We can store any replaced subexpressions directly into X
2549 since we know X is not shared! Any vectors in X
2550 must be copied if X was copied. */
2552 format_ptr = GET_RTX_FORMAT (code);
2554 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2556 switch (*format_ptr++)
2559 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2563 if (XVEC (x, i) != NULL)
2566 int len = XVECLEN (x, i);
2568 if (copied && len > 0)
2569 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2570 for (j = 0; j < len; j++)
2571 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
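/* Illustrative sketch, not part of the original source: a pass that may have
   introduced sharing into a single pattern clears the mark bits and then
   unshares, mirroring what unshare_all_rtl_again does for the whole chain:

	reset_used_flags (PATTERN (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */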
2579 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2580 to look for shared sub-parts. */
2583 reset_used_flags (x)
2588 const char *format_ptr;
2593 code = GET_CODE (x);
2595 /* These types may be freely shared so we needn't do any resetting for them. */
2617 /* The chain of insns is not being copied. */
2624 RTX_FLAG (x, used) = 0;
2626 format_ptr = GET_RTX_FORMAT (code);
2627 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2629 switch (*format_ptr++)
2632 reset_used_flags (XEXP (x, i));
2636 for (j = 0; j < XVECLEN (x, i); j++)
2637 reset_used_flags (XVECEXP (x, i, j));
2643 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2644 Return X or the rtx for the pseudo reg the value of X was copied into.
2645 OTHER must be valid as a SET_DEST. */
2648 make_safe_from (x, other)
2652 switch (GET_CODE (other))
2655 other = SUBREG_REG (other);
2657 case STRICT_LOW_PART:
2660 other = XEXP (other, 0);
2666 if ((GET_CODE (other) == MEM
2668 && GET_CODE (x) != REG
2669 && GET_CODE (x) != SUBREG)
2670 || (GET_CODE (other) == REG
2671 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2672 || reg_mentioned_p (other, x))))
2674 rtx temp = gen_reg_rtx (GET_MODE (x));
2675 emit_move_insn (temp, x);
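/* Illustrative sketch, not part of the original source: an expander about to
   clobber TARGET while OP1 might still mention it would protect the operand
   first:

	op1 = make_safe_from (op1, target);

   OP1 and TARGET are hypothetical expander variables.  */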
2681 /* Emission of insns (adding them to the doubly-linked list). */
2683 /* Return the first insn of the current sequence or current function. */
2691 /* Specify a new insn as the first in the chain. */
2694 set_first_insn (insn)
2697 if (PREV_INSN (insn) != 0)
2702 /* Return the last insn emitted in current sequence or current function. */
2710 /* Specify a new insn as the last in the chain. */
2713 set_last_insn (insn)
2716 if (NEXT_INSN (insn) != 0)
2721 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2724 get_last_insn_anywhere ()
2726 struct sequence_stack *stack;
2729 for (stack = seq_stack; stack; stack = stack->next)
2730 if (stack->last != 0)
2735 /* Return the first nonnote insn emitted in current sequence or current
2736 function. This routine looks inside SEQUENCEs. */
2739 get_first_nonnote_insn ()
2741 rtx insn = first_insn;
2745 insn = next_insn (insn);
2746 if (insn == 0 || GET_CODE (insn) != NOTE)
2753 /* Return the last nonnote insn emitted in current sequence or current
2754 function. This routine looks inside SEQUENCEs. */
2757 get_last_nonnote_insn ()
2759 rtx insn = last_insn;
2763 insn = previous_insn (insn);
2764 if (insn == 0 || GET_CODE (insn) != NOTE)
2771 /* Return a number larger than any instruction's uid in this function. */
2776 return cur_insn_uid;
2779 /* Renumber instructions so that no instruction UIDs are wasted. */
2782 renumber_insns (stream)
2787 /* If we're not supposed to renumber instructions, don't. */
2788 if (!flag_renumber_insns)
2791 /* If there aren't that many instructions, then it's not really
2792 worth renumbering them. */
2793 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2798 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2801 fprintf (stream, "Renumbering insn %d to %d\n",
2802 INSN_UID (insn), cur_insn_uid);
2803 INSN_UID (insn) = cur_insn_uid++;
2807 /* Return the next insn. If it is a SEQUENCE, return the first insn of the sequence. */
2816 insn = NEXT_INSN (insn);
2817 if (insn && GET_CODE (insn) == INSN
2818 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2819 insn = XVECEXP (PATTERN (insn), 0, 0);
2825 /* Return the previous insn. If it is a SEQUENCE, return the last insn of the sequence. */
2829 previous_insn (insn)
2834 insn = PREV_INSN (insn);
2835 if (insn && GET_CODE (insn) == INSN
2836 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2837 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2843 /* Return the next insn after INSN that is not a NOTE. This routine does not
2844 look inside SEQUENCEs. */
2847 next_nonnote_insn (insn)
2852 insn = NEXT_INSN (insn);
2853 if (insn == 0 || GET_CODE (insn) != NOTE)
2860 /* Return the previous insn before INSN that is not a NOTE. This routine does
2861 not look inside SEQUENCEs. */
2864 prev_nonnote_insn (insn)
2869 insn = PREV_INSN (insn);
2870 if (insn == 0 || GET_CODE (insn) != NOTE)
2877 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2878 or 0, if there is none. This routine does not look inside SEQUENCEs. */
2882 next_real_insn (insn)
2887 insn = NEXT_INSN (insn);
2888 if (insn == 0 || GET_CODE (insn) == INSN
2889 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2896 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2897 or 0, if there is none. This routine does not look inside SEQUENCEs. */
2901 prev_real_insn (insn)
2906 insn = PREV_INSN (insn);
2907 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2908 || GET_CODE (insn) == JUMP_INSN)
2915 /* Find the next insn after INSN that really does something. This routine
2916 does not look inside SEQUENCEs. Until reload has completed, this is the
2917 same as next_real_insn. */
2920 active_insn_p (insn)
2923 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2924 || (GET_CODE (insn) == INSN
2925 && (! reload_completed
2926 || (GET_CODE (PATTERN (insn)) != USE
2927 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2931 next_active_insn (insn)
2936 insn = NEXT_INSN (insn);
2937 if (insn == 0 || active_insn_p (insn))
2944 /* Find the last insn before INSN that really does something. This routine
2945 does not look inside SEQUENCEs. Until reload has completed, this is the
2946 same as prev_real_insn. */
2949 prev_active_insn (insn)
2954 insn = PREV_INSN (insn);
2955 if (insn == 0 || active_insn_p (insn))
2962 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2970 insn = NEXT_INSN (insn);
2971 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2978 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2986 insn = PREV_INSN (insn);
2987 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2995 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2996 and REG_CC_USER notes so we can find it. */
2999 link_cc0_insns (insn)
3002 rtx user = next_nonnote_insn (insn);
3004 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3005 user = XVECEXP (PATTERN (user), 0, 0);
3007 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3009 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3012 /* Return the next insn that uses CC0 after INSN, which is assumed to
3013 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3014 applied to the result of this function should yield INSN).
3016 Normally, this is simply the next insn. However, if a REG_CC_USER note
3017 is present, it contains the insn that uses CC0.
3019 Return 0 if we can't find the insn. */
3022 next_cc0_user (insn)
3025 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3028 return XEXP (note, 0);
3030 insn = next_nonnote_insn (insn);
3031 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3032 insn = XVECEXP (PATTERN (insn), 0, 0);
3034 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3040 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3041 note, it is the previous insn. */
3044 prev_cc0_setter (insn)
3047 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3050 return XEXP (note, 0);
3052 insn = prev_nonnote_insn (insn);
3053 if (! sets_cc0_p (PATTERN (insn)))
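/* Illustrative sketch, not part of the original source: on a cc0 machine, a
   pass moving a comparison into a delay slot records the pairing and can
   later recover the insn that consumes the condition code:

	link_cc0_insns (compare_insn);
	...
	user = next_cc0_user (compare_insn);

   COMPARE_INSN is a hypothetical cc0-setting insn.  */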
3060 /* Increment the label uses for all labels present in rtx. */
3063 mark_label_nuses (x)
3070 code = GET_CODE (x);
3071 if (code == LABEL_REF)
3072 LABEL_NUSES (XEXP (x, 0))++;
3074 fmt = GET_RTX_FORMAT (code);
3075 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3078 mark_label_nuses (XEXP (x, i));
3079 else if (fmt[i] == 'E')
3080 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3081 mark_label_nuses (XVECEXP (x, i, j));
3086 /* Try splitting insns that can be split for better scheduling.
3087 PAT is the pattern which might split.
3088 TRIAL is the insn providing PAT.
3089 LAST is non-zero if we should return the last insn of the sequence produced.
3091 If this routine succeeds in splitting, it returns the first or last
3092 replacement insn depending on the value of LAST. Otherwise, it
3093 returns TRIAL. If the insn to be returned can be split, it will be. */
3096 try_split (pat, trial, last)
3100 rtx before = PREV_INSN (trial);
3101 rtx after = NEXT_INSN (trial);
3102 int has_barrier = 0;
3107 if (any_condjump_p (trial)
3108 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3109 split_branch_probability = INTVAL (XEXP (note, 0));
3110 probability = split_branch_probability;
3112 seq = split_insns (pat, trial);
3114 split_branch_probability = -1;
3116 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3117 We may need to handle this specially. */
3118 if (after && GET_CODE (after) == BARRIER)
3121 after = NEXT_INSN (after);
3126 /* Sometimes there will be only one insn in that list, this case will
3127 normally arise only when we want it in turn to be split (SFmode on
3128 the 29k is an example). */
3129 if (NEXT_INSN (seq) != NULL_RTX)
3131 rtx insn_last, insn;
3134 /* Avoid infinite loop if any insn of the result matches
3135 the original pattern. */
3139 if (INSN_P (insn_last)
3140 && rtx_equal_p (PATTERN (insn_last), pat))
3142 if (NEXT_INSN (insn_last) == NULL_RTX)
3144 insn_last = NEXT_INSN (insn_last);
3149 while (insn != NULL_RTX)
3151 if (GET_CODE (insn) == JUMP_INSN)
3153 mark_jump_label (PATTERN (insn), insn, 0);
3155 if (probability != -1
3156 && any_condjump_p (insn)
3157 && !find_reg_note (insn, REG_BR_PROB, 0))
3159 /* We can preserve the REG_BR_PROB notes only if exactly
3160 one jump is created, otherwise the machine description
3161 is responsible for this step using
3162 split_branch_probability variable. */
3166 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3167 GEN_INT (probability),
3172 insn = PREV_INSN (insn);
3175 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3176 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3177 if (GET_CODE (trial) == CALL_INSN)
3180 while (insn != NULL_RTX)
3182 if (GET_CODE (insn) == CALL_INSN)
3183 CALL_INSN_FUNCTION_USAGE (insn)
3184 = CALL_INSN_FUNCTION_USAGE (trial);
3186 insn = PREV_INSN (insn);
3190 /* Copy notes, particularly those related to the CFG. */
3191 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3193 switch (REG_NOTE_KIND (note))
3197 while (insn != NULL_RTX)
3199 if (GET_CODE (insn) == CALL_INSN
3200 || (flag_non_call_exceptions
3201 && may_trap_p (PATTERN (insn))))
3203 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3206 insn = PREV_INSN (insn);
3212 case REG_ALWAYS_RETURN:
3214 while (insn != NULL_RTX)
3216 if (GET_CODE (insn) == CALL_INSN)
3218 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3221 insn = PREV_INSN (insn);
3225 case REG_NON_LOCAL_GOTO:
3227 while (insn != NULL_RTX)
3229 if (GET_CODE (insn) == JUMP_INSN)
3231 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3234 insn = PREV_INSN (insn);
3243 /* If there are LABELS inside the split insns, increment the
3244 usage count so we don't delete the label. */
3245 if (GET_CODE (trial) == INSN)
3248 while (insn != NULL_RTX)
3250 if (GET_CODE (insn) == INSN)
3251 mark_label_nuses (PATTERN (insn));
3253 insn = PREV_INSN (insn);
3257 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3259 delete_insn (trial);
3261 emit_barrier_after (tem);
3263 /* Recursively call try_split for each new insn created; by the
3264 time control returns here that insn will be fully split, so
3265 set LAST and continue from the insn after the one returned.
3266 We can't use next_active_insn here since AFTER may be a note.
3267 Ignore deleted insns, which can occur if not optimizing. */
3268 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3269 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3270 tem = try_split (PATTERN (tem), tem, 1);
3272 /* Avoid infinite loop if the result matches the original pattern. */
3273 else if (rtx_equal_p (PATTERN (seq), pat))
3277 PATTERN (trial) = PATTERN (seq);
3278 INSN_CODE (trial) = -1;
3279 try_split (PATTERN (trial), trial, last);
3282 /* Return either the first or the last insn, depending on which was requested. */
3285 ? (after ? PREV_INSN (after) : last_insn)
3286 : NEXT_INSN (before);
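/* Illustrative sketch, not part of the original source: a pass that wants
   every insn in its final, fully split form can run try_split over the whole
   chain and continue from whatever insn it returns:

	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    insn = try_split (PATTERN (insn), insn, 1);  */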
3292 /* Make and return an INSN rtx, initializing all its slots.
3293 Store PATTERN in the pattern slots. */
3296 make_insn_raw (pattern)
3301 insn = rtx_alloc (INSN);
3303 INSN_UID (insn) = cur_insn_uid++;
3304 PATTERN (insn) = pattern;
3305 INSN_CODE (insn) = -1;
3306 LOG_LINKS (insn) = NULL;
3307 REG_NOTES (insn) = NULL;
3308 INSN_SCOPE (insn) = NULL;
3309 BLOCK_FOR_INSN (insn) = NULL;
3311 #ifdef ENABLE_RTL_CHECKING
3314 && (returnjump_p (insn)
3315 || (GET_CODE (insn) == SET
3316 && SET_DEST (insn) == pc_rtx)))
3318 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3326 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3329 make_jump_insn_raw (pattern)
3334 insn = rtx_alloc (JUMP_INSN);
3335 INSN_UID (insn) = cur_insn_uid++;
3337 PATTERN (insn) = pattern;
3338 INSN_CODE (insn) = -1;
3339 LOG_LINKS (insn) = NULL;
3340 REG_NOTES (insn) = NULL;
3341 JUMP_LABEL (insn) = NULL;
3342 INSN_SCOPE (insn) = NULL;
3343 BLOCK_FOR_INSN (insn) = NULL;
3348 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3351 make_call_insn_raw (pattern)
3356 insn = rtx_alloc (CALL_INSN);
3357 INSN_UID (insn) = cur_insn_uid++;
3359 PATTERN (insn) = pattern;
3360 INSN_CODE (insn) = -1;
3361 LOG_LINKS (insn) = NULL;
3362 REG_NOTES (insn) = NULL;
3363 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3364 INSN_SCOPE (insn) = NULL;
3365 BLOCK_FOR_INSN (insn) = NULL;
3370 /* Add INSN to the end of the doubly-linked list.
3371 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3377 PREV_INSN (insn) = last_insn;
3378 NEXT_INSN (insn) = 0;
3380 if (NULL != last_insn)
3381 NEXT_INSN (last_insn) = insn;
3383 if (NULL == first_insn)
3389 /* Add INSN into the doubly-linked list after insn AFTER. This and
3390 the next should be the only functions called to insert an insn once
3391 delay slots have been filled since only they know how to update a SEQUENCE. */
3395 add_insn_after (insn, after)
3398 rtx next = NEXT_INSN (after);
3401 if (optimize && INSN_DELETED_P (after))
3404 NEXT_INSN (insn) = next;
3405 PREV_INSN (insn) = after;
3409 PREV_INSN (next) = insn;
3410 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3411 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3413 else if (last_insn == after)
3417 struct sequence_stack *stack = seq_stack;
3418 /* Scan all pending sequences too. */
3419 for (; stack; stack = stack->next)
3420 if (after == stack->last)
3430 if (GET_CODE (after) != BARRIER
3431 && GET_CODE (insn) != BARRIER
3432 && (bb = BLOCK_FOR_INSN (after)))
3434 set_block_for_insn (insn, bb);
3436 bb->flags |= BB_DIRTY;
3437 /* Should not happen as first in the BB is always
3438 either NOTE or LABEL. */
3439 if (bb->end == after
3440 /* Avoid clobbering of structure when creating new BB. */
3441 && GET_CODE (insn) != BARRIER
3442 && (GET_CODE (insn) != NOTE
3443 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3447 NEXT_INSN (after) = insn;
3448 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3450 rtx sequence = PATTERN (after);
3451 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3455 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3456 the previous should be the only functions called to insert an insn once
3457 delay slots have been filled since only they know how to update a SEQUENCE. */
3461 add_insn_before (insn, before)
3464 rtx prev = PREV_INSN (before);
3467 if (optimize && INSN_DELETED_P (before))
3470 PREV_INSN (insn) = prev;
3471 NEXT_INSN (insn) = before;
3475 NEXT_INSN (prev) = insn;
3476 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3478 rtx sequence = PATTERN (prev);
3479 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3482 else if (first_insn == before)
3486 struct sequence_stack *stack = seq_stack;
3487 /* Scan all pending sequences too. */
3488 for (; stack; stack = stack->next)
3489 if (before == stack->first)
3491 stack->first = insn;
3499 if (GET_CODE (before) != BARRIER
3500 && GET_CODE (insn) != BARRIER
3501 && (bb = BLOCK_FOR_INSN (before)))
3503 set_block_for_insn (insn, bb);
3505 bb->flags |= BB_DIRTY;
3506 /* Should not happen as first in the BB is always
3507 either NOTE or LABEL. */
3508 if (bb->head == insn
3509 /* Avoid clobbering of structure when creating new BB. */
3510 && GET_CODE (insn) != BARRIER
3511 && (GET_CODE (insn) != NOTE
3512 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3516 PREV_INSN (before) = insn;
3517 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3518 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3521 /* Remove an insn from its doubly-linked list. This function knows how
3522 to handle sequences. */
3527 rtx next = NEXT_INSN (insn);
3528 rtx prev = PREV_INSN (insn);
3533 NEXT_INSN (prev) = next;
3534 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3536 rtx sequence = PATTERN (prev);
3537 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3540 else if (first_insn == insn)
3544 struct sequence_stack *stack = seq_stack;
3545 /* Scan all pending sequences too. */
3546 for (; stack; stack = stack->next)
3547 if (insn == stack->first)
3549 stack->first = next;
3559 PREV_INSN (next) = prev;
3560 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3561 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3563 else if (last_insn == insn)
3567 struct sequence_stack *stack = seq_stack;
3568 /* Scan all pending sequences too. */
3569 for (; stack; stack = stack->next)
3570 if (insn == stack->last)
3579 if (GET_CODE (insn) != BARRIER
3580 && (bb = BLOCK_FOR_INSN (insn)))
3583 bb->flags |= BB_DIRTY;
3584 if (bb->head == insn)
3586 /* Never ever delete the basic block note without deleting the whole basic block. */
3588 if (GET_CODE (insn) == NOTE)
3592 if (bb->end == insn)
3597 /* Delete all insns made since FROM.
3598 FROM becomes the new last instruction. */
3601 delete_insns_since (from)
3607 NEXT_INSN (from) = 0;
3611 /* This function is deprecated; please use sequences instead.
3613 Move a consecutive bunch of insns to a different place in the chain.
3614 The insns to be moved are those between FROM and TO.
3615 They are moved to a new position after the insn AFTER.
3616 AFTER must not be FROM or TO or any insn in between.
3618 This function does not know about SEQUENCEs and hence should not be
3619 called after delay-slot filling has been done. */
3622 reorder_insns_nobb (from, to, after)
3623 rtx from, to, after;
3625 /* Splice this bunch out of where it is now. */
3626 if (PREV_INSN (from))
3627 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3629 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3630 if (last_insn == to)
3631 last_insn = PREV_INSN (from);
3632 if (first_insn == from)
3633 first_insn = NEXT_INSN (to);
3635 /* Make the new neighbors point to it and it to them. */
3636 if (NEXT_INSN (after))
3637 PREV_INSN (NEXT_INSN (after)) = to;
3639 NEXT_INSN (to) = NEXT_INSN (after);
3640 PREV_INSN (from) = after;
3641 NEXT_INSN (after) = from;
3642 if (after == last_insn)
3646 /* Same as function above, but take care to update BB boundaries. */
3648 reorder_insns (from, to, after)
3649 rtx from, to, after;
3651 rtx prev = PREV_INSN (from);
3652 basic_block bb, bb2;
3654 reorder_insns_nobb (from, to, after);
3656 if (GET_CODE (after) != BARRIER
3657 && (bb = BLOCK_FOR_INSN (after)))
3660 bb->flags |= BB_DIRTY;
3662 if (GET_CODE (from) != BARRIER
3663 && (bb2 = BLOCK_FOR_INSN (from)))
3667 bb2->flags |= BB_DIRTY;
3670 if (bb->end == after)
3673 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3674 set_block_for_insn (x, bb);
3678 /* Return the line note insn preceding INSN. */
3681 find_line_note (insn)
3684 if (no_line_numbers)
3687 for (; insn; insn = PREV_INSN (insn))
3688 if (GET_CODE (insn) == NOTE
3689 && NOTE_LINE_NUMBER (insn) >= 0)
3695 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3696 of the moved insns when debugging. This may insert a note between AFTER
3697 and FROM, and another one after TO. */
3700 reorder_insns_with_line_notes (from, to, after)
3701 rtx from, to, after;
3703 rtx from_line = find_line_note (from);
3704 rtx after_line = find_line_note (after);
3706 reorder_insns (from, to, after);
3708 if (from_line == after_line)
3712 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3713 NOTE_LINE_NUMBER (from_line),
3716 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3717 NOTE_LINE_NUMBER (after_line),
3721 /* Remove unnecessary notes from the instruction stream. */
3724 remove_unnecessary_notes ()
3726 rtx block_stack = NULL_RTX;
3727 rtx eh_stack = NULL_RTX;
3732 /* We must not remove the first instruction in the function because
3733 the compiler depends on the first instruction being a note. */
3734 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3736 /* Remember what's next. */
3737 next = NEXT_INSN (insn);
3739 /* We're only interested in notes. */
3740 if (GET_CODE (insn) != NOTE)
3743 switch (NOTE_LINE_NUMBER (insn))
3745 case NOTE_INSN_DELETED:
3746 case NOTE_INSN_LOOP_END_TOP_COND:
3750 case NOTE_INSN_EH_REGION_BEG:
3751 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3754 case NOTE_INSN_EH_REGION_END:
3755 /* Too many end notes. */
3756 if (eh_stack == NULL_RTX)
3758 /* Mismatched nesting. */
3759 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3762 eh_stack = XEXP (eh_stack, 1);
3763 free_INSN_LIST_node (tmp);
3766 case NOTE_INSN_BLOCK_BEG:
3767 /* By now, all notes indicating lexical blocks should have
3768 NOTE_BLOCK filled in. */
3769 if (NOTE_BLOCK (insn) == NULL_TREE)
3771 block_stack = alloc_INSN_LIST (insn, block_stack);
3774 case NOTE_INSN_BLOCK_END:
3775 /* Too many end notes. */
3776 if (block_stack == NULL_RTX)
3778 /* Mismatched nesting. */
3779 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3782 block_stack = XEXP (block_stack, 1);
3783 free_INSN_LIST_node (tmp);
3785 /* Scan back to see if there are any non-note instructions
3786 between INSN and the beginning of this block. If not,
3787 then there is no PC range in the generated code that will
3788 actually be in this block, so there's no point in
3789 remembering the existence of the block. */
3790 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3792 /* This block contains a real instruction. Note that we
3793 don't include labels; if the only thing in the block
3794 is a label, then there are still no PC values that
3795 lie within the block. */
3799 /* We're only interested in NOTEs. */
3800 if (GET_CODE (tmp) != NOTE)
3803 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3805 /* We just verified that this BLOCK matches us with
3806 the block_stack check above. Never delete the
3807 BLOCK for the outermost scope of the function; we
3808 can refer to names from that scope even if the
3809 block notes are messed up. */
3810 if (! is_body_block (NOTE_BLOCK (insn))
3811 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3818 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3819 /* There's a nested block. We need to leave the
3820 current block in place since otherwise the debugger
3821 wouldn't be able to show symbols from our block in
3822 the nested block. */
3828 /* Too many begin notes. */
3829 if (block_stack || eh_stack)
3834 /* Emit insn(s) of given code and pattern
3835 at a specified place within the doubly-linked list.
3837 All of the emit_foo global entry points accept an object
3838 X which is either an insn list or a PATTERN of a single insn.
3841 There are thus a few canonical ways to generate code and
3842 emit it at a specific place in the instruction stream. For
3843 example, consider the instruction named SPOT and the fact that
3844 we would like to emit some instructions before SPOT. We might
3845 do it like this:
3847 start_sequence ();
3848 ... emit the new instructions ...
3849 insns_head = get_insns ();
3850 end_sequence ();
3852 emit_insn_before (insns_head, SPOT);
3854 It used to be common to generate SEQUENCE rtl instead, but that
3855 is a relic of the past which no longer occurs. The reason is that
3856 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
3857 generated would almost certainly die right after it was created. */
3859 /* Make X be output before the instruction BEFORE. */
3862 emit_insn_before (x, before)
3868 #ifdef ENABLE_RTL_CHECKING
3869 if (before == NULL_RTX)
3876 switch (GET_CODE (x))
3887 rtx next = NEXT_INSN (insn);
3888 add_insn_before (insn, before);
3894 #ifdef ENABLE_RTL_CHECKING
3901 last = make_insn_raw (x);
3902 add_insn_before (last, before);
3909 /* Make an instruction with body X and code JUMP_INSN
3910 and output it before the instruction BEFORE. */
3913 emit_jump_insn_before (x, before)
3918 #ifdef ENABLE_RTL_CHECKING
3919 if (before == NULL_RTX)
3923 switch (GET_CODE (x))
3934 rtx next = NEXT_INSN (insn);
3935 add_insn_before (insn, before);
3941 #ifdef ENABLE_RTL_CHECKING
3948 last = make_jump_insn_raw (x);
3949 add_insn_before (last, before);
3956 /* Make an instruction with body X and code CALL_INSN
3957 and output it before the instruction BEFORE. */
3960 emit_call_insn_before (x, before)
3965 #ifdef ENABLE_RTL_CHECKING
3966 if (before == NULL_RTX)
3970 switch (GET_CODE (x))
3981 rtx next = NEXT_INSN (insn);
3982 add_insn_before (insn, before);
3988 #ifdef ENABLE_RTL_CHECKING
3995 last = make_call_insn_raw (x);
3996 add_insn_before (last, before);
4003 /* Make an insn of code BARRIER
4004 and output it before the insn BEFORE. */
4007 emit_barrier_before (before)
4010 rtx insn = rtx_alloc (BARRIER);
4012 INSN_UID (insn) = cur_insn_uid++;
4014 add_insn_before (insn, before);
4018 /* Emit the label LABEL before the insn BEFORE. */
4021 emit_label_before (label, before)
4024 /* This can be called twice for the same label as a result of the
4025 confusion that follows a syntax error! So make it harmless. */
4026 if (INSN_UID (label) == 0)
4028 INSN_UID (label) = cur_insn_uid++;
4029 add_insn_before (label, before);
4035 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4038 emit_note_before (subtype, before)
4042 rtx note = rtx_alloc (NOTE);
4043 INSN_UID (note) = cur_insn_uid++;
4044 NOTE_SOURCE_FILE (note) = 0;
4045 NOTE_LINE_NUMBER (note) = subtype;
4046 BLOCK_FOR_INSN (note) = NULL;
4048 add_insn_before (note, before);
4052 /* Helper for emit_insn_after, handles lists of instructions efficiently. */
4055 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4058 emit_insn_after_1 (first, after)
4065 if (GET_CODE (after) != BARRIER
4066 && (bb = BLOCK_FOR_INSN (after)))
4068 bb->flags |= BB_DIRTY;
4069 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4070 if (GET_CODE (last) != BARRIER)
4071 set_block_for_insn (last, bb);
4072 if (GET_CODE (last) != BARRIER)
4073 set_block_for_insn (last, bb);
4074 if (bb->end == after)
4078 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4081 after_after = NEXT_INSN (after);
4083 NEXT_INSN (after) = first;
4084 PREV_INSN (first) = after;
4085 NEXT_INSN (last) = after_after;
4087 PREV_INSN (after_after) = last;
4089 if (after == last_insn)
4094 /* Make X be output after the insn AFTER. */
4097 emit_insn_after (x, after)
4102 #ifdef ENABLE_RTL_CHECKING
4103 if (after == NULL_RTX)
4110 switch (GET_CODE (x))
4118 last = emit_insn_after_1 (x, after);
4121 #ifdef ENABLE_RTL_CHECKING
4128 last = make_insn_raw (x);
4129 add_insn_after (last, after);
4136 /* Similar to emit_insn_after, except that line notes are to be inserted so
4137 as to act as if this insn were at FROM. */
4140 emit_insn_after_with_line_notes (x, after, from)
4143 rtx from_line = find_line_note (from);
4144 rtx after_line = find_line_note (after);
4145 rtx insn = emit_insn_after (x, after);
4148 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4149 NOTE_LINE_NUMBER (from_line),
4153 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4154 NOTE_LINE_NUMBER (after_line),
4158 /* Make an insn of code JUMP_INSN with body X
4159 and output it after the insn AFTER. */
4162 emit_jump_insn_after (x, after)
4167 #ifdef ENABLE_RTL_CHECKING
4168 if (after == NULL_RTX)
4172 switch (GET_CODE (x))
4180 last = emit_insn_after_1 (x, after);
4183 #ifdef ENABLE_RTL_CHECKING
4190 last = make_jump_insn_raw (x);
4191 add_insn_after (last, after);
4198 /* Make an instruction with body X and code CALL_INSN
4199 and output it after the instruction AFTER. */
4202 emit_call_insn_after (x, after)
4207 #ifdef ENABLE_RTL_CHECKING
4208 if (after == NULL_RTX)
4212 switch (GET_CODE (x))
4220 last = emit_insn_after_1 (x, after);
4223 #ifdef ENABLE_RTL_CHECKING
4230 last = make_call_insn_raw (x);
4231 add_insn_after (last, after);
4238 /* Make an insn of code BARRIER
4239 and output it after the insn AFTER. */
4242 emit_barrier_after (after)
4245 rtx insn = rtx_alloc (BARRIER);
4247 INSN_UID (insn) = cur_insn_uid++;
4249 add_insn_after (insn, after);
4253 /* Emit the label LABEL after the insn AFTER. */
4256 emit_label_after (label, after)
4259 /* This can be called twice for the same label
4260 as a result of the confusion that follows a syntax error!
4261 So make it harmless. */
4262 if (INSN_UID (label) == 0)
4264 INSN_UID (label) = cur_insn_uid++;
4265 add_insn_after (label, after);
4271 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4274 emit_note_after (subtype, after)
4278 rtx note = rtx_alloc (NOTE);
4279 INSN_UID (note) = cur_insn_uid++;
4280 NOTE_SOURCE_FILE (note) = 0;
4281 NOTE_LINE_NUMBER (note) = subtype;
4282 BLOCK_FOR_INSN (note) = NULL;
4283 add_insn_after (note, after);
4287 /* Emit a line note for FILE and LINE after the insn AFTER. */
4290 emit_line_note_after (file, line, after)
4297 if (no_line_numbers && line > 0)
4303 note = rtx_alloc (NOTE);
4304 INSN_UID (note) = cur_insn_uid++;
4305 NOTE_SOURCE_FILE (note) = file;
4306 NOTE_LINE_NUMBER (note) = line;
4307 BLOCK_FOR_INSN (note) = NULL;
4308 add_insn_after (note, after);
4312 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4314 emit_insn_after_scope (pattern, after, scope)
4318 rtx last = emit_insn_after (pattern, after);
4320 after = NEXT_INSN (after);
4323 if (active_insn_p (after))
4324 INSN_SCOPE (after) = scope;
4327 after = NEXT_INSN (after);
4332 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4334 emit_jump_insn_after_scope (pattern, after, scope)
4338 rtx last = emit_jump_insn_after (pattern, after);
4340 after = NEXT_INSN (after);
4343 if (active_insn_p (after))
4344 INSN_SCOPE (after) = scope;
4347 after = NEXT_INSN (after);
4352 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4354 emit_call_insn_after_scope (pattern, after, scope)
4358 rtx last = emit_call_insn_after (pattern, after);
4360 after = NEXT_INSN (after);
4363 if (active_insn_p (after))
4364 INSN_SCOPE (after) = scope;
4367 after = NEXT_INSN (after);
4372 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4374 emit_insn_before_scope (pattern, before, scope)
4375 rtx pattern, before;
4378 rtx first = PREV_INSN (before);
4379 rtx last = emit_insn_before (pattern, before);
4381 first = NEXT_INSN (first);
4384 if (active_insn_p (first))
4385 INSN_SCOPE (first) = scope;
4388 first = NEXT_INSN (first);
4393 /* Take X and emit it at the end of the doubly-linked INSN list.
4396 Returns the last insn emitted. */
4402 rtx last = last_insn;
4408 switch (GET_CODE (x))
4419 rtx next = NEXT_INSN (insn);
4426 #ifdef ENABLE_RTL_CHECKING
4433 last = make_insn_raw (x);
4441 /* Make an insn of code JUMP_INSN with pattern X
4442 and add it to the end of the doubly-linked list. */
4450 switch (GET_CODE (x))
4461 rtx next = NEXT_INSN (insn);
4468 #ifdef ENABLE_RTL_CHECKING
4475 last = make_jump_insn_raw (x);
4483 /* Make an insn of code CALL_INSN with pattern X
4484 and add it to the end of the doubly-linked list. */
4492 switch (GET_CODE (x))
4500 insn = emit_insn (x);
4503 #ifdef ENABLE_RTL_CHECKING
4510 insn = make_call_insn_raw (x);
4518 /* Add the label LABEL to the end of the doubly-linked list. */
4524 /* This can be called twice for the same label
4525 as a result of the confusion that follows a syntax error!
4526 So make it harmless. */
4527 if (INSN_UID (label) == 0)
4529 INSN_UID (label) = cur_insn_uid++;
4535 /* Make an insn of code BARRIER
4536 and add it to the end of the doubly-linked list. */
4541 rtx barrier = rtx_alloc (BARRIER);
4542 INSN_UID (barrier) = cur_insn_uid++;
4547 /* Make an insn of code NOTE
4548 with data-fields specified by FILE and LINE
4549 and add it to the end of the doubly-linked list,
4550 but only if line-numbers are desired for debugging info. */
4553 emit_line_note (file, line)
4557 set_file_and_line_for_stmt (file, line);
4560 if (no_line_numbers)
4564 return emit_note (file, line);
4567 /* Make an insn of code NOTE
4568 with data-fields specified by FILE and LINE
4569 and add it to the end of the doubly-linked list.
4570 If it is a line-number NOTE, omit it if it matches the previous one. */
4573 emit_note (file, line)
4581 if (file && last_filename && !strcmp (file, last_filename)
4582 && line == last_linenum)
4584 last_filename = file;
4585 last_linenum = line;
4588 if (no_line_numbers && line > 0)
4594 note = rtx_alloc (NOTE);
4595 INSN_UID (note) = cur_insn_uid++;
4596 NOTE_SOURCE_FILE (note) = file;
4597 NOTE_LINE_NUMBER (note) = line;
4598 BLOCK_FOR_INSN (note) = NULL;
4603 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4606 emit_line_note_force (file, line)
4611 return emit_line_note (file, line);
4614 /* Cause next statement to emit a line note even if the line number
4615 has not changed. This is used at the beginning of a function. */
4618 force_next_line_note ()
4623 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4624 note of this type already exists, remove it first. */
4627 set_unique_reg_note (insn, kind, datum)
4632 rtx note = find_reg_note (insn, kind, NULL_RTX);
4638 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4639 has multiple sets (some callers assume single_set
4640 means the insn only has one set, when in fact it
4641 means the insn only has one * useful * set). */
4642 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4649 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4650 It serves no useful purpose and breaks eliminate_regs. */
4651 if (GET_CODE (datum) == ASM_OPERANDS)
4661 XEXP (note, 0) = datum;
4665 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4666 return REG_NOTES (insn);
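/* Illustrative sketch, not part of the original source: after emitting an
   insn that computes a value in some cheaper form, a caller can record the
   original expression so later passes may substitute it:

	insn = emit_move_insn (target, cheap_form);
	set_unique_reg_note (insn, REG_EQUAL, original_expr);

   TARGET, CHEAP_FORM and ORIGINAL_EXPR are hypothetical caller variables.  */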
4669 /* Return an indication of which type of insn should have X as a body.
4670 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4676 if (GET_CODE (x) == CODE_LABEL)
4678 if (GET_CODE (x) == CALL)
4680 if (GET_CODE (x) == RETURN)
4682 if (GET_CODE (x) == SET)
4684 if (SET_DEST (x) == pc_rtx)
4686 else if (GET_CODE (SET_SRC (x)) == CALL)
4691 if (GET_CODE (x) == PARALLEL)
4694 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4695 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4697 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4698 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4700 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4701 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4707 /* Emit the rtl pattern X as an appropriate kind of insn.
4708 If X is a label, it is simply added into the insn chain. */
4714 enum rtx_code code = classify_insn (x);
4716 if (code == CODE_LABEL)
4717 return emit_label (x);
4718 else if (code == INSN)
4719 return emit_insn (x);
4720 else if (code == JUMP_INSN)
4722 rtx insn = emit_jump_insn (x);
4723 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4724 return emit_barrier ();
4727 else if (code == CALL_INSN)
4728 return emit_call_insn (x);
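/* Illustrative sketch, not part of the original source: emit lets a caller
   hand over a pattern without knowing in advance what kind of insn it needs;
   a SET whose destination is the pc is classified as a jump, an ordinary SET
   as a plain insn:

	emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));
	emit (gen_rtx_SET (VOIDmode, dest_reg, src));

   LABEL, DEST_REG and SRC are hypothetical caller variables.  */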
4733 /* Space for free sequence stack entries. */
4734 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4736 /* Begin emitting insns to a sequence which can be packaged in an
4737 RTL_EXPR. If this sequence will contain something that might cause
4738 the compiler to pop arguments to function calls (because those
4739 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4740 details), use do_pending_stack_adjust before calling this function.
4741 That will ensure that the deferred pops are not accidentally
4742 emitted in the middle of this sequence. */
4747 struct sequence_stack *tem;
4749 if (free_sequence_stack != NULL)
4751 tem = free_sequence_stack;
4752 free_sequence_stack = tem->next;
4755 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4757 tem->next = seq_stack;
4758 tem->first = first_insn;
4759 tem->last = last_insn;
4760 tem->sequence_rtl_expr = seq_rtl_expr;
4768 /* Similarly, but indicate that this sequence will be placed in T, an
4769 RTL_EXPR. See the documentation for start_sequence for more
4770 information about how to use this function. */
4773 start_sequence_for_rtl_expr (t)
4781 /* Set up the insn chain starting with FIRST as the current sequence,
4782 saving the previously current one. See the documentation for
4783 start_sequence for more information about how to use this function. */
4786 push_to_sequence (first)
4793 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4799 /* Set up the insn chain from a chain starting at FIRST and ending at LAST. */
4802 push_to_full_sequence (first, last)
4808 /* We really should have the end of the insn chain here. */
4809 if (last && NEXT_INSN (last))
4813 /* Set up the outer-level insn chain
4814 as the current sequence, saving the previously current one. */
4817 push_topmost_sequence ()
4819 struct sequence_stack *stack, *top = NULL;
4823 for (stack = seq_stack; stack; stack = stack->next)
4826 first_insn = top->first;
4827 last_insn = top->last;
4828 seq_rtl_expr = top->sequence_rtl_expr;
4831 /* After emitting to the outer-level insn chain, update the outer-level
4832 insn chain, and restore the previous saved state. */
4835 pop_topmost_sequence ()
4837 struct sequence_stack *stack, *top = NULL;
4839 for (stack = seq_stack; stack; stack = stack->next)
4842 top->first = first_insn;
4843 top->last = last_insn;
4844 /* ??? Why don't we save seq_rtl_expr here? */
4849 /* After emitting to a sequence, restore previous saved state.
4851 To get the contents of the sequence just made, you must call
4852 `get_insns' *before* calling here.
4854 If the compiler might have deferred popping arguments while
4855 generating this sequence, and this sequence will not be immediately
4856 inserted into the instruction stream, use do_pending_stack_adjust
4857 before calling get_insns. That will ensure that the deferred
4858 pops are inserted into this sequence, and not into some random
4859 location in the instruction stream. See INHIBIT_DEFER_POP for more
4860 information about deferred popping of arguments. */
4865 struct sequence_stack *tem = seq_stack;
4867 first_insn = tem->first;
4868 last_insn = tem->last;
4869 seq_rtl_expr = tem->sequence_rtl_expr;
4870 seq_stack = tem->next;
4872 memset (tem, 0, sizeof (*tem));
4873 tem->next = free_sequence_stack;
4874 free_sequence_stack = tem;
4877 /* This works like end_sequence, but records the old sequence in FIRST and LAST. */
4881 end_full_sequence (first, last)
4884 *first = first_insn;
4889 /* Return 1 if currently emitting into a sequence. */
4894 return seq_stack != 0;
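/* Illustrative sketch, not part of the original source: while a nested
   sequence is open, something can still be emitted at the end of the
   function's main chain by bracketing the emission with the topmost-sequence
   pair described above:

	push_topmost_sequence ();
	emit_insn (pat);
	pop_topmost_sequence ();

   PAT here stands for a hypothetical ready-made insn pattern.  */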
4897 /* Put the various virtual registers into REGNO_REG_RTX. */
4900 init_virtual_regs (es)
4901 struct emit_status *es;
4903 rtx *ptr = es->x_regno_reg_rtx;
4904 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4905 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4906 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4907 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4908 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4912 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4913 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4914 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4915 static int copy_insn_n_scratches;
4917 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4918 copied an ASM_OPERANDS.
4919 In that case, it is the original input-operand vector. */
4920 static rtvec orig_asm_operands_vector;
4922 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4923 copied an ASM_OPERANDS.
4924 In that case, it is the copied input-operand vector. */
4925 static rtvec copy_asm_operands_vector;
4927 /* Likewise for the constraints vector. */
4928 static rtvec orig_asm_constraints_vector;
4929 static rtvec copy_asm_constraints_vector;
4931 /* Recursively create a new copy of an rtx for copy_insn.
4932 This function differs from copy_rtx in that it handles SCRATCHes and
4933 ASM_OPERANDs properly.
4934 Normally, this function is not used directly; use copy_insn as front end.
4935 However, you could first copy an insn pattern with copy_insn and then use
4936 this function afterwards to properly copy any REG_NOTEs containing SCRATCHes. */
4946 const char *format_ptr;
4948 code = GET_CODE (orig);
4965 for (i = 0; i < copy_insn_n_scratches; i++)
4966 if (copy_insn_scratch_in[i] == orig)
4967 return copy_insn_scratch_out[i];
4971 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4972 a LABEL_REF, it isn't sharable. */
4973 if (GET_CODE (XEXP (orig, 0)) == PLUS
4974 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4975 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4979 /* A MEM with a constant address is not sharable. The problem is that
4980 the constant address may need to be reloaded. If the mem is shared,
4981 then reloading one copy of this mem will cause all copies to appear
4982 to have been reloaded. */
4988 copy = rtx_alloc (code);
4990 /* Copy the various flags, and other information. We assume that
4991 all fields need copying, and then clear the fields that should
4992 not be copied. That is the sensible default behavior, and forces
4993 us to explicitly document why we are *not* copying a flag. */
4994 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4996 /* We do not copy the USED flag, which is used as a mark bit during
4997 walks over the RTL. */
4998 RTX_FLAG (copy, used) = 0;
5000 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5001 if (GET_RTX_CLASS (code) == 'i')
5003 RTX_FLAG (copy, jump) = 0;
5004 RTX_FLAG (copy, call) = 0;
5005 RTX_FLAG (copy, frame_related) = 0;
5008 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5010 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5012 copy->fld[i] = orig->fld[i];
5013 switch (*format_ptr++)
5016 if (XEXP (orig, i) != NULL)
5017 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5022 if (XVEC (orig, i) == orig_asm_constraints_vector)
5023 XVEC (copy, i) = copy_asm_constraints_vector;
5024 else if (XVEC (orig, i) == orig_asm_operands_vector)
5025 XVEC (copy, i) = copy_asm_operands_vector;
5026 else if (XVEC (orig, i) != NULL)
5028 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5029 for (j = 0; j < XVECLEN (copy, i); j++)
5030 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5041 /* These are left unchanged. */
5049 if (code == SCRATCH)
5051 i = copy_insn_n_scratches++;
5052 if (i >= MAX_RECOG_OPERANDS)
5054 copy_insn_scratch_in[i] = orig;
5055 copy_insn_scratch_out[i] = copy;
5057 else if (code == ASM_OPERANDS)
5059 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5060 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5061 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5062 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5068 /* Create a new copy of an rtx.
5069 This function differs from copy_rtx in that it handles SCRATCHes and
5070 ASM_OPERANDs properly.
5071 INSN doesn't really have to be a full INSN; it could be just the pattern. */
5077 copy_insn_n_scratches = 0;
5078 orig_asm_operands_vector = 0;
5079 orig_asm_constraints_vector = 0;
5080 copy_asm_operands_vector = 0;
5081 copy_asm_constraints_vector = 0;
5082 return copy_insn_1 (insn);
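/* Illustrative sketch, not part of the original source: when the same pattern
   has to be emitted more than once, copying it first keeps the two insns from
   sharing structure:

	emit_insn (pat);
	emit_insn (copy_insn (pat));  */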
5085 /* Initialize data structures and variables in this file
5086 before generating rtl for each function. */
5091 struct function *f = cfun;
5093 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5096 seq_rtl_expr = NULL;
5098 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5101 first_label_num = label_num;
5105 /* Init the tables that describe all the pseudo regs. */
5107 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5109 f->emit->regno_pointer_align
5110 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5111 * sizeof (unsigned char));
5114 = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5118 = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5121 /* Put copies of all the hard registers into regno_reg_rtx. */
5122 memcpy (regno_reg_rtx,
5123 static_regno_reg_rtx,
5124 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5126 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5127 init_virtual_regs (f->emit);
5129 /* Indicate that the virtual registers and stack locations are all pointers. */
5131 REG_POINTER (stack_pointer_rtx) = 1;
5132 REG_POINTER (frame_pointer_rtx) = 1;
5133 REG_POINTER (hard_frame_pointer_rtx) = 1;
5134 REG_POINTER (arg_pointer_rtx) = 1;
5136 REG_POINTER (virtual_incoming_args_rtx) = 1;
5137 REG_POINTER (virtual_stack_vars_rtx) = 1;
5138 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5139 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5140 REG_POINTER (virtual_cfa_rtx) = 1;
5142 #ifdef STACK_BOUNDARY
5143 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5144 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5145 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5146 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5148 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5149 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5150 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5151 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5152 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5155 #ifdef INIT_EXPANDERS
5160 /* Generate the constant 0. */
5163 gen_const_vector_0 (mode)
5164 enum machine_mode mode;
5169 enum machine_mode inner;
5171 units = GET_MODE_NUNITS (mode);
5172 inner = GET_MODE_INNER (mode);
5174 v = rtvec_alloc (units);
5176 /* We need to set CONST0_RTX for the inner mode before calling this function. */
5177 if (!CONST0_RTX (inner))
5180 for (i = 0; i < units; ++i)
5181 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5183 tem = gen_rtx_CONST_VECTOR (mode, v);
5187 /* Create some permanent unique rtl objects shared between all functions.
5188 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5191 init_emit_once (line_numbers)
5195 enum machine_mode mode;
5196 enum machine_mode double_mode;
5198 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash tables. */
5200 const_int_htab = htab_create (37, const_int_htab_hash,
5201 const_int_htab_eq, NULL);
5203 const_double_htab = htab_create (37, const_double_htab_hash,
5204 const_double_htab_eq, NULL);
5206 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5207 mem_attrs_htab_eq, NULL);
5209 no_line_numbers = ! line_numbers;
5211 /* Compute the word and byte modes. */
5213 byte_mode = VOIDmode;
5214 word_mode = VOIDmode;
5215 double_mode = VOIDmode;
5217 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5218 mode = GET_MODE_WIDER_MODE (mode))
5220 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5221 && byte_mode == VOIDmode)
5224 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5225 && word_mode == VOIDmode)
5229 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5230 mode = GET_MODE_WIDER_MODE (mode))
5232 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5233 && double_mode == VOIDmode)
5237 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
                                          HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
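
  /* Illustrative aside, not part of the original file: the payoff of the
     const_int_rtx[] array filled above is that small integers never need a
     fresh allocation.  A sketch of the lookup side, close in spirit to
     gen_rtx_CONST_INT elsewhere in this file; the local VAL is a stand-in.  */
#if 0
  {
    HOST_WIDE_INT val = 100;	/* Any value; 100 is only an example.  */
    rtx x;

    if (val >= - MAX_SAVED_CONST_INT && val <= MAX_SAVED_CONST_INT)
      x = const_int_rtx[val + MAX_SAVED_CONST_INT];	/* Shared, preallocated.  */
    else
      x = gen_rtx_CONST_INT (VOIDmode, val);	/* Falls back to interning.  */
  }
#endif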
  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  for (i = 0; i <= 2; i++)
    {
      REAL_VALUE_TYPE *r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif
#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}
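
/* Illustrative aside, not part of the original file: the two functions above
   are meant to be used as a save/clear/restore pair around code that must
   emit accurate line notes.  A minimal sketch; the function name and the
   elided body are hypothetical.  */
#if 0
static void
expand_with_line_notes ()
{
  int saved = force_line_numbers ();	/* Re-enable line notes, remember old state.  */

  /* ... emit the statements that need correct line numbers here ...  */

  restore_line_number_status (saved);	/* Put the previous setting back.  */
}
#endif
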
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (insn, after)
     rtx insn, after;
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_SCOPE (new) = INSN_SCOPE (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
        if (GET_CODE (link) == EXPR_LIST)
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
        else
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;

      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
        p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  return new;
}
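
/* Illustrative aside, not part of the original file: a typical use of
   emit_copy_of_insn_after is duplicating a whole run of insns after some
   insertion point while keeping any REG_LIBCALL/REG_RETVAL region intact.
   A minimal sketch; the helper name and its arguments are hypothetical.  */
#if 0
static rtx
copy_insn_range_after (from, to, after)
     rtx from, to, after;
{
  rtx insn, last = after;

  /* Assumes the range holds only INSN, JUMP_INSN and CALL_INSN; any other
     code makes emit_copy_of_insn_after abort.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    last = emit_copy_of_insn_after (insn, last);
  return last;
}
#endif
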
#include "gt-emit-rtl.h"