1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 88, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* Middle-to-low level generation of rtx code and insns.
24 This file contains the functions `gen_rtx', `gen_reg_rtx'
25 and `gen_label_rtx' that are the usual ways of creating rtl
26 expressions for most purposes.
28 It also has the functions for creating insns and linking
29 them in the doubly-linked chain.
31 The patterns of the insns are created by machine-dependent
32 routines in insn-emit.c, which is generated automatically from
33 the machine description. These routines use `gen_rtx' to make
34 the individual rtx's of the pattern; what is machine dependent
35 is the kind of rtx's they make and what arguments they use. */
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
53 #include "basic-block.h"
56 /* Commonly used modes. */
58 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
59 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
60 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
61 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
64 /* This is *not* reset after each function. It gives each CODE_LABEL
65 in the entire compilation a unique label number. */
67 static int label_num = 1;
69 /* Highest label number in current function.
70 Zero means use the value of label_num instead.
71 This is nonzero only when belatedly compiling an inline function. */
73 static int last_label_num;
75 /* Value label_num had when set_new_first_and_last_label_number was called.
76 If label_num has not changed since then, last_label_num is valid. */
78 static int base_label_num;
80 /* Nonzero means do not generate NOTEs for source line numbers. */
82 static int no_line_numbers;
84 /* Commonly used rtx's, so that we only need space for one copy.
85 These are initialized once for the entire compilation.
86 All of these except perhaps the floating-point CONST_DOUBLEs
87 are unique; no other rtx-object will be equal to any of these. */
89 rtx global_rtl[GR_MAX];
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx. */
95 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
/* Host-side REAL_VALUE_TYPE images of the constants 0, 1, 2 and -1,
   shared by the rest of the compiler. */
99 REAL_VALUE_TYPE dconst0;
100 REAL_VALUE_TYPE dconst1;
101 REAL_VALUE_TYPE dconst2;
102 REAL_VALUE_TYPE dconstm1;
104 /* All references to the following fixed hard registers go through
105 these unique rtl objects. On machines where the frame-pointer and
106 arg-pointer are the same register, they use the same unique object.
108 After register allocation, other rtl objects which used to be pseudo-regs
109 may be clobbered to refer to the frame-pointer register.
110 But references that were originally to the frame-pointer can be
111 distinguished from the others because they contain frame_pointer_rtx.
113 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
114 tricky: until register elimination has taken place hard_frame_pointer_rtx
115 should be used if it is being set, and frame_pointer_rtx otherwise. After
116 register elimination hard_frame_pointer_rtx should always be used.
117 On machines where the two registers are same (most) then these are the
120 In an inline procedure, the stack and frame pointer rtxs may not be
121 used for anything else. */
122 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
123 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
124 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
125 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
126 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
128 /* This is used to implement __builtin_return_address for some machines.
129 See for instance the MIPS port. */
130 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
132 /* We make one copy of (const_int C) where C is in
133 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
134 to save space during the compilation and simplify comparisons of
137 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
139 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
140 shortly thrown away. We use two mechanisms to prevent this waste:
142 For sizes up to 5 elements, we keep a SEQUENCE and its associated
143 rtvec for use by gen_sequence. One entry for each size is
144 sufficient because most cases are calls to gen_sequence followed by
145 immediately emitting the SEQUENCE. Reuse is safe since emitting a
146 sequence is destructive on the insn in it anyway and hence can't be
149 We do not bother to save this cached data over nested function calls.
150 Instead, we just reinitialize them. */
152 #define SEQUENCE_RESULT_SIZE 5
154 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
156 /* During RTL generation, we also keep a list of free INSN rtl codes. */
157 static rtx free_insn;
/* Per-function emit state lives in current_function->emit; these macros
   give the traditional global-looking names for its fields. */
159 #define first_insn (current_function->emit->x_first_insn)
160 #define last_insn (current_function->emit->x_last_insn)
161 #define cur_insn_uid (current_function->emit->x_cur_insn_uid)
162 #define last_linenum (current_function->emit->x_last_linenum)
163 #define last_filename (current_function->emit->x_last_filename)
164 #define first_label_num (current_function->emit->x_first_label_num)
166 /* This is where the pointer to the obstack being used for RTL is stored. */
167 extern struct obstack *rtl_obstack;
/* Forward declarations for static helpers defined later in this file. */
169 static rtx make_jump_insn_raw PROTO((rtx));
170 static rtx make_call_insn_raw PROTO((rtx));
171 static rtx find_line_note PROTO((rtx));
172 static void mark_sequence_stack PROTO((struct sequence_stack *));
174 /* There are some RTL codes that require special attention; the generation
175 functions do the raw handling. If you add to this list, modify
176 special_rtx in gengenrtl.c as well. */
/* Return a CONST_INT rtx with value ARG.  Values in
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are served from the
   shared const_int_rtx cache, so equal small constants are
   pointer-equal.  MODE is accepted for uniformity with the other
   generators.  */
179 gen_rtx_CONST_INT (mode, arg)
180 enum machine_mode mode;
183 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
184 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
/* When STORE_FLAG_VALUE is not +/-1 it falls outside the cached range,
   so the shared "true" rtx is matched explicitly.  */
186 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
187 if (const_true_rtx && arg == STORE_FLAG_VALUE)
188 return const_true_rtx;
/* Anything else gets a freshly allocated CONST_INT.  */
191 return gen_rtx_raw_CONST_INT (mode, arg);
194 /* CONST_DOUBLEs needs special handling because its length is known
/* Build a CONST_DOUBLE in MODE.  ARG0 is presumably the chain/mem slot
   and ARG1/ARG2 the two HOST_WIDE_INT halves of the value -- several
   lines are elided in this view, so confirm against the full file.  */
197 gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
198 enum machine_mode mode;
200 HOST_WIDE_INT arg1, arg2;
202 rtx r = rtx_alloc (CONST_DOUBLE);
/* Clear the chain slot; rtx_alloc does not zero it.  */
207 X0EXP (r, 1) = NULL_RTX;
/* Zero any trailing operands beyond the three value slots.  */
211 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
/* Return a REG rtx for hard or pseudo register REGNO in MODE, reusing
   the unique shared rtx for the special pointer registers when
   possible.  */
218 gen_rtx_REG (mode, regno)
219 enum machine_mode mode;
222 /* In case the MD file explicitly references the frame pointer, have
223 all such references point to the same frame pointer. This is
224 used during frame pointer elimination to distinguish the explicit
225 references to these registers from pseudos that happened to be
228 If we have eliminated the frame pointer or arg pointer, we will
229 be using it as a normal register, for example as a spill
230 register. In such cases, we might be accessing it in a mode that
231 is not Pmode and therefore cannot use the pre-allocated rtx.
233 Also don't do this when we are making new REGs in reload, since
234 we don't want to get confused with the real pointers. */
236 if (mode == Pmode && !reload_in_progress)
238 if (regno == FRAME_POINTER_REGNUM)
239 return frame_pointer_rtx;
240 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
241 if (regno == HARD_FRAME_POINTER_REGNUM)
242 return hard_frame_pointer_rtx;
244 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
245 if (regno == ARG_POINTER_REGNUM)
246 return arg_pointer_rtx;
248 #ifdef RETURN_ADDRESS_POINTER_REGNUM
249 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
250 return return_address_pointer_rtx;
252 if (regno == STACK_POINTER_REGNUM)
253 return stack_pointer_rtx;
/* Not one of the shared special registers: allocate a fresh REG.  */
256 return gen_rtx_raw_REG (mode, regno);
/* Return a MEM rtx of MODE referencing ADDR, with its alias set
   initialized to 0 (the "everything conflicts" set).  */
260 gen_rtx_MEM (mode, addr)
261 enum machine_mode mode;
264 rtx rt = gen_rtx_raw_MEM (mode, addr);
266 /* This field is not cleared by the mere allocation of the rtx, so
268 MEM_ALIAS_SET (rt) = 0;
273 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
275 ** This routine generates an RTX of the size specified by
276 ** <code>, which is an RTX code. The RTX structure is initialized
277 ** from the arguments <element1> through <elementn>, which are
278 ** interpreted according to the specific RTX type's format. The
279 ** special machine mode associated with the rtx (if any) is specified
282 ** gen_rtx can be invoked in a way which resembles the lisp-like
283 ** rtx it will generate. For example, the following rtx structure:
285 ** (plus:QI (mem:QI (reg:SI 1))
286 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
288 ** ...would be generated by the following C code:
290 ** gen_rtx (PLUS, QImode,
291 ** gen_rtx (MEM, QImode,
292 ** gen_rtx (REG, SImode, 1)),
293 ** gen_rtx (MEM, QImode,
294 ** gen_rtx (PLUS, SImode,
295 ** gen_rtx (REG, SImode, 2),
296 ** gen_rtx (REG, SImode, 3)))),
301 gen_rtx VPROTO((enum rtx_code code, enum machine_mode mode, ...))
303 #ifndef ANSI_PROTOTYPES
305 enum machine_mode mode;
308 register int i; /* Array indices... */
309 register const char *fmt; /* Current rtx's format... */
310 register rtx rt_val; /* RTX to return to caller... */
/* Under K&R compilation the fixed arguments must also be pulled
   from the varargs list.  */
314 #ifndef ANSI_PROTOTYPES
315 code = va_arg (p, enum rtx_code);
316 mode = va_arg (p, enum machine_mode);
/* The special codes are dispatched to their dedicated generators so
   caching and field initialization happen in one place.  */
322 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
327 rtx arg0 = va_arg (p, rtx);
328 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
329 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
330 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
335 rt_val = gen_rtx_REG (mode, va_arg (p, int));
339 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
/* Generic case: allocate and fill each operand as directed by the
   code's format string.  */
343 rt_val = rtx_alloc (code); /* Allocate the storage space. */
344 rt_val->mode = mode; /* Store the machine mode... */
346 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
347 for (i = 0; i < GET_RTX_LENGTH (code); i++)
351 case '0': /* Unused field. */
354 case 'i': /* An integer? */
355 XINT (rt_val, i) = va_arg (p, int);
358 case 'w': /* A wide integer? */
359 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
362 case 's': /* A string? */
363 XSTR (rt_val, i) = va_arg (p, char *);
366 case 'e': /* An expression? */
367 case 'u': /* An insn? Same except when printing. */
368 XEXP (rt_val, i) = va_arg (p, rtx);
371 case 'E': /* An RTX vector? */
372 XVEC (rt_val, i) = va_arg (p, rtvec);
375 case 'b': /* A bitmap? */
376 XBITMAP (rt_val, i) = va_arg (p, bitmap);
379 case 't': /* A tree? */
380 XTREE (rt_val, i) = va_arg (p, tree);
394 /* gen_rtvec (n, [rt1, ..., rtn])
396 ** This routine creates an rtvec and stores within it the
397 ** pointers to rtx's which are its arguments.
402 gen_rtvec VPROTO((int n, ...))
404 #ifndef ANSI_PROTOTYPES
/* Under K&R compilation the count must also be pulled from the
   varargs list.  */
413 #ifndef ANSI_PROTOTYPES
418 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
/* Gather the variadic rtx arguments into a stack array, then let
   gen_rtvec_v do the actual allocation and copy.  */
420 vector = (rtx *) alloca (n * sizeof (rtx));
422 for (i = 0; i < n; i++)
423 vector[i] = va_arg (p, rtx);
426 return gen_rtvec_v (n, vector);
/* Return an rtvec holding the N rtx pointers at ARGP, or NULL_RTVEC
   when N is zero.  */
430 gen_rtvec_v (n, argp)
435 register rtvec rt_val;
438 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
440 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
442 for (i = 0; i < n; i++)
443 rt_val->elem[i] = *argp++;
449 /* Generate a REG rtx for a new pseudo register of mode MODE.
450 This pseudo is assigned the next sequential register number. */
454 enum machine_mode mode;
456 struct function *f = current_function;
459 /* Don't let anything called after initial flow analysis create new
464 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
465 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
467 /* For complex modes, don't make a single pseudo.
468 Instead, make a CONCAT of two pseudos.
469 This allows noncontiguous allocation of the real and imaginary parts,
470 which makes much better code. Besides, allocating DCmode
471 pseudos overstrains reload on some machines like the 386. */
472 rtx realpart, imagpart;
473 int size = GET_MODE_UNIT_SIZE (mode);
474 enum machine_mode partmode
475 = mode_for_size (size * BITS_PER_UNIT,
476 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
477 ? MODE_FLOAT : MODE_INT),
480 realpart = gen_reg_rtx (partmode);
481 imagpart = gen_reg_rtx (partmode);
482 return gen_rtx_CONCAT (mode, realpart, imagpart);
485 /* Make sure regno_pointer_flag and regno_reg_rtx are large
486 enough to have an element for this pseudo reg number. */
488 if (reg_rtx_no == f->emit->regno_pointer_flag_length)
490 int old_size = f->emit->regno_pointer_flag_length;
/* Double all three per-register tables and zero the new halves.  */
493 new = xrealloc (f->emit->regno_pointer_flag, old_size * 2);
494 memset (new + old_size, 0, old_size);
495 f->emit->regno_pointer_flag = new;
497 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
498 memset (new + old_size, 0, old_size);
499 f->emit->regno_pointer_align = new;
501 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
502 old_size * 2 * sizeof (rtx));
503 memset (new1 + old_size, 0, old_size * sizeof (rtx));
504 regno_reg_rtx = new1;
506 f->emit->regno_pointer_flag_length = old_size * 2;
/* Allocate the REG, record it in the pseudo table, and advance the
   next pseudo register number.  */
509 val = gen_rtx_raw_REG (mode, reg_rtx_no);
510 regno_reg_rtx[reg_rtx_no++] = val;
514 /* Identify REG (which may be a CONCAT) as a user register. */
/* NOTE(review): the function header line is elided in this view --
   presumably this is mark_user_reg (reg); confirm against the full
   file.  For a CONCAT both halves are flagged.  */
520 if (GET_CODE (reg) == CONCAT)
522 REG_USERVAR_P (XEXP (reg, 0)) = 1;
523 REG_USERVAR_P (XEXP (reg, 1)) = 1;
525 else if (GET_CODE (reg) == REG)
526 REG_USERVAR_P (reg) = 1;
531 /* Identify REG as a probable pointer register and show its alignment
532 as ALIGN, if nonzero. */
535 mark_reg_pointer (reg, align)
/* First time REG is flagged: record the pointer bit and alignment.  */
539 if (! REGNO_POINTER_FLAG (REGNO (reg)))
541 REGNO_POINTER_FLAG (REGNO (reg)) = 1;
544 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
/* Already flagged: only weaken the recorded alignment, never raise it.  */
546 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
547 /* We can no longer be sure just how aligned this pointer is.  */
548 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
551 /* Return 1 plus largest pseudo reg number used in the current function. */
/* NOTE(review): the bodies/headers of max_reg_num and max_label_num
   are partly elided in this view; confirm against the full file.  */
559 /* Return 1 + the largest label number used so far in the current function. */
564 if (last_label_num && label_num == base_label_num)
565 return last_label_num;
569 /* Return first label number used in this function (if any were used). */
572 get_first_label_num ()
574 return first_label_num;
577 /* Return a value representing some low-order bits of X, where the number
578 of low-order bits is given by MODE. Note that no conversion is done
579 between floating-point and fixed-point values, rather, the bit
580 representation is returned.
582 This function handles the cases in common between gen_lowpart, below,
583 and two variants in cse.c and combine.c. These are the cases that can
584 be safely handled at all points in the compilation.
586 If this is not a case we can handle, return 0. */
589 gen_lowpart_common (mode, x)
590 enum machine_mode mode;
595 if (GET_MODE (x) == mode)
598 /* MODE must occupy no more words than the mode of X. */
599 if (GET_MODE (x) != VOIDmode
600 && ((GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
601 > ((GET_MODE_SIZE (GET_MODE (x)) + (UNITS_PER_WORD - 1))
/* On big-endian-word machines the low-order part lives in the last
   word(s), so compute the word offset.  */
605 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
606 word = ((GET_MODE_SIZE (GET_MODE (x))
607 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
610 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
611 && (GET_MODE_CLASS (mode) == MODE_INT
612 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
614 /* If we are getting the low-order part of something that has been
615 sign- or zero-extended, we can either just use the object being
616 extended or make a narrower extension. If we want an even smaller
617 piece than the size of the object being extended, call ourselves
620 This case is used mostly by combine and cse. */
622 if (GET_MODE (XEXP (x, 0)) == mode)
624 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
625 return gen_lowpart_common (mode, XEXP (x, 0));
626 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
627 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
629 else if (GET_CODE (x) == SUBREG
630 && (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
631 || GET_MODE_SIZE (mode) == GET_MODE_UNIT_SIZE (GET_MODE (x))))
632 return (GET_MODE (SUBREG_REG (x)) == mode && SUBREG_WORD (x) == 0
634 : gen_rtx_SUBREG (mode, SUBREG_REG (x), SUBREG_WORD (x) + word));
635 else if (GET_CODE (x) == REG)
637 /* Let the backend decide how many registers to skip. This is needed
638 in particular for Sparc64 where fp regs are smaller than a word. */
639 /* ??? Note that subregs are now ambiguous, in that those against
640 pseudos are sized by the Word Size, while those against hard
641 regs are sized by the underlying register size. Better would be
642 to always interpret the subreg offset parameter as bytes or bits. */
644 if (WORDS_BIG_ENDIAN && REGNO (x) < FIRST_PSEUDO_REGISTER)
645 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
646 - HARD_REGNO_NREGS (REGNO (x), mode));
648 /* If the register is not valid for MODE, return 0. If we don't
649 do this, there is no way to fix up the resulting REG later.
650 But we do do this if the current REG is not valid for its
651 mode. This latter is a kludge, but is required due to the
652 way that parameters are passed on some machines, most
654 if (REGNO (x) < FIRST_PSEUDO_REGISTER
655 && ! HARD_REGNO_MODE_OK (REGNO (x) + word, mode)
656 && HARD_REGNO_MODE_OK (REGNO (x), GET_MODE (x)))
658 else if (REGNO (x) < FIRST_PSEUDO_REGISTER
659 /* integrate.c can't handle parts of a return value register. */
660 && (! REG_FUNCTION_VALUE_P (x)
661 || ! rtx_equal_function_value_matters)
662 #ifdef CLASS_CANNOT_CHANGE_SIZE
663 && ! (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (x))
664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_INT
665 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_FLOAT
666 && (TEST_HARD_REG_BIT
667 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
670 /* We want to keep the stack, frame, and arg pointers
672 && x != frame_pointer_rtx
673 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
674 && x != arg_pointer_rtx
676 && x != stack_pointer_rtx)
677 return gen_rtx_REG (mode, REGNO (x) + word);
679 return gen_rtx_SUBREG (mode, x, word);
681 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
682 from the low-order part of the constant. */
683 else if ((GET_MODE_CLASS (mode) == MODE_INT
684 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
685 && GET_MODE (x) == VOIDmode
686 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
688 /* If MODE is twice the host word size, X is already the desired
689 representation. Otherwise, if MODE is wider than a word, we can't
690 do this. If MODE is exactly a word, return just one CONST_INT.
691 If MODE is smaller than a word, clear the bits that don't belong
692 in our mode, unless they and our sign bit are all one. So we get
693 either a reasonable negative value or a reasonable unsigned value
696 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
698 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
700 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
701 return (GET_CODE (x) == CONST_INT ? x
702 : GEN_INT (CONST_DOUBLE_LOW (x)));
705 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
706 int width = GET_MODE_BITSIZE (mode);
707 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
708 : CONST_DOUBLE_LOW (x));
710 /* Sign extend to HOST_WIDE_INT. */
711 val = val << (HOST_BITS_PER_WIDE_INT - width) >> (HOST_BITS_PER_WIDE_INT - width);
713 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
718 /* If X is an integral constant but we want it in floating-point, it
719 must be the case that we have a union of an integer and a floating-point
720 value. If the machine-parameters allow it, simulate that union here
721 and return the result. The two-word and single-word cases are
724 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
725 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
726 || flag_pretend_float)
727 && GET_MODE_CLASS (mode) == MODE_FLOAT
728 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
729 && GET_CODE (x) == CONST_INT
730 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
731 #ifdef REAL_ARITHMETIC
737 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
738 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
742 union {HOST_WIDE_INT i; float d; } u;
745 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
748 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
749 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
750 || flag_pretend_float)
751 && GET_MODE_CLASS (mode) == MODE_FLOAT
752 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
753 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
754 && GET_MODE (x) == VOIDmode
755 && (sizeof (double) * HOST_BITS_PER_CHAR
756 == 2 * HOST_BITS_PER_WIDE_INT))
757 #ifdef REAL_ARITHMETIC
761 HOST_WIDE_INT low, high;
763 if (GET_CODE (x) == CONST_INT)
764 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
766 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
768 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
770 if (WORDS_BIG_ENDIAN)
771 i[0] = high, i[1] = low;
773 i[0] = low, i[1] = high;
775 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
776 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
780 union {HOST_WIDE_INT i[2]; double d; } u;
781 HOST_WIDE_INT low, high;
783 if (GET_CODE (x) == CONST_INT)
784 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
786 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
788 #ifdef HOST_WORDS_BIG_ENDIAN
789 u.i[0] = high, u.i[1] = low;
791 u.i[0] = low, u.i[1] = high;
794 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
798 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
799 same as sizeof (double) or when sizeof (float) is larger than the
800 size of a word on the target machine. */
801 #ifdef REAL_ARITHMETIC
802 else if (mode == SFmode && GET_CODE (x) == CONST_INT)
808 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
809 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
811 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
812 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
813 || flag_pretend_float)
814 && GET_MODE_CLASS (mode) == MODE_FLOAT
815 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
816 && GET_CODE (x) == CONST_INT
817 && (sizeof (double) * HOST_BITS_PER_CHAR
818 == HOST_BITS_PER_WIDE_INT))
824 r = REAL_VALUE_FROM_TARGET_DOUBLE (&i);
825 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
829 /* Similarly, if this is converting a floating-point value into a
830 single-word integer. Only do this if the host and target parameters are
833 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
834 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
835 || flag_pretend_float)
836 && (GET_MODE_CLASS (mode) == MODE_INT
837 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
838 && GET_CODE (x) == CONST_DOUBLE
839 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
840 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
841 return operand_subword (x, word, 0, GET_MODE (x));
843 /* Similarly, if this is converting a floating-point value into a
844 two-word integer, we can do this one word at a time and make an
845 integer. Only do this if the host and target parameters are
848 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
849 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
850 || flag_pretend_float)
851 && (GET_MODE_CLASS (mode) == MODE_INT
852 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
853 && GET_CODE (x) == CONST_DOUBLE
854 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
855 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
858 = operand_subword (x, word + WORDS_BIG_ENDIAN, 0, GET_MODE (x));
860 = operand_subword (x, word + ! WORDS_BIG_ENDIAN, 0, GET_MODE (x));
862 if (lowpart && GET_CODE (lowpart) == CONST_INT
863 && highpart && GET_CODE (highpart) == CONST_INT)
864 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
867 /* Otherwise, we can't do this. */
871 /* Return the real part (which has mode MODE) of a complex value X.
872 This always comes at the low address in memory. */
875 gen_realpart (mode, x)
876 enum machine_mode mode;
/* A CONCAT whose first half already has MODE: that half is the real
   part.  */
879 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
/* Sub-word real part of a hard register cannot be addressed on
   big-endian-word targets.  */
881 else if (WORDS_BIG_ENDIAN
882 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
884 && REGNO (x) < FIRST_PSEUDO_REGISTER)
885 fatal ("Unable to access real part of complex value in a hard register on this target");
886 else if (WORDS_BIG_ENDIAN)
887 return gen_highpart (mode, x);
889 return gen_lowpart (mode, x);
892 /* Return the imaginary part (which has mode MODE) of a complex value X.
893 This always comes at the high address in memory. */
896 gen_imagpart (mode, x)
897 enum machine_mode mode;
/* A CONCAT whose halves have MODE: the second half is the imaginary
   part.  */
900 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
902 else if (WORDS_BIG_ENDIAN)
903 return gen_lowpart (mode, x);
/* Sub-word imaginary part of a hard register cannot be addressed on
   little-endian-word targets.  */
904 else if (!WORDS_BIG_ENDIAN
905 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
907 && REGNO (x) < FIRST_PSEUDO_REGISTER)
908 fatal ("Unable to access imaginary part of complex value in a hard register on this target");
910 return gen_highpart (mode, x);
913 /* Return 1 iff X, assumed to be a SUBREG,
914 refers to the real part of the complex value in its containing reg.
915 Complex values are always stored with the real part in the first word,
916 regardless of WORDS_BIG_ENDIAN. */
919 subreg_realpart_p (x)
922 if (GET_CODE (x) != SUBREG)
/* The real part occupies the first half of the containing register,
   i.e. byte offsets below one unit of the complex mode.  */
925 return SUBREG_WORD (x) * UNITS_PER_WORD < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x)));
928 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
929 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
930 least-significant part of X.
931 MODE specifies how big a part of X to return;
932 it usually should not be larger than a word.
933 If X is a MEM whose address is a QUEUED, the value may be so also. */
936 gen_lowpart (mode, x)
937 enum machine_mode mode;
940 rtx result = gen_lowpart_common (mode, x);
944 else if (GET_CODE (x) == REG)
946 /* Must be a hard reg that's not valid in MODE. */
947 result = gen_lowpart_common (mode, copy_to_reg (x));
952 else if (GET_CODE (x) == MEM)
954 /* The only additional case we can do is MEM. */
955 register int offset = 0;
/* On big-endian-word machines the low part is at the high word
   address.  */
956 if (WORDS_BIG_ENDIAN)
957 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
958 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
960 if (BYTES_BIG_ENDIAN)
961 /* Adjust the address so that the address-after-the-data
963 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
964 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
966 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
/* An ADDRESSOF is first forced into a register, then narrowed.  */
968 else if (GET_CODE (x) == ADDRESSOF)
969 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
974 /* Like `gen_lowpart', but refer to the most significant part.
975 This is used to access the imaginary part of a complex number. */
978 gen_highpart (mode, x)
979 enum machine_mode mode;
982 /* This case loses if X is a subreg. To catch bugs early,
983 complain if an invalid MODE is used even in other cases. */
984 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
985 && GET_MODE_SIZE (mode) != GET_MODE_UNIT_SIZE (GET_MODE (x)))
987 if (GET_CODE (x) == CONST_DOUBLE
988 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
989 && GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT
992 return GEN_INT (CONST_DOUBLE_HIGH (x) & GET_MODE_MASK (mode));
993 else if (GET_CODE (x) == CONST_INT)
995 if (HOST_BITS_PER_WIDE_INT <= BITS_PER_WORD)
997 return GEN_INT (INTVAL (x) >> (HOST_BITS_PER_WIDE_INT - BITS_PER_WORD));
999 else if (GET_CODE (x) == MEM)
1001 register int offset = 0;
/* On little-endian-word machines the high part is at the high word
   address.  */
1002 if (! WORDS_BIG_ENDIAN)
1003 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1004 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1006 if (! BYTES_BIG_ENDIAN
1007 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
1008 offset -= (GET_MODE_SIZE (mode)
1009 - MIN (UNITS_PER_WORD,
1010 GET_MODE_SIZE (GET_MODE (x))));
1012 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1014 else if (GET_CODE (x) == SUBREG)
1016 /* The only time this should occur is when we are looking at a
1017 multi-word item with a SUBREG whose mode is the same as that of the
1018 item. It isn't clear what we would do if it wasn't. */
1019 if (SUBREG_WORD (x) != 0)
1021 return gen_highpart (mode, SUBREG_REG (x));
1023 else if (GET_CODE (x) == REG)
1027 /* Let the backend decide how many registers to skip. This is needed
1028 in particular for sparc64 where fp regs are smaller than a word. */
1029 /* ??? Note that subregs are now ambiguous, in that those against
1030 pseudos are sized by the word size, while those against hard
1031 regs are sized by the underlying register size. Better would be
1032 to always interpret the subreg offset parameter as bytes or bits. */
1034 if (WORDS_BIG_ENDIAN)
1036 else if (REGNO (x) < FIRST_PSEUDO_REGISTER)
1037 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
1038 - HARD_REGNO_NREGS (REGNO (x), mode));
1040 word = ((GET_MODE_SIZE (GET_MODE (x))
1041 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1044 if (REGNO (x) < FIRST_PSEUDO_REGISTER
1045 /* integrate.c can't handle parts of a return value register. */
1046 && (! REG_FUNCTION_VALUE_P (x)
1047 || ! rtx_equal_function_value_matters)
1048 /* We want to keep the stack, frame, and arg pointers special. */
1049 && x != frame_pointer_rtx
1050 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1051 && x != arg_pointer_rtx
1053 && x != stack_pointer_rtx)
1054 return gen_rtx_REG (mode, REGNO (x) + word);
1056 return gen_rtx_SUBREG (mode, x, word);
1062 /* Return 1 iff X, assumed to be a SUBREG,
1063 refers to the least significant part of its containing reg.
1064 If X is not a SUBREG, always return 1 (it is its own low part!). */
1067 subreg_lowpart_p (x)
1070 if (GET_CODE (x) != SUBREG)
1072 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
/* On big-endian-word machines the low part of a multi-word value is
   the last word, so compare against the end-relative word offset.  */
1075 if (WORDS_BIG_ENDIAN
1076 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD)
1077 return (SUBREG_WORD (x)
1078 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1079 - MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD))
/* Little-endian-word (or single-word): low part is word 0.  */
1082 return SUBREG_WORD (x) == 0;
1085 /* Return subword I of operand OP.
1086 The word number, I, is interpreted as the word number starting at the
1087 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1088 otherwise it is the high-order word.
1090 If we cannot extract the required word, we return zero. Otherwise, an
1091 rtx corresponding to the requested word will be returned.
1093 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1094 reload has completed, a valid address will always be returned. After
1095 reload, if a valid address cannot be returned, we return zero.
1097 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1098 it is the responsibility of the caller.
1100 MODE is the mode of OP in case it is a CONST_INT. */
1103 operand_subword (op, i, validate_address, mode)
1106 int validate_address;
1107 enum machine_mode mode;
/* Number of target words that fit in one host wide int.  */
1110 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1112 if (mode == VOIDmode)
1113 mode = GET_MODE (op);
1115 if (mode == VOIDmode)
1118 /* If OP is narrower than a word, fail. */
1120 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1123 /* If we want a word outside OP, return zero. */
1125 && (i + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1128 /* If OP is already an integer word, return it. */
1129 if (GET_MODE_CLASS (mode) == MODE_INT
1130 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1133 /* If OP is a REG or SUBREG, we can handle it very simply. */
1134 if (GET_CODE (op) == REG)
1136 /* ??? There is a potential problem with this code. It does not
1137 properly handle extractions of a subword from a hard register
1138 that is larger than word_mode. Presumably the check for
1139 HARD_REGNO_MODE_OK catches most of these cases. */
1141 /* If OP is a hard register, but OP + I is not a hard register,
1142 then extracting a subword is impossible.
1144 For example, consider if OP is the last hard register and it is
1145 larger than word_mode. If we wanted word N (for N > 0) because a
1146 part of that hard register was known to contain a useful value,
1147 then OP + I would refer to a pseudo, not the hard register we
1149 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1150 && REGNO (op) + i >= FIRST_PSEUDO_REGISTER)
1153 /* If the register is not valid for MODE, return 0. Note we
1154 have to check both OP and OP + I since they may refer to
1155 different parts of the register file.
1157 Consider if OP refers to the last 96bit FP register and we want
1158 subword 3 because that subword is known to contain a value we
1160 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1161 && (! HARD_REGNO_MODE_OK (REGNO (op), word_mode)
1162 || ! HARD_REGNO_MODE_OK (REGNO (op) + i, word_mode)))
/* Pseudos, function-value regs during RTL generation, and the special
   pointer registers keep their identity: wrap them in a SUBREG rather
   than renumbering.  */
1164 else if (REGNO (op) >= FIRST_PSEUDO_REGISTER
1165 || (REG_FUNCTION_VALUE_P (op)
1166 && rtx_equal_function_value_matters)
1167 /* We want to keep the stack, frame, and arg pointers
1169 || op == frame_pointer_rtx
1170 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1171 || op == arg_pointer_rtx
1173 || op == stack_pointer_rtx)
1174 return gen_rtx_SUBREG (word_mode, op, i);
1176 return gen_rtx_REG (word_mode, REGNO (op) + i);
1178 else if (GET_CODE (op) == SUBREG)
1179 return gen_rtx_SUBREG (word_mode, SUBREG_REG (op), i + SUBREG_WORD (op));
1180 else if (GET_CODE (op) == CONCAT)
/* Dispatch to whichever half of the CONCAT holds word I.  */
1182 int partwords = GET_MODE_UNIT_SIZE (GET_MODE (op)) / UNITS_PER_WORD;
1184 return operand_subword (XEXP (op, 0), i, validate_address, mode);
1185 return operand_subword (XEXP (op, 1), i - partwords,
1186 validate_address, mode);
1189 /* Form a new MEM at the requested address. */
1190 if (GET_CODE (op) == MEM)
1192 rtx addr = plus_constant (XEXP (op, 0), i * UNITS_PER_WORD);
1195 if (validate_address)
1197 if (reload_completed)
/* After reload we may not create new insns to legitimize the
   address, so an invalid strict address means failure.  */
1199 if (! strict_memory_address_p (word_mode, addr))
1203 addr = memory_address (word_mode, addr);
1206 new = gen_rtx_MEM (word_mode, addr);
1208 MEM_COPY_ATTRIBUTES (new, op);
1209 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1210 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op);
1215 /* The only remaining cases are when OP is a constant. If the host and
1216 target floating formats are the same, handling two-word floating
1217 constants is easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1218 are defined as returning one or two 32 bit values, respectively,
1219 and not values of BITS_PER_WORD bits. */
1220 #ifdef REAL_ARITHMETIC
1221 /* The output is some bits, the width of the target machine's word.
1222 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1224 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1225 && GET_MODE_CLASS (mode) == MODE_FLOAT
1226 && GET_MODE_BITSIZE (mode) == 64
1227 && GET_CODE (op) == CONST_DOUBLE)
1232 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1233 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1235 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1236 which the words are written depends on the word endianness.
1237 ??? This is a potential portability problem and should
1238 be fixed at some point.
1240 We must exercise caution with the sign bit. By definition there
1241 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1242 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1243 So we explicitly mask and sign-extend as necessary. */
1244 if (BITS_PER_WORD == 32)
1247 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1248 return GEN_INT (val);
1250 #if HOST_BITS_PER_WIDE_INT >= 64
1251 else if (BITS_PER_WORD >= 64 && i == 0)
/* Pack both 32-bit halves into one wide int, high half sign-extended.  */
1253 val = k[! WORDS_BIG_ENDIAN];
1254 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1255 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1256 return GEN_INT (val);
1259 else if (BITS_PER_WORD == 16)
1262 if ((i & 1) == !WORDS_BIG_ENDIAN)
1265 return GEN_INT (val);
1270 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1271 && GET_MODE_CLASS (mode) == MODE_FLOAT
1272 && GET_MODE_BITSIZE (mode) > 64
1273 && GET_CODE (op) == CONST_DOUBLE)
1278 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1279 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1281 if (BITS_PER_WORD == 32)
1284 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 return GEN_INT (val);
1290 #else /* no REAL_ARITHMETIC */
1291 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1292 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1293 || flag_pretend_float)
1294 && GET_MODE_CLASS (mode) == MODE_FLOAT
1295 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1296 && GET_CODE (op) == CONST_DOUBLE)
1298 /* The constant is stored in the host's word-ordering,
1299 but we want to access it in the target's word-ordering. Some
1300 compilers don't like a conditional inside macro args, so we have two
1301 copies of the return. */
1302 #ifdef HOST_WORDS_BIG_ENDIAN
1303 return GEN_INT (i == WORDS_BIG_ENDIAN
1304 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1306 return GEN_INT (i != WORDS_BIG_ENDIAN
1307 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1310 #endif /* no REAL_ARITHMETIC */
1312 /* Single word float is a little harder, since single- and double-word
1313 values often do not have the same high-order bits. We have already
1314 verified that we want the only defined word of the single-word value. */
1315 #ifdef REAL_ARITHMETIC
1316 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1317 && GET_MODE_BITSIZE (mode) == 32
1318 && GET_CODE (op) == CONST_DOUBLE)
1323 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1324 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1326 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1328 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1330 if (BITS_PER_WORD == 16)
1332 if ((i & 1) == !WORDS_BIG_ENDIAN)
1337 return GEN_INT (val);
1340 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1341 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1342 || flag_pretend_float)
1343 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1344 && GET_MODE_CLASS (mode) == MODE_FLOAT
1345 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1346 && GET_CODE (op) == CONST_DOUBLE)
/* NOTE(review): type-punning through a union here relies on the
   host float format matching the target's (guarded above).  */
1349 union {float f; HOST_WIDE_INT i; } u;
1351 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1354 return GEN_INT (u.i);
1356 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1357 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1358 || flag_pretend_float)
1359 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1360 && GET_MODE_CLASS (mode) == MODE_FLOAT
1361 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1362 && GET_CODE (op) == CONST_DOUBLE)
1365 union {double d; HOST_WIDE_INT i; } u;
1367 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1370 return GEN_INT (u.i);
1372 #endif /* no REAL_ARITHMETIC */
1374 /* The only remaining cases that we can handle are integers.
1375 Convert to proper endianness now since these cases need it.
1376 At this point, i == 0 means the low-order word.
1378 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1379 in general. However, if OP is (const_int 0), we can just return
1382 if (op == const0_rtx)
1385 if (GET_MODE_CLASS (mode) != MODE_INT
1386 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1387 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1390 if (WORDS_BIG_ENDIAN)
1391 i = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - i;
1393 /* Find out which word on the host machine this value is in and get
1394 it from the constant. */
1395 val = (i / size_ratio == 0
1396 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1397 : (GET_CODE (op) == CONST_INT
1398 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1400 /* Get the value we want into the low bits of val. */
1401 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1402 val = ((val >> ((i % size_ratio) * BITS_PER_WORD)));
1404 val = trunc_int_for_mode (val, word_mode);
1406 return GEN_INT (val);
1409 /* Similar to `operand_subword', but never return 0. If we can't extract
1410 the required subword, put OP into a register and try again. If that fails,
1411 abort. We always validate the address in this case. It is not valid
1412 to call this function after reload; it is mostly meant for RTL
1415 MODE is the mode of OP, in case it is CONST_INT. */
1418 operand_subword_force (op, i, mode)
1421 enum machine_mode mode;
/* First attempt: direct extraction with address validation on.  */
1423 rtx result = operand_subword (op, i, 1, mode);
1428 if (mode != BLKmode && mode != VOIDmode)
1430 /* If this is a register which can not be accessed by words, copy it
1431 to a pseudo register. */
1432 if (GET_CODE (op) == REG)
1433 op = copy_to_reg (op);
1435 op = force_reg (mode, op);
/* Second attempt, now that OP is in a (pseudo) register.  */
1438 result = operand_subword (op, i, 1, mode);
1445 /* Given a compare instruction, swap the operands.
1446 A test instruction is changed into a compare of 0 against the operand. */
1449 reverse_comparison (insn)
1452 rtx body = PATTERN (insn);
/* The comparison is either the SET_SRC directly or inside a PARALLEL.  */
1455 if (GET_CODE (body) == SET)
1456 comp = SET_SRC (body);
1458 comp = SET_SRC (XVECEXP (body, 0, 0));
/* An explicit COMPARE: exchange its two operands in place.  */
1460 if (GET_CODE (comp) == COMPARE)
1462 rtx op0 = XEXP (comp, 0);
1463 rtx op1 = XEXP (comp, 1);
1464 XEXP (comp, 0) = op1;
1465 XEXP (comp, 1) = op0;
/* Otherwise treat it as a test: rebuild as (compare 0 operand).  */
1469 rtx new = gen_rtx_COMPARE (VOIDmode,
1470 CONST0_RTX (GET_MODE (comp)), comp);
1471 if (GET_CODE (body) == SET)
1472 SET_SRC (body) = new;
1474 SET_SRC (XVECEXP (body, 0, 0)) = new;
1478 /* Return a memory reference like MEMREF, but with its mode changed
1479 to MODE and its address changed to ADDR.
1480 (VOIDmode means don't change the mode.
1481 NULL for ADDR means don't change the address.) */
1484 change_address (memref, mode, addr)
1486 enum machine_mode mode;
1491 if (GET_CODE (memref) != MEM)
1493 if (mode == VOIDmode)
1494 mode = GET_MODE (memref);
1496 addr = XEXP (memref, 0);
1498 /* If reload is in progress or has completed, ADDR must be valid.
1499 Otherwise, we can call memory_address to make it valid. */
1500 if (reload_completed || reload_in_progress)
1502 if (! memory_address_p (mode, addr))
1506 addr = memory_address (mode, addr);
/* Nothing actually changed -- presumably MEMREF itself is returned
   here to avoid allocating a new MEM (elided line); TODO confirm.  */
1508 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1511 new = gen_rtx_MEM (mode, addr);
1512 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref);
1513 MEM_COPY_ATTRIBUTES (new, memref);
1514 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref);
1518 /* Return a newly created CODE_LABEL rtx with a unique label number. */
/* label_num is global across the entire compilation (see file header),
   so each label gets a compilation-wide unique number.  */
1525 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1526 NULL_RTX, label_num++, NULL_PTR, NULL_PTR);
1528 LABEL_NUSES (label) = 0;
1529 LABEL_ALTERNATE_NAME (label) = NULL;
1533 /* For procedure integration. */
1535 /* Install new pointers to the first and last insns in the chain.
1536 Also, set cur_insn_uid to one higher than the last in use.
1537 Used for an inline-procedure after copying the insn chain. */
1540 set_new_first_and_last_insn (first, last)
/* Scan the new chain so cur_insn_uid exceeds every uid in use.  */
1549 for (insn = first; insn; insn = NEXT_INSN (insn))
1550 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1555 /* Set the range of label numbers found in the current function.
1556 This is used when belatedly compiling an inline function. */
1559 set_new_first_and_last_label_num (first, last)
/* Record the current global counter as the base for remapping.  */
1562 base_label_num = label_num;
1563 first_label_num = first;
1564 last_label_num = last;
1567 /* Set the last label number found in the current function.
1568 This is used when belatedly compiling an inline function. */
1571 set_new_last_label_num (last)
/* Same remapping base as set_new_first_and_last_label_num.  */
1574 base_label_num = label_num;
1575 last_label_num = last;
1578 /* Restore all variables describing the current status from the structure *P.
1579 This is used after a nested function. */
1582 restore_emit_status (p)
/* Drop any cached rtx state that belonged to the nested function.  */
1586 clear_emit_caches ();
1589 /* Clear out all parts of the state in F that can safely be discarded
1590 after the function has been compiled, to let garbage collection
1591 reclaim the memory. */
1594 free_emit_status (f)
/* Release the per-register side tables owned by F's emit status.  */
1597 free (f->emit->x_regno_reg_rtx);
1598 free (f->emit->regno_pointer_flag);
1599 free (f->emit->regno_pointer_align);
1604 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1605 It does not work to do this twice, because the mark bits set here
1606 are not cleared afterwards. */
1609 unshare_all_rtl (insn)
/* Unshare the pattern and both note lists of every real insn.  */
1612 for (; insn; insn = NEXT_INSN (insn))
1613 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1614 || GET_CODE (insn) == CALL_INSN)
1616 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1617 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1618 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1621 /* Make sure the addresses of stack slots found outside the insn chain
1622 (such as, in DECL_RTL of a variable) are not shared
1623 with the insn chain.
1625 This special care is necessary when the stack slot MEM does not
1626 actually appear in the insn chain. If it does appear, its address
1627 is unshared from all else at that point. */
1629 copy_rtx_if_shared (stack_slot_list);
1632 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1633 Recursively does the same for subexpressions. */
1636 copy_rtx_if_shared (orig)
1639 register rtx x = orig;
1641 register enum rtx_code code;
1642 register const char *format_ptr;
1648 code = GET_CODE (x);
1650 /* These types may be freely shared. */
1663 /* SCRATCH rtxs must be shared because each one represents a distinct value. */
1667 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1668 a LABEL_REF, it isn't sharable. */
1669 if (GET_CODE (XEXP (x, 0)) == PLUS
1670 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1671 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1680 /* The chain of insns is not being copied. */
1684 /* A MEM is allowed to be shared if its address is constant.
1686 We used to allow sharing of MEMs which referenced
1687 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1688 that can lose. instantiate_virtual_regs will not unshare
1689 the MEMs, and combine may change the structure of the address
1690 because it looks safe and profitable in one context, but
1691 in some other context it creates unrecognizable RTL. */
1692 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
1701 /* This rtx may not be shared. If it has already been seen,
1702 replace it with a copy of itself. */
/* Shallow-copy X, including exactly as many fields as CODE has.  */
1708 copy = rtx_alloc (code);
1709 bcopy ((char *) x, (char *) copy,
1710 (sizeof (*copy) - sizeof (copy->fld)
1711 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1717 /* Now scan the subexpressions recursively.
1718 We can store any replaced subexpressions directly into X
1719 since we know X is not shared! Any vectors in X
1720 must be copied if X was copied. */
1722 format_ptr = GET_RTX_FORMAT (code);
1724 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1726 switch (*format_ptr++)
1729 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1733 if (XVEC (x, i) != NULL)
1736 int len = XVECLEN (x, i);
/* Vectors are copied whenever X itself was copied.  */
1738 if (copied && len > 0)
1739 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
1740 for (j = 0; j < len; j++)
1741 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
1749 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1750 to look for shared sub-parts. */
1753 reset_used_flags (x)
1757 register enum rtx_code code;
1758 register const char *format_ptr;
1763 code = GET_CODE (x);
1765 /* These types may be freely shared so we needn't do any resetting
1786 /* The chain of insns is not being copied. */
/* Recurse over every rtx ('e') and vector ('E'/'V') operand.  */
1795 format_ptr = GET_RTX_FORMAT (code);
1796 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1798 switch (*format_ptr++)
1801 reset_used_flags (XEXP (x, i));
1805 for (j = 0; j < XVECLEN (x, i); j++)
1806 reset_used_flags (XVECEXP (x, i, j));
1812 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1813 Return X or the rtx for the pseudo reg the value of X was copied into.
1814 OTHER must be valid as a SET_DEST. */
1817 make_safe_from (x, other)
/* Strip wrappers so we look at the register or MEM actually written.  */
1821 switch (GET_CODE (other))
1824 other = SUBREG_REG (other);
1826 case STRICT_LOW_PART:
1829 other = XEXP (other, 0);
/* Copy X into a fresh pseudo if a store to OTHER could clobber it:
   a MEM store when X is not a register, or a store into a hard
   register or a register mentioned within X.  */
1835 if ((GET_CODE (other) == MEM
1837 && GET_CODE (x) != REG
1838 && GET_CODE (x) != SUBREG)
1839 || (GET_CODE (other) == REG
1840 && (REGNO (other) < FIRST_PSEUDO_REGISTER
1841 || reg_mentioned_p (other, x))))
1843 rtx temp = gen_reg_rtx (GET_MODE (x));
1844 emit_move_insn (temp, x);
1850 /* Emission of insns (adding them to the doubly-linked list). */
1852 /* Return the first insn of the current sequence or current function. */
1860 /* Return the last insn emitted in current sequence or current function. */
1868 /* Specify a new insn as the last in the chain. */
1871 set_last_insn (insn)
/* INSN must truly be last -- a non-null NEXT_INSN is an error here.  */
1874 if (NEXT_INSN (insn) != 0)
1879 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1882 get_last_insn_anywhere ()
1884 struct sequence_stack *stack;
/* Walk outward through the pushed sequences for the first non-empty one.  */
1887 for (stack = seq_stack; stack; stack = stack->next)
1888 if (stack->last != 0)
1893 /* Return a number larger than any instruction's uid in this function. */
1898 return cur_insn_uid;
1905 int old_max_uid = cur_insn_uid;
1907 /* If there aren't that many instructions, then it's not really
1908 worth renumbering them. */
/* 25000 is a heuristic threshold; below it the uid table stays sparse.  */
1909 if (get_max_uid () < 25000)
/* Assign fresh consecutive uids to every insn in chain order.  */
1914 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1915 INSN_UID (insn) = cur_insn_uid++;
1918 /* Return the next insn. If it is a SEQUENCE, return the first insn
/* Descend into a delay-slot SEQUENCE rather than skipping over it.  */
1927 insn = NEXT_INSN (insn);
1928 if (insn && GET_CODE (insn) == INSN
1929 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1930 insn = XVECEXP (PATTERN (insn), 0, 0);
1936 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1940 previous_insn (insn)
/* Descend into a delay-slot SEQUENCE, taking its last element.  */
1945 insn = PREV_INSN (insn);
1946 if (insn && GET_CODE (insn) == INSN
1947 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1948 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
1954 /* Return the next insn after INSN that is not a NOTE. This routine does not
1955 look inside SEQUENCEs. */
1958 next_nonnote_insn (insn)
/* Advance until the end of the chain or a non-NOTE insn.  */
1963 insn = NEXT_INSN (insn);
1964 if (insn == 0 || GET_CODE (insn) != NOTE)
1971 /* Return the previous insn before INSN that is not a NOTE. This routine does
1972 not look inside SEQUENCEs. */
1975 prev_nonnote_insn (insn)
/* Walk backwards until the chain start or a non-NOTE insn.  */
1980 insn = PREV_INSN (insn);
1981 if (insn == 0 || GET_CODE (insn) != NOTE)
1988 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
1989 or 0, if there is none. This routine does not look inside
1993 next_real_insn (insn)
/* Skip notes, labels, barriers -- stop only at real insns.  */
1998 insn = NEXT_INSN (insn);
1999 if (insn == 0 || GET_CODE (insn) == INSN
2000 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2007 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2008 or 0, if there is none. This routine does not look inside
2012 prev_real_insn (insn)
/* Walk backwards past notes, labels, and barriers.  */
2017 insn = PREV_INSN (insn);
2018 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2019 || GET_CODE (insn) == JUMP_INSN)
2026 /* Find the next insn after INSN that really does something. This routine
2027 does not look inside SEQUENCEs. Until reload has completed, this is the
2028 same as next_real_insn. */
2031 next_active_insn (insn)
/* After reload, USE and CLOBBER insns no longer count as "active".  */
2036 insn = NEXT_INSN (insn);
2038 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2039 || (GET_CODE (insn) == INSN
2040 && (! reload_completed
2041 || (GET_CODE (PATTERN (insn)) != USE
2042 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2049 /* Find the last insn before INSN that really does something. This routine
2050 does not look inside SEQUENCEs. Until reload has completed, this is the
2051 same as prev_real_insn. */
2054 prev_active_insn (insn)
/* Mirror of next_active_insn: post-reload USE/CLOBBER are not active.  */
2059 insn = PREV_INSN (insn);
2061 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2062 || (GET_CODE (insn) == INSN
2063 && (! reload_completed
2064 || (GET_CODE (PATTERN (insn)) != USE
2065 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2072 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
/* Forward scan terminating at chain end or the first CODE_LABEL.  */
2080 insn = NEXT_INSN (insn);
2081 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2088 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
/* Backward scan terminating at chain start or the first CODE_LABEL.  */
2096 insn = PREV_INSN (insn);
2097 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2105 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2106 and REG_CC_USER notes so we can find it. */
2109 link_cc0_insns (insn)
2112 rtx user = next_nonnote_insn (insn);
/* If the user landed inside a delay-slot SEQUENCE, link to its head.  */
2114 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2115 user = XVECEXP (PATTERN (user), 0, 0);
/* Cross-link the two insns via REG_CC_SETTER / REG_CC_USER notes.  */
2117 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2119 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2122 /* Return the next insn that uses CC0 after INSN, which is assumed to
2123 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2124 applied to the result of this function should yield INSN).
2126 Normally, this is simply the next insn. However, if a REG_CC_USER note
2127 is present, it contains the insn that uses CC0.
2129 Return 0 if we can't find the insn. */
2132 next_cc0_user (insn)
/* An explicit note overrides the positional search.  */
2135 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2138 return XEXP (note, 0);
2140 insn = next_nonnote_insn (insn);
2141 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2142 insn = XVECEXP (PATTERN (insn), 0, 0);
/* Only accept a real insn whose pattern actually mentions cc0.  */
2144 if (insn && GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2145 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2151 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2152 note, it is the previous insn. */
2155 prev_cc0_setter (insn)
/* An explicit note overrides the positional search.  */
2158 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2161 return XEXP (note, 0);
2163 insn = prev_nonnote_insn (insn);
/* The previous non-note insn is required to be the cc0 setter.  */
2164 if (! sets_cc0_p (PATTERN (insn)))
2171 /* Try splitting insns that can be split for better scheduling.
2172 PAT is the pattern which might split.
2173 TRIAL is the insn providing PAT.
2174 LAST is non-zero if we should return the last insn of the sequence produced.
2176 If this routine succeeds in splitting, it returns the first or last
2177 replacement insn depending on the value of LAST. Otherwise, it
2178 returns TRIAL. If the insn to be returned can be split, it will be. */
2181 try_split (pat, trial, last)
2185 rtx before = PREV_INSN (trial);
2186 rtx after = NEXT_INSN (trial);
2187 rtx seq = split_insns (pat, trial);
2188 int has_barrier = 0;
2191 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2192 We may need to handle this specially. */
2193 if (after && GET_CODE (after) == BARRIER)
2196 after = NEXT_INSN (after);
2201 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2202 The latter case will normally arise only when being done so that
2203 it, in turn, will be split (SFmode on the 29k is an example). */
2204 if (GET_CODE (seq) == SEQUENCE)
2206 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2207 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2208 increment the usage count so we don't delete the label. */
2211 if (GET_CODE (trial) == JUMP_INSN)
2212 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2213 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2215 JUMP_LABEL (XVECEXP (seq, 0, i)) = JUMP_LABEL (trial);
2217 if (JUMP_LABEL (trial))
2218 LABEL_NUSES (JUMP_LABEL (trial))++;
/* Replace TRIAL with the split sequence and restore the barrier.  */
2221 tem = emit_insn_after (seq, before);
2223 delete_insn (trial);
2225 emit_barrier_after (tem);
2227 /* Recursively call try_split for each new insn created; by the
2228 time control returns here that insn will be fully split, so
2229 set LAST and continue from the insn after the one returned.
2230 We can't use next_active_insn here since AFTER may be a note.
2231 Ignore deleted insns, which can occur if not optimizing. */
2232 for (tem = NEXT_INSN (before); tem != after;
2233 tem = NEXT_INSN (tem))
2234 if (! INSN_DELETED_P (tem)
2235 && GET_RTX_CLASS (GET_CODE (tem)) == 'i')
2236 tem = try_split (PATTERN (tem), tem, 1);
2238 /* Avoid infinite loop if the result matches the original pattern. */
2239 else if (rtx_equal_p (seq, pat))
/* Single-insn result: install it in TRIAL and try splitting again.  */
2243 PATTERN (trial) = seq;
2244 INSN_CODE (trial) = -1;
2245 try_split (seq, trial, last);
2248 /* Return either the first or the last insn, depending on which was
2250 return last ? prev_active_insn (after) : next_active_insn (before);
2256 /* Make and return an INSN rtx, initializing all its slots.
2257 Store PATTERN in the pattern slots. */
2260 make_insn_raw (pattern)
2265 /* If in RTL generation phase, see if FREE_INSN can be used. */
/* Reuse a recycled insn only when not garbage-collecting rtl.  */
2266 if (!ggc_p && free_insn != 0 && rtx_equal_function_value_matters)
2269 free_insn = NEXT_INSN (free_insn);
2270 PUT_CODE (insn, INSN);
2273 insn = rtx_alloc (INSN);
/* Fresh uid; INSN_CODE -1 means "not yet recognized".  */
2275 INSN_UID (insn) = cur_insn_uid++;
2276 PATTERN (insn) = pattern;
2277 INSN_CODE (insn) = -1;
2278 LOG_LINKS (insn) = NULL;
2279 REG_NOTES (insn) = NULL;
2284 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2287 make_jump_insn_raw (pattern)
2292 insn = rtx_alloc (JUMP_INSN);
2293 INSN_UID (insn) = cur_insn_uid++;
2295 PATTERN (insn) = pattern;
2296 INSN_CODE (insn) = -1;
2297 LOG_LINKS (insn) = NULL;
2298 REG_NOTES (insn) = NULL;
/* JUMP_LABEL is filled in later by jump analysis.  */
2299 JUMP_LABEL (insn) = NULL;
2304 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2307 make_call_insn_raw (pattern)
2312 insn = rtx_alloc (CALL_INSN)
2313 INSN_UID (insn) = cur_insn_uid++;
2315 PATTERN (insn) = pattern;
2316 INSN_CODE (insn) = -1;
2317 LOG_LINKS (insn) = NULL;
2318 REG_NOTES (insn) = NULL;
/* USAGE list (registers used/clobbered by the call) added later.  */
2319 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2324 /* Add INSN to the end of the doubly-linked list.
2325 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
/* Link INSN after the current last_insn; handle the empty-chain case.  */
2331 PREV_INSN (insn) = last_insn;
2332 NEXT_INSN (insn) = 0;
2334 if (NULL != last_insn)
2335 NEXT_INSN (last_insn) = insn;
2337 if (NULL == first_insn)
2343 /* Add INSN into the doubly-linked list after insn AFTER. This and
2344 the next should be the only functions called to insert an insn once
2345 delay slots have been filled since only they know how to update a
2349 add_insn_after (insn, after)
2352 rtx next = NEXT_INSN (after);
/* Inserting after a deleted insn is a caller bug when optimizing.  */
2354 if (optimize && INSN_DELETED_P (after))
2357 NEXT_INSN (insn) = next;
2358 PREV_INSN (insn) = after;
2362 PREV_INSN (next) = insn;
/* Keep the interior back-link of a delay-slot SEQUENCE consistent.  */
2363 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2364 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2366 else if (last_insn == after)
2370 struct sequence_stack *stack = seq_stack;
2371 /* Scan all pending sequences too. */
2372 for (; stack; stack = stack->next)
2373 if (after == stack->last)
2383 NEXT_INSN (after) = insn;
2384 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2386 rtx sequence = PATTERN (after);
2387 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2391 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2392 the previous should be the only functions called to insert an insn once
2393 delay slots have been filled since only they know how to update a
2397 add_insn_before (insn, before)
2400 rtx prev = PREV_INSN (before);
/* Inserting before a deleted insn is a caller bug when optimizing.  */
2402 if (optimize && INSN_DELETED_P (before))
2405 PREV_INSN (insn) = prev;
2406 NEXT_INSN (insn) = before;
2410 NEXT_INSN (prev) = insn;
/* Keep the interior forward-link of a delay-slot SEQUENCE consistent.  */
2411 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2413 rtx sequence = PATTERN (prev);
2414 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2417 else if (first_insn == before)
2421 struct sequence_stack *stack = seq_stack;
2422 /* Scan all pending sequences too. */
2423 for (; stack; stack = stack->next)
2424 if (before == stack->first)
2426 stack->first = insn;
2434 PREV_INSN (before) = insn;
2435 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2436 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2439 /* Remove an insn from its doubly-linked list. This function knows how
2440 to handle sequences. */
2445 rtx next = NEXT_INSN (insn);
2446 rtx prev = PREV_INSN (insn);
/* Splice forward link, fixing any enclosing SEQUENCE's tail link.  */
2449 NEXT_INSN (prev) = next;
2450 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2452 rtx sequence = PATTERN (prev);
2453 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2456 else if (first_insn == insn)
2460 struct sequence_stack *stack = seq_stack;
2461 /* Scan all pending sequences too. */
2462 for (; stack; stack = stack->next)
2463 if (insn == stack->first)
2465 stack->first = next;
/* Splice backward link, fixing any enclosing SEQUENCE's head link.  */
2475 PREV_INSN (next) = prev;
2476 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2477 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2479 else if (last_insn == insn)
2483 struct sequence_stack *stack = seq_stack;
2484 /* Scan all pending sequences too. */
2485 for (; stack; stack = stack->next)
2486 if (insn == stack->last)
2497 /* Delete all insns made since FROM.
2498 FROM becomes the new last instruction. */
2501 delete_insns_since (from)
/* Truncating the chain here drops everything emitted after FROM.  */
2507 NEXT_INSN (from) = 0;
2511 /* This function is deprecated, please use sequences instead.
2513 Move a consecutive bunch of insns to a different place in the chain.
2514 The insns to be moved are those between FROM and TO.
2515 They are moved to a new position after the insn AFTER.
2516 AFTER must not be FROM or TO or any insn in between.
2518 This function does not know about SEQUENCEs and hence should not be
2519 called after delay-slot filling has been done. */
2522 reorder_insns (from, to, after)
2523 rtx from, to, after;
2525 /* Splice this bunch out of where it is now. */
2526 if (PREV_INSN (from))
2527 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2529 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
/* Keep first_insn/last_insn accurate if the range touched an end.  */
2530 if (last_insn == to)
2531 last_insn = PREV_INSN (from);
2532 if (first_insn == from)
2533 first_insn = NEXT_INSN (to);
2535 /* Make the new neighbors point to it and it to them. */
2536 if (NEXT_INSN (after))
2537 PREV_INSN (NEXT_INSN (after)) = to;
2539 NEXT_INSN (to) = NEXT_INSN (after);
2540 PREV_INSN (from) = after;
2541 NEXT_INSN (after) = from;
2542 if (after == last_insn)
2546 /* Return the line note insn preceding INSN. */
2549 find_line_note (insn)
/* With line numbers disabled there is nothing to find.  */
2552 if (no_line_numbers)
/* Non-negative NOTE_LINE_NUMBER identifies a source-line note.  */
2555 for (; insn; insn = PREV_INSN (insn))
2556 if (GET_CODE (insn) == NOTE
2557 && NOTE_LINE_NUMBER (insn) >= 0)
2563 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2564 of the moved insns when debugging. This may insert a note between AFTER
2565 and FROM, and another one after TO. */
2568 reorder_insns_with_line_notes (from, to, after)
2569 rtx from, to, after;
2571 rtx from_line = find_line_note (from);
2572 rtx after_line = find_line_note (after);
2574 reorder_insns (from, to, after);
/* Same line at both places: no compensating notes are needed.  */
2576 if (from_line == after_line)
/* Re-establish the moved range's line, then restore the original
   line after the range.  */
2580 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2581 NOTE_LINE_NUMBER (from_line),
2584 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2585 NOTE_LINE_NUMBER (after_line),
2589 /* Remove unnecessary notes from the instruction stream. */
/* NOTE(review): the function name itself misspells "unnecessary";
   kept as-is because external callers reference this symbol.  */
2592 remove_unncessary_notes ()
2596 varray_type block_stack;
2598 /* Remove NOTE_INSN_DELETED notes. We must not remove the first
2599 instruction in the function because the compiler depends on the
2600 first instruction being a note. */
2601 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
2603 /* Remember what's next. */
2604 next = NEXT_INSN (insn);
2606 /* We're only interested in notes. */
2607 if (GET_CODE (insn) != NOTE)
2610 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
2616 /* Emit an insn of given code and pattern
2617 at a specified place within the doubly-linked list. */
2619 /* Make an instruction with body PATTERN
2620 and output it before the instruction BEFORE. */
2623 emit_insn_before (pattern, before)
2624 register rtx pattern, before;
2626 register rtx insn = before;
/* A SEQUENCE is spliced in element by element, not as one insn.  */
2628 if (GET_CODE (pattern) == SEQUENCE)
2632 for (i = 0; i < XVECLEN (pattern, 0); i++)
2634 insn = XVECEXP (pattern, 0, i);
2635 add_insn_before (insn, before);
/* Recycle the emptied SEQUENCE rtvec when not garbage-collecting.  */
2637 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2638 sequence_result[XVECLEN (pattern, 0)] = pattern;
2642 insn = make_insn_raw (pattern);
2643 add_insn_before (insn, before);
2649 /* Similar to emit_insn_before, but update basic block boundaries as well. */
2652 emit_block_insn_before (pattern, before, block)
2653 rtx pattern, before;
/* Sample PREV_INSN before emitting so the new block head can be
   recomputed afterwards.  */
2656 rtx prev = PREV_INSN (before);
2657 rtx r = emit_insn_before (pattern, before);
/* If BEFORE was the head of BLOCK, the first newly emitted insn
   becomes the new head.  */
2658 if (block && block->head == before)
2659 block->head = NEXT_INSN (prev);
2663 /* Make an instruction with body PATTERN and code JUMP_INSN
2664 and output it before the instruction BEFORE. */
2667 emit_jump_insn_before (pattern, before)
2668 register rtx pattern, before;
/* SEQUENCE elements already carry their own insn codes, so delegate
   to the generic emitter.  */
2672 if (GET_CODE (pattern) == SEQUENCE)
2673 insn = emit_insn_before (pattern, before);
2676 insn = make_jump_insn_raw (pattern);
2677 add_insn_before (insn, before);
2683 /* Make an instruction with body PATTERN and code CALL_INSN
2684 and output it before the instruction BEFORE. */
2687 emit_call_insn_before (pattern, before)
2688 register rtx pattern, before;
2692 if (GET_CODE (pattern) == SEQUENCE)
2693 insn = emit_insn_before (pattern, before);
2696 insn = make_call_insn_raw (pattern);
2697 add_insn_before (insn, before);
/* NOTE(review): this PUT_CODE looks redundant if make_call_insn_raw
   already yields a CALL_INSN -- confirm against its definition.  */
2698 PUT_CODE (insn, CALL_INSN);
2704 /* Make an insn of code BARRIER
2705 and output it before the insn BEFORE. */
2708 emit_barrier_before (before)
2709 register rtx before;
2711 register rtx insn = rtx_alloc (BARRIER);
/* Barriers are numbered like any other insn so passes can index them. */
2713 INSN_UID (insn) = cur_insn_uid++;
2715 add_insn_before (insn, before);
2719 /* Emit the label LABEL before the insn BEFORE. */
2722 emit_label_before (label, before)
2725 /* This can be called twice for the same label as a result of the
2726 confusion that follows a syntax error! So make it harmless. */
/* A UID of 0 marks a label that has not yet been placed in the chain. */
2727 if (INSN_UID (label) == 0)
2729 INSN_UID (label) = cur_insn_uid++;
2730 add_insn_before (label, before);
2736 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
2739 emit_note_before (subtype, before)
2743 register rtx note = rtx_alloc (NOTE);
2744 INSN_UID (note) = cur_insn_uid++;
2745 NOTE_SOURCE_FILE (note) = 0;
/* For insn-kind notes the line-number field holds the subtype code.  */
2746 NOTE_LINE_NUMBER (note) = subtype;
2748 add_insn_before (note, before);
2752 /* Make an insn of code INSN with body PATTERN
2753 and output it after the insn AFTER. */
2756 emit_insn_after (pattern, after)
2757 register rtx pattern, after;
2759 register rtx insn = after;
/* SEQUENCEs are spliced in element by element, mirroring
   emit_insn_before.  */
2761 if (GET_CODE (pattern) == SEQUENCE)
2765 for (i = 0; i < XVECLEN (pattern, 0); i++)
2767 insn = XVECEXP (pattern, 0, i);
2768 add_insn_after (insn, after);
/* Without GC, small SEQUENCE rtxs are recycled through
   sequence_result for reuse by gen_sequence.  */
2771 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2772 sequence_result[XVECLEN (pattern, 0)] = pattern;
2776 insn = make_insn_raw (pattern);
2777 add_insn_after (insn, after);
2783 /* Similar to emit_insn_after, except that line notes are to be inserted so
2784 as to act as if this insn were at FROM. */
2787 emit_insn_after_with_line_notes (pattern, after, from)
2788 rtx pattern, after, from;
2790 rtx from_line = find_line_note (from);
2791 rtx after_line = find_line_note (after);
2792 rtx insn = emit_insn_after (pattern, after);
/* Bracket the new insn with FROM's line note, then restore AFTER's
   line for the insns that follow it.  */
2795 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2796 NOTE_LINE_NUMBER (from_line),
2800 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2801 NOTE_LINE_NUMBER (after_line),
2805 /* Similar to emit_insn_after, but update basic block boundaries as well. */
2808 emit_block_insn_after (pattern, after, block)
2812 rtx r = emit_insn_after (pattern, after);
/* If AFTER was BLOCK's last insn, the block now ends at the new insn. */
2813 if (block && block->end == after)
2818 /* Make an insn of code JUMP_INSN with body PATTERN
2819 and output it after the insn AFTER. */
2822 emit_jump_insn_after (pattern, after)
2823 register rtx pattern, after;
/* SEQUENCE elements already carry their own insn codes, so delegate.  */
2827 if (GET_CODE (pattern) == SEQUENCE)
2828 insn = emit_insn_after (pattern, after);
2831 insn = make_jump_insn_raw (pattern);
2832 add_insn_after (insn, after);
2838 /* Make an insn of code BARRIER
2839 and output it after the insn AFTER. */
2842 emit_barrier_after (after)
2845 register rtx insn = rtx_alloc (BARRIER);
/* Barriers are numbered like any other insn.  */
2847 INSN_UID (insn) = cur_insn_uid++;
2849 add_insn_after (insn, after);
2853 /* Emit the label LABEL after the insn AFTER. */
2856 emit_label_after (label, after)
2859 /* This can be called twice for the same label
2860 as a result of the confusion that follows a syntax error!
2861 So make it harmless. */
/* A UID of 0 marks a label that has not yet been placed in the chain. */
2862 if (INSN_UID (label) == 0)
2864 INSN_UID (label) = cur_insn_uid++;
2865 add_insn_after (label, after);
2871 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
2874 emit_note_after (subtype, after)
2878 register rtx note = rtx_alloc (NOTE);
2879 INSN_UID (note) = cur_insn_uid++;
2880 NOTE_SOURCE_FILE (note) = 0;
/* For insn-kind notes the line-number field holds the subtype code.  */
2881 NOTE_LINE_NUMBER (note) = subtype;
2882 add_insn_after (note, after);
2886 /* Emit a line note for FILE and LINE after the insn AFTER. */
2889 emit_line_note_after (file, line, after)
/* Suppress genuine line notes when line numbers are disabled;
   nonpositive LINE values encode insn subtypes and are still emitted. */
2896 if (no_line_numbers && line > 0)
2902 note = rtx_alloc (NOTE);
2903 INSN_UID (note) = cur_insn_uid++;
2904 NOTE_SOURCE_FILE (note) = file;
2905 NOTE_LINE_NUMBER (note) = line;
2906 add_insn_after (note, after);
2910 /* Make an insn of code INSN with pattern PATTERN
2911 and add it to the end of the doubly-linked list.
2912 If PATTERN is a SEQUENCE, take the elements of it
2913 and emit an insn for each element.
2915 Returns the last insn emitted. */
2921 rtx insn = last_insn;
2923 if (GET_CODE (pattern) == SEQUENCE)
2927 for (i = 0; i < XVECLEN (pattern, 0); i++)
2929 insn = XVECEXP (pattern, 0, i);
/* Without GC, recycle small SEQUENCE rtxs through sequence_result
   for reuse by gen_sequence.  */
2932 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2933 sequence_result[XVECLEN (pattern, 0)] = pattern;
2937 insn = make_insn_raw (pattern);
2944 /* Emit the insns in a chain starting with INSN.
2945 Return the last insn emitted. */
/* NEXT is captured before INSN is re-linked into the current stream,
   since emitting rewires INSN's chain pointers.  */
2955 rtx next = NEXT_INSN (insn);
2964 /* Emit the insns in a chain starting with INSN and place them in front of
2965 the insn BEFORE. Return the last insn emitted. */
2968 emit_insns_before (insn, before)
/* Save NEXT first: add_insn_before rewires INSN's chain pointers.  */
2976 rtx next = NEXT_INSN (insn);
2977 add_insn_before (insn, before);
2985 /* Emit the insns in a chain starting with FIRST and place them in back of
2986 the insn AFTER. Return the last insn emitted. */
2989 emit_insns_after (first, after)
2994 register rtx after_after;
/* Find the last insn of the chain being spliced in.  */
3002 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3005 after_after = NEXT_INSN (after);
/* Splice FIRST..LAST between AFTER and its old successor.  */
3007 NEXT_INSN (after) = first;
3008 PREV_INSN (first) = after;
3009 NEXT_INSN (last) = after_after;
3011 PREV_INSN (after_after) = last;
/* Keep the chain's tail pointer current when appending at the end.  */
3013 if (after == last_insn)
3018 /* Make an insn of code JUMP_INSN with pattern PATTERN
3019 and add it to the end of the doubly-linked list. */
3022 emit_jump_insn (pattern)
/* SEQUENCE elements already carry their own insn codes.  */
3025 if (GET_CODE (pattern) == SEQUENCE)
3026 return emit_insn (pattern);
3029 register rtx insn = make_jump_insn_raw (pattern);
3035 /* Make an insn of code CALL_INSN with pattern PATTERN
3036 and add it to the end of the doubly-linked list. */
3039 emit_call_insn (pattern)
3042 if (GET_CODE (pattern) == SEQUENCE)
3043 return emit_insn (pattern);
3046 register rtx insn = make_call_insn_raw (pattern);
/* NOTE(review): this PUT_CODE looks redundant if make_call_insn_raw
   already yields a CALL_INSN -- confirm against its definition.  */
3048 PUT_CODE (insn, CALL_INSN);
3053 /* Add the label LABEL to the end of the doubly-linked list. */
3059 /* This can be called twice for the same label
3060 as a result of the confusion that follows a syntax error!
3061 So make it harmless. */
/* A UID of 0 marks a label that has not yet been placed in the chain. */
3062 if (INSN_UID (label) == 0)
3064 INSN_UID (label) = cur_insn_uid++;
3070 /* Make an insn of code BARRIER
3071 and add it to the end of the doubly-linked list. */
3076 register rtx barrier = rtx_alloc (BARRIER);
/* Barriers are numbered like any other insn.  */
3077 INSN_UID (barrier) = cur_insn_uid++;
3082 /* Make an insn of code NOTE
3083 with data-fields specified by FILE and LINE
3084 and add it to the end of the doubly-linked list,
3085 but only if line-numbers are desired for debugging info. */
3088 emit_line_note (file, line)
/* Record the position for the statement tree even when the note
   itself is suppressed below.  */
3092 set_file_and_line_for_stmt (file, line);
3095 if (no_line_numbers)
3099 return emit_note (file, line);
3102 /* Make an insn of code NOTE
3103 with data-fields specified by FILE and LINE
3104 and add it to the end of the doubly-linked list.
3105 If it is a line-number NOTE, omit it if it matches the previous one. */
3108 emit_note (file, line)
/* Skip duplicate line notes: same file (compared by content, not
   pointer) and same line as the last one emitted.  */
3116 if (file && last_filename && !strcmp (file, last_filename)
3117 && line == last_linenum)
3119 last_filename = file;
3120 last_linenum = line;
/* Positive LINE values are real line notes and obey no_line_numbers;
   nonpositive values encode insn subtypes and are always emitted.  */
3123 if (no_line_numbers && line > 0)
3129 note = rtx_alloc (NOTE);
3130 INSN_UID (note) = cur_insn_uid++;
3131 NOTE_SOURCE_FILE (note) = file;
3132 NOTE_LINE_NUMBER (note) = line;
3137 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3140 emit_line_note_force (file, line)
/* NOTE(review): expected to invalidate the duplicate-note cache
   (last_linenum) before delegating -- confirm against full source.  */
3145 return emit_line_note (file, line);
3148 /* Cause next statement to emit a line note even if the line number
3149 has not changed. This is used at the beginning of a function. */
/* NOTE(review): presumably works by invalidating the last-seen
   file/line cache used by emit_note -- confirm against full source.  */
3152 force_next_line_note ()
3157 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3158 note of this type already exists, remove it first. */
3161 set_unique_reg_note (insn, kind, datum)
3166 rtx note = find_reg_note (insn, kind, NULL_RTX);
3168 /* First remove the note if there already is one. */
3170 remove_note (insn, note);
/* Then push the replacement on the front of the REG_NOTES list.  */
3172 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3175 /* Return an indication of which type of insn should have X as a body.
3176 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3182 if (GET_CODE (x) == CODE_LABEL)
3184 if (GET_CODE (x) == CALL)
3186 if (GET_CODE (x) == RETURN)
/* A SET whose destination is pc is a jump; a SET whose source is a
   CALL is a call.  */
3188 if (GET_CODE (x) == SET)
3190 if (SET_DEST (x) == pc_rtx)
3192 else if (GET_CODE (SET_SRC (x)) == CALL)
/* For a PARALLEL, classify by scanning the elements: a CALL anywhere,
   a SET of pc, or a SET from a CALL each determines the insn kind.  */
3197 if (GET_CODE (x) == PARALLEL)
3200 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3201 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3203 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3204 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3206 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3207 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3213 /* Emit the rtl pattern X as an appropriate kind of insn.
3214 If X is a label, it is simply added into the insn chain. */
3220 enum rtx_code code = classify_insn (x);
3222 if (code == CODE_LABEL)
3223 return emit_label (x);
3224 else if (code == INSN)
3225 return emit_insn (x);
3226 else if (code == JUMP_INSN)
3228 register rtx insn = emit_jump_insn (x);
/* Unconditional jumps and returns terminate control flow, so they
   are followed by a barrier.  */
3229 if (simplejump_p (insn) || GET_CODE (x) == RETURN)
3230 return emit_barrier ();
3233 else if (code == CALL_INSN)
3234 return emit_call_insn (x);
3239 /* Begin emitting insns to a sequence which can be packaged in an
3240 RTL_EXPR. If this sequence will contain something that might cause
3241 the compiler to pop arguments to function calls (because those
3242 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3243 details), use do_pending_stack_adjust before calling this function.
3244 That will ensure that the deferred pops are not accidentally
3245 emitted in the middle of this sequence. */
3250 struct sequence_stack *tem;
3252 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
/* Push the current emission state so end_sequence can restore it.  */
3254 tem->next = seq_stack;
3255 tem->first = first_insn;
3256 tem->last = last_insn;
3257 tem->sequence_rtl_expr = seq_rtl_expr;
3265 /* Similarly, but indicate that this sequence will be placed in T, an
3266 RTL_EXPR. See the documentation for start_sequence for more
3267 information about how to use this function. */
/* NOTE(review): expected to call start_sequence and then record T in
   seq_rtl_expr -- confirm ordering against full source.  */
3270 start_sequence_for_rtl_expr (t)
3278 /* Set up the insn chain starting with FIRST as the current sequence,
3279 saving the previously current one. See the documentation for
3280 start_sequence for more information about how to use this function. */
3283 push_to_sequence (first)
/* Walk to the end of FIRST's chain so last_insn can be set.  */
3290 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3296 /* Set up the outer-level insn chain
3297 as the current sequence, saving the previously current one. */
3300 push_topmost_sequence ()
3302 struct sequence_stack *stack, *top = NULL;
/* The last entry on seq_stack holds the outermost (function-level)
   insn chain.  */
3306 for (stack = seq_stack; stack; stack = stack->next)
3309 first_insn = top->first;
3310 last_insn = top->last;
3311 seq_rtl_expr = top->sequence_rtl_expr;
3314 /* After emitting to the outer-level insn chain, update the outer-level
3315 insn chain, and restore the previous saved state. */
3318 pop_topmost_sequence ()
3320 struct sequence_stack *stack, *top = NULL;
3322 for (stack = seq_stack; stack; stack = stack->next)
/* Write the possibly-extended outer chain back into the bottom
   stack entry before restoring.  */
3325 top->first = first_insn;
3326 top->last = last_insn;
3327 /* ??? Why don't we save seq_rtl_expr here? */
3332 /* After emitting to a sequence, restore previous saved state.
3334 To get the contents of the sequence just made, you must call
3335 `gen_sequence' *before* calling here.
3337 If the compiler might have deferred popping arguments while
3338 generating this sequence, and this sequence will not be immediately
3339 inserted into the instruction stream, use do_pending_stack_adjust
3340 before calling gen_sequence. That will ensure that the deferred
3341 pops are inserted into this sequence, and not into some random
3342 location in the instruction stream. See INHIBIT_DEFER_POP for more
3343 information about deferred popping of arguments. */
3348 struct sequence_stack *tem = seq_stack;
/* Pop the emission state pushed by start_sequence/push_to_sequence.  */
3350 first_insn = tem->first;
3351 last_insn = tem->last;
3352 seq_rtl_expr = tem->sequence_rtl_expr;
3353 seq_stack = tem->next;
3358 /* Return 1 if currently emitting into a sequence. */
/* Nonzero iff a start_sequence/push_to_sequence nesting is active.  */
3363 return seq_stack != 0;
3366 /* Generate a SEQUENCE rtx containing the insns already emitted
3367 to the current sequence.
3369 This is how the gen_... function from a DEFINE_EXPAND
3370 constructs the SEQUENCE that it returns. */
3380 /* Count the insns in the chain. */
3382 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3385 /* If only one insn, return its pattern rather than a SEQUENCE.
3386 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3387 the case of an empty list.) */
3389 && ! RTX_FRAME_RELATED_P (first_insn)
3390 && (GET_CODE (first_insn) == INSN
3391 || GET_CODE (first_insn) == JUMP_INSN
3392 /* Don't discard the call usage field. */
3393 || (GET_CODE (first_insn) == CALL_INSN
3394 && CALL_INSN_FUNCTION_USAGE (first_insn) == NULL_RTX)))
/* Without GC, recycle the now-unneeded insn shell on the free list.  */
3398 NEXT_INSN (first_insn) = free_insn;
3399 free_insn = first_insn;
3401 return PATTERN (first_insn);
3404 /* Put them in a vector. See if we already have a SEQUENCE of the
3405 appropriate length around. */
3406 if (!ggc_p && len < SEQUENCE_RESULT_SIZE
3407 && (result = sequence_result[len]) != 0)
3408 sequence_result[len] = 0;
3411 /* Ensure that this rtl goes in saveable_obstack, since we may
3413 push_obstacks_nochange ();
3414 rtl_in_saveable_obstack ();
3415 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
/* Fill the vector with the insns themselves (not copies).  */
3419 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3420 XVECEXP (result, 0, i) = tem;
3425 /* Put the various virtual registers into REGNO_REG_RTX. */
3428 init_virtual_regs (es)
3429 struct emit_status *es;
/* Install the shared virtual-register rtxs at their fixed regnos in
   ES's register table.  */
3431 rtx *ptr = es->x_regno_reg_rtx;
3432 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3433 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3434 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3435 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3436 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
/* Reset the per-function caches maintained by this file.  */
3440 clear_emit_caches ()
3444 /* Clear the start_sequence/gen_sequence cache. */
3445 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3446 sequence_result[i] = 0;
/* State shared between copy_insn and copy_insn_1 while a single insn
   is being copied; copy_insn resets it before each top-level copy.  */
3450 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3451 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3452 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3453 static int copy_insn_n_scratches;
3455 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3456 copied an ASM_OPERANDS.
3457 In that case, it is the original input-operand vector. */
3458 static rtvec orig_asm_operands_vector;
3460 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3461 copied an ASM_OPERANDS.
3462 In that case, it is the copied input-operand vector. */
3463 static rtvec copy_asm_operands_vector;
3465 /* Likewise for the constraints vector. */
3466 static rtvec orig_asm_constraints_vector;
3467 static rtvec copy_asm_constraints_vector;
3469 /* Recursively create a new copy of an rtx for copy_insn.
3470 This function differs from copy_rtx in that it handles SCRATCHes and
3471 ASM_OPERANDs properly.
3472 Normally, this function is not used directly; use copy_insn as front end.
3473 However, you could first copy an insn pattern with copy_insn and then use
3474 this function afterwards to properly copy any REG_NOTEs containing
3483 register RTX_CODE code;
3484 register const char *format_ptr;
3486 code = GET_CODE (orig);
/* Return the copy made earlier in this insn for a SCRATCH already
   seen, so all references share one object.  */
3502 for (i = 0; i < copy_insn_n_scratches; i++)
3503 if (copy_insn_scratch_in[i] == orig)
3504 return copy_insn_scratch_out[i];
3508 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3509 a LABEL_REF, it isn't sharable. */
3510 if (GET_CODE (XEXP (orig, 0)) == PLUS
3511 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3512 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3516 /* A MEM with a constant address is not sharable. The problem is that
3517 the constant address may need to be reloaded. If the mem is shared,
3518 then reloading one copy of this mem will cause all copies to appear
3519 to have been reloaded. */
3525 copy = rtx_alloc (code);
3527 /* Copy the various flags, and other information. We assume that
3528 all fields need copying, and then clear the fields that should
3529 not be copied. That is the sensible default behavior, and forces
3530 us to explicitly document why we are *not* copying a flag. */
3531 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
3533 /* We do not copy the USED flag, which is used as a mark bit during
3534 walks over the RTL. */
3537 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3538 if (GET_RTX_CLASS (code) == 'i')
3542 copy->frame_related = 0;
/* Recurse over the operands, dispatching on the rtx format string.  */
3545 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3547 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3549 copy->fld[i] = orig->fld[i];
3550 switch (*format_ptr++)
3553 if (XEXP (orig, i) != NULL)
3554 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
/* ASM_OPERANDS vectors already copied for this insn are shared
   rather than duplicated again.  */
3559 if (XVEC (orig, i) == orig_asm_constraints_vector)
3560 XVEC (copy, i) = copy_asm_constraints_vector;
3561 else if (XVEC (orig, i) == orig_asm_operands_vector)
3562 XVEC (copy, i) = copy_asm_operands_vector;
3563 else if (XVEC (orig, i) != NULL)
3565 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3566 for (j = 0; j < XVECLEN (copy, i); j++)
3567 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
3573 bitmap new_bits = BITMAP_OBSTACK_ALLOC (rtl_obstack);
3574 bitmap_copy (new_bits, XBITMAP (orig, i));
3575 XBITMAP (copy, i) = new_bits;
3586 /* These are left unchanged. */
/* Record SCRATCH and ASM_OPERANDS copies so later references within
   the same insn share them.  */
3594 if (code == SCRATCH)
3596 i = copy_insn_n_scratches++;
3597 if (i >= MAX_RECOG_OPERANDS)
3599 copy_insn_scratch_in[i] = orig;
3600 copy_insn_scratch_out[i] = copy;
3602 else if (code == ASM_OPERANDS)
3604 orig_asm_operands_vector = XVEC (orig, 3);
3605 copy_asm_operands_vector = XVEC (copy, 3);
3606 orig_asm_constraints_vector = XVEC (orig, 4);
3607 copy_asm_constraints_vector = XVEC (copy, 4);
3613 /* Create a new copy of an rtx.
3614 This function differs from copy_rtx in that it handles SCRATCHes and
3615 ASM_OPERANDs properly.
3616 INSN doesn't really have to be a full INSN; it could be just the
/* Reset the per-copy sharing state, then do the recursive copy.  */
3622 copy_insn_n_scratches = 0;
3623 orig_asm_operands_vector = 0;
3624 orig_asm_constraints_vector = 0;
3625 copy_asm_operands_vector = 0;
3626 copy_asm_constraints_vector = 0;
3627 return copy_insn_1 (insn);
3630 /* Initialize data structures and variables in this file
3631 before generating rtl for each function. */
3636 struct function *f = current_function;
3638 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
3641 seq_rtl_expr = NULL;
/* Pseudo registers start just past the fixed virtual registers.  */
3643 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3646 first_label_num = label_num;
3650 clear_emit_caches ();
3652 /* Init the tables that describe all the pseudo regs. */
/* The extra 100 slots give headroom before the tables must grow.  */
3654 f->emit->regno_pointer_flag_length = LAST_VIRTUAL_REGISTER + 101;
3656 f->emit->regno_pointer_flag
3657 = (char *) xcalloc (f->emit->regno_pointer_flag_length, sizeof (char));
3659 f->emit->regno_pointer_align
3660 = (char *) xcalloc (f->emit->regno_pointer_flag_length,
/* NOTE(review): sizeof (rtx) appears in the count argument of this
   xcalloc call -- verify the argument pairing against full source.  */
3664 = (rtx *) xcalloc (f->emit->regno_pointer_flag_length * sizeof (rtx),
3667 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3668 init_virtual_regs (f->emit);
3670 /* Indicate that the virtual registers and stack locations are
3672 REGNO_POINTER_FLAG (STACK_POINTER_REGNUM) = 1;
3673 REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM) = 1;
3674 REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM) = 1;
3675 REGNO_POINTER_FLAG (ARG_POINTER_REGNUM) = 1;
3677 REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM) = 1;
3678 REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM) = 1;
3679 REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM) = 1;
3680 REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM) = 1;
3681 REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM) = 1;
/* Record known alignments for the pointer-like registers when the
   target defines a stack boundary.  */
3683 #ifdef STACK_BOUNDARY
3684 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3685 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3686 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)
3687 = STACK_BOUNDARY / BITS_PER_UNIT;
3688 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3690 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM)
3691 = STACK_BOUNDARY / BITS_PER_UNIT;
3692 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
3693 = STACK_BOUNDARY / BITS_PER_UNIT;
3694 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM)
3695 = STACK_BOUNDARY / BITS_PER_UNIT;
3696 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM)
3697 = STACK_BOUNDARY / BITS_PER_UNIT;
3698 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = UNITS_PER_WORD;
3701 #ifdef INIT_EXPANDERS
3706 /* Mark SS for GC. */
3709 mark_sequence_stack (ss)
3710 struct sequence_stack *ss;
/* Mark each saved sequence's insn chain and RTL_EXPR as live roots.  */
3714 ggc_mark_rtx (ss->first);
3715 ggc_mark_tree (ss->sequence_rtl_expr);
3720 /* Mark ES for GC. */
3723 mark_emit_status (es)
3724 struct emit_status *es;
/* Walk the pseudo-register rtx table, then mark the saved sequence
   stack, the current RTL_EXPR, and the current insn chain.  */
3732 for (i = es->regno_pointer_flag_length, r = es->x_regno_reg_rtx;
3736 mark_sequence_stack (es->sequence_stack);
3737 ggc_mark_tree (es->sequence_rtl_expr);
3738 ggc_mark_rtx (es->x_first_insn);
3741 /* Create some permanent unique rtl objects shared between all functions.
3742 LINE_NUMBERS is nonzero if line numbers are to be generated. */
3745 init_emit_once (line_numbers)
3749 enum machine_mode mode;
/* NOTE(review): this local shadows the file-scope double_mode declared
   near the top of the file -- confirm which one later code relies on.  */
3750 enum machine_mode double_mode;
3752 no_line_numbers = ! line_numbers;
3754 /* Compute the word and byte modes. */
3756 byte_mode = VOIDmode;
3757 word_mode = VOIDmode;
3758 double_mode = VOIDmode;
/* Pick the narrowest integer modes matching the target's unit and
   word widths.  */
3760 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3761 mode = GET_MODE_WIDER_MODE (mode))
3763 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
3764 && byte_mode == VOIDmode)
3767 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
3768 && word_mode == VOIDmode)
3772 #ifndef DOUBLE_TYPE_SIZE
3773 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
3776 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3777 mode = GET_MODE_WIDER_MODE (mode))
3779 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
3780 && double_mode == VOIDmode)
3784 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
3786 /* Assign register numbers to the globally defined register rtx.
3787 This must be done at runtime because the register number field
3788 is in a union and some compilers can't initialize unions. */
3790 pc_rtx = gen_rtx (PC, VOIDmode);
3791 cc0_rtx = gen_rtx (CC0, VOIDmode);
3792 stack_pointer_rtx = gen_rtx_raw_REG (Pmode, STACK_POINTER_REGNUM);
3793 frame_pointer_rtx = gen_rtx_raw_REG (Pmode, FRAME_POINTER_REGNUM);
/* These two may already have been created by the target macros; only
   allocate them when still null.  */
3794 if (hard_frame_pointer_rtx == 0)
3795 hard_frame_pointer_rtx = gen_rtx_raw_REG (Pmode,
3796 HARD_FRAME_POINTER_REGNUM);
3797 if (arg_pointer_rtx == 0)
3798 arg_pointer_rtx = gen_rtx_raw_REG (Pmode, ARG_POINTER_REGNUM);
3799 virtual_incoming_args_rtx =
3800 gen_rtx_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
3801 virtual_stack_vars_rtx =
3802 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
3803 virtual_stack_dynamic_rtx =
3804 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
3805 virtual_outgoing_args_rtx =
3806 gen_rtx_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
3807 virtual_cfa_rtx = gen_rtx_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
3809 /* These rtx must be roots if GC is enabled. */
3811 ggc_add_rtx_root (global_rtl, GR_MAX);
3813 #ifdef INIT_EXPANDERS
3814 /* This is to initialize save_machine_status and restore_machine_status before
3815 the first call to push_function_context_to. This is needed by the Chill
3816 front end which calls push_function_context_to before the first call to
3817 init_function_start. */
3821 /* Create the unique rtx's for certain rtx codes and operand values. */
3823 /* Don't use gen_rtx here since gen_rtx in this case
3824 tries to use these variables. */
3825 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
3826 const_int_rtx[i + MAX_SAVED_CONST_INT] =
3827 gen_rtx_raw_CONST_INT (VOIDmode, i);
3829 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
/* const_true_rtx can reuse a cached CONST_INT when STORE_FLAG_VALUE
   falls inside the cached range.  */
3831 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
3832 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
3833 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
3835 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
3837 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
3838 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
3839 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
3840 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
/* Build the shared 0/1/2 constant rtxs for every machine mode.  */
3842 for (i = 0; i <= 2; i++)
3844 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3845 mode = GET_MODE_WIDER_MODE (mode))
3847 rtx tem = rtx_alloc (CONST_DOUBLE);
3848 union real_extract u;
3850 bzero ((char *) &u, sizeof u); /* Zero any holes in a structure. */
3851 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
3853 bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (tem), sizeof u);
/* cc0_rtx serves as a "no memory slot assigned yet" placeholder here. */
3854 CONST_DOUBLE_MEM (tem) = cc0_rtx;
3855 PUT_MODE (tem, mode);
3857 const_tiny_rtx[i][(int) mode] = tem;
3860 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
3862 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3863 mode = GET_MODE_WIDER_MODE (mode))
3864 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3866 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
3868 mode = GET_MODE_WIDER_MODE (mode))
3869 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3872 for (mode = CCmode; mode < MAX_MACHINE_MODE; ++mode)
3873 if (GET_MODE_CLASS (mode) == MODE_CC)
3874 const_tiny_rtx[0][(int) mode] = const0_rtx;
3876 ggc_add_rtx_root (&const_tiny_rtx[0][0], sizeof(const_tiny_rtx)/sizeof(rtx));
3877 ggc_add_rtx_root (&const_true_rtx, 1);
3879 #ifdef RETURN_ADDRESS_POINTER_REGNUM
3880 return_address_pointer_rtx
3881 = gen_rtx_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
/* Target-configurable registers: struct-value, static chain and PIC
   table, each with an optional distinct "incoming" variant.  */
3885 struct_value_rtx = STRUCT_VALUE;
3887 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
3890 #ifdef STRUCT_VALUE_INCOMING
3891 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
3893 #ifdef STRUCT_VALUE_INCOMING_REGNUM
3894 struct_value_incoming_rtx
3895 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
3897 struct_value_incoming_rtx = struct_value_rtx;
3901 #ifdef STATIC_CHAIN_REGNUM
3902 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
3904 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3905 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
3906 static_chain_incoming_rtx
3907 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
3910 static_chain_incoming_rtx = static_chain_rtx;
3914 static_chain_rtx = STATIC_CHAIN;
3916 #ifdef STATIC_CHAIN_INCOMING
3917 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
3919 static_chain_incoming_rtx = static_chain_rtx;
3923 #ifdef PIC_OFFSET_TABLE_REGNUM
3924 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
3927 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
3928 ggc_add_rtx_root (&struct_value_rtx, 1);
3929 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
3930 ggc_add_rtx_root (&static_chain_rtx, 1);
3931 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
3932 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
3935 /* Query and clear / restore no_line_numbers. This is used by the
3936 switch / case handling in stmt.c to give proper line numbers in
3937 warnings about unreachable code. */
3940 force_line_numbers ()
/* Save the previous setting for restore_line_number_status, enable
   line numbers, and force the next note so warnings point at the
   right line.  */
3942 int old = no_line_numbers;
3944 no_line_numbers = 0;
3946 force_next_line_note ();
3951 restore_line_number_status (old_value)
3954 no_line_numbers = old_value;