1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 88, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* Middle-to-low level generation of rtx code and insns.
24 This file contains the functions `gen_rtx', `gen_reg_rtx'
25 and `gen_label_rtx' that are the usual ways of creating rtl
26 expressions for most purposes.
28 It also has the functions for creating insns and linking
29 them in the doubly-linked chain.
31 The patterns of the insns are created by machine-dependent
32 routines in insn-emit.c, which is generated automatically from
33 the machine description. These routines use `gen_rtx' to make
34 the individual rtx's of the pattern; what is machine dependent
35 is the kind of rtx's they make and what arguments they use. */
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
53 #include "basic-block.h"
/* NOTE(review): this is a line-numbered listing with extraction gaps
   (original line numbers jump, e.g. 61 -> 64); declarations are kept
   byte-identical.  File-scope state for the RTL emitter: common modes,
   label counters, shared rtx singletons, and per-function accessors.  */
56 /* Commonly used modes. */
58 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
59 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
60 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
61 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
64 /* This is *not* reset after each function. It gives each CODE_LABEL
65 in the entire compilation a unique label number. */
67 static int label_num = 1;
69 /* Highest label number in current function.
70 Zero means use the value of label_num instead.
71 This is nonzero only when belatedly compiling an inline function. */
73 static int last_label_num;
75 /* Value label_num had when set_new_first_and_last_label_number was called.
76 If label_num has not changed since then, last_label_num is valid. */
78 static int base_label_num;
80 /* Nonzero means do not generate NOTEs for source line numbers. */
82 static int no_line_numbers;
84 /* Commonly used rtx's, so that we only need space for one copy.
85 These are initialized once for the entire compilation.
86 All of these except perhaps the floating-point CONST_DOUBLEs
87 are unique; no other rtx-object will be equal to any of these. */
89 rtx global_rtl[GR_MAX];
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx. */
95 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
99 REAL_VALUE_TYPE dconst0;
100 REAL_VALUE_TYPE dconst1;
101 REAL_VALUE_TYPE dconst2;
102 REAL_VALUE_TYPE dconstm1;
104 /* All references to the following fixed hard registers go through
105 these unique rtl objects. On machines where the frame-pointer and
106 arg-pointer are the same register, they use the same unique object.
108 After register allocation, other rtl objects which used to be pseudo-regs
109 may be clobbered to refer to the frame-pointer register.
110 But references that were originally to the frame-pointer can be
111 distinguished from the others because they contain frame_pointer_rtx.
113 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
114 tricky: until register elimination has taken place hard_frame_pointer_rtx
115 should be used if it is being set, and frame_pointer_rtx otherwise. After
116 register elimination hard_frame_pointer_rtx should always be used.
117 On machines where the two registers are same (most) then these are the
120 In an inline procedure, the stack and frame pointer rtxs may not be
121 used for anything else. */
122 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
123 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
124 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
125 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
126 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
128 /* This is used to implement __builtin_return_address for some machines.
129 See for instance the MIPS port. */
130 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
132 /* We make one copy of (const_int C) where C is in
133 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
134 to save space during the compilation and simplify comparisons of
137 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
139 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
140 shortly thrown away. We use two mechanisms to prevent this waste:
142 For sizes up to 5 elements, we keep a SEQUENCE and its associated
143 rtvec for use by gen_sequence. One entry for each size is
144 sufficient because most cases are calls to gen_sequence followed by
145 immediately emitting the SEQUENCE. Reuse is safe since emitting a
146 sequence is destructive on the insn in it anyway and hence can't be
149 We do not bother to save this cached data over nested function calls.
150 Instead, we just reinitialize them. */
152 #define SEQUENCE_RESULT_SIZE 5
154 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
156 /* During RTL generation, we also keep a list of free INSN rtl codes. */
157 static rtx free_insn;
/* The following macros route per-function emit state through
   current_function->emit, so each function has its own insn chain.  */
159 #define first_insn (current_function->emit->x_first_insn)
160 #define last_insn (current_function->emit->x_last_insn)
161 #define cur_insn_uid (current_function->emit->x_cur_insn_uid)
162 #define last_linenum (current_function->emit->x_last_linenum)
163 #define last_filename (current_function->emit->x_last_filename)
164 #define first_label_num (current_function->emit->x_first_label_num)
166 /* This is where the pointer to the obstack being used for RTL is stored. */
167 extern struct obstack *rtl_obstack;
169 static rtx make_jump_insn_raw PROTO((rtx));
170 static rtx make_call_insn_raw PROTO((rtx));
171 static rtx find_line_note PROTO((rtx));
172 static void mark_sequence_stack PROTO((struct sequence_stack *));
174 /* There are some RTL codes that require special attention; the generation
175 functions do the raw handling. If you add to this list, modify
176 special_rtx in gengenrtl.c as well. */
/* Return a CONST_INT rtx for ARG, reusing the preallocated copies in
   const_int_rtx[] for values in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   and the shared const_true_rtx when ARG equals a non-unit STORE_FLAG_VALUE.
   NOTE(review): listing gaps -- return type line, ARG's declaration,
   braces and #endif are missing from this extract.  */
179 gen_rtx_CONST_INT (mode, arg)
180 enum machine_mode mode;
183 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
184 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
186 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
187 if (const_true_rtx && arg == STORE_FLAG_VALUE)
188 return const_true_rtx;
191 return gen_rtx_raw_CONST_INT (mode, arg);
/* Build a CONST_DOUBLE rtx; allocates the node, clears the chain slot
   (X0EXP (r, 1)) and, presumably, stores ARG0..ARG2 into the remaining
   fields via the trailing loop -- TODO confirm, the loop body and most
   of this function are missing from this extract.  */
194 /* CONST_DOUBLEs needs special handling because its length is known
197 gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
198 enum machine_mode mode;
200 HOST_WIDE_INT arg1, arg2;
202 rtx r = rtx_alloc (CONST_DOUBLE);
207 X0EXP (r, 1) = NULL_RTX;
211 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
/* Return a REG rtx for hard register REGNO in MODE, returning the shared
   singleton objects (frame_pointer_rtx, stack_pointer_rtx, etc.) for the
   special Pmode pointer registers outside of reload.  NOTE(review):
   listing gaps -- return type, braces and several #endif lines missing.  */
218 gen_rtx_REG (mode, regno)
219 enum machine_mode mode;
222 /* In case the MD file explicitly references the frame pointer, have
223 all such references point to the same frame pointer. This is
224 used during frame pointer elimination to distinguish the explicit
225 references to these registers from pseudos that happened to be
228 If we have eliminated the frame pointer or arg pointer, we will
229 be using it as a normal register, for example as a spill
230 register. In such cases, we might be accessing it in a mode that
231 is not Pmode and therefore cannot use the pre-allocated rtx.
233 Also don't do this when we are making new REGs in reload, since
234 we don't want to get confused with the real pointers. */
236 if (mode == Pmode && !reload_in_progress)
238 if (regno == FRAME_POINTER_REGNUM)
239 return frame_pointer_rtx;
240 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
241 if (regno == HARD_FRAME_POINTER_REGNUM)
242 return hard_frame_pointer_rtx;
244 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
245 if (regno == ARG_POINTER_REGNUM)
246 return arg_pointer_rtx;
248 #ifdef RETURN_ADDRESS_POINTER_REGNUM
249 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
250 return return_address_pointer_rtx;
252 if (regno == STACK_POINTER_REGNUM)
253 return stack_pointer_rtx;
256 return gen_rtx_raw_REG (mode, regno);
/* Return a MEM rtx for ADDR in MODE, explicitly zeroing its alias set
   because rtx_alloc does not clear that field.  NOTE(review): return
   type, braces and the return statement are missing from this extract.  */
260 gen_rtx_MEM (mode, addr)
261 enum machine_mode mode;
264 rtx rt = gen_rtx_raw_MEM (mode, addr);
266 /* This field is not cleared by the mere allocation of the rtx, so
268 MEM_ALIAS_SET (rt) = 0;
/* Varargs constructor for an arbitrary rtx: dispatches CONST_INT,
   CONST_DOUBLE, REG and MEM to their special-cased builders, otherwise
   allocates a node and fills each operand slot according to the code's
   format string.  NOTE(review): listing gaps -- the switch scaffolding,
   va_start/va_end, default case and closing braces are missing here.  */
273 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
275 ** This routine generates an RTX of the size specified by
276 ** <code>, which is an RTX code. The RTX structure is initialized
277 ** from the arguments <element1> through <elementn>, which are
278 ** interpreted according to the specific RTX type's format. The
279 ** special machine mode associated with the rtx (if any) is specified
282 ** gen_rtx can be invoked in a way which resembles the lisp-like
283 ** rtx it will generate. For example, the following rtx structure:
285 ** (plus:QI (mem:QI (reg:SI 1))
286 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
288 ** ...would be generated by the following C code:
290 ** gen_rtx (PLUS, QImode,
291 ** gen_rtx (MEM, QImode,
292 ** gen_rtx (REG, SImode, 1)),
293 ** gen_rtx (MEM, QImode,
294 ** gen_rtx (PLUS, SImode,
295 ** gen_rtx (REG, SImode, 2),
296 ** gen_rtx (REG, SImode, 3)))),
301 gen_rtx VPROTO((enum rtx_code code, enum machine_mode mode, ...))
303 #ifndef ANSI_PROTOTYPES
305 enum machine_mode mode;
308 register int i; /* Array indices... */
309 register const char *fmt; /* Current rtx's format... */
310 register rtx rt_val; /* RTX to return to caller... */
314 #ifndef ANSI_PROTOTYPES
315 code = va_arg (p, enum rtx_code);
316 mode = va_arg (p, enum machine_mode);
322 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
327 rtx arg0 = va_arg (p, rtx);
328 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
329 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
330 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
335 rt_val = gen_rtx_REG (mode, va_arg (p, int));
339 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
343 rt_val = rtx_alloc (code); /* Allocate the storage space. */
344 rt_val->mode = mode; /* Store the machine mode... */
346 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
347 for (i = 0; i < GET_RTX_LENGTH (code); i++)
351 case '0': /* Unused field. */
354 case 'i': /* An integer? */
355 XINT (rt_val, i) = va_arg (p, int);
358 case 'w': /* A wide integer? */
359 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
362 case 's': /* A string? */
363 XSTR (rt_val, i) = va_arg (p, char *);
366 case 'e': /* An expression? */
367 case 'u': /* An insn? Same except when printing. */
368 XEXP (rt_val, i) = va_arg (p, rtx);
371 case 'E': /* An RTX vector? */
372 XVEC (rt_val, i) = va_arg (p, rtvec);
375 case 'b': /* A bitmap? */
376 XBITMAP (rt_val, i) = va_arg (p, bitmap);
379 case 't': /* A tree? */
380 XTREE (rt_val, i) = va_arg (p, tree);
/* Varargs constructor for an rtvec of N rtx arguments: collects them
   into a stack (alloca) array and delegates to gen_rtvec_v.  Returns
   NULL_RTVEC for N == 0.  NOTE(review): listing gaps -- local
   declarations, va_start/va_end and braces are missing here.  */
394 /* gen_rtvec (n, [rt1, ..., rtn])
396 ** This routine creates an rtvec and stores within it the
397 ** pointers to rtx's which are its arguments.
402 gen_rtvec VPROTO((int n, ...))
404 #ifndef ANSI_PROTOTYPES
413 #ifndef ANSI_PROTOTYPES
418 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
420 vector = (rtx *) alloca (n * sizeof (rtx));
422 for (i = 0; i < n; i++)
423 vector[i] = va_arg (p, rtx);
426 return gen_rtvec_v (n, vector);
/* Build an rtvec of N elements copied from the array ARGP.  Returns
   NULL_RTVEC for N == 0.  NOTE(review): listing gaps -- parameter
   declarations, the n == 0 test and the final return are missing
   from this extract.  */
430 gen_rtvec_v (n, argp)
435 register rtvec rt_val;
438 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
440 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
442 for (i = 0; i < n; i++)
443 rt_val->elem[i] = *argp++;
/* Allocate a fresh pseudo register of MODE.  Complex modes become a
   CONCAT of two part-mode pseudos; the per-function regno tables are
   doubled (xrealloc + zero-fill of the new half) when full.
   NOTE(review): listing gaps -- the function's name/return-type line,
   several local declarations and the final return are missing here.  */
449 /* Generate a REG rtx for a new pseudo register of mode MODE.
450 This pseudo is assigned the next sequential register number. */
454 enum machine_mode mode;
456 struct function *f = current_function;
459 /* Don't let anything called after initial flow analysis create new
464 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
465 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
467 /* For complex modes, don't make a single pseudo.
468 Instead, make a CONCAT of two pseudos.
469 This allows noncontiguous allocation of the real and imaginary parts,
470 which makes much better code. Besides, allocating DCmode
471 pseudos overstrains reload on some machines like the 386. */
472 rtx realpart, imagpart;
473 int size = GET_MODE_UNIT_SIZE (mode);
474 enum machine_mode partmode
475 = mode_for_size (size * BITS_PER_UNIT,
476 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
477 ? MODE_FLOAT : MODE_INT),
480 realpart = gen_reg_rtx (partmode);
481 imagpart = gen_reg_rtx (partmode);
482 return gen_rtx_CONCAT (mode, realpart, imagpart);
485 /* Make sure regno_pointer_flag and regno_reg_rtx are large
486 enough to have an element for this pseudo reg number. */
488 if (reg_rtx_no == f->emit->regno_pointer_flag_length)
490 int old_size = f->emit->regno_pointer_flag_length;
493 new = xrealloc (f->emit->regno_pointer_flag, old_size * 2);
494 memset (new + old_size, 0, old_size);
495 f->emit->regno_pointer_flag = new;
497 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
498 memset (new + old_size, 0, old_size);
499 f->emit->regno_pointer_align = new;
501 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
502 old_size * 2 * sizeof (rtx));
503 memset (new1 + old_size, 0, old_size * sizeof (rtx));
504 regno_reg_rtx = new1;
506 f->emit->regno_pointer_flag_length = old_size * 2;
509 val = gen_rtx_raw_REG (mode, reg_rtx_no);
510 regno_reg_rtx[reg_rtx_no++] = val;
/* Sets REG_USERVAR_P on REG, or on both halves when REG is a CONCAT.
   NOTE(review): the function's signature line is missing from this
   extract (presumably `mark_user_reg (reg)` -- confirm against the
   original source); only the body fragment is visible.  */
514 /* Identify REG (which may be a CONCAT) as a user register. */
520 if (GET_CODE (reg) == CONCAT)
522 REG_USERVAR_P (XEXP (reg, 0)) = 1;
523 REG_USERVAR_P (XEXP (reg, 1)) = 1;
525 else if (GET_CODE (reg) == REG)
526 REG_USERVAR_P (reg) = 1;
/* Flag REG's regno as a pointer and record ALIGN; when the regno was
   already flagged, only weaken the recorded alignment (take the smaller
   ALIGN).  NOTE(review): return type, parameter declarations and braces
   are missing from this extract.  */
531 /* Identify REG as a probable pointer register and show its alignment
532 as ALIGN, if nonzero. */
535 mark_reg_pointer (reg, align)
539 if (! REGNO_POINTER_FLAG (REGNO (reg)))
541 REGNO_POINTER_FLAG (REGNO (reg)) = 1;
544 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
546 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
547 /* We can no-longer be sure just how aligned this pointer is */
548 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
/* Accessors for register/label numbering.  NOTE(review): heavily
   fragmented in this extract -- the max_reg_num body and the
   max_label_num signature and fallback return (presumably
   `return label_num;` -- confirm) are missing.  */
551 /* Return 1 plus largest pseudo reg number used in the current function. */
559 /* Return 1 + the largest label number used so far in the current function. */
564 if (last_label_num && label_num == base_label_num)
565 return last_label_num;
569 /* Return first label number used in this function (if any were used). */
572 get_first_label_num ()
574 return first_label_num;
/* Core of gen_lowpart: extract the low-order MODE-sized bits of X for
   the cases that are safe at any point in compilation (extensions,
   SUBREGs, REGs, integer constants, and host/target float punning).
   Returns 0 for cases it cannot handle.  NOTE(review): many interior
   lines (braces, #endif/#else lines, some returns) are missing from
   this extract; code kept byte-identical.  */
577 /* Return a value representing some low-order bits of X, where the number
578 of low-order bits is given by MODE. Note that no conversion is done
579 between floating-point and fixed-point values, rather, the bit
580 representation is returned.
582 This function handles the cases in common between gen_lowpart, below,
583 and two variants in cse.c and combine.c. These are the cases that can
584 be safely handled at all points in the compilation.
586 If this is not a case we can handle, return 0. */
589 gen_lowpart_common (mode, x)
590 enum machine_mode mode;
595 if (GET_MODE (x) == mode)
598 /* MODE must occupy no more words than the mode of X. */
599 if (GET_MODE (x) != VOIDmode
600 && ((GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
601 > ((GET_MODE_SIZE (GET_MODE (x)) + (UNITS_PER_WORD - 1))
605 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
606 word = ((GET_MODE_SIZE (GET_MODE (x))
607 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
610 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
611 && (GET_MODE_CLASS (mode) == MODE_INT
612 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
614 /* If we are getting the low-order part of something that has been
615 sign- or zero-extended, we can either just use the object being
616 extended or make a narrower extension. If we want an even smaller
617 piece than the size of the object being extended, call ourselves
620 This case is used mostly by combine and cse. */
622 if (GET_MODE (XEXP (x, 0)) == mode)
624 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
625 return gen_lowpart_common (mode, XEXP (x, 0));
626 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
627 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
629 else if (GET_CODE (x) == SUBREG
630 && (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
631 || GET_MODE_SIZE (mode) == GET_MODE_UNIT_SIZE (GET_MODE (x))))
632 return (GET_MODE (SUBREG_REG (x)) == mode && SUBREG_WORD (x) == 0
634 : gen_rtx_SUBREG (mode, SUBREG_REG (x), SUBREG_WORD (x) + word));
635 else if (GET_CODE (x) == REG)
637 /* Let the backend decide how many registers to skip. This is needed
638 in particular for Sparc64 where fp regs are smaller than a word. */
639 /* ??? Note that subregs are now ambiguous, in that those against
640 pseudos are sized by the Word Size, while those against hard
641 regs are sized by the underlying register size. Better would be
642 to always interpret the subreg offset parameter as bytes or bits. */
644 if (WORDS_BIG_ENDIAN && REGNO (x) < FIRST_PSEUDO_REGISTER)
645 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
646 - HARD_REGNO_NREGS (REGNO (x), mode));
648 /* If the register is not valid for MODE, return 0. If we don't
649 do this, there is no way to fix up the resulting REG later.
650 But we do do this if the current REG is not valid for its
651 mode. This latter is a kludge, but is required due to the
652 way that parameters are passed on some machines, most
654 if (REGNO (x) < FIRST_PSEUDO_REGISTER
655 && ! HARD_REGNO_MODE_OK (REGNO (x) + word, mode)
656 && HARD_REGNO_MODE_OK (REGNO (x), GET_MODE (x)))
658 else if (REGNO (x) < FIRST_PSEUDO_REGISTER
659 /* integrate.c can't handle parts of a return value register. */
660 && (! REG_FUNCTION_VALUE_P (x)
661 || ! rtx_equal_function_value_matters)
662 #ifdef CLASS_CANNOT_CHANGE_SIZE
663 && ! (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (x))
664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_INT
665 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_FLOAT
666 && (TEST_HARD_REG_BIT
667 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
670 /* We want to keep the stack, frame, and arg pointers
672 && x != frame_pointer_rtx
673 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
674 && x != arg_pointer_rtx
676 && x != stack_pointer_rtx)
677 return gen_rtx_REG (mode, REGNO (x) + word);
679 return gen_rtx_SUBREG (mode, x, word);
681 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
682 from the low-order part of the constant. */
683 else if ((GET_MODE_CLASS (mode) == MODE_INT
684 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
685 && GET_MODE (x) == VOIDmode
686 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
688 /* If MODE is twice the host word size, X is already the desired
689 representation. Otherwise, if MODE is wider than a word, we can't
690 do this. If MODE is exactly a word, return just one CONST_INT.
691 If MODE is smaller than a word, clear the bits that don't belong
692 in our mode, unless they and our sign bit are all one. So we get
693 either a reasonable negative value or a reasonable unsigned value
696 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
698 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
700 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
701 return (GET_CODE (x) == CONST_INT ? x
702 : GEN_INT (CONST_DOUBLE_LOW (x)));
705 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
706 int width = GET_MODE_BITSIZE (mode);
707 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
708 : CONST_DOUBLE_LOW (x));
710 /* Sign extend to HOST_WIDE_INT. */
711 val = val << (HOST_BITS_PER_WIDE_INT - width) >> (HOST_BITS_PER_WIDE_INT - width);
713 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
718 /* If X is an integral constant but we want it in floating-point, it
719 must be the case that we have a union of an integer and a floating-point
720 value. If the machine-parameters allow it, simulate that union here
721 and return the result. The two-word and single-word cases are
724 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
725 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
726 || flag_pretend_float)
727 && GET_MODE_CLASS (mode) == MODE_FLOAT
728 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
729 && GET_CODE (x) == CONST_INT
730 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
731 #ifdef REAL_ARITHMETIC
737 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
738 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
742 union {HOST_WIDE_INT i; float d; } u;
745 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
748 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
749 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
750 || flag_pretend_float)
751 && GET_MODE_CLASS (mode) == MODE_FLOAT
752 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
753 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
754 && GET_MODE (x) == VOIDmode
755 && (sizeof (double) * HOST_BITS_PER_CHAR
756 == 2 * HOST_BITS_PER_WIDE_INT))
757 #ifdef REAL_ARITHMETIC
761 HOST_WIDE_INT low, high;
763 if (GET_CODE (x) == CONST_INT)
764 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
766 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
768 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
770 if (WORDS_BIG_ENDIAN)
771 i[0] = high, i[1] = low;
773 i[0] = low, i[1] = high;
775 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
776 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
780 union {HOST_WIDE_INT i[2]; double d; } u;
781 HOST_WIDE_INT low, high;
783 if (GET_CODE (x) == CONST_INT)
784 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
786 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
788 #ifdef HOST_WORDS_BIG_ENDIAN
789 u.i[0] = high, u.i[1] = low;
791 u.i[0] = low, u.i[1] = high;
794 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
798 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
799 same as sizeof (double) or when sizeof (float) is larger than the
800 size of a word on the target machine. */
801 #ifdef REAL_ARITHMETIC
802 else if (mode == SFmode && GET_CODE (x) == CONST_INT)
808 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
809 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
811 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
812 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
813 || flag_pretend_float)
814 && GET_MODE_CLASS (mode) == MODE_FLOAT
815 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
816 && GET_CODE (x) == CONST_INT
817 && (sizeof (double) * HOST_BITS_PER_CHAR
818 == HOST_BITS_PER_WIDE_INT))
824 r = REAL_VALUE_FROM_TARGET_DOUBLE (&i);
825 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
829 /* Similarly, if this is converting a floating-point value into a
830 single-word integer. Only do this is the host and target parameters are
833 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
834 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
835 || flag_pretend_float)
836 && (GET_MODE_CLASS (mode) == MODE_INT
837 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
838 && GET_CODE (x) == CONST_DOUBLE
839 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
840 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
841 return operand_subword (x, word, 0, GET_MODE (x));
843 /* Similarly, if this is converting a floating-point value into a
844 two-word integer, we can do this one word at a time and make an
845 integer. Only do this is the host and target parameters are
848 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
849 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
850 || flag_pretend_float)
851 && (GET_MODE_CLASS (mode) == MODE_INT
852 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
853 && GET_CODE (x) == CONST_DOUBLE
854 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
855 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
858 = operand_subword (x, word + WORDS_BIG_ENDIAN, 0, GET_MODE (x));
860 = operand_subword (x, word + ! WORDS_BIG_ENDIAN, 0, GET_MODE (x));
862 if (lowpart && GET_CODE (lowpart) == CONST_INT
863 && highpart && GET_CODE (highpart) == CONST_INT)
864 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
867 /* Otherwise, we can't do this. */
/* Return the real half of complex value X in MODE: the matching CONCAT
   operand, or the low (little-endian) / high (big-endian) part; aborts
   via fatal() for a sub-word hard-reg part on WORDS_BIG_ENDIAN targets.
   NOTE(review): return type, braces and some returns missing here.  */
871 /* Return the real part (which has mode MODE) of a complex value X.
872 This always comes at the low address in memory. */
875 gen_realpart (mode, x)
876 enum machine_mode mode;
879 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
881 else if (WORDS_BIG_ENDIAN
882 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
884 && REGNO (x) < FIRST_PSEUDO_REGISTER)
885 fatal ("Unable to access real part of complex value in a hard register on this target");
886 else if (WORDS_BIG_ENDIAN)
887 return gen_highpart (mode, x);
889 return gen_lowpart (mode, x);
/* Return the imaginary half of complex value X in MODE: the matching
   CONCAT operand, or the word opposite to gen_realpart's choice; aborts
   via fatal() for a sub-word hard-reg part on little-endian-word targets.
   NOTE(review): return type, braces and some returns missing here.  */
892 /* Return the imaginary part (which has mode MODE) of a complex value X.
893 This always comes at the high address in memory. */
896 gen_imagpart (mode, x)
897 enum machine_mode mode;
900 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
902 else if (WORDS_BIG_ENDIAN)
903 return gen_lowpart (mode, x);
904 else if (!WORDS_BIG_ENDIAN
905 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
907 && REGNO (x) < FIRST_PSEUDO_REGISTER)
908 fatal ("Unable to access imaginary part of complex value in a hard register on this target");
910 return gen_highpart (mode, x);
/* Predicate: does SUBREG X address the real (first-stored) half of the
   complex value in SUBREG_REG (X)?  NOTE(review): return type, parameter
   declaration, braces and the non-SUBREG abort path are missing from
   this extract.  */
913 /* Return 1 iff X, assumed to be a SUBREG,
914 refers to the real part of the complex value in its containing reg.
915 Complex values are always stored with the real part in the first word,
916 regardless of WORDS_BIG_ENDIAN. */
919 subreg_realpart_p (x)
922 if (GET_CODE (x) != SUBREG)
925 return SUBREG_WORD (x) * UNITS_PER_WORD < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x)));
/* Public wrapper over gen_lowpart_common: when the common cases fail,
   additionally handles hard REGs (via a copy), MEMs (by offsetting the
   address for word/byte endianness) and ADDRESSOF (by forcing into a
   reg).  NOTE(review): return type, braces and the failure/abort paths
   are missing from this extract.  */
928 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
929 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
930 least-significant part of X.
931 MODE specifies how big a part of X to return;
932 it usually should not be larger than a word.
933 If X is a MEM whose address is a QUEUED, the value may be so also. */
936 gen_lowpart (mode, x)
937 enum machine_mode mode;
940 rtx result = gen_lowpart_common (mode, x);
944 else if (GET_CODE (x) == REG)
946 /* Must be a hard reg that's not valid in MODE. */
947 result = gen_lowpart_common (mode, copy_to_reg (x));
952 else if (GET_CODE (x) == MEM)
954 /* The only additional case we can do is MEM. */
955 register int offset = 0;
956 if (WORDS_BIG_ENDIAN)
957 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
958 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
960 if (BYTES_BIG_ENDIAN)
961 /* Adjust the address so that the address-after-the-data
963 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
964 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
966 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
968 else if (GET_CODE (x) == ADDRESSOF)
969 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
/* Mirror of gen_lowpart for the most significant part of X: handles
   CONST_DOUBLE/CONST_INT high words, MEM address offsetting, SUBREG
   recursion and REG word selection.  NOTE(review): return type, braces,
   abort paths and several #endif lines are missing from this extract.  */
974 /* Like `gen_lowpart', but refer to the most significant part.
975 This is used to access the imaginary part of a complex number. */
978 gen_highpart (mode, x)
979 enum machine_mode mode;
982 /* This case loses if X is a subreg. To catch bugs early,
983 complain if an invalid MODE is used even in other cases. */
984 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
985 && GET_MODE_SIZE (mode) != GET_MODE_UNIT_SIZE (GET_MODE (x)))
987 if (GET_CODE (x) == CONST_DOUBLE
988 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
989 && GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT
992 return GEN_INT (CONST_DOUBLE_HIGH (x) & GET_MODE_MASK (mode));
993 else if (GET_CODE (x) == CONST_INT)
995 if (HOST_BITS_PER_WIDE_INT <= BITS_PER_WORD)
997 return GEN_INT (INTVAL (x) >> (HOST_BITS_PER_WIDE_INT - BITS_PER_WORD));
999 else if (GET_CODE (x) == MEM)
1001 register int offset = 0;
1002 if (! WORDS_BIG_ENDIAN)
1003 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1004 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1006 if (! BYTES_BIG_ENDIAN
1007 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
1008 offset -= (GET_MODE_SIZE (mode)
1009 - MIN (UNITS_PER_WORD,
1010 GET_MODE_SIZE (GET_MODE (x))));
1012 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1014 else if (GET_CODE (x) == SUBREG)
1016 /* The only time this should occur is when we are looking at a
1017 multi-word item with a SUBREG whose mode is the same as that of the
1018 item. It isn't clear what we would do if it wasn't. */
1019 if (SUBREG_WORD (x) != 0)
1021 return gen_highpart (mode, SUBREG_REG (x));
1023 else if (GET_CODE (x) == REG)
1027 /* Let the backend decide how many registers to skip. This is needed
1028 in particular for sparc64 where fp regs are smaller than a word. */
1029 /* ??? Note that subregs are now ambiguous, in that those against
1030 pseudos are sized by the word size, while those against hard
1031 regs are sized by the underlying register size. Better would be
1032 to always interpret the subreg offset parameter as bytes or bits. */
1034 if (WORDS_BIG_ENDIAN)
1036 else if (REGNO (x) < FIRST_PSEUDO_REGISTER)
1037 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
1038 - HARD_REGNO_NREGS (REGNO (x), mode));
1040 word = ((GET_MODE_SIZE (GET_MODE (x))
1041 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1044 if (REGNO (x) < FIRST_PSEUDO_REGISTER
1045 /* integrate.c can't handle parts of a return value register. */
1046 && (! REG_FUNCTION_VALUE_P (x)
1047 || ! rtx_equal_function_value_matters)
1048 /* We want to keep the stack, frame, and arg pointers special. */
1049 && x != frame_pointer_rtx
1050 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1051 && x != arg_pointer_rtx
1053 && x != stack_pointer_rtx)
1054 return gen_rtx_REG (mode, REGNO (x) + word);
1056 return gen_rtx_SUBREG (mode, x, word);
/* Predicate: does SUBREG X select the least significant part of its
   inner register?  For big-endian words on a multi-word inner mode the
   low part lives at the highest word number.  NOTE(review): return
   type, parameter declaration, braces and the early returns' values
   are missing from this extract.  */
1062 /* Return 1 iff X, assumed to be a SUBREG,
1063 refers to the least significant part of its containing reg.
1064 If X is not a SUBREG, always return 1 (it is its own low part!). */
1067 subreg_lowpart_p (x)
1070 if (GET_CODE (x) != SUBREG)
1072 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1075 if (WORDS_BIG_ENDIAN
1076 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD)
1077 return (SUBREG_WORD (x)
1078 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1079 - MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD))
1082 return SUBREG_WORD (x) == 0;
1085 /* Return subword I of operand OP.
1086 The word number, I, is interpreted as the word number starting at the
1087 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1088 otherwise it is the high-order word.
1090 If we cannot extract the required word, we return zero. Otherwise, an
1091 rtx corresponding to the requested word will be returned.
1093 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1094 reload has completed, a valid address will always be returned. After
1095 reload, if a valid address cannot be returned, we return zero.
1097 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1098 it is the responsibility of the caller.
1100 MODE is the mode of OP in case it is a CONST_INT. */
/* NOTE(review): line-sampled transcription — parameter declarations,
   braces, and many `return 0;' fall-throughs are elided below.  */
1103 operand_subword (op, i, validate_address, mode)
1106 int validate_address;
1107 enum machine_mode mode;
/* How many target words fit in one host wide int; used to pick the
   host word a constant's bits live in.  */
1110 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1112 if (mode == VOIDmode)
1113 mode = GET_MODE (op);
1115 if (mode == VOIDmode)
1118 /* If OP is narrower than a word, fail. */
1120 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1123 /* If we want a word outside OP, return zero. */
1125 && (i + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1128 /* If OP is already an integer word, return it. */
1129 if (GET_MODE_CLASS (mode) == MODE_INT
1130 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1133 /* If OP is a REG or SUBREG, we can handle it very simply. */
1134 if (GET_CODE (op) == REG)
1136 /* ??? There is a potential problem with this code. It does not
1137 properly handle extractions of a subword from a hard register
1138 that is larger than word_mode. Presumably the check for
1139 HARD_REGNO_MODE_OK catches these most of these cases. */
1141 /* If OP is a hard register, but OP + I is not a hard register,
1142 then extracting a subword is impossible.
1144 For example, consider if OP is the last hard register and it is
1145 larger than word_mode. If we wanted word N (for N > 0) because a
1146 part of that hard register was known to contain a useful value,
1147 then OP + I would refer to a pseudo, not the hard register we
1149 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1150 && REGNO (op) + i >= FIRST_PSEUDO_REGISTER)
1153 /* If the register is not valid for MODE, return 0. Note we
1154 have to check both OP and OP + I since they may refer to
1155 different parts of the register file.
1157 Consider if OP refers to the last 96bit FP register and we want
1158 subword 3 because that subword is known to contain a value we
1160 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1161 && (! HARD_REGNO_MODE_OK (REGNO (op), word_mode)
1162 || ! HARD_REGNO_MODE_OK (REGNO (op) + i, word_mode)))
/* Pseudos, function-value registers during RTL generation, and the
   special pointer registers must stay intact: use a SUBREG rather
   than renumbering into REGNO + I.  */
1164 else if (REGNO (op) >= FIRST_PSEUDO_REGISTER
1165 || (REG_FUNCTION_VALUE_P (op)
1166 && rtx_equal_function_value_matters)
1167 /* We want to keep the stack, frame, and arg pointers
1169 || op == frame_pointer_rtx
1170 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1171 || op == arg_pointer_rtx
1173 || op == stack_pointer_rtx)
1174 return gen_rtx_SUBREG (word_mode, op, i);
1176 return gen_rtx_REG (word_mode, REGNO (op) + i);
1178 else if (GET_CODE (op) == SUBREG)
1179 return gen_rtx_SUBREG (word_mode, SUBREG_REG (op), i + SUBREG_WORD (op));
/* For a CONCAT, recurse into whichever half holds word I.  */
1180 else if (GET_CODE (op) == CONCAT)
1182 int partwords = GET_MODE_UNIT_SIZE (GET_MODE (op)) / UNITS_PER_WORD;
1184 return operand_subword (XEXP (op, 0), i, validate_address, mode);
1185 return operand_subword (XEXP (op, 1), i - partwords,
1186 validate_address, mode);
1189 /* Form a new MEM at the requested address. */
1190 if (GET_CODE (op) == MEM)
1192 rtx addr = plus_constant (XEXP (op, 0), i * UNITS_PER_WORD);
1195 if (validate_address)
/* After reload we may only test validity (returning 0 on failure,
   in an elided line); before reload we may legitimize the address.  */
1197 if (reload_completed)
1199 if (! strict_memory_address_p (word_mode, addr))
1203 addr = memory_address (word_mode, addr);
1206 new = gen_rtx_MEM (word_mode, addr);
1208 MEM_COPY_ATTRIBUTES (new, op);
1209 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1210 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op);
1215 /* The only remaining cases are when OP is a constant. If the host and
1216 target floating formats are the same, handling two-word floating
1217 constants are easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1218 are defined as returning one or two 32 bit values, respectively,
1219 and not values of BITS_PER_WORD bits. */
1220 #ifdef REAL_ARITHMETIC
1221 /* The output is some bits, the width of the target machine's word.
1222 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1224 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1225 && GET_MODE_CLASS (mode) == MODE_FLOAT
1226 && GET_MODE_BITSIZE (mode) == 64
1227 && GET_CODE (op) == CONST_DOUBLE)
1232 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1233 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1235 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1236 which the words are written depends on the word endianness.
1237 ??? This is a potential portability problem and should
1238 be fixed at some point.
1240 We must excercise caution with the sign bit. By definition there
1241 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1242 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1243 So we explicitly mask and sign-extend as necessary. */
1244 if (BITS_PER_WORD == 32)
/* The XOR/subtract pair sign-extends the low 32 bits into a
   HOST_WIDE_INT without relying on signed-shift behavior.  */
1247 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1248 return GEN_INT (val);
1250 #if HOST_BITS_PER_WIDE_INT >= 64
1251 else if (BITS_PER_WORD >= 64 && i == 0)
1253 val = k[! WORDS_BIG_ENDIAN];
1254 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1255 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1256 return GEN_INT (val);
1259 else if (BITS_PER_WORD == 16)
1262 if ((i & 1) == !WORDS_BIG_ENDIAN)
1265 return GEN_INT (val);
/* Extended (> 64-bit) floats: same idea via TARGET_LONG_DOUBLE.  */
1270 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1271 && GET_MODE_CLASS (mode) == MODE_FLOAT
1272 && GET_MODE_BITSIZE (mode) > 64
1273 && GET_CODE (op) == CONST_DOUBLE)
1278 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1279 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1281 if (BITS_PER_WORD == 32)
1284 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 return GEN_INT (val);
1290 #else /* no REAL_ARITHMETIC */
1291 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1292 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1293 || flag_pretend_float)
1294 && GET_MODE_CLASS (mode) == MODE_FLOAT
1295 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1296 && GET_CODE (op) == CONST_DOUBLE)
1298 /* The constant is stored in the host's word-ordering,
1299 but we want to access it in the target's word-ordering. Some
1300 compilers don't like a conditional inside macro args, so we have two
1301 copies of the return. */
1302 #ifdef HOST_WORDS_BIG_ENDIAN
1303 return GEN_INT (i == WORDS_BIG_ENDIAN
1304 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1306 return GEN_INT (i != WORDS_BIG_ENDIAN
1307 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1310 #endif /* no REAL_ARITHMETIC */
1312 /* Single word float is a little harder, since single- and double-word
1313 values often do not have the same high-order bits. We have already
1314 verified that we want the only defined word of the single-word value. */
1315 #ifdef REAL_ARITHMETIC
1316 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1317 && GET_MODE_BITSIZE (mode) == 32
1318 && GET_CODE (op) == CONST_DOUBLE)
1323 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1324 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1326 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1328 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1330 if (BITS_PER_WORD == 16)
1332 if ((i & 1) == !WORDS_BIG_ENDIAN)
1337 return GEN_INT (val);
/* Non-REAL_ARITHMETIC single-word cases: type-pun through a union
   (float then double) when host and target formats agree.  */
1340 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1341 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1342 || flag_pretend_float)
1343 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1344 && GET_MODE_CLASS (mode) == MODE_FLOAT
1345 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1346 && GET_CODE (op) == CONST_DOUBLE)
1349 union {float f; HOST_WIDE_INT i; } u;
1351 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1354 return GEN_INT (u.i);
1356 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1357 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1358 || flag_pretend_float)
1359 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1360 && GET_MODE_CLASS (mode) == MODE_FLOAT
1361 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1362 && GET_CODE (op) == CONST_DOUBLE)
1365 union {double d; HOST_WIDE_INT i; } u;
1367 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1370 return GEN_INT (u.i);
1372 #endif /* no REAL_ARITHMETIC */
1374 /* The only remaining cases that we can handle are integers.
1375 Convert to proper endianness now since these cases need it.
1376 At this point, i == 0 means the low-order word.
1378 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1379 in general. However, if OP is (const_int 0), we can just return
1382 if (op == const0_rtx)
1385 if (GET_MODE_CLASS (mode) != MODE_INT
1386 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1387 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
/* Canonicalize I so that 0 is the low-order word from here on.  */
1390 if (WORDS_BIG_ENDIAN)
1391 i = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - i;
1393 /* Find out which word on the host machine this value is in and get
1394 it from the constant. */
1395 val = (i / size_ratio == 0
1396 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1397 : (GET_CODE (op) == CONST_INT
/* A CONST_INT is implicitly sign-extended, hence ~0 or 0 high part.  */
1398 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1400 /* Get the value we want into the low bits of val. */
1401 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1402 val = ((val >> ((i % size_ratio) * BITS_PER_WORD)));
1404 val = trunc_int_for_mode (val, word_mode);
1406 return GEN_INT (val);
1409 /* Similar to `operand_subword', but never return 0. If we can't extract
1410 the required subword, put OP into a register and try again. If that fails,
1411 abort. We always validate the address in this case. It is not valid
1412 to call this function after reload; it is mostly meant for RTL
1415 MODE is the mode of OP, in case it is CONST_INT. */
/* NOTE(review): line-sampled; parameter decls, braces and the final
   abort/return are elided.  */
1418 operand_subword_force (op, i, mode)
1421 enum machine_mode mode;
1423 rtx result = operand_subword (op, i, 1, mode);
/* First attempt failed (result presumably 0 on the elided test);
   retry with OP forced into a (pseudo) register.  */
1428 if (mode != BLKmode && mode != VOIDmode)
1430 /* If this is a register which can not be accessed by words, copy it
1431 to a pseudo register. */
1432 if (GET_CODE (op) == REG)
1433 op = copy_to_reg (op);
1435 op = force_reg (mode, op);
1438 result = operand_subword (op, i, 1, mode);
1445 /* Given a compare instruction, swap the operands.
1446 A test instruction is changed into a compare of 0 against the operand. */
1449 reverse_comparison (insn)
1452 rtx body = PATTERN (insn);
/* The pattern is either a plain SET or (assumed) a PARALLEL whose
   first element is the SET holding the comparison.  */
1455 if (GET_CODE (body) == SET)
1456 comp = SET_SRC (body);
1458 comp = SET_SRC (XVECEXP (body, 0, 0));
1460 if (GET_CODE (comp) == COMPARE)
/* Genuine compare: just swap the two operands in place.  */
1462 rtx op0 = XEXP (comp, 0);
1463 rtx op1 = XEXP (comp, 1);
1464 XEXP (comp, 0) = op1;
1465 XEXP (comp, 1) = op0;
/* Test instruction: rewrite as (compare 0 operand).  */
1469 rtx new = gen_rtx_COMPARE (VOIDmode,
1470 CONST0_RTX (GET_MODE (comp)), comp);
1471 if (GET_CODE (body) == SET)
1472 SET_SRC (body) = new;
1474 SET_SRC (XVECEXP (body, 0, 0)) = new;
1478 /* Return a memory reference like MEMREF, but with its mode changed
1479 to MODE and its address changed to ADDR.
1480 (VOIDmode means don't change the mode.
1481 NULL for ADDR means don't change the address.) */
1484 change_address (memref, mode, addr)
1486 enum machine_mode mode;
/* Sanity check: MEMREF must really be a MEM (abort is elided).  */
1491 if (GET_CODE (memref) != MEM)
1493 if (mode == VOIDmode)
1494 mode = GET_MODE (memref);
1496 addr = XEXP (memref, 0);
1498 /* If reload is in progress or has completed, ADDR must be valid.
1499 Otherwise, we can call memory_address to make it valid. */
1500 if (reload_completed || reload_in_progress)
1502 if (! memory_address_p (mode, addr))
1506 addr = memory_address (mode, addr);
/* Nothing actually changed: reuse MEMREF instead of allocating.  */
1508 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1511 new = gen_rtx_MEM (mode, addr);
1512 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref);
1513 MEM_COPY_ATTRIBUTES (new, memref);
1514 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref);
1518 /* Return a newly created CODE_LABEL rtx with a unique label number. */
/* NOTE(review): the function header and declarations are elided from
   this transcription; only the allocation lines are visible.  */
1525 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1526 NULL_RTX, label_num++, NULL_PTR);
1528 LABEL_NUSES (label) = 0;
1532 /* For procedure integration. */
1534 /* Install new pointers to the first and last insns in the chain.
1535 Also, set cur_insn_uid to one higher than the last in use.
1536 Used for an inline-procedure after copying the insn chain. */
1539 set_new_first_and_last_insn (first, last)
/* Walk the whole new chain to find the highest INSN_UID in use.  */
1548 for (insn = first; insn; insn = NEXT_INSN (insn))
1549 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1554 /* Set the range of label numbers found in the current function.
1555 This is used when belatedly compiling an inline function. */
1558 set_new_first_and_last_label_num (first, last)
/* Record the current global label counter as the base for mapping
   this function's label numbers.  */
1561 base_label_num = label_num;
1562 first_label_num = first;
1563 last_label_num = last;
1566 /* Set the last label number found in the current function.
1567 This is used when belatedly compiling an inline function. */
1570 set_new_last_label_num (last)
/* Same base-recording as set_new_first_and_last_label_num, but only
   the upper bound is updated.  */
1573 base_label_num = label_num;
1574 last_label_num = last;
1577 /* Restore all variables describing the current status from the structure *P.
1578 This is used after a nested function. */
1581 restore_emit_status (p)
/* The per-function state restore itself is elided in this
   transcription; only the cache flush is visible.  */
1585 clear_emit_caches ();
1588 /* Clear out all parts of the state in F that can safely be discarded
1589 after the function has been compiled, to let garbage collection
1590 reclaim the memory. */
1593 free_emit_status (f)
/* Release the per-function register tables hanging off f->emit.  */
1596 free (f->emit->x_regno_reg_rtx);
1597 free (f->emit->regno_pointer_flag);
1598 free (f->emit->regno_pointer_align);
1603 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1604 It does not work to do this twice, because the mark bits set here
1605 are not cleared afterwards. */
1608 unshare_all_rtl (insn)
1611 for (; insn; insn = NEXT_INSN (insn))
1612 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1613 || GET_CODE (insn) == CALL_INSN)
/* Unshare the body and both note chains of every real insn.  */
1615 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1616 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1617 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1620 /* Make sure the addresses of stack slots found outside the insn chain
1621 (such as, in DECL_RTL of a variable) are not shared
1622 with the insn chain.
1624 This special care is necessary when the stack slot MEM does not
1625 actually appear in the insn chain. If it does appear, its address
1626 is unshared from all else at that point. */
1628 copy_rtx_if_shared (stack_slot_list);
1631 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1632 Recursively does the same for subexpressions. */
1635 copy_rtx_if_shared (orig)
1638 register rtx x = orig;
1640 register enum rtx_code code;
1641 register const char *format_ptr;
1647 code = GET_CODE (x);
1649 /* These types may be freely shared. */
1662 /* SCRATCH must be shared because they represent distinct values. */
1666 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1667 a LABEL_REF, it isn't sharable. */
1668 if (GET_CODE (XEXP (x, 0)) == PLUS
1669 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1670 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1679 /* The chain of insns is not being copied. */
1683 /* A MEM is allowed to be shared if its address is constant.
1685 We used to allow sharing of MEMs which referenced
1686 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1687 that can lose. instantiate_virtual_regs will not unshare
1688 the MEMs, and combine may change the structure of the address
1689 because it looks safe and profitable in one context, but
1690 in some other context it creates unrecognizable RTL. */
1691 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
1700 /* This rtx may not be shared. If it has already been seen,
1701 replace it with a copy of itself. */
/* NOTE(review): the used-bit test guarding this copy is elided in
   this transcription; the bcopy duplicates the whole rtx, variable
   fld[] part included.  */
1707 copy = rtx_alloc (code);
1708 bcopy ((char *) x, (char *) copy,
1709 (sizeof (*copy) - sizeof (copy->fld)
1710 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1716 /* Now scan the subexpressions recursively.
1717 We can store any replaced subexpressions directly into X
1718 since we know X is not shared! Any vectors in X
1719 must be copied if X was copied. */
1721 format_ptr = GET_RTX_FORMAT (code);
1723 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1725 switch (*format_ptr++)
1728 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1732 if (XVEC (x, i) != NULL)
1735 int len = XVECLEN (x, i);
/* A vector inside a copied rtx must itself be copied.  */
1737 if (copied && len > 0)
1738 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
1739 for (j = 0; j < len; j++)
1740 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
1748 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1749 to look for shared sub-parts. */
1752 reset_used_flags (x)
1756 register enum rtx_code code;
1757 register const char *format_ptr;
1762 code = GET_CODE (x);
1764 /* These types may be freely shared so we needn't do any resetting
1785 /* The chain of insns is not being copied. */
/* Recurse into every operand per the rtx format string, mirroring
   the traversal in copy_rtx_if_shared.  */
1794 format_ptr = GET_RTX_FORMAT (code);
1795 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1797 switch (*format_ptr++)
1800 reset_used_flags (XEXP (x, i));
1804 for (j = 0; j < XVECLEN (x, i); j++)
1805 reset_used_flags (XVECEXP (x, i, j));
1811 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1812 Return X or the rtx for the pseudo reg the value of X was copied into.
1813 OTHER must be valid as a SET_DEST. */
1816 make_safe_from (x, other)
/* Strip wrappers off OTHER to find the real destination.  */
1820 switch (GET_CODE (other))
1823 other = SUBREG_REG (other);
1825 case STRICT_LOW_PART:
1828 other = XEXP (other, 0);
/* X must be copied if storing into OTHER could clobber it: OTHER is
   a MEM and X is not a register, or OTHER is a hard register or a
   register mentioned within X.  */
1834 if ((GET_CODE (other) == MEM
1836 && GET_CODE (x) != REG
1837 && GET_CODE (x) != SUBREG)
1838 || (GET_CODE (other) == REG
1839 && (REGNO (other) < FIRST_PSEUDO_REGISTER
1840 || reg_mentioned_p (other, x))))
1842 rtx temp = gen_reg_rtx (GET_MODE (x));
1843 emit_move_insn (temp, x);
1849 /* Emission of insns (adding them to the doubly-linked list). */
1851 /* Return the first insn of the current sequence or current function. */
1859 /* Return the last insn emitted in current sequence or current function. */
1867 /* Specify a new insn as the last in the chain. */
1870 set_last_insn (insn)
/* Sanity check: an insn with a successor cannot be the last one
   (abort is elided in this transcription).  */
1873 if (NEXT_INSN (insn) != 0)
1878 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1881 get_last_insn_anywhere ()
1883 struct sequence_stack *stack;
/* If the current sequence is empty (elided test), walk outward
   through the pushed sequences for the nearest non-empty one.  */
1886 for (stack = seq_stack; stack; stack = stack->next)
1887 if (stack->last != 0)
1892 /* Return a number larger than any instruction's uid in this function. */
1897 return cur_insn_uid;
1900 /* Return the next insn. If it is a SEQUENCE, return the first insn
1909 insn = NEXT_INSN (insn);
/* Step inside a delay-slot SEQUENCE rather than returning the
   container insn.  */
1910 if (insn && GET_CODE (insn) == INSN
1911 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1912 insn = XVECEXP (PATTERN (insn), 0, 0);
1918 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1922 previous_insn (insn)
1927 insn = PREV_INSN (insn);
/* Mirror of next_insn: descend to the final element of a SEQUENCE.  */
1928 if (insn && GET_CODE (insn) == INSN
1929 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1930 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
1936 /* Return the next insn after INSN that is not a NOTE. This routine does not
1937 look inside SEQUENCEs. */
1940 next_nonnote_insn (insn)
/* Loop (structure elided) until end of chain or a non-NOTE insn.  */
1945 insn = NEXT_INSN (insn);
1946 if (insn == 0 || GET_CODE (insn) != NOTE)
1953 /* Return the previous insn before INSN that is not a NOTE. This routine does
1954 not look inside SEQUENCEs. */
1957 prev_nonnote_insn (insn)
/* Loop (structure elided) backwards until a non-NOTE insn or 0.  */
1962 insn = PREV_INSN (insn);
1963 if (insn == 0 || GET_CODE (insn) != NOTE)
1970 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
1971 or 0, if there is none. This routine does not look inside
1975 next_real_insn (insn)
/* Skip NOTEs, CODE_LABELs and BARRIERs (loop structure elided).  */
1980 insn = NEXT_INSN (insn);
1981 if (insn == 0 || GET_CODE (insn) == INSN
1982 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
1989 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
1990 or 0, if there is none. This routine does not look inside
1994 prev_real_insn (insn)
/* Backwards counterpart of next_real_insn (loop structure elided).  */
1999 insn = PREV_INSN (insn);
2000 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2001 || GET_CODE (insn) == JUMP_INSN)
2008 /* Find the next insn after INSN that really does something. This routine
2009 does not look inside SEQUENCEs. Until reload has completed, this is the
2010 same as next_real_insn. */
2013 next_active_insn (insn)
2018 insn = NEXT_INSN (insn);
/* After reload, USE and CLOBBER patterns no longer do anything, so
   only count other INSNs as active.  */
2020 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2021 || (GET_CODE (insn) == INSN
2022 && (! reload_completed
2023 || (GET_CODE (PATTERN (insn)) != USE
2024 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2031 /* Find the last insn before INSN that really does something. This routine
2032 does not look inside SEQUENCEs. Until reload has completed, this is the
2033 same as prev_real_insn. */
2036 prev_active_insn (insn)
2041 insn = PREV_INSN (insn);
/* Same USE/CLOBBER exclusion as next_active_insn, scanning backwards.  */
2043 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2044 || (GET_CODE (insn) == INSN
2045 && (! reload_completed
2046 || (GET_CODE (PATTERN (insn)) != USE
2047 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2054 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
/* Loop structure and function header elided in this transcription.  */
2062 insn = NEXT_INSN (insn);
2063 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2070 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
/* Loop structure and function header elided in this transcription.  */
2078 insn = PREV_INSN (insn);
2079 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2087 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2088 and REG_CC_USER notes so we can find it. */
2091 link_cc0_insns (insn)
2094 rtx user = next_nonnote_insn (insn);
/* If the user has been packed into a delay-slot SEQUENCE, point the
   note at the real insn inside it.  */
2096 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2097 user = XVECEXP (PATTERN (user), 0, 0);
/* Cross-link the two insns through their REG_NOTES.  */
2099 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2101 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2104 /* Return the next insn that uses CC0 after INSN, which is assumed to
2105 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2106 applied to the result of this function should yield INSN).
2108 Normally, this is simply the next insn. However, if a REG_CC_USER note
2109 is present, it contains the insn that uses CC0.
2111 Return 0 if we can't find the insn. */
2114 next_cc0_user (insn)
2117 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
/* An explicit note takes precedence over chain order.  */
2120 return XEXP (note, 0);
2122 insn = next_nonnote_insn (insn);
2123 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2124 insn = XVECEXP (PATTERN (insn), 0, 0);
/* Only return the insn if it actually mentions cc0; otherwise the
   elided fall-through returns 0.  */
2126 if (insn && GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2127 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2133 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2134 note, it is the previous insn. */
2137 prev_cc0_setter (insn)
2140 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2143 return XEXP (note, 0);
2145 insn = prev_nonnote_insn (insn);
/* The previous insn must set cc0, else abort (elided).  */
2146 if (! sets_cc0_p (PATTERN (insn)))
2153 /* Try splitting insns that can be split for better scheduling.
2154 PAT is the pattern which might split.
2155 TRIAL is the insn providing PAT.
2156 LAST is non-zero if we should return the last insn of the sequence produced.
2158 If this routine succeeds in splitting, it returns the first or last
2159 replacement insn depending on the value of LAST. Otherwise, it
2160 returns TRIAL. If the insn to be returned can be split, it will be. */
2163 try_split (pat, trial, last)
2167 rtx before = PREV_INSN (trial);
2168 rtx after = NEXT_INSN (trial);
2169 rtx seq = split_insns (pat, trial);
2170 int has_barrier = 0;
2173 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2174 We may need to handle this specially. */
2175 if (after && GET_CODE (after) == BARRIER)
2178 after = NEXT_INSN (after);
2183 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2184 The latter case will normally arise only when being done so that
2185 it, in turn, will be split (SFmode on the 29k is an example). */
2186 if (GET_CODE (seq) == SEQUENCE)
2188 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2189 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2190 increment the usage count so we don't delete the label. */
2193 if (GET_CODE (trial) == JUMP_INSN)
2194 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2195 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2197 JUMP_LABEL (XVECEXP (seq, 0, i)) = JUMP_LABEL (trial);
2199 if (JUMP_LABEL (trial))
2200 LABEL_NUSES (JUMP_LABEL (trial))++;
/* Replace TRIAL with the split sequence, restoring a trailing
   BARRIER if one had followed the original jump.  */
2203 tem = emit_insn_after (seq, before);
2205 delete_insn (trial);
2207 emit_barrier_after (tem);
2209 /* Recursively call try_split for each new insn created; by the
2210 time control returns here that insn will be fully split, so
2211 set LAST and continue from the insn after the one returned.
2212 We can't use next_active_insn here since AFTER may be a note.
2213 Ignore deleted insns, which can be occur if not optimizing. */
2214 for (tem = NEXT_INSN (before); tem != after;
2215 tem = NEXT_INSN (tem))
2216 if (! INSN_DELETED_P (tem)
2217 && GET_RTX_CLASS (GET_CODE (tem)) == 'i')
2218 tem = try_split (PATTERN (tem), tem, 1);
2220 /* Avoid infinite loop if the result matches the original pattern. */
2221 else if (rtx_equal_p (seq, pat))
/* Single-insn result: install the new pattern in TRIAL and try to
   split again (INSN_CODE reset forces re-recognition).  */
2225 PATTERN (trial) = seq;
2226 INSN_CODE (trial) = -1;
2227 try_split (seq, trial, last);
2230 /* Return either the first or the last insn, depending on which was
2232 return last ? prev_active_insn (after) : next_active_insn (before);
2238 /* Make and return an INSN rtx, initializing all its slots.
2239 Store PATTERN in the pattern slots. */
2242 make_insn_raw (pattern)
2247 /* If in RTL generation phase, see if FREE_INSN can be used. */
/* Reuse a recycled insn from the free list when garbage collection
   is off; otherwise allocate fresh.  */
2248 if (!ggc_p && free_insn != 0 && rtx_equal_function_value_matters)
2251 free_insn = NEXT_INSN (free_insn);
2252 PUT_CODE (insn, INSN);
2255 insn = rtx_alloc (INSN);
2257 INSN_UID (insn) = cur_insn_uid++;
2258 PATTERN (insn) = pattern;
/* -1 marks the insn as not yet recognized.  */
2259 INSN_CODE (insn) = -1;
2260 LOG_LINKS (insn) = NULL;
2261 REG_NOTES (insn) = NULL;
2266 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2269 make_jump_insn_raw (pattern)
2274 insn = rtx_alloc (JUMP_INSN)
2275 INSN_UID (insn) = cur_insn_uid++;
2277 PATTERN (insn) = pattern;
2278 INSN_CODE (insn) = -1;
2279 LOG_LINKS (insn) = NULL;
2280 REG_NOTES (insn) = NULL;
/* Target label is filled in later by the caller / jump pass.  */
2281 JUMP_LABEL (insn) = NULL;
2286 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2289 make_call_insn_raw (pattern)
2294 insn = rtx_alloc (CALL_INSN);
2295 INSN_UID (insn) = cur_insn_uid++;
2297 PATTERN (insn) = pattern;
2298 INSN_CODE (insn) = -1;
2299 LOG_LINKS (insn) = NULL;
2300 REG_NOTES (insn) = NULL;
/* USE/CLOBBER list describing the call's register usage; empty for now.  */
2301 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2306 /* Add INSN to the end of the doubly-linked list.
2307 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
/* (Function header elided in this transcription.)  */
2313 PREV_INSN (insn) = last_insn;
2314 NEXT_INSN (insn) = 0;
2316 if (NULL != last_insn)
2317 NEXT_INSN (last_insn) = insn;
/* First insn of an empty chain also becomes first_insn (elided).  */
2319 if (NULL == first_insn)
2325 /* Add INSN into the doubly-linked list after insn AFTER. This and
2326 the next should be the only functions called to insert an insn once
2327 delay slots have been filled since only they know how to update a
2331 add_insn_after (insn, after)
2334 rtx next = NEXT_INSN (after);
/* Inserting after a deleted insn is a bug when optimizing (abort elided).  */
2336 if (optimize && INSN_DELETED_P (after))
2339 NEXT_INSN (insn) = next;
2340 PREV_INSN (insn) = after;
2344 PREV_INSN (next) = insn;
/* Keep the first insn inside a following SEQUENCE consistent too.  */
2345 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2346 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2348 else if (last_insn == after)
2352 struct sequence_stack *stack = seq_stack;
2353 /* Scan all pending sequences too. */
2354 for (; stack; stack = stack->next)
2355 if (after == stack->last)
2365 NEXT_INSN (after) = insn;
2366 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2368 rtx sequence = PATTERN (after);
2369 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2373 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2374 the previous should be the only functions called to insert an insn once
2375 delay slots have been filled since only they know how to update a
2379 add_insn_before (insn, before)
2382 rtx prev = PREV_INSN (before);
/* Inserting before a deleted insn is a bug when optimizing (abort elided).  */
2384 if (optimize && INSN_DELETED_P (before))
2387 PREV_INSN (insn) = prev;
2388 NEXT_INSN (insn) = before;
2392 NEXT_INSN (prev) = insn;
/* Keep the last insn inside a preceding SEQUENCE consistent too.  */
2393 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2395 rtx sequence = PATTERN (prev);
2396 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2399 else if (first_insn == before)
2403 struct sequence_stack *stack = seq_stack;
2404 /* Scan all pending sequences too. */
2405 for (; stack; stack = stack->next)
2406 if (before == stack->first)
2408 stack->first = insn;
2416 PREV_INSN (before) = insn;
2417 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2418 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2421 /* Remove an insn from its doubly-linked list. This function knows how
2422 to handle sequences. */
/* (Function header elided in this transcription.)  */
2427 rtx next = NEXT_INSN (insn);
2428 rtx prev = PREV_INSN (insn);
2431 NEXT_INSN (prev) = next;
/* Fix the forward link stored inside a preceding SEQUENCE as well.  */
2432 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2434 rtx sequence = PATTERN (prev);
2435 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2438 else if (first_insn == insn)
2442 struct sequence_stack *stack = seq_stack;
2443 /* Scan all pending sequences too. */
2444 for (; stack; stack = stack->next)
2445 if (insn == stack->first)
2447 stack->first = next;
2457 PREV_INSN (next) = prev;
2458 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2459 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2461 else if (last_insn == insn)
2465 struct sequence_stack *stack = seq_stack;
2466 /* Scan all pending sequences too. */
2467 for (; stack; stack = stack->next)
2468 if (insn == stack->last)
2479 /* Delete all insns made since FROM.
2480 FROM becomes the new last instruction. */
2483 delete_insns_since (from)
/* Truncate the chain; last_insn update is elided in this transcription.  */
2489 NEXT_INSN (from) = 0;
2493 /* This function is deprecated, please use sequences instead.
2495 Move a consecutive bunch of insns to a different place in the chain.
2496 The insns to be moved are those between FROM and TO.
2497 They are moved to a new position after the insn AFTER.
2498 AFTER must not be FROM or TO or any insn in between.
2500 This function does not know about SEQUENCEs and hence should not be
2501 called after delay-slot filling has been done. */
2504 reorder_insns (from, to, after)
2505 rtx from, to, after;
2507 /* Splice this bunch out of where it is now. */
2508 if (PREV_INSN (from))
2509 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2511 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
/* Keep the chain-end globals valid if the moved run touched them.  */
2512 if (last_insn == to)
2513 last_insn = PREV_INSN (from);
2514 if (first_insn == from)
2515 first_insn = NEXT_INSN (to);
2517 /* Make the new neighbors point to it and it to them. */
2518 if (NEXT_INSN (after))
2519 PREV_INSN (NEXT_INSN (after)) = to;
2521 NEXT_INSN (to) = NEXT_INSN (after);
2522 PREV_INSN (from) = after;
2523 NEXT_INSN (after) = from;
/* Run inserted at the end: last_insn update is elided here.  */
2524 if (after == last_insn)
2528 /* Return the line note insn preceding INSN. */
2531 find_line_note (insn)
/* With line numbers disabled there is nothing to find.  */
2534 if (no_line_numbers)
2537 for (; insn; insn = PREV_INSN (insn))
/* Line-number notes have a non-negative NOTE_LINE_NUMBER.  */
2538 if (GET_CODE (insn) == NOTE
2539 && NOTE_LINE_NUMBER (insn) >= 0)
2545 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2546 of the moved insns when debugging. This may insert a note between AFTER
2547 and FROM, and another one after TO. */
2550 reorder_insns_with_line_notes (from, to, after)
2551 rtx from, to, after;
2553 rtx from_line = find_line_note (from);
2554 rtx after_line = find_line_note (after);
2556 reorder_insns (from, to, after);
/* Same source position on both sides: no notes needed.  */
2558 if (from_line == after_line)
/* Re-establish the moved code's position, then restore the original
   position after the moved run.  */
2562 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2563 NOTE_LINE_NUMBER (from_line),
2566 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2567 NOTE_LINE_NUMBER (after_line),
2571 /* Emit an insn of given code and pattern
2572 at a specified place within the doubly-linked list. */
2574 /* Make an instruction with body PATTERN
2575 and output it before the instruction BEFORE. */
2578 emit_insn_before (pattern, before)
2579 register rtx pattern, before;
2581 register rtx insn = before;
/* A SEQUENCE is unpacked: each element is linked in individually.  */
2583 if (GET_CODE (pattern) == SEQUENCE)
2587 for (i = 0; i < XVECLEN (pattern, 0); i++)
2589 insn = XVECEXP (pattern, 0, i);
2590 add_insn_before (insn, before);
/* Recycle the emptied SEQUENCE rtx when not garbage collecting.  */
2592 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2593 sequence_result[XVECLEN (pattern, 0)] = pattern;
2597 insn = make_insn_raw (pattern);
2598 add_insn_before (insn, before);
2604 /* Similar to emit_insn_before, but update basic block boundaries as well. */
2607 emit_block_insn_before (pattern, before, block)
2608 rtx pattern, before;
2611 rtx prev = PREV_INSN (before);
2612 rtx r = emit_insn_before (pattern, before);
/* If BEFORE headed the block, the first emitted insn becomes the
   new block head.  */
2613 if (block && block->head == before)
2614 block->head = NEXT_INSN (prev);
2618 /* Make an instruction with body PATTERN and code JUMP_INSN
2619 and output it before the instruction BEFORE. */
2622 emit_jump_insn_before (pattern, before)
2623 register rtx pattern, before;
/* SEQUENCE patterns already carry their own insn codes; delegate.  */
2627 if (GET_CODE (pattern) == SEQUENCE)
2628 insn = emit_insn_before (pattern, before);
2631 insn = make_jump_insn_raw (pattern);
2632 add_insn_before (insn, before);
2638 /* Make an instruction with body PATTERN and code CALL_INSN
2639 and output it before the instruction BEFORE. */
2642 emit_call_insn_before (pattern, before)
2643 register rtx pattern, before;
2647 if (GET_CODE (pattern) == SEQUENCE)
2648 insn = emit_insn_before (pattern, before);
2651 insn = make_call_insn_raw (pattern);
2652 add_insn_before (insn, before);
/* Force the insn code to CALL_INSN (make_call_insn_raw's code is
   overridden here; see also emit_call_insn below).  */
2653 PUT_CODE (insn, CALL_INSN);
2659 /* Make an insn of code BARRIER
2660 and output it before the insn BEFORE. */
2663 emit_barrier_before (before)
2664 register rtx before;
/* BARRIERs are allocated directly; they have no pattern, only a UID.  */
2666 register rtx insn = rtx_alloc (BARRIER);
2668 INSN_UID (insn) = cur_insn_uid++;
2670 add_insn_before (insn, before);
2674 /* Emit the label LABEL before the insn BEFORE. */
2677 emit_label_before (label, before)
2680 /* This can be called twice for the same label as a result of the
2681 confusion that follows a syntax error! So make it harmless. */
/* A zero UID marks a label not yet placed in the insn chain; only then
   do we assign a UID and link it in.  */
2682 if (INSN_UID (label) == 0)
2684 INSN_UID (label) = cur_insn_uid++;
2685 add_insn_before (label, before);
2691 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
2694 emit_note_before (subtype, before)
2698 register rtx note = rtx_alloc (NOTE);
2699 INSN_UID (note) = cur_insn_uid++;
/* A NULL source file plus the subtype stored in the line-number field
   marks this as an administrative note, not a line-number note.  */
2700 NOTE_SOURCE_FILE (note) = 0;
2701 NOTE_LINE_NUMBER (note) = subtype;
2703 add_insn_before (note, before);
2707 /* Make an insn of code INSN with body PATTERN
2708 and output it after the insn AFTER. */
2711 emit_insn_after (pattern, after)
2712 register rtx pattern, after;
2714 register rtx insn = after;
/* Mirror of emit_insn_before: splay a SEQUENCE into individual insns,
   linking each one after AFTER in turn.  */
2716 if (GET_CODE (pattern) == SEQUENCE)
2720 for (i = 0; i < XVECLEN (pattern, 0); i++)
2722 insn = XVECEXP (pattern, 0, i);
2723 add_insn_after (insn, after);
/* Recycle the emptied SEQUENCE rtx when not garbage-collecting.  */
2726 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2727 sequence_result[XVECLEN (pattern, 0)] = pattern;
2731 insn = make_insn_raw (pattern);
2732 add_insn_after (insn, after);
2738 /* Similar to emit_insn_after, except that line notes are to be inserted so
2739 as to act as if this insn were at FROM. */
2742 emit_insn_after_with_line_notes (pattern, after, from)
2743 rtx pattern, after, from;
/* Find the line notes governing FROM and AFTER before emitting.  */
2745 rtx from_line = find_line_note (from);
2746 rtx after_line = find_line_note (after);
2747 rtx insn = emit_insn_after (pattern, after);
/* Surround the new insn with FROM's file/line, then restore AFTER's
   file/line for the insns that follow.  */
2750 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2751 NOTE_LINE_NUMBER (from_line),
2755 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2756 NOTE_LINE_NUMBER (after_line),
2760 /* Similar to emit_insn_after, but update basic block boundaries as well. */
2763 emit_block_insn_after (pattern, after, block)
2767 rtx r = emit_insn_after (pattern, after);
/* If AFTER ended BLOCK, the block's end pointer must move to the last
   newly emitted insn (assignment elided in this view).  */
2768 if (block && block->end == after)
2773 /* Make an insn of code JUMP_INSN with body PATTERN
2774 and output it after the insn AFTER. */
2777 emit_jump_insn_after (pattern, after)
2778 register rtx pattern, after;
/* SEQUENCEs go through emit_insn_after, which splays the elements.  */
2782 if (GET_CODE (pattern) == SEQUENCE)
2783 insn = emit_insn_after (pattern, after);
2786 insn = make_jump_insn_raw (pattern);
2787 add_insn_after (insn, after);
2793 /* Make an insn of code BARRIER
2794 and output it after the insn AFTER. */
2797 emit_barrier_after (after)
/* BARRIERs carry no pattern; allocate, number, and link.  */
2800 register rtx insn = rtx_alloc (BARRIER);
2802 INSN_UID (insn) = cur_insn_uid++;
2804 add_insn_after (insn, after);
2808 /* Emit the label LABEL after the insn AFTER. */
2811 emit_label_after (label, after)
2814 /* This can be called twice for the same label
2815 as a result of the confusion that follows a syntax error!
2816 So make it harmless. */
/* UID zero means the label was never linked into the chain yet.  */
2817 if (INSN_UID (label) == 0)
2819 INSN_UID (label) = cur_insn_uid++;
2820 add_insn_after (label, after);
2826 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
2829 emit_note_after (subtype, after)
2833 register rtx note = rtx_alloc (NOTE);
2834 INSN_UID (note) = cur_insn_uid++;
/* Administrative note: no source file, subtype in the line field.  */
2835 NOTE_SOURCE_FILE (note) = 0;
2836 NOTE_LINE_NUMBER (note) = subtype;
2837 add_insn_after (note, after);
2841 /* Emit a line note for FILE and LINE after the insn AFTER. */
2844 emit_line_note_after (file, line, after)
/* Positive LINE means a real source-line note; suppress it entirely
   when line numbers were not requested.  */
2851 if (no_line_numbers && line > 0)
2857 note = rtx_alloc (NOTE);
2858 INSN_UID (note) = cur_insn_uid++;
2859 NOTE_SOURCE_FILE (note) = file;
2860 NOTE_LINE_NUMBER (note) = line;
2861 add_insn_after (note, after);
2865 /* Make an insn of code INSN with pattern PATTERN
2866 and add it to the end of the doubly-linked list.
2867 If PATTERN is a SEQUENCE, take the elements of it
2868 and emit an insn for each element.
2870 Returns the last insn emitted. */
/* Start from the current chain tail so an empty SEQUENCE still returns
   a sensible "last insn".  */
2876 rtx insn = last_insn;
2878 if (GET_CODE (pattern) == SEQUENCE)
2882 for (i = 0; i < XVECLEN (pattern, 0); i++)
2884 insn = XVECEXP (pattern, 0, i);
/* Recycle the emptied SEQUENCE rtx when not garbage-collecting.  */
2887 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2888 sequence_result[XVECLEN (pattern, 0)] = pattern;
2892 insn = make_insn_raw (pattern);
2899 /* Emit the insns in a chain starting with INSN.
2900 Return the last insn emitted. */
/* NEXT_INSN is saved before the insn is re-linked into the current
   chain, since linking clobbers the chain pointers.  */
2910 rtx next = NEXT_INSN (insn);
2919 /* Emit the insns in a chain starting with INSN and place them in front of
2920 the insn BEFORE. Return the last insn emitted. */
2923 emit_insns_before (insn, before)
/* Save the successor before add_insn_before rewrites the links.  */
2931 rtx next = NEXT_INSN (insn);
2932 add_insn_before (insn, before);
2940 /* Emit the insns in a chain starting with FIRST and place them in back of
2941 the insn AFTER. Return the last insn emitted. */
2944 emit_insns_after (first, after)
2949 register rtx after_after;
/* Walk to the end of the chain being inserted.  */
2957 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
/* Splice the whole FIRST..LAST chain between AFTER and its old
   successor by rewriting the four boundary links.  */
2960 after_after = NEXT_INSN (after);
2962 NEXT_INSN (after) = first;
2963 PREV_INSN (first) = after;
2964 NEXT_INSN (last) = after_after;
2966 PREV_INSN (after_after) = last;
/* If we appended at the chain tail, the global tail pointer moves.  */
2968 if (after == last_insn)
2973 /* Make an insn of code JUMP_INSN with pattern PATTERN
2974 and add it to the end of the doubly-linked list. */
2977 emit_jump_insn (pattern)
/* SEQUENCEs are splayed by emit_insn; each element keeps its own code.  */
2980 if (GET_CODE (pattern) == SEQUENCE)
2981 return emit_insn (pattern);
2984 register rtx insn = make_jump_insn_raw (pattern);
2990 /* Make an insn of code CALL_INSN with pattern PATTERN
2991 and add it to the end of the doubly-linked list. */
2994 emit_call_insn (pattern)
2997 if (GET_CODE (pattern) == SEQUENCE)
2998 return emit_insn (pattern);
3001 register rtx insn = make_call_insn_raw (pattern);
/* Force the insn code to CALL_INSN, matching emit_call_insn_before.  */
3003 PUT_CODE (insn, CALL_INSN);
3008 /* Add the label LABEL to the end of the doubly-linked list. */
3014 /* This can be called twice for the same label
3015 as a result of the confusion that follows a syntax error!
3016 So make it harmless. */
/* Only a label with UID zero (never placed) is linked in.  */
3017 if (INSN_UID (label) == 0)
3019 INSN_UID (label) = cur_insn_uid++;
3025 /* Make an insn of code BARRIER
3026 and add it to the end of the doubly-linked list. */
/* BARRIERs have no pattern; allocate and number one here.  */
3031 register rtx barrier = rtx_alloc (BARRIER);
3032 INSN_UID (barrier) = cur_insn_uid++;
3037 /* Make an insn of code NOTE
3038 with data-fields specified by FILE and LINE
3039 and add it to the end of the doubly-linked list,
3040 but only if line-numbers are desired for debugging info. */
3043 emit_line_note (file, line)
/* Record the position for the statement tree even when no note is
   emitted below.  */
3047 set_file_and_line_for_stmt (file, line);
3050 if (no_line_numbers)
3054 return emit_note (file, line);
3057 /* Make an insn of code NOTE
3058 with data-fields specified by FILE and LINE
3059 and add it to the end of the doubly-linked list.
3060 If it is a line-number NOTE, omit it if it matches the previous one. */
3063 emit_note (file, line)
/* Suppress a line note identical to the one most recently emitted.  */
3071 if (file && last_filename && !strcmp (file, last_filename)
3072 && line == last_linenum)
3074 last_filename = file;
3075 last_linenum = line;
/* Positive LINE means a real line-number note; drop it when line
   numbers are disabled.  */
3078 if (no_line_numbers && line > 0)
3084 note = rtx_alloc (NOTE);
3085 INSN_UID (note) = cur_insn_uid++;
3086 NOTE_SOURCE_FILE (note) = file;
3087 NOTE_LINE_NUMBER (note) = line;
3092 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3095 emit_line_note_force (file, line)
/* Defeats emit_note's duplicate suppression (the reset of the
   last-position cache is elided in this view).  */
3100 return emit_line_note (file, line);
3103 /* Cause next statement to emit a line note even if the line number
3104 has not changed. This is used at the beginning of a function. */
3107 force_next_line_note ()
3112 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3113 note of this type already exists, remove it first. */
3116 set_unique_reg_note (insn, kind, datum)
3121 rtx note = find_reg_note (insn, kind, NULL_RTX);
3123 /* First remove the note if there already is one. */
3125 remove_note (insn, note);
/* Prepend the new note to the insn's REG_NOTES list.  */
3127 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3130 /* Return an indication of which type of insn should have X as a body.
3131 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
/* Simple cases first: labels, calls, and RETURN patterns.  */
3137 if (GET_CODE (x) == CODE_LABEL)
3139 if (GET_CODE (x) == CALL)
3141 if (GET_CODE (x) == RETURN)
/* A SET is a jump if it sets pc, a call if its source is a CALL.  */
3143 if (GET_CODE (x) == SET)
3145 if (SET_DEST (x) == pc_rtx)
3147 else if (GET_CODE (SET_SRC (x)) == CALL)
/* For a PARALLEL, scan its elements for a CALL, a set of pc, or a set
   whose source is a CALL.  */
3152 if (GET_CODE (x) == PARALLEL)
3155 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3156 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3158 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3159 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3161 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3162 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3168 /* Emit the rtl pattern X as an appropriate kind of insn.
3169 If X is a label, it is simply added into the insn chain. */
/* Dispatch on classify_insn's verdict to the matching emit_* routine.  */
3175 enum rtx_code code = classify_insn (x);
3177 if (code == CODE_LABEL)
3178 return emit_label (x);
3179 else if (code == INSN)
3180 return emit_insn (x);
3181 else if (code == JUMP_INSN)
3183 register rtx insn = emit_jump_insn (x);
/* An unconditional jump or RETURN ends the flow; follow it with a
   barrier.  */
3184 if (simplejump_p (insn) || GET_CODE (x) == RETURN)
3185 return emit_barrier ();
3188 else if (code == CALL_INSN)
3189 return emit_call_insn (x);
3194 /* Begin emitting insns to a sequence which can be packaged in an
3195 RTL_EXPR. If this sequence will contain something that might cause
3196 the compiler to pop arguments to function calls (because those
3197 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3198 details), use do_pending_stack_adjust before calling this function.
3199 That will ensure that the deferred pops are not accidentally
3200 emitted in the middle of this sequence. */
3205 struct sequence_stack *tem;
3207 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
/* Push the current emission state (chain head/tail and owning
   RTL_EXPR) so end_sequence can restore it.  */
3209 tem->next = seq_stack;
3210 tem->first = first_insn;
3211 tem->last = last_insn;
3212 tem->sequence_rtl_expr = seq_rtl_expr;
3220 /* Similarly, but indicate that this sequence will be placed in T, an
3221 RTL_EXPR. See the documentation for start_sequence for more
3222 information about how to use this function. */
3225 start_sequence_for_rtl_expr (t)
3233 /* Set up the insn chain starting with FIRST as the current sequence,
3234 saving the previously current one. See the documentation for
3235 start_sequence for more information about how to use this function. */
3238 push_to_sequence (first)
/* Walk FIRST's chain to find its last insn (handles FIRST == 0).  */
3245 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3251 /* Set up the outer-level insn chain
3252 as the current sequence, saving the previously current one. */
3255 push_topmost_sequence ()
3257 struct sequence_stack *stack, *top = NULL;
/* Walk to the bottom of the sequence stack; that entry holds the
   outermost (function-level) insn chain.  */
3261 for (stack = seq_stack; stack; stack = stack->next)
3264 first_insn = top->first;
3265 last_insn = top->last;
3266 seq_rtl_expr = top->sequence_rtl_expr;
3269 /* After emitting to the outer-level insn chain, update the outer-level
3270 insn chain, and restore the previous saved state. */
3273 pop_topmost_sequence ()
3275 struct sequence_stack *stack, *top = NULL;
/* Find the bottom stack entry and write the (possibly extended)
   outer-level chain back into it before popping.  */
3277 for (stack = seq_stack; stack; stack = stack->next)
3280 top->first = first_insn;
3281 top->last = last_insn;
3282 /* ??? Why don't we save seq_rtl_expr here? */
3287 /* After emitting to a sequence, restore previous saved state.
3289 To get the contents of the sequence just made, you must call
3290 `gen_sequence' *before* calling here.
3292 If the compiler might have deferred popping arguments while
3293 generating this sequence, and this sequence will not be immediately
3294 inserted into the instruction stream, use do_pending_stack_adjust
3295 before calling gen_sequence. That will ensure that the deferred
3296 pops are inserted into this sequence, and not into some random
3297 location in the instruction stream. See INHIBIT_DEFER_POP for more
3298 information about deferred popping of arguments. */
3303 struct sequence_stack *tem = seq_stack;
/* Pop the saved emission state pushed by start_sequence.  */
3305 first_insn = tem->first;
3306 last_insn = tem->last;
3307 seq_rtl_expr = tem->sequence_rtl_expr;
3308 seq_stack = tem->next;
3313 /* Return 1 if currently emitting into a sequence. */
3318 return seq_stack != 0;
3321 /* Generate a SEQUENCE rtx containing the insns already emitted
3322 to the current sequence.
3324 This is how the gen_... function from a DEFINE_EXPAND
3325 constructs the SEQUENCE that it returns. */
3335 /* Count the insns in the chain. */
3337 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3340 /* If only one insn, return its pattern rather than a SEQUENCE.
3341 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3342 the case of an empty list.) */
/* Only plain INSNs, JUMP_INSNs, and CALL_INSNs without a function-usage
   list can be flattened to their bare pattern; frame-related insns must
   keep their insn wrapper.  */
3344 && ! RTX_FRAME_RELATED_P (first_insn)
3345 && (GET_CODE (first_insn) == INSN
3346 || GET_CODE (first_insn) == JUMP_INSN
3347 /* Don't discard the call usage field. */
3348 || (GET_CODE (first_insn) == CALL_INSN
3349 && CALL_INSN_FUNCTION_USAGE (first_insn) == NULL_RTX)))
/* Recycle the insn rtx onto the free list (non-GC path) and hand back
   just its pattern.  */
3353 NEXT_INSN (first_insn) = free_insn;
3354 free_insn = first_insn;
3356 return PATTERN (first_insn);
3359 /* Put them in a vector. See if we already have a SEQUENCE of the
3360 appropriate length around. */
3361 if (!ggc_p && len < SEQUENCE_RESULT_SIZE
3362 && (result = sequence_result[len]) != 0)
3363 sequence_result[len] = 0;
3366 /* Ensure that this rtl goes in saveable_obstack, since we may
3368 push_obstacks_nochange ();
3369 rtl_in_saveable_obstack ();
3370 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
/* Fill the SEQUENCE's vector with the emitted insns, in order.  */
3374 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3375 XVECEXP (result, 0, i) = tem;
3380 /* Put the various virtual registers into REGNO_REG_RTX. */
3383 init_virtual_regs (es)
3384 struct emit_status *es;
/* Install the shared virtual-register rtx objects at their fixed
   register numbers in ES's regno -> rtx map.  */
3386 rtx *ptr = es->x_regno_reg_rtx;
3387 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3388 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3389 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3390 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3391 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
/* Reset the per-function rtl caches kept by this file.  */
3395 clear_emit_caches ()
3399 /* Clear the start_sequence/gen_sequence cache. */
3400 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3401 sequence_result[i] = 0;
/* File-scope scratch state for copy_insn_1; reset by copy_insn before
   each top-level copy.  */
3405 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3406 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3407 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3408 static int copy_insn_n_scratches;
3410 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3411 copied an ASM_OPERANDS.
3412 In that case, it is the original input-operand vector. */
3413 static rtvec orig_asm_operands_vector;
3415 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3416 copied an ASM_OPERANDS.
3417 In that case, it is the copied input-operand vector. */
3418 static rtvec copy_asm_operands_vector;
3420 /* Likewise for the constraints vector. */
3421 static rtvec orig_asm_constraints_vector;
3422 static rtvec copy_asm_constraints_vector;
3424 /* Recursively create a new copy of an rtx for copy_insn.
3425 This function differs from copy_rtx in that it handles SCRATCHes and
3426 ASM_OPERANDs properly.
3427 Normally, this function is not used directly; use copy_insn as front end.
3428 However, you could first copy an insn pattern with copy_insn and then use
3429 this function afterwards to properly copy any REG_NOTEs containing
3438 register RTX_CODE code;
3439 register const char *format_ptr;
3441 code = GET_CODE (orig);
/* A SCRATCH already copied during this copy_insn is shared, not
   duplicated again.  */
3457 for (i = 0; i < copy_insn_n_scratches; i++)
3458 if (copy_insn_scratch_in[i] == orig)
3459 return copy_insn_scratch_out[i];
3463 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3464 a LABEL_REF, it isn't sharable. */
3465 if (GET_CODE (XEXP (orig, 0)) == PLUS
3466 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3467 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3471 /* A MEM with a constant address is not sharable. The problem is that
3472 the constant address may need to be reloaded. If the mem is shared,
3473 then reloading one copy of this mem will cause all copies to appear
3474 to have been reloaded. */
3480 copy = rtx_alloc (code);
3482 /* Copy the various flags, and other information. We assume that
3483 all fields need copying, and then clear the fields that should
3484 not be copied. That is the sensible default behavior, and forces
3485 us to explicitly document why we are *not* copying a flag. */
3486 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
3488 /* We do not copy the USED flag, which is used as a mark bit during
3489 walks over the RTL. */
3492 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3493 if (GET_RTX_CLASS (code) == 'i')
3497 copy->frame_related = 0;
/* Walk the operands according to the rtx format string, recursing on
   'e' (expression) and 'E'/'V' (vector) operands.  */
3500 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3502 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3504 copy->fld[i] = orig->fld[i];
3505 switch (*format_ptr++)
3508 if (XEXP (orig, i) != NULL)
3509 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
/* ASM_OPERANDS operand/constraint vectors already copied once are
   shared across all ASM_OPERANDS of this insn.  */
3514 if (XVEC (orig, i) == orig_asm_constraints_vector)
3515 XVEC (copy, i) = copy_asm_constraints_vector;
3516 else if (XVEC (orig, i) == orig_asm_operands_vector)
3517 XVEC (copy, i) = copy_asm_operands_vector;
3518 else if (XVEC (orig, i) != NULL)
3520 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3521 for (j = 0; j < XVECLEN (copy, i); j++)
3522 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
/* Bitmap operands get a fresh deep copy on the rtl obstack.  */
3528 bitmap new_bits = BITMAP_OBSTACK_ALLOC (rtl_obstack);
3529 bitmap_copy (new_bits, XBITMAP (orig, i));
3530 XBITMAP (copy, i) = new_bits;
3541 /* These are left unchanged. */
/* Record SCRATCH / ASM_OPERANDS bookkeeping so later occurrences in
   this insn are shared rather than re-copied.  */
3549 if (code == SCRATCH)
3551 i = copy_insn_n_scratches++;
3552 if (i >= MAX_RECOG_OPERANDS)
3554 copy_insn_scratch_in[i] = orig;
3555 copy_insn_scratch_out[i] = copy;
3557 else if (code == ASM_OPERANDS)
3559 orig_asm_operands_vector = XVEC (orig, 3);
3560 copy_asm_operands_vector = XVEC (copy, 3);
3561 orig_asm_constraints_vector = XVEC (orig, 4);
3562 copy_asm_constraints_vector = XVEC (copy, 4);
3568 /* Create a new copy of an rtx.
3569 This function differs from copy_rtx in that it handles SCRATCHes and
3570 ASM_OPERANDs properly.
3571 INSN doesn't really have to be a full INSN; it could be just the
/* Reset the per-copy sharing state used by copy_insn_1, then copy.  */
3577 copy_insn_n_scratches = 0;
3578 orig_asm_operands_vector = 0;
3579 orig_asm_constraints_vector = 0;
3580 copy_asm_operands_vector = 0;
3581 copy_asm_constraints_vector = 0;
3582 return copy_insn_1 (insn);
3585 /* Initialize data structures and variables in this file
3586 before generating rtl for each function. */
3591 struct function *f = current_function;
/* Fresh per-function emit state, hung off the current function.  */
3593 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
3596 seq_rtl_expr = NULL;
/* Pseudo registers start just past the fixed virtual registers.  */
3598 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3601 first_label_num = label_num;
3605 clear_emit_caches ();
3607 /* Init the tables that describe all the pseudo regs. */
3609 f->emit->regno_pointer_flag_length = LAST_VIRTUAL_REGISTER + 101;
3611 f->emit->regno_pointer_flag
3612 = (char *) xcalloc (f->emit->regno_pointer_flag_length, sizeof (char));
3614 f->emit->regno_pointer_align
3615 = (char *) xcalloc (f->emit->regno_pointer_flag_length,
3619 = (rtx *) xcalloc (f->emit->regno_pointer_flag_length * sizeof (rtx),
3622 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3623 init_virtual_regs (f->emit);
3625 /* Indicate that the virtual registers and stack locations are
3627 REGNO_POINTER_FLAG (STACK_POINTER_REGNUM) = 1;
3628 REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM) = 1;
3629 REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM) = 1;
3630 REGNO_POINTER_FLAG (ARG_POINTER_REGNUM) = 1;
3632 REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM) = 1;
3633 REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM) = 1;
3634 REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM) = 1;
3635 REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM) = 1;
3636 REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM) = 1;
/* Record the known alignment of the stack-related registers, when the
   target defines STACK_BOUNDARY.  */
3638 #ifdef STACK_BOUNDARY
3639 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3640 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3641 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)
3642 = STACK_BOUNDARY / BITS_PER_UNIT;
3643 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3645 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM)
3646 = STACK_BOUNDARY / BITS_PER_UNIT;
3647 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
3648 = STACK_BOUNDARY / BITS_PER_UNIT;
3649 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM)
3650 = STACK_BOUNDARY / BITS_PER_UNIT;
3651 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM)
3652 = STACK_BOUNDARY / BITS_PER_UNIT;
3653 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = UNITS_PER_WORD;
3656 #ifdef INIT_EXPANDERS
3661 /* Mark SS for GC. */
3664 mark_sequence_stack (ss)
3665 struct sequence_stack *ss;
/* Mark each stack entry's insn chain and owning RTL_EXPR as live.  */
3669 ggc_mark_rtx (ss->first);
3670 ggc_mark_tree (ss->sequence_rtl_expr);
3675 /* Mark ES for GC. */
3678 mark_emit_status (es)
3679 struct emit_status *es;
/* Mark every pseudo-register rtx in the regno -> rtx map ...  */
3687 for (i = es->regno_pointer_flag_length, r = es->x_regno_reg_rtx;
/* ... then the saved sequence stack, the current RTL_EXPR, and the
   head of the insn chain.  */
3691 mark_sequence_stack (es->sequence_stack);
3692 ggc_mark_tree (es->sequence_rtl_expr);
3693 ggc_mark_rtx (es->x_first_insn);
3696 /* Create some permanent unique rtl objects shared between all functions.
3697 LINE_NUMBERS is nonzero if line numbers are to be generated. */
3700 init_emit_once (line_numbers)
3704 enum machine_mode mode;
/* Note: this local shadows the file-scope double_mode declared near the
   top of the file.  */
3705 enum machine_mode double_mode;
3707 no_line_numbers = ! line_numbers;
3709 /* Compute the word and byte modes. */
3711 byte_mode = VOIDmode;
3712 word_mode = VOIDmode;
3713 double_mode = VOIDmode;
/* Pick the narrowest integer modes matching BITS_PER_UNIT and
   BITS_PER_WORD.  */
3715 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3716 mode = GET_MODE_WIDER_MODE (mode))
3718 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
3719 && byte_mode == VOIDmode)
3722 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
3723 && word_mode == VOIDmode)
3727 #ifndef DOUBLE_TYPE_SIZE
3728 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
/* Likewise, pick the float mode matching DOUBLE_TYPE_SIZE.  */
3731 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3732 mode = GET_MODE_WIDER_MODE (mode))
3734 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
3735 && double_mode == VOIDmode)
3739 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
3741 /* Assign register numbers to the globally defined register rtx.
3742 This must be done at runtime because the register number field
3743 is in a union and some compilers can't initialize unions. */
3745 pc_rtx = gen_rtx (PC, VOIDmode);
3746 cc0_rtx = gen_rtx (CC0, VOIDmode);
3747 stack_pointer_rtx = gen_rtx_raw_REG (Pmode, STACK_POINTER_REGNUM);
3748 frame_pointer_rtx = gen_rtx_raw_REG (Pmode, FRAME_POINTER_REGNUM);
/* On targets where these coincide with other registers they may already
   be set; only allocate when still null.  */
3749 if (hard_frame_pointer_rtx == 0)
3750 hard_frame_pointer_rtx = gen_rtx_raw_REG (Pmode,
3751 HARD_FRAME_POINTER_REGNUM);
3752 if (arg_pointer_rtx == 0)
3753 arg_pointer_rtx = gen_rtx_raw_REG (Pmode, ARG_POINTER_REGNUM);
3754 virtual_incoming_args_rtx =
3755 gen_rtx_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
3756 virtual_stack_vars_rtx =
3757 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
3758 virtual_stack_dynamic_rtx =
3759 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
3760 virtual_outgoing_args_rtx =
3761 gen_rtx_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
3762 virtual_cfa_rtx = gen_rtx_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
3764 /* These rtx must be roots if GC is enabled. */
3766 ggc_add_rtx_root (global_rtl, GR_MAX);
3768 #ifdef INIT_EXPANDERS
3769 /* This is to initialize save_machine_status and restore_machine_status before
3770 the first call to push_function_context_to. This is needed by the Chill
3771 front end which calls push_function_context_to before the first call to
3772 init_function_start. */
3776 /* Create the unique rtx's for certain rtx codes and operand values. */
3778 /* Don't use gen_rtx here since gen_rtx in this case
3779 tries to use these variables. */
/* Pre-build the shared CONST_INTs in [-MAX_SAVED_CONST_INT,
   MAX_SAVED_CONST_INT], biased by MAX_SAVED_CONST_INT for indexing.  */
3780 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
3781 const_int_rtx[i + MAX_SAVED_CONST_INT] =
3782 gen_rtx_raw_CONST_INT (VOIDmode, i);
3784 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
/* const_true_rtx shares a cached CONST_INT when STORE_FLAG_VALUE is in
   range; otherwise allocate one.  */
3786 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
3787 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
3788 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
3790 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
3792 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
3793 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
3794 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
3795 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
/* Build const_tiny_rtx[i][mode] for i = 0, 1, 2 across float, integer,
   and partial-integer modes.  */
3797 for (i = 0; i <= 2; i++)
3799 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3800 mode = GET_MODE_WIDER_MODE (mode))
3802 rtx tem = rtx_alloc (CONST_DOUBLE);
3803 union real_extract u;
3805 bzero ((char *) &u, sizeof u); /* Zero any holes in a structure. */
3806 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
3808 bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (tem), sizeof u);
/* cc0_rtx in the MEM slot marks "no memory slot assigned yet"; see
   CONST_DOUBLE handling elsewhere in the compiler.  */
3809 CONST_DOUBLE_MEM (tem) = cc0_rtx;
3810 PUT_MODE (tem, mode);
3812 const_tiny_rtx[i][(int) mode] = tem;
3815 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
3817 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3818 mode = GET_MODE_WIDER_MODE (mode))
3819 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3821 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
3823 mode = GET_MODE_WIDER_MODE (mode))
3824 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
/* CC modes share const0_rtx as their zero.  */
3827 for (mode = CCmode; mode < MAX_MACHINE_MODE; ++mode)
3828 if (GET_MODE_CLASS (mode) == MODE_CC)
3829 const_tiny_rtx[0][(int) mode] = const0_rtx;
3831 ggc_add_rtx_root (&const_tiny_rtx[0][0], sizeof(const_tiny_rtx)/sizeof(rtx));
3832 ggc_add_rtx_root (&const_true_rtx, 1);
3834 #ifdef RETURN_ADDRESS_POINTER_REGNUM
3835 return_address_pointer_rtx
3836 = gen_rtx_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
/* Target-dependent selection of the struct-value and static-chain
   registers, with incoming variants defaulting to the outgoing ones.  */
3840 struct_value_rtx = STRUCT_VALUE;
3842 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
3845 #ifdef STRUCT_VALUE_INCOMING
3846 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
3848 #ifdef STRUCT_VALUE_INCOMING_REGNUM
3849 struct_value_incoming_rtx
3850 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
3852 struct_value_incoming_rtx = struct_value_rtx;
3856 #ifdef STATIC_CHAIN_REGNUM
3857 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
3859 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3860 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
3861 static_chain_incoming_rtx
3862 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
3865 static_chain_incoming_rtx = static_chain_rtx;
3869 static_chain_rtx = STATIC_CHAIN;
3871 #ifdef STATIC_CHAIN_INCOMING
3872 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
3874 static_chain_incoming_rtx = static_chain_rtx;
3878 #ifdef PIC_OFFSET_TABLE_REGNUM
3879 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
/* Register the permanent rtx pointers as GC roots.  */
3882 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
3883 ggc_add_rtx_root (&struct_value_rtx, 1);
3884 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
3885 ggc_add_rtx_root (&static_chain_rtx, 1);
3886 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
3887 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
3890 /* Query and clear/restore no_line_numbers. This is used by the
3891 switch / case handling in stmt.c to give proper line numbers in
3892 warnings about unreachable code. */
3895 force_line_numbers ()
/* Save the current setting (returned to the caller for later restore
   via restore_line_number_status), then enable line numbers.  */
3897 int old = no_line_numbers;
3899 no_line_numbers = 0;
3901 force_next_line_note ();
3906 restore_line_number_status (old_value)
3909 no_line_numbers = old_value;