1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 88, 92-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* Middle-to-low level generation of rtx code and insns.
24 This file contains the functions `gen_rtx', `gen_reg_rtx'
25 and `gen_label_rtx' that are the usual ways of creating rtl
26 expressions for most purposes.
28 It also has the functions for creating insns and linking
29 them in the doubly-linked chain.
31 The patterns of the insns are created by machine-dependent
32 routines in insn-emit.c, which is generated automatically from
33 the machine description. These routines use `gen_rtx' to make
34 the individual rtx's of the pattern; what is machine dependent
35 is the kind of rtx's they make and what arguments they use. */
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
53 #include "basic-block.h"
56 /* Commonly used modes. */
58 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
59 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
60 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
61 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
64 /* This is *not* reset after each function. It gives each CODE_LABEL
65 in the entire compilation a unique label number. */
67 static int label_num = 1;
69 /* Highest label number in current function.
70 Zero means use the value of label_num instead.
71 This is nonzero only when belatedly compiling an inline function. */
73 static int last_label_num;
75 /* Value label_num had when set_new_first_and_last_label_number was called.
76 If label_num has not changed since then, last_label_num is valid. */
78 static int base_label_num;
80 /* Nonzero means do not generate NOTEs for source line numbers. */
82 static int no_line_numbers;
84 /* Commonly used rtx's, so that we only need space for one copy.
85 These are initialized once for the entire compilation.
86 All of these except perhaps the floating-point CONST_DOUBLEs
87 are unique; no other rtx-object will be equal to any of these. */
89 rtx global_rtl[GR_MAX];
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx. */
95 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
99 REAL_VALUE_TYPE dconst0;
100 REAL_VALUE_TYPE dconst1;
101 REAL_VALUE_TYPE dconst2;
102 REAL_VALUE_TYPE dconstm1;
104 /* All references to the following fixed hard registers go through
105 these unique rtl objects. On machines where the frame-pointer and
106 arg-pointer are the same register, they use the same unique object.
108 After register allocation, other rtl objects which used to be pseudo-regs
109 may be clobbered to refer to the frame-pointer register.
110 But references that were originally to the frame-pointer can be
111 distinguished from the others because they contain frame_pointer_rtx.
113 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
114 tricky: until register elimination has taken place hard_frame_pointer_rtx
115 should be used if it is being set, and frame_pointer_rtx otherwise. After
116 register elimination hard_frame_pointer_rtx should always be used.
117 On machines where the two registers are same (most) then these are the
120 In an inline procedure, the stack and frame pointer rtxs may not be
121 used for anything else. */
122 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
123 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
124 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
125 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
126 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
128 /* This is used to implement __builtin_return_address for some machines.
129 See for instance the MIPS port. */
130 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
132 /* We make one copy of (const_int C) where C is in
133 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
134 to save space during the compilation and simplify comparisons of
137 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
139 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
140 shortly thrown away. We use two mechanisms to prevent this waste:
142 For sizes up to 5 elements, we keep a SEQUENCE and its associated
143 rtvec for use by gen_sequence. One entry for each size is
144 sufficient because most cases are calls to gen_sequence followed by
145 immediately emitting the SEQUENCE. Reuse is safe since emitting a
146 sequence is destructive on the insn in it anyway and hence can't be
149 We do not bother to save this cached data over nested function calls.
150 Instead, we just reinitialize them. */
152 #define SEQUENCE_RESULT_SIZE 5
154 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
156 /* During RTL generation, we also keep a list of free INSN rtl codes. */
157 static rtx free_insn;
159 #define first_insn (cfun->emit->x_first_insn)
160 #define last_insn (cfun->emit->x_last_insn)
161 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
162 #define last_linenum (cfun->emit->x_last_linenum)
163 #define last_filename (cfun->emit->x_last_filename)
164 #define first_label_num (cfun->emit->x_first_label_num)
166 /* This is where the pointer to the obstack being used for RTL is stored. */
167 extern struct obstack *rtl_obstack;
169 static rtx make_jump_insn_raw PARAMS ((rtx));
170 static rtx make_call_insn_raw PARAMS ((rtx));
171 static rtx find_line_note PARAMS ((rtx));
172 static void mark_sequence_stack PARAMS ((struct sequence_stack *));
174 /* There are some RTL codes that require special attention; the generation
175 functions do the raw handling. If you add to this list, modify
176 special_rtx in gengenrtl.c as well. */
/* Return a CONST_INT rtx whose value is ARG.  Small values in
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] share the single
   preallocated rtx in const_int_rtx[], so pointer equality works for
   them.  NOTE(review): the listing is elided here; the declaration of
   ARG and the enclosing braces are not visible.  */
179 gen_rtx_CONST_INT (mode, arg)
180 enum machine_mode mode;
/* Shared-copy fast path for small constants.  */
183 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
184 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
/* When STORE_FLAG_VALUE is not +-1 it gets its own shared rtx too.  */
186 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
187 if (const_true_rtx && arg == STORE_FLAG_VALUE)
188 return const_true_rtx;
/* Otherwise allocate a fresh CONST_INT.  */
191 return gen_rtx_raw_CONST_INT (mode, arg);
194 /* CONST_DOUBLEs needs special handling because its length is known
/* Build a CONST_DOUBLE holding ARG1/ARG2 (and expression operand ARG0)
   in MODE.  NOTE(review): listing elided — the remaining field
   assignments, the mode store, and the trailing zero-fill body of the
   loop are not visible here.  */
197 gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
198 enum machine_mode mode;
200 HOST_WIDE_INT arg1, arg2;
202 rtx r = rtx_alloc (CONST_DOUBLE);
/* Operand 1 is the chain field; start it out empty.  */
207 X0EXP (r, 1) = NULL_RTX;
/* Clear any additional integer fields beyond the first few slots.  */
211 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
/* Return a REG rtx for hard or pseudo register REGNO in MODE.  For the
   well-known pointer registers in Pmode (frame, hard-frame, arg,
   return-address, stack), return the unique shared rtx instead of a
   fresh one, except during reload.  */
218 gen_rtx_REG (mode, regno)
219 enum machine_mode mode;
222 /* In case the MD file explicitly references the frame pointer, have
223 all such references point to the same frame pointer. This is
224 used during frame pointer elimination to distinguish the explicit
225 references to these registers from pseudos that happened to be
228 If we have eliminated the frame pointer or arg pointer, we will
229 be using it as a normal register, for example as a spill
230 register. In such cases, we might be accessing it in a mode that
231 is not Pmode and therefore cannot use the pre-allocated rtx.
233 Also don't do this when we are making new REGs in reload, since
234 we don't want to get confused with the real pointers. */
236 if (mode == Pmode && !reload_in_progress)
238 if (regno == FRAME_POINTER_REGNUM)
239 return frame_pointer_rtx;
240 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
241 if (regno == HARD_FRAME_POINTER_REGNUM)
242 return hard_frame_pointer_rtx;
244 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
245 if (regno == ARG_POINTER_REGNUM)
246 return arg_pointer_rtx;
248 #ifdef RETURN_ADDRESS_POINTER_REGNUM
249 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
250 return return_address_pointer_rtx;
252 if (regno == STACK_POINTER_REGNUM)
253 return stack_pointer_rtx;
/* Not one of the shared pointer registers: allocate a fresh REG.  */
256 return gen_rtx_raw_REG (mode, regno);
/* Return a MEM rtx in MODE referencing ADDR, with its alias set
   explicitly initialized to 0 (rtx_alloc does not clear that field).  */
260 gen_rtx_MEM (mode, addr)
261 enum machine_mode mode;
264 rtx rt = gen_rtx_raw_MEM (mode, addr);
266 /* This field is not cleared by the mere allocation of the rtx, so
268 MEM_ALIAS_SET (rt) = 0;
273 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
275 ** This routine generates an RTX of the size specified by
276 ** <code>, which is an RTX code. The RTX structure is initialized
277 ** from the arguments <element1> through <elementn>, which are
278 ** interpreted according to the specific RTX type's format. The
279 ** special machine mode associated with the rtx (if any) is specified
282 ** gen_rtx can be invoked in a way which resembles the lisp-like
283 ** rtx it will generate. For example, the following rtx structure:
285 ** (plus:QI (mem:QI (reg:SI 1))
286 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
288 ** ...would be generated by the following C code:
290 ** gen_rtx (PLUS, QImode,
291 ** gen_rtx (MEM, QImode,
292 ** gen_rtx (REG, SImode, 1)),
293 ** gen_rtx (MEM, QImode,
294 ** gen_rtx (PLUS, SImode,
295 ** gen_rtx (REG, SImode, 2),
296 ** gen_rtx (REG, SImode, 3)))),
/* NOTE(review): the listing is elided below — the switch on CODE, its
   default label, the va_start/va_end calls and closing braces are not
   visible.  The special codes (CONST_INT, CONST_DOUBLE, REG, MEM) are
   dispatched to their dedicated constructors; everything else is
   filled field-by-field from the rtx format string.  */
301 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
303 #ifndef ANSI_PROTOTYPES
305 enum machine_mode mode;
308 register int i; /* Array indices... */
309 register const char *fmt; /* Current rtx's format... */
310 register rtx rt_val; /* RTX to return to caller... */
/* Pre-ANSI hosts must pull CODE and MODE out of the varargs list.  */
314 #ifndef ANSI_PROTOTYPES
315 code = va_arg (p, enum rtx_code);
316 mode = va_arg (p, enum machine_mode);
/* Special-cased codes delegate to their raw-handling constructors.  */
322 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
327 rtx arg0 = va_arg (p, rtx);
328 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
329 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
330 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
335 rt_val = gen_rtx_REG (mode, va_arg (p, int));
339 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
/* Generic case: allocate and populate each operand per the format.  */
343 rt_val = rtx_alloc (code); /* Allocate the storage space. */
344 rt_val->mode = mode; /* Store the machine mode... */
346 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
347 for (i = 0; i < GET_RTX_LENGTH (code); i++)
351 case '0': /* Unused field. */
354 case 'i': /* An integer? */
355 XINT (rt_val, i) = va_arg (p, int);
358 case 'w': /* A wide integer? */
359 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
362 case 's': /* A string? */
363 XSTR (rt_val, i) = va_arg (p, char *);
366 case 'e': /* An expression? */
367 case 'u': /* An insn? Same except when printing. */
368 XEXP (rt_val, i) = va_arg (p, rtx);
371 case 'E': /* An RTX vector? */
372 XVEC (rt_val, i) = va_arg (p, rtvec);
375 case 'b': /* A bitmap? */
376 XBITMAP (rt_val, i) = va_arg (p, bitmap);
379 case 't': /* A tree? */
380 XTREE (rt_val, i) = va_arg (p, tree);
394 /* gen_rtvec (n, [rt1, ..., rtn])
396 ** This routine creates an rtvec and stores within it the
397 ** pointers to rtx's which are its arguments.
/* Varargs front end: gathers the N rtx arguments into a stack buffer
   and hands them to gen_rtvec_v.  Returns NULL_RTVEC for N == 0.
   NOTE(review): listing elided — the pre-ANSI va_arg extraction of N
   and the va_start/va_end lines are not visible.  */
402 gen_rtvec VPARAMS ((int n, ...))
404 #ifndef ANSI_PROTOTYPES
413 #ifndef ANSI_PROTOTYPES
418 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
/* Collect the varargs into a temporary array on the stack.  */
420 vector = (rtx *) alloca (n * sizeof (rtx));
422 for (i = 0; i < n; i++)
423 vector[i] = va_arg (p, rtx);
426 return gen_rtvec_v (n, vector);
/* Build an rtvec of length N whose elements are copied from ARGP.
   Returns NULL_RTVEC when N is 0 (no empty vectors are allocated).  */
430 gen_rtvec_v (n, argp)
435 register rtvec rt_val;
438 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
440 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
442 for (i = 0; i < n; i++)
443 rt_val->elem[i] = *argp++;
449 /* Generate a REG rtx for a new pseudo register of mode MODE.
450 This pseudo is assigned the next sequential register number. */
/* NOTE(review): listing elided — the function header line, the
   no_new_pseudos sanity check, and the final return of VAL are not
   visible here.  */
454 enum machine_mode mode;
456 struct function *f = cfun;
459 /* Don't let anything called after initial flow analysis create new
/* Complex modes are represented as a CONCAT of two part-mode pseudos
   rather than one wide pseudo.  */
464 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
465 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
467 /* For complex modes, don't make a single pseudo.
468 Instead, make a CONCAT of two pseudos.
469 This allows noncontiguous allocation of the real and imaginary parts,
470 which makes much better code. Besides, allocating DCmode
471 pseudos overstrains reload on some machines like the 386. */
472 rtx realpart, imagpart;
473 int size = GET_MODE_UNIT_SIZE (mode);
474 enum machine_mode partmode
475 = mode_for_size (size * BITS_PER_UNIT,
476 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
477 ? MODE_FLOAT : MODE_INT),
480 realpart = gen_reg_rtx (partmode);
481 imagpart = gen_reg_rtx (partmode);
482 return gen_rtx_CONCAT (mode, realpart, imagpart);
485 /* Make sure regno_pointer_flag and regno_reg_rtx are large
486 enough to have an element for this pseudo reg number. */
/* Doubling growth policy for the three per-pseudo side tables; new
   halves are zero-filled.  */
488 if (reg_rtx_no == f->emit->regno_pointer_flag_length)
490 int old_size = f->emit->regno_pointer_flag_length;
493 new = xrealloc (f->emit->regno_pointer_flag, old_size * 2);
494 memset (new + old_size, 0, old_size);
495 f->emit->regno_pointer_flag = new;
497 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
498 memset (new + old_size, 0, old_size);
499 f->emit->regno_pointer_align = new;
501 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
502 old_size * 2 * sizeof (rtx));
503 memset (new1 + old_size, 0, old_size * sizeof (rtx));
504 regno_reg_rtx = new1;
506 f->emit->regno_pointer_flag_length = old_size * 2;
/* Record the new pseudo and advance the register counter.  */
509 val = gen_rtx_raw_REG (mode, reg_rtx_no);
510 regno_reg_rtx[reg_rtx_no++] = val;
514 /* Identify REG (which may be a CONCAT) as a user register. */
/* Sets REG_USERVAR_P on REG, or on both halves when REG is the CONCAT
   form used for complex-mode pseudos.  NOTE(review): listing elided —
   the function header and any else/abort arm are not visible.  */
520 if (GET_CODE (reg) == CONCAT)
522 REG_USERVAR_P (XEXP (reg, 0)) = 1;
523 REG_USERVAR_P (XEXP (reg, 1)) = 1;
525 else if (GET_CODE (reg) == REG)
526 REG_USERVAR_P (reg) = 1;
531 /* Identify REG as a probable pointer register and show its alignment
532 as ALIGN, if nonzero. */
/* First marking records ALIGN outright; a later, smaller ALIGN lowers
   the recorded alignment (we can only weaken an alignment claim).  */
535 mark_reg_pointer (reg, align)
539 if (! REGNO_POINTER_FLAG (REGNO (reg)))
541 REGNO_POINTER_FLAG (REGNO (reg)) = 1;
544 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
546 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
547 /* We can no-longer be sure just how aligned this pointer is */
548 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
551 /* Return 1 plus largest pseudo reg number used in the current function. */
559 /* Return 1 + the largest label number used so far in the current function. */
/* NOTE(review): listing elided — the function header and the fallback
   return (presumably label_num) when last_label_num is stale are not
   visible.  last_label_num is only trusted while label_num still
   equals base_label_num (see the declarations near the top of file).  */
564 if (last_label_num && label_num == base_label_num)
565 return last_label_num;
569 /* Return first label number used in this function (if any were used). */
/* Simple accessor for the per-function x_first_label_num field.  */
572 get_first_label_num ()
574 return first_label_num;
577 /* Return a value representing some low-order bits of X, where the number
578 of low-order bits is given by MODE. Note that no conversion is done
579 between floating-point and fixed-point values, rather, the bit
580 representation is returned.
582 This function handles the cases in common between gen_lowpart, below,
583 and two variants in cse.c and combine.c. These are the cases that can
584 be safely handled at all points in the compilation.
586 If this is not a case we can handle, return 0. */
/* NOTE(review): the listing is heavily elided throughout this
   function — several return statements, #endif lines, else arms, and
   the closing brace are not visible.  The comments below annotate only
   what the visible lines establish.  */
589 gen_lowpart_common (mode, x)
590 enum machine_mode mode;
/* Trivial case: X already has the requested mode.  */
595 if (GET_MODE (x) == mode)
598 /* MODE must occupy no more words than the mode of X. */
599 if (GET_MODE (x) != VOIDmode
600 && ((GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
601 > ((GET_MODE_SIZE (GET_MODE (x)) + (UNITS_PER_WORD - 1))
/* On big-endian-word targets the low part lives in the later words.  */
605 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
606 word = ((GET_MODE_SIZE (GET_MODE (x))
607 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
/* Case: X is a sign/zero extension and we want an integer low part.  */
610 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
611 && (GET_MODE_CLASS (mode) == MODE_INT
612 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
614 /* If we are getting the low-order part of something that has been
615 sign- or zero-extended, we can either just use the object being
616 extended or make a narrower extension. If we want an even smaller
617 piece than the size of the object being extended, call ourselves
620 This case is used mostly by combine and cse. */
622 if (GET_MODE (XEXP (x, 0)) == mode)
624 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
625 return gen_lowpart_common (mode, XEXP (x, 0));
626 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
627 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
/* Case: X is itself a SUBREG — re-target the subreg.  */
629 else if (GET_CODE (x) == SUBREG
630 && (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
631 || GET_MODE_SIZE (mode) == GET_MODE_UNIT_SIZE (GET_MODE (x))))
632 return (GET_MODE (SUBREG_REG (x)) == mode && SUBREG_WORD (x) == 0
634 : gen_rtx_SUBREG (mode, SUBREG_REG (x), SUBREG_WORD (x) + word));
/* Case: X is a REG — produce a narrower REG or a SUBREG of it.  */
635 else if (GET_CODE (x) == REG)
637 /* Let the backend decide how many registers to skip. This is needed
638 in particular for Sparc64 where fp regs are smaller than a word. */
639 /* ??? Note that subregs are now ambiguous, in that those against
640 pseudos are sized by the Word Size, while those against hard
641 regs are sized by the underlying register size. Better would be
642 to always interpret the subreg offset parameter as bytes or bits. */
644 if (WORDS_BIG_ENDIAN && REGNO (x) < FIRST_PSEUDO_REGISTER)
645 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
646 - HARD_REGNO_NREGS (REGNO (x), mode));
648 /* If the register is not valid for MODE, return 0. If we don't
649 do this, there is no way to fix up the resulting REG later.
650 But we do do this if the current REG is not valid for its
651 mode. This latter is a kludge, but is required due to the
652 way that parameters are passed on some machines, most
654 if (REGNO (x) < FIRST_PSEUDO_REGISTER
655 && ! HARD_REGNO_MODE_OK (REGNO (x) + word, mode)
656 && HARD_REGNO_MODE_OK (REGNO (x), GET_MODE (x)))
658 else if (REGNO (x) < FIRST_PSEUDO_REGISTER
659 /* integrate.c can't handle parts of a return value register. */
660 && (! REG_FUNCTION_VALUE_P (x)
661 || ! rtx_equal_function_value_matters)
662 #ifdef CLASS_CANNOT_CHANGE_SIZE
663 && ! (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (x))
664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_INT
665 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_FLOAT
666 && (TEST_HARD_REG_BIT
667 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
670 /* We want to keep the stack, frame, and arg pointers
672 && x != frame_pointer_rtx
673 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
674 && x != arg_pointer_rtx
676 && x != stack_pointer_rtx)
677 return gen_rtx_REG (mode, REGNO (x) + word);
679 return gen_rtx_SUBREG (mode, x, word);
681 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
682 from the low-order part of the constant. */
683 else if ((GET_MODE_CLASS (mode) == MODE_INT
684 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
685 && GET_MODE (x) == VOIDmode
686 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
688 /* If MODE is twice the host word size, X is already the desired
689 representation. Otherwise, if MODE is wider than a word, we can't
690 do this. If MODE is exactly a word, return just one CONST_INT.
691 If MODE is smaller than a word, clear the bits that don't belong
692 in our mode, unless they and our sign bit are all one. So we get
693 either a reasonable negative value or a reasonable unsigned value
696 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
698 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
700 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
701 return (GET_CODE (x) == CONST_INT ? x
702 : GEN_INT (CONST_DOUBLE_LOW (x)));
705 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
706 int width = GET_MODE_BITSIZE (mode);
707 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
708 : CONST_DOUBLE_LOW (x));
710 /* Sign extend to HOST_WIDE_INT. */
711 val = val << (HOST_BITS_PER_WIDE_INT - width) >> (HOST_BITS_PER_WIDE_INT - width);
713 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
718 /* If X is an integral constant but we want it in floating-point, it
719 must be the case that we have a union of an integer and a floating-point
720 value. If the machine-parameters allow it, simulate that union here
721 and return the result. The two-word and single-word cases are
724 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
725 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
726 || flag_pretend_float)
727 && GET_MODE_CLASS (mode) == MODE_FLOAT
728 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
729 && GET_CODE (x) == CONST_INT
730 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
731 #ifdef REAL_ARITHMETIC
737 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
738 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
/* Without REAL_ARITHMETIC: punning via a host int/float union.  */
742 union {HOST_WIDE_INT i; float d; } u;
745 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
/* Two-word float from a one- or two-word integer constant.  */
748 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
749 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
750 || flag_pretend_float)
751 && GET_MODE_CLASS (mode) == MODE_FLOAT
752 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
753 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
754 && GET_MODE (x) == VOIDmode
755 && (sizeof (double) * HOST_BITS_PER_CHAR
756 == 2 * HOST_BITS_PER_WIDE_INT))
757 #ifdef REAL_ARITHMETIC
761 HOST_WIDE_INT low, high;
763 if (GET_CODE (x) == CONST_INT)
764 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
766 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
768 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
770 if (WORDS_BIG_ENDIAN)
771 i[0] = high, i[1] = low;
773 i[0] = low, i[1] = high;
775 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
776 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
/* Without REAL_ARITHMETIC: punning via a host int[2]/double union.  */
780 union {HOST_WIDE_INT i[2]; double d; } u;
781 HOST_WIDE_INT low, high;
783 if (GET_CODE (x) == CONST_INT)
784 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
786 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
788 #ifdef HOST_WORDS_BIG_ENDIAN
789 u.i[0] = high, u.i[1] = low;
791 u.i[0] = low, u.i[1] = high;
794 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
798 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
799 same as sizeof (double) or when sizeof (float) is larger than the
800 size of a word on the target machine. */
801 #ifdef REAL_ARITHMETIC
802 else if (mode == SFmode && GET_CODE (x) == CONST_INT)
808 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
809 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
811 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
812 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
813 || flag_pretend_float)
814 && GET_MODE_CLASS (mode) == MODE_FLOAT
815 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
816 && GET_CODE (x) == CONST_INT
817 && (sizeof (double) * HOST_BITS_PER_CHAR
818 == HOST_BITS_PER_WIDE_INT))
824 r = REAL_VALUE_FROM_TARGET_DOUBLE (&i);
825 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
829 /* Similarly, if this is converting a floating-point value into a
830 single-word integer. Only do this is the host and target parameters are
833 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
834 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
835 || flag_pretend_float)
836 && (GET_MODE_CLASS (mode) == MODE_INT
837 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
838 && GET_CODE (x) == CONST_DOUBLE
839 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
840 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
841 return operand_subword (x, word, 0, GET_MODE (x));
843 /* Similarly, if this is converting a floating-point value into a
844 two-word integer, we can do this one word at a time and make an
845 integer. Only do this is the host and target parameters are
848 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
849 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
850 || flag_pretend_float)
851 && (GET_MODE_CLASS (mode) == MODE_INT
852 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
853 && GET_CODE (x) == CONST_DOUBLE
854 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
855 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
858 = operand_subword (x, word + WORDS_BIG_ENDIAN, 0, GET_MODE (x));
860 = operand_subword (x, word + ! WORDS_BIG_ENDIAN, 0, GET_MODE (x));
862 if (lowpart && GET_CODE (lowpart) == CONST_INT
863 && highpart && GET_CODE (highpart) == CONST_INT)
864 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
867 /* Otherwise, we can't do this. */
871 /* Return the real part (which has mode MODE) of a complex value X.
872 This always comes at the low address in memory. */
/* For a CONCAT, the real part is operand 0; otherwise the low-address
   half is reached via gen_highpart on word-big-endian targets and
   gen_lowpart elsewhere.  The narrow-mode hard-register case is
   rejected outright (cannot be expressed as a subreg).  */
875 gen_realpart (mode, x)
876 enum machine_mode mode;
879 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
881 else if (WORDS_BIG_ENDIAN
882 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
884 && REGNO (x) < FIRST_PSEUDO_REGISTER)
885 fatal ("Unable to access real part of complex value in a hard register on this target");
886 else if (WORDS_BIG_ENDIAN)
887 return gen_highpart (mode, x);
889 return gen_lowpart (mode, x);
892 /* Return the imaginary part (which has mode MODE) of a complex value X.
893 This always comes at the high address in memory. */
/* Mirror of gen_realpart: operand 1 of a CONCAT, else the high-address
   half — gen_lowpart when words are big-endian, gen_highpart
   otherwise, with the same narrow hard-register case rejected.  */
896 gen_imagpart (mode, x)
897 enum machine_mode mode;
900 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
902 else if (WORDS_BIG_ENDIAN)
903 return gen_lowpart (mode, x);
904 else if (!WORDS_BIG_ENDIAN
905 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
907 && REGNO (x) < FIRST_PSEUDO_REGISTER)
908 fatal ("Unable to access imaginary part of complex value in a hard register on this target")
910 return gen_highpart (mode, x);
913 /* Return 1 iff X, assumed to be a SUBREG,
914 refers to the real part of the complex value in its containing reg.
915 Complex values are always stored with the real part in the first word,
916 regardless of WORDS_BIG_ENDIAN. */
/* A word offset inside the first unit (real half) of the inner reg's
   complex mode means "real part".  Non-SUBREG input aborts (elided).  */
919 subreg_realpart_p (x)
922 if (GET_CODE (x) != SUBREG)
925 return SUBREG_WORD (x) * UNITS_PER_WORD < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x)));
928 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
929 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
930 least-significant part of X.
931 MODE specifies how big a part of X to return;
932 it usually should not be larger than a word.
933 If X is a MEM whose address is a QUEUED, the value may be so also. */
/* Thin wrapper over gen_lowpart_common that also handles the cases
   common code cannot: a hard REG invalid in MODE (copied to a pseudo
   first), a MEM (address adjusted by an endian-aware byte offset), and
   ADDRESSOF (forced into a register).  NOTE(review): listing elided —
   the returns of RESULT and the final abort arm are not visible.  */
936 gen_lowpart (mode, x)
937 enum machine_mode mode;
940 rtx result = gen_lowpart_common (mode, x);
944 else if (GET_CODE (x) == REG)
946 /* Must be a hard reg that's not valid in MODE. */
947 result = gen_lowpart_common (mode, copy_to_reg (x));
952 else if (GET_CODE (x) == MEM)
954 /* The only additional case we can do is MEM. */
955 register int offset = 0;
956 if (WORDS_BIG_ENDIAN)
957 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
958 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
960 if (BYTES_BIG_ENDIAN)
961 /* Adjust the address so that the address-after-the-data
963 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
964 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
966 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
968 else if (GET_CODE (x) == ADDRESSOF)
969 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
974 /* Like `gen_lowpart', but refer to the most significant part.
975 This is used to access the imaginary part of a complex number. */
/* NOTE(review): listing elided — the abort calls, several returns and
   closing braces are not visible.  Handles CONST_DOUBLE, CONST_INT,
   MEM (offset to the high word), SUBREG (word 0 only), and REG.  */
978 gen_highpart (mode, x)
979 enum machine_mode mode;
982 /* This case loses if X is a subreg. To catch bugs early,
983 complain if an invalid MODE is used even in other cases. */
984 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
985 && GET_MODE_SIZE (mode) != GET_MODE_UNIT_SIZE (GET_MODE (x)))
/* Integer CONST_DOUBLE: the high half is simply CONST_DOUBLE_HIGH.  */
987 if (GET_CODE (x) == CONST_DOUBLE
988 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
989 && GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT
992 return GEN_INT (CONST_DOUBLE_HIGH (x) & GET_MODE_MASK (mode));
993 else if (GET_CODE (x) == CONST_INT)
995 if (HOST_BITS_PER_WIDE_INT <= BITS_PER_WORD)
997 return GEN_INT (INTVAL (x) >> (HOST_BITS_PER_WIDE_INT - BITS_PER_WORD));
999 else if (GET_CODE (x) == MEM)
1001 register int offset = 0;
/* Mirror of gen_lowpart's MEM case, biased toward the high word.  */
1002 if (! WORDS_BIG_ENDIAN)
1003 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1004 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1006 if (! BYTES_BIG_ENDIAN
1007 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
1008 offset -= (GET_MODE_SIZE (mode)
1009 - MIN (UNITS_PER_WORD,
1010 GET_MODE_SIZE (GET_MODE (x))))
1012 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1014 else if (GET_CODE (x) == SUBREG)
1016 /* The only time this should occur is when we are looking at a
1017 multi-word item with a SUBREG whose mode is the same as that of the
1018 item. It isn't clear what we would do if it wasn't. */
1019 if (SUBREG_WORD (x) != 0)
1021 return gen_highpart (mode, SUBREG_REG (x));
1023 else if (GET_CODE (x) == REG)
1027 /* Let the backend decide how many registers to skip. This is needed
1028 in particular for sparc64 where fp regs are smaller than a word. */
1029 /* ??? Note that subregs are now ambiguous, in that those against
1030 pseudos are sized by the word size, while those against hard
1031 regs are sized by the underlying register size. Better would be
1032 to always interpret the subreg offset parameter as bytes or bits. */
1034 if (WORDS_BIG_ENDIAN)
1036 else if (REGNO (x) < FIRST_PSEUDO_REGISTER)
1037 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
1038 - HARD_REGNO_NREGS (REGNO (x), mode));
1040 word = ((GET_MODE_SIZE (GET_MODE (x))
1041 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
/* Hard regs (other than the special pointer regs) can be renumbered
   directly; everything else becomes a SUBREG.  */
1044 if (REGNO (x) < FIRST_PSEUDO_REGISTER
1045 /* integrate.c can't handle parts of a return value register. */
1046 && (! REG_FUNCTION_VALUE_P (x)
1047 || ! rtx_equal_function_value_matters)
1048 /* We want to keep the stack, frame, and arg pointers special. */
1049 && x != frame_pointer_rtx
1050 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1051 && x != arg_pointer_rtx
1053 && x != stack_pointer_rtx)
1054 return gen_rtx_REG (mode, REGNO (x) + word);
1056 return gen_rtx_SUBREG (mode, x, word);
1062 /* Return 1 iff X, assumed to be a SUBREG,
1063 refers to the least significant part of its containing reg.
1064 If X is not a SUBREG, always return 1 (it is its own low part!). */
/* On word-big-endian targets the low part of a multi-word value sits
   at the last word offset; otherwise it is word 0.  NOTE(review):
   listing elided — the returns for the non-SUBREG and VOIDmode cases
   are not visible.  */
1067 subreg_lowpart_p (x)
1070 if (GET_CODE (x) != SUBREG)
1072 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1075 if (WORDS_BIG_ENDIAN
1076 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD)
1077 return (SUBREG_WORD (x)
1078 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1079 - MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD))
1082 return SUBREG_WORD (x) == 0;
1085 /* Return subword I of operand OP.
1086 The word number, I, is interpreted as the word number starting at the
1087 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1088 otherwise it is the high-order word.
1090 If we cannot extract the required word, we return zero. Otherwise, an
1091 rtx corresponding to the requested word will be returned.
1093 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1094 reload has completed, a valid address will always be returned. After
1095 reload, if a valid address cannot be returned, we return zero.
1097 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1098 it is the responsibility of the caller.
1100 MODE is the mode of OP in case it is a CONST_INT. */
/* NOTE(review): this excerpt elides many interior lines (declarations,
   `return 0;` statements, braces, #else/#endif pairs); the statements
   below are preserved verbatim.  */
1103 operand_subword (op, i, validate_address, mode)
1106 int validate_address;
1107 enum machine_mode mode;
/* How many target words fit in one host-wide integer.  */
1110 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1112 if (mode == VOIDmode)
1113 mode = GET_MODE (op);
1115 if (mode == VOIDmode)
1118 /* If OP is narrower than a word, fail. */
1120 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1123 /* If we want a word outside OP, return zero. */
1125 && (i + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1128 /* If OP is already an integer word, return it. */
1129 if (GET_MODE_CLASS (mode) == MODE_INT
1130 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1133 /* If OP is a REG or SUBREG, we can handle it very simply. */
1134 if (GET_CODE (op) == REG)
1136 /* ??? There is a potential problem with this code. It does not
1137 properly handle extractions of a subword from a hard register
1138 that is larger than word_mode. Presumably the check for
1139 HARD_REGNO_MODE_OK catches most of these cases. */
1141 /* If OP is a hard register, but OP + I is not a hard register,
1142 then extracting a subword is impossible.
1144 For example, consider if OP is the last hard register and it is
1145 larger than word_mode. If we wanted word N (for N > 0) because a
1146 part of that hard register was known to contain a useful value,
1147 then OP + I would refer to a pseudo, not the hard register we
1149 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1150 && REGNO (op) + i >= FIRST_PSEUDO_REGISTER)
1153 /* If the register is not valid for MODE, return 0. Note we
1154 have to check both OP and OP + I since they may refer to
1155 different parts of the register file.
1157 Consider if OP refers to the last 96bit FP register and we want
1158 subword 3 because that subword is known to contain a value we
1160 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1161 && (! HARD_REGNO_MODE_OK (REGNO (op), word_mode)
1162 || ! HARD_REGNO_MODE_OK (REGNO (op) + i, word_mode)))
/* Pseudos, function-value registers during RTL generation, and the
   special pointer registers must stay as SUBREGs.  */
1164 else if (REGNO (op) >= FIRST_PSEUDO_REGISTER
1165 || (REG_FUNCTION_VALUE_P (op)
1166 && rtx_equal_function_value_matters)
1167 /* We want to keep the stack, frame, and arg pointers
1169 || op == frame_pointer_rtx
1170 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1171 || op == arg_pointer_rtx
1173 || op == stack_pointer_rtx)
1174 return gen_rtx_SUBREG (word_mode, op, i);
/* Other hard registers: word I is simply register REGNO+I.  */
1176 return gen_rtx_REG (word_mode, REGNO (op) + i);
1178 else if (GET_CODE (op) == SUBREG)
1179 return gen_rtx_SUBREG (word_mode, SUBREG_REG (op), i + SUBREG_WORD (op));
/* A CONCAT (e.g. a complex value): recurse into the half holding word I.  */
1180 else if (GET_CODE (op) == CONCAT)
1182 int partwords = GET_MODE_UNIT_SIZE (GET_MODE (op)) / UNITS_PER_WORD;
1184 return operand_subword (XEXP (op, 0), i, validate_address, mode);
1185 return operand_subword (XEXP (op, 1), i - partwords,
1186 validate_address, mode);
1189 /* Form a new MEM at the requested address. */
1190 if (GET_CODE (op) == MEM)
1192 rtx addr = plus_constant (XEXP (op, 0), i * UNITS_PER_WORD);
1195 if (validate_address)
1197 if (reload_completed)
/* After reload we may not create new insns, so an invalid address
   means failure rather than fixup.  */
1199 if (! strict_memory_address_p (word_mode, addr))
1203 addr = memory_address (word_mode, addr);
1206 new = gen_rtx_MEM (word_mode, addr);
1208 MEM_COPY_ATTRIBUTES (new, op);
1209 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1210 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op);
1215 /* The only remaining cases are when OP is a constant. If the host and
1216 target floating formats are the same, handling two-word floating
1217 constants is easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1218 are defined as returning one or two 32 bit values, respectively,
1219 and not values of BITS_PER_WORD bits. */
1220 #ifdef REAL_ARITHMETIC
1221 /* The output is some bits, the width of the target machine's word.
1222 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1224 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1225 && GET_MODE_CLASS (mode) == MODE_FLOAT
1226 && GET_MODE_BITSIZE (mode) == 64
1227 && GET_CODE (op) == CONST_DOUBLE)
1232 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1233 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1235 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1236 which the words are written depends on the word endianness.
1237 ??? This is a potential portability problem and should
1238 be fixed at some point.
1240 We must exercise caution with the sign bit. By definition there
1241 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1242 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1243 So we explicitly mask and sign-extend as necessary. */
1244 if (BITS_PER_WORD == 32)
/* Mask to 32 bits, then sign-extend via the xor/subtract trick.  */
1247 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1248 return GEN_INT (val);
1250 #if HOST_BITS_PER_WIDE_INT >= 64
1251 else if (BITS_PER_WORD >= 64 && i == 0)
1253 val = k[! WORDS_BIG_ENDIAN];
1254 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1255 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1256 return GEN_INT (val);
1259 else if (BITS_PER_WORD == 16)
1262 if ((i & 1) == !WORDS_BIG_ENDIAN)
1265 return GEN_INT (val);
/* Extended (> 64-bit) floats, e.g. long double.  */
1270 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1271 && GET_MODE_CLASS (mode) == MODE_FLOAT
1272 && GET_MODE_BITSIZE (mode) > 64
1273 && GET_CODE (op) == CONST_DOUBLE)
1278 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1279 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1281 if (BITS_PER_WORD == 32)
1284 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 return GEN_INT (val);
1287 #if HOST_BITS_PER_WIDE_INT >= 64
1288 else if (BITS_PER_WORD >= 64 && i <= 1)
1290 val = k[i*2 + ! WORDS_BIG_ENDIAN];
1291 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1292 val |= (HOST_WIDE_INT) k[i*2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1293 return GEN_INT (val);
1299 #else /* no REAL_ARITHMETIC */
1300 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1301 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1302 || flag_pretend_float)
1303 && GET_MODE_CLASS (mode) == MODE_FLOAT
1304 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1305 && GET_CODE (op) == CONST_DOUBLE)
1307 /* The constant is stored in the host's word-ordering,
1308 but we want to access it in the target's word-ordering. Some
1309 compilers don't like a conditional inside macro args, so we have two
1310 copies of the return. */
1311 #ifdef HOST_WORDS_BIG_ENDIAN
1312 return GEN_INT (i == WORDS_BIG_ENDIAN
1313 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1315 return GEN_INT (i != WORDS_BIG_ENDIAN
1316 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1319 #endif /* no REAL_ARITHMETIC */
1321 /* Single word float is a little harder, since single- and double-word
1322 values often do not have the same high-order bits. We have already
1323 verified that we want the only defined word of the single-word value. */
1324 #ifdef REAL_ARITHMETIC
1325 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1326 && GET_MODE_BITSIZE (mode) == 32
1327 && GET_CODE (op) == CONST_DOUBLE)
1332 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1333 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1335 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1337 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1339 if (BITS_PER_WORD == 16)
1341 if ((i & 1) == !WORDS_BIG_ENDIAN)
1346 return GEN_INT (val);
1349 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1350 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1351 || flag_pretend_float)
1352 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1353 && GET_MODE_CLASS (mode) == MODE_FLOAT
1354 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1355 && GET_CODE (op) == CONST_DOUBLE)
/* Type-pun through a union: host float format == target format here.  */
1358 union {float f; HOST_WIDE_INT i; } u;
1360 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1363 return GEN_INT (u.i);
1365 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1366 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1367 || flag_pretend_float)
1368 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1369 && GET_MODE_CLASS (mode) == MODE_FLOAT
1370 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1371 && GET_CODE (op) == CONST_DOUBLE)
1374 union {double d; HOST_WIDE_INT i; } u;
1376 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1379 return GEN_INT (u.i);
1381 #endif /* no REAL_ARITHMETIC */
1383 /* The only remaining cases that we can handle are integers.
1384 Convert to proper endianness now since these cases need it.
1385 At this point, i == 0 means the low-order word.
1387 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1388 in general. However, if OP is (const_int 0), we can just return
1391 if (op == const0_rtx)
1394 if (GET_MODE_CLASS (mode) != MODE_INT
1395 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1396 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1399 if (WORDS_BIG_ENDIAN)
1400 i = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - i;
1402 /* Find out which word on the host machine this value is in and get
1403 it from the constant. */
1404 val = (i / size_ratio == 0
1405 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1406 : (GET_CODE (op) == CONST_INT
/* High part of a negative CONST_INT is all ones (sign extension).  */
1407 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1409 /* Get the value we want into the low bits of val. */
1410 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1411 val = ((val >> ((i % size_ratio) * BITS_PER_WORD)));
1413 val = trunc_int_for_mode (val, word_mode);
1415 return GEN_INT (val);
1418 /* Similar to `operand_subword', but never return 0. If we can't extract
1419 the required subword, put OP into a register and try again. If that fails,
1420 abort. We always validate the address in this case. It is not valid
1421 to call this function after reload; it is mostly meant for RTL
1424 MODE is the mode of OP, in case it is CONST_INT. */
1427 operand_subword_force (op, i, mode)
1430 enum machine_mode mode;
/* First attempt with OP as given; the early-return on success is elided
   in this excerpt.  */
1432 rtx result = operand_subword (op, i, 1, mode);
1437 if (mode != BLKmode && mode != VOIDmode)
1439 /* If this is a register which can not be accessed by words, copy it
1440 to a pseudo register. */
1441 if (GET_CODE (op) == REG)
1442 op = copy_to_reg (op);
1444 op = force_reg (mode, op);
/* Retry on the copied/forced register; this time it must succeed.  */
1447 result = operand_subword (op, i, 1, mode);
1454 /* Given a compare instruction, swap the operands.
1455 A test instruction is changed into a compare of 0 against the operand. */
1458 reverse_comparison (insn)
/* The comparison is the SET_SRC of either a plain SET body or the first
   element of a PARALLEL body.  */
1461 rtx body = PATTERN (insn);
1464 if (GET_CODE (body) == SET)
1465 comp = SET_SRC (body);
1467 comp = SET_SRC (XVECEXP (body, 0, 0));
1469 if (GET_CODE (comp) == COMPARE)
/* A real COMPARE: just exchange its two operands in place.  */
1471 rtx op0 = XEXP (comp, 0);
1472 rtx op1 = XEXP (comp, 1);
1473 XEXP (comp, 0) = op1;
1474 XEXP (comp, 1) = op0;
/* A test: wrap the operand as (compare 0 OP) and install it as the
   new source of the SET.  */
1478 rtx new = gen_rtx_COMPARE (VOIDmode,
1479 CONST0_RTX (GET_MODE (comp)), comp);
1480 if (GET_CODE (body) == SET)
1481 SET_SRC (body) = new;
1483 SET_SRC (XVECEXP (body, 0, 0)) = new;
1487 /* Return a memory reference like MEMREF, but with its mode changed
1488 to MODE and its address changed to ADDR.
1489 (VOIDmode means don't change the mode.
1490 NULL for ADDR means don't change the address.) */
1493 change_address (memref, mode, addr)
1495 enum machine_mode mode;
1500 if (GET_CODE (memref) != MEM)
1502 if (mode == VOIDmode)
1503 mode = GET_MODE (memref);
1505 addr = XEXP (memref, 0);
1507 /* If reload is in progress or has completed, ADDR must be valid.
1508 Otherwise, we can call memory_address to make it valid. */
1509 if (reload_completed || reload_in_progress)
1511 if (! memory_address_p (mode, addr))
1515 addr = memory_address (mode, addr);
/* Nothing would change: reuse MEMREF itself (the return is elided in
   this excerpt).  */
1517 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
/* Build the new MEM and carry over MEMREF's attributes.  */
1520 new = gen_rtx_MEM (mode, addr);
1521 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref);
1522 MEM_COPY_ATTRIBUTES (new, memref);
1523 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref);
1527 /* Return a newly created CODE_LABEL rtx with a unique label number. */
/* NOTE(review): the function header (gen_label_rtx) is elided in this
   excerpt.  label_num is file-global (see top of file), so label numbers
   are unique across the entire compilation, not just this function.  */
1534 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1535 NULL_RTX, label_num++, NULL_PTR, NULL_PTR);
1537 LABEL_NUSES (label) = 0;
1538 LABEL_ALTERNATE_NAME (label) = NULL;
1542 /* For procedure integration. */
1544 /* Install new pointers to the first and last insns in the chain.
1545 Also, set cur_insn_uid to one higher than the last in use.
1546 Used for an inline-procedure after copying the insn chain. */
1549 set_new_first_and_last_insn (first, last)
/* Walk the whole new chain so cur_insn_uid ends up above every UID in it.  */
1558 for (insn = first; insn; insn = NEXT_INSN (insn))
1559 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1564 /* Set the range of label numbers found in the current function.
1565 This is used when belatedly compiling an inline function. */
1568 set_new_first_and_last_label_num (first, last)
/* Record the current global counter as the base for relative numbering.  */
1571 base_label_num = label_num;
1572 first_label_num = first;
1573 last_label_num = last;
1576 /* Set the last label number found in the current function.
1577 This is used when belatedly compiling an inline function. */
1580 set_new_last_label_num (last)
/* Same bookkeeping as set_new_first_and_last_label_num, last bound only.  */
1583 base_label_num = label_num;
1584 last_label_num = last;
1587 /* Restore all variables describing the current status from the structure *P.
1588 This is used after a nested function. */
1591 restore_emit_status (p)
1592 struct function *p ATTRIBUTE_UNUSED;
/* Only the RTL generation caches need flushing; the rest of the emit
   state lives in the function structure itself.  */
1595 clear_emit_caches ();
1598 /* Clear out all parts of the state in F that can safely be discarded
1599 after the function has been compiled, to let garbage collection
1600 reclaim the memory. */
1603 free_emit_status (f)
/* These per-register tables were heap-allocated per function.  */
1606 free (f->emit->x_regno_reg_rtx);
1607 free (f->emit->regno_pointer_flag);
1608 free (f->emit->regno_pointer_align);
1613 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1614 It does not work to do this twice, because the mark bits set here
1615 are not cleared afterwards. */
1618 unshare_all_rtl (insn)
1621 for (; insn; insn = NEXT_INSN (insn))
/* Only real insns carry patterns and notes worth unsharing.  */
1622 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1623 || GET_CODE (insn) == CALL_INSN)
1625 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1626 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1627 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1630 /* Make sure the addresses of stack slots found outside the insn chain
1631 (such as, in DECL_RTL of a variable) are not shared
1632 with the insn chain.
1634 This special care is necessary when the stack slot MEM does not
1635 actually appear in the insn chain. If it does appear, its address
1636 is unshared from all else at that point. */
1638 copy_rtx_if_shared (stack_slot_list);
1641 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1642 Recursively does the same for subexpressions. */
1645 copy_rtx_if_shared (orig)
1648 register rtx x = orig;
1650 register enum rtx_code code;
1651 register const char *format_ptr;
1657 code = GET_CODE (x);
1659 /* These types may be freely shared. */
1672 /* SCRATCH must be shared because each one represents a distinct value. */
1676 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1677 a LABEL_REF, it isn't sharable. */
1678 if (GET_CODE (XEXP (x, 0)) == PLUS
1679 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1680 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1689 /* The chain of insns is not being copied. */
1693 /* A MEM is allowed to be shared if its address is constant.
1695 We used to allow sharing of MEMs which referenced
1696 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1697 that can lose. instantiate_virtual_regs will not unshare
1698 the MEMs, and combine may change the structure of the address
1699 because it looks safe and profitable in one context, but
1700 in some other context it creates unrecognizable RTL. */
1701 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
1710 /* This rtx may not be shared. If it has already been seen,
1711 replace it with a copy of itself. */
/* Shallow-copy the rtx header plus exactly as many fields as CODE has.  */
1717 copy = rtx_alloc (code);
1718 bcopy ((char *) x, (char *) copy,
1719 (sizeof (*copy) - sizeof (copy->fld)
1720 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1726 /* Now scan the subexpressions recursively.
1727 We can store any replaced subexpressions directly into X
1728 since we know X is not shared! Any vectors in X
1729 must be copied if X was copied. */
1731 format_ptr = GET_RTX_FORMAT (code);
1733 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1735 switch (*format_ptr++)
1738 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1742 if (XVEC (x, i) != NULL)
1745 int len = XVECLEN (x, i);
/* A vector must itself be duplicated when its parent was copied.  */
1747 if (copied && len > 0)
1748 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
1749 for (j = 0; j < len; j++)
1750 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
1758 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1759 to look for shared sub-parts. */
1762 reset_used_flags (x)
1766 register enum rtx_code code;
1767 register const char *format_ptr;
1772 code = GET_CODE (x);
1774 /* These types may be freely shared so we needn't do any resetting
1795 /* The chain of insns is not being copied. */
/* Recurse over subexpressions, mirroring copy_rtx_if_shared's walk.  */
1804 format_ptr = GET_RTX_FORMAT (code);
1805 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1807 switch (*format_ptr++)
1810 reset_used_flags (XEXP (x, i));
1814 for (j = 0; j < XVECLEN (x, i); j++)
1815 reset_used_flags (XVECEXP (x, i, j));
1821 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1822 Return X or the rtx for the pseudo reg the value of X was copied into.
1823 OTHER must be valid as a SET_DEST. */
1826 make_safe_from (x, other)
/* Strip wrappers so we look at the register/memory actually written.  */
1830 switch (GET_CODE (other))
1833 other = SUBREG_REG (other);
1835 case STRICT_LOW_PART:
1838 other = XEXP (other, 0);
/* X is unsafe if OTHER is a MEM and X is not a simple reg, or if OTHER
   is a hard reg or a reg mentioned inside X.  */
1844 if ((GET_CODE (other) == MEM
1846 && GET_CODE (x) != REG
1847 && GET_CODE (x) != SUBREG)
1848 || (GET_CODE (other) == REG
1849 && (REGNO (other) < FIRST_PSEUDO_REGISTER
1850 || reg_mentioned_p (other, x))))
1852 rtx temp = gen_reg_rtx (GET_MODE (x));
1853 emit_move_insn (temp, x);
1859 /* Emission of insns (adding them to the doubly-linked list). */
1861 /* Return the first insn of the current sequence or current function. */
1869 /* Return the last insn emitted in current sequence or current function. */
1877 /* Specify a new insn as the last in the chain. */
1880 set_last_insn (insn)
/* INSN must really be chain-final; the error path for a nonzero
   NEXT_INSN is elided in this excerpt.  */
1883 if (NEXT_INSN (insn) != 0)
1888 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1891 get_last_insn_anywhere ()
1893 struct sequence_stack *stack;
/* Search outward through the pushed-sequence stack for a nonempty one.  */
1896 for (stack = seq_stack; stack; stack = stack->next)
1897 if (stack->last != 0)
1902 /* Return a number larger than any instruction's uid in this function. */
/* NOTE(review): the function header (get_max_uid) is elided in this
   excerpt.  */
1907 return cur_insn_uid;
1910 /* Renumber instructions so that no instruction UIDs are wasted. */
1913 renumber_insns (stream)
1918 /* If we're not supposed to renumber instructions, don't. */
1919 if (!flag_renumber_insns)
1922 /* If there aren't that many instructions, then it's not really
1923 worth renumbering them. */
1924 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
1929 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* STREAM, when non-null, gets a dump line per renumbered insn.  */
1932 fprintf (stream, "Renumbering insn %d to %d\n",
1933 INSN_UID (insn), cur_insn_uid);
1934 INSN_UID (insn) = cur_insn_uid++;
1938 /* Return the next insn. If it is a SEQUENCE, return the first insn
/* NOTE(review): function header (next_insn) elided in this excerpt.  */
1947 insn = NEXT_INSN (insn);
1948 if (insn && GET_CODE (insn) == INSN
1949 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1950 insn = XVECEXP (PATTERN (insn), 0, 0);
1956 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1960 previous_insn (insn)
1965 insn = PREV_INSN (insn);
/* Step into a delay-slot SEQUENCE and yield its final element.  */
1966 if (insn && GET_CODE (insn) == INSN
1967 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1968 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
1974 /* Return the next insn after INSN that is not a NOTE. This routine does not
1975 look inside SEQUENCEs. */
1978 next_nonnote_insn (insn)
/* Loop scaffolding (while/break) is elided in this excerpt.  */
1983 insn = NEXT_INSN (insn);
1984 if (insn == 0 || GET_CODE (insn) != NOTE)
1991 /* Return the previous insn before INSN that is not a NOTE. This routine does
1992 not look inside SEQUENCEs. */
1995 prev_nonnote_insn (insn)
/* Mirror image of next_nonnote_insn; loop scaffolding elided.  */
2000 insn = PREV_INSN (insn);
2001 if (insn == 0 || GET_CODE (insn) != NOTE)
2008 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2009 or 0, if there is none. This routine does not look inside
2013 next_real_insn (insn)
/* Skip notes, labels and barriers until a real insn (or chain end).  */
2018 insn = NEXT_INSN (insn);
2019 if (insn == 0 || GET_CODE (insn) == INSN
2020 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2027 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2028 or 0, if there is none. This routine does not look inside
2032 prev_real_insn (insn)
/* Mirror image of next_real_insn.  */
2037 insn = PREV_INSN (insn);
2038 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2039 || GET_CODE (insn) == JUMP_INSN)
2046 /* Find the next insn after INSN that really does something. This routine
2047 does not look inside SEQUENCEs. Until reload has completed, this is the
2048 same as next_real_insn. */
2051 next_active_insn (insn)
2056 insn = NEXT_INSN (insn);
2058 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
/* After reload, USE and CLOBBER insns are no-ops and are skipped.  */
2059 || (GET_CODE (insn) == INSN
2060 && (! reload_completed
2061 || (GET_CODE (PATTERN (insn)) != USE
2062 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2069 /* Find the last insn before INSN that really does something. This routine
2070 does not look inside SEQUENCEs. Until reload has completed, this is the
2071 same as prev_real_insn. */
2074 prev_active_insn (insn)
2079 insn = PREV_INSN (insn);
2081 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
/* Same USE/CLOBBER skipping as next_active_insn, going backward.  */
2082 || (GET_CODE (insn) == INSN
2083 && (! reload_completed
2084 || (GET_CODE (PATTERN (insn)) != USE
2085 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2092 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
/* NOTE(review): function header (next_label) elided in this excerpt.  */
2100 insn = NEXT_INSN (insn);
2101 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2108 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
/* NOTE(review): function header (prev_label) elided in this excerpt.  */
2116 insn = PREV_INSN (insn);
2117 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2125 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2126 and REG_CC_USER notes so we can find it. */
2129 link_cc0_insns (insn)
2132 rtx user = next_nonnote_insn (insn);
/* If the user has already been placed in a delay-slot SEQUENCE, link to
   the actual first insn inside it.  */
2134 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2135 user = XVECEXP (PATTERN (user), 0, 0);
/* Cross-link the two insns via reciprocal REG notes.  */
2137 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2139 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2142 /* Return the next insn that uses CC0 after INSN, which is assumed to
2143 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2144 applied to the result of this function should yield INSN).
2146 Normally, this is simply the next insn. However, if a REG_CC_USER note
2147 is present, it contains the insn that uses CC0.
2149 Return 0 if we can't find the insn. */
2152 next_cc0_user (insn)
/* The explicit note (set up by link_cc0_insns) takes precedence.  */
2155 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2158 return XEXP (note, 0);
2160 insn = next_nonnote_insn (insn);
2161 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2162 insn = XVECEXP (PATTERN (insn), 0, 0);
/* Accept only a real insn that actually mentions cc0.  */
2164 if (insn && GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2165 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2171 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2172 note, it is the previous insn. */
2175 prev_cc0_setter (insn)
2178 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2181 return XEXP (note, 0);
2183 insn = prev_nonnote_insn (insn);
/* The previous non-note insn must be the setter; the failure path is
   elided in this excerpt.  */
2184 if (! sets_cc0_p (PATTERN (insn)))
2191 /* Try splitting insns that can be split for better scheduling.
2192 PAT is the pattern which might split.
2193 TRIAL is the insn providing PAT.
2194 LAST is non-zero if we should return the last insn of the sequence produced.
2196 If this routine succeeds in splitting, it returns the first or last
2197 replacement insn depending on the value of LAST. Otherwise, it
2198 returns TRIAL. If the insn to be returned can be split, it will be. */
2201 try_split (pat, trial, last)
2205 rtx before = PREV_INSN (trial);
2206 rtx after = NEXT_INSN (trial);
2207 rtx seq = split_insns (pat, trial);
2208 int has_barrier = 0;
2211 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2212 We may need to handle this specially. */
2213 if (after && GET_CODE (after) == BARRIER)
2216 after = NEXT_INSN (after);
2221 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2222 The latter case will normally arise only when being done so that
2223 it, in turn, will be split (SFmode on the 29k is an example). */
2224 if (GET_CODE (seq) == SEQUENCE)
2226 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2227 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2228 increment the usage count so we don't delete the label. */
2231 if (GET_CODE (trial) == JUMP_INSN)
2232 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2233 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2235 JUMP_LABEL (XVECEXP (seq, 0, i)) = JUMP_LABEL (trial);
2237 if (JUMP_LABEL (trial))
2238 LABEL_NUSES (JUMP_LABEL (trial))++;
/* Splice the replacement insns in and remove the original.  */
2241 tem = emit_insn_after (seq, before);
2243 delete_insn (trial);
2245 emit_barrier_after (tem);
2247 /* Recursively call try_split for each new insn created; by the
2248 time control returns here that insn will be fully split, so
2249 set LAST and continue from the insn after the one returned.
2250 We can't use next_active_insn here since AFTER may be a note.
2251 Ignore deleted insns, which can occur if not optimizing. */
2252 for (tem = NEXT_INSN (before); tem != after;
2253 tem = NEXT_INSN (tem))
2254 if (! INSN_DELETED_P (tem)
2255 && GET_RTX_CLASS (GET_CODE (tem)) == 'i')
2256 tem = try_split (PATTERN (tem), tem, 1);
2258 /* Avoid infinite loop if the result matches the original pattern. */
2259 else if (rtx_equal_p (seq, pat))
/* Single-insn result: install the new pattern in place and re-split.  */
2263 PATTERN (trial) = seq;
2264 INSN_CODE (trial) = -1;
2265 try_split (seq, trial, last);
2268 /* Return either the first or the last insn, depending on which was
2271 ? (after ? prev_active_insn (after) : last_insn)
2272 : next_active_insn (before);
2278 /* Make and return an INSN rtx, initializing all its slots.
2279 Store PATTERN in the pattern slot. */
2282 make_insn_raw (pattern)
2287 /* If in RTL generation phase, see if FREE_INSN can be used. */
2288 if (!ggc_p && free_insn != 0 && rtx_equal_function_value_matters)
/* Recycle an insn from the free list instead of allocating.  */
2291 free_insn = NEXT_INSN (free_insn);
2292 PUT_CODE (insn, INSN);
2295 insn = rtx_alloc (INSN);
2297 INSN_UID (insn) = cur_insn_uid++;
2298 PATTERN (insn) = pattern;
2299 INSN_CODE (insn) = -1;
2300 LOG_LINKS (insn) = NULL;
2301 REG_NOTES (insn) = NULL;
2306 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2309 make_jump_insn_raw (pattern)
2314 insn = rtx_alloc (JUMP_INSN);
2315 INSN_UID (insn) = cur_insn_uid++;
2317 PATTERN (insn) = pattern;
2318 INSN_CODE (insn) = -1;
2319 LOG_LINKS (insn) = NULL;
2320 REG_NOTES (insn) = NULL;
/* JUMP_LABEL is filled in later by jump optimization / emitters.  */
2321 JUMP_LABEL (insn) = NULL;
2326 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2329 make_call_insn_raw (pattern)
2334 insn = rtx_alloc (CALL_INSN);
2335 INSN_UID (insn) = cur_insn_uid++;
2337 PATTERN (insn) = pattern;
2338 INSN_CODE (insn) = -1;
2339 LOG_LINKS (insn) = NULL;
2340 REG_NOTES (insn) = NULL;
/* The USE/CLOBBER list describing the call's register usage starts empty.  */
2341 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2346 /* Add INSN to the end of the doubly-linked list.
2347 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
/* NOTE(review): function header (add_insn) elided in this excerpt.  */
2353 PREV_INSN (insn) = last_insn;
2354 NEXT_INSN (insn) = 0;
2356 if (NULL != last_insn)
2357 NEXT_INSN (last_insn) = insn;
/* An empty chain means INSN also becomes the first insn.  */
2359 if (NULL == first_insn)
2365 /* Add INSN into the doubly-linked list after insn AFTER. This and
2366 the next should be the only functions called to insert an insn once
2367 delay slots have been filled since only they know how to update a
2371 add_insn_after (insn, after)
2374 rtx next = NEXT_INSN (after);
/* Inserting after a deleted insn is an error when optimizing.  */
2376 if (optimize && INSN_DELETED_P (after))
2379 NEXT_INSN (insn) = next;
2380 PREV_INSN (insn) = after;
2384 PREV_INSN (next) = insn;
/* Keep the back-pointer of a following delay-slot SEQUENCE consistent.  */
2385 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2386 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2388 else if (last_insn == after)
2392 struct sequence_stack *stack = seq_stack;
2393 /* Scan all pending sequences too. */
2394 for (; stack; stack = stack->next)
2395 if (after == stack->last)
2405 NEXT_INSN (after) = insn;
2406 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2408 rtx sequence = PATTERN (after);
2409 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2413 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2414 the previous should be the only functions called to insert an insn once
2415 delay slots have been filled since only they know how to update a
2419 add_insn_before (insn, before)
2422 rtx prev = PREV_INSN (before);
/* Inserting before a deleted insn is an error when optimizing.  */
2424 if (optimize && INSN_DELETED_P (before))
2427 PREV_INSN (insn) = prev;
2428 NEXT_INSN (insn) = before;
2432 NEXT_INSN (prev) = insn;
/* Keep the forward pointer of a preceding delay-slot SEQUENCE consistent.  */
2433 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2435 rtx sequence = PATTERN (prev);
2436 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2439 else if (first_insn == before)
2443 struct sequence_stack *stack = seq_stack;
2444 /* Scan all pending sequences too. */
2445 for (; stack; stack = stack->next)
2446 if (before == stack->first)
2448 stack->first = insn;
2456 PREV_INSN (before) = insn;
2457 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2458 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2461 /* Remove an insn from its doubly-linked list. This function knows how
2462 to handle sequences. */
/* NOTE(review): function header (remove_insn) elided in this excerpt.  */
2467 rtx next = NEXT_INSN (insn);
2468 rtx prev = PREV_INSN (insn);
2471 NEXT_INSN (prev) = next;
/* Fix the forward pointer of an adjacent delay-slot SEQUENCE.  */
2472 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2474 rtx sequence = PATTERN (prev);
2475 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2478 else if (first_insn == insn)
2482 struct sequence_stack *stack = seq_stack;
2483 /* Scan all pending sequences too. */
2484 for (; stack; stack = stack->next)
2485 if (insn == stack->first)
2487 stack->first = next;
2497 PREV_INSN (next) = prev;
2498 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2499 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2501 else if (last_insn == insn)
2505 struct sequence_stack *stack = seq_stack;
2506 /* Scan all pending sequences too. */
2507 for (; stack; stack = stack->next)
2508 if (insn == stack->last)
2519 /* Delete all insns made since FROM.
2520 FROM becomes the new last instruction. */
2523 delete_insns_since (from)
/* Truncate the chain; the deleted tail is simply unlinked.  */
2529 NEXT_INSN (from) = 0;
2533 /* This function is deprecated, please use sequences instead.
2535 Move a consecutive bunch of insns to a different place in the chain.
2536 The insns to be moved are those between FROM and TO.
2537 They are moved to a new position after the insn AFTER.
2538 AFTER must not be FROM or TO or any insn in between.
2540 This function does not know about SEQUENCEs and hence should not be
2541 called after delay-slot filling has been done. */
2544 reorder_insns (from, to, after)
2545 rtx from, to, after;
2547 /* Splice this bunch out of where it is now. */
2548 if (PREV_INSN (from))
2549 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2551 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
/* Keep the chain-end / chain-start globals valid across the splice.  */
2552 if (last_insn == to)
2553 last_insn = PREV_INSN (from);
2554 if (first_insn == from)
2555 first_insn = NEXT_INSN (to);
2557 /* Make the new neighbors point to it and it to them. */
2558 if (NEXT_INSN (after))
2559 PREV_INSN (NEXT_INSN (after)) = to;
2561 NEXT_INSN (to) = NEXT_INSN (after);
2562 PREV_INSN (from) = after;
2563 NEXT_INSN (after) = from;
2564 if (after == last_insn)
2568 /* Return the line note insn preceding INSN. */
2571 find_line_note (insn)
/* With line numbers disabled there are no line notes to find.  */
2574 if (no_line_numbers)
/* Line notes have a non-negative NOTE_LINE_NUMBER.  */
2577 for (; insn; insn = PREV_INSN (insn))
2578 if (GET_CODE (insn) == NOTE
2579 && NOTE_LINE_NUMBER (insn) >= 0)
2585 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2586 of the moved insns when debugging. This may insert a note between AFTER
2587 and FROM, and another one after TO. */
2590 reorder_insns_with_line_notes (from, to, after)
2591 rtx from, to, after;
2593 rtx from_line = find_line_note (from);
2594 rtx after_line = find_line_note (after);
2596 reorder_insns (from, to, after);
/* Same line at both places: no compensation notes are needed.  */
2598 if (from_line == after_line)
/* Re-establish the source position of the moved block, then restore the
   position that followed the insertion point.  */
2602 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2603 NOTE_LINE_NUMBER (from_line),
2606 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2607 NOTE_LINE_NUMBER (after_line),
2611 /* Remove unnecessary notes from the instruction stream. */
/* (The identifier below keeps its historical spelling; renaming it would
   break callers outside this file.)  */
2614 remove_unncessary_notes ()
2619 /* Remove NOTE_INSN_DELETED notes. We must not remove the first
2620 instruction in the function because the compiler depends on the
2621 first instruction being a note. */
2622 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
2624 /* Remember what's next. */
2625 next = NEXT_INSN (insn);
2627 /* We're only interested in notes. */
2628 if (GET_CODE (insn) != NOTE)
2631 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
2637 /* Emit an insn of given code and pattern
2638 at a specified place within the doubly-linked list. */
2640 /* Make an instruction with body PATTERN
2641 and output it before the instruction BEFORE. */
2644 emit_insn_before (pattern, before)
2645 register rtx pattern, before;
2647 register rtx insn = before;
2649 if (GET_CODE (pattern) == SEQUENCE)
2653 for (i = 0; i < XVECLEN (pattern, 0); i++)
2655 insn = XVECEXP (pattern, 0, i);
2656 add_insn_before (insn, before);
2658 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2659 sequence_result[XVECLEN (pattern, 0)] = pattern;
2663 insn = make_insn_raw (pattern);
2664 add_insn_before (insn, before);
2670 /* Similar to emit_insn_before, but update basic block boundaries as well. */
2673 emit_block_insn_before (pattern, before, block)
2674 rtx pattern, before;
2677 rtx prev = PREV_INSN (before);
2678 rtx r = emit_insn_before (pattern, before);
2679 if (block && block->head == before)
2680 block->head = NEXT_INSN (prev);
2684 /* Make an instruction with body PATTERN and code JUMP_INSN
2685 and output it before the instruction BEFORE. */
2688 emit_jump_insn_before (pattern, before)
2689 register rtx pattern, before;
2693 if (GET_CODE (pattern) == SEQUENCE)
2694 insn = emit_insn_before (pattern, before);
2697 insn = make_jump_insn_raw (pattern);
2698 add_insn_before (insn, before);
2704 /* Make an instruction with body PATTERN and code CALL_INSN
2705 and output it before the instruction BEFORE. */
2708 emit_call_insn_before (pattern, before)
2709 register rtx pattern, before;
2713 if (GET_CODE (pattern) == SEQUENCE)
2714 insn = emit_insn_before (pattern, before);
2717 insn = make_call_insn_raw (pattern);
2718 add_insn_before (insn, before);
2719 PUT_CODE (insn, CALL_INSN);
2725 /* Make an insn of code BARRIER
2726 and output it before the insn BEFORE. */
2729 emit_barrier_before (before)
2730 register rtx before;
2732 register rtx insn = rtx_alloc (BARRIER);
2734 INSN_UID (insn) = cur_insn_uid++;
2736 add_insn_before (insn, before);
2740 /* Emit the label LABEL before the insn BEFORE. */
2743 emit_label_before (label, before)
2746 /* This can be called twice for the same label as a result of the
2747 confusion that follows a syntax error! So make it harmless. */
2748 if (INSN_UID (label) == 0)
2750 INSN_UID (label) = cur_insn_uid++;
2751 add_insn_before (label, before);
2757 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
2760 emit_note_before (subtype, before)
2764 register rtx note = rtx_alloc (NOTE);
2765 INSN_UID (note) = cur_insn_uid++;
2766 NOTE_SOURCE_FILE (note) = 0;
2767 NOTE_LINE_NUMBER (note) = subtype;
2769 add_insn_before (note, before);
2773 /* Make an insn of code INSN with body PATTERN
2774 and output it after the insn AFTER. */
2777 emit_insn_after (pattern, after)
2778 register rtx pattern, after;
2780 register rtx insn = after;
2782 if (GET_CODE (pattern) == SEQUENCE)
2786 for (i = 0; i < XVECLEN (pattern, 0); i++)
2788 insn = XVECEXP (pattern, 0, i);
2789 add_insn_after (insn, after);
2792 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2793 sequence_result[XVECLEN (pattern, 0)] = pattern;
2797 insn = make_insn_raw (pattern);
2798 add_insn_after (insn, after);
2804 /* Similar to emit_insn_after, except that line notes are to be inserted so
2805 as to act as if this insn were at FROM. */
2808 emit_insn_after_with_line_notes (pattern, after, from)
2809 rtx pattern, after, from;
2811 rtx from_line = find_line_note (from);
2812 rtx after_line = find_line_note (after);
2813 rtx insn = emit_insn_after (pattern, after);
2816 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2817 NOTE_LINE_NUMBER (from_line),
2821 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2822 NOTE_LINE_NUMBER (after_line),
2826 /* Similar to emit_insn_after, but update basic block boundaries as well. */
2829 emit_block_insn_after (pattern, after, block)
2833 rtx r = emit_insn_after (pattern, after);
2834 if (block && block->end == after)
2839 /* Make an insn of code JUMP_INSN with body PATTERN
2840 and output it after the insn AFTER. */
2843 emit_jump_insn_after (pattern, after)
2844 register rtx pattern, after;
2848 if (GET_CODE (pattern) == SEQUENCE)
2849 insn = emit_insn_after (pattern, after);
2852 insn = make_jump_insn_raw (pattern);
2853 add_insn_after (insn, after);
2859 /* Make an insn of code BARRIER
2860 and output it after the insn AFTER. */
2863 emit_barrier_after (after)
2866 register rtx insn = rtx_alloc (BARRIER);
2868 INSN_UID (insn) = cur_insn_uid++;
2870 add_insn_after (insn, after);
2874 /* Emit the label LABEL after the insn AFTER. */
2877 emit_label_after (label, after)
2880 /* This can be called twice for the same label
2881 as a result of the confusion that follows a syntax error!
2882 So make it harmless. */
2883 if (INSN_UID (label) == 0)
2885 INSN_UID (label) = cur_insn_uid++;
2886 add_insn_after (label, after);
2892 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
2895 emit_note_after (subtype, after)
2899 register rtx note = rtx_alloc (NOTE);
2900 INSN_UID (note) = cur_insn_uid++;
2901 NOTE_SOURCE_FILE (note) = 0;
2902 NOTE_LINE_NUMBER (note) = subtype;
2903 add_insn_after (note, after);
2907 /* Emit a line note for FILE and LINE after the insn AFTER. */
2910 emit_line_note_after (file, line, after)
2917 if (no_line_numbers && line > 0)
2923 note = rtx_alloc (NOTE);
2924 INSN_UID (note) = cur_insn_uid++;
2925 NOTE_SOURCE_FILE (note) = file;
2926 NOTE_LINE_NUMBER (note) = line;
2927 add_insn_after (note, after);
2931 /* Make an insn of code INSN with pattern PATTERN
2932 and add it to the end of the doubly-linked list.
2933 If PATTERN is a SEQUENCE, take the elements of it
2934 and emit an insn for each element.
2936 Returns the last insn emitted. */
2942 rtx insn = last_insn;
2944 if (GET_CODE (pattern) == SEQUENCE)
2948 for (i = 0; i < XVECLEN (pattern, 0); i++)
2950 insn = XVECEXP (pattern, 0, i);
2953 if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2954 sequence_result[XVECLEN (pattern, 0)] = pattern;
2958 insn = make_insn_raw (pattern);
2962 #ifdef ENABLE_RTL_CHECKING
2964 && (returnjump_p (insn)
2965 || (GET_CODE (insn) == SET
2966 && SET_DEST (insn) == pc_rtx)))
2968 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2976 /* Emit the insns in a chain starting with INSN.
2977 Return the last insn emitted. */
2987 rtx next = NEXT_INSN (insn);
2996 /* Emit the insns in a chain starting with INSN and place them in front of
2997 the insn BEFORE. Return the last insn emitted. */
3000 emit_insns_before (insn, before)
3008 rtx next = NEXT_INSN (insn);
3009 add_insn_before (insn, before);
3017 /* Emit the insns in a chain starting with FIRST and place them in back of
3018 the insn AFTER. Return the last insn emitted. */
3021 emit_insns_after (first, after)
3026 register rtx after_after;
3034 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3037 after_after = NEXT_INSN (after);
3039 NEXT_INSN (after) = first;
3040 PREV_INSN (first) = after;
3041 NEXT_INSN (last) = after_after;
3043 PREV_INSN (after_after) = last;
3045 if (after == last_insn)
3050 /* Make an insn of code JUMP_INSN with pattern PATTERN
3051 and add it to the end of the doubly-linked list. */
3054 emit_jump_insn (pattern)
3057 if (GET_CODE (pattern) == SEQUENCE)
3058 return emit_insn (pattern);
3061 register rtx insn = make_jump_insn_raw (pattern);
3067 /* Make an insn of code CALL_INSN with pattern PATTERN
3068 and add it to the end of the doubly-linked list. */
3071 emit_call_insn (pattern)
3074 if (GET_CODE (pattern) == SEQUENCE)
3075 return emit_insn (pattern);
3078 register rtx insn = make_call_insn_raw (pattern);
3080 PUT_CODE (insn, CALL_INSN);
3085 /* Add the label LABEL to the end of the doubly-linked list. */
3091 /* This can be called twice for the same label
3092 as a result of the confusion that follows a syntax error!
3093 So make it harmless. */
3094 if (INSN_UID (label) == 0)
3096 INSN_UID (label) = cur_insn_uid++;
3102 /* Make an insn of code BARRIER
3103 and add it to the end of the doubly-linked list. */
3108 register rtx barrier = rtx_alloc (BARRIER);
3109 INSN_UID (barrier) = cur_insn_uid++;
3114 /* Make an insn of code NOTE
3115 with data-fields specified by FILE and LINE
3116 and add it to the end of the doubly-linked list,
3117 but only if line-numbers are desired for debugging info. */
3120 emit_line_note (file, line)
3124 set_file_and_line_for_stmt (file, line);
3127 if (no_line_numbers)
3131 return emit_note (file, line);
3134 /* Make an insn of code NOTE
3135 with data-fields specified by FILE and LINE
3136 and add it to the end of the doubly-linked list.
3137 If it is a line-number NOTE, omit it if it matches the previous one. */
3140 emit_note (file, line)
3148 if (file && last_filename && !strcmp (file, last_filename)
3149 && line == last_linenum)
3151 last_filename = file;
3152 last_linenum = line;
3155 if (no_line_numbers && line > 0)
3161 note = rtx_alloc (NOTE);
3162 INSN_UID (note) = cur_insn_uid++;
3163 NOTE_SOURCE_FILE (note) = file;
3164 NOTE_LINE_NUMBER (note) = line;
3169 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3172 emit_line_note_force (file, line)
3177 return emit_line_note (file, line);
3180 /* Cause next statement to emit a line note even if the line number
3181 has not changed. This is used at the beginning of a function. */
3184 force_next_line_note ()
3189 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3190 note of this type already exists, remove it first. */
3193 set_unique_reg_note (insn, kind, datum)
3198 rtx note = find_reg_note (insn, kind, NULL_RTX);
3200 /* First remove the note if there already is one. */
3202 remove_note (insn, note);
3204 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3207 /* Return an indication of which type of insn should have X as a body.
3208 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3214 if (GET_CODE (x) == CODE_LABEL)
3216 if (GET_CODE (x) == CALL)
3218 if (GET_CODE (x) == RETURN)
3220 if (GET_CODE (x) == SET)
3222 if (SET_DEST (x) == pc_rtx)
3224 else if (GET_CODE (SET_SRC (x)) == CALL)
3229 if (GET_CODE (x) == PARALLEL)
3232 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3233 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3235 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3236 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3238 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3239 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3245 /* Emit the rtl pattern X as an appropriate kind of insn.
3246 If X is a label, it is simply added into the insn chain. */
3252 enum rtx_code code = classify_insn (x);
3254 if (code == CODE_LABEL)
3255 return emit_label (x);
3256 else if (code == INSN)
3257 return emit_insn (x);
3258 else if (code == JUMP_INSN)
3260 register rtx insn = emit_jump_insn (x);
3261 if (simplejump_p (insn) || GET_CODE (x) == RETURN)
3262 return emit_barrier ();
3265 else if (code == CALL_INSN)
3266 return emit_call_insn (x);
3271 /* Begin emitting insns to a sequence which can be packaged in an
3272 RTL_EXPR. If this sequence will contain something that might cause
3273 the compiler to pop arguments to function calls (because those
3274 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3275 details), use do_pending_stack_adjust before calling this function.
3276 That will ensure that the deferred pops are not accidentally
3277 emitted in the middel of this sequence. */
3282 struct sequence_stack *tem;
3284 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
3286 tem->next = seq_stack;
3287 tem->first = first_insn;
3288 tem->last = last_insn;
3289 tem->sequence_rtl_expr = seq_rtl_expr;
3297 /* Similarly, but indicate that this sequence will be placed in T, an
3298 RTL_EXPR. See the documentation for start_sequence for more
3299 information about how to use this function. */
3302 start_sequence_for_rtl_expr (t)
3310 /* Set up the insn chain starting with FIRST as the current sequence,
3311 saving the previously current one. See the documentation for
3312 start_sequence for more information about how to use this function. */
3315 push_to_sequence (first)
3322 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3328 /* Set up the outer-level insn chain
3329 as the current sequence, saving the previously current one. */
3332 push_topmost_sequence ()
3334 struct sequence_stack *stack, *top = NULL;
3338 for (stack = seq_stack; stack; stack = stack->next)
3341 first_insn = top->first;
3342 last_insn = top->last;
3343 seq_rtl_expr = top->sequence_rtl_expr;
3346 /* After emitting to the outer-level insn chain, update the outer-level
3347 insn chain, and restore the previous saved state. */
3350 pop_topmost_sequence ()
3352 struct sequence_stack *stack, *top = NULL;
3354 for (stack = seq_stack; stack; stack = stack->next)
3357 top->first = first_insn;
3358 top->last = last_insn;
3359 /* ??? Why don't we save seq_rtl_expr here? */
3364 /* After emitting to a sequence, restore previous saved state.
3366 To get the contents of the sequence just made, you must call
3367 `gen_sequence' *before* calling here.
3369 If the compiler might have deferred popping arguments while
3370 generating this sequence, and this sequence will not be immediately
3371 inserted into the instruction stream, use do_pending_stack_adjust
3372 before calling gen_sequence. That will ensure that the deferred
3373 pops are inserted into this sequence, and not into some random
3374 location in the instruction stream. See INHIBIT_DEFER_POP for more
3375 information about deferred popping of arguments. */
3380 struct sequence_stack *tem = seq_stack;
3382 first_insn = tem->first;
3383 last_insn = tem->last;
3384 seq_rtl_expr = tem->sequence_rtl_expr;
3385 seq_stack = tem->next;
3390 /* Return 1 if currently emitting into a sequence. */
3395 return seq_stack != 0;
3398 /* Generate a SEQUENCE rtx containing the insns already emitted
3399 to the current sequence.
3401 This is how the gen_... function from a DEFINE_EXPAND
3402 constructs the SEQUENCE that it returns. */
3412 /* Count the insns in the chain. */
3414 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3417 /* If only one insn, return it rather than a SEQUENCE.
3418 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3419 the case of an empty list.)
3420 We only return the pattern of an insn if its code is INSN and it
3421 has no notes. This ensures that no information gets lost. */
3423 && ! RTX_FRAME_RELATED_P (first_insn)
3424 && GET_CODE (first_insn) == INSN
3425 /* Don't throw away any reg notes. */
3426 && REG_NOTES (first_insn) == 0)
3430 NEXT_INSN (first_insn) = free_insn;
3431 free_insn = first_insn;
3433 return PATTERN (first_insn);
3436 /* Put them in a vector. See if we already have a SEQUENCE of the
3437 appropriate length around. */
3438 if (!ggc_p && len < SEQUENCE_RESULT_SIZE
3439 && (result = sequence_result[len]) != 0)
3440 sequence_result[len] = 0;
3443 /* Ensure that this rtl goes in saveable_obstack, since we may
3445 push_obstacks_nochange ();
3446 rtl_in_saveable_obstack ();
3447 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
3451 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3452 XVECEXP (result, 0, i) = tem;
3457 /* Put the various virtual registers into REGNO_REG_RTX. */
3460 init_virtual_regs (es)
3461 struct emit_status *es;
3463 rtx *ptr = es->x_regno_reg_rtx;
3464 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3465 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3466 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3467 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3468 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
3472 clear_emit_caches ()
3476 /* Clear the start_sequence/gen_sequence cache. */
3477 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3478 sequence_result[i] = 0;
3482 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3483 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3484 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3485 static int copy_insn_n_scratches;
3487 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3488 copied an ASM_OPERANDS.
3489 In that case, it is the original input-operand vector. */
3490 static rtvec orig_asm_operands_vector;
3492 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3493 copied an ASM_OPERANDS.
3494 In that case, it is the copied input-operand vector. */
3495 static rtvec copy_asm_operands_vector;
3497 /* Likewise for the constraints vector. */
3498 static rtvec orig_asm_constraints_vector;
3499 static rtvec copy_asm_constraints_vector;
3501 /* Recursively create a new copy of an rtx for copy_insn.
3502 This function differs from copy_rtx in that it handles SCRATCHes and
3503 ASM_OPERANDs properly.
3504 Normally, this function is not used directly; use copy_insn as front end.
3505 However, you could first copy an insn pattern with copy_insn and then use
3506 this function afterwards to properly copy any REG_NOTEs containing
3515 register RTX_CODE code;
3516 register const char *format_ptr;
3518 code = GET_CODE (orig);
3534 for (i = 0; i < copy_insn_n_scratches; i++)
3535 if (copy_insn_scratch_in[i] == orig)
3536 return copy_insn_scratch_out[i];
3540 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3541 a LABEL_REF, it isn't sharable. */
3542 if (GET_CODE (XEXP (orig, 0)) == PLUS
3543 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3544 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3548 /* A MEM with a constant address is not sharable. The problem is that
3549 the constant address may need to be reloaded. If the mem is shared,
3550 then reloading one copy of this mem will cause all copies to appear
3551 to have been reloaded. */
3557 copy = rtx_alloc (code);
3559 /* Copy the various flags, and other information. We assume that
3560 all fields need copying, and then clear the fields that should
3561 not be copied. That is the sensible default behavior, and forces
3562 us to explicitly document why we are *not* copying a flag. */
3563 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
3565 /* We do not copy the USED flag, which is used as a mark bit during
3566 walks over the RTL. */
3569 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3570 if (GET_RTX_CLASS (code) == 'i')
3574 copy->frame_related = 0;
3577 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3579 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3581 copy->fld[i] = orig->fld[i];
3582 switch (*format_ptr++)
3585 if (XEXP (orig, i) != NULL)
3586 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
3591 if (XVEC (orig, i) == orig_asm_constraints_vector)
3592 XVEC (copy, i) = copy_asm_constraints_vector;
3593 else if (XVEC (orig, i) == orig_asm_operands_vector)
3594 XVEC (copy, i) = copy_asm_operands_vector;
3595 else if (XVEC (orig, i) != NULL)
3597 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3598 for (j = 0; j < XVECLEN (copy, i); j++)
3599 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
3605 bitmap new_bits = BITMAP_OBSTACK_ALLOC (rtl_obstack);
3606 bitmap_copy (new_bits, XBITMAP (orig, i));
3607 XBITMAP (copy, i) = new_bits;
3618 /* These are left unchanged. */
3626 if (code == SCRATCH)
3628 i = copy_insn_n_scratches++;
3629 if (i >= MAX_RECOG_OPERANDS)
3631 copy_insn_scratch_in[i] = orig;
3632 copy_insn_scratch_out[i] = copy;
3634 else if (code == ASM_OPERANDS)
3636 orig_asm_operands_vector = XVEC (orig, 3);
3637 copy_asm_operands_vector = XVEC (copy, 3);
3638 orig_asm_constraints_vector = XVEC (orig, 4);
3639 copy_asm_constraints_vector = XVEC (copy, 4);
3645 /* Create a new copy of an rtx.
3646 This function differs from copy_rtx in that it handles SCRATCHes and
3647 ASM_OPERANDs properly.
3648 INSN doesn't really have to be a full INSN; it could be just the
3654 copy_insn_n_scratches = 0;
3655 orig_asm_operands_vector = 0;
3656 orig_asm_constraints_vector = 0;
3657 copy_asm_operands_vector = 0;
3658 copy_asm_constraints_vector = 0;
3659 return copy_insn_1 (insn);
/* NOTE(review): this extract embeds original line numbers at the start of
   each line, and gaps in that numbering show that lines were elided
   (e.g. the function header, several assignments, and the second argument
   of the regno_reg_rtx xcalloc).  Recover the full text from version
   control before building.  */
3662 /* Initialize data structures and variables in this file
3663 before generating rtl for each function. */
3668 struct function *f = cfun;
3670 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
3673 seq_rtl_expr = NULL;
3675 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3678 first_label_num = label_num;
3682 clear_emit_caches ();
3684 /* Init the tables that describe all the pseudo regs. */
3686 f->emit->regno_pointer_flag_length = LAST_VIRTUAL_REGISTER + 101;
3688 f->emit->regno_pointer_flag
3689 = (char *) xcalloc (f->emit->regno_pointer_flag_length, sizeof (char));
3691 f->emit->regno_pointer_align
3692 = (char *) xcalloc (f->emit->regno_pointer_flag_length,
/* NOTE(review): the next line multiplies the length by sizeof (rtx) inside
   an xcalloc whose second argument is elided here — if that argument is
   sizeof (rtx), this over-allocates; confirm against the original.  */
3696 = (rtx *) xcalloc (f->emit->regno_pointer_flag_length * sizeof (rtx),
3699 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3700 init_virtual_regs (f->emit);
3702 /* Indicate that the virtual registers and stack locations are
3704 REGNO_POINTER_FLAG (STACK_POINTER_REGNUM) = 1;
3705 REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM) = 1;
3706 REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM) = 1;
3707 REGNO_POINTER_FLAG (ARG_POINTER_REGNUM) = 1;
3709 REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM) = 1;
3710 REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM) = 1;
3711 REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM) = 1;
3712 REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM) = 1;
3713 REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM) = 1;
/* Record the natural alignment of the pointer-holding registers.  */
3715 #ifdef STACK_BOUNDARY
3716 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3717 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3718 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)
3719 = STACK_BOUNDARY / BITS_PER_UNIT;
3720 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3722 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM)
3723 = STACK_BOUNDARY / BITS_PER_UNIT;
3724 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
3725 = STACK_BOUNDARY / BITS_PER_UNIT;
3726 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM)
3727 = STACK_BOUNDARY / BITS_PER_UNIT;
3728 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM)
3729 = STACK_BOUNDARY / BITS_PER_UNIT;
3730 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = UNITS_PER_WORD;
3733 #ifdef INIT_EXPANDERS
3738 /* Mark SS for GC. */
3741 mark_sequence_stack (ss)
3742 struct sequence_stack *ss;
3746 ggc_mark_rtx (ss->first);
3747 ggc_mark_tree (ss->sequence_rtl_expr);
3752 /* Mark ES for GC. */
3755 mark_emit_status (es)
3756 struct emit_status *es;
3764 for (i = es->regno_pointer_flag_length, r = es->x_regno_reg_rtx;
3768 mark_sequence_stack (es->sequence_stack);
3769 ggc_mark_tree (es->sequence_rtl_expr);
3770 ggc_mark_rtx (es->x_first_insn);
/* NOTE(review): this extract embeds original line numbers at the start of
   each line, and gaps in that numbering show that lines (function header,
   braces, several #else/#endif lines, some statements) were elided.
   Recover the full text from version control before building.  */
3773 /* Create some permanent unique rtl objects shared between all functions.
3774 LINE_NUMBERS is nonzero if line numbers are to be generated. */
3777 init_emit_once (line_numbers)
3781 enum machine_mode mode;
3782 enum machine_mode double_mode;
3784 no_line_numbers = ! line_numbers;
3786 /* Compute the word and byte modes. */
3788 byte_mode = VOIDmode;
3789 word_mode = VOIDmode;
3790 double_mode = VOIDmode;
/* Find the integer modes matching BITS_PER_UNIT and BITS_PER_WORD.  */
3792 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3793 mode = GET_MODE_WIDER_MODE (mode))
3795 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
3796 && byte_mode == VOIDmode)
3799 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
3800 && word_mode == VOIDmode)
3804 #ifndef DOUBLE_TYPE_SIZE
3805 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
/* Find the float mode matching DOUBLE_TYPE_SIZE.  */
3808 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3809 mode = GET_MODE_WIDER_MODE (mode))
3811 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
3812 && double_mode == VOIDmode)
3816 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
3818 /* Assign register numbers to the globally defined register rtx.
3819 This must be done at runtime because the register number field
3820 is in a union and some compilers can't initialize unions. */
3822 pc_rtx = gen_rtx (PC, VOIDmode);
3823 cc0_rtx = gen_rtx (CC0, VOIDmode);
3824 stack_pointer_rtx = gen_rtx_raw_REG (Pmode, STACK_POINTER_REGNUM);
3825 frame_pointer_rtx = gen_rtx_raw_REG (Pmode, FRAME_POINTER_REGNUM);
3826 if (hard_frame_pointer_rtx == 0)
3827 hard_frame_pointer_rtx = gen_rtx_raw_REG (Pmode,
3828 HARD_FRAME_POINTER_REGNUM);
3829 if (arg_pointer_rtx == 0)
3830 arg_pointer_rtx = gen_rtx_raw_REG (Pmode, ARG_POINTER_REGNUM);
3831 virtual_incoming_args_rtx =
3832 gen_rtx_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
3833 virtual_stack_vars_rtx =
3834 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
3835 virtual_stack_dynamic_rtx =
3836 gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
3837 virtual_outgoing_args_rtx =
3838 gen_rtx_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
3839 virtual_cfa_rtx = gen_rtx_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
3841 /* These rtx must be roots if GC is enabled. */
3843 ggc_add_rtx_root (global_rtl, GR_MAX);
3845 #ifdef INIT_EXPANDERS
3846 /* This is to initialize save_machine_status and restore_machine_status before
3847 the first call to push_function_context_to. This is needed by the Chill
3848 front end which calls push_function_context_to before the first call to
3849 init_function_start. */
3853 /* Create the unique rtx's for certain rtx codes and operand values. */
3855 /* Don't use gen_rtx here since gen_rtx in this case
3856 tries to use these variables. */
3857 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
3858 const_int_rtx[i + MAX_SAVED_CONST_INT] =
3859 gen_rtx_raw_CONST_INT (VOIDmode, i)/* ; elided line follows */;
3861 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
/* Reuse a cached CONST_INT for const_true_rtx when STORE_FLAG_VALUE
   falls inside the cached range.  */
3863 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
3864 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
3865 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
3867 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
3869 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
3870 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
3871 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
3872 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
/* Build the shared CONST_DOUBLEs for 0, 1 and 2 in every float mode.  */
3874 for (i = 0; i <= 2; i++)
3876 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3877 mode = GET_MODE_WIDER_MODE (mode))
3879 rtx tem = rtx_alloc (CONST_DOUBLE);
3880 union real_extract u;
3882 bzero ((char *) &u, sizeof u); /* Zero any holes in a structure. */
3883 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
3885 bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (tem), sizeof u);
3886 CONST_DOUBLE_MEM (tem) = cc0_rtx;
3887 PUT_MODE (tem, mode);
3889 const_tiny_rtx[i][(int) mode] = tem;
3892 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
3894 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3895 mode = GET_MODE_WIDER_MODE (mode))
3896 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3898 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
3900 mode = GET_MODE_WIDER_MODE (mode))
3901 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3904 for (mode = CCmode; mode < MAX_MACHINE_MODE; ++mode)
3905 if (GET_MODE_CLASS (mode) == MODE_CC)
3906 const_tiny_rtx[0][(int) mode] = const0_rtx;
3908 ggc_add_rtx_root (&const_tiny_rtx[0][0], sizeof(const_tiny_rtx)/sizeof(rtx));
3909 ggc_add_rtx_root (&const_true_rtx, 1);
3911 #ifdef RETURN_ADDRESS_POINTER_REGNUM
3912 return_address_pointer_rtx
3913 = gen_rtx_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
/* Target-configurable special registers; each #ifdef chain picks the
   definition the target provides.  */
3917 struct_value_rtx = STRUCT_VALUE;
3919 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
3922 #ifdef STRUCT_VALUE_INCOMING
3923 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
3925 #ifdef STRUCT_VALUE_INCOMING_REGNUM
3926 struct_value_incoming_rtx
3927 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
3929 struct_value_incoming_rtx = struct_value_rtx;
3933 #ifdef STATIC_CHAIN_REGNUM
3934 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
3936 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3937 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
3938 static_chain_incoming_rtx
3939 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
3942 static_chain_incoming_rtx = static_chain_rtx;
3946 static_chain_rtx = STATIC_CHAIN;
3948 #ifdef STATIC_CHAIN_INCOMING
3949 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
3951 static_chain_incoming_rtx = static_chain_rtx;
3955 #ifdef PIC_OFFSET_TABLE_REGNUM
3956 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
/* Register the special-register globals as GC roots.  */
3959 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
3960 ggc_add_rtx_root (&struct_value_rtx, 1);
3961 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
3962 ggc_add_rtx_root (&static_chain_rtx, 1);
3963 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
3964 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
3967 /* Query and clear/ restore no_line_numbers. This is used by the
3968 switch / case handling in stmt.c to give proper line numbers in
3969 warnings about unreachable code. */
3972 force_line_numbers ()
3974 int old = no_line_numbers;
3976 no_line_numbers = 0;
3978 force_next_line_note ();
3983 restore_line_number_status (old_value)
3986 no_line_numbers = old_value;