1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-config.h"
29 #include "insn-flags.h"
32 #include "integrate.h"
37 #define obstack_chunk_alloc xmalloc
38 #define obstack_chunk_free free
39 extern int xmalloc ();
42 extern struct obstack *function_maybepermanent_obstack;
44 extern tree pushdecl ();
45 extern tree poplevel ();
47 /* Similar, but round to the next highest integer that meets the
   NOTE(review): this comment is truncated in this excerpt (the line
   that presumably said "alignment." is missing; numbering skips 48).  */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 #define INTEGRATE_THRESHOLD(DECL) \
55 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
/* NOTE(review): the matching `#endif' (internal line 56) is missing from
   this excerpt; restore it when reconstructing the file.  */
58 /* Save any constant pool constants in an insn. */
59 static void save_constants ();
61 /* Note when parameter registers are the destination of a SET. */
62 static void note_modified_parmregs ();
64 /* Copy an rtx for save_for_inline_copying. */
65 static rtx copy_for_inline ();
67 /* Make copies of MEMs in DECL_RTLs. */
68 static void copy_decl_rtls ();
/* Copy the BLOCK tree (and the decls in it) for save_for_inline_copying. */
70 static tree copy_decl_tree ();
72 /* Return the constant equivalent of a given rtx, or 0 if none. */
73 static rtx const_equiv ();
/* Helpers for expand_inline_function; bodies not visible in this excerpt. */
75 static void integrate_parm_decls ();
76 static void integrate_decl_tree ();
78 static void subst_constants ();
79 static rtx fold_out_const_cc0 ();
81 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
82 is safe and reasonable to integrate into other functions.
83 Nonzero means value is a warning message with a single %s
84 for the function's name. */
/* NOTE(review): the internal numbering of this excerpt is discontinuous;
   the function's return type, the `tree fndecl;' parameter declaration,
   the `tree parms;' / `rtx insn;' locals and several braces appear to be
   missing from the visible text.  Only comments were added below; the
   surviving code is untouched.  */
87 function_cannot_inline_p (fndecl)
91 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
92 int max_insns = INTEGRATE_THRESHOLD (fndecl);
93 register int ninsns = 0;
96 /* No inlines with varargs. `grokdeclarator' gives a warning
97 message about that if `inline' is specified. This code
98 is put in to catch the volunteers. */
99 if ((last && TREE_VALUE (last) != void_type_node)
100 || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
101 && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
102 "__builtin_va_alist")))
103 return "varargs function cannot be inline";
105 if (current_function_calls_alloca)
106 return "function using alloca cannot be inline";
108 if (current_function_contains_functions)
109 return "function with nested functions cannot be inline";
111 /* This restriction may be eliminated sometime soon. But for now, don't
112 worry about remapping the static chain. */
113 if (current_function_needs_context)
114 return "nested function cannot be inline";
116 /* If it's not even close, don't even look. */
117 if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
118 return "function too large to be inline";
121 /* Large stacks are OK now that inlined functions can share them. */
122 /* Don't inline functions with large stack usage,
123 since they can make other recursive functions burn up stack. */
124 if (!TREE_INLINE (fndecl) && get_frame_size () > 100)
125 return "function stack frame for inlining";
129 /* Don't inline functions which do not specify a function prototype and
130 have BLKmode argument or take the address of a parameter. */
131 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
133 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
134 TREE_ADDRESSABLE (parms) = 1;
/* `last == NULL_TREE' means no prototype was supplied (no TYPE_ARG_TYPES).  */
135 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
136 return "no prototype, and parameter address used; cannot be inline";
140 /* We can't inline functions that return structures
141 the old-fashioned PCC way, copying into a static block. */
142 if (current_function_returns_pcc_struct)
143 return "inline functions not supported for this return value type";
145 /* We can't inline functions that return structures of varying size. */
146 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
147 return "function with varying-size return value cannot be inline";
149 /* Cannot inline a function with a varying size argument. */
150 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
151 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
152 return "function with varying-size parameter cannot be inline";
/* Count the real insns; give up once the threshold is exceeded.  */
154 if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns)
156 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
157 insn = NEXT_INSN (insn))
159 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
163 if (ninsns >= max_insns)
164 return "function too large to be inline";
167 /* We cannot inline this function if forced_labels is non-zero. This
168 implies that a label in this function was used as an initializer.
169 Because labels can not be duplicated, all labels in the function
170 will be renamed when it is inlined. However, there is no way to find
171 and fix all variables initialized with addresses of labels in this
172 function, hence inlining is impossible. */
175 return "function with label addresses used in initializers cannot inline";
180 /* Variables used within save_for_inline. */
182 /* Mapping from old pseudo-register to new pseudo-registers.
183 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
184 It is allocated in `save_for_inline' and `expand_inline_function',
185 and deallocated on exit from each of those routines. */
/* NOTE(review): the `static rtx *reg_map;' declaration described by the
   comment above is missing from this excerpt (numbering skips 186-187);
   `reg_map' is used below in save_for_inline_copying and copy_for_inline.  */
188 /* Mapping from old code-labels to new code-labels.
189 The first element of this map is label_map[min_labelno].
190 It is allocated in `save_for_inline' and `expand_inline_function',
191 and deallocated on exit from each of those routines. */
192 static rtx *label_map;
194 /* Mapping from old insn uid's to copied insns.
195 It is allocated in `save_for_inline' and `expand_inline_function',
196 and deallocated on exit from each of those routines. */
197 static rtx *insn_map;
199 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
200 Zero for a reg that isn't a parm's home.
201 Only reg numbers less than max_parm_reg are mapped here. */
202 static tree *parmdecl_map;
204 /* Keep track of first pseudo-register beyond those that are parms. */
205 static int max_parm_reg;
207 /* When an insn is being copied by copy_for_inline,
208 this is nonzero if we have copied an ASM_OPERANDS.
209 In that case, it is the original input-operand vector. */
210 static rtvec orig_asm_operands_vector;
212 /* When an insn is being copied by copy_for_inline,
213 this is nonzero if we have copied an ASM_OPERANDS.
214 In that case, it is the copied input-operand vector. */
215 static rtvec copy_asm_operands_vector;
217 /* Likewise, this is the copied constraints vector. */
218 static rtvec copy_asm_constraints_vector;
220 /* In save_for_inline, nonzero if past the parm-initialization insns. */
221 static int in_nonparm_insns;
223 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
224 needed to save FNDECL's insns and info for future inline expansion. */
/* NOTE(review): this excerpt is missing the function's return type, the
   parameter declarations, the `tree parms;' / `rtvec arg_vector;' locals,
   the `function_flags' left-hand side of the flag computation (internal
   line 240), and several braces - the numbering is discontinuous.  Only
   comments were added; surviving code is untouched.  */
227 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
234 int function_flags, i;
238 /* Compute the values of any flags we must restore when inlining this. */
/* Each flag is 0 or 1, so the products pack all of them into one int
   which is later stored in the inline header via gen_inline_header_rtx.  */
241 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
242 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
243 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
244 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
245 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
246 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
247 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
248 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
249 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
250 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
252 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
253 bzero (parmdecl_map, max_parm_reg * sizeof (tree));
254 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
/* Record each parm's DECL_RTL in ARG_VECTOR and note which pseudo-regs
   are parm homes in PARMDECL_MAP.  */
256 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
258 parms = TREE_CHAIN (parms), i++)
260 rtx p = DECL_RTL (parms);
262 if (GET_CODE (p) == MEM && copy)
264 /* Copy the rtl so that modifications of the addresses
265 later in compilation won't affect this arg_vector.
266 Virtual register instantiation can screw the address
268 rtx new = copy_rtx (p);
270 /* Don't leave the old copy anywhere in this decl. */
271 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
272 || (GET_CODE (DECL_RTL (parms)) == MEM
273 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
274 && (XEXP (DECL_RTL (parms), 0)
275 == XEXP (DECL_INCOMING_RTL (parms), 0))))
276 DECL_INCOMING_RTL (parms) = new;
277 DECL_RTL (parms) = new;
280 RTVEC_ELT (arg_vector, i) = p;
282 if (GET_CODE (p) == REG)
283 parmdecl_map[REGNO (p)] = parms;
284 /* This flag is cleared later
285 if the function ever modifies the value of the parm. */
286 TREE_READONLY (parms) = 1;
289 /* Assume we start out in the insns that set up the parameters. */
290 in_nonparm_insns = 0;
292 /* The list of DECL_SAVED_INSNS, starts off with a header which
293 contains the following information:
295 the first insn of the function (not including the insns that copy
296 parameters into registers).
297 the first parameter insn of the function,
298 the first label used by that function,
299 the last label used by that function,
300 the highest register number used for parameters,
301 the total number of registers used,
302 the size of the incoming stack area for parameters,
303 the number of bytes popped on return,
305 some flags that are used to restore compiler globals,
306 the value of current_function_outgoing_args_size,
307 the original argument vector,
308 and the original DECL_INITIAL. */
310 return gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
311 max_parm_reg, max_reg,
312 current_function_args_size,
313 current_function_pops_args,
314 stack_slot_list, function_flags,
315 current_function_outgoing_args_size,
316 arg_vector, (rtx) DECL_INITIAL (fndecl));
319 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
320 things that must be done to make FNDECL expandable as an inline function.
321 HEAD contains the chain of insns to which FNDECL will expand. */
/* NOTE(review): the function's return type, parameter declarations and
   braces are missing from this excerpt (numbering skips 322-327).  */
324 finish_inline (fndecl, head)
/* Link the header to the first real (non-parameter-setup) insn, and
   record where the parameter-setup insns begin.  */
328 NEXT_INSN (head) = get_first_nonparm_insn ();
329 FIRST_PARM_INSN (head) = get_insns ();
330 DECL_SAVED_INSNS (fndecl) = head;
331 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
/* Mark FNDECL as inlinable so expand_inline_function will consider it.  */
332 TREE_INLINE (fndecl) = 1;
335 /* Make the insns and PARM_DECLs of the current function permanent
336 and record other information in DECL_SAVED_INSNS to allow inlining
337 of this function in subsequent calls.
339 This function is called when we are going to immediately compile
340 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
341 modified by the compilation process, so we copy all of them to
342 new storage and consider the new insns to be the insn chain to be
/* NOTE(review): this excerpt's numbering is discontinuous; the function's
   return type, parameter declaration, several local declarations
   (`head', `copy', `reg_map', `max_uid', `max_reg'), many braces, and a
   number of statements are missing from the visible text.  Only comments
   were added; surviving code is untouched.  */
346 save_for_inline_copying (fndecl)
349 rtx first_insn, last_insn, insn;
351 int max_labelno, min_labelno, i, len;
354 rtx first_nonparm_insn;
356 /* Make and emit a return-label if we have not already done so.
357 Do this before recording the bounds on label numbers. */
359 if (return_label == 0)
361 return_label = gen_label_rtx ();
362 emit_label (return_label);
365 /* Get some bounds on the labels and registers used. */
367 max_labelno = max_label_num ();
368 min_labelno = get_first_label_num ();
369 max_reg = max_reg_num ();
371 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
372 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
373 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
374 for the parms, prior to elimination of virtual registers.
375 These values are needed for substituting parms properly. */
377 max_parm_reg = max_parm_reg_num ();
378 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
380 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
382 if (current_function_uses_const_pool)
384 /* Replace any constant pool references with the actual constant. We
385 will put the constants back in the copy made below. */
386 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
387 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
389 save_constants (&PATTERN (insn));
390 if (REG_NOTES (insn))
/* NOTE(review): `®_NOTES' below is mojibake for `&REG_NOTES' (the HTML
   entity `&reg;' was substituted during extraction) - fix when restoring.  */
391 save_constants (®_NOTES (insn));
394 /* Clear out the constant pool so that we can recreate it with the
395 copied constants below. */
396 init_const_rtx_hash_table ();
397 clear_const_double_mem ();
400 max_uid = INSN_UID (head);
402 /* We have now allocated all that needs to be allocated permanently
403 on the rtx obstack. Set our high-water mark, so that we
404 can free the rest of this when the time comes. */
408 /* Copy the chain insns of this function.
409 Install the copied chain as the insns of this function,
410 for continued compilation;
411 the original chain is recorded as the DECL_SAVED_INSNS
412 for inlining future calls. */
414 /* If there are insns that copy parms from the stack into pseudo registers,
415 those insns are not copied. `expand_inline_function' must
416 emit the correct code to handle such things. */
419 if (GET_CODE (insn) != NOTE)
/* Start the copied chain with a NOTE cloned from the first insn.  */
421 first_insn = rtx_alloc (NOTE);
422 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
423 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
424 INSN_UID (first_insn) = INSN_UID (insn);
425 PREV_INSN (first_insn) = NULL;
426 NEXT_INSN (first_insn) = NULL;
427 last_insn = first_insn;
429 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
430 Make these new rtx's now, and install them in regno_reg_rtx, so they
431 will be the official pseudo-reg rtx's for the rest of compilation. */
433 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
435 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
436 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
437 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
438 regno_reg_rtx[i], len);
440 bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
441 regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
442 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
444 /* Likewise each label rtx must have a unique rtx as its copy. */
446 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
/* Bias the map so it can be indexed directly by label number.  */
447 label_map -= min_labelno;
449 for (i = min_labelno; i < max_labelno; i++)
450 label_map[i] = gen_label_rtx ();
452 /* Record the mapping of old insns to copied insns. */
454 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
455 bzero (insn_map, max_uid * sizeof (rtx));
457 /* Get the insn which signals the end of parameter setup code. */
458 first_nonparm_insn = get_first_nonparm_insn ();
460 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
461 (the former occurs when a variable has its address taken)
462 since these may be shared and can be changed by virtual
463 register instantiation. DECL_RTL values for our arguments
464 have already been copied by initialize_for_inline. */
465 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
466 if (GET_CODE (regno_reg_rtx[i]) == MEM)
467 XEXP (regno_reg_rtx[i], 0)
468 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
470 /* Copy the tree of subblocks of the function, and the decls in them.
471 We will use the copy for compiling this function, then restore the original
472 subblocks and decls for use when inlining this function.
474 Several parts of the compiler modify BLOCK trees. In particular,
475 instantiate_virtual_regs will instantiate any virtual regs
476 mentioned in the DECL_RTLs of the decls, and loop
477 unrolling will replicate any BLOCK trees inside an unrolled loop.
479 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
480 which we will use for inlining. The rtl might even contain pseudoregs
481 whose space has been freed. */
483 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
485 /* Now copy each DECL_RTL which is a MEM,
486 so it is safe to modify their addresses. */
487 copy_decl_rtls (DECL_INITIAL (fndecl));
489 /* Now copy the chain of insns. Do this twice. The first time copies the
490 insn itself and its body. The second time copies the REG_NOTES. This is
491 because a REG_NOTE may have a forward pointer to another insn. */
493 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
495 orig_asm_operands_vector = 0;
497 if (insn == first_nonparm_insn)
498 in_nonparm_insns = 1;
/* NOTE(review): the `case' labels of this switch (NOTE, INSN/JUMP_INSN/
   CALL_INSN, CODE_LABEL, BARRIER) are missing from this excerpt.  */
500 switch (GET_CODE (insn))
503 /* No need to keep these. */
504 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
507 copy = rtx_alloc (NOTE);
508 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
509 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
515 copy = rtx_alloc (GET_CODE (insn));
516 PATTERN (copy) = copy_for_inline (PATTERN (insn));
517 INSN_CODE (copy) = -1;
518 LOG_LINKS (copy) = NULL;
519 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
523 copy = label_map[CODE_LABEL_NUMBER (insn)];
524 LABEL_NAME (copy) = LABEL_NAME (insn);
528 copy = rtx_alloc (BARRIER);
/* Thread the copy onto the new chain and record it in insn_map.  */
534 INSN_UID (copy) = INSN_UID (insn);
535 insn_map[INSN_UID (insn)] = copy;
536 NEXT_INSN (last_insn) = copy;
537 PREV_INSN (copy) = last_insn;
541 /* Now copy the REG_NOTES. */
542 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
543 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
544 && insn_map[INSN_UID(insn)])
545 REG_NOTES (insn_map[INSN_UID (insn)])
546 = copy_for_inline (REG_NOTES (insn));
548 NEXT_INSN (last_insn) = NULL;
550 finish_inline (fndecl, head);
552 set_new_first_and_last_insn (first_insn, last_insn);
555 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
/* NOTE(review): this excerpt is missing the return type, parameter
   declaration, braces, the `subblocks' initialization, the statement
   that links each copy into `subblocks', and the final `return t;'
   (numbering is discontinuous).  */
558 copy_decl_tree (block)
561 tree t, vars, subblocks;
563 vars = copy_list (BLOCK_VARS (block));
566 /* Process all subblocks. */
/* Recursively copy each subblock; copies are accumulated in reverse and
   put back in order by nreverse below.  */
567 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
569 tree copy = copy_decl_tree (t);
570 TREE_CHAIN (copy) = subblocks;
574 t = copy_node (block);
575 BLOCK_VARS (t) = vars;
576 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
580 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
/* NOTE(review): the return type, parameter declaration, `tree t;' local,
   braces, and the recursive `copy_decl_rtls (t);' call in the subblock
   loop are missing from this excerpt (numbering is discontinuous).  */
583 copy_decl_rtls (block)
/* Unshare each MEM DECL_RTL so later address modifications are safe.  */
588 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
589 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
590 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
592 /* Process all subblocks. */
593 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
597 /* Make the insns and PARM_DECLs of the current function permanent
598 and record other information in DECL_SAVED_INSNS to allow inlining
599 of this function in subsequent calls.
601 This routine need not copy any insns because we are not going
602 to immediately compile the insns in the insn chain. There
603 are two cases when we would compile the insns for FNDECL:
604 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
605 be output at the end of other compilation, because somebody took
606 its address. In the first case, the insns of FNDECL are copied
607 as it is expanded inline, so FNDECL's saved insns are not
608 modified. In the second case, FNDECL is used for the last time,
609 so modifying the rtl is not a problem.
611 ??? Actually, we do not verify that FNDECL is not inline expanded
612 by other functions which must also be written down at the end
613 of compilation. We could set flag_no_inline to nonzero when
614 the time comes to write down such functions. */
/* NOTE(review): the function's return type, parameter declaration,
   several locals (`insn', `head', `max_reg'), braces and some statements
   are missing from this excerpt (numbering is discontinuous).  */
617 save_for_inline_nocopy (fndecl)
623 int max_labelno, min_labelno, i, len;
626 rtx first_nonparm_insn;
629 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
630 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
631 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
632 for the parms, prior to elimination of virtual registers.
633 These values are needed for substituting parms properly. */
635 max_parm_reg = max_parm_reg_num ();
636 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
638 /* Make and emit a return-label if we have not already done so. */
640 if (return_label == 0)
642 return_label = gen_label_rtx ();
643 emit_label (return_label);
/* Last argument 0: do not copy the parm DECL_RTLs (contrast with the
   call in save_for_inline_copying, which passes 1).  */
646 head = initialize_for_inline (fndecl, get_first_label_num (),
647 max_label_num (), max_reg_num (), 0);
649 /* If there are insns that copy parms from the stack into pseudo registers,
650 those insns are not copied. `expand_inline_function' must
651 emit the correct code to handle such things. */
654 if (GET_CODE (insn) != NOTE)
657 /* Get the insn which signals the end of parameter setup code. */
658 first_nonparm_insn = get_first_nonparm_insn ();
660 /* Now just scan the chain of insns to see what happens to our
661 PARM_DECLs. If a PARM_DECL is used but never modified, we
662 can substitute its rtl directly when expanding inline (and
663 perform constant folding when its incoming value is constant).
664 Otherwise, we have to copy its value into a new register and track
665 the new register's life. */
667 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
669 if (insn == first_nonparm_insn)
670 in_nonparm_insns = 1;
672 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
674 if (current_function_uses_const_pool)
676 /* Replace any constant pool references with the actual constant.
677 We will put the constant back if we need to write the
678 function out after all. */
679 save_constants (&PATTERN (insn));
680 if (REG_NOTES (insn))
/* NOTE(review): `®_NOTES' below is mojibake for `&REG_NOTES' (the HTML
   entity `&reg;' was substituted during extraction) - fix when restoring.  */
681 save_constants (®_NOTES (insn));
684 /* Record what interesting things happen to our parameters. */
685 note_stores (PATTERN (insn), note_modified_parmregs);
689 /* We have now allocated all that needs to be allocated permanently
690 on the rtx obstack. Set our high-water mark, so that we
691 can free the rest of this when the time comes. */
695 finish_inline (fndecl, head);
698 /* Given PX, a pointer into an insn, search for references to the constant
699 pool. Replace each with a CONST that has the mode of the original
700 constant, contains the constant, and has RTX_INTEGRATED_P set.
701 Similarly, constant pool addresses not enclosed in a MEM are replaced
702 with an ADDRESS rtx which also gives the constant, mode, and has
703 RTX_INTEGRATED_P set. */
/* NOTE(review): the function header (`static void save_constants (px)',
   parameter declaration, the `rtx x = *px;' setup, local `i'/`j'
   declarations, braces and some statements) is missing from this excerpt
   - numbering skips from 703 to 715.  Only comments were added.  */
715 /* If this is a CONST_DOUBLE, don't try to fix things up in
716 CONST_DOUBLE_MEM, because this is an infinite recursion. */
717 if (GET_CODE (x) == CONST_DOUBLE)
719 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
720 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
/* A MEM of a constant-pool address: replace with a CONST carrying the
   pool constant itself, marked RTX_INTEGRATED_P.  */
722 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
723 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
724 RTX_INTEGRATED_P (new) = 1;
726 /* If the MEM was in a different mode than the constant (perhaps we
727 were only looking at the low-order part), surround it with a
728 SUBREG so we can save both modes. */
730 if (GET_MODE (x) != const_mode)
732 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
733 RTX_INTEGRATED_P (new) = 1;
737 save_constants (&XEXP (*px, 0));
739 else if (GET_CODE (x) == SYMBOL_REF
740 && CONSTANT_POOL_ADDRESS_P (x))
/* A bare constant-pool address: replace with an ADDRESS rtx, marked
   RTX_INTEGRATED_P.  */
742 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
743 save_constants (&XEXP (*px, 0));
744 RTX_INTEGRATED_P (*px) = 1;
/* Default case: recurse over every operand of X.  */
749 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
750 int len = GET_RTX_LENGTH (GET_CODE (x));
752 for (i = len-1; i >= 0; i--)
757 for (j = 0; j < XVECLEN (x, i); j++)
758 save_constants (&XVECEXP (x, i, j));
762 if (XEXP (x, i) == 0)
766 /* Hack tail-recursion here. */
770 save_constants (&XEXP (x, i));
777 /* Note whether a parameter is modified or not. */
/* Called through note_stores for each SET in an insn (see the call in
   save_for_inline_nocopy).  REG is the store destination; X is the insn
   pattern part.  NOTE(review): the return type, parameter declarations
   and braces are missing from this excerpt.  */
780 note_modified_parmregs (reg, x)
/* A parm pseudo stored into after the parm-setup insns is no longer
   read-only; clear the flag set in initialize_for_inline.  */
784 if (GET_CODE (reg) == REG && in_nonparm_insns
785 && REGNO (reg) < max_parm_reg
786 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
787 && parmdecl_map[REGNO (reg)] != 0)
788 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
791 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
792 according to `reg_map' and `label_map'. The original rtl insns
793 will be saved for inlining; this is used to make a copy
794 which is used to finish compiling the inline function itself.
796 If we find a "saved" constant pool entry, one which was replaced with
797 the value of the constant, convert it back to a constant pool entry.
798 Since the pool wasn't touched, this should simply restore the old
801 All other kinds of rtx are copied except those that can never be
802 changed during compilation. */
/* NOTE(review): this excerpt's numbering is discontinuous; the return
   type, parameter declaration, some locals, most braces, and - most
   importantly - the `case' labels of the big switch on GET_CODE (x)
   are missing.  From the surviving bodies the cases appear to cover the
   shareable codes, CONST_DOUBLE, CONST, SUBREG, ADDRESS, ASM_OPERANDS,
   MEM, LABEL_REF, REG and SET - but confirm against the original file.
   Only comments were added; surviving code is untouched.  */
805 copy_for_inline (orig)
808 register rtx x = orig;
810 register enum rtx_code code;
811 register char *format_ptr;
818 /* These types may be freely shared. */
830 /* We have to make a new CONST_DOUBLE to ensure that we account for
831 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
832 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
836 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
837 return immed_real_const_1 (d, GET_MODE (x));
840 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
844 /* Get constant pool entry for constant in the pool. */
845 if (RTX_INTEGRATED_P (x))
846 return validize_mem (force_const_mem (GET_MODE (x),
847 copy_for_inline (XEXP (x, 0))));
851 /* Get constant pool entry, but access in different mode. */
852 if (RTX_INTEGRATED_P (x))
855 = force_const_mem (GET_MODE (SUBREG_REG (x)),
856 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
858 PUT_MODE (new, GET_MODE (x));
859 return validize_mem (new);
864 /* If not special for constant pool error. Else get constant pool
866 if (! RTX_INTEGRATED_P (x))
869 return XEXP (force_const_mem (GET_MODE (x),
870 copy_for_inline (XEXP (x, 0))), 0);
873 /* If a single asm insn contains multiple output operands
874 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
875 We must make sure that the copied insn continues to share it. */
876 if (orig_asm_operands_vector == XVEC (orig, 3))
878 x = rtx_alloc (ASM_OPERANDS);
879 XSTR (x, 0) = XSTR (orig, 0);
880 XSTR (x, 1) = XSTR (orig, 1);
881 XINT (x, 2) = XINT (orig, 2);
/* Reuse the shared operand/constraint vectors recorded when the first
   ASM_OPERANDS of this insn was copied (see the bottom of this function).  */
882 XVEC (x, 3) = copy_asm_operands_vector;
883 XVEC (x, 4) = copy_asm_constraints_vector;
884 XSTR (x, 5) = XSTR (orig, 5);
885 XINT (x, 6) = XINT (orig, 6);
891 /* A MEM is usually allowed to be shared if its address is constant
892 or is a constant plus one of the special registers.
894 We do not allow sharing of addresses that are either a special
895 register or the sum of a constant and a special register because
896 it is possible for unshare_all_rtl to copy the address, into memory
897 that won't be saved. Although the MEM can safely be shared, and
898 won't be copied there, the address itself cannot be shared, and may
901 There are also two exceptions with constants: The first is if the
902 constant is a LABEL_REF or the sum of the LABEL_REF
903 and an integer. This case can happen if we have an inline
904 function that supplies a constant operand to the call of another
905 inline function that uses it in a switch statement. In this case,
906 we will be replacing the LABEL_REF, so we have to replace this MEM
909 The second case is if we have a (const (plus (address ..) ...)).
910 In that case we need to put back the address of the constant pool
913 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
914 && GET_CODE (XEXP (x, 0)) != LABEL_REF
915 && ! (GET_CODE (XEXP (x, 0)) == CONST
916 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
917 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
919 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
926 /* Must point to the new insn. */
927 return gen_rtx (LABEL_REF, GET_MODE (orig),
928 label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
932 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
933 return reg_map [REGNO (x)];
938 /* If a parm that gets modified lives in a pseudo-reg,
939 clear its TREE_READONLY to prevent certain optimizations. */
941 rtx dest = SET_DEST (x);
/* Strip wrappers to find the register actually being stored into.  */
943 while (GET_CODE (dest) == STRICT_LOW_PART
944 || GET_CODE (dest) == ZERO_EXTRACT
945 || GET_CODE (dest) == SUBREG)
946 dest = XEXP (dest, 0);
948 if (GET_CODE (dest) == REG
949 && REGNO (dest) < max_parm_reg
950 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
951 && parmdecl_map[REGNO (dest)] != 0
952 /* The insn to load an arg pseudo from a stack slot
953 does not count as modifying it. */
955 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
959 #if 0 /* This is a good idea, but here is the wrong place for it. */
960 /* Arrange that CONST_INTs always appear as the second operand
961 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
962 always appear as the first. */
964 if (GET_CODE (XEXP (x, 0)) == CONST_INT
965 || (XEXP (x, 1) == frame_pointer_rtx
966 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
967 && XEXP (x, 1) == arg_pointer_rtx)))
970 XEXP (x, 0) = XEXP (x, 1);
977 /* Replace this rtx with a copy of itself. */
979 x = rtx_alloc (code);
980 bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
981 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
983 /* Now scan the subexpressions recursively.
984 We can store any replaced subexpressions directly into X
985 since we know X is not shared! Any vectors in X
986 must be copied if X was copied. */
988 format_ptr = GET_RTX_FORMAT (code);
990 for (i = 0; i < GET_RTX_LENGTH (code); i++)
992 switch (*format_ptr++)
995 XEXP (x, i) = copy_for_inline (XEXP (x, i));
999 /* Change any references to old-insns to point to the
1000 corresponding copied insns. */
1001 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1005 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1009 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1010 for (j = 0; j < XVECLEN (x, i); j++)
1012 = copy_for_inline (XVECEXP (x, i, j));
/* Remember the shared operand vectors of the first ASM_OPERANDS copied
   for this insn, so later ASM_OPERANDS in the same insn can share them
   (see the test on orig_asm_operands_vector above).  */
1018 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1020 orig_asm_operands_vector = XVEC (orig, 3);
1021 copy_asm_operands_vector = XVEC (x, 3);
1022 copy_asm_constraints_vector = XVEC (x, 4);
1028 /* Unfortunately, we need a global copy of const_equiv map for communication
1029 with a function called from note_stores. Be *very* careful that this
1030 is used properly in the presence of recursion. */
1032 rtx *global_const_equiv_map;
/* Nonzero when X has the form (plus (reg VIRTUAL) (const_int ...)),
   i.e. a constant offset from one of the virtual frame/arg registers.  */
1034 #define FIXED_BASE_PLUS_P(X) \
1035 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1036 && GET_CODE (XEXP (X, 0)) == REG \
1037 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1038 && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
1040 /* Integrate the procedure defined by FNDECL. Note that this function
1041 may wind up calling itself. Since the static variables are not
1042 reentrant, we do not assign them until after the possibility
1043 of recursion is eliminated.
1045 If IGNORE is nonzero, do not produce a value.
1046 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1049 (rtx)-1 if we could not substitute the function
1050 0 if we substituted it and it does not produce a value
1051 else an rtx for where the value is stored. */
1054 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1059 rtx structure_value_addr;
1061 tree formal, actual;
/* Saved-insn summary built by save_for_inline; the accessor macros
 used below (FIRST_FUNCTION_INSN, FIRST_LABELNO, MAX_REGNUM, ...)
 all read fields of this rtx. */
1062 rtx header = DECL_SAVED_INSNS (fndecl);
1063 rtx insns = FIRST_FUNCTION_INSN (header);
1064 rtx parm_insns = FIRST_PARM_INSN (header);
1070 int min_labelno = FIRST_LABELNO (header);
1071 int max_labelno = LAST_LABELNO (header);
1073 rtx local_return_label = 0;
1076 struct inline_remap *map;
1078 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1080 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1081 max_regno = MAX_REGNUM (header) + 3;
1082 if (max_regno < FIRST_PSEUDO_REGISTER)
1085 nargs = list_length (DECL_ARGUMENTS (fndecl));
1087 /* We expect PARMS to have the right length; don't crash if not. */
1088 if (list_length (parms) != nargs)
1090 /* Also check that the parms type match. Since the appropriate
1091 conversions or default promotions have already been applied,
1092 the machine modes should match exactly. */
1093 for (formal = DECL_ARGUMENTS (fndecl),
1096 formal = TREE_CHAIN (formal),
1097 actual = TREE_CHAIN (actual))
1099 tree arg = TREE_VALUE (actual);
1100 enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1101 if (mode != TYPE_MODE (TREE_TYPE (arg)))
1103 /* If they are block mode, the types should match exactly.
1104 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1105 which could happen if the parameter has incomplete type. */
1106 if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
1110 /* Make a binding contour to keep inline cleanups called at
1111 outer function-scope level from looking like they are shadowing
1112 parameter declarations. */
1115 /* Make a fresh binding contour that we can easily remove. */
1117 expand_start_bindings (0);
/* Carry over the source position of the parameter list, if any. */
1118 if (GET_CODE (parm_insns) == NOTE
1119 && NOTE_LINE_NUMBER (parm_insns) > 0)
1120 emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));
1122 /* Expand the function arguments. Do this first so that any
1123 new registers get created before we allocate the maps. */
1125 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1126 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1128 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1130 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1132 /* Actual parameter, converted to the type of the argument within the
1134 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1135 /* Mode of the variable used within the function. */
1136 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1137 /* Where parameter is located in the function. */
1140 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1143 loc = RTVEC_ELT (arg_vector, i);
1145 /* If this is an object passed by invisible reference, we copy the
1146 object into a stack slot and save its address. If this will go
1147 into memory, we do nothing now. Otherwise, we just expand the
1149 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1150 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1152 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
1153 rtx stack_slot = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1);
1155 store_expr (arg, stack_slot, 0);
1157 arg_vals[i] = XEXP (stack_slot, 0);
1159 else if (GET_CODE (loc) != MEM)
1160 arg_vals[i] = expand_expr (arg, 0, mode, EXPAND_SUM);
1164 if (arg_vals[i] != 0
1165 && (! TREE_READONLY (formal)
1166 /* If the parameter is not read-only, copy our argument through
1167 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1168 TARGET in any way. In the inline function, they will likely
1169 be two different pseudos, and `safe_from_p' will make all
1170 sorts of smart assumptions about their not conflicting.
1171 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1172 wrong, so put ARG_VALS[I] into a fresh register. */
1174 && (GET_CODE (arg_vals[i]) == REG
1175 || GET_CODE (arg_vals[i]) == SUBREG
1176 || GET_CODE (arg_vals[i]) == MEM)
1177 && reg_overlap_mentioned_p (arg_vals[i], target))))
1178 arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]);
1181 /* Allocate the structures we use to remap things. */
1183 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1184 map->fndecl = fndecl;
1186 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1187 bzero (map->reg_map, max_regno * sizeof (rtx));
/* Bias the label map so it can be indexed directly by label number
 (label numbers in the inlined function start at min_labelno). */
1189 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1190 map->label_map -= min_labelno;
1192 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1193 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1194 map->min_insnno = 0;
1195 map->max_insnno = INSN_UID (header);
1197 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1198 be large enough for all our pseudos. This is the number we are currently
1199 using plus the number in the called routine, plus 15 for each arg,
1200 five to compute the virtual frame pointer, and five for the return value.
1201 This should be enough for most cases. We do not reference entries
1202 outside the range of the map.
1204 ??? These numbers are quite arbitrary and were obtained by
1205 experimentation. At some point, we should try to allocate the
1206 table after all the parameters are set up so we can more accurately
1207 estimate the number of pseudos we will need. */
1209 map->const_equiv_map_size
1210 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1212 map->const_equiv_map
1213 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1214 bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));
1217 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1218 bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
1221 /* Record the current insn in case we have to set up pointers to frame
1222 and argument memory blocks. */
1223 map->insns_at_start = get_last_insn ();
1225 /* Update the outgoing argument size to allow for those in the inlined
1227 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1228 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1230 /* If the inline function needs to make PIC references, that means
1231 that this function's PIC offset table must be used. */
1232 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1233 current_function_uses_pic_offset_table = 1;
1235 /* Process each argument. For each, set up things so that the function's
1236 reference to the argument will refer to the argument being passed.
1237 We only replace REG with REG here. Any simplifications are done
1238 via const_equiv_map.
1240 We make two passes: In the first, we deal with parameters that will
1241 be placed into registers, since we need to ensure that the allocated
1242 register number fits in const_equiv_map. Then we store all non-register
1243 parameters into their memory location. */
1245 for (i = 0; i < nargs; i++)
1247 rtx copy = arg_vals[i];
1249 loc = RTVEC_ELT (arg_vector, i);
1251 /* There are three cases, each handled separately. */
1252 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1253 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1255 /* This must be an object passed by invisible reference (it could
1256 also be a variable-sized object, but we forbid inlining functions
1257 with variable-sized arguments). COPY is the address of the
1258 actual value (this computation will cause it to be copied). We
1259 map that address for the register, noting the actual address as
1260 an equivalent in case it can be substituted into the insns. */
1262 if (GET_CODE (copy) != REG)
1264 temp = copy_addr_to_reg (copy);
1265 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1267 map->const_equiv_map[REGNO (temp)] = copy;
1268 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1272 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1274 else if (GET_CODE (loc) == MEM)
1276 /* This is the case of a parameter that lives in memory.
1277 It will live in the block we allocate in the called routine's
1278 frame that simulates the incoming argument area. Do nothing
1279 now; we will call store_expr later. */
1282 else if (GET_CODE (loc) == REG)
1284 /* This is the good case where the parameter is in a register.
1285 If it is read-only and our argument is a constant, set up the
1286 constant equivalence. */
1287 if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1289 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1290 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1292 map->const_equiv_map[REGNO (temp)] = copy;
1293 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1297 map->reg_map[REGNO (loc)] = copy;
1302 /* Free any temporaries we made setting up this parameter. */
1306 /* Now do the parameters that will be placed in memory. */
1308 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1309 formal; formal = TREE_CHAIN (formal), i++)
1311 rtx copy = arg_vals[i];
1313 loc = RTVEC_ELT (arg_vector, i);
1315 if (GET_CODE (loc) == MEM
1316 /* Exclude case handled above. */
1317 && ! (GET_CODE (XEXP (loc, 0)) == REG
1318 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1320 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1322 /* Compute the address in the area we reserved and store the
1324 temp = copy_rtx_and_substitute (loc, map);
1325 subst_constants (&temp, 0, map);
1326 apply_change_group ();
1327 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1328 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1329 store_expr (arg_trees[i], temp, 0);
1331 /* Free any temporaries we made setting up this parameter. */
1336 /* Deal with the places that the function puts its result.
1337 We are driven by what is placed into DECL_RESULT.
1339 Initially, we assume that we don't have anything special handling for
1340 REG_FUNCTION_RETURN_VALUE_P. */
1342 map->inline_target = 0;
1343 loc = DECL_RTL (DECL_RESULT (fndecl));
1344 if (TYPE_MODE (type) == VOIDmode)
1345 /* There is no return value to worry about. */
1347 else if (GET_CODE (loc) == MEM)
1349 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1352 /* Pass the function the address in which to return a structure value.
1353 Note that a constructor can cause someone to call us with
1354 STRUCTURE_VALUE_ADDR, but the initialization takes place
1355 via the first parameter, rather than the struct return address.
1357 We have two cases: If the address is a simple register indirect,
1358 use the mapping mechanism to point that register to our structure
1359 return address. Otherwise, store the structure return value into
1360 the place that it will be referenced from. */
1362 if (GET_CODE (XEXP (loc, 0)) == REG)
1364 temp = force_reg (Pmode, structure_value_addr);
1365 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1366 if (CONSTANT_P (structure_value_addr)
1367 || (GET_CODE (structure_value_addr) == PLUS
1368 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1369 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1371 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1372 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1377 temp = copy_rtx_and_substitute (loc, map);
1378 subst_constants (&temp, 0, map);
1379 apply_change_group ();
1380 emit_move_insn (temp, structure_value_addr);
1384 /* We will ignore the result value, so don't look at its structure.
1385 Note that preparations for an aggregate return value
1386 do need to be made (above) even if it will be ignored. */
1388 else if (GET_CODE (loc) == REG)
1390 /* The function returns an object in a register and we use the return
1391 value. Set up our target for remapping. */
1393 /* Machine mode function was declared to return. */
1394 enum machine_mode departing_mode = TYPE_MODE (type);
1395 /* (Possibly wider) machine mode it actually computes
1396 (for the sake of callers that fail to declare it right). */
1397 enum machine_mode arriving_mode
1398 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1401 /* Don't use MEMs as direct targets because on some machines
1402 substituting a MEM for a REG makes invalid insns.
1403 Let the combiner substitute the MEM if that is valid. */
1404 if (target == 0 || GET_CODE (target) != REG
1405 || GET_MODE (target) != departing_mode)
1406 target = gen_reg_rtx (departing_mode);
1408 /* If function's value was promoted before return,
1409 avoid machine mode mismatch when we substitute INLINE_TARGET.
1410 But TARGET is what we will return to the caller. */
1411 if (arriving_mode != departing_mode)
1412 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1414 reg_to_map = target;
1416 /* Usually, the result value is the machine's return register.
1417 Sometimes it may be a pseudo. Handle both cases. */
1418 if (REG_FUNCTION_VALUE_P (loc))
1419 map->inline_target = reg_to_map;
1421 map->reg_map[REGNO (loc)] = reg_to_map;
1424 /* Make new label equivalences for the labels in the called function. */
1425 for (i = min_labelno; i < max_labelno; i++)
1426 map->label_map[i] = gen_label_rtx ();
1428 /* Perform postincrements before actually calling the function. */
1431 /* Clean up stack so that variables might have smaller offsets. */
1432 do_pending_stack_adjust ();
1434 /* Save a copy of the location of const_equiv_map for mark_stores, called
1436 global_const_equiv_map = map->const_equiv_map;
1438 /* Now copy the insns one by one. Do this in two passes, first the insns and
1439 then their REG_NOTES, just like save_for_inline. */
1441 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1443 for (insn = insns; insn; insn = NEXT_INSN (insn))
1447 map->orig_asm_operands_vector = 0;
1449 switch (GET_CODE (insn))
1452 pattern = PATTERN (insn);
1454 if (GET_CODE (pattern) == USE
1455 && GET_CODE (XEXP (pattern, 0)) == REG
1456 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1457 /* The (USE (REG n)) at return from the function should
1458 be ignored since we are changing (REG n) into
1462 /* Ignore setting a function value that we don't want to use. */
1463 if (map->inline_target == 0
1464 && GET_CODE (pattern) == SET
1465 && GET_CODE (SET_DEST (pattern)) == REG
1466 && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
1468 if (volatile_refs_p (SET_SRC (pattern)))
1470 /* If we must not delete the source,
1471 load it into a new temporary. */
1472 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1474 = gen_reg_rtx (GET_MODE (SET_DEST (pattern)));
1480 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1481 /* REG_NOTES will be copied later. */
1484 /* If this insn is setting CC0, it may need to look at
1485 the insn that uses CC0 to see what type of insn it is.
1486 In that case, the call to recog via validate_change will
1487 fail. So don't substitute constants here. Instead,
1488 do it when we emit the following insn.
1490 For example, see the pyr.md file. That machine has signed and
1491 unsigned compares. The compare patterns must check the
1492 following branch insn to see what kind of compare to
1495 If the previous insn set CC0, substitute constants on it as
1497 if (sets_cc0_p (PATTERN (copy)) != 0)
1502 try_constants (cc0_insn, map);
1504 try_constants (copy, map);
1507 try_constants (copy, map);
/* A RETURN in the inlined body becomes a jump to a shared local
 return label, which is emitted after the insn-copying loops below. */
1512 if (GET_CODE (PATTERN (insn)) == RETURN)
1514 if (local_return_label == 0)
1515 local_return_label = gen_label_rtx ();
1516 pattern = gen_jump (local_return_label);
1519 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1521 copy = emit_jump_insn (pattern);
1525 try_constants (cc0_insn, map);
1528 try_constants (copy, map);
1530 /* If this used to be a conditional jump insn but whose branch
1531 direction is now known, we must do something special. */
1532 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1535 /* The previous insn set cc0 for us. So delete it. */
1536 delete_insn (PREV_INSN (copy));
1539 /* If this is now a no-op, delete it. */
1540 if (map->last_pc_value == pc_rtx)
1546 /* Otherwise, this is unconditional jump so we must put a
1547 BARRIER after it. We could do some dead code elimination
1548 here, but jump.c will do it just as well. */
1554 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1555 copy = emit_call_insn (pattern);
1559 try_constants (cc0_insn, map);
1562 try_constants (copy, map);
1564 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1565 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1566 map->const_equiv_map[i] = 0;
/* Labels were pre-allocated in the loop above; emit the new one here. */
1570 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1571 LABEL_NAME (copy) = LABEL_NAME (insn);
1576 copy = emit_barrier ();
1580 /* It is important to discard function-end and function-beg notes,
1581 so we have only one of each in the current function.
1582 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1583 deleted these in the copy used for continuing compilation,
1584 not the copy used for inlining). */
1585 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1586 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1587 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1588 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1599 RTX_INTEGRATED_P (copy) = 1;
/* Record old-insn -> new-insn correspondence for the REG_NOTES pass. */
1601 map->insn_map[INSN_UID (insn)] = copy;
1604 /* Now copy the REG_NOTES. */
1605 for (insn = insns; insn; insn = NEXT_INSN (insn))
1606 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1607 && map->insn_map[INSN_UID (insn)])
1608 REG_NOTES (map->insn_map[INSN_UID (insn)])
1609 = copy_rtx_and_substitute (REG_NOTES (insn), map);
1611 if (local_return_label)
1612 emit_label (local_return_label);
1614 /* Make copies of the decls of the symbols in the inline function, so that
1615 the copies of the variables get declared in the current function. Set
1616 up things so that lookup_static_chain knows to interpret registers
1617 in SAVE_EXPRs for TYPE_SIZEs as local. */
1619 inline_function_decl = fndecl;
1620 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map, 0);
1621 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1622 inline_function_decl = 0;
1624 /* End the scope containing the copied formal parameter variables. */
1626 expand_end_bindings (getdecls (), 1, 1);
1629 emit_line_note (input_filename, lineno);
1631 if (structure_value_addr)
1632 return gen_rtx (MEM, TYPE_MODE (type),
1633 memory_address (TYPE_MODE (type), structure_value_addr));
1637 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1638 push all of those decls and give each one the corresponding home. */
1641 integrate_parm_decls (args, map, arg_vector)
1643 struct inline_remap *map;
/* Walk the PARM_DECL chain in step with ARG_VECTOR: element I of the
 vector is the rtl home of the I'th parameter. */
1649 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1651 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1654 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1656 /* These args would always appear unused, if not for this. */
1657 TREE_USED (decl) = 1;
1658 /* Prevent warning for shadowing with these. */
1659 DECL_FROM_INLINE (decl) = 1;
1661 /* Fully instantiate the address with the equivalent form so that the
1662 debugging information contains the actual register, instead of the
1663 virtual register. Do this by not passing an insn to
1665 subst_constants (&new_decl_rtl, 0, map);
1666 apply_change_group ();
1667 DECL_RTL (decl) = new_decl_rtl;
1671 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1672 current function a tree of contexts isomorphic to the one that is given.
1674 LEVEL indicates how far down into the BLOCK tree is the node we are
1675 currently traversing. It is always zero for the initial call.
1677 MAP, if nonzero, is a pointer to a inline_remap map which indicates how
1678 registers used in the DECL_RTL field should be remapped. If it is zero,
1679 no mapping is necessary.
1681 FUNCTIONBODY indicates whether the top level block tree corresponds to
1682 a function body. This is identical in meaning to the functionbody
1683 argument of poplevel. */
1686 integrate_decl_tree (let, level, map, functionbody)
1689 struct inline_remap *map;
/* Copy each variable declared directly in this block, remapping its
 DECL_RTL through MAP except at the function-body level. */
1696 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1698 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1699 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1700 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1701 if (! functionbody && DECL_RTL (t) != 0)
1703 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1704 /* Fully instantiate the address with the equivalent form so that the
1705 debugging information contains the actual register, instead of the
1706 virtual register. Do this by not passing an insn to
1708 subst_constants (&DECL_RTL (d), 0, map);
1709 apply_change_group ();
1711 else if (DECL_RTL (t))
1712 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
/* Propagate the remaining attribute bits to the copy unchanged. */
1713 TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
1714 TREE_STATIC (d) = TREE_STATIC (t);
1715 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1716 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1717 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1718 TREE_READONLY (d) = TREE_READONLY (t);
1719 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1720 /* These args would always appear unused, if not for this. */
1722 /* Prevent warning for shadowing with these. */
1723 DECL_FROM_INLINE (d) = 1;
/* Recurse into the nested blocks, one level deeper. */
1727 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1728 integrate_decl_tree (t, level + 1, map, functionbody);
1730 node = poplevel (level > 0, 0, level == 0 && functionbody);
1732 TREE_USED (node) = TREE_USED (let);
1735 /* Create a new copy of an rtx.
1736 Recursively copies the operands of the rtx,
1737 except for those few rtx codes that are sharable.
1739 We always return an rtx that is similar to that incoming rtx, with the
1740 exception of possibly changing a REG to a SUBREG or vice versa. No
1741 rtl is ever emitted.
1743 Handle constants that need to be placed in the constant pool by
1744 calling `force_const_mem'. */
1747 copy_rtx_and_substitute (orig, map)
1749 struct inline_remap *map;
1751 register rtx copy, temp;
1753 register RTX_CODE code;
1754 register enum machine_mode mode;
1755 register char *format_ptr;
1761 code = GET_CODE (orig);
1762 mode = GET_MODE (orig);
1767 /* If the stack pointer register shows up, it must be part of
1768 stack-adjustments (*not* because we eliminated the frame pointer!).
1769 Small hard registers are returned as-is. Pseudo-registers
1770 go through their `reg_map'. */
1771 regno = REGNO (orig);
1772 if (regno <= LAST_VIRTUAL_REGISTER)
1774 /* Some hard registers are also mapped,
1775 but others are not translated. */
1776 if (map->reg_map[regno] != 0)
1777 return map->reg_map[regno];
1779 /* If this is the virtual frame pointer, make space in current
1780 function's stack frame for the stack frame of the inline function.
1782 Copy the address of this area into a pseudo. Map
1783 virtual_stack_vars_rtx to this pseudo and set up a constant
1784 equivalence for it to be the address. This will substitute the
1785 address into insns where it can be substituted and use the new
1786 pseudo where it can't. */
1787 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1790 int size = DECL_FRAME_SIZE (map->fndecl);
1794 loc = assign_stack_temp (BLKmode, size, 1);
1795 loc = XEXP (loc, 0);
1796 #ifdef FRAME_GROWS_DOWNWARD
1797 /* In this case, virtual_stack_vars_rtx points to one byte
1798 higher than the top of the frame area. So compute the offset
1799 to one byte higher than our substitute frame.
1800 Keep the fake frame pointer aligned like a real one. */
1801 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1802 loc = plus_constant (loc, rounded);
1804 map->reg_map[regno] = temp = force_operand (loc, 0);
1805 map->const_equiv_map[REGNO (temp)] = loc;
1806 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1808 seq = gen_sequence ();
1810 emit_insn_after (seq, map->insns_at_start);
1813 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1815 /* Do the same for a block to contain any arguments referenced
1818 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1821 loc = assign_stack_temp (BLKmode, size, 1);
1822 loc = XEXP (loc, 0);
1823 map->reg_map[regno] = temp = force_operand (loc, 0);
1824 map->const_equiv_map[REGNO (temp)] = loc;
1825 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1827 seq = gen_sequence ();
1829 emit_insn_after (seq, map->insns_at_start);
1832 else if (REG_FUNCTION_VALUE_P (orig))
1834 /* This is a reference to the function return value. If
1835 the function doesn't have a return value, error. If the
1836 mode doesn't agree, make a SUBREG. */
1837 if (map->inline_target == 0)
1838 /* Must be unrolling loops or replicating code if we
1839 reach here, so return the register unchanged. */
1841 else if (mode != GET_MODE (map->inline_target))
1842 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1844 return map->inline_target;
/* Ordinary pseudo: allocate a fresh register the first time we see it
 and cache the mapping, copying the flag bits from the original. */
1848 if (map->reg_map[regno] == NULL)
1850 map->reg_map[regno] = gen_reg_rtx (mode);
1851 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1852 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1853 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1854 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1856 return map->reg_map[regno];
1859 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1860 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1861 if (GET_CODE (copy) == SUBREG)
1862 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1863 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1865 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1866 SUBREG_WORD (orig));
1870 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1872 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1873 if (GET_CODE (copy) == SUBREG)
1874 copy = SUBREG_REG (copy);
1875 return gen_rtx (code, VOIDmode, copy);
1878 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1879 = LABEL_PRESERVE_P (orig);
1880 return map->label_map[CODE_LABEL_NUMBER (orig)];
1883 copy = rtx_alloc (LABEL_REF);
1884 PUT_MODE (copy, mode);
1885 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1886 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1895 /* Symbols which represent the address of a label stored in the constant
1896 pool must be modified to point to a constant pool entry for the
1897 remapped label. Otherwise, symbols are returned unchanged. */
1898 if (CONSTANT_POOL_ADDRESS_P (orig))
1900 rtx constant = get_pool_constant (orig);
1901 if (GET_CODE (constant) == LABEL_REF)
1903 copy = rtx_alloc (LABEL_REF);
1904 PUT_MODE (copy, mode);
1906 = map->label_map[CODE_LABEL_NUMBER (XEXP (constant, 0))];
1907 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1908 copy = force_const_mem (Pmode, copy);
1909 return XEXP (copy, 0);
1915 /* We have to make a new copy of this CONST_DOUBLE because don't want
1916 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1917 duplicate of a CONST_DOUBLE we have already seen. */
1918 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1922 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1923 return immed_real_const_1 (d, GET_MODE (orig));
1926 return immed_double_const (CONST_DOUBLE_LOW (orig),
1927 CONST_DOUBLE_HIGH (orig), VOIDmode);
1930 /* Make new constant pool entry for a constant
1931 that was in the pool of the inline function. */
1932 if (RTX_INTEGRATED_P (orig))
1934 /* If this was an address of a constant pool entry that itself
1935 had to be placed in the constant pool, it might not be a
1936 valid address. So the recursive call below might turn it
1937 into a register. In that case, it isn't a constant any
1938 more, so return it. This has the potential of changing a
1939 MEM into a REG, but we'll assume that it is safe. */
1940 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
1941 if (! CONSTANT_P (temp))
1943 return validize_mem (force_const_mem (GET_MODE (orig), temp));
1948 /* If from constant pool address, make new constant pool entry and
1949 return its address. */
1950 if (! RTX_INTEGRATED_P (orig))
1953 temp = force_const_mem (GET_MODE (orig),
1954 copy_rtx_and_substitute (XEXP (orig, 0), map));
1957 /* Legitimizing the address here is incorrect.
1959 The only ADDRESS rtx's that can reach here are ones created by
1960 save_constants. Hence the operand of the ADDRESS is always legal
1961 in this position of the instruction, since the original rtx without
1962 the ADDRESS was legal.
1964 The reason we don't legitimize the address here is that on the
1965 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
1966 This code forces the operand of the address to a register, which
1967 fails because we can not take the HIGH part of a register.
1969 Also, change_address may create new registers. These registers
1970 will not have valid reg_map entries. This can cause try_constants()
1971 to fail because it assumes that all registers in the rtx have valid
1972 reg_map entries, and it may end up replacing one of these new
1973 registers with junk. */
1975 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1976 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1979 return XEXP (temp, 0);
1982 /* If a single asm insn contains multiple output operands
1983 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1984 We must make sure that the copied insn continues to share it. */
1985 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1987 copy = rtx_alloc (ASM_OPERANDS);
1988 XSTR (copy, 0) = XSTR (orig, 0);
1989 XSTR (copy, 1) = XSTR (orig, 1);
1990 XINT (copy, 2) = XINT (orig, 2);
1991 XVEC (copy, 3) = map->copy_asm_operands_vector;
1992 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1993 XSTR (copy, 5) = XSTR (orig, 5);
1994 XINT (copy, 6) = XINT (orig, 6);
2000 /* This is given special treatment because the first
2001 operand of a CALL is a (MEM ...) which may get
2002 forced into a register for cse. This is undesirable
2003 if function-address cse isn't wanted or if we won't do cse. */
2004 #ifndef NO_FUNCTION_CSE
2005 if (! (optimize && ! flag_no_function_cse))
2007 return gen_rtx (CALL, GET_MODE (orig),
2008 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2009 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2010 copy_rtx_and_substitute (XEXP (orig, 1), map));
2014 /* Must be ifdefed out for loop unrolling to work. */
2020 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2022 If the nonlocal goto is into the current function,
2023 this will result in unnecessarily bad code, but should work. */
2024 if (SET_DEST (orig) == virtual_stack_vars_rtx
2025 || SET_DEST (orig) == virtual_incoming_args_rtx)
2026 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2027 copy_rtx_and_substitute (SET_SRC (orig), map));
/* MEM: copy it, substituting within the address, and preserve the
 flag bits on the copy. */
2031 copy = rtx_alloc (MEM);
2032 PUT_MODE (copy, mode);
2033 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2034 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2035 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2036 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
/* Default: allocate a fresh rtx of the same code and copy each
 operand according to its rtx format letter. */
2040 copy = rtx_alloc (code);
2041 PUT_MODE (copy, mode);
2042 copy->in_struct = orig->in_struct;
2043 copy->volatil = orig->volatil;
2044 copy->unchanging = orig->unchanging;
2046 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2048 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2050 switch (*format_ptr++)
2056 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2060 /* Change any references to old-insns to point to the
2061 corresponding copied insns. */
2062 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2066 XVEC (copy, i) = XVEC (orig, i);
2067 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2069 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2070 for (j = 0; j < XVECLEN (copy, i); j++)
2071 XVECEXP (copy, i, j)
2072 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2077 XINT (copy, i) = XINT (orig, i);
2081 XSTR (copy, i) = XSTR (orig, i);
2089 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2091 map->orig_asm_operands_vector = XVEC (orig, 3);
2092 map->copy_asm_operands_vector = XVEC (copy, 3);
2093 map->copy_asm_constraints_vector = XVEC (copy, 4);
2099 /* Substitute known constant values into INSN, if that is valid. */
2102 try_constants (insn, map)
2104 struct inline_remap *map;
2109 subst_constants (&PATTERN (insn), insn, map);
2111 /* Apply the changes if they are valid; otherwise discard them. */
2112 apply_change_group ();
2114 /* Show we don't know the value of anything stored or clobbered. */
2115 note_stores (PATTERN (insn), mark_stores);
2116 map->last_pc_value = 0;
2118 map->last_cc0_value = 0;
2121 /* Set up any constant equivalences made in this insn.
 NOTE(review): map->num_sets and map->equiv_sets appear to be filled
 in by subst_constants above; that set-up code is not visible here —
 confirm before relying on it. */
2122 for (i = 0; i < map->num_sets; i++)
2124 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2126 int regno = REGNO (map->equiv_sets[i].dest);
2128 if (map->const_equiv_map[regno] == 0
2129 /* Following clause is a hack to make case work where GNU C++
2130 reassigns a variable to make cse work right. */
2131 || ! rtx_equal_p (map->const_equiv_map[regno],
2132 map->equiv_sets[i].equiv))
2134 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2135 map->const_age_map[regno] = map->const_age;
2138 else if (map->equiv_sets[i].dest == pc_rtx)
2139 map->last_pc_value = map->equiv_sets[i].equiv;
2141 else if (map->equiv_sets[i].dest == cc0_rtx)
2142 map->last_cc0_value = map->equiv_sets[i].equiv;
2147 /* Substitute known constants for pseudo regs in the contents of LOC,
2148 which are part of INSN.
2149 If INSN is zero, the substitution should always be done (this is used to
2151 These changes are taken out by try_constants if the result is not valid.
2153 Note that we are more concerned with determining when the result of a SET
2154 is a constant, for further propagation, than actually inserting constants
2155 into insns; cse will do the latter task better.
2157 This function is also used to adjust address of items previously addressed
2158 via the virtual stack variable or virtual incoming arguments registers. */
/* All replacements go through validate_change with in_group = 1, so
   nothing here takes effect until the caller runs apply_change_group
   (see try_constants); invalid groups are discarded wholesale.  */
2161 subst_constants (loc, insn, map)
2164 struct inline_remap *map;
2168 register enum rtx_code code;
2169 register char *format_ptr;
2170 int num_changes = num_validated_changes ();
2172 enum machine_mode op0_mode;
2174 code = GET_CODE (x);
/* NOTE(review): the switch labels on CODE are elided from this view;
   the statements below are the bodies of the cc0, USE/CLOBBER, REG,
   SUBREG, MEM and SET cases respectively.  */
2189 validate_change (insn, loc, map->last_cc0_value, 1);
2195 /* The only thing we can do with a USE or CLOBBER is possibly do
2196 some substitutions in a MEM within it. */
2197 if (GET_CODE (XEXP (x, 0)) == MEM)
2198 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2202 /* Substitute for parms and known constants. Don't replace
2203 hard regs used as user variables with constants. */
2205 int regno = REGNO (x);
/* Only use an equivalence if it is still current (const_age check).  */
2207 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2208 && regno < map->const_equiv_map_size
2209 && map->const_equiv_map[regno] != 0
2210 && map->const_age_map[regno] >= map->const_age)
2211 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2216 /* SUBREG is ordinary, but don't make nested SUBREGs and try to simplify
2219 rtx inner = SUBREG_REG (x);
2222 /* We can't call subst_constants on &SUBREG_REG (x) because any
2223 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
2224 see what is inside, try to form the new SUBREG and see if that is
2225 valid. We handle two cases: extracting a full word in an
2226 integral mode and extracting the low part. */
2227 subst_constants (&inner, 0, map);
2229 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2230 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2231 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2232 new = operand_subword (inner, SUBREG_WORD (x), 0,
2233 GET_MODE (SUBREG_REG (x)));
2235 if (new == 0 && subreg_lowpart_p (x))
2236 new = gen_lowpart_common (GET_MODE (x), inner);
2239 validate_change (insn, loc, new, 1);
/* MEM: substitute inside the address, then undo the whole group if
   the substitution produced an invalid address.  */
2245 subst_constants (&XEXP (x, 0), insn, map);
2247 /* If a memory address got spoiled, change it back. */
2248 if (insn != 0 && num_validated_changes () != num_changes
2249 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2250 cancel_changes (num_changes);
2255 /* Substitute constants in our source, and in any arguments to a
2256 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
2258 rtx *dest_loc = &SET_DEST (x);
2259 rtx dest = *dest_loc;
2262 subst_constants (&SET_SRC (x), insn, map);
/* Walk down through wrappers around the real destination; only the
   position/size arguments of a ZERO_EXTRACT get substitution.  */
2265 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2266 || GET_CODE (*dest_loc) == SIGN_EXTRACT
2267 || GET_CODE (*dest_loc) == SUBREG
2268 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2270 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2272 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2273 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2275 dest_loc = &XEXP (*dest_loc, 0);
2278 /* Check for the case of DEST a SUBREG, both it and the underlying
2279 register are less than one word, and the SUBREG has the wider mode.
2280 In the case, we are really setting the underlying register to the
2281 source converted to the mode of DEST. So indicate that. */
2282 if (GET_CODE (dest) == SUBREG
2283 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2284 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2285 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2286 <= GET_MODE_SIZE (GET_MODE (dest)))
2287 && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2288 src = tem, dest = SUBREG_REG (dest);
2290 /* If storing a recognizable value save it for later recording. */
/* Recognizable values: constants, virtual-register + constant sums,
   COMPAREs (for cc tracking), and jump targets (pc/RETURN/LABEL_REF).
   try_constants turns these entries into equivalences afterward.  */
2291 if ((map->num_sets < MAX_RECOG_OPERANDS)
2292 && (CONSTANT_P (src)
2293 || (GET_CODE (src) == PLUS
2294 && GET_CODE (XEXP (src, 0)) == REG
2295 && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2296 && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2297 && CONSTANT_P (XEXP (src, 1)))
2298 || GET_CODE (src) == COMPARE
2303 && (src == pc_rtx || GET_CODE (src) == RETURN
2304 || GET_CODE (src) == LABEL_REF))))
2306 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2307 it will cause us to save the COMPARE with any constants
2308 substituted, which is what we want for later. */
2309 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2310 map->equiv_sets[map->num_sets++].dest = dest;
/* Generic fallthrough: recurse over the operands per format string.  */
2317 format_ptr = GET_RTX_FORMAT (code);
2319 /* If the first operand is an expression, save its mode for later. */
/* op0_mode is needed by the simplifiers below because the operand's
   own mode may become VOIDmode once a constant is substituted in.  */
2320 if (*format_ptr == 'e')
2321 op0_mode = GET_MODE (XEXP (x, 0));
2323 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2325 switch (*format_ptr++)
2332 subst_constants (&XEXP (x, i), insn, map);
2341 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2344 for (j = 0; j < XVECLEN (x, i); j++)
2345 subst_constants (&XVECEXP (x, i, j), insn, map);
2354 /* If this is a commutative operation, move a constant to the second
2355 operand unless the second operand is already a CONST_INT. */
2356 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2357 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2359 rtx tem = XEXP (x, 0);
2360 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2361 validate_change (insn, &XEXP (x, 1), tem, 1);
2364 /* Simplify the expression in case we put in some constants. */
2365 switch (GET_RTX_CLASS (code))
2368 new = simplify_unary_operation (code, GET_MODE (x),
2369 XEXP (x, 0), op0_mode);
/* Comparisons: the result mode is VOIDmode-ish, so derive the operand
   mode from whichever operand still carries one.  */
2374 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2375 if (op_mode == VOIDmode)
2376 op_mode = GET_MODE (XEXP (x, 1));
2377 new = simplify_relational_operation (code, op_mode,
2378 XEXP (x, 0), XEXP (x, 1));
2384 new = simplify_binary_operation (code, GET_MODE (x),
2385 XEXP (x, 0), XEXP (x, 1));
2390 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2391 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2396 validate_change (insn, loc, new, 1);
2399 /* Show that register modified no longer contain known constants. We are
2400 called from note_stores with parts of the new insn. */
/* note_stores callback: DEST is the location being stored into; X is
   the SET/CLOBBER body and is unused here.  Look through a SUBREG to
   the underlying register and invalidate its entry in the global
   constant-equivalence table.
   NOTE(review): no bounds check on REGNO (dest) against the table
   size -- presumably the table covers all register numbers in use;
   confirm against the table's allocation site.  */
2403 mark_stores (dest, x)
2407 if (GET_CODE (dest) == SUBREG)
2408 dest = SUBREG_REG (dest);
2410 if (GET_CODE (dest) == REG)
2411 global_const_equiv_map[REGNO (dest)] = 0;
2414 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2415 pointed to by PX, they represent constants in the constant pool.
2416 Replace these with a new memory reference obtained from force_const_mem.
2417 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2418 address of a constant pool entry. Replace them with the address of
2419 a new constant pool entry obtained from force_const_mem. */
/* Recursive rewrite in place: *PX may be replaced with a new rtx.
   Used when re-emitting a deferred inline function, whose saved insns
   refer to a constant pool that no longer exists.  */
2422 restore_constants (px)
2432 if (GET_CODE (x) == CONST_DOUBLE)
2434 /* We have to make a new CONST_DOUBLE to ensure that we account for
2435 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2436 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2440 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2441 *px = immed_real_const_1 (d, GET_MODE (x));
/* Non-float CONST_DOUBLE: rebuild from the raw low/high words.  */
2444 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2448 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
/* Restore inner references first, then make a fresh pool entry and a
   valid MEM referring to it.  */
2450 restore_constants (&XEXP (x, 0));
2451 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2453 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2455 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2456 rtx new = XEXP (SUBREG_REG (x), 0);
2458 restore_constants (&new);
2459 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
/* Re-impose the outer SUBREG's mode directly on the new MEM.  */
2460 PUT_MODE (new, GET_MODE (x));
2461 *px = validize_mem (new);
2463 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
/* ADDRESS marker: replace with the bare address of a new pool entry.  */
2465 restore_constants (&XEXP (x, 0));
2466 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
/* Otherwise recurse over all operands per the rtx format string.  */
2470 fmt = GET_RTX_FORMAT (GET_CODE (x));
2471 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2476 for (j = 0; j < XVECLEN (x, i); j++)
2477 restore_constants (&XVECEXP (x, i, j));
2481 restore_constants (&XEXP (x, i));
2488 /* Output the assembly language code for the function FNDECL
2489 from its DECL_SAVED_INSNS. Used for inline functions that are output
2490 at end of compilation instead of where they came in the source. */
2493 output_inline_function (fndecl)
2496 rtx head = DECL_SAVED_INSNS (fndecl);
2499 temporary_allocation ();
2501 current_function_decl = fndecl;
2503 /* This call is only used to initialize global variables. */
2504 init_function_start (fndecl, "lossage", 1);
2506 /* Redo parameter determinations in case the FUNCTION_...
2507 macros took machine-specific actions that need to be redone. */
2508 assign_parms (fndecl, 1);
2510 /* Set stack frame size. */
2511 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2513 restore_reg_data (FIRST_PARM_INSN (head));
2515 stack_slot_list = STACK_SLOT_LIST (head);
2517 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2518 current_function_calls_alloca = 1;
2520 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2521 current_function_calls_setjmp = 1;
2523 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2524 current_function_calls_longjmp = 1;
2526 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2527 current_function_returns_struct = 1;
2529 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2530 current_function_returns_pcc_struct = 1;
2532 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2533 current_function_needs_context = 1;
2535 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2536 current_function_has_nonlocal_label = 1;
2538 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2539 current_function_returns_pointer = 1;
2541 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2542 current_function_uses_const_pool = 1;
2544 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2545 current_function_uses_pic_offset_table = 1;
2547 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2548 current_function_pops_args = POPS_ARGS (head);
2550 /* There is no need to output a return label again. */
2553 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2555 /* Find last insn and rebuild the constant pool. */
2556 for (last = FIRST_PARM_INSN (head);
2557 NEXT_INSN (last); last = NEXT_INSN (last))
2559 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2561 restore_constants (&PATTERN (last));
2562 restore_constants (®_NOTES (last));
2566 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2567 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2569 /* Compile this function all the way down to assembly code. */
2570 rest_of_compilation (fndecl);
2572 current_function_decl = 0;
2574 permanent_allocation ();