1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-config.h"
29 #include "insn-flags.h"
32 #include "integrate.h"
37 #define obstack_chunk_alloc xmalloc
38 #define obstack_chunk_free free
40 extern struct obstack *function_maybepermanent_obstack;
42 extern tree pushdecl ();
43 extern tree poplevel ();
45 /* Similar, but round to the next highest integer that meets the
47 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
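/* For example, assuming ALIGN is a power of two,
   CEIL_ROUND (37, 8) evaluates to (37 + 7) & ~7 == 40,
   the next multiple of 8 at or above 37.  */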
49 /* Default max number of insns a function can have and still be inline.
50 This is overridden on RISC machines. */
51 #ifndef INTEGRATE_THRESHOLD
52 #define INTEGRATE_THRESHOLD(DECL) \
53 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
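/* For example, under this default a function with two arguments gets an
   inlining threshold of 8 * (8 + 2) == 80 insns.  */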
56 /* Save any constant pool constants in an insn. */
57 static void save_constants ();
59 /* Note when parameter registers are the destination of a SET. */
60 static void note_modified_parmregs ();
62 /* Copy an rtx for save_for_inline_copying. */
63 static rtx copy_for_inline ();
65 /* Make copies of MEMs in DECL_RTLs. */
66 static void copy_decl_rtls ();
68 static tree copy_decl_tree ();
69 static tree copy_decl_list ();
71 static void integrate_parm_decls ();
72 static void integrate_decl_tree ();
74 static void subst_constants ();
76 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
77 is safe and reasonable to integrate into other functions.
78 Nonzero means value is a warning message with a single %s
79 for the function's name. */
82 function_cannot_inline_p (fndecl)
86 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
87 int max_insns = INTEGRATE_THRESHOLD (fndecl);
88 register int ninsns = 0;
91 /* No inlines with varargs. `grokdeclarator' gives a warning
92 message about that if `inline' is specified. This code
93 is put in to catch the volunteers. */
94 if ((last && TREE_VALUE (last) != void_type_node)
95 || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
96 && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
97 "__builtin_va_alist")))
98 return "varargs function cannot be inline";
100 if (current_function_calls_alloca)
101 return "function using alloca cannot be inline";
103 if (current_function_contains_functions)
104 return "function with nested functions cannot be inline";
106 /* This restriction may be eliminated sometime soon. But for now, don't
107 worry about remapping the static chain. */
108 if (current_function_needs_context)
109 return "nested function cannot be inline";
111 /* If it's not even close, don't even look. */
112 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
113 return "function too large to be inline";
116 /* Large stacks are OK now that inlined functions can share them. */
117 /* Don't inline functions with large stack usage,
118 since they can make other recursive functions burn up stack. */
119 if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
120 return "function stack frame too large for inlining";
124 /* Don't inline functions which do not specify a function prototype and
125 have BLKmode argument or take the address of a parameter. */
126 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
128 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
129 TREE_ADDRESSABLE (parms) = 1;
130 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
131 return "no prototype, and parameter address used; cannot be inline";
135 /* We can't inline functions that return structures
136 the old-fashioned PCC way, copying into a static block. */
137 if (current_function_returns_pcc_struct)
138 return "inline functions not supported for this return value type";
140 /* We can't inline functions that return structures of varying size. */
141 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
142 return "function with varying-size return value cannot be inline";
144 /* Cannot inline a function with a varying size argument. */
145 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
146 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
147 return "function with varying-size parameter cannot be inline";
149 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
151 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
152 insn = NEXT_INSN (insn))
154 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
158 if (ninsns >= max_insns)
159 return "function too large to be inline";
162 /* We cannot inline this function if forced_labels is non-zero. This
163 implies that a label in this function was used as an initializer.
164 Because labels can not be duplicated, all labels in the function
165 will be renamed when it is inlined. However, there is no way to find
166 and fix all variables initialized with addresses of labels in this
167 function, hence inlining is impossible. */
170 return "function with label addresses used in initializers cannot be inline";
175 /* Variables used within save_for_inline. */
177 /* Mapping from old pseudo-register to new pseudo-registers.
178 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
179 It is allocated in `save_for_inline' and `expand_inline_function',
180 and deallocated on exit from each of those routines. */
183 /* Mapping from old code-labels to new code-labels.
184 The first element of this map is label_map[min_labelno].
185 It is allocated in `save_for_inline' and `expand_inline_function',
186 and deallocated on exit from each of those routines. */
187 static rtx *label_map;
189 /* Mapping from old insn uid's to copied insns.
190 It is allocated in `save_for_inline' and `expand_inline_function',
191 and deallocated on exit from each of those routines. */
192 static rtx *insn_map;
194 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
195 Zero for a reg that isn't a parm's home.
196 Only reg numbers less than max_parm_reg are mapped here. */
197 static tree *parmdecl_map;
199 /* Keep track of first pseudo-register beyond those that are parms. */
200 static int max_parm_reg;
202 /* When an insn is being copied by copy_for_inline,
203 this is nonzero if we have copied an ASM_OPERANDS.
204 In that case, it is the original input-operand vector. */
205 static rtvec orig_asm_operands_vector;
207 /* When an insn is being copied by copy_for_inline,
208 this is nonzero if we have copied an ASM_OPERANDS.
209 In that case, it is the copied input-operand vector. */
210 static rtvec copy_asm_operands_vector;
212 /* Likewise, this is the copied constraints vector. */
213 static rtvec copy_asm_constraints_vector;
215 /* In save_for_inline, nonzero if past the parm-initialization insns. */
216 static int in_nonparm_insns;
218 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
219 needed to save FNDECL's insns and info for future inline expansion. */
222 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
229 int function_flags, i;
233 /* Compute the values of any flags we must restore when inlining this. */
236 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
237 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
238 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
239 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
240 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
241 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
242 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
243 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
244 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
245 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
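/* The current_function_* values above are each 0 or 1, and the
   FUNCTION_FLAGS_* constants are presumed to be distinct bits, so this sum
   acts as a bitwise OR; individual bits are recovered later with tests such
   as FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE.  */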
247 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
248 bzero (parmdecl_map, max_parm_reg * sizeof (tree));
249 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
251 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
253 parms = TREE_CHAIN (parms), i++)
255 rtx p = DECL_RTL (parms);
257 if (GET_CODE (p) == MEM && copy)
259 /* Copy the rtl so that modifications of the addresses
260 later in compilation won't affect this arg_vector.
261 Virtual register instantiation can screw the address
263 rtx new = copy_rtx (p);
265 /* Don't leave the old copy anywhere in this decl. */
266 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
267 || (GET_CODE (DECL_RTL (parms)) == MEM
268 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
269 && (XEXP (DECL_RTL (parms), 0)
270 == XEXP (DECL_INCOMING_RTL (parms), 0))))
271 DECL_INCOMING_RTL (parms) = new;
272 DECL_RTL (parms) = new;
275 RTVEC_ELT (arg_vector, i) = p;
277 if (GET_CODE (p) == REG)
278 parmdecl_map[REGNO (p)] = parms;
279 /* This flag is cleared later
280 if the function ever modifies the value of the parm. */
281 TREE_READONLY (parms) = 1;
284 /* Assume we start out in the insns that set up the parameters. */
285 in_nonparm_insns = 0;
287 /* The list of DECL_SAVED_INSNS starts off with a header which
288 contains the following information:
290 the first insn of the function (not including the insns that copy
291 parameters into registers).
292 the first parameter insn of the function,
293 the first label used by that function,
294 the last label used by that function,
295 the highest register number used for parameters,
296 the total number of registers used,
297 the size of the incoming stack area for parameters,
298 the number of bytes popped on return,
300 some flags that are used to restore compiler globals,
301 the value of current_function_outgoing_args_size,
302 the original argument vector,
303 and the original DECL_INITIAL. */
305 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
306 max_parm_reg, max_reg,
307 current_function_args_size,
308 current_function_pops_args,
309 stack_slot_list, function_flags,
310 current_function_outgoing_args_size,
311 arg_vector, (rtx) DECL_INITIAL (fndecl));
314 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
315 things that must be done to make FNDECL expandable as an inline function.
316 HEAD contains the chain of insns to which FNDECL will expand. */
319 finish_inline (fndecl, head)
323 NEXT_INSN (head) = get_first_nonparm_insn ();
324 FIRST_PARM_INSN (head) = get_insns ();
325 DECL_SAVED_INSNS (fndecl) = head;
326 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
327 DECL_INLINE (fndecl) = 1;
330 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
331 they all point to the new (copied) rtxs. */
334 adjust_copied_decl_tree (block)
337 register tree subblock;
338 register rtx original_end;
340 original_end = BLOCK_END_NOTE (block);
343 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
344 NOTE_SOURCE_FILE (original_end) = 0;
347 /* Process all subblocks. */
348 for (subblock = BLOCK_SUBBLOCKS (block);
350 subblock = TREE_CHAIN (subblock))
351 adjust_copied_decl_tree (subblock);
354 /* Make the insns and PARM_DECLs of the current function permanent
355 and record other information in DECL_SAVED_INSNS to allow inlining
356 of this function in subsequent calls.
358 This function is called when we are going to immediately compile
359 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
360 modified by the compilation process, so we copy all of them to
361 new storage and consider the new insns to be the insn chain to be
362 compiled. Our caller (rest_of_compilation) saves the original
363 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
366 save_for_inline_copying (fndecl)
369 rtx first_insn, last_insn, insn;
371 int max_labelno, min_labelno, i, len;
374 rtx first_nonparm_insn;
376 /* Make and emit a return-label if we have not already done so.
377 Do this before recording the bounds on label numbers. */
379 if (return_label == 0)
381 return_label = gen_label_rtx ();
382 emit_label (return_label);
385 /* Get some bounds on the labels and registers used. */
387 max_labelno = max_label_num ();
388 min_labelno = get_first_label_num ();
389 max_reg = max_reg_num ();
391 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
392 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
393 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
394 for the parms, prior to elimination of virtual registers.
395 These values are needed for substituting parms properly. */
397 max_parm_reg = max_parm_reg_num ();
398 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
400 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
402 if (current_function_uses_const_pool)
404 /* Replace any constant pool references with the actual constant. We
405 will put the constants back in the copy made below. */
406 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
407 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
409 save_constants (&PATTERN (insn));
410 if (REG_NOTES (insn))
411 save_constants (&REG_NOTES (insn));
414 /* Clear out the constant pool so that we can recreate it with the
415 copied constants below. */
416 init_const_rtx_hash_table ();
417 clear_const_double_mem ();
420 max_uid = INSN_UID (head);
422 /* We have now allocated all that needs to be allocated permanently
423 on the rtx obstack. Set our high-water mark, so that we
424 can free the rest of this when the time comes. */
428 /* Copy the chain of insns of this function.
429 Install the copied chain as the insns of this function,
430 for continued compilation;
431 the original chain is recorded as the DECL_SAVED_INSNS
432 for inlining future calls. */
434 /* If there are insns that copy parms from the stack into pseudo registers,
435 those insns are not copied. `expand_inline_function' must
436 emit the correct code to handle such things. */
439 if (GET_CODE (insn) != NOTE)
441 first_insn = rtx_alloc (NOTE);
442 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
443 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
444 INSN_UID (first_insn) = INSN_UID (insn);
445 PREV_INSN (first_insn) = NULL;
446 NEXT_INSN (first_insn) = NULL;
447 last_insn = first_insn;
449 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
450 Make these new rtx's now, and install them in regno_reg_rtx, so they
451 will be the official pseudo-reg rtx's for the rest of compilation. */
453 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
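/* Size in bytes of a REG rtx: struct rtx_def already includes room for one
   operand, hence the GET_RTX_LENGTH (REG) - 1 below.  */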
455 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
456 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
457 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
458 regno_reg_rtx[i], len);
460 bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
461 regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
462 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
464 /* Likewise each label rtx must have a unique rtx as its copy. */
466 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
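/* Offset the map so that it can be indexed directly by label number.  */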
467 label_map -= min_labelno;
469 for (i = min_labelno; i < max_labelno; i++)
470 label_map[i] = gen_label_rtx ();
472 /* Record the mapping of old insns to copied insns. */
474 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
475 bzero (insn_map, max_uid * sizeof (rtx));
477 /* Get the insn which signals the end of parameter setup code. */
478 first_nonparm_insn = get_first_nonparm_insn ();
480 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
481 (the former occurs when a variable has its address taken)
482 since these may be shared and can be changed by virtual
483 register instantiation. DECL_RTL values for our arguments
484 have already been copied by initialize_for_inline. */
485 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
486 if (GET_CODE (regno_reg_rtx[i]) == MEM)
487 XEXP (regno_reg_rtx[i], 0)
488 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
490 /* Copy the tree of subblocks of the function, and the decls in them.
491 We will use the copy for compiling this function, then restore the original
492 subblocks and decls for use when inlining this function.
494 Several parts of the compiler modify BLOCK trees. In particular,
495 instantiate_virtual_regs will instantiate any virtual regs
496 mentioned in the DECL_RTLs of the decls, and loop
497 unrolling will replicate any BLOCK trees inside an unrolled loop.
499 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
500 which we will use for inlining. The rtl might even contain pseudoregs
501 whose space has been freed. */
503 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
504 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
506 /* Now copy each DECL_RTL which is a MEM,
507 so it is safe to modify their addresses. */
508 copy_decl_rtls (DECL_INITIAL (fndecl));
510 /* The fndecl node acts as its own progenitor, so mark it as such. */
511 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
513 /* Now copy the chain of insns. Do this twice: the first time copy the insn
514 itself and its body; the second time copy the REG_NOTES. This is because
515 a REG_NOTE may have a forward pointer to another insn. */
517 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
519 orig_asm_operands_vector = 0;
521 if (insn == first_nonparm_insn)
522 in_nonparm_insns = 1;
524 switch (GET_CODE (insn))
527 /* No need to keep these. */
528 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
531 copy = rtx_alloc (NOTE);
532 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
533 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
534 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
537 NOTE_SOURCE_FILE (insn) = (char *) copy;
538 NOTE_SOURCE_FILE (copy) = 0;
545 copy = rtx_alloc (GET_CODE (insn));
546 PATTERN (copy) = copy_for_inline (PATTERN (insn));
547 INSN_CODE (copy) = -1;
548 LOG_LINKS (copy) = NULL;
549 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
553 copy = label_map[CODE_LABEL_NUMBER (insn)];
554 LABEL_NAME (copy) = LABEL_NAME (insn);
558 copy = rtx_alloc (BARRIER);
564 INSN_UID (copy) = INSN_UID (insn);
565 insn_map[INSN_UID (insn)] = copy;
566 NEXT_INSN (last_insn) = copy;
567 PREV_INSN (copy) = last_insn;
571 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
573 /* Now copy the REG_NOTES. */
574 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
575 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
576 && insn_map[INSN_UID(insn)])
577 REG_NOTES (insn_map[INSN_UID (insn)])
578 = copy_for_inline (REG_NOTES (insn));
580 NEXT_INSN (last_insn) = NULL;
582 finish_inline (fndecl, head);
584 set_new_first_and_last_insn (first_insn, last_insn);
587 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
588 For example, this can copy a list made of TREE_LIST nodes. While copying,
589 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
590 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
591 point to the corresponding (abstract) original node. */
594 copy_decl_list (list)
598 register tree prev, next;
603 head = prev = copy_node (list);
604 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
605 DECL_ABSTRACT_ORIGIN (head) = list;
606 next = TREE_CHAIN (list);
611 copy = copy_node (next);
612 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
613 DECL_ABSTRACT_ORIGIN (copy) = next;
614 TREE_CHAIN (prev) = copy;
616 next = TREE_CHAIN (next);
621 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
624 copy_decl_tree (block)
627 tree t, vars, subblocks;
629 vars = copy_decl_list (BLOCK_VARS (block));
632 /* Process all subblocks. */
633 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
635 tree copy = copy_decl_tree (t);
636 TREE_CHAIN (copy) = subblocks;
640 t = copy_node (block);
641 BLOCK_VARS (t) = vars;
642 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
643 /* If the BLOCK being cloned is already marked as having been instantiated
644 from something else, then leave that `origin' marking alone. Elsewise,
645 mark the clone as having originated from the BLOCK we are cloning. */
646 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
647 BLOCK_ABSTRACT_ORIGIN (t) = block;
651 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
654 copy_decl_rtls (block)
659 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
660 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
661 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
663 /* Process all subblocks. */
664 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
668 /* Make the insns and PARM_DECLs of the current function permanent
669 and record other information in DECL_SAVED_INSNS to allow inlining
670 of this function in subsequent calls.
672 This routine need not copy any insns because we are not going
673 to immediately compile the insns in the insn chain. There
674 are two cases when we would compile the insns for FNDECL:
675 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
676 be output at the end of other compilation, because somebody took
677 its address. In the first case, the insns of FNDECL are copied
678 as it is expanded inline, so FNDECL's saved insns are not
679 modified. In the second case, FNDECL is used for the last time,
680 so modifying the rtl is not a problem.
682 ??? Actually, we do not verify that FNDECL is not inline expanded
683 by other functions which must also be written down at the end
684 of compilation. We could set flag_no_inline to nonzero when
685 the time comes to write down such functions. */
688 save_for_inline_nocopy (fndecl)
694 int max_labelno, min_labelno, i, len;
697 rtx first_nonparm_insn;
700 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
701 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
702 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
703 for the parms, prior to elimination of virtual registers.
704 These values are needed for substituting parms properly. */
706 max_parm_reg = max_parm_reg_num ();
707 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
709 /* Make and emit a return-label if we have not already done so. */
711 if (return_label == 0)
713 return_label = gen_label_rtx ();
714 emit_label (return_label);
717 head = initialize_for_inline (fndecl, get_first_label_num (),
718 max_label_num (), max_reg_num (), 0);
720 /* If there are insns that copy parms from the stack into pseudo registers,
721 those insns are not copied. `expand_inline_function' must
722 emit the correct code to handle such things. */
725 if (GET_CODE (insn) != NOTE)
728 /* Get the insn which signals the end of parameter setup code. */
729 first_nonparm_insn = get_first_nonparm_insn ();
731 /* Now just scan the chain of insns to see what happens to our
732 PARM_DECLs. If a PARM_DECL is used but never modified, we
733 can substitute its rtl directly when expanding inline (and
734 perform constant folding when its incoming value is constant).
735 Otherwise, we have to copy its value into a new register and track
736 the new register's life. */
738 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
740 if (insn == first_nonparm_insn)
741 in_nonparm_insns = 1;
743 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
745 if (current_function_uses_const_pool)
747 /* Replace any constant pool references with the actual constant.
748 We will put the constant back if we need to write the
749 function out after all. */
750 save_constants (&PATTERN (insn));
751 if (REG_NOTES (insn))
752 save_constants (&REG_NOTES (insn));
755 /* Record what interesting things happen to our parameters. */
756 note_stores (PATTERN (insn), note_modified_parmregs);
760 /* We have now allocated all that needs to be allocated permanently
761 on the rtx obstack. Set our high-water mark, so that we
762 can free the rest of this when the time comes. */
766 finish_inline (fndecl, head);
769 /* Given PX, a pointer into an insn, search for references to the constant
770 pool. Replace each with a CONST that has the mode of the original
771 constant, contains the constant, and has RTX_INTEGRATED_P set.
772 Similarly, constant pool addresses not enclosed in a MEM are replaced
773 with an ADDRESS rtx which also gives the constant, mode, and has
774 RTX_INTEGRATED_P set. */
786 /* If this is a CONST_DOUBLE, don't try to fix things up in
787 CONST_DOUBLE_MEM, because that would cause infinite recursion. */
788 if (GET_CODE (x) == CONST_DOUBLE)
790 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
791 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
793 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
794 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
795 RTX_INTEGRATED_P (new) = 1;
797 /* If the MEM was in a different mode than the constant (perhaps we
798 were only looking at the low-order part), surround it with a
799 SUBREG so we can save both modes. */
801 if (GET_MODE (x) != const_mode)
803 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
804 RTX_INTEGRATED_P (new) = 1;
808 save_constants (&XEXP (*px, 0));
810 else if (GET_CODE (x) == SYMBOL_REF
811 && CONSTANT_POOL_ADDRESS_P (x))
813 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
814 save_constants (&XEXP (*px, 0));
815 RTX_INTEGRATED_P (*px) = 1;
820 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
821 int len = GET_RTX_LENGTH (GET_CODE (x));
823 for (i = len-1; i >= 0; i--)
828 for (j = 0; j < XVECLEN (x, i); j++)
829 save_constants (&XVECEXP (x, i, j));
833 if (XEXP (x, i) == 0)
837 /* Hack tail-recursion here. */
841 save_constants (&XEXP (x, i));
848 /* Note whether a parameter is modified or not. */
851 note_modified_parmregs (reg, x)
855 if (GET_CODE (reg) == REG && in_nonparm_insns
856 && REGNO (reg) < max_parm_reg
857 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
858 && parmdecl_map[REGNO (reg)] != 0)
859 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
862 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
863 according to `reg_map' and `label_map'. The original rtl insns
864 will be saved for inlining; this is used to make a copy
865 which is used to finish compiling the inline function itself.
867 If we find a "saved" constant pool entry, one which was replaced with
868 the value of the constant, convert it back to a constant pool entry.
869 Since the pool wasn't touched, this should simply restore the old
872 All other kinds of rtx are copied except those that can never be
873 changed during compilation. */
876 copy_for_inline (orig)
879 register rtx x = orig;
881 register enum rtx_code code;
882 register char *format_ptr;
889 /* These types may be freely shared. */
901 /* We have to make a new CONST_DOUBLE to ensure that we account for
902 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
903 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
907 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
908 return immed_real_const_1 (d, GET_MODE (x));
911 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
915 /* Get constant pool entry for constant in the pool. */
916 if (RTX_INTEGRATED_P (x))
917 return validize_mem (force_const_mem (GET_MODE (x),
918 copy_for_inline (XEXP (x, 0))));
922 /* Get constant pool entry, but access in different mode. */
923 if (RTX_INTEGRATED_P (x))
926 = force_const_mem (GET_MODE (SUBREG_REG (x)),
927 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
929 PUT_MODE (new, GET_MODE (x));
930 return validize_mem (new);
935 /* If this is not a saved constant-pool reference, it is an error. Else get the constant pool
937 if (! RTX_INTEGRATED_P (x))
940 return XEXP (force_const_mem (GET_MODE (x),
941 copy_for_inline (XEXP (x, 0))), 0);
944 /* If a single asm insn contains multiple output operands
945 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
946 We must make sure that the copied insn continues to share it. */
947 if (orig_asm_operands_vector == XVEC (orig, 3))
949 x = rtx_alloc (ASM_OPERANDS);
950 XSTR (x, 0) = XSTR (orig, 0);
951 XSTR (x, 1) = XSTR (orig, 1);
952 XINT (x, 2) = XINT (orig, 2);
953 XVEC (x, 3) = copy_asm_operands_vector;
954 XVEC (x, 4) = copy_asm_constraints_vector;
955 XSTR (x, 5) = XSTR (orig, 5);
956 XINT (x, 6) = XINT (orig, 6);
962 /* A MEM is usually allowed to be shared if its address is constant
963 or is a constant plus one of the special registers.
965 We do not allow sharing of addresses that are either a special
966 register or the sum of a constant and a special register because
967 it is possible for unshare_all_rtl to copy the address, into memory
968 that won't be saved. Although the MEM can safely be shared, and
969 won't be copied there, the address itself cannot be shared, and may
972 There are also two exceptions with constants: The first is if the
973 constant is a LABEL_REF or the sum of the LABEL_REF
974 and an integer. This case can happen if we have an inline
975 function that supplies a constant operand to the call of another
976 inline function that uses it in a switch statement. In this case,
977 we will be replacing the LABEL_REF, so we have to replace this MEM
980 The second case is if we have a (const (plus (address ..) ...)).
981 In that case we need to put back the address of the constant pool
984 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
985 && GET_CODE (XEXP (x, 0)) != LABEL_REF
986 && ! (GET_CODE (XEXP (x, 0)) == CONST
987 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
988 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
990 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
997 /* Must point to the new insn. */
998 return gen_rtx (LABEL_REF, GET_MODE (orig),
999 label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1003 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1004 return reg_map [REGNO (x)];
1009 /* If a parm that gets modified lives in a pseudo-reg,
1010 clear its TREE_READONLY to prevent certain optimizations. */
1012 rtx dest = SET_DEST (x);
1014 while (GET_CODE (dest) == STRICT_LOW_PART
1015 || GET_CODE (dest) == ZERO_EXTRACT
1016 || GET_CODE (dest) == SUBREG)
1017 dest = XEXP (dest, 0);
1019 if (GET_CODE (dest) == REG
1020 && REGNO (dest) < max_parm_reg
1021 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1022 && parmdecl_map[REGNO (dest)] != 0
1023 /* The insn to load an arg pseudo from a stack slot
1024 does not count as modifying it. */
1025 && in_nonparm_insns)
1026 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1030 #if 0 /* This is a good idea, but here is the wrong place for it. */
1031 /* Arrange that CONST_INTs always appear as the second operand
1032 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1033 always appear as the first. */
1035 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1036 || (XEXP (x, 1) == frame_pointer_rtx
1037 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1038 && XEXP (x, 1) == arg_pointer_rtx)))
1040 rtx t = XEXP (x, 0);
1041 XEXP (x, 0) = XEXP (x, 1);
1048 /* Replace this rtx with a copy of itself. */
1050 x = rtx_alloc (code);
1051 bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
1052 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1054 /* Now scan the subexpressions recursively.
1055 We can store any replaced subexpressions directly into X
1056 since we know X is not shared! Any vectors in X
1057 must be copied if X was copied. */
1059 format_ptr = GET_RTX_FORMAT (code);
1061 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1063 switch (*format_ptr++)
1066 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1070 /* Change any references to old-insns to point to the
1071 corresponding copied insns. */
1072 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1076 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1080 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1081 for (j = 0; j < XVECLEN (x, i); j++)
1083 = copy_for_inline (XVECEXP (x, i, j));
1089 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1091 orig_asm_operands_vector = XVEC (orig, 3);
1092 copy_asm_operands_vector = XVEC (x, 3);
1093 copy_asm_constraints_vector = XVEC (x, 4);
1099 /* Unfortunately, we need a global copy of const_equiv map for communication
1100 with a function called from note_stores. Be *very* careful that this
1101 is used properly in the presence of recursion. */
1103 rtx *global_const_equiv_map;
1105 #define FIXED_BASE_PLUS_P(X) \
1106 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1107 && GET_CODE (XEXP (X, 0)) == REG \
1108 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1109 && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
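/* For example, FIXED_BASE_PLUS_P matches an address of the form
   (plus (reg) (const_int 8)) where the register is one of the virtual
   registers, e.g. virtual_stack_vars_rtx.  */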
1111 /* Integrate the procedure defined by FNDECL. Note that this function
1112 may wind up calling itself. Since the static variables are not
1113 reentrant, we do not assign them until after the possibility
1114 of recursion is eliminated.
1116 If IGNORE is nonzero, do not produce a value.
1117 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1120 (rtx)-1 if we could not substitute the function
1121 0 if we substituted it and it does not produce a value
1122 else an rtx for where the value is stored. */
1125 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1130 rtx structure_value_addr;
1132 tree formal, actual, block;
1133 rtx header = DECL_SAVED_INSNS (fndecl);
1134 rtx insns = FIRST_FUNCTION_INSN (header);
1135 rtx parm_insns = FIRST_PARM_INSN (header);
1141 int min_labelno = FIRST_LABELNO (header);
1142 int max_labelno = LAST_LABELNO (header);
1144 rtx local_return_label = 0;
1147 struct inline_remap *map;
1149 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1151 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1152 max_regno = MAX_REGNUM (header) + 3;
1153 if (max_regno < FIRST_PSEUDO_REGISTER)
1156 nargs = list_length (DECL_ARGUMENTS (fndecl));
1158 /* We expect PARMS to have the right length; don't crash if not. */
1159 if (list_length (parms) != nargs)
1160 return (rtx) (HOST_WIDE_INT) -1;
1161 /* Also check that the parms' types match. Since the appropriate
1162 conversions or default promotions have already been applied,
1163 the machine modes should match exactly. */
1164 for (formal = DECL_ARGUMENTS (fndecl),
1167 formal = TREE_CHAIN (formal),
1168 actual = TREE_CHAIN (actual))
1170 tree arg = TREE_VALUE (actual);
1171 enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1172 if (mode != TYPE_MODE (TREE_TYPE (arg)))
1173 return (rtx) (HOST_WIDE_INT) -1;
1174 /* If they are block mode, the types should match exactly.
1175 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1176 which could happen if the parameter has incomplete type. */
1177 if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
1178 return (rtx) (HOST_WIDE_INT) -1;
1181 /* Make a binding contour to keep inline cleanups called at
1182 outer function-scope level from looking like they are shadowing
1183 parameter declarations. */
1186 /* Make a fresh binding contour that we can easily remove. */
1188 expand_start_bindings (0);
1189 if (GET_CODE (parm_insns) == NOTE
1190 && NOTE_LINE_NUMBER (parm_insns) > 0)
1192 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1193 NOTE_LINE_NUMBER (parm_insns));
1195 RTX_INTEGRATED_P (note) = 1;
1198 /* Expand the function arguments. Do this first so that any
1199 new registers get created before we allocate the maps. */
1201 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1202 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1204 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1206 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1208 /* Actual parameter, converted to the type of the argument within the
1210 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1211 /* Mode of the variable used within the function. */
1212 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1213 /* Where parameter is located in the function. */
1216 /* Make sure this formal has some correspondence in the user's code
1217 * before emitting any line notes for it. */
1218 if (DECL_SOURCE_LINE (formal))
1220 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1221 DECL_SOURCE_LINE (formal));
1223 RTX_INTEGRATED_P (note) = 1;
1227 loc = RTVEC_ELT (arg_vector, i);
1229 /* If this is an object passed by invisible reference, we copy the
1230 object into a stack slot and save its address. If this will go
1231 into memory, we do nothing now. Otherwise, we just expand the
1233 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1234 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1237 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1238 int_size_in_bytes (TREE_TYPE (arg)), 1);
1240 store_expr (arg, stack_slot, 0);
1242 arg_vals[i] = XEXP (stack_slot, 0);
1244 else if (GET_CODE (loc) != MEM)
1245 /* The mode of LOC and ARG can differ if LOC was a variable
1246 that had its mode promoted via PROMOTED_MODE. */
1247 arg_vals[i] = convert_to_mode (GET_MODE (loc),
1248 expand_expr (arg, NULL_RTX, mode,
1250 TREE_UNSIGNED (TREE_TYPE (formal)));
1254 if (arg_vals[i] != 0
1255 && (! TREE_READONLY (formal)
1256 /* If the parameter is not read-only, copy our argument through
1257 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1258 TARGET in any way. In the inline function, they will likely
1259 be two different pseudos, and `safe_from_p' will make all
1260 sorts of smart assumptions about their not conflicting.
1261 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1262 wrong, so put ARG_VALS[I] into a fresh register. */
1264 && (GET_CODE (arg_vals[i]) == REG
1265 || GET_CODE (arg_vals[i]) == SUBREG
1266 || GET_CODE (arg_vals[i]) == MEM)
1267 && reg_overlap_mentioned_p (arg_vals[i], target))))
1268 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1271 /* Allocate the structures we use to remap things. */
1273 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1274 map->fndecl = fndecl;
1276 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1277 bzero (map->reg_map, max_regno * sizeof (rtx));
1279 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1280 map->label_map -= min_labelno;
1282 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1283 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1284 map->min_insnno = 0;
1285 map->max_insnno = INSN_UID (header);
1287 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1288 be large enough for all our pseudos. This is the number we are currently
1289 using plus the number in the called routine, plus 15 for each arg,
1290 five to compute the virtual frame pointer, and five for the return value.
1291 This should be enough for most cases. We do not reference entries
1292 outside the range of the map.
1294 ??? These numbers are quite arbitrary and were obtained by
1295 experimentation. At some point, we should try to allocate the
1296 table after all the parameters are set up so we can more accurately
1297 estimate the number of pseudos we will need. */
1299 map->const_equiv_map_size
1300 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1302 map->const_equiv_map
1303 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1304 bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));
1307 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1308 bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
1311 /* Record the current insn in case we have to set up pointers to frame
1312 and argument memory blocks. */
1313 map->insns_at_start = get_last_insn ();
1315 /* Update the outgoing argument size to allow for those in the inlined
1317 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1318 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1320 /* If the inline function needs to make PIC references, that means
1321 that this function's PIC offset table must be used. */
1322 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1323 current_function_uses_pic_offset_table = 1;
1325 /* Process each argument. For each, set up things so that the function's
1326 reference to the argument will refer to the argument being passed.
1327 We only replace REG with REG here. Any simplifications are done
1328 via const_equiv_map.
1330 We make two passes: In the first, we deal with parameters that will
1331 be placed into registers, since we need to ensure that the allocated
1332 register number fits in const_equiv_map. Then we store all non-register
1333 parameters into their memory location. */
1335 for (i = 0; i < nargs; i++)
1337 rtx copy = arg_vals[i];
1339 loc = RTVEC_ELT (arg_vector, i);
1341 /* There are three cases, each handled separately. */
1342 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1343 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1345 /* This must be an object passed by invisible reference (it could
1346 also be a variable-sized object, but we forbid inlining functions
1347 with variable-sized arguments). COPY is the address of the
1348 actual value (this computation will cause it to be copied). We
1349 map that address for the register, noting the actual address as
1350 an equivalent in case it can be substituted into the insns. */
1352 if (GET_CODE (copy) != REG)
1354 temp = copy_addr_to_reg (copy);
1355 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1357 map->const_equiv_map[REGNO (temp)] = copy;
1358 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1362 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1364 else if (GET_CODE (loc) == MEM)
1366 /* This is the case of a parameter that lives in memory.
1367 It will live in the block we allocate in the called routine's
1368 frame that simulates the incoming argument area. Do nothing
1369 now; we will call store_expr later. */
1372 else if (GET_CODE (loc) == REG)
1374 /* This is the good case where the parameter is in a register.
1375 If it is read-only and our argument is a constant, set up the
1376 constant equivalence.
1378 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1379 that flag set if it is a register. */
1381 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1382 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1383 && ! REG_USERVAR_P (copy)))
1385 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1386 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1387 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1389 map->const_equiv_map[REGNO (temp)] = copy;
1390 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1394 map->reg_map[REGNO (loc)] = copy;
1399 /* Free any temporaries we made setting up this parameter. */
1403 /* Now do the parameters that will be placed in memory. */
1405 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1406 formal; formal = TREE_CHAIN (formal), i++)
1408 rtx copy = arg_vals[i];
1410 loc = RTVEC_ELT (arg_vector, i);
1412 if (GET_CODE (loc) == MEM
1413 /* Exclude case handled above. */
1414 && ! (GET_CODE (XEXP (loc, 0)) == REG
1415 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1417 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1418 DECL_SOURCE_LINE (formal));
1420 RTX_INTEGRATED_P (note) = 1;
1422 /* Compute the address in the area we reserved and store the
1424 temp = copy_rtx_and_substitute (loc, map);
1425 subst_constants (&temp, NULL_RTX, map);
1426 apply_change_group ();
1427 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1428 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1429 store_expr (arg_trees[i], temp, 0);
1431 /* Free any temporaries we made setting up this parameter. */
1436 /* Deal with the places that the function puts its result.
1437 We are driven by what is placed into DECL_RESULT.
1439 Initially, we assume that we don't need any special handling for
1440 REG_FUNCTION_VALUE_P. */
1442 map->inline_target = 0;
1443 loc = DECL_RTL (DECL_RESULT (fndecl));
1444 if (TYPE_MODE (type) == VOIDmode)
1445 /* There is no return value to worry about. */
1447 else if (GET_CODE (loc) == MEM)
1449 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1452 /* Pass the function the address in which to return a structure value.
1453 Note that a constructor can cause someone to call us with
1454 STRUCTURE_VALUE_ADDR, but the initialization takes place
1455 via the first parameter, rather than the struct return address.
1457 We have two cases: If the address is a simple register indirect,
1458 use the mapping mechanism to point that register to our structure
1459 return address. Otherwise, store the structure return value into
1460 the place that it will be referenced from. */
1462 if (GET_CODE (XEXP (loc, 0)) == REG)
1464 temp = force_reg (Pmode, structure_value_addr);
1465 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1466 if (CONSTANT_P (structure_value_addr)
1467 || (GET_CODE (structure_value_addr) == PLUS
1468 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1469 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1471 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1472 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1477 temp = copy_rtx_and_substitute (loc, map);
1478 subst_constants (&temp, NULL_RTX, map);
1479 apply_change_group ();
1480 emit_move_insn (temp, structure_value_addr);
1484 /* We will ignore the result value, so don't look at its structure.
1485 Note that preparations for an aggregate return value
1486 do need to be made (above) even if it will be ignored. */
1488 else if (GET_CODE (loc) == REG)
1490 /* The function returns an object in a register and we use the return
1491 value. Set up our target for remapping. */
1493 /* Machine mode function was declared to return. */
1494 enum machine_mode departing_mode = TYPE_MODE (type);
1495 /* (Possibly wider) machine mode it actually computes
1496 (for the sake of callers that fail to declare it right). */
1497 enum machine_mode arriving_mode
1498 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1501 /* Don't use MEMs as direct targets because on some machines
1502 substituting a MEM for a REG makes invalid insns.
1503 Let the combiner substitute the MEM if that is valid. */
1504 if (target == 0 || GET_CODE (target) != REG
1505 || GET_MODE (target) != departing_mode)
1506 target = gen_reg_rtx (departing_mode);
1508 /* If function's value was promoted before return,
1509 avoid machine mode mismatch when we substitute INLINE_TARGET.
1510 But TARGET is what we will return to the caller. */
1511 if (arriving_mode != departing_mode)
1512 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1514 reg_to_map = target;
1516 /* Usually, the result value is the machine's return register.
1517 Sometimes it may be a pseudo. Handle both cases. */
1518 if (REG_FUNCTION_VALUE_P (loc))
1519 map->inline_target = reg_to_map;
1521 map->reg_map[REGNO (loc)] = reg_to_map;
1524 /* Make new label equivalences for the labels in the called function. */
1525 for (i = min_labelno; i < max_labelno; i++)
1526 map->label_map[i] = gen_label_rtx ();
1528 /* Perform postincrements before actually calling the function. */
1531 /* Clean up stack so that variables might have smaller offsets. */
1532 do_pending_stack_adjust ();
1534 /* Save a copy of the location of const_equiv_map for mark_stores, called
1536 global_const_equiv_map = map->const_equiv_map;
1538 /* Now copy the insns one by one. Do this in two passes, first the insns and
1539 then their REG_NOTES, just like save_for_inline. */
1541 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1543 for (insn = insns; insn; insn = NEXT_INSN (insn))
1547 map->orig_asm_operands_vector = 0;
1549 switch (GET_CODE (insn))
1552 pattern = PATTERN (insn);
1554 if (GET_CODE (pattern) == USE
1555 && GET_CODE (XEXP (pattern, 0)) == REG
1556 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1557 /* The (USE (REG n)) at return from the function should
1558 be ignored since we are changing (REG n) into
1562 /* Ignore setting a function value that we don't want to use. */
1563 if (map->inline_target == 0
1564 && GET_CODE (pattern) == SET
1565 && GET_CODE (SET_DEST (pattern)) == REG
1566 && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
1568 if (volatile_refs_p (SET_SRC (pattern)))
1570 /* If we must not delete the source,
1571 load it into a new temporary. */
1572 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1573 SET_DEST (PATTERN (copy))
1574 = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (copy))));
1580 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1581 /* REG_NOTES will be copied later. */
1584 /* If this insn is setting CC0, it may need to look at
1585 the insn that uses CC0 to see what type of insn it is.
1586 In that case, the call to recog via validate_change will
1587 fail. So don't substitute constants here. Instead,
1588 do it when we emit the following insn.
1590 For example, see the pyr.md file. That machine has signed and
1591 unsigned compares. The compare patterns must check the
1592 following branch insn to see what kind of compare to
1595 If the previous insn set CC0, substitute constants on it as
1597 if (sets_cc0_p (PATTERN (copy)) != 0)
1602 try_constants (cc0_insn, map);
1604 try_constants (copy, map);
1607 try_constants (copy, map);
1612 if (GET_CODE (PATTERN (insn)) == RETURN)
1614 if (local_return_label == 0)
1615 local_return_label = gen_label_rtx ();
1616 pattern = gen_jump (local_return_label);
1619 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1621 copy = emit_jump_insn (pattern);
1625 try_constants (cc0_insn, map);
1628 try_constants (copy, map);
1630 /* If this used to be a conditional jump insn whose branch
1631 direction is now known, we must do something special. */
1632 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1635 /* The previous insn set cc0 for us. So delete it. */
1636 delete_insn (PREV_INSN (copy));
1639 /* If this is now a no-op, delete it. */
1640 if (map->last_pc_value == pc_rtx)
1646 /* Otherwise, this is an unconditional jump so we must put a
1647 BARRIER after it. We could do some dead code elimination
1648 here, but jump.c will do it just as well. */
1654 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1655 copy = emit_call_insn (pattern);
1659 try_constants (cc0_insn, map);
1662 try_constants (copy, map);
1664 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1665 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1666 map->const_equiv_map[i] = 0;
1670 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1671 LABEL_NAME (copy) = LABEL_NAME (insn);
1676 copy = emit_barrier ();
1680 /* It is important to discard function-end and function-beg notes,
1681 so we have only one of each in the current function.
1682 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1683 deleted these in the copy used for continuing compilation,
1684 not the copy used for inlining). */
1685 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1686 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1687 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1688 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1699 RTX_INTEGRATED_P (copy) = 1;
1701 map->insn_map[INSN_UID (insn)] = copy;
1704 /* Now copy the REG_NOTES. */
1705 for (insn = insns; insn; insn = NEXT_INSN (insn))
1706 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1707 && map->insn_map[INSN_UID (insn)])
1708 REG_NOTES (map->insn_map[INSN_UID (insn)])
1709 = copy_rtx_and_substitute (REG_NOTES (insn), map);
1711 if (local_return_label)
1712 emit_label (local_return_label);
1714 /* Make copies of the decls of the symbols in the inline function, so that
1715 the copies of the variables get declared in the current function. Set
1716 up things so that lookup_static_chain knows to interpret registers
1717 in SAVE_EXPRs for TYPE_SIZEs as local. */
1719 inline_function_decl = fndecl;
1720 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1721 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1722 inline_function_decl = 0;
1724 /* End the scope containing the copied formal parameter variables
1725 and copied LABEL_DECLs. */
1727 expand_end_bindings (getdecls (), 1, 1);
1728 block = poplevel (1, 1, 0);
1729 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1730 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1732 emit_line_note (input_filename, lineno);
1734 if (structure_value_addr)
1735 return gen_rtx (MEM, TYPE_MODE (type),
1736 memory_address (TYPE_MODE (type), structure_value_addr));
1740 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1741 push all of those decls and give each one the corresponding home. */
1744 integrate_parm_decls (args, map, arg_vector)
1746 struct inline_remap *map;
1752 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1754 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1757 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1759 /* These args would always appear unused, if not for this. */
1760 TREE_USED (decl) = 1;
1761 /* Prevent warning for shadowing with these. */
1762 DECL_ABSTRACT_ORIGIN (decl) = tail;
1764 /* Fully instantiate the address with the equivalent form so that the
1765 debugging information contains the actual register, instead of the
1766 virtual register. Do this by not passing an insn to
1768 subst_constants (&new_decl_rtl, NULL_RTX, map);
1769 apply_change_group ();
1770 DECL_RTL (decl) = new_decl_rtl;
1774 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1775 current function a tree of contexts isomorphic to the one that is given.
1777 LEVEL indicates how far down into the BLOCK tree is the node we are
1778 currently traversing. It is always zero except for recursive calls.
1780 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1781 registers used in the DECL_RTL field should be remapped. If it is zero,
1782 no mapping is necessary. */
1785 integrate_decl_tree (let, level, map)
1788 struct inline_remap *map;
1795 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1797 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1798 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1799 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1800 if (DECL_RTL (t) != 0 && map)
1802 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1803 /* Fully instantiate the address with the equivalent form so that the
1804 debugging information contains the actual register, instead of the
1805 virtual register. Do this by not passing an insn to
1807 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1808 apply_change_group ();
1810 else if (DECL_RTL (t))
1811 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
1812 DECL_EXTERNAL (d) = DECL_EXTERNAL (t);
1813 TREE_STATIC (d) = TREE_STATIC (t);
1814 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1815 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1816 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1817 TREE_READONLY (d) = TREE_READONLY (t);
1818 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1819 /* These args would always appear unused, if not for this. */
1821 /* Prevent warning for shadowing with these. */
1822 DECL_ABSTRACT_ORIGIN (d) = t;
1826 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1827 integrate_decl_tree (t, level + 1, map);
1831 node = poplevel (1, 0, 0);
1834 TREE_USED (node) = TREE_USED (let);
1835 BLOCK_ABSTRACT_ORIGIN (node) = let;
1840 /* Create a new copy of an rtx.
1841 Recursively copies the operands of the rtx,
1842 except for those few rtx codes that are sharable.
1844 We always return an rtx that is similar to that incoming rtx, with the
1845 exception of possibly changing a REG to a SUBREG or vice versa. No
1846 rtl is ever emitted.
1848 Handle constants that need to be placed in the constant pool by
1849 calling `force_const_mem'. */
1852 copy_rtx_and_substitute (orig, map)
1854 struct inline_remap *map;
1856 register rtx copy, temp;
1858 register RTX_CODE code;
1859 register enum machine_mode mode;
1860 register char *format_ptr;
1866 code = GET_CODE (orig);
1867 mode = GET_MODE (orig);
1872 /* If the stack pointer register shows up, it must be part of
1873 stack-adjustments (*not* because we eliminated the frame pointer!).
1874 Small hard registers are returned as-is. Pseudo-registers
1875 go through their `reg_map'. */
1876 regno = REGNO (orig);
1877 if (regno <= LAST_VIRTUAL_REGISTER)
1879 /* Some hard registers are also mapped,
1880 but others are not translated. */
1881 if (map->reg_map[regno] != 0)
1882 return map->reg_map[regno];
1884 /* If this is the virtual frame pointer, make space in current
1885 function's stack frame for the stack frame of the inline function.
1887 Copy the address of this area into a pseudo. Map
1888 virtual_stack_vars_rtx to this pseudo and set up a constant
1889 equivalence for it to be the address. This will substitute the
1890 address into insns where it can be substituted and use the new
1891 pseudo where it can't. */
1892 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1895 int size = DECL_FRAME_SIZE (map->fndecl);
1899 loc = assign_stack_temp (BLKmode, size, 1);
1900 loc = XEXP (loc, 0);
1901 #ifdef FRAME_GROWS_DOWNWARD
1902 /* In this case, virtual_stack_vars_rtx points to one byte
1903 higher than the top of the frame area. So compute the offset
1904 to one byte higher than our substitute frame.
1905 Keep the fake frame pointer aligned like a real one. */
1906 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1907 loc = plus_constant (loc, rounded);
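/* Worked example of the rounding above (hypothetical numbers): with
   size == 10 and BIGGEST_ALIGNMENT / BITS_PER_UNIT == 8, CEIL_ROUND gives
   (10 + 8 - 1) & ~(8 - 1) == 16, so the substitute frame pointer keeps the
   same alignment a real frame pointer would have.  */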
1909 map->reg_map[regno] = temp
1910 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1911 map->const_equiv_map[REGNO (temp)] = loc;
1912 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1914 seq = gen_sequence ();
1916 emit_insn_after (seq, map->insns_at_start);
1919 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1921 /* Do the same for a block to contain any arguments referenced in memory.  */
1924 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1927 loc = assign_stack_temp (BLKmode, size, 1);
1928 loc = XEXP (loc, 0);
1929 /* When arguments grow downward, the virtual incoming
1930 args pointer points to the top of the argument block,
1931 so the remapped location better do the same. */
1932 #ifdef ARGS_GROW_DOWNWARD
1933 loc = plus_constant (loc, size);
1935 map->reg_map[regno] = temp
1936 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1937 map->const_equiv_map[REGNO (temp)] = loc;
1938 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1940 seq = gen_sequence ();
1942 emit_insn_after (seq, map->insns_at_start);
1945 else if (REG_FUNCTION_VALUE_P (orig))
1947 /* This is a reference to the function return value. If
1948 the function doesn't have a return value, error. If the
1949 mode doesn't agree, make a SUBREG. */
1950 if (map->inline_target == 0)
1951 /* Must be unrolling loops or replicating code if we
1952 reach here, so return the register unchanged. */
1954 else if (mode != GET_MODE (map->inline_target))
1955 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1957 return map->inline_target;
1961 if (map->reg_map[regno] == NULL)
1963 map->reg_map[regno] = gen_reg_rtx (mode);
1964 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1965 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1966 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1967 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1969 return map->reg_map[regno];
1972 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1973 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1974 if (GET_CODE (copy) == SUBREG)
1975 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1976 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1978 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1979 SUBREG_WORD (orig));
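/* For instance (a hypothetical case): if ORIG is (subreg:SI (reg:DI 40) 1)
   and the copy of (reg:DI 40) comes back as (subreg:DI (reg:TI 101) 2),
   the result is the single rtx (subreg:SI (reg:TI 101) 3) -- the word
   numbers are added rather than nesting one SUBREG inside another.  */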
1983 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
to (use foo) if foo is a register.  */
1985 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1986 if (GET_CODE (copy) == SUBREG)
1987 copy = SUBREG_REG (copy);
1988 return gen_rtx (code, VOIDmode, copy);
1991 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1992 = LABEL_PRESERVE_P (orig);
1993 return map->label_map[CODE_LABEL_NUMBER (orig)];
1996 copy = rtx_alloc (LABEL_REF);
1997 PUT_MODE (copy, mode);
1998 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1999 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2008 /* Symbols which represent the address of a label stored in the constant
2009 pool must be modified to point to a constant pool entry for the
2010 remapped label. Otherwise, symbols are returned unchanged. */
2011 if (CONSTANT_POOL_ADDRESS_P (orig))
2013 rtx constant = get_pool_constant (orig);
2014 if (GET_CODE (constant) == LABEL_REF)
2016 copy = rtx_alloc (LABEL_REF);
2017 PUT_MODE (copy, mode);
2019 = map->label_map[CODE_LABEL_NUMBER (XEXP (constant, 0))];
2020 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2021 copy = force_const_mem (Pmode, copy);
2022 return XEXP (copy, 0);
2028 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2029 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2030 duplicate of a CONST_DOUBLE we have already seen. */
2031 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2035 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2036 return immed_real_const_1 (d, GET_MODE (orig));
2039 return immed_double_const (CONST_DOUBLE_LOW (orig),
2040 CONST_DOUBLE_HIGH (orig), VOIDmode);
2043 /* Make new constant pool entry for a constant
2044 that was in the pool of the inline function. */
2045 if (RTX_INTEGRATED_P (orig))
2047 /* If this was an address of a constant pool entry that itself
2048 had to be placed in the constant pool, it might not be a
2049 valid address. So the recursive call below might turn it
2050 into a register. In that case, it isn't a constant any
2051 more, so return it. This has the potential of changing a
2052 MEM into a REG, but we'll assume that it is safe. */
2053 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2054 if (! CONSTANT_P (temp))
2056 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2061 /* If from constant pool address, make new constant pool entry and
2062 return its address. */
2063 if (! RTX_INTEGRATED_P (orig))
2066 temp = force_const_mem (GET_MODE (orig),
2067 copy_rtx_and_substitute (XEXP (orig, 0), map));
2070 /* Legitimizing the address here is incorrect.
2072 The only ADDRESS rtx's that can reach here are ones created by
2073 save_constants. Hence the operand of the ADDRESS is always legal
2074 in this position of the instruction, since the original rtx without
2075 the ADDRESS was legal.
2077 The reason we don't legitimize the address here is that on the
2078 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2079 This code forces the operand of the address to a register, which
2080 fails because we can not take the HIGH part of a register.
2082 Also, change_address may create new registers. These registers
2083 will not have valid reg_map entries. This can cause try_constants()
2084 to fail because it assumes that all registers in the rtx have valid
2085 reg_map entries, and it may end up replacing one of these new
2086 registers with junk. */
2088 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2089 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2092 return XEXP (temp, 0);
2095 /* If a single asm insn contains multiple output operands
2096 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2097 We must make sure that the copied insn continues to share it. */
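/* Sketch of why this matters (illustration only): an asm statement with two
   output operands expands to two insns whose ASM_OPERANDS rtx's share one
   operand vector.  By reusing map->copy_asm_operands_vector here, the two
   copied insns keep sharing a single vector, just as the originals did.  */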
2098 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2100 copy = rtx_alloc (ASM_OPERANDS);
2101 XSTR (copy, 0) = XSTR (orig, 0);
2102 XSTR (copy, 1) = XSTR (orig, 1);
2103 XINT (copy, 2) = XINT (orig, 2);
2104 XVEC (copy, 3) = map->copy_asm_operands_vector;
2105 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2106 XSTR (copy, 5) = XSTR (orig, 5);
2107 XINT (copy, 6) = XINT (orig, 6);
2113 /* This is given special treatment because the first
2114 operand of a CALL is a (MEM ...) which may get
2115 forced into a register for cse. This is undesirable
2116 if function-address cse isn't wanted or if we won't do cse. */
2117 #ifndef NO_FUNCTION_CSE
2118 if (! (optimize && ! flag_no_function_cse))
2120 return gen_rtx (CALL, GET_MODE (orig),
2121 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2122 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2123 copy_rtx_and_substitute (XEXP (orig, 1), map));
2127 /* Must be ifdefed out for loop unrolling to work. */
2133 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2135 If the nonlocal goto is into the current function,
2136 this will result in unnecessarily bad code, but should work. */
2137 if (SET_DEST (orig) == virtual_stack_vars_rtx
2138 || SET_DEST (orig) == virtual_incoming_args_rtx)
2139 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2140 copy_rtx_and_substitute (SET_SRC (orig), map));
2144 copy = rtx_alloc (MEM);
2145 PUT_MODE (copy, mode);
2146 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2147 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2148 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2149 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2153 copy = rtx_alloc (code);
2154 PUT_MODE (copy, mode);
2155 copy->in_struct = orig->in_struct;
2156 copy->volatil = orig->volatil;
2157 copy->unchanging = orig->unchanging;
2159 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2161 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2163 switch (*format_ptr++)
2169 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2173 /* Change any references to old-insns to point to the
2174 corresponding copied insns. */
2175 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2179 XVEC (copy, i) = XVEC (orig, i);
2180 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2182 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2183 for (j = 0; j < XVECLEN (copy, i); j++)
2184 XVECEXP (copy, i, j)
2185 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2190 XWINT (copy, i) = XWINT (orig, i);
2194 XINT (copy, i) = XINT (orig, i);
2198 XSTR (copy, i) = XSTR (orig, i);
2206 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2208 map->orig_asm_operands_vector = XVEC (orig, 3);
2209 map->copy_asm_operands_vector = XVEC (copy, 3);
2210 map->copy_asm_constraints_vector = XVEC (copy, 4);
2216 /* Substitute known constant values into INSN, if that is valid. */
2219 try_constants (insn, map)
2221 struct inline_remap *map;
2226 subst_constants (&PATTERN (insn), insn, map);
2228 /* Apply the changes if they are valid; otherwise discard them. */
2229 apply_change_group ();
2231 /* Show we don't know the value of anything stored or clobbered. */
2232 note_stores (PATTERN (insn), mark_stores);
2233 map->last_pc_value = 0;
2235 map->last_cc0_value = 0;
2238 /* Set up any constant equivalences made in this insn. */
2239 for (i = 0; i < map->num_sets; i++)
2241 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2243 int regno = REGNO (map->equiv_sets[i].dest);
2245 if (map->const_equiv_map[regno] == 0
2246 /* The following clause is a hack to handle the case where GNU C++
2247 reassigns a variable to make cse work right. */
2248 || ! rtx_equal_p (map->const_equiv_map[regno],
2249 map->equiv_sets[i].equiv))
2251 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2252 map->const_age_map[regno] = map->const_age;
2255 else if (map->equiv_sets[i].dest == pc_rtx)
2256 map->last_pc_value = map->equiv_sets[i].equiv;
2258 else if (map->equiv_sets[i].dest == cc0_rtx)
2259 map->last_cc0_value = map->equiv_sets[i].equiv;
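/* For example (hypothetical), after substituting into an insn whose pattern
   is (set (reg:SI 105) (const_int 12)), the loop above records
   const_equiv_map[105] = (const_int 12) with the current const_age, so a
   later insn that reads pseudo 105 can have the constant substituted in.  */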
2264 /* Substitute known constants for pseudo regs in the contents of LOC,
2265 which are part of INSN.
2266 If INSN is zero, the substitution should always be done (this is used to update DECL_RTL).
2268 These changes are taken out by try_constants if the result is not valid.
2270 Note that we are more concerned with determining when the result of a SET
2271 is a constant, for further propagation, than actually inserting constants
2272 into insns; cse will do the latter task better.
2274 This function is also used to adjust the address of items previously addressed
2275 via the virtual stack variable or virtual incoming arguments registers. */
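/* A small example of the intent (hypothetical): if const_equiv_map records
   that pseudo 104 currently equals (const_int 7), a source such as
   (plus:SI (reg:SI 104) (const_int 3)) is first rewritten to
   (plus:SI (const_int 7) (const_int 3)) and then simplified below to
   (const_int 10), which, being constant, can in turn be recorded as the
   known value of the SET's destination.  */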
2278 subst_constants (loc, insn, map)
2281 struct inline_remap *map;
2285 register enum rtx_code code;
2286 register char *format_ptr;
2287 int num_changes = num_validated_changes ();
2289 enum machine_mode op0_mode;
2291 code = GET_CODE (x);
2306 validate_change (insn, loc, map->last_cc0_value, 1);
2312 /* The only thing we can do with a USE or CLOBBER is possibly do
2313 some substitutions in a MEM within it. */
2314 if (GET_CODE (XEXP (x, 0)) == MEM)
2315 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2319 /* Substitute for parms and known constants. Don't replace
2320 hard regs used as user variables with constants. */
2322 int regno = REGNO (x);
2324 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2325 && regno < map->const_equiv_map_size
2326 && map->const_equiv_map[regno] != 0
2327 && map->const_age_map[regno] >= map->const_age)
2328 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2333 /* SUBREG applied to something other than a reg
2334 should be treated as ordinary, since that must
2335 be a special hack and we don't know how to treat it specially.
2336 Consider for example mulsidi3 in m68k.md.
2337 Ordinary SUBREG of a REG needs this special treatment. */
2338 if (GET_CODE (SUBREG_REG (x)) == REG)
2340 rtx inner = SUBREG_REG (x);
2343 /* We can't call subst_constants on &SUBREG_REG (x) because any
2344 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2345 see what is inside, try to form the new SUBREG and see if that is
2346 valid. We handle two cases: extracting a full word in an
2347 integral mode and extracting the low part. */
2348 subst_constants (&inner, NULL_RTX, map);
2350 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2351 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2352 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2353 new = operand_subword (inner, SUBREG_WORD (x), 0,
2354 GET_MODE (SUBREG_REG (x)));
2356 if (new == 0 && subreg_lowpart_p (x))
2357 new = gen_lowpart_common (GET_MODE (x), inner);
2360 validate_change (insn, loc, new, 1);
2367 subst_constants (&XEXP (x, 0), insn, map);
2369 /* If a memory address got spoiled, change it back. */
2370 if (insn != 0 && num_validated_changes () != num_changes
2371 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2372 cancel_changes (num_changes);
2377 /* Substitute constants in our source, and in any arguments to a
2378 complex (e.g., ZERO_EXTRACT) destination, but not in the destination itself.  */
2380 rtx *dest_loc = &SET_DEST (x);
2381 rtx dest = *dest_loc;
2384 subst_constants (&SET_SRC (x), insn, map);
2387 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2388 /* By convention, we always use ZERO_EXTRACT in the dest. */
2389 /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
2390 || GET_CODE (*dest_loc) == SUBREG
2391 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2393 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2395 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2396 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2398 dest_loc = &XEXP (*dest_loc, 0);
2401 /* Do substitute in the address of a destination in memory. */
2402 if (GET_CODE (*dest_loc) == MEM)
2403 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2405 /* Check for the case where DEST is a SUBREG, both it and the underlying
2406 register fit in one word, and the SUBREG has the wider mode.
2407 In that case, we are really setting the underlying register to the
2408 source converted to the mode of DEST.  So indicate that. */
2409 if (GET_CODE (dest) == SUBREG
2410 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2411 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2412 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2413 <= GET_MODE_SIZE (GET_MODE (dest)))
2414 && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2415 src = tem, dest = SUBREG_REG (dest);
2417 /* If storing a recognizable value, save it for later recording. */
2418 if ((map->num_sets < MAX_RECOG_OPERANDS)
2419 && (CONSTANT_P (src)
2420 || (GET_CODE (src) == PLUS
2421 && GET_CODE (XEXP (src, 0)) == REG
2422 && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2423 && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2424 && CONSTANT_P (XEXP (src, 1)))
2425 || GET_CODE (src) == COMPARE
2430 && (src == pc_rtx || GET_CODE (src) == RETURN
2431 || GET_CODE (src) == LABEL_REF))))
2433 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2434 it will cause us to save the COMPARE with any constants
2435 substituted, which is what we want for later. */
2436 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2437 map->equiv_sets[map->num_sets++].dest = dest;
2444 format_ptr = GET_RTX_FORMAT (code);
2446 /* If the first operand is an expression, save its mode for later. */
2447 if (*format_ptr == 'e')
2448 op0_mode = GET_MODE (XEXP (x, 0));
2450 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2452 switch (*format_ptr++)
2459 subst_constants (&XEXP (x, i), insn, map);
2469 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2472 for (j = 0; j < XVECLEN (x, i); j++)
2473 subst_constants (&XVECEXP (x, i, j), insn, map);
2482 /* If this is a commutative operation, move a constant to the second
2483 operand unless the second operand is already a CONST_INT. */
2484 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2485 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2487 rtx tem = XEXP (x, 0);
2488 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2489 validate_change (insn, &XEXP (x, 1), tem, 1);
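/* E.g. (illustration only): (plus:SI (const_int 4) (reg:SI 66)) is changed
   to (plus:SI (reg:SI 66) (const_int 4)), the canonical operand order that
   the simplification and recognition code expects.  */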
2492 /* Simplify the expression in case we put in some constants. */
2493 switch (GET_RTX_CLASS (code))
2496 new = simplify_unary_operation (code, GET_MODE (x),
2497 XEXP (x, 0), op0_mode);
2502 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2503 if (op_mode == VOIDmode)
2504 op_mode = GET_MODE (XEXP (x, 1));
2505 new = simplify_relational_operation (code, op_mode,
2506 XEXP (x, 0), XEXP (x, 1));
2507 #ifdef FLOAT_STORE_FLAG_VALUE
2508 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2509 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2510 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2517 new = simplify_binary_operation (code, GET_MODE (x),
2518 XEXP (x, 0), XEXP (x, 1));
2523 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2524 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2529 validate_change (insn, loc, new, 1);
2532 /* Show that the registers modified here no longer contain known constants.
2533 We are called from note_stores with parts of the new insn. */
2536 mark_stores (dest, x)
2540 if (GET_CODE (dest) == SUBREG)
2541 dest = SUBREG_REG (dest);
2543 if (GET_CODE (dest) == REG)
2544 global_const_equiv_map[REGNO (dest)] = 0;
2547 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2548 pointed to by PX, they represent constants in the constant pool.
2549 Replace these with a new memory reference obtained from force_const_mem.
2550 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2551 address of a constant pool entry. Replace them with the address of
2552 a new constant pool entry obtained from force_const_mem. */
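/* Illustrative example (hypothetical): an operand saved by save_constants as
   a (const:SI ...) with RTX_INTEGRATED_P set is rewritten below into a
   (mem:SI (symbol_ref ...)) referring to a constant pool entry of the
   function now being output, via force_const_mem; an integrated ADDRESS
   becomes just the (symbol_ref ...) for such an entry.  */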
2555 restore_constants (px)
2565 if (GET_CODE (x) == CONST_DOUBLE)
2567 /* We have to make a new CONST_DOUBLE to ensure that we account for
2568 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2569 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2573 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2574 *px = immed_real_const_1 (d, GET_MODE (x));
2577 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2581 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2583 restore_constants (&XEXP (x, 0));
2584 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2586 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2588 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2589 rtx new = XEXP (SUBREG_REG (x), 0);
2591 restore_constants (&new);
2592 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2593 PUT_MODE (new, GET_MODE (x));
2594 *px = validize_mem (new);
2596 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2598 restore_constants (&XEXP (x, 0));
2599 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2603 fmt = GET_RTX_FORMAT (GET_CODE (x));
2604 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2609 for (j = 0; j < XVECLEN (x, i); j++)
2610 restore_constants (&XVECEXP (x, i, j));
2614 restore_constants (&XEXP (x, i));
2621 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2622 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2623 that it points to the node itself, thus indicating that the node is its
2624 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2625 the given node is NULL, recursively descend the decl/block tree which
2626 it is the root of, and for each other ..._DECL or BLOCK node contained
2627 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2628 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2629 values to point to themselves. */
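/* For instance (hypothetical): given a BLOCK B whose vars are `x' and `y'
   and which has one sub-block S, and assuming none of them has an abstract
   origin yet, this sets BLOCK_ABSTRACT_ORIGIN (B) = B,
   DECL_ABSTRACT_ORIGIN (x) = x, DECL_ABSTRACT_ORIGIN (y) = y, and recurses
   so that BLOCK_ABSTRACT_ORIGIN (S) = S as well.  */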
2631 static void set_decl_origin_self ();
2634 set_block_origin_self (stmt)
2637 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2639 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2642 register tree local_decl;
2644 for (local_decl = BLOCK_VARS (stmt);
2645 local_decl != NULL_TREE;
2646 local_decl = TREE_CHAIN (local_decl))
2647 set_decl_origin_self (local_decl); /* Potential recursion. */
2651 register tree subblock;
2653 for (subblock = BLOCK_SUBBLOCKS (stmt);
2654 subblock != NULL_TREE;
2655 subblock = BLOCK_CHAIN (subblock))
2656 set_block_origin_self (subblock); /* Recurse. */
2661 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2662 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2663 node so that it points to the node itself, thus indicating that the
2664 node represents its own (abstract) origin. Additionally, if the
2665 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2666 the decl/block tree of which the given node is the root, and for
2667 each other ..._DECL or BLOCK node contained therein whose
2668 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2669 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2670 point to themselves. */
2673 set_decl_origin_self (decl)
2676 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2678 DECL_ABSTRACT_ORIGIN (decl) = decl;
2679 if (TREE_CODE (decl) == FUNCTION_DECL)
2683 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2684 DECL_ABSTRACT_ORIGIN (arg) = arg;
2685 if (DECL_INITIAL (decl) != NULL_TREE)
2686 set_block_origin_self (DECL_INITIAL (decl));
2691 /* Given a pointer to some BLOCK node, and a boolean value to set the
2692 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2693 the given block, and for all local decls and all local sub-blocks
2694 (recursively) which are contained therein. */
2696 void set_decl_abstract_flags ();
2699 set_block_abstract_flags (stmt, setting)
2701 register int setting;
2703 BLOCK_ABSTRACT (stmt) = setting;
2706 register tree local_decl;
2708 for (local_decl = BLOCK_VARS (stmt);
2709 local_decl != NULL_TREE;
2710 local_decl = TREE_CHAIN (local_decl))
2711 set_decl_abstract_flags (local_decl, setting);
2715 register tree subblock;
2717 for (subblock = BLOCK_SUBBLOCKS (stmt);
2718 subblock != NULL_TREE;
2719 subblock = BLOCK_CHAIN (subblock))
2720 set_block_abstract_flags (subblock, setting);
2724 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2725 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2726 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2727 set the abstract flags for all of the parameters, local vars, local
2728 blocks and sub-blocks (recursively) to the same setting. */
2731 set_decl_abstract_flags (decl, setting)
2733 register int setting;
2735 DECL_ABSTRACT (decl) = setting;
2736 if (TREE_CODE (decl) == FUNCTION_DECL)
2740 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2741 DECL_ABSTRACT (arg) = setting;
2742 if (DECL_INITIAL (decl) != NULL_TREE)
2743 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2747 /* Output the assembly language code for the function FNDECL
2748 from its DECL_SAVED_INSNS. Used for inline functions that are output
2749 at the end of compilation instead of where they appeared in the source. */
2752 output_inline_function (fndecl)
2755 rtx head = DECL_SAVED_INSNS (fndecl);
2758 temporary_allocation ();
2760 current_function_decl = fndecl;
2762 /* This call is only used to initialize global variables. */
2763 init_function_start (fndecl, "lossage", 1);
2765 /* Redo parameter determinations in case the FUNCTION_...
2766 macros took machine-specific actions that need to be redone. */
2767 assign_parms (fndecl, 1);
2769 /* Set stack frame size. */
2770 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2772 restore_reg_data (FIRST_PARM_INSN (head));
2774 stack_slot_list = STACK_SLOT_LIST (head);
2776 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2777 current_function_calls_alloca = 1;
2779 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2780 current_function_calls_setjmp = 1;
2782 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2783 current_function_calls_longjmp = 1;
2785 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2786 current_function_returns_struct = 1;
2788 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2789 current_function_returns_pcc_struct = 1;
2791 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2792 current_function_needs_context = 1;
2794 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2795 current_function_has_nonlocal_label = 1;
2797 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2798 current_function_returns_pointer = 1;
2800 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2801 current_function_uses_const_pool = 1;
2803 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2804 current_function_uses_pic_offset_table = 1;
2806 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2807 current_function_pops_args = POPS_ARGS (head);
2809 /* There is no need to output a return label again. */
2812 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2814 /* Find last insn and rebuild the constant pool. */
2815 for (last = FIRST_PARM_INSN (head);
2816 NEXT_INSN (last); last = NEXT_INSN (last))
2818 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2820 restore_constants (&PATTERN (last));
2821 restore_constants (&REG_NOTES (last));
2825 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2826 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2828 /* We must have already output DWARF debugging information for the
2829 original (abstract) inline function declaration/definition, so
2830 we want to make sure that the debugging information we generate
2831 for this special instance of the inline function refers back to
2832 the information we already generated. To make sure that happens,
2833 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2834 node (and for all of the local ..._DECL nodes which are its children)
2835 so that they all point to themselves. */
2837 set_decl_origin_self (fndecl);
2839 /* Compile this function all the way down to assembly code. */
2840 rest_of_compilation (fndecl);
2842 current_function_decl = 0;
2844 permanent_allocation ();