/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
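
/* Editorial example (not from the original source): CEIL_ROUND (13, 8)
   is (13 + 7) & ~7 == 16.  This only works when ALIGN is a power of
   two, so that ~((ALIGN) - 1) is a contiguous low-bit mask.  */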

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
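
/* Editorial example (not from the original source): for a function with
   two parameters, list_length (DECL_ARGUMENTS (DECL)) is 2, so the
   threshold is 8 * (8 + 2) == 80 insns.  */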

static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Large stacks are OK now that inlined functions can share them.  */
  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame for inlining";
#endif

  /* Don't inline functions which do not specify a function prototype and
     have a BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels cannot be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";
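
/* Editorial example (not from the original source, and assuming GNU C's
   address-of-label extension):

	void f () { static void *p = &&lab; lab: goto *p; }

   Here `&&lab' lands on forced_labels; the initializer of `p' would still
   refer to the old label after inlining renamed it.  */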

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  return 0;
}

/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the MEM.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

	the first insn of the function (not including the insns that copy
	parameters into registers),
	the first parameter insn of the function,
	the first label used by that function,
	the last label used by that function,
	the highest register number used for parameters,
	the total number of registers used,
	the size of the incoming stack area for parameters,
	the number of bytes popped on return,
	the stack slot list,
	some flags that are used to restore compiler globals,
	the value of current_function_outgoing_args_size,
	the original argument vector,
	and the original DECL_INITIAL.  */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl));
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
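
  /* Editorial note: LEN below is the size in bytes of a REG rtx -- the rtx
     header plus one rtunion per operand, minus the one rtunion already
     counted inside sizeof (struct rtx_def).  */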
  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
	 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
	 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the
     original subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original
     rtl which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this twice: the first pass copies each
     insn itself and its body; the second pass copies the REG_NOTES.  This is
     because a REG_NOTE may have a forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy)
	      = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
	DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Elsewise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;

  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   ??? Actually, we do not verify that FNDECL is not inline expanded
   by other functions which must also be written down at the end
   of compilation.  We could set flag_no_inline to nonzero when
   the time comes to write down such functions.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}

/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, its mode, and has
   RTX_INTEGRATED_P set.  */
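
/* Editorial example (not from the original source) of the rewrite this
   performs: a pool reference such as

	(mem:SI (symbol_ref ...))	; CONSTANT_POOL_ADDRESS_P

   becomes

	(const:SI ...)			; RTX_INTEGRATED_P set

   so that copy_for_inline can later recognize it and rebuild a pool
   entry from the saved constant.  */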

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		save_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   GET_MODE (x));

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  rtx new
	    = force_const_mem (GET_MODE (SUBREG_REG (x)),
			       copy_for_inline (XEXP (SUBREG_REG (x), 0)));

	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;

    case ADDRESS:
      /* If not marked special for the constant pool, this is an error.
	 Else get the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      return XEXP (force_const_mem (GET_MODE (x),
				    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
		   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of the const_equiv map for
   communication with a function called from note_stores.  Be *very* careful
   that this is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
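
/* Editorial example (not from the original source): FIXED_BASE_PLUS_P
   matches an address of the shape

	(plus (reg virtual-stack-vars) (const_int 4))

   i.e. a constant offset from a virtual register, which is as good as a
   constant for the const_equiv_map bookkeeping below.  */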

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));

      /* Make sure this formal has some correspondence in the user's code
	 before emitting any line notes for it.  */
      if (DECL_SOURCE_LINE (formal))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;
	}

      arg_trees[i] = arg;

      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
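
  /* Editorial example (hypothetical numbers): with max_reg_num () == 100,
     max_regno == FIRST_PSEUDO_REGISTER + 40, and nargs == 2, the map is
     sized 100 + 40 + 15*2 + 10 == 180 entries.  */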

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	{
	  /* This is the good case where the parameter is in a register.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */

	  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
	      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
		  && ! REG_USERVAR_P (copy))
	      || (GET_CODE (copy) == REG
		  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (loc), copy);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (loc)] = copy;
	}
      else if (GET_CODE (loc) == CONCAT)
	{
	  /* This is the good case where the parameter is in a
	     pair of separate pseudos.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
	      || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
		  && ! REG_USERVAR_P (copyreal))
	      || (GET_CODE (copyreal) == REG
		  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
	      if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyreal;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyreal = temp;
	    }
	  map->reg_map[REGNO (locreal)] = copyreal;

	  if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
	      || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
		  && ! REG_USERVAR_P (copyimag))
	      || (GET_CODE (copyimag) == REG
		  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
	      if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyimag;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyimag = temp;
	    }
	  map->reg_map[REGNO (locimag)] = copyimag;
	}
      else
	abort ();
    }

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
	abort ();

      /* Pass the function the address in which to return a structure value.
	 Note that a constructor can cause someone to call us with
	 STRUCTURE_VALUE_ADDR, but the initialization takes place
	 via the first parameter, rather than the struct return address.

	 We have two cases: If the address is a simple register indirect,
	 use the mapping mechanism to point that register to our structure
	 return address.  Otherwise, store the structure return value into
	 the place that it will be referenced from.  */

      if (GET_CODE (XEXP (loc, 0)) == REG)
	{
	  temp = force_reg (Pmode, structure_value_addr);
	  map->reg_map[REGNO (XEXP (loc, 0))] = temp;
	  if ((CONSTANT_P (structure_value_addr)
	       || (GET_CODE (structure_value_addr) == PLUS
		   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
	      && REGNO (temp) < map->const_equiv_map_size)
	    {
	      map->const_equiv_map[REGNO (temp)] = structure_value_addr;
	      map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
	    }
	}
      else
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  emit_move_insn (temp, structure_value_addr);
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
	= TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }

  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* The previous insn set cc0 for us.  So delete it.  */
	      delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    map->const_equiv_map[i] = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
1864 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1865 from parameters can be substituted in. These are the only ones that
1866 are valid across the entire function. */
1868 for (insn = insns; insn; insn = NEXT_INSN (insn))
1869 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1870 && map->insn_map[INSN_UID (insn)]
1871 && REG_NOTES (insn))
1873 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1874 /* We must also do subst_constants, in case one of our parameters
1875 has const type and constant value. */
1876 subst_constants (&tem, NULL_RTX, map);
1877 apply_change_group ();
1878 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
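  /* Illustrative example (not in the original source): if the copy of a
     parameter pseudo, say (reg 180), has const_equiv (const_int 5) at
     CONST_AGE_PARM, a copied note such as
     (expr_list:REG_EQUAL (plus:SI (reg 180) (const_int 1)) ...) is folded
     by subst_constants into (expr_list:REG_EQUAL (const_int 6) ...).  */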
  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));

  emit_line_note (input_filename, lineno);

  if (structure_value_addr)

      target = gen_rtx (MEM, TYPE_MODE (type),
                        memory_address (TYPE_MODE (type), structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;

/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

integrate_parm_decls (args, map, arg_vector)
     struct inline_remap *map;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)

      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
                                       TREE_TYPE (tail));
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = tail;

      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

integrate_decl_tree (let, level, map)
     struct inline_remap *map;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))

      push_obstacks_nochange ();
      saveable_allocation ();

      if (DECL_RTL (t) != 0)

          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
          /* Fully instantiate the address with the equivalent form so that the
             debugging information contains the actual register, instead of the
             virtual register.  Do this by not passing an insn to
             subst_constants.  */
          subst_constants (&DECL_RTL (d), NULL_RTX, map);
          apply_change_group ();

      /* These args would always appear unused, if not for this.  */

      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (d) = t;

      if (DECL_LANG_SPECIFIC (d))

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  node = poplevel (1, 0, 0);

  TREE_USED (node) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (node) = let;

/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to the incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

copy_rtx_and_substitute (orig, map)
     struct inline_remap *map;

  register rtx copy, temp;

  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)

          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in the current
             function's stack frame for the stack frame of the inline function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          if (regno == VIRTUAL_STACK_VARS_REGNUM)

              int size = DECL_FRAME_SIZE (map->fndecl);

              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.
                 Keep the fake frame pointer aligned like a real one.  */
              rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
              loc = plus_constant (loc, rounded);
#endif
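              /* Worked example (illustrative): with BIGGEST_ALIGNMENT of 64
                 bits on a byte-addressed machine the alignment unit is 8
                 bytes, so a frame of size 20 gives
                 CEIL_ROUND (20, 8) = (20 + 7) & ~7 = 24.  */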
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

              if (REGNO (temp) < map->const_equiv_map_size)

                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;

              seq = gen_sequence ();

              emit_insn_after (seq, map->insns_at_start);

          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)

              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location had better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

              if (REGNO (temp) < map->const_equiv_map_size)

                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;

              seq = gen_sequence ();

              emit_insn_after (seq, map->insns_at_start);

          else if (REG_FUNCTION_VALUE_P (orig))

              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.  If the
                 mode doesn't agree, make a SUBREG.  */
              if (map->inline_target == 0)
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
              else if (mode != GET_MODE (map->inline_target))
                return gen_lowpart (mode, map->inline_target);
              else
                return map->inline_target;

      if (map->reg_map[regno] == NULL)

          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

      return map->reg_map[regno];

      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));

      return gen_rtx (SUBREG, GET_MODE (orig), copy,
                      SUBREG_WORD (orig));
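      /* Illustrative example (not in the original source): if ORIG is
         (subreg:SI (reg:DI 70) 1) and (reg:DI 70) was remapped to
         (subreg:DI (reg:TI 90) 2), the word numbers add, yielding
         (subreg:SI (reg:TI 90) 3) instead of a nested SUBREG.  */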
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx (code, VOIDmode, copy);

      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
        = LABEL_PRESERVE_P (orig);
      return map->label_map[CODE_LABEL_NUMBER (orig)];

      copy = gen_rtx (LABEL_REF, mode,
                      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))

          rtx constant = get_pool_constant (orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem (Pmode,
                                          copy_rtx_and_substitute (constant,
                                                                   map)),
                         0);

      /* We have to make a new copy of this CONST_DOUBLE because we don't want
         to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));

        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))

          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call below might turn it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
          if (! CONSTANT_P (temp))
            return temp;

          return validize_mem (force_const_mem (GET_MODE (orig), temp));

      /* If from constant pool address, make new constant pool entry and
         return its address.  */
      if (! RTX_INTEGRATED_P (orig))
        abort ();

      temp = force_const_mem (GET_MODE (orig),
                              copy_rtx_and_substitute (XEXP (orig, 0), map));

      /* Legitimizing the address here is incorrect.

         The only ADDRESS rtx's that can reach here are ones created by
         save_constants.  Hence the operand of the ADDRESS is always valid
         in this position of the instruction, since the original rtx without
         the ADDRESS was valid.

         The reason we don't legitimize the address here is that on the
         Sparc, the caller may have a (high ...) surrounding this ADDRESS.
         This code forces the operand of the address to a register, which
         fails because we cannot take the HIGH part of a register.

         Also, change_address may create new registers.  These registers
         will not have valid reg_map entries.  This can cause try_constants()
         to fail because it assumes that all registers in the rtx have valid
         reg_map entries, and it may end up replacing one of these new
         registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
        temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));

      return XEXP (temp, 0);

      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))

          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
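      /* Illustrative example (not in the original source): for an asm with
         two outputs, e.g. asm ("..." : "=r" (a), "=r" (b) : "r" (c)), the
         insn is a PARALLEL of two SETs whose sources are two ASM_OPERANDS
         sharing one operand vector.  The first ASM_OPERANDS copied records
         its new vectors in MAP; this code lets the second copy share them
         again.  */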
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx (CALL, GET_MODE (orig),
                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
                        copy_rtx_and_substitute (XEXP (orig, 1), map));

      /* Must be ifdefed out for loop unrolling to work.  */

      /* If this is setting fp or ap, it means that we have a nonlocal goto.

         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        return gen_rtx (SET, VOIDmode, SET_DEST (orig),
                        copy_rtx_and_substitute (SET_SRC (orig), map));

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);

      /* If doing function inlining, this MEM might not be const in the
         function that it is being inlined into, and thus may not be
         unchanging after function inlining.  Constant pool references are
         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
         there.  */
      if (! map->integrating)
        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)

      switch (*format_ptr++)

          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);

          /* Change any references to old insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];

          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)

              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);

          XWINT (copy, i) = XWINT (orig, i);

          XINT (copy, i) = XINT (orig, i);

          XSTR (copy, i) = XSTR (orig, i);

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)

      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);

/* Substitute known constant values into INSN, if that is valid.  */

try_constants (insn, map)
     struct inline_remap *map;

  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;

  map->last_cc0_value = 0;

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)

      if (GET_CODE (map->equiv_sets[i].dest) == REG)

          int regno = REGNO (map->equiv_sets[i].dest);

          if (regno < map->const_equiv_map_size
              && (map->const_equiv_map[regno] == 0
                  /* The following clause is a hack to make the case work
                     where GNU C++ reassigns a variable to make cse work right.  */
                  || ! rtx_equal_p (map->const_equiv_map[regno],
                                    map->equiv_sets[i].equiv)))

              map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
              map->const_age_map[regno] = map->const_age;

      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;

      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
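/* Illustrative example (not in the original source): after copying
   (set (reg:SI 180) (const_int 4)), the loop above records
   const_equiv_map[180] = (const_int 4) at the current const_age, so
   subst_constants can replace later uses of (reg:SI 180) while the
   equivalence remains valid.  */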
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used
   to update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */

subst_constants (loc, insn, map)
     struct inline_remap *map;

  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();

  enum machine_mode op0_mode;

  code = GET_CODE (x);

      validate_change (insn, loc, map->last_cc0_value, 1);

      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);

      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */

        int regno = REGNO (x);

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && regno < map->const_equiv_map_size
            && map->const_equiv_map[regno] != 0
            && map->const_age_map[regno] >= map->const_age)
          validate_change (insn, loc, map->const_equiv_map[regno], 1);

      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)

          rtx inner = SUBREG_REG (x);

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

            validate_change (insn, loc, new, 1);

      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
          && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);

        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;

        subst_constants (&SET_SRC (x), insn, map);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               /* By convention, we always use ZERO_EXTRACT in the dest.  */
               /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)

            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)

                subst_constants (&XEXP (*dest_loc, 1), insn, map);
                subst_constants (&XEXP (*dest_loc, 2), insn, map);

            dest_loc = &XEXP (*dest_loc, 0);
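        /* Illustrative example (not in the original source): for a
           destination like
               (zero_extract:SI (reg:SI 80) (const_int 8) (reg:SI 81))
           the loop substitutes constants into the size and position
           operands, then descends to the innermost (reg:SI 80), which is
           deliberately left alone.  */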
        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map);

        /* Check for the case where DEST is a SUBREG, both it and the
           underlying register are no wider than one word, and the SUBREG
           has the wider mode.  In that case, we are really setting the
           underlying register to the source converted to the mode of DEST.
           So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);
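        /* Worked example (illustrative, on a 32-bit target): for
           (set (subreg:SI (reg:QI 90) 0) (const_int 300)), the store
           really sets (reg:QI 90) to the QImode lowpart of 300, so we
           record dest = (reg:QI 90) and src = (const_int 44), since
           300 & 0xff == 44.  */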
        /* If we are storing a recognizable value, save it for later
           recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE

                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))

            /* Normally, this copy won't do anything.  But if SRC is a
               COMPARE, it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)

      switch (*format_ptr++)

          subst_constants (&XEXP (x, i), insn, map);

          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)

              for (j = 0; j < XVECLEN (x, i); j++)
                subst_constants (&XVECEXP (x, i, j), insn, map);

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)

      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
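  /* Illustrative example (not in the original source): substitution may
     produce (plus:SI (const_int 4) (reg:SI 81)); the swap above restores
     the canonical form (plus:SI (reg:SI 81) (const_int 4)) before the
     simplify_*_operation calls below.  */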
  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))

      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);

        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                 GET_MODE (x)));
#endif

      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));

      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                        XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));

    validate_change (insn, loc, new, 1);

/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

mark_stores (dest, x)

  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)

      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));

      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);

      for (i = regno; i <= last_reg; i++)
        if (i < global_const_equiv_map_size)
          global_const_equiv_map[i] = 0;
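/* Illustrative example (not in the original source): on a 32-bit target
   where HARD_REGNO_NREGS (2, DImode) is 2, a store to (reg:DI 2) clears
   global_const_equiv_map[2] and global_const_equiv_map[3], since both
   hard registers lose any recorded constant.  A store to a pseudo clears
   only its own entry.  */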
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

restore_constants (px)

  if (GET_CODE (x) == CONST_DOUBLE)

      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));

        *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                  VOIDmode);

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)

      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)

      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)

      restore_constants (&XEXP (x, 0));
      *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);

      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)

              for (j = 0; j < XVECLEN (x, i); j++)
                restore_constants (&XVECEXP (x, i, j));

              restore_constants (&XEXP (x, i));

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree of which
   it is the root, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

set_block_origin_self (stmt)

  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)

      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);	/* Potential recursion.  */

        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);	/* Recurse.  */
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

set_decl_origin_self (decl)

  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)

      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE)
            set_block_origin_self (DECL_INITIAL (decl));

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

set_block_abstract_flags (stmt, setting)
     register int setting;

  BLOCK_ABSTRACT (stmt) = setting;

    register tree local_decl;

    for (local_decl = BLOCK_VARS (stmt);
         local_decl != NULL_TREE;
         local_decl = TREE_CHAIN (local_decl))
      set_decl_abstract_flags (local_decl, setting);

    register tree subblock;

    for (subblock = BLOCK_SUBBLOCKS (stmt);
         subblock != NULL_TREE;
         subblock = BLOCK_CHAIN (subblock))
      set_block_abstract_flags (subblock, setting);

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

set_decl_abstract_flags (decl, setting)
     register int setting;

  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);

/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at the end of compilation instead of where they came in the source.  */

output_inline_function (fndecl)

  if (output_bytecode)

      warning ("`inline' ignored for bytecode output");

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  restore_reg_data (FIRST_PARM_INSN (head));

  stack_slot_list = STACK_SLOT_LIST (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* There is no need to output a return label again.  */

  expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl), 0);

  /* Find the last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))

      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')

          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;