struct inline_remap
{
+ /* True if we are doing function integration, false otherwise.
+ Used to control whether RTX_UNCHANGING bits are copied by
+ copy_rtx_and_substitute. */
+ int integrating;
/* Definition of function be inlined. */
union tree_node *fndecl;
/* Place to put insns needed at start of function. */
this inline instance. These pseudos are then marked as being equivalent
to the appropriate address and substituted if valid. */
rtx *const_equiv_map;
+ /* Number of entries in const_equiv_map and const_arg_map. */
+ int const_equiv_map_size;
/* This is incremented for each new basic block.
It is used to store in const_age_map to record the domain of validity
of each entry in const_equiv_map.
/* NOTE(review): the lines below carry unified-diff "+"/"-" markers.
   This span looks like a patch hunk (old GCC integrate.h) converting
   empty K&R-style declarations, e.g. "extern rtx f ();", into
   PROTO((...)) prototypes, and deleting try_fold_condition outright.
   It is NOT compilable C as-is: either apply the patch or strip the
   markers before building.  TODO confirm against the original patch,
   since the hunk headers (@@ lines) are missing from this view.  */
/* Return a copy of an rtx (as needed), substituting pseudo-register,
   labels, and frame-pointer offsets as necessary. */
-extern rtx copy_rtx_and_substitute ();
+extern rtx copy_rtx_and_substitute PROTO((rtx, struct inline_remap *));
-extern void try_constants ();
+extern void try_constants PROTO((rtx, struct inline_remap *));
-extern void mark_stores ();
-
-/* We do some simple constant folding optimization. This optimization
- really exists primarily to save time inlining a function. It
- also helps users who ask for inline functions without -O. */
-extern rtx try_fold_condition ();
+extern void mark_stores PROTO((rtx, rtx));
extern rtx *global_const_equiv_map;