1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
32 #include "insn-config.h"
36 #include "integrate.h"
48 #define obstack_chunk_alloc xmalloc
49 #define obstack_chunk_free free
51 extern struct obstack *function_maybepermanent_obstack;
53 /* Similar, but round to the next highest integer that meets the
54 alignment. */
55 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
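/* Worked example, added for illustration only (values are made up):
   rounding a 13-byte frame up to an 8-byte alignment gives
   CEIL_ROUND (13, 8) == (13 + 8 - 1) & ~(8 - 1) == 20 & ~7 == 16. */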
57 /* Default max number of insns a function can have and still be inline.
58 This is overridden on RISC machines. */
59 #ifndef INTEGRATE_THRESHOLD
60 /* Inlining small functions might save more space than not inlining at
61 all. Assume 1 instruction for the call and 1.5 insns per argument. */
62 #define INTEGRATE_THRESHOLD(DECL) \
63 (optimize_size \
64 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
65 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
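/* Worked example, added for illustration only: for a function with two
   arguments, list_length (DECL_ARGUMENTS (DECL)) is 2, so the threshold is
   1 + (3 * 2) / 2 == 4 insns when optimizing for size, and
   8 * (8 + 2) == 80 insns otherwise. */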
69 /* Private type used by {get/has}_func_hard_reg_initial_val. */
70 typedef struct initial_value_pair {
71 rtx hard_reg;
72 rtx pseudo;
73 } initial_value_pair;
74 typedef struct initial_value_struct {
75 int num_entries;
76 int max_entries;
77 initial_value_pair *entries;
78 } initial_value_struct;
80 static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));
82 static rtvec initialize_for_inline PARAMS ((tree));
83 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
84 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
86 static tree integrate_decl_tree PARAMS ((tree,
87 struct inline_remap *));
88 static void subst_constants PARAMS ((rtx *, rtx,
89 struct inline_remap *, int));
90 static void set_block_origin_self PARAMS ((tree));
91 static void set_block_abstract_flags PARAMS ((tree, int));
92 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
94 void set_decl_abstract_flags PARAMS ((tree, int));
95 static void mark_stores PARAMS ((rtx, rtx, void *));
96 static void save_parm_insns PARAMS ((rtx, rtx));
97 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
99 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
101 static int compare_blocks PARAMS ((const PTR, const PTR));
102 static int find_block PARAMS ((const PTR, const PTR));
104 /* Used by copy_rtx_and_substitute; this indicates whether the function is
105 called for the purpose of inlining or some other purpose (e.g. loop
106 unrolling). This affects how constant pool references are handled.
107 This variable contains the struct function for the inlined function. */
108 static struct function *inlining = 0;
110 /* Returns the Ith entry in the label_map contained in MAP. If the
111 Ith entry has not yet been set, return a fresh label. This function
112 performs a lazy initialization of label_map, thereby avoiding huge memory
113 explosions when the label_map gets very large. */
116 get_label_from_map (map, i)
117 struct inline_remap *map;
120 rtx x = map->label_map[i];
123 x = map->label_map[i] = gen_label_rtx ();
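/* Illustrative use, not part of the original code: when the copy loops
   below meet a reference to label number 42 in the inlined body, they call
   get_label_from_map (map, 42); the CODE_LABEL is created on the first such
   call and the same rtx is returned for every later reference. */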
128 /* Return false if the function FNDECL cannot be inlined on account of its
129 attributes, true otherwise. */
131 function_attribute_inlinable_p (fndecl)
134 bool has_machine_attr = false;
137 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
139 tree name = TREE_PURPOSE (a);
142 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
144 if (is_attribute_p (targetm.attribute_table[i].name, name))
146 has_machine_attr = true;
150 if (has_machine_attr)
154 if (has_machine_attr)
155 return (*targetm.function_attribute_inlinable_p) (fndecl);
160 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
161 is safe and reasonable to integrate into other functions.
162 Nonzero means value is a warning msgid with a single %s
163 for the function's name. */
166 function_cannot_inline_p (fndecl)
167 register tree fndecl;
170 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
172 /* For functions marked as inline, increase the maximum size to
173 MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
174 use the limit given by INTEGRATE_THRESHOLD. */
176 int max_insns = (DECL_INLINE (fndecl))
177 ? (MAX_INLINE_INSNS
178 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
179 : INTEGRATE_THRESHOLD (fndecl);
181 register int ninsns = 0;
184 if (DECL_UNINLINABLE (fndecl))
185 return N_("function cannot be inline");
187 /* No inlines with varargs. */
188 if ((last && TREE_VALUE (last) != void_type_node)
189 || current_function_varargs)
190 return N_("varargs function cannot be inline");
192 if (current_function_calls_alloca)
193 return N_("function using alloca cannot be inline");
195 if (current_function_calls_setjmp)
196 return N_("function using setjmp cannot be inline");
198 if (current_function_calls_eh_return)
199 return N_("function uses __builtin_eh_return");
201 if (current_function_contains_functions)
202 return N_("function with nested functions cannot be inline");
206 N_("function with label addresses used in initializers cannot inline");
208 if (current_function_cannot_inline)
209 return current_function_cannot_inline;
211 /* If it's not even close, don't even look. */
212 if (get_max_uid () > 3 * max_insns)
213 return N_("function too large to be inline");
216 /* Don't inline functions which do not specify a function prototype and
217 have a BLKmode argument or take the address of a parameter. */
218 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
220 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
221 TREE_ADDRESSABLE (parms) = 1;
222 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
223 return N_("no prototype, and parameter address used; cannot be inline");
227 /* We can't inline functions that return structures
228 the old-fashioned PCC way, copying into a static block. */
229 if (current_function_returns_pcc_struct)
230 return N_("inline functions not supported for this return value type");
232 /* We can't inline functions that return structures of varying size. */
233 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
234 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
235 return N_("function with varying-size return value cannot be inline");
237 /* Cannot inline a function with a varying size argument or one that
238 receives a transparent union. */
239 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
241 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
242 return N_("function with varying-size parameter cannot be inline");
243 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
244 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
245 return N_("function with transparent unit parameter cannot be inline");
248 if (get_max_uid () > max_insns)
250 for (ninsns = 0, insn = get_first_nonparm_insn ();
251 insn && ninsns < max_insns;
252 insn = NEXT_INSN (insn))
256 if (ninsns >= max_insns)
257 return N_("function too large to be inline");
260 /* We will not inline a function which uses computed goto. The addresses of
261 its local labels, which may be tucked into global storage, are of course
262 not constant across instantiations, which causes unexpected behaviour. */
263 if (current_function_has_computed_jump)
264 return N_("function with computed jump cannot inline");
266 /* We cannot inline a nested function that jumps to a nonlocal label. */
267 if (current_function_has_nonlocal_goto)
268 return N_("function with nonlocal goto cannot be inline");
270 /* We can't inline functions that return a PARALLEL rtx. */
271 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
273 rtx result = DECL_RTL (DECL_RESULT (fndecl));
274 if (GET_CODE (result) == PARALLEL)
275 return N_("inline functions not supported for this return value type");
278 /* If the function has a target specific attribute attached to it,
279 then we assume that we should not inline it. This can be overridden
280 by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
281 if (!function_attribute_inlinable_p (fndecl))
282 return N_("function with target specific attribute(s) cannot be inlined");
287 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
288 Zero for a reg that isn't a parm's home.
289 Only reg numbers less than max_parm_reg are mapped here. */
290 static tree *parmdecl_map;
292 /* In save_for_inline, nonzero if past the parm-initialization insns. */
293 static int in_nonparm_insns;
295 /* Subroutine for `save_for_inline'. Performs initialization
296 needed to save FNDECL's insns and info for future inline expansion. */
299 initialize_for_inline (fndecl)
306 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
307 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
308 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
310 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
312 parms = TREE_CHAIN (parms), i++)
314 rtx p = DECL_RTL (parms);
316 /* If we have (mem (addressof (mem ...))), use the inner MEM since
317 otherwise the copy_rtx call below will not unshare the MEM, because
318 it shares ADDRESSOF. */
319 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
320 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
321 p = XEXP (XEXP (p, 0), 0);
323 RTVEC_ELT (arg_vector, i) = p;
325 if (GET_CODE (p) == REG)
326 parmdecl_map[REGNO (p)] = parms;
327 else if (GET_CODE (p) == CONCAT)
329 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
330 rtx pimag = gen_imagpart (GET_MODE (preal), p);
332 if (GET_CODE (preal) == REG)
333 parmdecl_map[REGNO (preal)] = parms;
334 if (GET_CODE (pimag) == REG)
335 parmdecl_map[REGNO (pimag)] = parms;
338 /* This flag is cleared later
339 if the function ever modifies the value of the parm. */
340 TREE_READONLY (parms) = 1;
346 /* Copy NODE, which must be a DECL; a PARM_DECL or RESULT_DECL is copied
347 as an equivalent VAR_DECL. The DECL originally was in the FROM_FN,
348 but now it will be in the TO_FN. */
351 copy_decl_for_inlining (decl, from_fn, to_fn)
358 /* Copy the declaration. */
359 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
361 /* For a parameter, we must make an equivalent VAR_DECL, not a
362 new PARM_DECL. */
363 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
364 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
365 TREE_READONLY (copy) = TREE_READONLY (decl);
366 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
370 copy = copy_node (decl);
371 if (DECL_LANG_SPECIFIC (copy))
372 copy_lang_decl (copy);
374 /* TREE_ADDRESSABLE isn't used to indicate that a label's
375 address has been taken; it's for internal bookkeeping in
376 expand_goto_internal. */
377 if (TREE_CODE (copy) == LABEL_DECL)
378 TREE_ADDRESSABLE (copy) = 0;
381 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
382 declaration inspired this copy. */
383 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
385 /* The new variable/label has no RTL, yet. */
386 SET_DECL_RTL (copy, NULL_RTX);
388 /* These args would always appear unused, if not for this. */
389 TREE_USED (copy) = 1;
391 /* Set the context for the new declaration. */
392 if (!DECL_CONTEXT (decl))
393 /* Globals stay global. */
395 else if (DECL_CONTEXT (decl) != from_fn)
396 /* Things that weren't in the scope of the function we're inlining
397 from aren't in the scope we're inlining to, either. */
399 else if (TREE_STATIC (decl))
400 /* Function-scoped static variables should stay in the original
404 /* Ordinary automatic local variables are now in the scope of the
406 DECL_CONTEXT (copy) = to_fn;
411 /* Make the insns and PARM_DECLs of the current function permanent
412 and record other information in DECL_SAVED_INSNS to allow inlining
413 of this function in subsequent calls.
415 This routine need not copy any insns because we are not going
416 to immediately compile the insns in the insn chain. There
417 are two cases when we would compile the insns for FNDECL:
418 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
419 be output at the end of other compilation, because somebody took
420 its address. In the first case, the insns of FNDECL are copied
421 as it is expanded inline, so FNDECL's saved insns are not
422 modified. In the second case, FNDECL is used for the last time,
423 so modifying the rtl is not a problem.
425 We don't have to worry about FNDECL being inline expanded by
426 other functions which are written at the end of compilation
427 because flag_no_inline is turned on when we begin writing
428 functions at the end of compilation. */
431 save_for_inline (fndecl)
436 rtx first_nonparm_insn;
438 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
439 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
440 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
441 for the parms, prior to elimination of virtual registers.
442 These values are needed for substituting parms properly. */
444 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
446 /* Make and emit a return-label if we have not already done so. */
448 if (return_label == 0)
450 return_label = gen_label_rtx ();
451 emit_label (return_label);
454 argvec = initialize_for_inline (fndecl);
456 /* Delete basic block notes created by an early run of find_basic_blocks.
457 The notes would later be used by find_basic_blocks to reuse the memory
458 for basic_block structures on an already freed obstack. */
459 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
460 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
461 delete_related_insns (insn);
463 /* If there are insns that copy parms from the stack into pseudo registers,
464 those insns are not copied. `expand_inline_function' must
465 emit the correct code to handle such things. */
468 if (GET_CODE (insn) != NOTE)
471 /* Get the insn which signals the end of parameter setup code. */
472 first_nonparm_insn = get_first_nonparm_insn ();
474 /* Now just scan the chain of insns to see what happens to our
475 PARM_DECLs. If a PARM_DECL is used but never modified, we
476 can substitute its rtl directly when expanding inline (and
477 perform constant folding when its incoming value is constant).
478 Otherwise, we have to copy its value into a new register and track
479 the new register's life. */
480 in_nonparm_insns = 0;
481 save_parm_insns (insn, first_nonparm_insn);
483 cfun->inl_max_label_num = max_label_num ();
484 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
485 cfun->original_arg_vector = argvec;
486 cfun->original_decl_initial = DECL_INITIAL (fndecl);
487 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
488 DECL_SAVED_INSNS (fndecl) = cfun;
494 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
495 PARM_DECL is used but never modified, we can substitute its rtl directly
496 when expanding inline (and perform constant folding when its incoming
497 value is constant). Otherwise, we have to copy its value into a new
498 register and track the new register's life. */
501 save_parm_insns (insn, first_nonparm_insn)
503 rtx first_nonparm_insn;
505 if (insn == NULL_RTX)
508 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
510 if (insn == first_nonparm_insn)
511 in_nonparm_insns = 1;
515 /* Record what interesting things happen to our parameters. */
516 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
518 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
519 three attached sequences: normal call, sibling call and tail
520 recursion sequences. */
521 if (GET_CODE (insn) == CALL_INSN
522 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
526 for (i = 0; i < 3; i++)
527 save_parm_insns (XEXP (PATTERN (insn), i),
534 /* Note whether a parameter is modified or not. */
537 note_modified_parmregs (reg, x, data)
539 rtx x ATTRIBUTE_UNUSED;
540 void *data ATTRIBUTE_UNUSED;
542 if (GET_CODE (reg) == REG && in_nonparm_insns
543 && REGNO (reg) < max_parm_reg
544 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
545 && parmdecl_map[REGNO (reg)] != 0)
546 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
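/* Illustrative example, not part of the original code: once past the
   parameter setup insns, a store such as (set (reg:SI 70) ...) where pseudo
   70 is a parameter's home clears TREE_READONLY on that PARM_DECL, so the
   inliner will later copy the argument into a new register instead of
   substituting its rtl directly. */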
549 /* Unfortunately, we need a global copy of const_equiv map for communication
550 with a function called from note_stores. Be *very* careful that this
551 is used properly in the presence of recursion. */
553 varray_type global_const_equiv_varray;
555 #define FIXED_BASE_PLUS_P(X) \
556 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
557 && GET_CODE (XEXP (X, 0)) == REG \
558 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
559 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
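/* Illustrative example, not part of the original code: an address of the
   form (plus (reg virtual-stack-vars) (const_int 8)) satisfies
   FIXED_BASE_PLUS_P because the inner REG is one of the virtual registers,
   so it can be recorded as a constant equivalence below; a PLUS whose base
   is an ordinary pseudo does not qualify. */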
561 /* Called to set up a mapping for the case where a parameter is in a
562 register. If it is read-only and our argument is a constant, set up the
563 constant equivalence.
565 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
568 Also, don't allow hard registers here; they might not be valid when
569 substituted into insns. */
571 process_reg_param (map, loc, copy)
572 struct inline_remap *map;
575 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
576 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
577 && ! REG_USERVAR_P (copy))
578 || (GET_CODE (copy) == REG
579 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
581 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
582 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
583 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
584 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
587 map->reg_map[REGNO (loc)] = copy;
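/* Illustrative example, not part of the original code: if LOC is the
   parameter's home pseudo (reg:SI 58) and the actual argument folded to
   COPY == (const_int 4), the constant is copied into a fresh pseudo,
   a CONST_AGE_PARM equivalence to (const_int 4) is recorded for that
   pseudo, and reg_map[58] is set to point at it. */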
590 /* Compare two BLOCKs for qsort. The key we sort on is the
591 BLOCK_ABSTRACT_ORIGIN of the blocks. */
594 compare_blocks (v1, v2)
598 tree b1 = *((const tree *) v1);
599 tree b2 = *((const tree *) v2);
601 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
602 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
605 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
606 an original block; the second to a remapped equivalent. */
613 const union tree_node *b1 = (const union tree_node *) v1;
614 tree b2 = *((const tree *) v2);
616 return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
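/* Usage sketch, not part of the original code: expand_inline_function
   below qsorts map->block_map with compare_blocks and then bsearches it
   with find_block, passing the original BLOCK as the key so that it is
   matched against each remapped block's BLOCK_ABSTRACT_ORIGIN. */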
619 /* Integrate the procedure defined by FNDECL. Note that this function
620 may wind up calling itself. Since the static variables are not
621 reentrant, we do not assign them until after the possibility
622 of recursion is eliminated.
624 If IGNORE is nonzero, do not produce a value.
625 Otherwise store the value in TARGET if it is nonzero and that is convenient.
627 Value is:
628 (rtx)-1 if we could not substitute the function
629 0 if we substituted it and it does not produce a value
630 else an rtx for where the value is stored. */
633 expand_inline_function (fndecl, parms, target, ignore, type,
634 structure_value_addr)
639 rtx structure_value_addr;
641 struct function *inlining_previous;
642 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
643 tree formal, actual, block;
644 rtx parm_insns = inl_f->emit->x_first_insn;
645 rtx insns = (inl_f->inl_last_parm_insn
646 ? NEXT_INSN (inl_f->inl_last_parm_insn)
652 int min_labelno = inl_f->emit->x_first_label_num;
653 int max_labelno = inl_f->inl_max_label_num;
658 struct inline_remap *map = 0;
662 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
663 rtx static_chain_value = 0;
665 int eh_region_offset;
667 /* The pointer used to track the true location of the memory used
668 for MAP->LABEL_MAP. */
669 rtx *real_label_map = 0;
671 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
672 max_regno = inl_f->emit->x_reg_rtx_no + 3;
673 if (max_regno < FIRST_PSEUDO_REGISTER)
676 /* Pull out the decl for the function definition; fndecl may be a
677 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
678 fndecl = inl_f->decl;
680 nargs = list_length (DECL_ARGUMENTS (fndecl));
682 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
683 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
685 /* Check that the parm types match and that sufficient arguments were
686 passed. Since the appropriate conversions or default promotions have
687 already been applied, the machine modes should match exactly. */
689 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
691 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
694 enum machine_mode mode;
697 return (rtx) (HOST_WIDE_INT) -1;
699 arg = TREE_VALUE (actual);
700 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
702 if (arg == error_mark_node
703 || mode != TYPE_MODE (TREE_TYPE (arg))
704 /* If they are block mode, the types should match exactly.
705 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
706 which could happen if the parameter has incomplete type. */
708 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
709 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
710 return (rtx) (HOST_WIDE_INT) -1;
713 /* Extra arguments are valid, but will be ignored below, so we must
714 evaluate them here for side-effects. */
715 for (; actual; actual = TREE_CHAIN (actual))
716 expand_expr (TREE_VALUE (actual), const0_rtx,
717 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
719 /* Expand the function arguments. Do this first so that any
720 new registers get created before we allocate the maps. */
722 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
723 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
725 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
727 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
729 /* Actual parameter, converted to the type of the argument within the
731 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
732 /* Mode of the variable used within the function. */
733 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
737 loc = RTVEC_ELT (arg_vector, i);
739 /* If this is an object passed by invisible reference, we copy the
740 object into a stack slot and save its address. If this will go
741 into memory, we do nothing now. Otherwise, we just expand the
743 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
744 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
746 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
748 store_expr (arg, stack_slot, 0);
749 arg_vals[i] = XEXP (stack_slot, 0);
752 else if (GET_CODE (loc) != MEM)
754 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
756 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
757 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
759 pmode = promote_mode (TREE_TYPE (formal), pmode,
762 if (GET_MODE (loc) != pmode)
765 /* The mode of LOC and ARG can differ if LOC was a variable
766 that had its mode promoted via PROMOTED_MODE. */
767 arg_vals[i] = convert_modes (pmode,
768 TYPE_MODE (TREE_TYPE (arg)),
769 expand_expr (arg, NULL_RTX, mode,
774 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
780 && (! TREE_READONLY (formal)
781 /* If the parameter is not read-only, copy our argument through
782 a register. Also, we cannot use ARG_VALS[I] if it overlaps
783 TARGET in any way. In the inline function, they will likely
784 be two different pseudos, and `safe_from_p' will make all
785 sorts of smart assumptions about their not conflicting.
786 But if ARG_VALS[I] overlaps TARGET, these assumptions are
787 wrong, so put ARG_VALS[I] into a fresh register.
788 Don't worry about invisible references, since their stack
789 temps will never overlap the target. */
792 && (GET_CODE (arg_vals[i]) == REG
793 || GET_CODE (arg_vals[i]) == SUBREG
794 || GET_CODE (arg_vals[i]) == MEM)
795 && reg_overlap_mentioned_p (arg_vals[i], target))
796 /* ??? We must always copy a SUBREG into a REG, because it might
797 get substituted into an address, and not all ports correctly
798 handle SUBREGs in addresses. */
799 || (GET_CODE (arg_vals[i]) == SUBREG)))
800 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
802 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
803 && POINTER_TYPE_P (TREE_TYPE (formal)))
804 mark_reg_pointer (arg_vals[i],
805 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
808 /* Allocate the structures we use to remap things. */
810 map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
811 map->fndecl = fndecl;
813 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
814 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
816 /* We used to use alloca here, but the size of what it would try to
817 allocate would occasionally cause it to exceed the stack limit and
818 cause unpredictable core dumps. */
820 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
821 map->label_map = real_label_map;
822 map->local_return_label = NULL_RTX;
824 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
825 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
827 map->max_insnno = inl_max_uid;
829 map->integrating = 1;
830 map->compare_src = NULL_RTX;
831 map->compare_mode = VOIDmode;
833 /* const_equiv_varray maps pseudos in our routine to constants, so
834 it needs to be large enough for all our pseudos. This is the
835 number we are currently using plus the number in the called
836 routine, plus 15 for each arg, five to compute the virtual frame
837 pointer, and five for the return value. This should be enough
838 for most cases. We do not reference entries outside the range of
841 ??? These numbers are quite arbitrary and were obtained by
842 experimentation. At some point, we should try to allocate the
843 table after all the parameters are set up so we can more accurately
844 estimate the number of pseudos we will need. */
846 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
847 (max_reg_num ()
848 + (max_regno - FIRST_PSEUDO_REGISTER)
849 + 15 * nargs
850 + 10),
851 "expand_inline_function");
854 /* Record the current insn in case we have to set up pointers to frame
855 and argument memory blocks. If there are no insns yet, add a dummy
856 insn that can be used as an insertion point. */
857 map->insns_at_start = get_last_insn ();
858 if (map->insns_at_start == 0)
859 map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);
861 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
862 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
864 /* Update the outgoing argument size to allow for those in the inlined
866 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
867 current_function_outgoing_args_size = inl_f->outgoing_args_size;
869 /* If the inline function needs to make PIC references, that means
870 that this function's PIC offset table must be used. */
871 if (inl_f->uses_pic_offset_table)
872 current_function_uses_pic_offset_table = 1;
874 /* If this function needs a context, set it up. */
875 if (inl_f->needs_context)
876 static_chain_value = lookup_static_chain (fndecl);
878 if (GET_CODE (parm_insns) == NOTE
879 && NOTE_LINE_NUMBER (parm_insns) > 0)
881 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
882 NOTE_LINE_NUMBER (parm_insns));
884 RTX_INTEGRATED_P (note) = 1;
887 /* Process each argument. For each, set up things so that the function's
888 reference to the argument will refer to the argument being passed.
889 We only replace REG with REG here. Any simplifications are done
892 We make two passes: In the first, we deal with parameters that will
893 be placed into registers, since we need to ensure that the allocated
894 register number fits in const_equiv_map. Then we store all non-register
895 parameters into their memory location. */
897 /* Don't try to free temp stack slots here, because we may put one of the
898 parameters into a temp stack slot. */
900 for (i = 0; i < nargs; i++)
902 rtx copy = arg_vals[i];
904 loc = RTVEC_ELT (arg_vector, i);
906 /* There are three cases, each handled separately. */
907 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
908 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
910 /* This must be an object passed by invisible reference (it could
911 also be a variable-sized object, but we forbid inlining functions
912 with variable-sized arguments). COPY is the address of the
913 actual value (this computation will cause it to be copied). We
914 map that address for the register, noting the actual address as
915 an equivalent in case it can be substituted into the insns. */
917 if (GET_CODE (copy) != REG)
919 temp = copy_addr_to_reg (copy);
920 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
921 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
924 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
926 else if (GET_CODE (loc) == MEM)
928 /* This is the case of a parameter that lives in memory. It
929 will live in the block we allocate in the called routine's
930 frame that simulates the incoming argument area. Do nothing
931 with the parameter now; we will call store_expr later. In
932 this case, however, we must ensure that the virtual stack and
933 incoming arg rtx values are expanded now so that we can be
934 sure we have enough slots in the const equiv map since the
935 store_expr call can easily blow the size estimate. */
936 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
937 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
939 else if (GET_CODE (loc) == REG)
940 process_reg_param (map, loc, copy);
941 else if (GET_CODE (loc) == CONCAT)
943 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
944 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
945 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
946 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
948 process_reg_param (map, locreal, copyreal);
949 process_reg_param (map, locimag, copyimag);
955 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
956 specially. This function can be called recursively, so we need to
957 save the previous value. */
958 inlining_previous = inlining;
961 /* Now do the parameters that will be placed in memory. */
963 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
964 formal; formal = TREE_CHAIN (formal), i++)
966 loc = RTVEC_ELT (arg_vector, i);
968 if (GET_CODE (loc) == MEM
969 /* Exclude case handled above. */
970 && ! (GET_CODE (XEXP (loc, 0)) == REG
971 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
973 rtx note = emit_note (DECL_SOURCE_FILE (formal),
974 DECL_SOURCE_LINE (formal));
976 RTX_INTEGRATED_P (note) = 1;
978 /* Compute the address in the area we reserved and store the
980 temp = copy_rtx_and_substitute (loc, map, 1);
981 subst_constants (&temp, NULL_RTX, map, 1);
982 apply_change_group ();
983 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
984 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
985 store_expr (arg_trees[i], temp, 0);
989 /* Deal with the places that the function puts its result.
990 We are driven by what is placed into DECL_RESULT.
992 Initially, we assume that we don't need any special handling for
993 REG_FUNCTION_VALUE_P. */
995 map->inline_target = 0;
996 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
997 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
999 if (TYPE_MODE (type) == VOIDmode)
1000 /* There is no return value to worry about. */
1002 else if (GET_CODE (loc) == MEM)
1004 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1006 temp = copy_rtx_and_substitute (loc, map, 1);
1007 subst_constants (&temp, NULL_RTX, map, 1);
1008 apply_change_group ();
1013 if (! structure_value_addr
1014 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1017 /* Pass the function the address in which to return a structure
1018 value. Note that a constructor can cause someone to call us
1019 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1020 via the first parameter, rather than the struct return address.
1022 We have two cases: If the address is a simple register
1023 indirect, use the mapping mechanism to point that register to
1024 our structure return address. Otherwise, store the structure
1025 return value into the place that it will be referenced from. */
1027 if (GET_CODE (XEXP (loc, 0)) == REG)
1029 temp = force_operand (structure_value_addr, NULL_RTX);
1030 temp = force_reg (Pmode, temp);
1031 /* A virtual register might be invalid in an insn, because
1032 it can cause trouble in reload. Since we don't have access
1033 to the expanders at map translation time, make sure we have
1034 a proper register now.
1035 If a virtual register is actually valid, cse or combine
1036 can put it into the mapped insns. */
1037 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
1038 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
1039 temp = copy_to_mode_reg (Pmode, temp);
1040 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1042 if (CONSTANT_P (structure_value_addr)
1043 || GET_CODE (structure_value_addr) == ADDRESSOF
1044 || (GET_CODE (structure_value_addr) == PLUS
1045 && (XEXP (structure_value_addr, 0)
1046 == virtual_stack_vars_rtx)
1047 && (GET_CODE (XEXP (structure_value_addr, 1))
1050 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1056 temp = copy_rtx_and_substitute (loc, map, 1);
1057 subst_constants (&temp, NULL_RTX, map, 0);
1058 apply_change_group ();
1059 emit_move_insn (temp, structure_value_addr);
1064 /* We will ignore the result value, so don't look at its structure.
1065 Note that preparations for an aggregate return value
1066 do need to be made (above) even if it will be ignored. */
1068 else if (GET_CODE (loc) == REG)
1070 /* The function returns an object in a register and we use the return
1071 value. Set up our target for remapping. */
1073 /* Machine mode the function was declared to return. */
1074 enum machine_mode departing_mode = TYPE_MODE (type);
1075 /* (Possibly wider) machine mode it actually computes
1076 (for the sake of callers that fail to declare it right).
1077 We have to use the mode of the result's RTL, rather than
1078 its type, since expand_function_start may have promoted it. */
1079 enum machine_mode arriving_mode
1080 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1083 /* Don't use MEMs as direct targets because on some machines
1084 substituting a MEM for a REG makes invalid insns.
1085 Let the combiner substitute the MEM if that is valid. */
1086 if (target == 0 || GET_CODE (target) != REG
1087 || GET_MODE (target) != departing_mode)
1089 /* Don't make BLKmode registers. If this looks like
1090 a BLKmode object being returned in a register, get
1091 the mode from that, otherwise abort. */
1092 if (departing_mode == BLKmode)
1094 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1096 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1097 arriving_mode = departing_mode;
1103 target = gen_reg_rtx (departing_mode);
1106 /* If function's value was promoted before return,
1107 avoid machine mode mismatch when we substitute INLINE_TARGET.
1108 But TARGET is what we will return to the caller. */
1109 if (arriving_mode != departing_mode)
1111 /* Avoid creating a paradoxical subreg wider than
1112 BITS_PER_WORD, since that is illegal. */
1113 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1115 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1116 GET_MODE_BITSIZE (arriving_mode)))
1117 /* Maybe could be handled by using convert_move () ? */
1119 reg_to_map = gen_reg_rtx (arriving_mode);
1120 target = gen_lowpart (departing_mode, reg_to_map);
1123 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1126 reg_to_map = target;
1128 /* Usually, the result value is the machine's return register.
1129 Sometimes it may be a pseudo. Handle both cases. */
1130 if (REG_FUNCTION_VALUE_P (loc))
1131 map->inline_target = reg_to_map;
1133 map->reg_map[REGNO (loc)] = reg_to_map;
1135 else if (GET_CODE (loc) == CONCAT)
1137 enum machine_mode departing_mode = TYPE_MODE (type);
1138 enum machine_mode arriving_mode
1139 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1141 if (departing_mode != arriving_mode)
1143 if (GET_CODE (XEXP (loc, 0)) != REG
1144 || GET_CODE (XEXP (loc, 1)) != REG)
1147 /* Don't use MEMs as direct targets because on some machines
1148 substituting a MEM for a REG makes invalid insns.
1149 Let the combiner substitute the MEM if that is valid. */
1150 if (target == 0 || GET_CODE (target) != REG
1151 || GET_MODE (target) != departing_mode)
1152 target = gen_reg_rtx (departing_mode);
1154 if (GET_CODE (target) != CONCAT)
1157 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1158 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1163 /* Remap the exception handler data pointer from one to the other. */
1164 temp = get_exception_pointer (inl_f);
1166 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1168 /* Initialize label_map. get_label_from_map will actually make
1170 memset ((char *) &map->label_map[min_labelno], 0,
1171 (max_labelno - min_labelno) * sizeof (rtx));
1173 /* Make copies of the decls of the symbols in the inline function, so that
1174 the copies of the variables get declared in the current function. Set
1175 up things so that lookup_static_chain knows to interpret registers
1176 in SAVE_EXPRs for TYPE_SIZEs as local. */
1177 inline_function_decl = fndecl;
1178 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1179 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1180 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1181 inline_function_decl = 0;
1183 /* Make a fresh binding contour that we can easily remove. Do this after
1184 expanding our arguments so cleanups are properly scoped. */
1185 expand_start_bindings_and_block (0, block);
1187 /* Sort the block-map so that it will be easy to find remapped
1189 qsort (&VARRAY_TREE (map->block_map, 0),
1190 map->block_map->elements_used,
1194 /* Perform postincrements before actually calling the function. */
1197 /* Clean up stack so that variables might have smaller offsets. */
1198 do_pending_stack_adjust ();
1200 /* Save a copy of the location of const_equiv_varray for
1201 mark_stores, called via note_stores. */
1202 global_const_equiv_varray = map->const_equiv_varray;
1204 /* If the called function does an alloca, save and restore the
1205 stack pointer around the call. This saves stack space, but
1206 also is required if this inline is being done between two
1208 if (inl_f->calls_alloca)
1209 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1211 /* Map pseudos used for initial hard reg values. */
1212 setup_initial_hard_reg_value_integration (inl_f, map);
1214 /* Now copy the insns one by one. */
1215 copy_insn_list (insns, map, static_chain_value);
1217 /* Duplicate the EH regions. This will create an offset from the
1218 region numbers in the function we're inlining to the region
1219 numbers in the calling function. This must wait until after
1220 copy_insn_list, as we need the insn map to be complete. */
1221 eh_region_offset = duplicate_eh_regions (inl_f, map);
1223 /* Now copy the REG_NOTES for those insns. */
1224 copy_insn_notes (insns, map, eh_region_offset);
1226 /* If the insn sequence required one, emit the return label. */
1227 if (map->local_return_label)
1228 emit_label (map->local_return_label);
1230 /* Restore the stack pointer if we saved it above. */
1231 if (inl_f->calls_alloca)
1232 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1234 if (! cfun->x_whole_function_mode_p)
1235 /* In statement-at-a-time mode, we just tell the front-end to add
1236 this block to the list of blocks at this binding level. We
1237 can't do it the way it's done for function-at-a-time mode because the
1238 superblocks have not been created yet. */
1239 insert_block (block);
1243 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1244 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1247 /* End the scope containing the copied formal parameter variables
1248 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1249 here so that expand_end_bindings will not check for unused
1250 variables. That's already been checked for when the inlined
1251 function was defined. */
1252 expand_end_bindings (NULL_TREE, 1, 1);
1254 /* Must mark the line number note after inlined functions as a repeat, so
1255 that the test coverage code can avoid counting the call twice. This
1256 just tells the code to ignore the immediately following line note, since
1257 there already exists a copy of this note before the expanded inline call.
1258 This line number note is still needed for debugging though, so we can't
1259 delete it. */
1260 if (flag_test_coverage)
1261 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1263 emit_line_note (input_filename, lineno);
1265 /* If the function returns a BLKmode object in a register, copy it
1266 out of the temp register into a BLKmode memory object. */
1268 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1269 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1270 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1272 if (structure_value_addr)
1274 target = gen_rtx_MEM (TYPE_MODE (type),
1275 memory_address (TYPE_MODE (type),
1276 structure_value_addr));
1277 set_mem_attributes (target, type, 1);
1280 /* Make sure we free the things we explicitly allocated with xmalloc. */
1282 free (real_label_map);
1283 VARRAY_FREE (map->const_equiv_varray);
1284 free (map->reg_map);
1285 VARRAY_FREE (map->block_map);
1286 free (map->insn_map);
1291 inlining = inlining_previous;
1296 /* Make copies of each insn in the given list using the mapping
1297 computed in expand_inline_function. This function may call itself for
1298 insns containing sequences.
1300 Copying is done in two passes, first the insns and then their REG_NOTES.
1302 If static_chain_value is non-zero, it represents the context-pointer
1303 register for the function. */
1306 copy_insn_list (insns, map, static_chain_value)
1308 struct inline_remap *map;
1309 rtx static_chain_value;
1318 /* Copy the insns one by one. Do this in two passes, first the insns and
1319 then their REG_NOTES. */
1321 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1323 for (insn = insns; insn; insn = NEXT_INSN (insn))
1325 rtx copy, pattern, set;
1327 map->orig_asm_operands_vector = 0;
1329 switch (GET_CODE (insn))
1332 pattern = PATTERN (insn);
1333 set = single_set (insn);
1335 if (GET_CODE (pattern) == USE
1336 && GET_CODE (XEXP (pattern, 0)) == REG
1337 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1338 /* The (USE (REG n)) at return from the function should
1339 be ignored since we are changing (REG n) into
1343 /* Ignore setting a function value that we don't want to use. */
1344 if (map->inline_target == 0
1346 && GET_CODE (SET_DEST (set)) == REG
1347 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1349 if (volatile_refs_p (SET_SRC (set)))
1353 /* If we must not delete the source,
1354 load it into a new temporary. */
1355 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1357 new_set = single_set (copy);
1362 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1364 /* If the source and destination are the same and it
1365 has a note on it, keep the insn. */
1366 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1367 && REG_NOTES (insn) != 0)
1368 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1373 /* Similarly if an ignored return value is clobbered. */
1374 else if (map->inline_target == 0
1375 && GET_CODE (pattern) == CLOBBER
1376 && GET_CODE (XEXP (pattern, 0)) == REG
1377 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1380 /* If this is setting the static chain rtx, omit it. */
1381 else if (static_chain_value != 0
1383 && GET_CODE (SET_DEST (set)) == REG
1384 && rtx_equal_p (SET_DEST (set),
1385 static_chain_incoming_rtx))
1388 /* If this is setting the static chain pseudo, set it from
1389 the value we want to give it instead. */
1390 else if (static_chain_value != 0
1392 && rtx_equal_p (SET_SRC (set),
1393 static_chain_incoming_rtx))
1395 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1397 copy = emit_move_insn (newdest, static_chain_value);
1398 static_chain_value = 0;
1401 /* If this is setting the virtual stack vars register, this must
1402 be the code at the handler for a builtin longjmp. The value
1403 saved in the setjmp buffer will be the address of the frame
1404 we've made for this inlined instance within our frame. But we
1405 know the offset of that value so we can use it to reconstruct
1406 our virtual stack vars register from that value. If we are
1407 copying it from the stack pointer, leave it unchanged. */
1409 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1411 HOST_WIDE_INT offset;
1412 temp = map->reg_map[REGNO (SET_DEST (set))];
1413 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1416 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1418 else if (GET_CODE (temp) == PLUS
1419 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1420 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1421 offset = INTVAL (XEXP (temp, 1));
1425 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1426 temp = SET_SRC (set);
1428 temp = force_operand (plus_constant (SET_SRC (set),
1432 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1436 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1437 /* REG_NOTES will be copied later. */
1440 /* If this insn is setting CC0, it may need to look at
1441 the insn that uses CC0 to see what type of insn it is.
1442 In that case, the call to recog via validate_change will
1443 fail. So don't substitute constants here. Instead,
1444 do it when we emit the following insn.
1446 For example, see the pyr.md file. That machine has signed and
1447 unsigned compares. The compare patterns must check the
1448 following branch insn to see what kind of compare to
1449 emit.
1451 If the previous insn set CC0, substitute constants on it as
1452 well. */
1453 if (sets_cc0_p (PATTERN (copy)) != 0)
1458 try_constants (cc0_insn, map);
1460 try_constants (copy, map);
1463 try_constants (copy, map);
1468 if (map->integrating && returnjump_p (insn))
1470 if (map->local_return_label == 0)
1471 map->local_return_label = gen_label_rtx ();
1472 pattern = gen_jump (map->local_return_label);
1475 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1477 copy = emit_jump_insn (pattern);
1481 try_constants (cc0_insn, map);
1484 try_constants (copy, map);
1486 /* If this used to be a conditional jump insn whose branch
1487 direction is now known, we must do something special. */
1488 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1491 /* If the previous insn set cc0 for us, delete it. */
1492 if (only_sets_cc0_p (PREV_INSN (copy)))
1493 delete_related_insns (PREV_INSN (copy));
1496 /* If this is now a no-op, delete it. */
1497 if (map->last_pc_value == pc_rtx)
1499 delete_related_insns (copy);
1503 /* Otherwise, this is an unconditional jump so we must put a
1504 BARRIER after it. We could do some dead code elimination
1505 here, but jump.c will do it just as well. */
1511 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1512 three attached sequences: normal call, sibling call and tail
1513 recursion sequences. */
1514 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1519 for (i = 0; i < 3; i++)
1523 sequence[i] = NULL_RTX;
1524 seq = XEXP (PATTERN (insn), i);
1528 copy_insn_list (seq, map, static_chain_value);
1529 sequence[i] = get_insns ();
1534 /* Find the new tail recursion label.
1535 It will already be substituted into sequence[2]. */
1536 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1539 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1547 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1548 copy = emit_call_insn (pattern);
1550 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1551 CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1553 /* Because the USAGE information potentially contains objects other
1554 than hard registers, we need to copy it. */
1556 CALL_INSN_FUNCTION_USAGE (copy)
1557 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1562 try_constants (cc0_insn, map);
1565 try_constants (copy, map);
1567 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1568 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1569 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1573 copy = emit_label (get_label_from_map (map,
1574 CODE_LABEL_NUMBER (insn)));
1575 LABEL_NAME (copy) = LABEL_NAME (insn);
1580 copy = emit_barrier ();
1584 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1586 copy = emit_label (get_label_from_map (map,
1587 CODE_LABEL_NUMBER (insn)));
1588 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1593 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1594 discarded because it is important to have only one of
1595 each in the current function.
1597 NOTE_INSN_DELETED notes aren't useful. */
1599 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1600 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1601 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1603 copy = emit_note (NOTE_SOURCE_FILE (insn),
1604 NOTE_LINE_NUMBER (insn));
1606 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1607 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1608 && NOTE_BLOCK (insn))
1610 tree *mapped_block_p;
1613 = (tree *) bsearch (NOTE_BLOCK (insn),
1614 &VARRAY_TREE (map->block_map, 0),
1615 map->block_map->elements_used,
1619 if (!mapped_block_p)
1622 NOTE_BLOCK (copy) = *mapped_block_p;
1625 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1626 NOTE_EXPECTED_VALUE (copy)
1627 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1639 RTX_INTEGRATED_P (copy) = 1;
1641 map->insn_map[INSN_UID (insn)] = copy;
1645 /* Copy the REG_NOTES. Increment const_age, so that only constants
1646 from parameters can be substituted in. These are the only ones
1647 that are valid across the entire function. */
1650 copy_insn_notes (insns, map, eh_region_offset)
1652 struct inline_remap *map;
1653 int eh_region_offset;
1658 for (insn = insns; insn; insn = NEXT_INSN (insn))
1660 if (! INSN_P (insn))
1663 new_insn = map->insn_map[INSN_UID (insn)];
1667 if (REG_NOTES (insn))
1669 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1671 /* We must also do subst_constants, in case one of our parameters
1672 has const type and constant value. */
1673 subst_constants (&note, NULL_RTX, map, 0);
1674 apply_change_group ();
1675 REG_NOTES (new_insn) = note;
1677 /* Delete any REG_LABEL notes from the chain. Remap any
1678 REG_EH_REGION notes. */
1679 for (; note; note = next)
1681 next = XEXP (note, 1);
1682 if (REG_NOTE_KIND (note) == REG_LABEL)
1683 remove_note (new_insn, note);
1684 else if (REG_NOTE_KIND (note) == REG_EH_REGION)
1685 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1686 + eh_region_offset);
1690 if (GET_CODE (insn) == CALL_INSN
1691 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1694 for (i = 0; i < 3; i++)
1695 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1698 if (GET_CODE (insn) == JUMP_INSN
1699 && GET_CODE (PATTERN (insn)) == RESX)
1700 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1704 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1705 push all of those decls and give each one the corresponding home. */
1708 integrate_parm_decls (args, map, arg_vector)
1710 struct inline_remap *map;
1716 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1718 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1719 current_function_decl);
1721 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1723 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1724 here, but that's going to require some more work. */
1725 /* DECL_INCOMING_RTL (decl) = ?; */
1726 /* Fully instantiate the address with the equivalent form so that the
1727 debugging information contains the actual register, instead of the
1728 virtual register. Do this by not passing an insn to
1729 subst_constants. */
1730 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1731 apply_change_group ();
1732 SET_DECL_RTL (decl, new_decl_rtl);
1736 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1737 current function a tree of contexts isomorphic to the one that is given.
1739 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1740 registers used in the DECL_RTL field should be remapped. If it is zero,
1741 no mapping is necessary. */
1744 integrate_decl_tree (let, map)
1746 struct inline_remap *map;
1752 new_block = make_node (BLOCK);
1753 VARRAY_PUSH_TREE (map->block_map, new_block);
1754 next = &BLOCK_VARS (new_block);
1756 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1760 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1762 if (DECL_RTL_SET_P (t))
1766 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1768 /* Fully instantiate the address with the equivalent form so that the
1769 debugging information contains the actual register, instead of the
1770 virtual register. Do this by not passing an insn to
1771 subst_constants. */
1772 r = DECL_RTL (d);
1773 subst_constants (&r, NULL_RTX, map, 1);
1774 SET_DECL_RTL (d, r);
1775 apply_change_group ();
1778 /* Add this declaration to the list of variables in the new
1781 next = &TREE_CHAIN (d);
1784 next = &BLOCK_SUBBLOCKS (new_block);
1785 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1787 *next = integrate_decl_tree (t, map);
1788 BLOCK_SUPERCONTEXT (*next) = new_block;
1789 next = &BLOCK_CHAIN (*next);
1792 TREE_USED (new_block) = TREE_USED (let);
1793 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1798 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1799 except for those few rtx codes that are sharable.
1801 We always return an rtx that is similar to that incoming rtx, with the
1802 exception of possibly changing a REG to a SUBREG or vice versa. No
1803 rtl is ever emitted.
1805 If FOR_LHS is nonzero, it means we are processing something that will
1806 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1807 inlining since we need to be conservative in how it is set for
1810 Handle constants that need to be placed in the constant pool by
1811 calling `force_const_mem'. */
1814 copy_rtx_and_substitute (orig, map, for_lhs)
1816 struct inline_remap *map;
1819 register rtx copy, temp;
1821 register RTX_CODE code;
1822 register enum machine_mode mode;
1823 register const char *format_ptr;
1829 code = GET_CODE (orig);
1830 mode = GET_MODE (orig);
1835 /* If the stack pointer register shows up, it must be part of
1836 stack-adjustments (*not* because we eliminated the frame pointer!).
1837 Small hard registers are returned as-is. Pseudo-registers
1838 go through their `reg_map'. */
1839 regno = REGNO (orig);
1840 if (regno <= LAST_VIRTUAL_REGISTER
1841 || (map->integrating
1842 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1844 /* Some hard registers are also mapped,
1845 but others are not translated. */
1846 if (map->reg_map[regno] != 0)
1847 return map->reg_map[regno];
1849 /* If this is the virtual frame pointer, make space in current
1850 function's stack frame for the stack frame of the inline function.
1852 Copy the address of this area into a pseudo. Map
1853 virtual_stack_vars_rtx to this pseudo and set up a constant
1854 equivalence for it to be the address. This will substitute the
1855 address into insns where it can be substituted and use the new
1856 pseudo where it can't. */
1857 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1860 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1861 #ifdef FRAME_GROWS_DOWNWARD
1863 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1866 /* In this case, virtual_stack_vars_rtx points to one byte
1867 higher than the top of the frame area. So make sure we
1868 allocate a big enough chunk to keep the frame pointer
1869 aligned like a real one. */
1871 size = CEIL_ROUND (size, alignment);
1874 loc = assign_stack_temp (BLKmode, size, 1);
1875 loc = XEXP (loc, 0);
1876 #ifdef FRAME_GROWS_DOWNWARD
1877 /* In this case, virtual_stack_vars_rtx points to one byte
1878 higher than the top of the frame area. So compute the offset
1879 to one byte higher than our substitute frame. */
1880 loc = plus_constant (loc, size);
1882 map->reg_map[regno] = temp
1883 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1885 #ifdef STACK_BOUNDARY
1886 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1889 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1891 seq = gen_sequence ();
1893 emit_insn_after (seq, map->insns_at_start);
1896 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1897 || (map->integrating
1898 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1901 /* Do the same for a block to contain any arguments referenced
1904 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1907 loc = assign_stack_temp (BLKmode, size, 1);
1908 loc = XEXP (loc, 0);
1909 /* When arguments grow downward, the virtual incoming
1910 args pointer points to the top of the argument block,
1911 so the remapped location better do the same. */
1912 #ifdef ARGS_GROW_DOWNWARD
1913 loc = plus_constant (loc, size);
1915 map->reg_map[regno] = temp
1916 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1918 #ifdef STACK_BOUNDARY
1919 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1922 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1924 seq = gen_sequence ();
1926 emit_insn_after (seq, map->insns_at_start);
1929 else if (REG_FUNCTION_VALUE_P (orig))
1931 /* This is a reference to the function return value. If
1932 the function doesn't have a return value, error. If the
1933 mode doesn't agree and it isn't BLKmode, make a SUBREG. */
1934 if (map->inline_target == 0)
1936 if (rtx_equal_function_value_matters)
1937 /* This is an ignored return value. We must not
1938 leave it in with REG_FUNCTION_VALUE_P set, since
1939 that would confuse subsequent inlining of the
1940 current function into a later function. */
1941 return gen_rtx_REG (GET_MODE (orig), regno);
1943 /* Must be unrolling loops or replicating code if we
1944 reach here, so return the register unchanged. */
1947 else if (GET_MODE (map->inline_target) != BLKmode
1948 && mode != GET_MODE (map->inline_target))
1949 return gen_lowpart (mode, map->inline_target);
1951 return map->inline_target;
1953 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1954 /* If leaf_renumber_regs_insn() might remap this register to
1955 some other number, make sure we don't share it with the
1956 inlined function, otherwise delayed optimization of the
1957 inlined function may change it in place, breaking our
1958 reference to it. We may still share it within the
1959 function, so create an entry for this register in the leaf_reg_map. */
1961 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1962 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1964 if (!map->leaf_reg_map[regno][mode])
1965 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
1966 return map->leaf_reg_map[regno][mode];
1974 if (map->reg_map[regno] == NULL)
1976 map->reg_map[regno] = gen_reg_rtx (mode);
1977 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1978 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1979 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1980 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1982 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1983 mark_reg_pointer (map->reg_map[regno],
1984 map->regno_pointer_align[regno]);
1986 return map->reg_map[regno];
1989 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1990 return simplify_gen_subreg (GET_MODE (orig), copy,
1991 GET_MODE (SUBREG_REG (orig)),
1992 SUBREG_BYTE (orig));
1995 copy = gen_rtx_ADDRESSOF (mode,
1996 copy_rtx_and_substitute (XEXP (orig, 0),
1998 0, ADDRESSOF_DECL (orig));
1999 regno = ADDRESSOF_REGNO (orig);
2000 if (map->reg_map[regno])
2001 regno = REGNO (map->reg_map[regno]);
2002 else if (regno > LAST_VIRTUAL_REGISTER)
2004 temp = XEXP (orig, 0);
2005 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2006 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2007 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2008 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2009 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2011 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2012 mark_reg_pointer (map->reg_map[regno],
2013 map->regno_pointer_align[regno]);
2014 regno = REGNO (map->reg_map[regno]);
2016 ADDRESSOF_REGNO (copy) = regno;
2021 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2022 to (use foo) if the original insn didn't have a subreg.
2023 Removing the subreg distorts the VAX movstrhi pattern
2024 by changing the mode of an operand. */
2025 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2026 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2027 copy = SUBREG_REG (copy);
2028 return gen_rtx_fmt_e (code, VOIDmode, copy);
2030 /* We need to handle "deleted" labels that appear in the DECL_RTL of a LABEL_DECL. */
2033 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2036 /* ... FALLTHRU ... */
2038 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2039 = LABEL_PRESERVE_P (orig);
2040 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2046 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2047 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2049 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2051 /* The fact that this label was previously nonlocal does not mean
2052 it still is, so we must check if it is within the range of
2053 this function's labels. */
2054 LABEL_REF_NONLOCAL_P (copy)
2055 = (LABEL_REF_NONLOCAL_P (orig)
2056 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2057 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2059 /* If we have made a nonlocal label local, it means that this
2060 inlined call will be referring to our nonlocal goto handler.
2061 So make sure we create one for this block; we normally would
2062 not since this is not otherwise considered a "call". */
2063 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2064 function_call_count++;
2074 /* Symbols which represent the address of a label stored in the constant
2075 pool must be modified to point to a constant pool entry for the
2076 remapped label. Otherwise, symbols are returned unchanged. */
2077 if (CONSTANT_POOL_ADDRESS_P (orig))
2079 struct function *f = inlining ? inlining : cfun;
2080 rtx constant = get_pool_constant_for_function (f, orig);
2081 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2084 rtx temp = force_const_mem (const_mode,
2085 copy_rtx_and_substitute (constant,
2089 /* Legitimizing the address here is incorrect.
2091 Since we had a SYMBOL_REF before, we can assume it is valid
2092 to have one in this position in the insn.
2094 Also, change_address may create new registers. These
2095 registers will not have valid reg_map entries. This can
2096 cause try_constants() to fail because it assumes that all
2097 registers in the rtx have valid reg_map entries, and it may
2098 end up replacing one of these new registers with junk. */
2100 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2101 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2104 temp = XEXP (temp, 0);
2106 #ifdef POINTERS_EXTEND_UNSIGNED
2107 if (GET_MODE (temp) != GET_MODE (orig))
2108 temp = convert_memory_address (GET_MODE (orig), temp);
2112 else if (GET_CODE (constant) == LABEL_REF)
2113 return XEXP (force_const_mem
2115 copy_rtx_and_substitute (constant, map, for_lhs)),
2122 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2123 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2124 duplicate of a CONST_DOUBLE we have already seen. */
2125 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2129 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2130 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2133 return immed_double_const (CONST_DOUBLE_LOW (orig),
2134 CONST_DOUBLE_HIGH (orig), VOIDmode);
2137 /* Make new constant pool entry for a constant
2138 that was in the pool of the inline function. */
2139 if (RTX_INTEGRATED_P (orig))
2144 /* If a single asm insn contains multiple output operands then
2145 it contains multiple ASM_OPERANDS rtx's that share the input
2146 and constraint vecs. We must make sure that the copied insn
2147 continues to share them. */
2148 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2150 copy = rtx_alloc (ASM_OPERANDS);
2151 copy->volatil = orig->volatil;
2152 PUT_MODE (copy, GET_MODE (orig));
2153 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2154 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2155 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2156 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2157 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2158 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2159 = map->copy_asm_constraints_vector;
2160 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2161 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2167 /* This is given special treatment because the first
2168 operand of a CALL is a (MEM ...) which may get
2169 forced into a register for cse. This is undesirable
2170 if function-address cse isn't wanted or if we won't do cse. */
2171 #ifndef NO_FUNCTION_CSE
2172 if (! (optimize && ! flag_no_function_cse))
2177 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2178 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2180 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2184 /* Must be ifdefed out for loop unrolling to work. */
2190 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2191 Adjust the setting by the offset of the area we made.
2192 If the nonlocal goto is into the current function,
2193 this will result in unnecessarily bad code, but should work. */
2194 if (SET_DEST (orig) == virtual_stack_vars_rtx
2195 || SET_DEST (orig) == virtual_incoming_args_rtx)
2197 /* In case a translation hasn't occurred already, make one now. */
2200 HOST_WIDE_INT loc_offset;
2202 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2203 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2204 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2205 REGNO (equiv_reg)).rtx;
2207 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2209 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2212 (copy_rtx_and_substitute (SET_SRC (orig),
2218 return gen_rtx_SET (VOIDmode,
2219 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2220 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2225 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2226 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2228 enum machine_mode const_mode
2229 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2231 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2233 constant = copy_rtx_and_substitute (constant, map, 0);
2235 /* If this was an address of a constant pool entry that itself
2236 had to be placed in the constant pool, it might not be a
2237 valid address. So the recursive call might have turned it
2238 into a register. In that case, it isn't a constant any
2239 more, so return it. This has the potential of changing a
2240 MEM into a REG, but we'll assume that it is safe. */
2241 if (! CONSTANT_P (constant))
2244 return validize_mem (force_const_mem (const_mode, constant));
2247 copy = rtx_alloc (MEM);
2248 PUT_MODE (copy, mode);
2249 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2250 MEM_COPY_ATTRIBUTES (copy, orig);
2257 copy = rtx_alloc (code);
2258 PUT_MODE (copy, mode);
2259 copy->in_struct = orig->in_struct;
2260 copy->volatil = orig->volatil;
2261 copy->unchanging = orig->unchanging;
2263 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2265 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2267 switch (*format_ptr++)
2270 /* Copy this through the wide int field; that's safest. */
2271 X0WINT (copy, i) = X0WINT (orig, i);
2276 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2280 /* Change any references to old-insns to point to the
2281 corresponding copied insns. */
2282 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2286 XVEC (copy, i) = XVEC (orig, i);
2287 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2289 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2290 for (j = 0; j < XVECLEN (copy, i); j++)
2291 XVECEXP (copy, i, j)
2292 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2298 XWINT (copy, i) = XWINT (orig, i);
2302 XINT (copy, i) = XINT (orig, i);
2306 XSTR (copy, i) = XSTR (orig, i);
2310 XTREE (copy, i) = XTREE (orig, i);
2318 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2320 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2321 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2322 map->copy_asm_constraints_vector
2323 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2329 /* Substitute known constant values into INSN, if that is valid. */
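/* A sketch of the expected calling pattern (illustrative, based on how the
   insn-copying loop elsewhere in this file behaves): once an insn pattern
   has been run through copy_rtx_and_substitute and emitted, the fresh insn
   is handed back here, roughly

       copy = emit_insn (pattern);
       try_constants (copy, map);

   so that pseudos already known to hold constants are folded into the copy
   while the constant-equivalence data is still current.  */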
2332 try_constants (insn, map)
2334 struct inline_remap *map;
2340 /* First try just updating addresses, then other things. This is
2341 important when we have something like the store of a constant
2342 into memory and we can update the memory address but the machine
2343 does not support a constant source. */
2344 subst_constants (&PATTERN (insn), insn, map, 1);
2345 apply_change_group ();
2346 subst_constants (&PATTERN (insn), insn, map, 0);
2347 apply_change_group ();
2349 /* Show we don't know the value of anything stored or clobbered. */
2350 note_stores (PATTERN (insn), mark_stores, NULL);
2351 map->last_pc_value = 0;
2353 map->last_cc0_value = 0;
2356 /* Set up any constant equivalences made in this insn. */
2357 for (i = 0; i < map->num_sets; i++)
2359 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2361 int regno = REGNO (map->equiv_sets[i].dest);
2363 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2364 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2365 /* The following clause is a hack to make the case work where GNU C++
2366 reassigns a variable to make cse work right. */
2367 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2369 map->equiv_sets[i].equiv))
2370 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2371 map->equiv_sets[i].equiv, map->const_age);
2373 else if (map->equiv_sets[i].dest == pc_rtx)
2374 map->last_pc_value = map->equiv_sets[i].equiv;
2376 else if (map->equiv_sets[i].dest == cc0_rtx)
2377 map->last_cc0_value = map->equiv_sets[i].equiv;
2382 /* Substitute known constants for pseudo regs in the contents of LOC,
2383 which are part of INSN.
2384 If INSN is zero, the substitution should always be done (this is used to update DECL_RTL).
2386 These changes are taken out by try_constants if the result is not valid.
2388 Note that we are more concerned with determining when the result of a SET
2389 is a constant, for further propagation, than actually inserting constants
2390 into insns; cse will do the latter task better.
2392 This function is also used to adjust the address of items previously addressed
2393 via the virtual stack variable or virtual incoming arguments registers.
2395 If MEMONLY is nonzero, only make changes inside a MEM. */
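/* Worked example (illustrative only): if the const_equiv data says pseudo
   60 currently holds (const_int 8), then substituting into

       (set (reg:SI 70) (plus:SI (reg:SI 60) (const_int 4)))

   first replaces reg 60 with the constant and then lets the simplifier fold
   the PLUS, so the SET is recorded as giving reg 70 the value
   (const_int 12).  */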
2398 subst_constants (loc, insn, map, memonly)
2401 struct inline_remap *map;
2406 register enum rtx_code code;
2407 register const char *format_ptr;
2408 int num_changes = num_validated_changes ();
2410 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2412 code = GET_CODE (x);
2428 validate_change (insn, loc, map->last_cc0_value, 1);
2434 /* The only thing we can do with a USE or CLOBBER is possibly do
2435 some substitutions in a MEM within it. */
2436 if (GET_CODE (XEXP (x, 0)) == MEM)
2437 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2441 /* Substitute for parms and known constants. Don't replace
2442 hard regs used as user variables with constants. */
2445 int regno = REGNO (x);
2446 struct const_equiv_data *p;
2448 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2449 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2450 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2452 && p->age >= map->const_age)
2453 validate_change (insn, loc, p->rtx, 1);
2458 /* SUBREG applied to something other than a reg
2459 should be treated as ordinary, since that must
2460 be a special hack and we don't know how to treat it specially.
2461 Consider for example mulsidi3 in m68k.md.
2462 Ordinary SUBREG of a REG needs this special treatment. */
2463 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2465 rtx inner = SUBREG_REG (x);
2468 /* We can't call subst_constants on &SUBREG_REG (x) because any
2469 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2470 see what is inside, try to form the new SUBREG and see if that is
2471 valid. We handle two cases: extracting a full word in an
2472 integral mode and extracting the low part. */
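/* For instance (illustrative only): given (subreg:SI (reg:DI 60) 0) where
   reg 60 is known to be a constant, we substitute the constant into a local
   copy of the inner operand and let simplify_gen_subreg produce the SImode
   low part, validating that as the replacement for the whole SUBREG.  */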
2473 subst_constants (&inner, NULL_RTX, map, 0);
2474 new = simplify_gen_subreg (GET_MODE (x), inner,
2475 GET_MODE (SUBREG_REG (x)),
2479 validate_change (insn, loc, new, 1);
2481 cancel_changes (num_changes);
2488 subst_constants (&XEXP (x, 0), insn, map, 0);
2490 /* If a memory address got spoiled, change it back. */
2491 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2492 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2493 cancel_changes (num_changes);
2498 /* Substitute constants in our source, and in any arguments to a
2499 complex (e.g., ZERO_EXTRACT) destination, but not in the destination itself. */
2501 rtx *dest_loc = &SET_DEST (x);
2502 rtx dest = *dest_loc;
2504 enum machine_mode compare_mode = VOIDmode;
2506 /* If SET_SRC is a COMPARE which subst_constants would turn into
2507 COMPARE of 2 VOIDmode constants, note the mode in which the comparison is done. */
2509 if (GET_CODE (SET_SRC (x)) == COMPARE)
2512 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2518 compare_mode = GET_MODE (XEXP (src, 0));
2519 if (compare_mode == VOIDmode)
2520 compare_mode = GET_MODE (XEXP (src, 1));
2524 subst_constants (&SET_SRC (x), insn, map, memonly);
2527 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2528 || GET_CODE (*dest_loc) == SUBREG
2529 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2531 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2533 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2534 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2536 dest_loc = &XEXP (*dest_loc, 0);
2539 /* Do substitute in the address of a destination in memory. */
2540 if (GET_CODE (*dest_loc) == MEM)
2541 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2543 /* Check for the case of DEST a SUBREG, both it and the underlying
2544 register are less than one word, and the SUBREG has the wider mode.
2545 In that case, we are really setting the underlying register to the
2546 source converted to the mode of DEST. So indicate that. */
2547 if (GET_CODE (dest) == SUBREG
2548 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2549 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2550 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2551 <= GET_MODE_SIZE (GET_MODE (dest)))
2552 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2554 src = tem, dest = SUBREG_REG (dest);
2556 /* If storing a recognizable value save it for later recording. */
2557 if ((map->num_sets < MAX_RECOG_OPERANDS)
2558 && (CONSTANT_P (src)
2559 || (GET_CODE (src) == REG
2560 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2561 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2562 || (GET_CODE (src) == PLUS
2563 && GET_CODE (XEXP (src, 0)) == REG
2564 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2565 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2566 && CONSTANT_P (XEXP (src, 1)))
2567 || GET_CODE (src) == COMPARE
2572 && (src == pc_rtx || GET_CODE (src) == RETURN
2573 || GET_CODE (src) == LABEL_REF))))
2575 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2576 it will cause us to save the COMPARE with any constants
2577 substituted, which is what we want for later. */
2578 rtx src_copy = copy_rtx (src);
2579 map->equiv_sets[map->num_sets].equiv = src_copy;
2580 map->equiv_sets[map->num_sets++].dest = dest;
2581 if (compare_mode != VOIDmode
2582 && GET_CODE (src) == COMPARE
2583 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2588 && GET_MODE (XEXP (src, 0)) == VOIDmode
2589 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2591 map->compare_src = src_copy;
2592 map->compare_mode = compare_mode;
2602 format_ptr = GET_RTX_FORMAT (code);
2604 /* If the first operand is an expression, save its mode for later. */
2605 if (*format_ptr == 'e')
2606 op0_mode = GET_MODE (XEXP (x, 0));
2608 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2610 switch (*format_ptr++)
2617 subst_constants (&XEXP (x, i), insn, map, memonly);
2629 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2630 for (j = 0; j < XVECLEN (x, i); j++)
2631 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2640 /* If this is a commutative operation, move a constant to the second
2641 operand unless the second operand is already a CONST_INT. */
2643 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2644 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2646 rtx tem = XEXP (x, 0);
2647 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2648 validate_change (insn, &XEXP (x, 1), tem, 1);
2651 /* Simplify the expression in case we put in some constants. */
2653 switch (GET_RTX_CLASS (code))
2656 if (op0_mode == MAX_MACHINE_MODE)
2658 new = simplify_unary_operation (code, GET_MODE (x),
2659 XEXP (x, 0), op0_mode);
2664 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2666 if (op_mode == VOIDmode)
2667 op_mode = GET_MODE (XEXP (x, 1));
2668 new = simplify_relational_operation (code, op_mode,
2669 XEXP (x, 0), XEXP (x, 1));
2670 #ifdef FLOAT_STORE_FLAG_VALUE
2671 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2673 enum machine_mode mode = GET_MODE (x);
2674 if (new == const0_rtx)
2675 new = CONST0_RTX (mode);
2678 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2679 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2688 new = simplify_binary_operation (code, GET_MODE (x),
2689 XEXP (x, 0), XEXP (x, 1));
2694 if (op0_mode == MAX_MACHINE_MODE)
2697 if (code == IF_THEN_ELSE)
2699 rtx op0 = XEXP (x, 0);
2701 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2702 && GET_MODE (op0) == VOIDmode
2703 && ! side_effects_p (op0)
2704 && XEXP (op0, 0) == map->compare_src
2705 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2707 /* We have a compare of two VOIDmode constants for which
2708 we recorded the comparison mode. */
2710 simplify_relational_operation (GET_CODE (op0),
2715 if (temp == const0_rtx)
2717 else if (temp == const1_rtx)
2722 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2723 XEXP (x, 0), XEXP (x, 1),
2729 validate_change (insn, loc, new, 1);
2732 /* Show that registers modified no longer contain known constants. We are
2733 called from note_stores with parts of the new insn. */
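/* Sketch of the effect (illustrative): after an insn whose pattern is
   (set (reg:SI 70) ...) has been copied, note_stores hands (reg:SI 70) to
   this function and the global const_equiv entry for register 70 is
   cleared, so a stale constant is never substituted for it later.  */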
2736 mark_stores (dest, x, data)
2738 rtx x ATTRIBUTE_UNUSED;
2739 void *data ATTRIBUTE_UNUSED;
2742 enum machine_mode mode = VOIDmode;
2744 /* DEST is always the innermost thing set, except in the case of
2745 SUBREGs of hard registers. */
2747 if (GET_CODE (dest) == REG)
2748 regno = REGNO (dest), mode = GET_MODE (dest);
2749 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2751 regno = REGNO (SUBREG_REG (dest));
2752 if (regno < FIRST_PSEUDO_REGISTER)
2753 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2754 GET_MODE (SUBREG_REG (dest)),
2757 mode = GET_MODE (SUBREG_REG (dest));
2762 unsigned int uregno = regno;
2763 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2764 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2767 /* Ignore virtual stack var or virtual arg register since those
2768 are handled separately. */
2769 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2770 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2771 for (i = uregno; i <= last_reg; i++)
2772 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2773 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2777 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2778 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2779 that it points to the node itself, thus indicating that the node is its
2780 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2781 the given node is NULL, recursively descend the decl/block tree which
2782 it is the root of, and for each other ..._DECL or BLOCK node contained
2783 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2784 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2785 values to point to themselves. */
2788 set_block_origin_self (stmt)
2791 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2793 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2796 register tree local_decl;
2798 for (local_decl = BLOCK_VARS (stmt);
2799 local_decl != NULL_TREE;
2800 local_decl = TREE_CHAIN (local_decl))
2801 set_decl_origin_self (local_decl); /* Potential recursion. */
2805 register tree subblock;
2807 for (subblock = BLOCK_SUBBLOCKS (stmt);
2808 subblock != NULL_TREE;
2809 subblock = BLOCK_CHAIN (subblock))
2810 set_block_origin_self (subblock); /* Recurse. */
2815 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2816 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2817 node so that it points to the node itself, thus indicating that the
2818 node represents its own (abstract) origin. Additionally, if the
2819 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2820 the decl/block tree of which the given node is the root, and for
2821 each other ..._DECL or BLOCK node contained therein whose
2822 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2823 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2824 point to themselves. */
2827 set_decl_origin_self (decl)
2830 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2832 DECL_ABSTRACT_ORIGIN (decl) = decl;
2833 if (TREE_CODE (decl) == FUNCTION_DECL)
2837 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2838 DECL_ABSTRACT_ORIGIN (arg) = arg;
2839 if (DECL_INITIAL (decl) != NULL_TREE
2840 && DECL_INITIAL (decl) != error_mark_node)
2841 set_block_origin_self (DECL_INITIAL (decl));
2846 /* Given a pointer to some BLOCK node, and a boolean value to set the
2847 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2848 the given block, and for all local decls and all local sub-blocks
2849 (recursively) which are contained therein. */
2852 set_block_abstract_flags (stmt, setting)
2854 register int setting;
2856 register tree local_decl;
2857 register tree subblock;
2859 BLOCK_ABSTRACT (stmt) = setting;
2861 for (local_decl = BLOCK_VARS (stmt);
2862 local_decl != NULL_TREE;
2863 local_decl = TREE_CHAIN (local_decl))
2864 set_decl_abstract_flags (local_decl, setting);
2866 for (subblock = BLOCK_SUBBLOCKS (stmt);
2867 subblock != NULL_TREE;
2868 subblock = BLOCK_CHAIN (subblock))
2869 set_block_abstract_flags (subblock, setting);
2872 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2873 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2874 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2875 set the abstract flags for all of the parameters, local vars, local
2876 blocks and sub-blocks (recursively) to the same setting. */
2879 set_decl_abstract_flags (decl, setting)
2881 register int setting;
2883 DECL_ABSTRACT (decl) = setting;
2884 if (TREE_CODE (decl) == FUNCTION_DECL)
2888 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2889 DECL_ABSTRACT (arg) = setting;
2890 if (DECL_INITIAL (decl) != NULL_TREE
2891 && DECL_INITIAL (decl) != error_mark_node)
2892 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2896 /* Output the assembly language code for the function FNDECL
2897 from its DECL_SAVED_INSNS. Used for inline functions that are output
2898 at the end of compilation instead of where they appeared in the source. */
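/* A hedged note on usage: the front ends invoke this at the end of the
   translation unit for each function whose body was kept only as
   DECL_SAVED_INSNS but turned out to be needed anyway, for example an
   inline whose address was taken or that was called where inlining was not
   possible.  */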
2901 output_inline_function (fndecl)
2904 struct function *old_cfun = cfun;
2905 enum debug_info_type old_write_symbols = write_symbols;
2906 struct gcc_debug_hooks *old_debug_hooks = debug_hooks;
2907 struct function *f = DECL_SAVED_INSNS (fndecl);
2910 current_function_decl = fndecl;
2911 clear_emit_caches ();
2913 set_new_last_label_num (f->inl_max_label_num);
2915 /* We're not deferring this any longer. */
2916 DECL_DEFER_OUTPUT (fndecl) = 0;
2918 /* If requested, suppress debugging information. */
2919 if (f->no_debugging_symbols)
2921 write_symbols = NO_DEBUG;
2922 debug_hooks = &do_nothing_debug_hooks;
2925 /* Do any preparation, such as emitting abstract debug info for the inline
2926 before it gets mangled by optimization. */
2927 (*debug_hooks->outlining_inline_function) (fndecl);
2929 /* Compile this function all the way down to assembly code. As a
2930 side effect this destroys the saved RTL representation, but
2931 that's okay, because we don't need to inline this anymore. */
2932 rest_of_compilation (fndecl);
2933 DECL_INLINE (fndecl) = 0;
2936 current_function_decl = old_cfun ? old_cfun->decl : 0;
2937 write_symbols = old_write_symbols;
2938 debug_hooks = old_debug_hooks;
2942 /* Functions to keep track of the values hard regs had at the start of the function. */
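/* A hedged usage sketch (LINK_REGISTER_REGNUM below is a hypothetical
   macro, not defined in this file): a back end that needs the value a hard
   register had on entry can ask

       rtx entry_val = get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);

   and use the returned pseudo freely; emit_initial_value_sets later emits
   the move from the hard register into that pseudo at the head of the
   function's insn stream.  */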
2946 has_func_hard_reg_initial_val (fun, reg)
2947 struct function *fun;
2950 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
2956 for (i = 0; i < ivs->num_entries; i++)
2957 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
2958 return ivs->entries[i].pseudo;
2964 get_func_hard_reg_initial_val (fun, reg)
2965 struct function *fun;
2968 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
2969 rtx rv = has_func_hard_reg_initial_val (fun, reg);
2976 fun->hard_reg_initial_vals = (void *) xmalloc (sizeof (initial_value_struct));
2977 ivs = fun->hard_reg_initial_vals;
2978 ivs->num_entries = 0;
2979 ivs->max_entries = 5;
2980 ivs->entries = (initial_value_pair *) xmalloc (5 * sizeof (initial_value_pair));
2983 if (ivs->num_entries >= ivs->max_entries)
2985 ivs->max_entries += 5;
2987 (initial_value_pair *) xrealloc (ivs->entries,
2989 * sizeof (initial_value_pair));
2992 ivs->entries[ivs->num_entries].hard_reg = reg;
2993 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
2995 return ivs->entries[ivs->num_entries++].pseudo;
2999 get_hard_reg_initial_val (mode, regno)
3000 enum machine_mode mode;
3003 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3007 has_hard_reg_initial_val (mode, regno)
3008 enum machine_mode mode;
3011 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3015 mark_hard_reg_initial_vals (fun)
3016 struct function *fun;
3018 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3024 for (i = 0; i < ivs->num_entries; i ++)
3026 ggc_mark_rtx (ivs->entries[i].hard_reg);
3027 ggc_mark_rtx (ivs->entries[i].pseudo);
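/* The routine below wires an inlined function's initial-value pseudos into
   the caller: each such pseudo is mapped to the caller's own pseudo for the
   same hard register, so both bodies end up reading one shared copy of the
   register's entry value.  */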
3032 setup_initial_hard_reg_value_integration (inl_f, remap)
3033 struct function *inl_f;
3034 struct inline_remap *remap;
3036 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3042 for (i = 0; i < ivs->num_entries; i ++)
3043 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3044 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3049 emit_initial_value_sets ()
3051 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3059 for (i = 0; i < ivs->num_entries; i++)
3060 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3064 emit_insns_after (seq, get_insns ());
3067 /* If the backend knows where to allocate pseudos for hard
3068 register initial values, register these allocations now. */
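/* A hedged sketch of what a port's ALLOCATE_INITIAL_VALUE might return
   (details vary by target): handing back the register's save slot as a MEM
   lets reload use that slot directly as the pseudo's equivalent memory,
   while handing back a call-saved hard REG simply renumbers the pseudo onto
   it, as the code below shows.  */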
3070 allocate_initial_values (reg_equiv_memory_loc)
3071 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3073 #ifdef ALLOCATE_INITIAL_VALUE
3074 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3080 for (i = 0; i < ivs->num_entries; i++)
3082 int regno = REGNO (ivs->entries[i].pseudo);
3083 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3085 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3087 else if (GET_CODE (x) == MEM)
3088 reg_equiv_memory_loc[regno] = x;
3089 else if (GET_CODE (x) == REG)
3091 reg_renumber[regno] = REGNO (x);
3092 /* Poke the regno right into regno_reg_rtx
3093 so that even fixed regs are accepted. */
3094 REGNO (ivs->entries[i].pseudo) = REGNO (x);