/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
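/* A worked example of the rounding: CEIL_ROUND (13, 8) is
   (13 + 7) & ~7 == 16, while CEIL_ROUND (16, 8) stays 16.  The mask
   trick is only valid when ALIGN is a power of two.  */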
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
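/* Under this default, a function with two arguments gets a budget of
   8 * (8 + 2) = 80 insns when compiled for speed, but only
   1 + (3 * 2) / 2 = 4 insns when optimizing for size, matching the
   call-plus-argument cost estimate in the comment above.  */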
/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif
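/* A target that knows its attributes are safe to inline can override
   the default in its tm.h.  A hypothetical sketch (the hook name here
   is illustrative, not an existing target definition):

     #define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) \
       target_attributes_are_inlinable_p (FNDECL)

   where the target inspects DECL_MACHINE_ATTRIBUTES (FNDECL) and
   returns nonzero only for attributes it can honor in an inlined
   copy.  */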
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair {

typedef struct initial_value_struct {
  initial_value_pair *entries;
} initial_value_struct;

static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
static tree integrate_decl_tree PARAMS ((tree,
                                         struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
                                     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
void set_decl_abstract_flags PARAMS ((tree, int));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function being
   inlined.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
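/* Callers index the map with CODE_LABEL_NUMBER, as in copy_insn_list
   below:

     copy = emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));

   so label_map must cover the label numbers of the inlined function,
   [min_labelno, max_labelno).  */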
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
                  ? (MAX_INLINE_INSNS
                     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
                  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");
      N_("function with label addresses used in initializers cannot inline");
  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return N_("no prototype, and parameter address used; cannot be inline");
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
               && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          ninsns++;

      if (ninsns >= max_insns)
        return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
        return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
        copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (INSN_P (insn))
        {
          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs, NULL);

          /* If this is a CALL_PLACEHOLDER insn then we need to look into the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              int i;

              for (i = 0; i < 3; i++)
                save_parm_insns (XEXP (PATTERN (insn), i),
                                 first_nonparm_insn);
            }
        }
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
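/* So, for example, an address of the form

     (plus (reg N) (const_int 8))

   satisfies FIXED_BASE_PLUS_P when N falls in the virtual register
   range (virtual_stack_vars_rtx and friends): a constant offset from a
   fixed base, which is safe to record as a constant equivalence.  */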
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a pseudo-register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
          && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
          && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
        SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
          - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
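/* Note that both comparison functions order blocks by the numeric
   value of the BLOCK_ABSTRACT_ORIGIN pointer.  That is all bsearch
   needs in order to find a remapped block from its original (see the
   NOTE_INSN_BLOCK_BEG handling in copy_insn_list), but the resulting
   order is otherwise meaningless and can vary between runs.  */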
/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
               ? NEXT_INSN (inl_f->inl_last_parm_insn)
               : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms type match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
          || mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */
  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

          store_expr (arg, stack_slot, 0);
          arg_vals[i] = XEXP (stack_slot, 0);
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            {
              int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
              enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

              pmode = promote_mode (TREE_TYPE (formal), pmode,
                                    &unsignedp, 0);

              if (GET_MODE (loc) != pmode)
                abort ();

              /* The mode of LOC and ARG can differ if LOC was a variable
                 that had its mode promoted via PROMOTED_MODE.  */
              arg_vals[i] = convert_modes (pmode,
                                           TYPE_MODE (TREE_TYPE (arg)),
                                           expand_expr (arg, NULL_RTX, mode,
                                                        EXPAND_SUM),
                                           unsignedp);
            }
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
                           + (max_regno - FIRST_PSEUDO_REGISTER)
                           "expand_inline_function");
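  /* Following the estimate described above, inlining a three-argument
     callee reserves 3 * 15 + 5 + 5 = 55 slots beyond the pseudo counts
     of the two functions.  */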
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));

      RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     later.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.  It
             will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             with the parameter now; we will call store_expr later.  In
             this case, however, we must ensure that the virtual stack and
             incoming arg rtx values are expanded now so that we can be
             sure we have enough slots in the const equiv map since the
             store_expr call can easily blow the size estimate.  */
          if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
            copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
        }
      else if (GET_CODE (loc) == REG)
        process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
        {
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          process_reg_param (map, locreal, copyreal);
          process_reg_param (map, locimag, copyimag);
        }
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));

          RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */
  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
         ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
        {
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
        }
      else
        {
          if (! structure_value_addr
              || ! aggregate_value_p (DECL_RESULT (fndecl)))
            abort ();

          /* Pass the function the address in which to return a structure
             value.  Note that a constructor can cause someone to call us
             with STRUCTURE_VALUE_ADDR, but the initialization takes place
             via the first parameter, rather than the struct return address.

             We have two cases: If the address is a simple register
             indirect, use the mapping mechanism to point that register to
             our structure return address.  Otherwise, store the structure
             return value into the place that it will be referenced from.  */

          if (GET_CODE (XEXP (loc, 0)) == REG)
            {
              temp = force_operand (structure_value_addr, NULL_RTX);
              temp = force_reg (Pmode, temp);
              /* A virtual register might be invalid in an insn, because
                 it can cause trouble in reload.  Since we don't have access
                 to the expanders at map translation time, make sure we have
                 a proper register now.
                 If a virtual register is actually valid, cse or combine
                 can put it into the mapped insns.  */
              if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
                  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
                temp = copy_to_mode_reg (Pmode, temp);
              map->reg_map[REGNO (XEXP (loc, 0))] = temp;

              if (CONSTANT_P (structure_value_addr)
                  || GET_CODE (structure_value_addr) == ADDRESSOF
                  || (GET_CODE (structure_value_addr) == PLUS
                      && (XEXP (structure_value_addr, 0)
                          == virtual_stack_vars_rtx)
                      && (GET_CODE (XEXP (structure_value_addr, 1))
                          == CONST_INT)))
                SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
                                      CONST_AGE_PARM);
            }
          else
            {
              temp = copy_rtx_and_substitute (loc, map, 1);
              subst_constants (&temp, NULL_RTX, map, 0);
              apply_change_group ();
              emit_move_insn (temp, structure_value_addr);
            }
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).
         We have to use the mode of the result's RTL, rather than
         its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        {
          /* Don't make BLKmode registers.  If this looks like
             a BLKmode object being returned in a register, get
             the mode from that, otherwise abort.  */
          if (departing_mode == BLKmode)
            {
              if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
                {
                  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
                  arriving_mode = departing_mode;
                }
              else
                abort ();
            }

          target = gen_reg_rtx (departing_mode);
        }

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        {
          /* Avoid creating a paradoxical subreg wider than
             BITS_PER_WORD, since that is illegal.  */
          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
            {
              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
                                          GET_MODE_BITSIZE (arriving_mode)))
                /* Maybe could be handled by using convert_move () ?  */
                abort ();
              reg_to_map = gen_reg_rtx (arriving_mode);
              target = gen_lowpart (departing_mode, reg_to_map);
            }
          else
            reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
        }
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
        abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
          || GET_CODE (XEXP (loc, 1)) != REG)
        abort ();

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
        abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();
  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
          (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
         map->block_map->elements_used,
         sizeof (tree),
         compare_blocks);
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode since
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
        = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);
  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
                            memory_address (TYPE_MODE (type),
                                            structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register rtx insn;
  register int i;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          set = single_set (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && set != 0
              && GET_CODE (SET_DEST (set)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
            {
              if (volatile_refs_p (SET_SRC (set)))
                {
                  rtx new_set;

                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

                  new_set = single_set (copy);
                  if (new_set == 0)
                    abort ();

                  SET_DEST (new_set)
                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
                }
              /* If the source and destination are the same and it
                 has a note on it, keep the insn.  */
              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
                       && REG_NOTES (insn) != 0)
                copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
              else
                break;
            }
          /* Similarly if an ignored return value is clobbered.  */
          else if (map->inline_target == 0
                   && GET_CODE (pattern) == CLOBBER
                   && GET_CODE (XEXP (pattern, 0)) == REG
                   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            break;

          /* If this is setting the static chain rtx, omit it.  */
          else if (static_chain_value != 0
                   && set != 0
                   && GET_CODE (SET_DEST (set)) == REG
                   && rtx_equal_p (SET_DEST (set),
                                   static_chain_incoming_rtx))
            break;

          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && set != 0
                   && rtx_equal_p (SET_SRC (set),
                                   static_chain_incoming_rtx))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

              copy = emit_move_insn (newdest, static_chain_value);
              static_chain_value = 0;
            }

          /* If this is setting the virtual stack vars register, this must
             be the code at the handler for a builtin longjmp.  The value
             saved in the setjmp buffer will be the address of the frame
             we've made for this inlined instance within our frame.  But we
             know the offset of that value so we can use it to reconstruct
             our virtual stack vars register from that value.  If we are
             copying it from the stack pointer, leave it unchanged.  */
          else if (set != 0
                   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
            {
              HOST_WIDE_INT offset;
              temp = map->reg_map[REGNO (SET_DEST (set))];
              temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                         REGNO (temp)).rtx;

              if (rtx_equal_p (temp, virtual_stack_vars_rtx))
                offset = 0;
              else if (GET_CODE (temp) == PLUS
                       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
                       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
                offset = INTVAL (XEXP (temp, 1));
              else
                abort ();

              if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
                temp = SET_SRC (set);
              else
                temp = force_operand (plus_constant (SET_SRC (set),
                                                     - offset),
                                      NULL_RTX);

              copy = emit_move_insn (virtual_stack_vars_rtx, temp);
            }
          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
          /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          break;
        case JUMP_INSN:
          if (map->integrating && returnjump_p (insn))
            {
              if (map->local_return_label == 0)
                map->local_return_label = gen_label_rtx ();
              pattern = gen_jump (map->local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

          copy = emit_jump_insn (pattern);
#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* If the previous insn set cc0 for us, delete it.  */
              if (sets_cc0_p (PREV_INSN (copy)))
                delete_insn (PREV_INSN (copy));
#endif
              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_insn (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is an unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;

        case CALL_INSN:
          /* If this is a CALL_PLACEHOLDER insn then we need to copy the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              rtx sequence[3];
              rtx tail_label;

              for (i = 0; i < 3; i++)
                {
                  rtx seq;

                  sequence[i] = NULL_RTX;
                  seq = XEXP (PATTERN (insn), i);
                  if (seq)
                    {
                      start_sequence ();
                      copy_insn_list (seq, map, static_chain_value);
                      sequence[i] = get_insns ();
                      end_sequence ();
                    }
                }

              /* Find the new tail recursion label.
                 It will already be substituted into sequence[2].  */
              tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
                                                    map, 0);

              copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
                                                               sequence[0],
                                                               sequence[1],
                                                               sequence[2],
                                                               tail_label));
              break;
            }

          pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
          copy = emit_call_insn (pattern);

          SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
          CONST_CALL_P (copy) = CONST_CALL_P (insn);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */

          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
                                       map, 0);
#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
          break;

        case CODE_LABEL:
          copy = emit_label (get_label_from_map (map,
                                                 CODE_LABEL_NUMBER (insn)));
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;

        case NOTE:
          /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
             discarded because it is important to have only one of
             each in the current function.

             NOTE_INSN_DELETED notes aren't useful.

             NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
             pointer (which will soon be dangling) confuses flow's
             attempts to preserve bb structures during the compilation
             of a function.  */
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
            {
              copy = emit_note (NOTE_SOURCE_FILE (insn),
                                NOTE_LINE_NUMBER (insn));
              if (copy
                  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
                      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
                  && NOTE_BLOCK (insn))
                {
                  tree *mapped_block_p;

                  mapped_block_p
                    = (tree *) bsearch (NOTE_BLOCK (insn),
                                        &VARRAY_TREE (map->block_map, 0),
                                        map->block_map->elements_used,
                                        sizeof (tree),
                                        find_block);

                  if (!mapped_block_p)
                    abort ();
                  else
                    NOTE_BLOCK (copy) = *mapped_block_p;
                }
              else if (copy
                       && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
                NOTE_EXPECTED_VALUE (copy)
                  = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
                                             map, 0);
            }
          else
            copy = 0;
          break;

        default:
          abort ();
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}
/* Copy the REG_NOTES.  Increment const_age, so that only constants
   from parameters can be substituted in.  These are the only ones
   that are valid across the entire function.  */

static void
copy_insn_notes (insns, map, eh_region_offset)
     rtx insns;
     struct inline_remap *map;
     int eh_region_offset;
{
  rtx insn, new_insn;

  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      new_insn = map->insn_map[INSN_UID (insn)];
      if (! new_insn)
        continue;

      if (REG_NOTES (insn))
        {
          rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

          /* We must also do subst_constants, in case one of our parameters
             has const type and constant value.  */
          subst_constants (&note, NULL_RTX, map, 0);
          apply_change_group ();
          REG_NOTES (new_insn) = note;

          /* Delete any REG_LABEL notes from the chain.  Remap any
             REG_EH_REGION notes.  */
          for (; note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL)
                remove_note (new_insn, note);
              else if (REG_NOTE_KIND (note) == REG_EH_REGION)
                XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
                                          + eh_region_offset);
            }
        }

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          for (i = 0; i < 3; i++)
            copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
        }

      if (GET_CODE (insn) == JUMP_INSN
          && GET_CODE (PATTERN (insn)) == RESX)
        XINT (PATTERN (new_insn), 0) += eh_region_offset;
    }
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
                                          current_function_decl);
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?;  */
      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      SET_DECL_RTL (decl, new_decl_rtl);
    }
}

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);

      if (DECL_RTL_SET_P (t))
        {
          rtx r;

          SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));

          /* Fully instantiate the address with the equivalent form so that the
             debugging information contains the actual register, instead of the
             virtual register.  Do this by not passing an insn to
             subst_constants.  */
          r = DECL_RTL (d);
          subst_constants (&r, NULL_RTX, map, 1);
          SET_DECL_RTL (d, r);
          apply_change_group ();
        }

      /* Add this declaration to the list of variables in the new
         block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);
1799 /* If the stack pointer register shows up, it must be part of
1800 stack-adjustments (*not* because we eliminated the frame pointer!).
1801 Small hard registers are returned as-is. Pseudo-registers
1802 go through their `reg_map'. */
1803 regno = REGNO (orig);
1804 if (regno <= LAST_VIRTUAL_REGISTER
1805 || (map->integrating
1806 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1808 /* Some hard registers are also mapped,
1809 but others are not translated. */
1810 if (map->reg_map[regno] != 0)
1811 return map->reg_map[regno];
1813 /* If this is the virtual frame pointer, make space in current
1814 function's stack frame for the stack frame of the inline function.
1816 Copy the address of this area into a pseudo. Map
1817 virtual_stack_vars_rtx to this pseudo and set up a constant
1818 equivalence for it to be the address. This will substitute the
1819 address into insns where it can be substituted and use the new
1820 pseudo where it can't. */
1821 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1824 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1825 #ifdef FRAME_GROWS_DOWNWARD
1827 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1830 /* In this case, virtual_stack_vars_rtx points to one byte
1831 higher than the top of the frame area. So make sure we
1832 allocate a big enough chunk to keep the frame pointer
1833 aligned like a real one. */
1835 size = CEIL_ROUND (size, alignment);
1838 loc = assign_stack_temp (BLKmode, size, 1);
1839 loc = XEXP (loc, 0);
1840 #ifdef FRAME_GROWS_DOWNWARD
1841 /* In this case, virtual_stack_vars_rtx points to one byte
1842 higher than the top of the frame area. So compute the offset
1843 to one byte higher than our substitute frame. */
1844 loc = plus_constant (loc, size);
1846 map->reg_map[regno] = temp
1847 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1849 #ifdef STACK_BOUNDARY
1850 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1853 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1855 seq = gen_sequence ();
1857 emit_insn_after (seq, map->insns_at_start);
1860 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1861 || (map->integrating
1862 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1865 /* Do the same for a block to contain any arguments referenced
1868 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1871 loc = assign_stack_temp (BLKmode, size, 1);
1872 loc = XEXP (loc, 0);
1873 /* When arguments grow downward, the virtual incoming
1874 args pointer points to the top of the argument block,
1875 so the remapped location better do the same. */
1876 #ifdef ARGS_GROW_DOWNWARD
1877 loc = plus_constant (loc, size);
1879 map->reg_map[regno] = temp
1880 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1882 #ifdef STACK_BOUNDARY
1883 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1886 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1888 seq = gen_sequence ();
1890 emit_insn_after (seq, map->insns_at_start);
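/* For example (sizes hypothetical): with args_size == 16 and
   ARGS_GROW_DOWNWARD defined, the plus_constant call above biases the
   temporary's address by 16, so the remapped pointer, like the virtual
   incoming-args pointer itself, designates the high end of the block.  */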
1893 else if (REG_FUNCTION_VALUE_P (orig))
1895 /* This is a reference to the function return value. If
1896 the function doesn't have a return value, error. If the
1897 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
1898 if (map->inline_target == 0)
1900 if (rtx_equal_function_value_matters)
1901 /* This is an ignored return value. We must not
1902 leave it in with REG_FUNCTION_VALUE_P set, since
1903 that would confuse subsequent inlining of the
1904 current function into a later function. */
1905 return gen_rtx_REG (GET_MODE (orig), regno);
1907 /* Must be unrolling loops or replicating code if we
1908 reach here, so return the register unchanged. */
1911 else if (GET_MODE (map->inline_target) != BLKmode
1912 && mode != GET_MODE (map->inline_target))
1913 return gen_lowpart (mode, map->inline_target);
1915 return map->inline_target;
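/* E.g. (modes hypothetical): if the call site reads the result in
   SImode while map->inline_target is a DImode register, the
   gen_lowpart call above hands back an SImode lowpart of that
   register rather than the register itself.  */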
1917 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1918 /* If leaf_renumber_regs_insn() might remap this register to
1919 some other number, make sure we don't share it with the
1920 inlined function, otherwise delayed optimization of the
1921 inlined function may change it in place, breaking our
1922 reference to it.  We may still share it within the
1923 function, so create an entry for this register in the leaf_reg_map.  */
1925 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1926 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1928 if (!map->leaf_reg_map[regno][mode])
1929 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
1930 return map->leaf_reg_map[regno][mode];
1938 if (map->reg_map[regno] == NULL)
1940 map->reg_map[regno] = gen_reg_rtx (mode);
1941 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1942 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1943 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1944 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1946 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1947 mark_reg_pointer (map->reg_map[regno],
1948 map->regno_pointer_align[regno]);
1950 return map->reg_map[regno];
1953 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1954 return simplify_gen_subreg (GET_MODE (orig), copy,
1955 GET_MODE (SUBREG_REG (orig)),
1956 SUBREG_BYTE (orig));
1959 copy = gen_rtx_ADDRESSOF (mode,
1960 copy_rtx_and_substitute (XEXP (orig, 0),
1962 0, ADDRESSOF_DECL (orig));
1963 regno = ADDRESSOF_REGNO (orig);
1964 if (map->reg_map[regno])
1965 regno = REGNO (map->reg_map[regno]);
1966 else if (regno > LAST_VIRTUAL_REGISTER)
1968 temp = XEXP (orig, 0);
1969 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1970 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1971 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1972 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1973 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1975 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1976 mark_reg_pointer (map->reg_map[regno],
1977 map->regno_pointer_align[regno]);
1978 regno = REGNO (map->reg_map[regno]);
1980 ADDRESSOF_REGNO (copy) = regno;
1985 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1986 to (use foo) if the original insn didn't have a subreg.
1987 Removing the subreg distorts the VAX movstrhi pattern
1988 by changing the mode of an operand. */
1989 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1990 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1991 copy = SUBREG_REG (copy);
1992 return gen_rtx_fmt_e (code, VOIDmode, copy);
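/* Concretely (registers hypothetical): if the original operand was
   (reg:SI 42) but its copy came back as (subreg:SI (reg:DI 99) 0),
   the SUBREG is stripped and we return (use (reg:DI 99)); a SUBREG
   already present in the original is kept, since stripping it would
   change the operand mode an insn pattern sees.  */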
1995 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1996 = LABEL_PRESERVE_P (orig);
1997 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1999 /* We need to handle "deleted" labels that appear in the DECL_RTL of a LABEL_DECL.  */
2002 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
2003 return map->insn_map[INSN_UID (orig)];
2010 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2011 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2013 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2015 /* The fact that this label was previously nonlocal does not mean
2016 it still is, so we must check if it is within the range of
2017 this function's labels. */
2018 LABEL_REF_NONLOCAL_P (copy)
2019 = (LABEL_REF_NONLOCAL_P (orig)
2020 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2021 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2023 /* If we have made a nonlocal label local, it means that this
2024 inlined call will be referring to our nonlocal goto handler.
2025 So make sure we create one for this block; we normally would
2026 not since this is not otherwise considered a "call". */
2027 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2028 function_call_count++;
2038 /* Symbols which represent the address of a label stored in the constant
2039 pool must be modified to point to a constant pool entry for the
2040 remapped label. Otherwise, symbols are returned unchanged. */
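/* For instance (label hypothetical): a pool entry that held
   (label_ref L5) in the inline function must not be reused as-is;
   L5 has been remapped by the copy, so a fresh pool entry is built
   around the remapped LABEL_REF and its address is returned.  */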
2041 if (CONSTANT_POOL_ADDRESS_P (orig))
2043 struct function *f = inlining ? inlining : cfun;
2044 rtx constant = get_pool_constant_for_function (f, orig);
2045 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2048 rtx temp = force_const_mem (const_mode,
2049 copy_rtx_and_substitute (constant,
2053 /* Legitimizing the address here is incorrect.
2055 Since we had a SYMBOL_REF before, we can assume it is valid
2056 to have one in this position in the insn.
2058 Also, change_address may create new registers. These
2059 registers will not have valid reg_map entries. This can
2060 cause try_constants() to fail because it assumes that all
2061 registers in the rtx have valid reg_map entries, and it may
2062 end up replacing one of these new registers with junk. */
2064 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2065 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2068 temp = XEXP (temp, 0);
2070 #ifdef POINTERS_EXTEND_UNSIGNED
2071 if (GET_MODE (temp) != GET_MODE (orig))
2072 temp = convert_memory_address (GET_MODE (orig), temp);
2076 else if (GET_CODE (constant) == LABEL_REF)
2077 return XEXP (force_const_mem
2079 copy_rtx_and_substitute (constant, map, for_lhs)),
2086 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2087 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2088 duplicate of a CONST_DOUBLE we have already seen. */
2089 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2093 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2094 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2097 return immed_double_const (CONST_DOUBLE_LOW (orig),
2098 CONST_DOUBLE_HIGH (orig), VOIDmode);
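/* For example (assuming a 32-bit HOST_WIDE_INT): the DImode constant
   0x100000000 is carried with CONST_DOUBLE_LOW == 0 and
   CONST_DOUBLE_HIGH == 1; rebuilding it through immed_double_const
   gives a fresh rtx that does not inherit the stale CONST_DOUBLE_MEM
   of the original.  */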
2101 /* Make new constant pool entry for a constant
2102 that was in the pool of the inline function. */
2103 if (RTX_INTEGRATED_P (orig))
2108 /* If a single asm insn contains multiple output operands then
2109 it contains multiple ASM_OPERANDS rtx's that share the input
2110 and constraint vecs. We must make sure that the copied insn
2111 continues to share it. */
2112 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2114 copy = rtx_alloc (ASM_OPERANDS);
2115 copy->volatil = orig->volatil;
2116 PUT_MODE (copy, GET_MODE (orig));
2117 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2118 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2119 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2120 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2121 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2122 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2123 = map->copy_asm_constraints_vector;
2124 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2125 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
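/* A sketch of the sharing being preserved (operands hypothetical):
     asm ("..." : "=r" (a), "=m" (b) : "g" (c));
   yields one ASM_OPERANDS per output, both pointing at the same input
   and constraint vectors; the map->copy_asm_* fields make every copy
   after the first reuse the vectors copied for the first.  */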
2131 /* This is given special treatment because the first
2132 operand of a CALL is a (MEM ...) which may get
2133 forced into a register for cse. This is undesirable
2134 if function-address cse isn't wanted or if we won't do cse. */
2135 #ifndef NO_FUNCTION_CSE
2136 if (! (optimize && ! flag_no_function_cse))
2141 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2142 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2144 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
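/* The point of the special case (symbol hypothetical): copying
   (call (mem:QI (symbol_ref "foo")) ...) operand by operand keeps the
   address as a MEM of a SYMBOL_REF, so nothing later forces "foo"
   into a register when function-address cse is unwanted.  */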
2148 /* Must be ifdefed out for loop unrolling to work. */
2154 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2155 Adjust the setting by the offset of the area we made.
2156 If the nonlocal goto is into the current function,
2157 this will result in unnecessarily bad code, but should work. */
2158 if (SET_DEST (orig) == virtual_stack_vars_rtx
2159 || SET_DEST (orig) == virtual_incoming_args_rtx)
2161 /* In case a translation hasn't occurred already, make one now. */
2164 HOST_WIDE_INT loc_offset;
2166 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2167 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2168 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2169 REGNO (equiv_reg)).rtx;
2171 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2173 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2176 (copy_rtx_and_substitute (SET_SRC (orig),
2182 return gen_rtx_SET (VOIDmode,
2183 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2184 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
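/* E.g. (offset hypothetical): if the block made for the inline frame
   has the constant equivalence (plus (reg PSEUDO) (const_int -32)),
   then loc_offset == -32 and a nonlocal-goto restore of
   virtual-stack-vars has its source adjusted by that offset, keeping
   the saved pointer consistent with the remapped frame.  */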
2189 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2190 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2192 enum machine_mode const_mode
2193 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2195 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2197 constant = copy_rtx_and_substitute (constant, map, 0);
2199 /* If this was an address of a constant pool entry that itself
2200 had to be placed in the constant pool, it might not be a
2201 valid address. So the recursive call might have turned it
2202 into a register. In that case, it isn't a constant any
2203 more, so return it. This has the potential of changing a
2204 MEM into a REG, but we'll assume that it is safe. */
2205 if (! CONSTANT_P (constant))
2208 return validize_mem (force_const_mem (const_mode, constant));
2211 copy = rtx_alloc (MEM);
2212 PUT_MODE (copy, mode);
2213 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2214 MEM_COPY_ATTRIBUTES (copy, orig);
2221 copy = rtx_alloc (code);
2222 PUT_MODE (copy, mode);
2223 copy->in_struct = orig->in_struct;
2224 copy->volatil = orig->volatil;
2225 copy->unchanging = orig->unchanging;
2227 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2229 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2231 switch (*format_ptr++)
2234 /* Copy this through the wide int field; that's safest. */
2235 X0WINT (copy, i) = X0WINT (orig, i);
2240 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2244 /* Change any references to old-insns to point to the
2245 corresponding copied insns. */
2246 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2250 XVEC (copy, i) = XVEC (orig, i);
2251 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2253 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2254 for (j = 0; j < XVECLEN (copy, i); j++)
2255 XVECEXP (copy, i, j)
2256 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2262 XWINT (copy, i) = XWINT (orig, i);
2266 XINT (copy, i) = XINT (orig, i);
2270 XSTR (copy, i) = XSTR (orig, i);
2274 XTREE (copy, i) = XTREE (orig, i);
2282 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2284 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2285 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2286 map->copy_asm_constraints_vector
2287 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2293 /* Substitute known constant values into INSN, if that is valid. */
2296 try_constants (insn, map)
2298 struct inline_remap *map;
2304 /* First try just updating addresses, then other things. This is
2305 important when we have something like the store of a constant
2306 into memory and we can update the memory address but the machine
2307 does not support a constant source.  */
2308 subst_constants (&PATTERN (insn), insn, map, 1);
2309 apply_change_group ();
2310 subst_constants (&PATTERN (insn), insn, map, 0);
2311 apply_change_group ();
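/* Illustration (insn hypothetical): for
     (set (mem:SI (reg:SI 70)) (reg:SI 71))
   where reg 70 is known to be frame + 8 and reg 71 to be (const_int 0),
   the first, address-only pass can rewrite the MEM address even on a
   machine whose store pattern rejects a constant source; the second
   pass then tries the source too, and each apply_change_group keeps
   only the substitutions that leave a recognizable insn.  */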
2313 /* Show we don't know the value of anything stored or clobbered. */
2314 note_stores (PATTERN (insn), mark_stores, NULL);
2315 map->last_pc_value = 0;
2317 map->last_cc0_value = 0;
2320 /* Set up any constant equivalences made in this insn. */
2321 for (i = 0; i < map->num_sets; i++)
2323 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2325 int regno = REGNO (map->equiv_sets[i].dest);
2327 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2328 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2329 /* The following clause is a hack to make the case work where GNU C++
2330 reassigns a variable to make cse work right. */
2331 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2333 map->equiv_sets[i].equiv))
2334 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2335 map->equiv_sets[i].equiv, map->const_age);
2337 else if (map->equiv_sets[i].dest == pc_rtx)
2338 map->last_pc_value = map->equiv_sets[i].equiv;
2340 else if (map->equiv_sets[i].dest == cc0_rtx)
2341 map->last_cc0_value = map->equiv_sets[i].equiv;
2346 /* Substitute known constants for pseudo regs in the contents of LOC,
2347 which are part of INSN.
2348 If INSN is zero, the substitution should always be done (this is used to update DECL_RTL).
2350 These changes are taken out by try_constants if the result is not valid.
2352 Note that we are more concerned with determining when the result of a SET
2353 is a constant, for further propagation, than actually inserting constants
2354 into insns; cse will do the latter task better.
2356 This function is also used to adjust the address of items previously addressed
2357 via the virtual stack variable or virtual incoming arguments registers.
2359 If MEMONLY is nonzero, only make changes inside a MEM. */
2362 subst_constants (loc, insn, map, memonly)
2365 struct inline_remap *map;
2370 register enum rtx_code code;
2371 register const char *format_ptr;
2372 int num_changes = num_validated_changes ();
2374 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2376 code = GET_CODE (x);
2392 validate_change (insn, loc, map->last_cc0_value, 1);
2398 /* The only thing we can do with a USE or CLOBBER is possibly do
2399 some substitutions in a MEM within it. */
2400 if (GET_CODE (XEXP (x, 0)) == MEM)
2401 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2405 /* Substitute for parms and known constants. Don't replace
2406 hard regs used as user variables with constants. */
2409 int regno = REGNO (x);
2410 struct const_equiv_data *p;
2412 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2413 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2414 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2416 && p->age >= map->const_age)
2417 validate_change (insn, loc, p->rtx, 1);
2422 /* SUBREG applied to something other than a reg
2423 should be treated as ordinary, since that must
2424 be a special hack and we don't know how to treat it specially.
2425 Consider for example mulsidi3 in m68k.md.
2426 Ordinary SUBREG of a REG needs this special treatment. */
2427 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2429 rtx inner = SUBREG_REG (x);
2432 /* We can't call subst_constants on &SUBREG_REG (x) because any
2433 constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2434 see what is inside, try to form the new SUBREG and see if that is
2435 valid. We handle two cases: extracting a full word in an
2436 integral mode and extracting the low part. */
2437 subst_constants (&inner, NULL_RTX, map, 0);
2438 new = simplify_gen_subreg (GET_MODE (x), inner,
2439 GET_MODE (SUBREG_REG (x)),
2443 validate_change (insn, loc, new, 1);
2445 cancel_changes (num_changes);
2452 subst_constants (&XEXP (x, 0), insn, map, 0);
2454 /* If a memory address got spoiled, change it back. */
2455 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2456 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2457 cancel_changes (num_changes);
2462 /* Substitute constants in our source, and in any arguments to a
2463 complex (e.g., ZERO_EXTRACT) destination, but not in the destination itself.  */
2465 rtx *dest_loc = &SET_DEST (x);
2466 rtx dest = *dest_loc;
2468 enum machine_mode compare_mode = VOIDmode;
2470 /* If SET_SRC is a COMPARE which subst_constants would turn into
2471 COMPARE of 2 VOIDmode constants, note the mode in which the comparison is done.  */
2473 if (GET_CODE (SET_SRC (x)) == COMPARE)
2476 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2482 compare_mode = GET_MODE (XEXP (src, 0));
2483 if (compare_mode == VOIDmode)
2484 compare_mode = GET_MODE (XEXP (src, 1));
2488 subst_constants (&SET_SRC (x), insn, map, memonly);
2491 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2492 || GET_CODE (*dest_loc) == SUBREG
2493 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2495 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2497 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2498 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2500 dest_loc = &XEXP (*dest_loc, 0);
2503 /* Do substitute in the address of a destination in memory. */
2504 if (GET_CODE (*dest_loc) == MEM)
2505 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2507 /* Check for the case where DEST is a SUBREG, both it and the underlying
2508 register are no wider than one word, and the SUBREG has the wider mode.
2509 In that case, we are really setting the underlying register to the
2510 source converted to the mode of DEST. So indicate that. */
2511 if (GET_CODE (dest) == SUBREG
2512 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2513 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2514 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2515 <= GET_MODE_SIZE (GET_MODE (dest)))
2516 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2518 src = tem, dest = SUBREG_REG (dest);
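/* For example (modes hypothetical): for
     (set (subreg:SI (reg:HI 80) 0) (const_int 5))
   we fall through with dest == (reg:HI 80) and src converted by
   gen_lowpart_if_possible, since the store really sets the whole
   underlying register.  */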
2520 /* If storing a recognizable value, save it for later recording. */
2521 if ((map->num_sets < MAX_RECOG_OPERANDS)
2522 && (CONSTANT_P (src)
2523 || (GET_CODE (src) == REG
2524 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2525 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2526 || (GET_CODE (src) == PLUS
2527 && GET_CODE (XEXP (src, 0)) == REG
2528 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2529 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2530 && CONSTANT_P (XEXP (src, 1)))
2531 || GET_CODE (src) == COMPARE
2536 && (src == pc_rtx || GET_CODE (src) == RETURN
2537 || GET_CODE (src) == LABEL_REF))))
2539 /* Normally, this copy won't do anything.  But, if SRC is a COMPARE,
2540 it will cause us to save the COMPARE with any constants
2541 substituted, which is what we want for later. */
2542 rtx src_copy = copy_rtx (src);
2543 map->equiv_sets[map->num_sets].equiv = src_copy;
2544 map->equiv_sets[map->num_sets++].dest = dest;
2545 if (compare_mode != VOIDmode
2546 && GET_CODE (src) == COMPARE
2547 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2552 && GET_MODE (XEXP (src, 0)) == VOIDmode
2553 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2555 map->compare_src = src_copy;
2556 map->compare_mode = compare_mode;
2566 format_ptr = GET_RTX_FORMAT (code);
2568 /* If the first operand is an expression, save its mode for later. */
2569 if (*format_ptr == 'e')
2570 op0_mode = GET_MODE (XEXP (x, 0));
2572 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2574 switch (*format_ptr++)
2581 subst_constants (&XEXP (x, i), insn, map, memonly);
2593 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2594 for (j = 0; j < XVECLEN (x, i); j++)
2595 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2604 /* If this is a commutative operation, move a constant to the second
2605 operand unless the second operand is already a CONST_INT. */
2607 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2608 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2610 rtx tem = XEXP (x, 0);
2611 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2612 validate_change (insn, &XEXP (x, 1), tem, 1);
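/* E.g.: (plus:SI (const_int 4) (reg:SI 60)) is canonicalized here to
   (plus:SI (reg:SI 60) (const_int 4)) before we try to simplify.  */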
2615 /* Simplify the expression in case we put in some constants. */
2617 switch (GET_RTX_CLASS (code))
2620 if (op0_mode == MAX_MACHINE_MODE)
2622 new = simplify_unary_operation (code, GET_MODE (x),
2623 XEXP (x, 0), op0_mode);
2628 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2630 if (op_mode == VOIDmode)
2631 op_mode = GET_MODE (XEXP (x, 1));
2632 new = simplify_relational_operation (code, op_mode,
2633 XEXP (x, 0), XEXP (x, 1));
2634 #ifdef FLOAT_STORE_FLAG_VALUE
2635 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2637 enum machine_mode mode = GET_MODE (x);
2638 if (new == const0_rtx)
2639 new = CONST0_RTX (mode);
2642 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2643 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2652 new = simplify_binary_operation (code, GET_MODE (x),
2653 XEXP (x, 0), XEXP (x, 1));
2658 if (op0_mode == MAX_MACHINE_MODE)
2661 if (code == IF_THEN_ELSE)
2663 rtx op0 = XEXP (x, 0);
2665 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2666 && GET_MODE (op0) == VOIDmode
2667 && ! side_effects_p (op0)
2668 && XEXP (op0, 0) == map->compare_src
2669 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2671 /* We have a compare of two VOIDmode constants for which
2672 we recorded the comparison mode. */
2674 simplify_relational_operation (GET_CODE (op0),
2679 if (temp == const0_rtx)
2681 else if (temp == const1_rtx)
2686 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2687 XEXP (x, 0), XEXP (x, 1),
2693 validate_change (insn, loc, new, 1);
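/* A sketch of the IF_THEN_ELSE case above (values hypothetical): once
   subst_constants has reduced a recorded COMPARE to two VOIDmode
   constants, the comparison is redone in map->compare_mode; if, say,
   (lt (const_int 2) (const_int 3)) folds to const1_rtx, the
   IF_THEN_ELSE collapses to its first arm.  */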
2696 /* Show that the registers modified no longer contain known constants.  We are
2697 called from note_stores with parts of the new insn. */
2700 mark_stores (dest, x, data)
2702 rtx x ATTRIBUTE_UNUSED;
2703 void *data ATTRIBUTE_UNUSED;
2706 enum machine_mode mode = VOIDmode;
2708 /* DEST is always the innermost thing set, except in the case of
2709 SUBREGs of hard registers. */
2711 if (GET_CODE (dest) == REG)
2712 regno = REGNO (dest), mode = GET_MODE (dest);
2713 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2715 regno = REGNO (SUBREG_REG (dest));
2716 if (regno < FIRST_PSEUDO_REGISTER)
2717 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2718 GET_MODE (SUBREG_REG (dest)),
2721 mode = GET_MODE (SUBREG_REG (dest));
2726 unsigned int uregno = regno;
2727 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2728 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2731 /* Ignore virtual stack var or virtual arg register since those
2732 are handled separately. */
2733 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2734 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2735 for (i = uregno; i <= last_reg; i++)
2736 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2737 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
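/* For instance (target hypothetical): a store to a DImode value
   living in the hard register pair 2/3 gives last_reg == 3, so the
   recorded equivalences for both hard regs 2 and 3 are flushed.  */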
2741 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2742 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2743 that it points to the node itself, thus indicating that the node is its
2744 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2745 the given node is NULL, recursively descend the decl/block tree which
2746 it is the root of, and for each other ..._DECL or BLOCK node contained
2747 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2748 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2749 values to point to themselves. */
2752 set_block_origin_self (stmt)
2755 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2757 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2760 register tree local_decl;
2762 for (local_decl = BLOCK_VARS (stmt);
2763 local_decl != NULL_TREE;
2764 local_decl = TREE_CHAIN (local_decl))
2765 set_decl_origin_self (local_decl); /* Potential recursion. */
2769 register tree subblock;
2771 for (subblock = BLOCK_SUBBLOCKS (stmt);
2772 subblock != NULL_TREE;
2773 subblock = BLOCK_CHAIN (subblock))
2774 set_block_origin_self (subblock); /* Recurse. */
2779 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2780 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2781 node so that it points to the node itself, thus indicating that the
2782 node represents its own (abstract) origin. Additionally, if the
2783 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2784 the decl/block tree of which the given node is the root of, and for
2785 each other ..._DECL or BLOCK node contained therein whose
2786 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2787 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2788 point to themselves. */
2791 set_decl_origin_self (decl)
2794 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2796 DECL_ABSTRACT_ORIGIN (decl) = decl;
2797 if (TREE_CODE (decl) == FUNCTION_DECL)
2801 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2802 DECL_ABSTRACT_ORIGIN (arg) = arg;
2803 if (DECL_INITIAL (decl) != NULL_TREE
2804 && DECL_INITIAL (decl) != error_mark_node)
2805 set_block_origin_self (DECL_INITIAL (decl));
2810 /* Given a pointer to some BLOCK node, and a boolean value to set the
2811 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2812 the given block, and for all local decls and all local sub-blocks
2813 (recursively) which are contained therein. */
2816 set_block_abstract_flags (stmt, setting)
2818 register int setting;
2820 register tree local_decl;
2821 register tree subblock;
2823 BLOCK_ABSTRACT (stmt) = setting;
2825 for (local_decl = BLOCK_VARS (stmt);
2826 local_decl != NULL_TREE;
2827 local_decl = TREE_CHAIN (local_decl))
2828 set_decl_abstract_flags (local_decl, setting);
2830 for (subblock = BLOCK_SUBBLOCKS (stmt);
2831 subblock != NULL_TREE;
2832 subblock = BLOCK_CHAIN (subblock))
2833 set_block_abstract_flags (subblock, setting);
2836 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2837 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2838 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2839 set the abstract flags for all of the parameters, local vars, local
2840 blocks and sub-blocks (recursively) to the same setting. */
2843 set_decl_abstract_flags (decl, setting)
2845 register int setting;
2847 DECL_ABSTRACT (decl) = setting;
2848 if (TREE_CODE (decl) == FUNCTION_DECL)
2852 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2853 DECL_ABSTRACT (arg) = setting;
2854 if (DECL_INITIAL (decl) != NULL_TREE
2855 && DECL_INITIAL (decl) != error_mark_node)
2856 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2860 /* Output the assembly language code for the function FNDECL
2861 from its DECL_SAVED_INSNS. Used for inline functions that are output
2862 at the end of compilation instead of where they appeared in the source. */
2865 output_inline_function (fndecl)
2868 struct function *old_cfun = cfun;
2869 enum debug_info_type old_write_symbols = write_symbols;
2870 struct gcc_debug_hooks *old_debug_hooks = debug_hooks;
2871 struct function *f = DECL_SAVED_INSNS (fndecl);
2874 current_function_decl = fndecl;
2875 clear_emit_caches ();
2877 set_new_last_label_num (f->inl_max_label_num);
2879 /* We're not deferring this any longer. */
2880 DECL_DEFER_OUTPUT (fndecl) = 0;
2882 /* If requested, suppress debugging information. */
2883 if (f->no_debugging_symbols)
2885 write_symbols = NO_DEBUG;
2886 debug_hooks = &do_nothing_debug_hooks;
2889 /* Do any preparation, such as emitting abstract debug info for the inline function
2890 before it gets mangled by optimization. */
2891 note_outlining_of_inline_function (fndecl);
2893 /* Compile this function all the way down to assembly code. */
2894 rest_of_compilation (fndecl);
2896 /* We can't inline this anymore. */
2898 DECL_INLINE (fndecl) = 0;
2901 current_function_decl = old_cfun ? old_cfun->decl : 0;
2902 write_symbols = old_write_symbols;
2903 debug_hooks = old_debug_hooks;
2907 /* Functions to keep track of the values hard regs had at the start of the function.  */
2911 has_func_hard_reg_initial_val (fun, reg)
2912 struct function *fun;
2915 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
2921 for (i = 0; i < ivs->num_entries; i++)
2922 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
2923 return ivs->entries[i].pseudo;
2929 get_func_hard_reg_initial_val (fun, reg)
2930 struct function *fun;
2933 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
2934 rtx rv = has_func_hard_reg_initial_val (fun, reg);
2941 fun->hard_reg_initial_vals = (void *) xmalloc (sizeof (initial_value_struct));
2942 ivs = fun->hard_reg_initial_vals;
2943 ivs->num_entries = 0;
2944 ivs->max_entries = 5;
2945 ivs->entries = (initial_value_pair *) xmalloc (5 * sizeof (initial_value_pair));
2948 if (ivs->num_entries >= ivs->max_entries)
2950 ivs->max_entries += 5;
2952 (initial_value_pair *) xrealloc (ivs->entries,
2954 * sizeof (initial_value_pair));
2957 ivs->entries[ivs->num_entries].hard_reg = reg;
2958 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
2960 return ivs->entries[ivs->num_entries++].pseudo;
2964 get_hard_reg_initial_val (mode, regno)
2965 enum machine_mode mode;
2968 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
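/* Typical use, sketched (register number hypothetical): a port that
   needs the entry-time value of a hard register can write

     rtx rv = get_hard_reg_initial_val (Pmode, 14);

   and gets back a pseudo; emit_initial_value_sets below later emits
   the pseudo <- hard-reg copies at the start of the function.  */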
2972 has_hard_reg_initial_val (mode, regno)
2973 enum machine_mode mode;
2976 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
2980 mark_hard_reg_initial_vals (fun)
2981 struct function *fun;
2983 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
2989 for (i = 0; i < ivs->num_entries; i ++)
2991 ggc_mark_rtx (ivs->entries[i].hard_reg);
2992 ggc_mark_rtx (ivs->entries[i].pseudo);
2997 setup_initial_hard_reg_value_integration (inl_f, remap)
2998 struct function *inl_f;
2999 struct inline_remap *remap;
3001 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3007 for (i = 0; i < ivs->num_entries; i ++)
3008 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3009 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3014 emit_initial_value_sets ()
3016 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3024 for (i = 0; i < ivs->num_entries; i++)
3025 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3029 emit_insns_after (seq, get_insns ());
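/* The sequence emitted above is just one move per recorded pair, e.g.
   (set (reg:SI 90) (reg:SI 14)) for a hypothetical entry, placed
   after the function's first insns.  */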