1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
31 #include "insn-config.h"
35 #include "integrate.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
48 extern struct obstack *function_maybepermanent_obstack;
50 /* Similar, but round to the next highest integer that meets the
   alignment. */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
65 /* Decide whether a function with a target specific attribute
66 attached can be inlined. By default we disallow this. */
67 #ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
68 #define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
71 static rtvec initialize_for_inline PARAMS ((tree));
72 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
73 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
75 static tree integrate_decl_tree PARAMS ((tree,
76 struct inline_remap *));
77 static void subst_constants PARAMS ((rtx *, rtx,
78 struct inline_remap *, int));
79 static void set_block_origin_self PARAMS ((tree));
80 static void set_block_abstract_flags PARAMS ((tree, int));
81 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
83 void set_decl_abstract_flags PARAMS ((tree, int));
84 static void mark_stores PARAMS ((rtx, rtx, void *));
85 static void save_parm_insns PARAMS ((rtx, rtx));
86 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
88 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
90 static int compare_blocks PARAMS ((const PTR, const PTR));
91 static int find_block PARAMS ((const PTR, const PTR));
93 /* Used by copy_rtx_and_substitute; this indicates whether the function is
94 called for the purpose of inlining or some other purpose (i.e. loop
95 unrolling). This affects how constant pool references are handled.
96 This variable contains the struct function for the inlined function. */
97 static struct function *inlining = 0;
99 /* Returns the Ith entry in the label_map contained in MAP. If the
100 Ith entry has not yet been set, return a fresh label. This function
101 performs a lazy initialization of label_map, thereby avoiding huge memory
102 explosions when the label_map gets very large. */
/* NOTE(review): the original numbering jumps (102 -> 105, 106 -> 109 -> 112),
   so the return type, the declaration of I, the zero-check on X, and the
   final return are not visible in this excerpt -- confirm against the full
   source before editing.  */
105 get_label_from_map (map, i)
106 struct inline_remap *map;
/* Fetch the cached label for slot I (may be 0 if never requested before).  */
109 rtx x = map->label_map[i];
/* Slot was empty: allocate a fresh label and memoize it in the map.  */
112 x = map->label_map[i] = gen_label_rtx ();
117 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
118 is safe and reasonable to integrate into other functions.
119 Nonzero means value is a warning msgid with a single %s
120 for the function's name. */
/* NOTE(review): interior lines are elided in this excerpt (the original
   numbering jumps repeatedly), including the return type, local decls for
   PARMS and INSN, several braces, and the label-address check around
   original line 160.  The visible code is a sequence of early-return
   rejection tests; each returns a diagnostic msgid string.  */
123 function_cannot_inline_p (fndecl)
124 register tree fndecl;
/* LAST is the last entry of the prototype's argument-type list; used below
   both for the varargs test and the "has a prototype" test.  */
127 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
129 /* For functions marked as inline increase the maximum size to
130 MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
131 use the limit given by INTEGRATE_THRESHOLD. */
133 int max_insns = (DECL_INLINE (fndecl))
135 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
136 : INTEGRATE_THRESHOLD (fndecl);
138 register int ninsns = 0;
/* Front end (or an earlier pass) has already vetoed inlining.  */
141 if (DECL_UNINLINABLE (fndecl))
142 return N_("function cannot be inline");
144 /* No inlines with varargs. */
145 if ((last && TREE_VALUE (last) != void_type_node)
146 || current_function_varargs)
147 return N_("varargs function cannot be inline");
149 if (current_function_calls_alloca)
150 return N_("function using alloca cannot be inline");
152 if (current_function_calls_setjmp)
153 return N_("function using setjmp cannot be inline");
155 if (current_function_calls_eh_return)
156 return N_("function uses __builtin_eh_return");
158 if (current_function_contains_functions)
159 return N_("function with nested functions cannot be inline");
/* NOTE(review): the `if'/`return' introducing this msgid is elided
   (original lines 160-162 missing here).  */
163 N_("function with label addresses used in initializers cannot inline");
/* A previously-computed reason (itself a msgid) takes precedence.  */
165 if (current_function_cannot_inline)
166 return current_function_cannot_inline;
168 /* If it's not even close, don't even look. */
169 if (get_max_uid () > 3 * max_insns)
170 return N_("function too large to be inline");
173 /* Don't inline functions which do not specify a function prototype and
174 have BLKmode argument or take the address of a parameter. */
175 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
177 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
178 TREE_ADDRESSABLE (parms) = 1;
179 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
180 return N_("no prototype, and parameter address used; cannot be inline");
184 /* We can't inline functions that return structures
185 the old-fashioned PCC way, copying into a static block. */
186 if (current_function_returns_pcc_struct)
187 return N_("inline functions not supported for this return value type");
189 /* We can't inline functions that return structures of varying size. */
190 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
191 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
192 return N_("function with varying-size return value cannot be inline");
194 /* Cannot inline a function with a varying size argument or one that
195 receives a transparent union. */
196 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
198 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
199 return N_("function with varying-size parameter cannot be inline");
200 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
201 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
/* NOTE(review): "unit" below looks like a typo for "union" (the test above
   checks UNION_TYPE); the runtime string is left unchanged here.  */
202 return N_("function with transparent unit parameter cannot be inline");
/* Only bother with an exact insn count if the cheap UID bound failed.  */
205 if (get_max_uid () > max_insns)
207 for (ninsns = 0, insn = get_first_nonparm_insn ();
208 insn && ninsns < max_insns;
209 insn = NEXT_INSN (insn))
213 if (ninsns >= max_insns)
214 return N_("function too large to be inline");
217 /* We will not inline a function which uses computed goto. The addresses of
218 its local labels, which may be tucked into global storage, are of course
219 not constant across instantiations, which causes unexpected behaviour. */
220 if (current_function_has_computed_jump)
221 return N_("function with computed jump cannot inline");
223 /* We cannot inline a nested function that jumps to a nonlocal label. */
224 if (current_function_has_nonlocal_goto)
225 return N_("function with nonlocal goto cannot be inline");
227 /* We can't inline functions that return a PARALLEL rtx. */
228 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
230 rtx result = DECL_RTL (DECL_RESULT (fndecl));
231 if (GET_CODE (result) == PARALLEL)
232 return N_("inline functions not supported for this return value type");
235 /* If the function has a target specific attribute attached to it,
236 then we assume that we should not inline it. This can be overridden
237 by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P. */
238 if (DECL_MACHINE_ATTRIBUTES (fndecl)
239 && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
240 return N_("function with target specific attribute(s) cannot be inlined");
245 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
246 Zero for a reg that isn't a parm's home.
247 Only reg numbers less than max_parm_reg are mapped here. */
248 static tree *parmdecl_map;
250 /* In save_for_inline, nonzero if past the parm-initialization insns. */
251 static int in_nonparm_insns;
253 /* Subroutine for `save_for_inline'. Performs initialization
254 needed to save FNDECL's insns and info for future inline expansion. */
/* NOTE(review): the return type (an rtvec per the prototype at original
   line 71), the declarations of PARMS/I/ARG_VECTOR, braces, and the final
   return of ARG_VECTOR are elided from this excerpt.  */
257 initialize_for_inline (fndecl)
264 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
265 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
/* One vector slot per formal parameter; filled with each parm's RTL.  */
266 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
268 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
270 parms = TREE_CHAIN (parms), i++)
272 rtx p = DECL_RTL (parms);
274 /* If we have (mem (addressof (mem ...))), use the inner MEM since
275 otherwise the copy_rtx call below will not unshare the MEM since
276 it shares ADDRESSOF. */
277 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
278 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
279 p = XEXP (XEXP (p, 0), 0);
281 RTVEC_ELT (arg_vector, i) = p;
/* Record which pseudo-reg(s) hold this parm so that later stores into
   them can be traced back to the PARM_DECL.  */
283 if (GET_CODE (p) == REG)
284 parmdecl_map[REGNO (p)] = parms;
285 else if (GET_CODE (p) == CONCAT)
/* A complex value lives in two registers: real and imaginary parts.  */
287 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
288 rtx pimag = gen_imagpart (GET_MODE (preal), p);
290 if (GET_CODE (preal) == REG)
291 parmdecl_map[REGNO (preal)] = parms;
292 if (GET_CODE (pimag) == REG)
293 parmdecl_map[REGNO (pimag)] = parms;
296 /* This flag is cleared later
297 if the function ever modifies the value of the parm. */
298 TREE_READONLY (parms) = 1;
304 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
305 originally was in the FROM_FN, but now it will be in the
/* NOTE(review): the tail of the comment above, the parameter declarations,
   the `tree copy;' local, the bodies of the first two context branches, and
   the final `return copy;' are elided from this excerpt (original numbering
   jumps 305 -> 309 -> 316 ...).  */
309 copy_decl_for_inlining (decl, from_fn, to_fn)
316 /* Copy the declaration. */
317 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
319 /* For a parameter, we must make an equivalent VAR_DECL, not a
321 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
/* Carry over the qualifiers relevant to a variable.  */
322 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
323 TREE_READONLY (copy) = TREE_READONLY (decl);
324 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
/* Non-parameter DECLs are duplicated wholesale.  */
328 copy = copy_node (decl);
329 if (DECL_LANG_SPECIFIC (copy))
330 copy_lang_decl (copy);
332 /* TREE_ADDRESSABLE isn't used to indicate that a label's
333 address has been taken; it's for internal bookkeeping in
334 expand_goto_internal. */
335 if (TREE_CODE (copy) == LABEL_DECL)
336 TREE_ADDRESSABLE (copy) = 0;
339 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
340 declaration inspired this copy. */
341 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
343 /* The new variable/label has no RTL, yet. */
344 SET_DECL_RTL (copy, NULL_RTX);
346 /* These args would always appear unused, if not for this. */
347 TREE_USED (copy) = 1;
349 /* Set the context for the new declaration. */
350 if (!DECL_CONTEXT (decl))
351 /* Globals stay global. */
353 else if (DECL_CONTEXT (decl) != from_fn)
354 /* Things that weren't in the scope of the function we're inlining
355 from aren't in the scope we're inlining to, either. */
357 else if (TREE_STATIC (decl))
358 /* Function-scoped static variables should stay in the original
362 /* Ordinary automatic local variables are now in the scope of the
364 DECL_CONTEXT (copy) = to_fn;
369 /* Make the insns and PARM_DECLs of the current function permanent
370 and record other information in DECL_SAVED_INSNS to allow inlining
371 of this function in subsequent calls.
373 This routine need not copy any insns because we are not going
374 to immediately compile the insns in the insn chain. There
375 are two cases when we would compile the insns for FNDECL:
376 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
377 be output at the end of other compilation, because somebody took
378 its address. In the first case, the insns of FNDECL are copied
379 as it is expanded inline, so FNDECL's saved insns are not
380 modified. In the second case, FNDECL is used for the last time,
381 so modifying the rtl is not a problem.
383 We don't have to worry about FNDECL being inline expanded by
384 other functions which are written at the end of compilation
385 because flag_no_inline is turned on when we begin writing
386 functions at the end of compilation. */
/* NOTE(review): the return type, the declarations of INSN and ARGVEC,
   braces, the free of PARMDECL_MAP, and the loop header that skips leading
   NOTE insns (around original lines 417-420) are elided from this excerpt. */
389 save_for_inline (fndecl)
394 rtx first_nonparm_insn;
396 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
397 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
398 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
399 for the parms, prior to elimination of virtual registers.
400 These values are needed for substituting parms properly. */
402 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
404 /* Make and emit a return-label if we have not already done so. */
406 if (return_label == 0)
408 return_label = gen_label_rtx ();
409 emit_label (return_label);
/* Capture the parm RTL and populate PARMDECL_MAP (see above).  */
412 argvec = initialize_for_inline (fndecl);
414 /* If there are insns that copy parms from the stack into pseudo registers,
415 those insns are not copied. `expand_inline_function' must
416 emit the correct code to handle such things. */
419 if (GET_CODE (insn) != NOTE)
422 /* Get the insn which signals the end of parameter setup code. */
423 first_nonparm_insn = get_first_nonparm_insn ();
425 /* Now just scan the chain of insns to see what happens to our
426 PARM_DECLs. If a PARM_DECL is used but never modified, we
427 can substitute its rtl directly when expanding inline (and
428 perform constant folding when its incoming value is constant).
429 Otherwise, we have to copy its value into a new register and track
430 the new register's life. */
431 in_nonparm_insns = 0;
432 save_parm_insns (insn, first_nonparm_insn);
/* Stash everything expand_inline_function will need later in CFUN,
   then publish it via DECL_SAVED_INSNS.  */
434 cfun->inl_max_label_num = max_label_num ();
435 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
436 cfun->original_arg_vector = argvec;
437 cfun->original_decl_initial = DECL_INITIAL (fndecl);
438 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
439 DECL_SAVED_INSNS (fndecl) = cfun;
445 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
446 PARM_DECL is used but never modified, we can substitute its rtl directly
447 when expanding inline (and perform constant folding when its incoming
448 value is constant). Otherwise, we have to copy its value into a new
449 register and track the new register's life. */
/* NOTE(review): the `static void' line, the `rtx insn;' parameter decl,
   the early `return;', the INSN_P filter (original lines 463-465), and the
   second argument of the recursive call are elided from this excerpt.  */
452 save_parm_insns (insn, first_nonparm_insn)
454 rtx first_nonparm_insn;
456 if (insn == NULL_RTX)
459 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
/* Once we pass FIRST_NONPARM_INSN, note_modified_parmregs starts
   clearing TREE_READONLY on modified parms (it checks this flag).  */
461 if (insn == first_nonparm_insn)
462 in_nonparm_insns = 1;
466 /* Record what interesting things happen to our parameters. */
467 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
469 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
470 three attached sequences: normal call, sibling call and tail
472 if (GET_CODE (insn) == CALL_INSN
473 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
/* Recurse into each of the three alternative insn sequences.  */
477 for (i = 0; i < 3; i++)
478 save_parm_insns (XEXP (PATTERN (insn), i),
485 /* Note whether a parameter is modified or not. */
/* note_stores callback: REG is the destination being stored to.  If it is a
   pseudo that holds a parameter and we are past the parm-setup insns, mark
   the parameter as no longer read-only.  NOTE(review): the `static void'
   line, the `rtx reg;' declaration, and the braces are elided here.  */
488 note_modified_parmregs (reg, x, data)
490 rtx x ATTRIBUTE_UNUSED;
491 void *data ATTRIBUTE_UNUSED;
493 if (GET_CODE (reg) == REG && in_nonparm_insns
494 && REGNO (reg) < max_parm_reg
495 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
496 && parmdecl_map[REGNO (reg)] != 0)
497 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
500 /* Unfortunately, we need a global copy of const_equiv map for communication
501 with a function called from note_stores. Be *very* careful that this
502 is used properly in the presence of recursion. */
504 varray_type global_const_equiv_varray;
506 #define FIXED_BASE_PLUS_P(X) \
507 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
508 && GET_CODE (XEXP (X, 0)) == REG \
509 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
510 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
512 /* Called to set up a mapping for the case where a parameter is in a
513 register. If it is read-only and our argument is a constant, set up the
514 constant equivalence.
516 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
519 Also, don't allow hard registers here; they might not be valid when
520 substituted into insns. */
/* NOTE(review): the `static void' line, the `rtx loc, copy;' parameter
   declarations, braces, and the statement(s) between original lines 535 and
   538 (presumably `copy = temp;' so the fresh pseudo is what gets mapped)
   are elided -- confirm against the full source.  */
522 process_reg_param (map, loc, copy)
523 struct inline_remap *map;
/* COPY must be forced into a fresh pseudo when: it is not a (SUB)REG at
   all, its user-variable flag does not match LOC's, or it is a hard reg.  */
526 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
527 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
528 && ! REG_USERVAR_P (copy))
529 || (GET_CODE (copy) == REG
530 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
532 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
533 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
/* Remember the constant equivalence so subst_constants can fold it.  */
534 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
535 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
/* Point the parm's original pseudo at the value it receives.  */
538 map->reg_map[REGNO (loc)] = copy;
541 /* Compare two BLOCKs for qsort. The key we sort on is the
542 BLOCK_ABSTRACT_ORIGIN of the blocks. */
/* NOTE(review): the `static int' line, the `const PTR' parameter
   declarations, and braces are elided here.  Also, ordering blocks by
   subtracting two unrelated char* pointers and narrowing to int is
   implementation-defined and can wrap on hosts where pointers are wider
   than int -- flagging, not changing, since the body is elided.  */
545 compare_blocks (v1, v2)
549 tree b1 = *((const tree *) v1);
550 tree b2 = *((const tree *) v2);
552 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
553 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
556 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
557 an original block; the second to a remapped equivalent. */
/* NOTE(review): the function header itself (`static int find_block (v1, v2)'
   per the prototype at original line 91) and the parameter declarations are
   elided from this excerpt; only the locals and return remain.  The same
   pointer-difference-narrowed-to-int caveat as compare_blocks applies.  */
564 const union tree_node *b1 = (const union tree_node *) v1;
565 tree b2 = *((const tree *) v2);
567 return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
570 /* Integrate the procedure defined by FNDECL. Note that this function
571 may wind up calling itself. Since the static variables are not
572 reentrant, we do not assign them until after the possibility
573 of recursion is eliminated.
575 If IGNORE is nonzero, do not produce a value.
576 Otherwise store the value in TARGET if it is nonzero and that is convenient.
579 (rtx)-1 if we could not substitute the function
580 0 if we substituted it and it does not produce a value
581 else an rtx for where the value is stored. */
584 expand_inline_function (fndecl, parms, target, ignore, type,
585 structure_value_addr)
590 rtx structure_value_addr;
592 struct function *inlining_previous;
593 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
594 tree formal, actual, block;
595 rtx parm_insns = inl_f->emit->x_first_insn;
596 rtx insns = (inl_f->inl_last_parm_insn
597 ? NEXT_INSN (inl_f->inl_last_parm_insn)
603 int min_labelno = inl_f->emit->x_first_label_num;
604 int max_labelno = inl_f->inl_max_label_num;
609 struct inline_remap *map = 0;
613 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
614 rtx static_chain_value = 0;
616 int eh_region_offset;
618 /* The pointer used to track the true location of the memory used
619 for MAP->LABEL_MAP. */
620 rtx *real_label_map = 0;
622 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
623 max_regno = inl_f->emit->x_reg_rtx_no + 3;
624 if (max_regno < FIRST_PSEUDO_REGISTER)
627 /* Pull out the decl for the function definition; fndecl may be a
628 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
629 fndecl = inl_f->decl;
631 nargs = list_length (DECL_ARGUMENTS (fndecl));
633 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
634 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
636 /* Check that the parms type match and that sufficient arguments were
637 passed. Since the appropriate conversions or default promotions have
638 already been applied, the machine modes should match exactly. */
640 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
642 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
645 enum machine_mode mode;
648 return (rtx) (HOST_WIDE_INT) -1;
650 arg = TREE_VALUE (actual);
651 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
653 if (mode != TYPE_MODE (TREE_TYPE (arg))
654 /* If they are block mode, the types should match exactly.
655 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
656 which could happen if the parameter has incomplete type. */
658 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
659 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
660 return (rtx) (HOST_WIDE_INT) -1;
663 /* Extra arguments are valid, but will be ignored below, so we must
664 evaluate them here for side-effects. */
665 for (; actual; actual = TREE_CHAIN (actual))
666 expand_expr (TREE_VALUE (actual), const0_rtx,
667 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
669 /* Expand the function arguments. Do this first so that any
670 new registers get created before we allocate the maps. */
672 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
673 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
675 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
677 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
679 /* Actual parameter, converted to the type of the argument within the
681 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
682 /* Mode of the variable used within the function. */
683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
687 loc = RTVEC_ELT (arg_vector, i);
689 /* If this is an object passed by invisible reference, we copy the
690 object into a stack slot and save its address. If this will go
691 into memory, we do nothing now. Otherwise, we just expand the
693 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
694 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
696 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
698 store_expr (arg, stack_slot, 0);
699 arg_vals[i] = XEXP (stack_slot, 0);
702 else if (GET_CODE (loc) != MEM)
704 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
705 /* The mode of LOC and ARG can differ if LOC was a variable
706 that had its mode promoted via PROMOTED_MODE. */
707 arg_vals[i] = convert_modes (GET_MODE (loc),
708 TYPE_MODE (TREE_TYPE (arg)),
709 expand_expr (arg, NULL_RTX, mode,
711 TREE_UNSIGNED (TREE_TYPE (formal)));
713 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
719 && (! TREE_READONLY (formal)
720 /* If the parameter is not read-only, copy our argument through
721 a register. Also, we cannot use ARG_VALS[I] if it overlaps
722 TARGET in any way. In the inline function, they will likely
723 be two different pseudos, and `safe_from_p' will make all
724 sorts of smart assumptions about their not conflicting.
725 But if ARG_VALS[I] overlaps TARGET, these assumptions are
726 wrong, so put ARG_VALS[I] into a fresh register.
727 Don't worry about invisible references, since their stack
728 temps will never overlap the target. */
731 && (GET_CODE (arg_vals[i]) == REG
732 || GET_CODE (arg_vals[i]) == SUBREG
733 || GET_CODE (arg_vals[i]) == MEM)
734 && reg_overlap_mentioned_p (arg_vals[i], target))
735 /* ??? We must always copy a SUBREG into a REG, because it might
736 get substituted into an address, and not all ports correctly
737 handle SUBREGs in addresses. */
738 || (GET_CODE (arg_vals[i]) == SUBREG)))
739 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
741 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
742 && POINTER_TYPE_P (TREE_TYPE (formal)))
743 mark_reg_pointer (arg_vals[i],
744 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
747 /* Allocate the structures we use to remap things. */
749 map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
750 map->fndecl = fndecl;
752 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
753 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
755 /* We used to use alloca here, but the size of what it would try to
756 allocate would occasionally cause it to exceed the stack limit and
757 cause unpredictable core dumps. */
759 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
760 map->label_map = real_label_map;
761 map->local_return_label = NULL_RTX;
763 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
764 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
766 map->max_insnno = inl_max_uid;
768 map->integrating = 1;
769 map->compare_src = NULL_RTX;
770 map->compare_mode = VOIDmode;
772 /* const_equiv_varray maps pseudos in our routine to constants, so
773 it needs to be large enough for all our pseudos. This is the
774 number we are currently using plus the number in the called
775 routine, plus 15 for each arg, five to compute the virtual frame
776 pointer, and five for the return value. This should be enough
777 for most cases. We do not reference entries outside the range of
780 ??? These numbers are quite arbitrary and were obtained by
781 experimentation. At some point, we should try to allocate the
782 table after all the parameters are set up so we an more accurately
783 estimate the number of pseudos we will need. */
785 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
787 + (max_regno - FIRST_PSEUDO_REGISTER)
790 "expand_inline_function");
793 /* Record the current insn in case we have to set up pointers to frame
794 and argument memory blocks. If there are no insns yet, add a dummy
795 insn that can be used as an insertion point. */
796 map->insns_at_start = get_last_insn ();
797 if (map->insns_at_start == 0)
798 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
800 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
801 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
803 /* Update the outgoing argument size to allow for those in the inlined
805 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
806 current_function_outgoing_args_size = inl_f->outgoing_args_size;
808 /* If the inline function needs to make PIC references, that means
809 that this function's PIC offset table must be used. */
810 if (inl_f->uses_pic_offset_table)
811 current_function_uses_pic_offset_table = 1;
813 /* If this function needs a context, set it up. */
814 if (inl_f->needs_context)
815 static_chain_value = lookup_static_chain (fndecl);
817 if (GET_CODE (parm_insns) == NOTE
818 && NOTE_LINE_NUMBER (parm_insns) > 0)
820 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
821 NOTE_LINE_NUMBER (parm_insns));
823 RTX_INTEGRATED_P (note) = 1;
826 /* Process each argument. For each, set up things so that the function's
827 reference to the argument will refer to the argument being passed.
828 We only replace REG with REG here. Any simplifications are done
831 We make two passes: In the first, we deal with parameters that will
832 be placed into registers, since we need to ensure that the allocated
833 register number fits in const_equiv_map. Then we store all non-register
834 parameters into their memory location. */
836 /* Don't try to free temp stack slots here, because we may put one of the
837 parameters into a temp stack slot. */
839 for (i = 0; i < nargs; i++)
841 rtx copy = arg_vals[i];
843 loc = RTVEC_ELT (arg_vector, i);
845 /* There are three cases, each handled separately. */
846 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
847 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
849 /* This must be an object passed by invisible reference (it could
850 also be a variable-sized object, but we forbid inlining functions
851 with variable-sized arguments). COPY is the address of the
852 actual value (this computation will cause it to be copied). We
853 map that address for the register, noting the actual address as
854 an equivalent in case it can be substituted into the insns. */
856 if (GET_CODE (copy) != REG)
858 temp = copy_addr_to_reg (copy);
859 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
860 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
863 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
865 else if (GET_CODE (loc) == MEM)
867 /* This is the case of a parameter that lives in memory. It
868 will live in the block we allocate in the called routine's
869 frame that simulates the incoming argument area. Do nothing
870 with the parameter now; we will call store_expr later. In
871 this case, however, we must ensure that the virtual stack and
872 incoming arg rtx values are expanded now so that we can be
873 sure we have enough slots in the const equiv map since the
874 store_expr call can easily blow the size estimate. */
875 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
876 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
878 else if (GET_CODE (loc) == REG)
879 process_reg_param (map, loc, copy);
880 else if (GET_CODE (loc) == CONCAT)
882 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
883 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
884 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
885 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
887 process_reg_param (map, locreal, copyreal);
888 process_reg_param (map, locimag, copyimag);
894 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
895 specially. This function can be called recursively, so we need to
896 save the previous value. */
897 inlining_previous = inlining;
900 /* Now do the parameters that will be placed in memory. */
902 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
903 formal; formal = TREE_CHAIN (formal), i++)
905 loc = RTVEC_ELT (arg_vector, i);
907 if (GET_CODE (loc) == MEM
908 /* Exclude case handled above. */
909 && ! (GET_CODE (XEXP (loc, 0)) == REG
910 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
912 rtx note = emit_note (DECL_SOURCE_FILE (formal),
913 DECL_SOURCE_LINE (formal));
915 RTX_INTEGRATED_P (note) = 1;
917 /* Compute the address in the area we reserved and store the
919 temp = copy_rtx_and_substitute (loc, map, 1);
920 subst_constants (&temp, NULL_RTX, map, 1);
921 apply_change_group ();
922 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
923 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
924 store_expr (arg_trees[i], temp, 0);
928 /* Deal with the places that the function puts its result.
929 We are driven by what is placed into DECL_RESULT.
931 Initially, we assume that we don't have anything special handling for
932 REG_FUNCTION_RETURN_VALUE_P. */
934 map->inline_target = 0;
935 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
936 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
938 if (TYPE_MODE (type) == VOIDmode)
939 /* There is no return value to worry about. */
941 else if (GET_CODE (loc) == MEM)
943 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
945 temp = copy_rtx_and_substitute (loc, map, 1);
946 subst_constants (&temp, NULL_RTX, map, 1);
947 apply_change_group ();
952 if (! structure_value_addr
953 || ! aggregate_value_p (DECL_RESULT (fndecl)))
956 /* Pass the function the address in which to return a structure
957 value. Note that a constructor can cause someone to call us
958 with STRUCTURE_VALUE_ADDR, but the initialization takes place
959 via the first parameter, rather than the struct return address.
961 We have two cases: If the address is a simple register
962 indirect, use the mapping mechanism to point that register to
963 our structure return address. Otherwise, store the structure
964 return value into the place that it will be referenced from. */
966 if (GET_CODE (XEXP (loc, 0)) == REG)
968 temp = force_operand (structure_value_addr, NULL_RTX);
969 temp = force_reg (Pmode, temp);
970 /* A virtual register might be invalid in an insn, because
971 it can cause trouble in reload. Since we don't have access
972 to the expanders at map translation time, make sure we have
973 a proper register now.
974 If a virtual register is actually valid, cse or combine
975 can put it into the mapped insns. */
976 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
977 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
978 temp = copy_to_mode_reg (Pmode, temp);
979 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
981 if (CONSTANT_P (structure_value_addr)
982 || GET_CODE (structure_value_addr) == ADDRESSOF
983 || (GET_CODE (structure_value_addr) == PLUS
984 && (XEXP (structure_value_addr, 0)
985 == virtual_stack_vars_rtx)
986 && (GET_CODE (XEXP (structure_value_addr, 1))
989 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
995 temp = copy_rtx_and_substitute (loc, map, 1);
996 subst_constants (&temp, NULL_RTX, map, 0);
997 apply_change_group ();
998 emit_move_insn (temp, structure_value_addr);
1003 /* We will ignore the result value, so don't look at its structure.
1004 Note that preparations for an aggregate return value
1005 do need to be made (above) even if it will be ignored. */
1007 else if (GET_CODE (loc) == REG)
1009 /* The function returns an object in a register and we use the return
1010 value. Set up our target for remapping. */
1012 /* Machine mode function was declared to return. */
1013 enum machine_mode departing_mode = TYPE_MODE (type);
1014 /* (Possibly wider) machine mode it actually computes
1015 (for the sake of callers that fail to declare it right).
1016 We have to use the mode of the result's RTL, rather than
1017 its type, since expand_function_start may have promoted it. */
1018 enum machine_mode arriving_mode
1019 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1022 /* Don't use MEMs as direct targets because on some machines
1023 substituting a MEM for a REG makes invalid insns.
1024 Let the combiner substitute the MEM if that is valid. */
1025 if (target == 0 || GET_CODE (target) != REG
1026 || GET_MODE (target) != departing_mode)
1028 /* Don't make BLKmode registers. If this looks like
1029 a BLKmode object being returned in a register, get
1030 the mode from that, otherwise abort. */
1031 if (departing_mode == BLKmode)
1033 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1035 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1036 arriving_mode = departing_mode;
1042 target = gen_reg_rtx (departing_mode);
1045 /* If function's value was promoted before return,
1046 avoid machine mode mismatch when we substitute INLINE_TARGET.
1047 But TARGET is what we will return to the caller. */
1048 if (arriving_mode != departing_mode)
1050 /* Avoid creating a paradoxical subreg wider than
1051 BITS_PER_WORD, since that is illegal. */
1052 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1054 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1055 GET_MODE_BITSIZE (arriving_mode)))
1056 /* Maybe could be handled by using convert_move () ? */
1058 reg_to_map = gen_reg_rtx (arriving_mode);
1059 target = gen_lowpart (departing_mode, reg_to_map);
1062 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1065 reg_to_map = target;
1067 /* Usually, the result value is the machine's return register.
1068 Sometimes it may be a pseudo. Handle both cases. */
1069 if (REG_FUNCTION_VALUE_P (loc))
1070 map->inline_target = reg_to_map;
1072 map->reg_map[REGNO (loc)] = reg_to_map;
1077 /* Initialize label_map. get_label_from_map will actually make
1079 memset ((char *) &map->label_map[min_labelno], 0,
1080 (max_labelno - min_labelno) * sizeof (rtx));
1082 /* Make copies of the decls of the symbols in the inline function, so that
1083 the copies of the variables get declared in the current function. Set
1084 up things so that lookup_static_chain knows that to interpret registers
1085 in SAVE_EXPRs for TYPE_SIZEs as local. */
1086 inline_function_decl = fndecl;
1087 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1088 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1089 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1090 inline_function_decl = 0;
1092 /* Make a fresh binding contour that we can easily remove. Do this after
1093 expanding our arguments so cleanups are properly scoped. */
1094 expand_start_bindings_and_block (0, block);
1096 /* Sort the block-map so that it will be easy to find remapped
1098 qsort (&VARRAY_TREE (map->block_map, 0),
1099 map->block_map->elements_used,
1103 /* Perform postincrements before actually calling the function. */
1106 /* Clean up stack so that variables might have smaller offsets. */
1107 do_pending_stack_adjust ();
1109 /* Save a copy of the location of const_equiv_varray for
1110 mark_stores, called via note_stores. */
1111 global_const_equiv_varray = map->const_equiv_varray;
1113 /* If the called function does an alloca, save and restore the
1114 stack pointer around the call. This saves stack space, but
1115 also is required if this inline is being done between two
1117 if (inl_f->calls_alloca)
1118 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1120 /* Now copy the insns one by one. */
1121 copy_insn_list (insns, map, static_chain_value);
1123 /* Duplicate the EH regions. This will create an offset from the
1124 region numbers in the function we're inlining to the region
1125 numbers in the calling function. This must wait until after
1126 copy_insn_list, as we need the insn map to be complete. */
1127 eh_region_offset = duplicate_eh_regions (inl_f, map);
1129 /* Now copy the REG_NOTES for those insns. */
1130 copy_insn_notes (insns, map, eh_region_offset);
1132 /* If the insn sequence required one, emit the return label. */
1133 if (map->local_return_label)
1134 emit_label (map->local_return_label);
1136 /* Restore the stack pointer if we saved it above. */
1137 if (inl_f->calls_alloca)
1138 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1140 if (! cfun->x_whole_function_mode_p)
1141 /* In statement-at-a-time mode, we just tell the front-end to add
1142 this block to the list of blocks at this binding level. We
1143 can't do it the way it's done for function-at-a-time mode the
1144 superblocks have not been created yet. */
1145 insert_block (block);
1149 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1150 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1153 /* End the scope containing the copied formal parameter variables
1154 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1155 here so that expand_end_bindings will not check for unused
1156 variables. That's already been checked for when the inlined
1157 function was defined. */
1158 expand_end_bindings (NULL_TREE, 1, 1);
1160 /* Must mark the line number note after inlined functions as a repeat, so
1161 that the test coverage code can avoid counting the call twice. This
1162 just tells the code to ignore the immediately following line note, since
1163 there already exists a copy of this note before the expanded inline call.
1164 This line number note is still needed for debugging though, so we can't
1166 if (flag_test_coverage)
1167 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1169 emit_line_note (input_filename, lineno);
1171 /* If the function returns a BLKmode object in a register, copy it
1172 out of the temp register into a BLKmode memory object. */
1174 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1175 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1176 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1178 if (structure_value_addr)
1180 target = gen_rtx_MEM (TYPE_MODE (type),
1181 memory_address (TYPE_MODE (type),
1182 structure_value_addr));
1183 set_mem_attributes (target, type, 1);
1186 /* Make sure we free the things we explicitly allocated with xmalloc. */
1188 free (real_label_map);
1189 VARRAY_FREE (map->const_equiv_varray);
1190 free (map->reg_map);
1191 VARRAY_FREE (map->block_map);
1192 free (map->insn_map);
1197 inlining = inlining_previous;
1202 /* Make copies of each insn in the given list using the mapping
1203 computed in expand_inline_function. This function may call itself for
1204 insns containing sequences.
1206 Copying is done in two passes, first the insns and then their REG_NOTES.
1208 If static_chain_value is non-zero, it represents the context-pointer
1209 register for the function. */
/* NOTE(review): this is a line-numbered listing of the original file with
   many lines missing (gaps in the embedded numbering: case labels, braces,
   declarations).  Only review comments are added here; every code token is
   left exactly as extracted.  Consult the complete integrate.c before
   making code changes. */
1212 copy_insn_list (insns, map, static_chain_value)
1214 struct inline_remap *map;
1215 rtx static_chain_value;
1224 /* Copy the insns one by one. Do this in two passes, first the insns and
1225 then their REG_NOTES. */
1227 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1229 for (insn = insns; insn; insn = NEXT_INSN (insn))
1231 rtx copy, pattern, set;
1233 map->orig_asm_operands_vector = 0;
/* NOTE(review): dispatch on the insn's rtx code.  The original case labels
   (presumably INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER, NOTE) are
   among the lines missing from this listing. */
1235 switch (GET_CODE (insn))
1238 pattern = PATTERN (insn);
1239 set = single_set (insn);
1241 if (GET_CODE (pattern) == USE
1242 && GET_CODE (XEXP (pattern, 0)) == REG
1243 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1244 /* The (USE (REG n)) at return from the function should
1245 be ignored since we are changing (REG n) into
1249 /* Ignore setting a function value that we don't want to use. */
1250 if (map->inline_target == 0
1252 && GET_CODE (SET_DEST (set)) == REG
1253 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1255 if (volatile_refs_p (SET_SRC (set)))
1259 /* If we must not delete the source,
1260 load it into a new temporary. */
1261 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1263 new_set = single_set (copy);
1268 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1270 /* If the source and destination are the same and it
1271 has a note on it, keep the insn. */
1272 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1273 && REG_NOTES (insn) != 0)
1274 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1279 /* Similarly if an ignored return value is clobbered. */
1280 else if (map->inline_target == 0
1281 && GET_CODE (pattern) == CLOBBER
1282 && GET_CODE (XEXP (pattern, 0)) == REG
1283 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1286 /* If this is setting the static chain rtx, omit it. */
1287 else if (static_chain_value != 0
1289 && GET_CODE (SET_DEST (set)) == REG
1290 && rtx_equal_p (SET_DEST (set),
1291 static_chain_incoming_rtx))
1294 /* If this is setting the static chain pseudo, set it from
1295 the value we want to give it instead. */
1296 else if (static_chain_value != 0
1298 && rtx_equal_p (SET_SRC (set),
1299 static_chain_incoming_rtx))
1301 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1303 copy = emit_move_insn (newdest, static_chain_value);
/* Clearing static_chain_value ensures the chain is materialized at most
   once per inlined body. */
1304 static_chain_value = 0;
1307 /* If this is setting the virtual stack vars register, this must
1308 be the code at the handler for a builtin longjmp. The value
1309 saved in the setjmp buffer will be the address of the frame
1310 we've made for this inlined instance within our frame. But we
1311 know the offset of that value so we can use it to reconstruct
1312 our virtual stack vars register from that value. If we are
1313 copying it from the stack pointer, leave it unchanged. */
1315 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1317 HOST_WIDE_INT offset;
1318 temp = map->reg_map[REGNO (SET_DEST (set))];
1319 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1322 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1324 else if (GET_CODE (temp) == PLUS
1325 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1326 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1327 offset = INTVAL (XEXP (temp, 1));
1331 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1332 temp = SET_SRC (set);
1334 temp = force_operand (plus_constant (SET_SRC (set),
1338 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
/* Default case for ordinary insns: copy the pattern with register
   remapping and constant substitution applied. */
1342 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1343 /* REG_NOTES will be copied later. */
1346 /* If this insn is setting CC0, it may need to look at
1347 the insn that uses CC0 to see what type of insn it is.
1348 In that case, the call to recog via validate_change will
1349 fail. So don't substitute constants here. Instead,
1350 do it when we emit the following insn.
1352 For example, see the pyr.md file. That machine has signed and
1353 unsigned compares. The compare patterns must check the
1354 following branch insn to see what kind of compare to
1357 If the previous insn set CC0, substitute constants on it as
1359 if (sets_cc0_p (PATTERN (copy)) != 0)
1364 try_constants (cc0_insn, map);
1366 try_constants (copy, map);
1369 try_constants (copy, map);
/* Jump handling: a `return' jump inside the inlined body must instead
   branch to a local label at the end of the expansion in the caller. */
1374 if (map->integrating && returnjump_p (insn))
1376 if (map->local_return_label == 0)
1377 map->local_return_label = gen_label_rtx ();
1378 pattern = gen_jump (map->local_return_label);
1381 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1383 copy = emit_jump_insn (pattern);
1387 try_constants (cc0_insn, map);
1390 try_constants (copy, map);
1392 /* If this used to be a conditional jump insn but whose branch
1393 direction is now known, we must do something special. */
1394 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1397 /* If the previous insn set cc0 for us, delete it. */
1398 if (sets_cc0_p (PREV_INSN (copy)))
1399 delete_insn (PREV_INSN (copy));
1402 /* If this is now a no-op, delete it. */
1403 if (map->last_pc_value == pc_rtx)
1409 /* Otherwise, this is unconditional jump so we must put a
1410 BARRIER after it. We could do some dead code elimination
1411 here, but jump.c will do it just as well. */
1417 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1418 three attached sequences: normal call, sibling call and tail
1420 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
/* Each of the three alternative sequences is copied by a recursive call
   to copy_insn_list; the results are collected in sequence[]. */
1425 for (i = 0; i < 3; i++)
1429 sequence[i] = NULL_RTX;
1430 seq = XEXP (PATTERN (insn), i);
1434 copy_insn_list (seq, map, static_chain_value);
1435 sequence[i] = get_insns ();
1440 /* Find the new tail recursion label.
1441 It will already be substituted into sequence[2]. */
1442 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1445 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
/* Ordinary CALL_INSN: copy the pattern and propagate the sibling-call
   and const-call flags from the original insn. */
1453 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1454 copy = emit_call_insn (pattern);
1456 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1457 CONST_CALL_P (copy) = CONST_CALL_P (insn);
1459 /* Because the USAGE information potentially contains objects other
1460 than hard registers, we need to copy it. */
1462 CALL_INSN_FUNCTION_USAGE (copy)
1463 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1468 try_constants (cc0_insn, map);
1471 try_constants (copy, map);
1473 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1474 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1475 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
/* CODE_LABEL: emit the caller-side label previously allocated in
   map's label_map, preserving its name. */
1479 copy = emit_label (get_label_from_map (map,
1480 CODE_LABEL_NUMBER (insn)));
1481 LABEL_NAME (copy) = LABEL_NAME (insn);
1486 copy = emit_barrier ();
1490 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1491 discarded because it is important to have only one of
1492 each in the current function.
1494 NOTE_INSN_DELETED notes aren't useful.
1496 NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
1497 pointer (which will soon be dangling) confuses flow's
1498 attempts to preserve bb structures during the compilation
1501 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1502 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1503 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
1504 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1506 copy = emit_note (NOTE_SOURCE_FILE (insn),
1507 NOTE_LINE_NUMBER (insn));
/* Block-boundary notes must point at the remapped BLOCK; look the old
   BLOCK up in the sorted block_map via bsearch. */
1509 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1510 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1511 && NOTE_BLOCK (insn))
1513 tree *mapped_block_p;
1516 = (tree *) bsearch (NOTE_BLOCK (insn),
1517 &VARRAY_TREE (map->block_map, 0),
1518 map->block_map->elements_used,
1522 if (!mapped_block_p)
1525 NOTE_BLOCK (copy) = *mapped_block_p;
/* Mark every copy as coming from an integrated (inlined) function and
   record the old-insn -> new-insn correspondence for the notes pass. */
1537 RTX_INTEGRATED_P (copy) = 1;
1539 map->insn_map[INSN_UID (insn)] = copy;
1543 /* Copy the REG_NOTES. Increment const_age, so that only constants
1544 from parameters can be substituted in. These are the only ones
1545 that are valid across the entire function. */
/* NOTE(review): this is a line-numbered listing with gaps; local variable
   declarations (insn, new_insn, i) and several braces were lost in
   extraction.  The const_age increment mentioned above is presumably among
   the missing lines — confirm against the full integrate.c.
   Fix applied below: line 1571 read "subst_constants (¬e, ...)" — a
   mis-decoded "&note" (the "&not" prefix was collapsed to the ¬ sign).
   Restored to take the note chain's address, matching every other
   subst_constants call site in this file. */
1548 copy_insn_notes (insns, map, eh_region_offset)
1550 struct inline_remap *map;
1551 int eh_region_offset;
1556 for (insn = insns; insn; insn = NEXT_INSN (insn))
1558 if (! INSN_P (insn))
1561 new_insn = map->insn_map[INSN_UID (insn)];
1565 if (REG_NOTES (insn))
1567 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1569 /* We must also do subst_constants, in case one of our parameters
1570 has const type and constant value. */
1571 subst_constants (&note, NULL_RTX, map, 0);
1572 apply_change_group ();
1573 REG_NOTES (new_insn) = note;
1575 /* Delete any REG_LABEL notes from the chain. Remap any
1576 REG_EH_REGION notes. */
1577 for (; note; note = next)
1579 next = XEXP (note, 1);
1580 if (REG_NOTE_KIND (note) == REG_LABEL)
1581 remove_note (new_insn, note);
1582 else if (REG_NOTE_KIND (note) == REG_EH_REGION)
1583 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1584 + eh_region_offset);
/* CALL_PLACEHOLDERs carry three sub-sequences; recurse to fix up the
   notes inside each of them as well. */
1588 if (GET_CODE (insn) == CALL_INSN
1589 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1592 for (i = 0; i < 3; i++)
1593 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
/* RESX patterns name an EH region directly; shift it by the offset
   produced by duplicate_eh_regions. */
1596 if (GET_CODE (insn) == JUMP_INSN
1597 && GET_CODE (PATTERN (insn)) == RESX)
1598 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1602 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1603 push all of those decls and give each one the corresponding home. */
/* NOTE(review): line-numbered listing with gaps.  The function's return
   type, the declarations of tail/i, and the "rtx new_decl_rtl" declaration
   that line 1619's "=" continues were all lost in extraction. */
1606 integrate_parm_decls (args, map, arg_vector)
1608 struct inline_remap *map;
1614 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
/* Clone the PARM_DECL into the current (calling) function, then give the
   clone the remapped RTL home of the corresponding argument slot. */
1616 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1617 current_function_decl);
1619 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1621 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1622 here, but that's going to require some more work. */
1623 /* DECL_INCOMING_RTL (decl) = ?; */
1624 /* Fully instantiate the address with the equivalent form so that the
1625 debugging information contains the actual register, instead of the
1626 virtual register. Do this by not passing an insn to
1628 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1629 apply_change_group ();
1630 SET_DECL_RTL (decl, new_decl_rtl);
1634 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1635 current function a tree of contexts isomorphic to the one that is given.
1637 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1638 registers used in the DECL_RTL field should be remapped. If it is zero,
1639 no mapping is necessary. */
/* NOTE(review): line-numbered listing with gaps.  The function's return
   type and the declarations of t, d, r, next and new_block were lost in
   extraction, as were several braces and the final return statement
   (presumably "return new_block;" — confirm against the full file). */
1642 integrate_decl_tree (let, map)
1644 struct inline_remap *map;
/* Build the replacement BLOCK and register it so that block-boundary
   notes copied later can be remapped to it via map->block_map. */
1650 new_block = make_node (BLOCK);
1651 VARRAY_PUSH_TREE (map->block_map, new_block);
1652 next = &BLOCK_VARS (new_block);
/* Copy each variable declared in LET into the current function,
   remapping its DECL_RTL through MAP when it has one. */
1654 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1658 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1660 if (DECL_RTL_SET_P (t))
1664 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1666 /* Fully instantiate the address with the equivalent form so that the
1667 debugging information contains the actual register, instead of the
1668 virtual register. Do this by not passing an insn to
1671 subst_constants (&r, NULL_RTX, map, 1);
1672 SET_DECL_RTL (d, r);
1673 apply_change_group ();
1676 /* Add this declaration to the list of variables in the new
1679 next = &TREE_CHAIN (d);
/* Recurse over the sub-blocks, chaining the copies under new_block and
   pointing their SUPERCONTEXT back at it. */
1682 next = &BLOCK_SUBBLOCKS (new_block);
1683 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1685 *next = integrate_decl_tree (t, map);
1686 BLOCK_SUPERCONTEXT (*next) = new_block;
1687 next = &BLOCK_CHAIN (*next);
1690 TREE_USED (new_block) = TREE_USED (let);
1691 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1696 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1697 except for those few rtx codes that are sharable.
1699 We always return an rtx that is similar to that incoming rtx, with the
1700 exception of possibly changing a REG to a SUBREG or vice versa. No
1701 rtl is ever emitted.
1703 If FOR_LHS is nonzero, if means we are processing something that will
1704 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1705 inlining since we need to be conservative in how it is set for
1708 Handle constants that need to be placed in the constant pool by
1709 calling `force_const_mem'. */
1712 copy_rtx_and_substitute (orig, map, for_lhs)
1714 struct inline_remap *map;
1717 register rtx copy, temp;
1719 register RTX_CODE code;
1720 register enum machine_mode mode;
1721 register const char *format_ptr;
1727 code = GET_CODE (orig);
1728 mode = GET_MODE (orig);
1733 /* If the stack pointer register shows up, it must be part of
1734 stack-adjustments (*not* because we eliminated the frame pointer!).
1735 Small hard registers are returned as-is. Pseudo-registers
1736 go through their `reg_map'. */
1737 regno = REGNO (orig);
1738 if (regno <= LAST_VIRTUAL_REGISTER
1739 || (map->integrating
1740 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1742 /* Some hard registers are also mapped,
1743 but others are not translated. */
1744 if (map->reg_map[regno] != 0
1745 /* We shouldn't usually have reg_map set for return
1746 register, but it may happen if we have leaf-register
1747 remapping and the return register is used in one of
1748 the calling sequences of a call_placeholer. In this
1749 case, we'll end up with a reg_map set for this
1750 register, but we don't want to use for registers
1751 marked as return values. */
1752 && ! REG_FUNCTION_VALUE_P (orig))
1753 return map->reg_map[regno];
1755 /* If this is the virtual frame pointer, make space in current
1756 function's stack frame for the stack frame of the inline function.
1758 Copy the address of this area into a pseudo. Map
1759 virtual_stack_vars_rtx to this pseudo and set up a constant
1760 equivalence for it to be the address. This will substitute the
1761 address into insns where it can be substituted and use the new
1762 pseudo where it can't. */
1763 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1766 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1767 #ifdef FRAME_GROWS_DOWNWARD
1769 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1772 /* In this case, virtual_stack_vars_rtx points to one byte
1773 higher than the top of the frame area. So make sure we
1774 allocate a big enough chunk to keep the frame pointer
1775 aligned like a real one. */
1777 size = CEIL_ROUND (size, alignment);
1780 loc = assign_stack_temp (BLKmode, size, 1);
1781 loc = XEXP (loc, 0);
1782 #ifdef FRAME_GROWS_DOWNWARD
1783 /* In this case, virtual_stack_vars_rtx points to one byte
1784 higher than the top of the frame area. So compute the offset
1785 to one byte higher than our substitute frame. */
1786 loc = plus_constant (loc, size);
1788 map->reg_map[regno] = temp
1789 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1791 #ifdef STACK_BOUNDARY
1792 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1795 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1797 seq = gen_sequence ();
1799 emit_insn_after (seq, map->insns_at_start);
1802 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1803 || (map->integrating
1804 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1807 /* Do the same for a block to contain any arguments referenced
1810 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1813 loc = assign_stack_temp (BLKmode, size, 1);
1814 loc = XEXP (loc, 0);
1815 /* When arguments grow downward, the virtual incoming
1816 args pointer points to the top of the argument block,
1817 so the remapped location better do the same. */
1818 #ifdef ARGS_GROW_DOWNWARD
1819 loc = plus_constant (loc, size);
1821 map->reg_map[regno] = temp
1822 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1824 #ifdef STACK_BOUNDARY
1825 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1828 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1830 seq = gen_sequence ();
1832 emit_insn_after (seq, map->insns_at_start);
1835 else if (REG_FUNCTION_VALUE_P (orig))
1837 /* This is a reference to the function return value. If
1838 the function doesn't have a return value, error. If the
1839 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1840 if (map->inline_target == 0)
1842 if (rtx_equal_function_value_matters)
1843 /* This is an ignored return value. We must not
1844 leave it in with REG_FUNCTION_VALUE_P set, since
1845 that would confuse subsequent inlining of the
1846 current function into a later function. */
1847 return gen_rtx_REG (GET_MODE (orig), regno);
1849 /* Must be unrolling loops or replicating code if we
1850 reach here, so return the register unchanged. */
1853 else if (GET_MODE (map->inline_target) != BLKmode
1854 && mode != GET_MODE (map->inline_target))
1855 return gen_lowpart (mode, map->inline_target);
1857 return map->inline_target;
1859 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1860 /* If leaf_renumber_regs_insn() might remap this register to
1861 some other number, make sure we don't share it with the
1862 inlined function, otherwise delayed optimization of the
1863 inlined function may change it in place, breaking our
1864 reference to it. We may still shared it within the
1865 function, so create an entry for this register in the
1867 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1868 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1870 temp = gen_rtx_REG (mode, regno);
1871 map->reg_map[regno] = temp;
1880 if (map->reg_map[regno] == NULL)
1882 map->reg_map[regno] = gen_reg_rtx (mode);
1883 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1884 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1885 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1886 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1888 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1889 mark_reg_pointer (map->reg_map[regno],
1890 map->regno_pointer_align[regno]);
1892 return map->reg_map[regno];
1895 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1896 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1897 if (GET_CODE (copy) == SUBREG)
1898 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1899 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1900 else if (GET_CODE (copy) == CONCAT)
1902 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1904 if (GET_MODE (retval) == GET_MODE (orig))
1907 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1908 (SUBREG_WORD (orig) %
1909 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1910 / (unsigned) UNITS_PER_WORD)));
1913 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1914 SUBREG_WORD (orig));
1917 copy = gen_rtx_ADDRESSOF (mode,
1918 copy_rtx_and_substitute (XEXP (orig, 0),
1920 0, ADDRESSOF_DECL (orig));
1921 regno = ADDRESSOF_REGNO (orig);
1922 if (map->reg_map[regno])
1923 regno = REGNO (map->reg_map[regno]);
1924 else if (regno > LAST_VIRTUAL_REGISTER)
1926 temp = XEXP (orig, 0);
1927 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1928 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1929 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1930 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1931 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1933 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1934 mark_reg_pointer (map->reg_map[regno],
1935 map->regno_pointer_align[regno]);
1936 regno = REGNO (map->reg_map[regno]);
1938 ADDRESSOF_REGNO (copy) = regno;
1943 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1944 to (use foo) if the original insn didn't have a subreg.
1945 Removing the subreg distorts the VAX movstrhi pattern
1946 by changing the mode of an operand. */
1947 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1948 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1949 copy = SUBREG_REG (copy);
1950 return gen_rtx_fmt_e (code, VOIDmode, copy);
1953 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1954 = LABEL_PRESERVE_P (orig);
1955 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1957 /* We need to handle "deleted" labels that appear in the DECL_RTL
1960 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
1961 return map->insn_map[INSN_UID (orig)];
1968 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1969 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1971 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1973 /* The fact that this label was previously nonlocal does not mean
1974 it still is, so we must check if it is within the range of
1975 this function's labels. */
1976 LABEL_REF_NONLOCAL_P (copy)
1977 = (LABEL_REF_NONLOCAL_P (orig)
1978 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1979 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1981 /* If we have made a nonlocal label local, it means that this
1982 inlined call will be referring to our nonlocal goto handler.
1983 So make sure we create one for this block; we normally would
1984 not since this is not otherwise considered a "call". */
1985 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
1986 function_call_count++;
1996 /* Symbols which represent the address of a label stored in the constant
1997 pool must be modified to point to a constant pool entry for the
1998 remapped label. Otherwise, symbols are returned unchanged. */
1999 if (CONSTANT_POOL_ADDRESS_P (orig))
2001 struct function *f = inlining ? inlining : cfun;
2002 rtx constant = get_pool_constant_for_function (f, orig);
2003 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2006 rtx temp = force_const_mem (const_mode,
2007 copy_rtx_and_substitute (constant,
2011 /* Legitimizing the address here is incorrect.
2013 Since we had a SYMBOL_REF before, we can assume it is valid
2014 to have one in this position in the insn.
2016 Also, change_address may create new registers. These
2017 registers will not have valid reg_map entries. This can
2018 cause try_constants() to fail because assumes that all
2019 registers in the rtx have valid reg_map entries, and it may
2020 end up replacing one of these new registers with junk. */
2022 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2023 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2026 temp = XEXP (temp, 0);
2028 #ifdef POINTERS_EXTEND_UNSIGNED
2029 if (GET_MODE (temp) != GET_MODE (orig))
2030 temp = convert_memory_address (GET_MODE (orig), temp);
2034 else if (GET_CODE (constant) == LABEL_REF)
2035 return XEXP (force_const_mem
2037 copy_rtx_and_substitute (constant, map, for_lhs)),
2044 /* We have to make a new copy of this CONST_DOUBLE because don't want
2045 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2046 duplicate of a CONST_DOUBLE we have already seen. */
2047 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2051 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2052 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2055 return immed_double_const (CONST_DOUBLE_LOW (orig),
2056 CONST_DOUBLE_HIGH (orig), VOIDmode);
2059 /* Make new constant pool entry for a constant
2060 that was in the pool of the inline function. */
2061 if (RTX_INTEGRATED_P (orig))
2066 /* If a single asm insn contains multiple output operands then
2067 it contains multiple ASM_OPERANDS rtx's that share the input
2068 and constraint vecs. We must make sure that the copied insn
2069 continues to share it. */
2070 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2072 copy = rtx_alloc (ASM_OPERANDS);
2073 copy->volatil = orig->volatil;
2074 PUT_MODE (copy, GET_MODE (orig));
2075 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2076 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2077 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2078 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2079 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2080 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2081 = map->copy_asm_constraints_vector;
2082 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2083 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2089 /* This is given special treatment because the first
2090 operand of a CALL is a (MEM ...) which may get
2091 forced into a register for cse. This is undesirable
2092 if function-address cse isn't wanted or if we won't do cse. */
2093 #ifndef NO_FUNCTION_CSE
2094 if (! (optimize && ! flag_no_function_cse))
2099 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2100 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2102 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2106 /* Must be ifdefed out for loop unrolling to work. */
2112 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2113 Adjust the setting by the offset of the area we made.
2114 If the nonlocal goto is into the current function,
2115 this will result in unnecessarily bad code, but should work. */
2116 if (SET_DEST (orig) == virtual_stack_vars_rtx
2117 || SET_DEST (orig) == virtual_incoming_args_rtx)
2119 /* In case a translation hasn't occurred already, make one now. */
2122 HOST_WIDE_INT loc_offset;
2124 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2125 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2126 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2127 REGNO (equiv_reg)).rtx;
2129 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2131 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2134 (copy_rtx_and_substitute (SET_SRC (orig),
2140 return gen_rtx_SET (VOIDmode,
2141 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2142 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2147 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2148 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2150 enum machine_mode const_mode
2151 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2153 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2155 constant = copy_rtx_and_substitute (constant, map, 0);
2157 /* If this was an address of a constant pool entry that itself
2158 had to be placed in the constant pool, it might not be a
2159 valid address. So the recursive call might have turned it
2160 into a register. In that case, it isn't a constant any
2161 more, so return it. This has the potential of changing a
2162 MEM into a REG, but we'll assume that it safe. */
2163 if (! CONSTANT_P (constant))
2166 return validize_mem (force_const_mem (const_mode, constant));
2169 copy = rtx_alloc (MEM);
2170 PUT_MODE (copy, mode);
2171 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2172 MEM_COPY_ATTRIBUTES (copy, orig);
2179 copy = rtx_alloc (code);
2180 PUT_MODE (copy, mode);
2181 copy->in_struct = orig->in_struct;
2182 copy->volatil = orig->volatil;
2183 copy->unchanging = orig->unchanging;
2185 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2187 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2189 switch (*format_ptr++)
2192 /* Copy this through the wide int field; that's safest. */
2193 X0WINT (copy, i) = X0WINT (orig, i);
2198 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2202 /* Change any references to old-insns to point to the
2203 corresponding copied insns. */
2204 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2208 XVEC (copy, i) = XVEC (orig, i);
2209 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2211 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2212 for (j = 0; j < XVECLEN (copy, i); j++)
2213 XVECEXP (copy, i, j)
2214 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2220 XWINT (copy, i) = XWINT (orig, i);
2224 XINT (copy, i) = XINT (orig, i);
2228 XSTR (copy, i) = XSTR (orig, i);
2232 XTREE (copy, i) = XTREE (orig, i);
2240 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2242 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2243 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2244 map->copy_asm_constraints_vector
2245 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2251 /* Substitute known constant values into INSN, if that is valid. */
/* try_constants: run subst_constants over INSN's pattern (addresses
   first, then everything), forget equivalences for anything the insn
   stores, and record any new dest<->constant equivalences from its SETs.
   NOTE(review): this extract is missing several original lines (return
   type, braces, local declarations); code left byte-identical.  */
2254 try_constants (insn, map)
2256 struct inline_remap *map;
2262 /* First try just updating addresses, then other things. This is
2263 important when we have something like the store of a constant
2264 into memory and we can update the memory address but the machine
2265 does not support a constant source. */
/* Pass 1: memonly=1 (addresses only); pass 2: memonly=0 (everything).
   Each pass is committed with apply_change_group.  */
2266 subst_constants (&PATTERN (insn), insn, map, 1);
2267 apply_change_group ();
2268 subst_constants (&PATTERN (insn), insn, map, 0);
2269 apply_change_group ();
2271 /* Show we don't know the value of anything stored or clobbered. */
2272 note_stores (PATTERN (insn), mark_stores, NULL);
2273 map->last_pc_value = 0;
2275 map->last_cc0_value = 0;
2278 /* Set up any constant equivalences made in this insn. */
2279 for (i = 0; i < map->num_sets; i++)
2281 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2283 int regno = REGNO (map->equiv_sets[i].dest)
2285 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
/* Record the equivalence unless an rtx_equal_p one is already there.  */
2286 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2287 /* Following clause is a hack to make case work where GNU C++
2288 reassigns a variable to make cse work right. */
2289 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2291 map->equiv_sets[i].equiv))
2292 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2293 map->equiv_sets[i].equiv, map->const_age);
/* SETs of pc / cc0 are remembered separately for the next insn.  */
2295 else if (map->equiv_sets[i].dest == pc_rtx)
2296 map->last_pc_value = map->equiv_sets[i].equiv;
2298 else if (map->equiv_sets[i].dest == cc0_rtx)
2299 map->last_cc0_value = map->equiv_sets[i].equiv;
2304 /* Substitute known constants for pseudo regs in the contents of LOC,
2305 which are part of INSN.
2306 If INSN is zero, the substitution should always be done (this is used to
2308 These changes are taken out by try_constants if the result is not valid.
2310 Note that we are more concerned with determining when the result of a SET
2311 is a constant, for further propagation, than actually inserting constants
2312 into insns; cse will do the latter task better.
2314 This function is also used to adjust address of items previously addressed
2315 via the virtual stack variable or virtual incoming arguments registers.
2317 If MEMONLY is nonzero, only make changes inside a MEM. */
/* NOTE(review): many original lines (braces, `case` labels, declarations
   of x/i/j/new/src/tem, `return`s) are absent from this extract; the
   visible code is left byte-identical.  Changes are queued through
   validate_change and undone by the caller if the insn stops matching.  */
2320 subst_constants (loc, insn, map, memonly)
2323 struct inline_remap *map;
2328 register enum rtx_code code;
2329 register const char *format_ptr;
/* Baseline for cancel_changes: undo back to here on failure.  */
2330 int num_changes = num_validated_changes ();
2332 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2334 code = GET_CODE (x);
/* A cc0 use: substitute the value last stored into cc0, if known.  */
2350 validate_change (insn, loc, map->last_cc0_value, 1);
2356 /* The only thing we can do with a USE or CLOBBER is possibly do
2357 some substitutions in a MEM within it. */
2358 if (GET_CODE (XEXP (x, 0)) == MEM)
2359 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2363 /* Substitute for parms and known constants. Don't replace
2364 hard regs used as user variables with constants. */
2367 int regno = REGNO (x);
2368 struct const_equiv_data *p;
/* Only substitute when the recorded equivalence is still current,
   i.e. its age is at least map->const_age.  */
2370 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2371 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2372 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2374 && p->age >= map->const_age)
2375 validate_change (insn, loc, p->rtx, 1);
2380 /* SUBREG applied to something other than a reg
2381 should be treated as ordinary, since that must
2382 be a special hack and we don't know how to treat it specially.
2383 Consider for example mulsidi3 in m68k.md.
2384 Ordinary SUBREG of a REG needs this special treatment. */
2385 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2387 rtx inner = SUBREG_REG (x);
2390 /* We can't call subst_constants on &SUBREG_REG (x) because any
2391 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
2392 see what is inside, try to form the new SUBREG and see if that is
2393 valid. We handle two cases: extracting a full word in an
2394 integral mode and extracting the low part. */
2395 subst_constants (&inner, NULL_RTX, map, 0);
2397 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2398 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2399 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2400 new = operand_subword (inner, SUBREG_WORD (x), 0,
2401 GET_MODE (SUBREG_REG (x)));
2403 cancel_changes (num_changes);
2404 if (new == 0 && subreg_lowpart_p (x))
2405 new = gen_lowpart_common (GET_MODE (x), inner);
2408 validate_change (insn, loc, new, 1);
/* MEM: substitute inside the address, then make sure it is still a
   valid address for this mode.  */
2415 subst_constants (&XEXP (x, 0), insn, map, 0);
2417 /* If a memory address got spoiled, change it back. */
2418 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2419 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2420 cancel_changes (num_changes);
2425 /* Substitute constants in our source, and in any arguments to a
2426 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
2428 rtx *dest_loc = &SET_DEST (x);
2429 rtx dest = *dest_loc;
2431 enum machine_mode compare_mode = VOIDmode;
2433 /* If SET_SRC is a COMPARE which subst_constants would turn into
2434 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2436 if (GET_CODE (SET_SRC (x)) == COMPARE)
2439 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
/* Remember an operand's mode before substitution can erase it.  */
2445 compare_mode = GET_MODE (XEXP (src, 0));
2446 if (compare_mode == VOIDmode)
2447 compare_mode = GET_MODE (XEXP (src, 1));
2451 subst_constants (&SET_SRC (x), insn, map, memonly);
/* Peel wrappers off the destination to reach the real target reg/mem.  */
2454 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2455 || GET_CODE (*dest_loc) == SUBREG
2456 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2458 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2460 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2461 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2463 dest_loc = &XEXP (*dest_loc, 0);
2466 /* Do substitute in the address of a destination in memory. */
2467 if (GET_CODE (*dest_loc) == MEM)
2468 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2470 /* Check for the case of DEST a SUBREG, both it and the underlying
2471 register are less than one word, and the SUBREG has the wider mode.
2472 In the case, we are really setting the underlying register to the
2473 source converted to the mode of DEST. So indicate that. */
2474 if (GET_CODE (dest) == SUBREG
2475 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2476 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2477 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2478 <= GET_MODE_SIZE (GET_MODE (dest)))
2479 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2481 src = tem, dest = SUBREG_REG (dest);
2483 /* If storing a recognizable value save it for later recording. */
/* equiv_sets[] is bounded by MAX_RECOG_OPERANDS; try_constants reads
   it back after the insn is processed.  */
2484 if ((map->num_sets < MAX_RECOG_OPERANDS)
2485 && (CONSTANT_P (src)
2486 || (GET_CODE (src) == REG
2487 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2488 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2489 || (GET_CODE (src) == PLUS
2490 && GET_CODE (XEXP (src, 0)) == REG
2491 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2492 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2493 && CONSTANT_P (XEXP (src, 1)))
2494 || GET_CODE (src) == COMPARE
2499 && (src == pc_rtx || GET_CODE (src) == RETURN
2500 || GET_CODE (src) == LABEL_REF))))
2502 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2503 it will cause us to save the COMPARE with any constants
2504 substituted, which is what we want for later. */
2505 rtx src_copy = copy_rtx (src);
2506 map->equiv_sets[map->num_sets].equiv = src_copy;
2507 map->equiv_sets[map->num_sets++].dest = dest;
2508 if (compare_mode != VOIDmode
2509 && GET_CODE (src) == COMPARE
2510 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2515 && GET_MODE (XEXP (src, 0)) == VOIDmode
2516 && GET_MODE (XEXP (src, 1)) == VOIDmode)
/* Both operands became VOIDmode constants; save the copy plus the
   mode noted above so IF_THEN_ELSE handling below can fold it.  */
2518 map->compare_src = src_copy;
2519 map->compare_mode = compare_mode;
/* Default handling: recurse over the operands per the rtx format.  */
2529 format_ptr = GET_RTX_FORMAT (code);
2531 /* If the first operand is an expression, save its mode for later. */
2532 if (*format_ptr == 'e')
2533 op0_mode = GET_MODE (XEXP (x, 0));
2535 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2537 switch (*format_ptr++)
2544 subst_constants (&XEXP (x, i), insn, map, memonly);
2556 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2557 for (j = 0; j < XVECLEN (x, i); j++)
2558 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2567 /* If this is a commutative operation, move a constant to the second
2568 operand unless the second operand is already a CONST_INT. */
2570 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2571 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2573 rtx tem = XEXP (x, 0);
2574 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2575 validate_change (insn, &XEXP (x, 1), tem, 1);
2578 /* Simplify the expression in case we put in some constants. */
2580 switch (GET_RTX_CLASS (code))
2583 if (op0_mode == MAX_MACHINE_MODE)
2585 new = simplify_unary_operation (code, GET_MODE (x),
2586 XEXP (x, 0), op0_mode);
2591 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2593 if (op_mode == VOIDmode)
2594 op_mode = GET_MODE (XEXP (x, 1));
2595 new = simplify_relational_operation (code, op_mode,
2596 XEXP (x, 0), XEXP (x, 1));
2597 #ifdef FLOAT_STORE_FLAG_VALUE
/* Targets where a true comparison yields a float use
   FLOAT_STORE_FLAG_VALUE instead of const1_rtx.  */
2598 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2600 enum machine_mode mode = GET_MODE (x);
2601 if (new == const0_rtx)
2602 new = CONST0_RTX (mode);
2605 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2606 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2615 new = simplify_binary_operation (code, GET_MODE (x),
2616 XEXP (x, 0), XEXP (x, 1));
2621 if (op0_mode == MAX_MACHINE_MODE)
2624 if (code == IF_THEN_ELSE)
2626 rtx op0 = XEXP (x, 0);
2628 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2629 && GET_MODE (op0) == VOIDmode
2630 && ! side_effects_p (op0)
2631 && XEXP (op0, 0) == map->compare_src
2632 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2634 /* We have compare of two VOIDmode constants for which
2635 we recorded the comparison mode. */
2637 simplify_relational_operation (GET_CODE (op0),
2642 if (temp == const0_rtx)
2644 else if (temp == const1_rtx)
2649 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2650 XEXP (x, 0), XEXP (x, 1),
/* Queue the simplified replacement; caller validates the insn.  */
2656 validate_change (insn, loc, new, 1);
2659 /* Show that register modified no longer contain known constants. We are
2660 called from note_stores with parts of the new insn. */
/* mark_stores: note_stores callback.  Clears the recorded constant
   equivalence for every hard/pseudo register covered by DEST.
   NOTE(review): extract is missing the return type, braces and the
   declarations of regno/i; code left byte-identical.  */
2663 mark_stores (dest, x, data)
2665 rtx x ATTRIBUTE_UNUSED;
2666 void *data ATTRIBUTE_UNUSED;
2669 enum machine_mode mode = VOIDmode;
2671 /* DEST is always the innermost thing set, except in the case of
2672 SUBREGs of hard registers. */
2674 if (GET_CODE (dest) == REG)
2675 regno = REGNO (dest), mode = GET_MODE (dest);
2676 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2678 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2679 mode = GET_MODE (SUBREG_REG (dest));
2684 unsigned int uregno = regno;
/* A multi-word hard register invalidates every hard reg it spans.  */
2685 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2686 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2689 /* Ignore virtual stack var or virtual arg register since those
2690 are handled separately. */
2691 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2692 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2693 for (i = uregno; i <= last_reg; i++)
2694 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2695 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2699 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2700 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2701 that it points to the node itself, thus indicating that the node is its
2702 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2703 the given node is NULL, recursively descend the decl/block tree which
2704 it is the root of, and for each other ..._DECL or BLOCK node contained
2705 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2706 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2707 values to point to themselves. */
/* NOTE(review): extract is missing the return type, parameter
   declaration and braces; code left byte-identical.  */
2710 set_block_origin_self (stmt)
/* Only descend when the origin is still unset, so already-processed
   subtrees are not walked again.  */
2713 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2715 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2718 register tree local_decl;
2720 for (local_decl = BLOCK_VARS (stmt);
2721 local_decl != NULL_TREE;
2722 local_decl = TREE_CHAIN (local_decl))
2723 set_decl_origin_self (local_decl); /* Potential recursion. */
2727 register tree subblock;
2729 for (subblock = BLOCK_SUBBLOCKS (stmt);
2730 subblock != NULL_TREE;
2731 subblock = BLOCK_CHAIN (subblock))
2732 set_block_origin_self (subblock); /* Recurse. */
2737 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2738 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2739 node to so that it points to the node itself, thus indicating that the
2740 node represents its own (abstract) origin. Additionally, if the
2741 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2742 the decl/block tree of which the given node is the root of, and for
2743 each other ..._DECL or BLOCK node contained therein whose
2744 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2745 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2746 point to themselves. */
/* NOTE(review): extract is missing the return type, parameter
   declaration, braces and the declaration of `arg`; code left
   byte-identical.  */
2749 set_decl_origin_self (decl)
2752 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2754 DECL_ABSTRACT_ORIGIN (decl) = decl;
/* For a function, also mark its parameters and descend into its
   outermost BLOCK (DECL_INITIAL), skipping error_mark_node.  */
2755 if (TREE_CODE (decl) == FUNCTION_DECL)
2759 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2760 DECL_ABSTRACT_ORIGIN (arg) = arg;
2761 if (DECL_INITIAL (decl) != NULL_TREE
2762 && DECL_INITIAL (decl) != error_mark_node)
2763 set_block_origin_self (DECL_INITIAL (decl));
2768 /* Given a pointer to some BLOCK node, and a boolean value to set the
2769 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2770 the given block, and for all local decls and all local sub-blocks
2771 (recursively) which are contained therein. */
/* NOTE(review): extract is missing the return type, the `stmt`
   parameter declaration and braces; code left byte-identical.  */
2774 set_block_abstract_flags (stmt, setting)
2776 register int setting;
2778 register tree local_decl;
2779 register tree subblock;
/* Unconditional, unlike set_block_origin_self: every node in the
   subtree gets SETTING.  */
2781 BLOCK_ABSTRACT (stmt) = setting;
2783 for (local_decl = BLOCK_VARS (stmt);
2784 local_decl != NULL_TREE;
2785 local_decl = TREE_CHAIN (local_decl))
2786 set_decl_abstract_flags (local_decl, setting);
2788 for (subblock = BLOCK_SUBBLOCKS (stmt);
2789 subblock != NULL_TREE;
2790 subblock = BLOCK_CHAIN (subblock))
2791 set_block_abstract_flags (subblock, setting);
2794 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2795 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2796 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2797 set the abstract flags for all of the parameters, local vars, local
2798 blocks and sub-blocks (recursively) to the same setting. */
/* NOTE(review): extract is missing the return type, the `decl`
   parameter declaration, braces and the declaration of `arg`; code
   left byte-identical.  */
2801 set_decl_abstract_flags (decl, setting)
2803 register int setting;
2805 DECL_ABSTRACT (decl) = setting;
/* Mirrors set_decl_origin_self's structure: parameters first, then
   the function's outermost BLOCK via set_block_abstract_flags.  */
2806 if (TREE_CODE (decl) == FUNCTION_DECL)
2810 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2811 DECL_ABSTRACT (arg) = setting;
2812 if (DECL_INITIAL (decl) != NULL_TREE
2813 && DECL_INITIAL (decl) != error_mark_node)
2814 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2818 /* Output the assembly language code for the function FNDECL
2819 from its DECL_SAVED_INSNS. Used for inline functions that are output
2820 at end of compilation instead of where they came in the source. */
2823 output_inline_function (fndecl)
2826 struct function *old_cfun = cfun;
2827 enum debug_info_type old_write_symbols = write_symbols;
2828 struct function *f = DECL_SAVED_INSNS (fndecl);
2831 current_function_decl = fndecl;
2832 clear_emit_caches ();
2834 set_new_last_label_num (f->inl_max_label_num);
2836 /* We're not deferring this any longer. */
2837 DECL_DEFER_OUTPUT (fndecl) = 0;
2839 /* If requested, suppress debugging information. */
2840 if (f->no_debugging_symbols)
2841 write_symbols = NO_DEBUG;
2843 /* Do any preparation, such as emitting abstract debug info for the inline
2844 before it gets mangled by optimization. */
2845 note_outlining_of_inline_function (fndecl);
2847 /* Compile this function all the way down to assembly code. */
2848 rest_of_compilation (fndecl);
2850 /* We can't inline this anymore. */
2852 DECL_INLINE (fndecl) = 0;
2855 current_function_decl = old_cfun ? old_cfun->decl : 0;
2856 write_symbols = old_write_symbols;