/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "insn-config.h"
#include "integrate.h"
#include "langhooks.h"

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
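/* For example, with ALIGN == 8 (ALIGN must be a power of two for the
   mask arithmetic to be valid), CEIL_ROUND (13, 8) == 16 and
   CEIL_ROUND (16, 8) == 16.  */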
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
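/* For example, a two-argument function may have up to
   8 * (8 + 2) == 80 insns and still be considered for inlining;
   when optimizing for size the limit drops to
   1 + (3 * 2) / 2 == 4 insns.  */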
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
                                          rtvec));
static tree integrate_decl_tree PARAMS ((tree,
                                         struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
                                     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
                                       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
                                    rtx));
static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
                                     int));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (i.e. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the FUNCTION_DECL for the inlined function.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */

bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return (*targetm.function_attribute_inlinable_p) (fndecl);
        }
    }

  return true;
}
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

static const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
                  ? (MAX_INLINE_INSNS
                     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
                  : INTEGRATE_THRESHOLD (fndecl);
  tree parms;
  int ninsns = 0;
  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return N_("no prototype, and parameter address used; cannot be inline");
    }
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
               && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          ninsns++;

      if (ninsns >= max_insns)
        return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
        return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */
void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete basic block notes created by an early run of find_basic_blocks.
     The notes would later be used by find_basic_blocks to reuse the memory
     for basic_block structures on the already freed obstack.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
         PARM_DECLs.  If a PARM_DECL is used but never modified, we
         can substitute its rtl directly when expanding inline (and
         perform constant folding when its incoming value is
         constant).  Otherwise, we have to copy its value into a new
         register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);
    }

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;

  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  if (! flag_no_inline)
    free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (INSN_P (insn))
        {
          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs, NULL);

          /* If this is a CALL_PLACEHOLDER insn then we need to look into the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              int i;

              for (i = 0; i < 3; i++)
                save_parm_insns (XEXP (PATTERN (insn), i),
                                 first_nonparm_insn);
            }
        }
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
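/* FIXED_BASE_PLUS_P thus matches addresses of the form
   (plus (reg virtual-stack-vars) (const_int N)): a constant offset
   from one of the virtual registers, which stays fixed for the
   lifetime of the function.  */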
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
          && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
          && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
        SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */
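/* E.g. on a 64-bit host, two valid pointers can be more than INT_MAX
   bytes apart, so returning their difference as qsort's int result
   could truncate or change sign; explicit comparisons avoid that.  */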
static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
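/* A caller is expected to check for the failure value; in sketch form
   (names here follow the call site in calls.c):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
                                    TREE_TYPE (exp), structure_value_addr);
     if (temp == (rtx) (size_t) -1)
       ... fall through and emit an ordinary call ...  */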
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
               ? NEXT_INSN (inl_f->inl_last_parm_insn)
               : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;
  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms type match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
          || mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (size_t) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));

      loc = RTVEC_ELT (arg_vector, i);
      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

          store_expr (arg, stack_slot, 0);
          arg_vals[i] = XEXP (stack_slot, 0);
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            {
              int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
              enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

              pmode = promote_mode (TREE_TYPE (formal), pmode,
                                    &unsignedp, 1);
              if (GET_MODE (loc) != pmode)
                abort ();

              /* The mode of LOC and ARG can differ if LOC was a variable
                 that had its mode promoted via PROMOTED_MODE.  */
              arg_vals[i] = convert_modes (pmode,
                                           TYPE_MODE (TREE_TYPE (arg)),
                                           expand_expr (arg, NULL_RTX, mode,
                                                        EXPAND_SUM),
                                           unsignedp);
            }
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;
  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
                           (max_reg_num ()
                            + (max_regno - FIRST_PSEUDO_REGISTER)
                            + 15 * nargs
                            + 10),
                           "expand_inline_function");
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));

      RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */
  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.  It
             will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             with the parameter now; we will call store_expr later.  In
             this case, however, we must ensure that the virtual stack and
             incoming arg rtx values are expanded now so that we can be
             sure we have enough slots in the const equiv map since the
             store_expr call can easily blow the size estimate.  */
          if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
            copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
        }
      else if (GET_CODE (loc) == REG)
        process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
        {
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          process_reg_param (map, locreal, copyreal);
          process_reg_param (map, locimag, copyimag);
        }
      else
        abort ();
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));

          RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
         ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
        {
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          target = temp;
        }
      else
        {
          if (! structure_value_addr
              || ! aggregate_value_p (DECL_RESULT (fndecl)))
            abort ();
          /* Pass the function the address in which to return a structure
             value.  Note that a constructor can cause someone to call us
             with STRUCTURE_VALUE_ADDR, but the initialization takes place
             via the first parameter, rather than the struct return address.

             We have two cases:  If the address is a simple register
             indirect, use the mapping mechanism to point that register to
             our structure return address.  Otherwise, store the structure
             return value into the place that it will be referenced from.  */
          if (GET_CODE (XEXP (loc, 0)) == REG)
            {
              temp = force_operand (structure_value_addr, NULL_RTX);
              temp = force_reg (Pmode, temp);
              /* A virtual register might be invalid in an insn, because
                 it can cause trouble in reload.  Since we don't have access
                 to the expanders at map translation time, make sure we have
                 a proper register now.
                 If a virtual register is actually valid, cse or combine
                 can put it into the mapped insns.  */
              if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
                  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
                temp = copy_to_mode_reg (Pmode, temp);
              map->reg_map[REGNO (XEXP (loc, 0))] = temp;

              if (CONSTANT_P (structure_value_addr)
                  || GET_CODE (structure_value_addr) == ADDRESSOF
                  || (GET_CODE (structure_value_addr) == PLUS
                      && (XEXP (structure_value_addr, 0)
                          == virtual_stack_vars_rtx)
                      && (GET_CODE (XEXP (structure_value_addr, 1))
                          == CONST_INT)))
                SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
                                      CONST_AGE_PARM);
            }
          else
            {
              temp = copy_rtx_and_substitute (loc, map, 1);
              subst_constants (&temp, NULL_RTX, map, 0);
              apply_change_group ();
              emit_move_insn (temp, structure_value_addr);
            }
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).
         We have to use the mode of the result's RTL, rather than
         its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        {
          /* Don't make BLKmode registers.  If this looks like
             a BLKmode object being returned in a register, get
             the mode from that, otherwise abort.  */
          if (departing_mode == BLKmode)
            {
              if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
                {
                  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
                  arriving_mode = departing_mode;
                }
              else
                abort ();
            }

          target = gen_reg_rtx (departing_mode);
        }
      /* If the function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        {
          /* Avoid creating a paradoxical subreg wider than
             BITS_PER_WORD, since that is illegal.  */
          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
            {
              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
                                          GET_MODE_BITSIZE (arriving_mode)))
                /* Maybe could be handled by using convert_move () ?  */
                abort ();
              reg_to_map = gen_reg_rtx (arriving_mode);
              target = gen_lowpart (departing_mode, reg_to_map);
            }
          else
            reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
        }
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
        abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
          || GET_CODE (XEXP (loc, 1)) != REG)
        abort ();

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
        abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();
  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
          (max_labelno - min_labelno) * sizeof (rtx));
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);
  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
         map->block_map->elements_used,
         sizeof (tree),
         compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();
  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;
  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushed CALL_ARGUMENTS blocks.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);
  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
        = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }
  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);
  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
                            memory_address (TYPE_MODE (type),
                                            structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */
static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          set = single_set (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;
          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && set != 0
              && GET_CODE (SET_DEST (set)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
            {
              if (volatile_refs_p (SET_SRC (set)))
                {
                  rtx new_set;

                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

                  new_set = single_set (copy);
                  if (new_set == 0)
                    abort ();

                  SET_DEST (new_set)
                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
                }
              /* If the source and destination are the same and it
                 has a note on it, keep the insn.  */
              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
                       && REG_NOTES (insn) != 0)
                copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
              else
                break;
            }
          /* Similarly if an ignored return value is clobbered.  */
          else if (map->inline_target == 0
                   && GET_CODE (pattern) == CLOBBER
                   && GET_CODE (XEXP (pattern, 0)) == REG
                   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            break;
          /* Look for the address of the static chain slot.  The
             rtx_equal_p comparisons against the
             static_chain_incoming_rtx below may fail if the static
             chain is in memory and the address specified is not
             "legitimate".  This happens on Xtensa where the static
             chain is at a negative offset from argp and where only
             positive offsets are legitimate.  When the RTL is
             generated, the address is "legitimized" by copying it
             into a register, causing the rtx_equal_p comparisons to
             fail.  This workaround looks for code that sets a
             register to the address of the static chain.  Subsequent
             memory references via that register can then be
             identified as static chain references.  We assume that
             the register is only assigned once, and that the static
             chain address is only live in one register at a time.  */
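          /* For illustration (the register number is made up): with
             static_chain_incoming_rtx of the form

               (mem (plus (reg argp) (const_int -16)))

             the workaround matches a prologue insn such as

               (set (reg 77) (plus (reg argp) (const_int -16)))

             and records (mem (reg 77)) in static_chain_mem, so that
             later loads through that register are recognized below.  */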
          else if (static_chain_value != 0
                   && set != 0
                   && GET_CODE (static_chain_incoming_rtx) == MEM
                   && GET_CODE (SET_DEST (set)) == REG
                   && rtx_equal_p (SET_SRC (set),
                                   XEXP (static_chain_incoming_rtx, 0)))
            {
              static_chain_mem =
                gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
                             SET_DEST (set));

              /* Emit the instruction in case it is used for something
                 other than setting the static chain; if it's not used,
                 it can always be removed as dead code.  */
              copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
            }
          /* If this is setting the static chain rtx, omit it.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_DEST (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_DEST (set), static_chain_mem))))
            break;

          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_SRC (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_SRC (set), static_chain_mem))))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

              copy = emit_move_insn (newdest, static_chain_value);
              if (GET_CODE (static_chain_incoming_rtx) != MEM)
                static_chain_value = 0;
            }
          /* If this is setting the virtual stack vars register, this must
             be the code at the handler for a builtin longjmp.  The value
             saved in the setjmp buffer will be the address of the frame
             we've made for this inlined instance within our frame.  But we
             know the offset of that value so we can use it to reconstruct
             our virtual stack vars register from that value.  If we are
             copying it from the stack pointer, leave it unchanged.  */
          else if (set != 0
                   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
            {
              HOST_WIDE_INT offset;
              temp = map->reg_map[REGNO (SET_DEST (set))];
              temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                         REGNO (temp)).rtx;

              if (rtx_equal_p (temp, virtual_stack_vars_rtx))
                offset = 0;
              else if (GET_CODE (temp) == PLUS
                       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
                       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
                offset = INTVAL (XEXP (temp, 1));
              else
                abort ();

              if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
                temp = SET_SRC (set);
              else
                temp = force_operand (plus_constant (SET_SRC (set),
                                                     - offset),
                                      NULL_RTX);

              copy = emit_move_insn (virtual_stack_vars_rtx, temp);
            }

          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
          /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          INSN_SCOPE (copy) = INSN_SCOPE (insn);
          break;

        case JUMP_INSN:
          if (map->integrating && returnjump_p (insn))
            {
              if (map->local_return_label == 0)
                map->local_return_label = gen_label_rtx ();
              pattern = gen_jump (map->local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);
          INSN_SCOPE (copy) = INSN_SCOPE (insn);
          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* If the previous insn set cc0 for us, delete it.  */
              if (only_sets_cc0_p (PREV_INSN (copy)))
                delete_related_insns (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_related_insns (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;
        case CALL_INSN:
          /* If this is a CALL_PLACEHOLDER insn then we need to copy the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              rtx sequence[3];
              rtx tail_label;

              for (i = 0; i < 3; i++)
                {
                  rtx seq;

                  sequence[i] = NULL_RTX;
                  seq = XEXP (PATTERN (insn), i);
                  if (seq)
                    {
                      start_sequence ();
                      copy_insn_list (seq, map, static_chain_value);
                      sequence[i] = get_insns ();
                      end_sequence ();
                    }
                }

              /* Find the new tail recursion label.
                 It will already be substituted into sequence[2].  */
              tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
                                                    map, 0);

              copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
                                                               sequence[0],
                                                               sequence[1],
                                                               sequence[2],
                                                               tail_label));
              break;
            }
          pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
          copy = emit_call_insn (pattern);

          SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
          CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
          INSN_SCOPE (copy) = INSN_SCOPE (insn);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */

          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
                                       map, 0);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
          break;

        case CODE_LABEL:
          copy = emit_label (get_label_from_map (map,
                                                 CODE_LABEL_NUMBER (insn)));
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;

        case NOTE:
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
            {
              copy = emit_label (get_label_from_map (map,
                                                     CODE_LABEL_NUMBER (insn)));
              LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
              map->const_age++;
              break;
            }
          /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
             discarded because it is important to have only one of
             each in the current function.

             NOTE_INSN_DELETED notes aren't useful.  */

          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
            {
              copy = emit_note (NOTE_SOURCE_FILE (insn),
                                NOTE_LINE_NUMBER (insn));
              if (copy
                  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
                      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
                  && NOTE_BLOCK (insn))
                {
                  tree *mapped_block_p;

                  mapped_block_p
                    = (tree *) bsearch (NOTE_BLOCK (insn),
                                        &VARRAY_TREE (map->block_map, 0),
                                        map->block_map->elements_used,
                                        sizeof (tree),
                                        find_block);

                  if (!mapped_block_p)
                    abort ();
                  else
                    NOTE_BLOCK (copy) = *mapped_block_p;
                }
              else if (copy
                       && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
                NOTE_EXPECTED_VALUE (copy)
                  = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
                                             map, 0);
            }
          else
            copy = 0;
          break;

        default:
          abort ();
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}
/* Copy the REG_NOTES.  Increment const_age, so that only constants
   from parameters can be substituted in.  These are the only ones
   that are valid across the entire function.  */

static void
copy_insn_notes (insns, map, eh_region_offset)
     rtx insns;
     struct inline_remap *map;
     int eh_region_offset;
{
  rtx insn, new_insn;

  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      new_insn = map->insn_map[INSN_UID (insn)];
      if (! new_insn)
        continue;

      if (REG_NOTES (insn))
        {
          rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

          /* We must also do subst_constants, in case one of our parameters
             has const type and constant value.  */
          subst_constants (&note, NULL_RTX, map, 0);
          apply_change_group ();
          REG_NOTES (new_insn) = note;

          /* Delete any REG_LABEL notes from the chain.  Remap any
             REG_EH_REGION notes.  */
          for (; note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL)
                remove_note (new_insn, note);
              else if (REG_NOTE_KIND (note) == REG_EH_REGION
                       && INTVAL (XEXP (note, 0)) > 0)
                XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
                                          + eh_region_offset);
            }
        }

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;
          for (i = 0; i < 3; i++)
            copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
        }

      if (GET_CODE (insn) == JUMP_INSN
          && GET_CODE (PATTERN (insn)) == RESX)
        XINT (PATTERN (new_insn), 0) += eh_region_offset;
    }
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  tree tail;
  int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
                                          current_function_decl);
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      SET_DECL_RTL (decl, new_decl_rtl);
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree *next;
  tree new_block;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);

      if (DECL_RTL_SET_P (t))
        {
          rtx r;

          SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));

          /* Fully instantiate the address with the equivalent form so that the
             debugging information contains the actual register, instead of the
             virtual register.  Do this by not passing an insn to
             subst_constants.  */
          r = DECL_RTL (d);
          subst_constants (&r, NULL_RTX, map, 1);
          SET_DECL_RTL (d, r);

          if (GET_CODE (r) == REG)
            REGNO_DECL (REGNO (r)) = d;
          else if (GET_CODE (r) == CONCAT)
            {
              REGNO_DECL (REGNO (XEXP (r, 0))) = d;
              REGNO_DECL (REGNO (XEXP (r, 1))) = d;
            }

          apply_change_group ();
        }

      /* Add this declaration to the list of variables in the new
         block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
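/* Typical uses elsewhere in this file: copying a whole insn pattern,

     copy = copy_rtx_and_substitute (PATTERN (insn), map, 0);

   and copying a SET destination with FOR_LHS set,

     newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);  */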
rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
          || (map->integrating
              && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];
1900 /* If this is the virtual frame pointer, make space in current
1901 function's stack frame for the stack frame of the inline function.
1903 Copy the address of this area into a pseudo. Map
1904 virtual_stack_vars_rtx to this pseudo and set up a constant
1905 equivalence for it to be the address. This will substitute the
1906 address into insns where it can be substituted and use the new
1907 pseudo where it can't. */
1908 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1911 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1912 #ifdef FRAME_GROWS_DOWNWARD
1914 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1917 /* In this case, virtual_stack_vars_rtx points to one byte
1918 higher than the top of the frame area. So make sure we
1919 allocate a big enough chunk to keep the frame pointer
1920 aligned like a real one. */
1922 size = CEIL_ROUND (size, alignment);
1925 loc = assign_stack_temp (BLKmode, size, 1);
1926 loc = XEXP (loc, 0);
1927 #ifdef FRAME_GROWS_DOWNWARD
1928 /* In this case, virtual_stack_vars_rtx points to one byte
1929 higher than the top of the frame area. So compute the offset
1930 to one byte higher than our substitute frame. */
1931 loc = plus_constant (loc, size);
1933 map->reg_map[regno] = temp
1934 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1936 #ifdef STACK_BOUNDARY
1937 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1940 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1944 emit_insn_after (seq, map->insns_at_start);
1947 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1948 || (map->integrating
1949 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1952 /* Do the same for a block to contain any arguments referenced
1953 in memory.  */
1955 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1958 loc = assign_stack_temp (BLKmode, size, 1);
1959 loc = XEXP (loc, 0);
1960 /* When arguments grow downward, the virtual incoming
1961 args pointer points to the top of the argument block,
1962 so the remapped location better do the same. */
1963 #ifdef ARGS_GROW_DOWNWARD
1964 loc = plus_constant (loc, size);
1966 map->reg_map[regno] = temp
1967 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1969 #ifdef STACK_BOUNDARY
1970 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1973 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1977 emit_insn_after (seq, map->insns_at_start);
1980 else if (REG_FUNCTION_VALUE_P (orig))
1982 /* This is a reference to the function return value. If
1983 the function doesn't have a return value, error. If the
1984 mode doesn't agree, and it isn't BLKmode, make a SUBREG.  */
1985 if (map->inline_target == 0)
1987 if (rtx_equal_function_value_matters)
1988 /* This is an ignored return value. We must not
1989 leave it in with REG_FUNCTION_VALUE_P set, since
1990 that would confuse subsequent inlining of the
1991 current function into a later function. */
1992 return gen_rtx_REG (GET_MODE (orig), regno);
1994 /* Must be unrolling loops or replicating code if we
1995 reach here, so return the register unchanged. */
1998 else if (GET_MODE (map->inline_target) != BLKmode
1999 && mode != GET_MODE (map->inline_target))
2000 return gen_lowpart (mode, map->inline_target);
2002 return map->inline_target;
2004 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2005 /* If leaf_renumber_regs_insn() might remap this register to
2006 some other number, make sure we don't share it with the
2007 inlined function, otherwise delayed optimization of the
2008 inlined function may change it in place, breaking our
2009 reference to it.  We may still share it within the
2010 function, so create an entry for this register in the
2011 leaf_reg_map table.  */
2012 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2013 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2015 if (!map->leaf_reg_map[regno][mode])
2016 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2017 return map->leaf_reg_map[regno][mode];
2025 if (map->reg_map[regno] == NULL)
2027 map->reg_map[regno] = gen_reg_rtx (mode);
2028 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2029 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2030 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2031 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2033 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2034 mark_reg_pointer (map->reg_map[regno],
2035 map->regno_pointer_align[regno]);
2037 return map->reg_map[regno];
2040 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2041 return simplify_gen_subreg (GET_MODE (orig), copy,
2042 GET_MODE (SUBREG_REG (orig)),
2043 SUBREG_BYTE (orig));
2046 copy = gen_rtx_ADDRESSOF (mode,
2047 copy_rtx_and_substitute (XEXP (orig, 0),
2049 0, ADDRESSOF_DECL (orig));
2050 regno = ADDRESSOF_REGNO (orig);
2051 if (map->reg_map[regno])
2052 regno = REGNO (map->reg_map[regno]);
2053 else if (regno > LAST_VIRTUAL_REGISTER)
2055 temp = XEXP (orig, 0);
2056 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2057 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2058 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2059 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2060 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2062 /* Objects may initially be represented as registers, but may
2063 later be turned into a MEM if their address is taken by
2064 put_var_into_stack. Therefore, the register table may have
2065 entries which are MEMs.
2067 We briefly tried to clear such entries, but that ended up
2068 cascading into many changes due to the optimizers not being
2069 prepared for empty entries in the register table. So we've
2070 decided to allow the MEMs in the register table for now. */
2071 if (REG_P (map->x_regno_reg_rtx[regno])
2072 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2073 mark_reg_pointer (map->reg_map[regno],
2074 map->regno_pointer_align[regno]);
2075 regno = REGNO (map->reg_map[regno]);
2077 ADDRESSOF_REGNO (copy) = regno;
2082 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2083 to (use foo) if the original insn didn't have a subreg.
2084 Removing the subreg distorts the VAX movstrhi pattern
2085 by changing the mode of an operand. */
2086 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2087 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2088 copy = SUBREG_REG (copy);
2089 return gen_rtx_fmt_e (code, VOIDmode, copy);
2091 /* We need to handle "deleted" labels that appear in the DECL_RTL
2092 of a LABEL_DECL.  */
2094 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2097 /* ... FALLTHRU ... */
2099 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2100 = LABEL_PRESERVE_P (orig);
2101 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2107 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2108 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2110 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2112 /* The fact that this label was previously nonlocal does not mean
2113 it still is, so we must check if it is within the range of
2114 this function's labels. */
2115 LABEL_REF_NONLOCAL_P (copy)
2116 = (LABEL_REF_NONLOCAL_P (orig)
2117 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2118 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2120 /* If we have made a nonlocal label local, it means that this
2121 inlined call will be referring to our nonlocal goto handler.
2122 So make sure we create one for this block; we normally would
2123 not since this is not otherwise considered a "call". */
2124 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2125 function_call_count++;
2136 /* Symbols which represent the address of a label stored in the constant
2137 pool must be modified to point to a constant pool entry for the
2138 remapped label. Otherwise, symbols are returned unchanged. */
2139 if (CONSTANT_POOL_ADDRESS_P (orig))
2141 struct function *f = inlining ? inlining : cfun;
2142 rtx constant = get_pool_constant_for_function (f, orig);
2143 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2146 rtx temp = force_const_mem (const_mode,
2147 copy_rtx_and_substitute (constant,
2151 /* Legitimizing the address here is incorrect.
2153 Since we had a SYMBOL_REF before, we can assume it is valid
2154 to have one in this position in the insn.
2156 Also, change_address may create new registers. These
2157 registers will not have valid reg_map entries. This can
2158 cause try_constants() to fail because it assumes that all
2159 registers in the rtx have valid reg_map entries, and it may
2160 end up replacing one of these new registers with junk. */
2162 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2163 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2166 temp = XEXP (temp, 0);
2168 #ifdef POINTERS_EXTEND_UNSIGNED
2169 if (GET_MODE (temp) != GET_MODE (orig))
2170 temp = convert_memory_address (GET_MODE (orig), temp);
2174 else if (GET_CODE (constant) == LABEL_REF)
2175 return XEXP (force_const_mem
2177 copy_rtx_and_substitute (constant, map, for_lhs)),
2184 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2185 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2186 duplicate of a CONST_DOUBLE we have already seen. */
2187 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2191 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2192 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2195 return immed_double_const (CONST_DOUBLE_LOW (orig),
2196 CONST_DOUBLE_HIGH (orig), VOIDmode);
2199 /* Make new constant pool entry for a constant
2200 that was in the pool of the inline function. */
2201 if (RTX_INTEGRATED_P (orig))
2206 /* If a single asm insn contains multiple output operands then
2207 it contains multiple ASM_OPERANDS rtx's that share the input
2208 and constraint vecs. We must make sure that the copied insn
2209 continues to share them.  */
2210 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2212 copy = rtx_alloc (ASM_OPERANDS);
2213 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2214 PUT_MODE (copy, GET_MODE (orig));
2215 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2216 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2217 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2218 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2219 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2220 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2221 = map->copy_asm_constraints_vector;
2222 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2223 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2229 /* This is given special treatment because the first
2230 operand of a CALL is a (MEM ...) which may get
2231 forced into a register for cse. This is undesirable
2232 if function-address cse isn't wanted or if we won't do cse. */
2233 #ifndef NO_FUNCTION_CSE
2234 if (! (optimize && ! flag_no_function_cse))
2238 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2239 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2242 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2245 gen_rtx_CALL (GET_MODE (orig), copy,
2246 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2251 /* Must be ifdefed out for loop unrolling to work. */
2257 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2258 Adjust the setting by the offset of the area we made.
2259 If the nonlocal goto is into the current function,
2260 this will result in unnecessarily bad code, but should work. */
2261 if (SET_DEST (orig) == virtual_stack_vars_rtx
2262 || SET_DEST (orig) == virtual_incoming_args_rtx)
2264 /* In case a translation hasn't occurred already, make one now. */
2267 HOST_WIDE_INT loc_offset;
2269 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2270 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2271 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2272 REGNO (equiv_reg)).rtx;
2274 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2276 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2279 (copy_rtx_and_substitute (SET_SRC (orig),
2285 return gen_rtx_SET (VOIDmode,
2286 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2287 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2292 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2293 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2295 enum machine_mode const_mode
2296 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2298 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2300 constant = copy_rtx_and_substitute (constant, map, 0);
2302 /* If this was an address of a constant pool entry that itself
2303 had to be placed in the constant pool, it might not be a
2304 valid address. So the recursive call might have turned it
2305 into a register. In that case, it isn't a constant any
2306 more, so return it. This has the potential of changing a
2307 MEM into a REG, but we'll assume that it is safe.  */
2308 if (! CONSTANT_P (constant))
2311 return validize_mem (force_const_mem (const_mode, constant));
2314 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2316 MEM_COPY_ATTRIBUTES (copy, orig);
2318 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2319 since this may be an indirect reference to a parameter and the
2320 actual may not be readonly. */
2321 if (inlining && !for_lhs)
2322 RTX_UNCHANGING_P (copy) = 0;
2324 /* If inlining, squish aliasing data that references the subroutine's
2325 parameter list, since that's no longer applicable. */
2326 if (inlining && MEM_EXPR (copy)
2327 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2328 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2329 set_mem_expr (copy, NULL_TREE);
2337 copy = rtx_alloc (code);
2338 PUT_MODE (copy, mode);
2339 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2340 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2341 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2343 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2345 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2347 switch (*format_ptr++)
2350 /* Copy this through the wide int field; that's safest. */
2351 X0WINT (copy, i) = X0WINT (orig, i);
2356 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2360 /* Change any references to old-insns to point to the
2361 corresponding copied insns. */
2362 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2366 XVEC (copy, i) = XVEC (orig, i);
2367 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2369 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2370 for (j = 0; j < XVECLEN (copy, i); j++)
2371 XVECEXP (copy, i, j)
2372 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2378 XWINT (copy, i) = XWINT (orig, i);
2382 XINT (copy, i) = XINT (orig, i);
2386 XSTR (copy, i) = XSTR (orig, i);
2390 XTREE (copy, i) = XTREE (orig, i);
2398 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2400 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2401 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2402 map->copy_asm_constraints_vector
2403 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
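/* Editorial example of the net effect (the register numbers are made
   up): inlining a body that contains

       (set (reg:SI 60) (plus:SI (reg:SI 58) (const_int 4)))

   yields a structurally identical rtx in which pseudos 58 and 60 have
   been replaced by fresh caller pseudos recorded in map->reg_map, and
   any virtual frame/arg pointers by the stack areas allocated above.  */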
2409 /* Substitute known constant values into INSN, if that is valid. */
2412 try_constants (insn, map)
2414 struct inline_remap *map;
2420 /* First try just updating addresses, then other things. This is
2421 important when we have something like the store of a constant
2422 into memory and we can update the memory address but the machine
2423 does not support a constant source. */
2424 subst_constants (&PATTERN (insn), insn, map, 1);
2425 apply_change_group ();
2426 subst_constants (&PATTERN (insn), insn, map, 0);
2427 apply_change_group ();
2429 /* Show we don't know the value of anything stored or clobbered. */
2430 note_stores (PATTERN (insn), mark_stores, NULL);
2431 map->last_pc_value = 0;
2433 map->last_cc0_value = 0;
2436 /* Set up any constant equivalences made in this insn. */
2437 for (i = 0; i < map->num_sets; i++)
2439 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2441 int regno = REGNO (map->equiv_sets[i].dest);
2443 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2444 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2445 /* The following clause is a hack to make the case work where
2446 GNU C++ reassigns a variable to make cse work right.  */
2447 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2449 map->equiv_sets[i].equiv))
2450 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2451 map->equiv_sets[i].equiv, map->const_age);
2453 else if (map->equiv_sets[i].dest == pc_rtx)
2454 map->last_pc_value = map->equiv_sets[i].equiv;
2456 else if (map->equiv_sets[i].dest == cc0_rtx)
2457 map->last_cc0_value = map->equiv_sets[i].equiv;
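/* Editorial example (the register numbers are made up): if the map
   already records reg 65 == (const_int 8), then for

       (set (reg:SI 70) (plus:SI (reg:SI 65) (const_int 4)))

   the calls above propose (const_int 12) as the source; if
   validate_change accepts it, the loop then records
   reg 70 == (const_int 12) for use in later insns.  */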
2462 /* Substitute known constants for pseudo regs in the contents of LOC,
2463 which are part of INSN.
2464 If INSN is zero, the substitution should always be done (this is used to
2465 update DECL_RTL).
2466 These changes are taken out by try_constants if the result is not valid.
2468 Note that we are more concerned with determining when the result of a SET
2469 is a constant, for further propagation, than actually inserting constants
2470 into insns; cse will do the latter task better.
2472 This function is also used to adjust the addresses of items previously addressed
2473 via the virtual stack variable or virtual incoming arguments registers.
2475 If MEMONLY is nonzero, only make changes inside a MEM. */
2478 subst_constants (loc, insn, map, memonly)
2481 struct inline_remap *map;
2487 const char *format_ptr;
2488 int num_changes = num_validated_changes ();
2490 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2492 code = GET_CODE (x);
2509 validate_change (insn, loc, map->last_cc0_value, 1);
2515 /* The only thing we can do with a USE or CLOBBER is possibly do
2516 some substitutions in a MEM within it. */
2517 if (GET_CODE (XEXP (x, 0)) == MEM)
2518 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2522 /* Substitute for parms and known constants. Don't replace
2523 hard regs used as user variables with constants. */
2526 int regno = REGNO (x);
2527 struct const_equiv_data *p;
2529 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2530 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2531 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2533 && p->age >= map->const_age)
2534 validate_change (insn, loc, p->rtx, 1);
2539 /* SUBREG applied to something other than a reg
2540 should be treated as ordinary, since that must
2541 be a special hack and we don't know how to treat it specially.
2542 Consider for example mulsidi3 in m68k.md.
2543 Ordinary SUBREG of a REG needs this special treatment. */
2544 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2546 rtx inner = SUBREG_REG (x);
2549 /* We can't call subst_constants on &SUBREG_REG (x) because any
2550 constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2551 see what is inside, try to form the new SUBREG and see if that is
2552 valid. We handle two cases: extracting a full word in an
2553 integral mode and extracting the low part. */
2554 subst_constants (&inner, NULL_RTX, map, 0);
2555 new = simplify_gen_subreg (GET_MODE (x), inner,
2556 GET_MODE (SUBREG_REG (x)),
2560 validate_change (insn, loc, new, 1);
2562 cancel_changes (num_changes);
2569 subst_constants (&XEXP (x, 0), insn, map, 0);
2571 /* If a memory address got spoiled, change it back. */
2572 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2573 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2574 cancel_changes (num_changes);
2579 /* Substitute constants in our source, and in any arguments to a
2580 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2581 itself.  */
2582 rtx *dest_loc = &SET_DEST (x);
2583 rtx dest = *dest_loc;
2585 enum machine_mode compare_mode = VOIDmode;
2587 /* If SET_SRC is a COMPARE which subst_constants would turn into
2588 COMPARE of 2 VOIDmode constants, note the mode in which the comparison
2589 is done.  */
2590 if (GET_CODE (SET_SRC (x)) == COMPARE)
2593 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2599 compare_mode = GET_MODE (XEXP (src, 0));
2600 if (compare_mode == VOIDmode)
2601 compare_mode = GET_MODE (XEXP (src, 1));
2605 subst_constants (&SET_SRC (x), insn, map, memonly);
2608 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2609 || GET_CODE (*dest_loc) == SUBREG
2610 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2612 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2614 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2615 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2617 dest_loc = &XEXP (*dest_loc, 0);
2620 /* Do substitute in the address of a destination in memory. */
2621 if (GET_CODE (*dest_loc) == MEM)
2622 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2624 /* Check for the case where DEST is a SUBREG, both it and the underlying
2625 register are less than one word, and the SUBREG has the wider mode.
2626 In that case, we are really setting the underlying register to the
2627 source converted to the mode of DEST.  So indicate that.  */
2628 if (GET_CODE (dest) == SUBREG
2629 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2630 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2631 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2632 <= GET_MODE_SIZE (GET_MODE (dest)))
2633 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2635 src = tem, dest = SUBREG_REG (dest);
2637 /* If storing a recognizable value, save it for later recording.  */
2638 if ((map->num_sets < MAX_RECOG_OPERANDS)
2639 && (CONSTANT_P (src)
2640 || (GET_CODE (src) == REG
2641 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2642 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2643 || (GET_CODE (src) == PLUS
2644 && GET_CODE (XEXP (src, 0)) == REG
2645 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2646 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2647 && CONSTANT_P (XEXP (src, 1)))
2648 || GET_CODE (src) == COMPARE
2653 && (src == pc_rtx || GET_CODE (src) == RETURN
2654 || GET_CODE (src) == LABEL_REF))))
2656 /* Normally, this copy won't do anything.  But if SRC is a COMPARE,
2657 it will cause us to save the COMPARE with any constants
2658 substituted, which is what we want for later. */
2659 rtx src_copy = copy_rtx (src);
2660 map->equiv_sets[map->num_sets].equiv = src_copy;
2661 map->equiv_sets[map->num_sets++].dest = dest;
2662 if (compare_mode != VOIDmode
2663 && GET_CODE (src) == COMPARE
2664 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2669 && GET_MODE (XEXP (src, 0)) == VOIDmode
2670 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2672 map->compare_src = src_copy;
2673 map->compare_mode = compare_mode;
2683 format_ptr = GET_RTX_FORMAT (code);
2685 /* If the first operand is an expression, save its mode for later. */
2686 if (*format_ptr == 'e')
2687 op0_mode = GET_MODE (XEXP (x, 0));
2689 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2691 switch (*format_ptr++)
2698 subst_constants (&XEXP (x, i), insn, map, memonly);
2711 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2712 for (j = 0; j < XVECLEN (x, i); j++)
2713 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2722 /* If this is a commutative operation, move a constant to the second
2723 operand unless the second operand is already a CONST_INT. */
2725 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2726 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2728 rtx tem = XEXP (x, 0);
2729 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2730 validate_change (insn, &XEXP (x, 1), tem, 1);
2733 /* Simplify the expression in case we put in some constants. */
2735 switch (GET_RTX_CLASS (code))
2738 if (op0_mode == MAX_MACHINE_MODE)
2740 new = simplify_unary_operation (code, GET_MODE (x),
2741 XEXP (x, 0), op0_mode);
2746 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2748 if (op_mode == VOIDmode)
2749 op_mode = GET_MODE (XEXP (x, 1));
2750 new = simplify_relational_operation (code, op_mode,
2751 XEXP (x, 0), XEXP (x, 1));
2752 #ifdef FLOAT_STORE_FLAG_VALUE
2753 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2755 enum machine_mode mode = GET_MODE (x);
2756 if (new == const0_rtx)
2757 new = CONST0_RTX (mode);
2760 REAL_VALUE_TYPE val;
2762 /* Avoid automatic aggregate initialization. */
2763 val = FLOAT_STORE_FLAG_VALUE (mode);
2764 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2773 new = simplify_binary_operation (code, GET_MODE (x),
2774 XEXP (x, 0), XEXP (x, 1));
2779 if (op0_mode == MAX_MACHINE_MODE)
2782 if (code == IF_THEN_ELSE)
2784 rtx op0 = XEXP (x, 0);
2786 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2787 && GET_MODE (op0) == VOIDmode
2788 && ! side_effects_p (op0)
2789 && XEXP (op0, 0) == map->compare_src
2790 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2792 /* We have a compare of two VOIDmode constants for which
2793 we recorded the comparison mode. */
2795 simplify_relational_operation (GET_CODE (op0),
2800 if (temp == const0_rtx)
2802 else if (temp == const1_rtx)
2807 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2808 XEXP (x, 0), XEXP (x, 1),
2814 validate_change (insn, loc, new, 1);
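/* Editorial example of the canonicalization above: if substitution
   makes only the first operand constant, e.g.

       (plus:SI (reg:SI 60) (reg:SI 61))
    -> (plus:SI (const_int 4) (reg:SI 61))

   the two operands are swapped into the canonical form
   (plus:SI (reg:SI 61) (const_int 4)) before simplification.  */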
2817 /* Show that the registers modified no longer contain known constants.  We are
2818 called from note_stores with parts of the new insn. */
2821 mark_stores (dest, x, data)
2823 rtx x ATTRIBUTE_UNUSED;
2824 void *data ATTRIBUTE_UNUSED;
2827 enum machine_mode mode = VOIDmode;
2829 /* DEST is always the innermost thing set, except in the case of
2830 SUBREGs of hard registers. */
2832 if (GET_CODE (dest) == REG)
2833 regno = REGNO (dest), mode = GET_MODE (dest);
2834 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2836 regno = REGNO (SUBREG_REG (dest));
2837 if (regno < FIRST_PSEUDO_REGISTER)
2838 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2839 GET_MODE (SUBREG_REG (dest)),
2842 mode = GET_MODE (SUBREG_REG (dest));
2847 unsigned int uregno = regno;
2848 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2849 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2852 /* Ignore virtual stack var or virtual arg register since those
2853 are handled separately. */
2854 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2855 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2856 for (i = uregno; i <= last_reg; i++)
2857 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2858 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
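/* Usage sketch (editorial): mark_stores is meant only as a
   note_stores callback, as in try_constants above:

       note_stores (PATTERN (insn), mark_stores, NULL);

   so that every register (or hard-register SUBREG) written by the
   insn drops its entry in global_const_equiv_varray.  */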
2862 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2863 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2864 that it points to the node itself, thus indicating that the node is its
2865 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2866 the given node is NULL, recursively descend the decl/block tree which
2867 it is the root of, and for each other ..._DECL or BLOCK node contained
2868 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2869 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2870 values to point to themselves. */
2873 set_block_origin_self (stmt)
2876 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2878 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2883 for (local_decl = BLOCK_VARS (stmt);
2884 local_decl != NULL_TREE;
2885 local_decl = TREE_CHAIN (local_decl))
2886 set_decl_origin_self (local_decl); /* Potential recursion. */
2892 for (subblock = BLOCK_SUBBLOCKS (stmt);
2893 subblock != NULL_TREE;
2894 subblock = BLOCK_CHAIN (subblock))
2895 set_block_origin_self (subblock); /* Recurse. */
2900 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2901 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2902 node so that it points to the node itself, thus indicating that the
2903 node represents its own (abstract) origin.  Additionally, if the
2904 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2905 the decl/block tree of which the given node is the root, and for
2906 each other ..._DECL or BLOCK node contained therein whose
2907 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2908 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2909 point to themselves.  */
2912 set_decl_origin_self (decl)
2915 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2917 DECL_ABSTRACT_ORIGIN (decl) = decl;
2918 if (TREE_CODE (decl) == FUNCTION_DECL)
2922 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2923 DECL_ABSTRACT_ORIGIN (arg) = arg;
2924 if (DECL_INITIAL (decl) != NULL_TREE
2925 && DECL_INITIAL (decl) != error_mark_node)
2926 set_block_origin_self (DECL_INITIAL (decl));
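/* Editorial usage note: a debug writer preparing the abstract
   instance of an inline function would typically call

       set_decl_origin_self (fndecl);

   after which fndecl, its DECL_ARGUMENTS, and every BLOCK and decl
   under DECL_INITIAL (fndecl) are their own abstract origin.  */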
2931 /* Given a pointer to some BLOCK node, and a boolean value to set the
2932 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2933 the given block, and for all local decls and all local sub-blocks
2934 (recursively) which are contained therein. */
2937 set_block_abstract_flags (stmt, setting)
2944 BLOCK_ABSTRACT (stmt) = setting;
2946 for (local_decl = BLOCK_VARS (stmt);
2947 local_decl != NULL_TREE;
2948 local_decl = TREE_CHAIN (local_decl))
2949 set_decl_abstract_flags (local_decl, setting);
2951 for (subblock = BLOCK_SUBBLOCKS (stmt);
2952 subblock != NULL_TREE;
2953 subblock = BLOCK_CHAIN (subblock))
2954 set_block_abstract_flags (subblock, setting);
2957 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2958 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2959 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2960 set the abstract flags for all of the parameters, local vars, local
2961 blocks and sub-blocks (recursively) to the same setting. */
2964 set_decl_abstract_flags (decl, setting)
2968 DECL_ABSTRACT (decl) = setting;
2969 if (TREE_CODE (decl) == FUNCTION_DECL)
2973 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2974 DECL_ABSTRACT (arg) = setting;
2975 if (DECL_INITIAL (decl) != NULL_TREE
2976 && DECL_INITIAL (decl) != error_mark_node)
2977 set_block_abstract_flags (DECL_INITIAL (decl), setting);
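/* Editorial usage sketch, mirroring how the debug back ends use this:

       set_decl_abstract_flags (fndecl, 1);
       ...emit debug info for the abstract inline instance...
       set_decl_abstract_flags (fndecl, 0);

   so DECL_ABSTRACT and BLOCK_ABSTRACT are set only while the abstract
   instance is being written out.  */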
2981 /* Output the assembly language code for the function FNDECL
2982 from its DECL_SAVED_INSNS. Used for inline functions that are output
2983 at the end of compilation instead of where they appeared in the source.  */
2986 output_inline_function (fndecl)
2989 struct function *old_cfun = cfun;
2990 enum debug_info_type old_write_symbols = write_symbols;
2991 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
2992 struct function *f = DECL_SAVED_INSNS (fndecl);
2995 current_function_decl = fndecl;
2997 set_new_last_label_num (f->inl_max_label_num);
2999 /* We're not deferring this any longer. */
3000 DECL_DEFER_OUTPUT (fndecl) = 0;
3002 /* If requested, suppress debugging information. */
3003 if (f->no_debugging_symbols)
3005 write_symbols = NO_DEBUG;
3006 debug_hooks = &do_nothing_debug_hooks;
3009 /* Compile this function all the way down to assembly code. As a
3010 side effect this destroys the saved RTL representation, but
3011 that's okay, because we don't need to inline this anymore. */
3012 rest_of_compilation (fndecl);
3013 DECL_INLINE (fndecl) = 0;
3016 current_function_decl = old_cfun ? old_cfun->decl : 0;
3017 write_symbols = old_write_symbols;
3018 debug_hooks = old_debug_hooks;
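/* Typical call site (editorial sketch; the list name is
   hypothetical): a front end walks its deferred inline functions at
   the end of compilation and emits the ones still needed:

       for (t = deferred_fns; t; t = TREE_CHAIN (t))
         output_inline_function (TREE_VALUE (t));  */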
3022 /* Functions to keep track of the values hard regs had at the start of
3023 the function.  */
3026 get_hard_reg_initial_reg (fun, reg)
3027 struct function *fun;
3030 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3036 for (i = 0; i < ivs->num_entries; i++)
3037 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3038 return ivs->entries[i].hard_reg;
3044 has_func_hard_reg_initial_val (fun, reg)
3045 struct function *fun;
3048 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3054 for (i = 0; i < ivs->num_entries; i++)
3055 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3056 return ivs->entries[i].pseudo;
3062 get_func_hard_reg_initial_val (fun, reg)
3063 struct function *fun;
3066 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3067 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3074 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3075 ivs = fun->hard_reg_initial_vals;
3076 ivs->num_entries = 0;
3077 ivs->max_entries = 5;
3078 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3081 if (ivs->num_entries >= ivs->max_entries)
3083 ivs->max_entries += 5;
3085 (initial_value_pair *) ggc_realloc (ivs->entries,
3087 * sizeof (initial_value_pair));
3090 ivs->entries[ivs->num_entries].hard_reg = reg;
3091 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3093 return ivs->entries[ivs->num_entries++].pseudo;
3097 get_hard_reg_initial_val (mode, regno)
3098 enum machine_mode mode;
3101 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
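/* Editorial example (the register number is hypothetical): a back
   end needing the entry-time value of hard register 65 can write

       rtx t = get_hard_reg_initial_val (Pmode, 65);

   and gets back a pseudo; emit_initial_value_sets later emits the
   copy from the hard register into that pseudo at the start of the
   function.  */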
3105 has_hard_reg_initial_val (mode, regno)
3106 enum machine_mode mode;
3109 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3113 setup_initial_hard_reg_value_integration (inl_f, remap)
3114 struct function *inl_f;
3115 struct inline_remap *remap;
3117 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3123 for (i = 0; i < ivs->num_entries; i ++)
3124 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3125 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3130 emit_initial_value_sets ()
3132 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3140 for (i = 0; i < ivs->num_entries; i++)
3141 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3145 emit_insn_after (seq, get_insns ());
3148 /* If the backend knows where to allocate pseudos for hard
3149 register initial values, register these allocations now. */
3151 allocate_initial_values (reg_equiv_memory_loc)
3152 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3154 #ifdef ALLOCATE_INITIAL_VALUE
3155 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3161 for (i = 0; i < ivs->num_entries; i++)
3163 int regno = REGNO (ivs->entries[i].pseudo);
3164 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3166 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3168 else if (GET_CODE (x) == MEM)
3169 reg_equiv_memory_loc[regno] = x;
3170 else if (GET_CODE (x) == REG)
3172 reg_renumber[regno] = REGNO (x);
3173 /* Poke the regno right into regno_reg_rtx
3174 so that even fixed regs are accepted. */
3175 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3182 #include "gt-integrate.h"