1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "insn-config.h"
31 #include "insn-flags.h"
35 #include "integrate.h"
43 #define obstack_chunk_alloc xmalloc
44 #define obstack_chunk_free free
46 extern struct obstack *function_maybepermanent_obstack;
/* Round VALUE up to the next highest integer that meets the alignment
   ALIGN, which must be a power of two.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
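/* A worked example (illustrative only; the bit-masking trick is exact
   only when ALIGN is a power of two):

     CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 20 & ~7 == 16
     CEIL_ROUND (16, 8) == (16 + 7) & ~7 == 23 & ~7 == 16  */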
52 /* Default max number of insns a function can have and still be inline.
53 This is overridden on RISC machines. */
54 #ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
56 all. Assume 1 instruction for the call and 1.5 insns per argument. */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
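/* Example of the arithmetic (illustrative only): a function with two
   arguments gets a threshold of 1 + (3 * 2) / 2 == 4 insns when
   optimizing for size, but 8 * (8 + 2) == 80 insns otherwise.  */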
63 static rtvec initialize_for_inline PROTO((tree));
64 static void adjust_copied_decl_tree PROTO((tree));
65 static void note_modified_parmregs PROTO((rtx, rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *,
					rtvec));
68 static void integrate_decl_tree PROTO((tree, int,
69 struct inline_remap *));
70 static void subst_constants PROTO((rtx *, rtx,
71 struct inline_remap *));
72 static void set_block_origin_self PROTO((tree));
73 static void set_decl_origin_self PROTO((tree));
74 static void set_block_abstract_flags PROTO((tree, int));
static void process_reg_param PROTO((struct inline_remap *, rtx,
				     rtx));
79 void set_decl_abstract_flags PROTO((tree, int));
80 static tree copy_and_set_decl_abstract_origin PROTO((tree));
82 /* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
84 This affects currently only functions explicitly marked as
85 inline (or methods defined within the class definition for C++).
86 The default value of 10000 is arbitrary but high to match the
87 previously unlimited gcc capabilities. */
89 int inline_max_insns = 10000;
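/* For instance, compiling with -finline-limit-600 lowers this limit to
   600 insns; the value is consumed below when max_insns is computed in
   function_cannot_inline_p.  */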
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function being
   inlined, and is zero otherwise.  */
95 static struct function *inlining = 0;
97 /* Returns the Ith entry in the label_map contained in MAP. If the
98 Ith entry has not yet been set, return a fresh label. This function
99 performs a lazy initialization of label_map, thereby avoiding huge memory
100 explosions when the label_map gets very large. */
rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
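/* A minimal usage sketch (hypothetical caller; MAP is assumed to have
   been set up with a zeroed label_map):

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (orig));
     emit_label (lab);

   Entries are created only on first use, so labels that are never
   referenced cost nothing beyond their zeroed slots.  */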
115 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
116 is safe and reasonable to integrate into other functions.
117 Nonzero means value is a warning msgid with a single %s
118 for the function's name. */
const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
127 /* For functions marked as inline increase the maximum size to
128 inline_max_insns (-finline-limit-<n>). For regular functions
129 use the limit given by INTEGRATE_THRESHOLD. */
  int max_insns = (DECL_INLINE (fndecl))
		   ? (inline_max_insns
		      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		   : INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;
140 /* No inlines with varargs. */
141 if ((last && TREE_VALUE (last) != void_type_node)
142 || current_function_varargs)
143 return N_("varargs function cannot be inline");
145 if (current_function_calls_alloca)
146 return N_("function using alloca cannot be inline");
148 if (current_function_contains_functions)
149 return N_("function with nested functions cannot be inline");
151 if (current_function_cannot_inline)
152 return current_function_cannot_inline;
  /* If it's not even close, don't even look.  */
155 if (get_max_uid () > 3 * max_insns)
156 return N_("function too large to be inline");
159 /* Don't inline functions which do not specify a function prototype and
160 have BLKmode argument or take the address of a parameter. */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
170 /* We can't inline functions that return structures
171 the old-fashioned PCC way, copying into a static block. */
172 if (current_function_returns_pcc_struct)
173 return N_("inline functions not supported for this return value type");
175 /* We can't inline functions that return structures of varying size. */
176 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
177 return N_("function with varying-size return value cannot be inline");
179 /* Cannot inline a function with a varying size argument or one that
180 receives a transparent union. */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
201 /* We will not inline a function which uses computed goto. The addresses of
202 its local labels, which may be tucked into global storage, are of course
203 not constant across instantiations, which causes unexpected behaviour. */
204 if (current_function_has_computed_jump)
    return N_("function with computed jump cannot be inline");
207 /* We cannot inline a nested function that jumps to a nonlocal label. */
208 if (current_function_has_nonlocal_goto)
209 return N_("function with nonlocal goto cannot be inline");
211 /* This is a hack, until the inliner is taught about eh regions at
212 the start of the function. */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    ;
219 if (insn && GET_CODE (insn) == NOTE
220 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
221 return N_("function with complex parameters cannot be inline");
224 /* We can't inline functions that return a PARALLEL rtx. */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
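/* Sketch of how a caller might consume the returned msgid (hypothetical;
   the real caller lives elsewhere in the compiler):

     const char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning (lose, IDENTIFIER_POINTER (DECL_NAME (fndecl)));

   A zero return means the function is safe to integrate.  */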
232 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
233 Zero for a reg that isn't a parm's home.
234 Only reg numbers less than max_parm_reg are mapped here. */
235 static tree *parmdecl_map;
237 /* In save_for_inline, nonzero if past the parm-initialization insns. */
238 static int in_nonparm_insns;
240 /* Subroutine for `save_for_inline_nocopy'. Performs initialization
241 needed to save FNDECL's insns and info for future inline expansion. */
static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;
251 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
252 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
253 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
259 rtx p = DECL_RTL (parms);
261 /* If we have (mem (addressof (mem ...))), use the inner MEM since
262 otherwise the copy_rtx call below will not unshare the MEM since
263 it shares ADDRESSOF. */
264 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
265 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
266 p = XEXP (XEXP (p, 0), 0);
268 RTVEC_ELT (arg_vector, i) = p;
270 if (GET_CODE (p) == REG)
271 parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}
      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
291 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
292 they all point to the new (copied) rtxs. */
static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */
static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}
336 /* Make the insns and PARM_DECLs of the current function permanent
337 and record other information in DECL_SAVED_INSNS to allow inlining
338 of this function in subsequent calls.
340 This routine need not copy any insns because we are not going
341 to immediately compile the insns in the insn chain. There
342 are two cases when we would compile the insns for FNDECL:
343 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
344 be output at the end of other compilation, because somebody took
345 its address. In the first case, the insns of FNDECL are copied
346 as it is expanded inline, so FNDECL's saved insns are not
347 modified. In the second case, FNDECL is used for the last time,
348 so modifying the rtl is not a problem.
350 We don't have to worry about FNDECL being inline expanded by
351 other functions which are written at the end of compilation
352 because flag_no_inline is turned on when we begin writing
353 functions at the end of compilation. */
void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;
363 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
364 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
365 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
366 for the parms, prior to elimination of virtual registers.
367 These values are needed for substituting parms properly. */
369 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
371 /* Make and emit a return-label if we have not already done so. */
  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }
379 argvec = initialize_for_inline (fndecl);
381 /* If there are insns that copy parms from the stack into pseudo registers,
382 those insns are not copied. `expand_inline_function' must
383 emit the correct code to handle such things. */
  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
389 /* Get the insn which signals the end of parameter setup code. */
390 first_nonparm_insn = get_first_nonparm_insn ();
392 /* Now just scan the chain of insns to see what happens to our
393 PARM_DECLs. If a PARM_DECL is used but never modified, we
394 can substitute its rtl directly when expanding inline (and
395 perform constant folding when its incoming value is constant).
396 Otherwise, we have to copy its value into a new register and track
397 the new register's life. */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs);
    }
409 /* We have now allocated all that needs to be allocated permanently
410 on the rtx obstack. Set our high-water mark, so that we
411 can free the rest of this when the time comes. */
415 current_function->inl_max_label_num = max_label_num ();
416 current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
417 current_function->original_arg_vector = argvec;
418 current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;
}
422 /* Note whether a parameter is modified or not. */
static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
436 /* Unfortunately, we need a global copy of const_equiv map for communication
437 with a function called from note_stores. Be *very* careful that this
438 is used properly in the presence of recursion. */
440 varray_type global_const_equiv_varray;
442 #define FIXED_BASE_PLUS_P(X) \
443 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
444 && GET_CODE (XEXP (X, 0)) == REG \
445 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
446 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
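/* For example, FIXED_BASE_PLUS_P matches an address such as
   (plus (reg 53) (const_int 8)) when register 53 is one of the virtual
   registers, e.g. the virtual frame pointer; it does not match
   (plus (reg 53) (reg 54)).  (Register numbers here are illustrative
   only.)  */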
448 /* Called to set up a mapping for the case where a parameter is in a
449 register. If it is read-only and our argument is a constant, set up the
450 constant equivalence.
   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.
455 Also, don't allow hard registers here; they might not be valid when
456 substituted into insns. */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
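/* For example (illustrative register numbers): if the inlined function's
   parameter lived in pseudo 60 and the caller passes the constant 42,
   the argument is first copied into a fresh pseudo, that pseudo replaces
   register 60 via reg_map, and (const_int 42) is recorded as its
   constant equivalence so later substitution can fold uses of the
   parameter.  */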
477 /* Used by duplicate_eh_handlers to map labels for the exception table */
478 static struct inline_remap *eif_eh_map;
rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}
488 /* Integrate the procedure defined by FNDECL. Note that this function
489 may wind up calling itself. Since the static variables are not
490 reentrant, we do not assign them until after the possibility
491 of recursion is eliminated.
493 If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
498 0 if we substituted it and it does not produce a value
499 else an rtx for where the value is stored. */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
510 struct function *inlining_previous;
511 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
512 tree formal, actual, block;
513 rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
522 int min_labelno = inl_f->emit->x_first_label_num;
523 int max_labelno = inl_f->inl_max_label_num;
525 rtx local_return_label = 0;
529 struct inline_remap *map = 0;
533 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
534 rtx static_chain_value = 0;
537 /* The pointer used to track the true location of the memory used
538 for MAP->LABEL_MAP. */
539 rtx *real_label_map = 0;
541 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
542 max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();
546 nargs = list_length (DECL_ARGUMENTS (fndecl));
548 /* Check that the parms type match and that sufficient arguments were
549 passed. Since the appropriate conversions or default promotions have
550 already been applied, the machine modes should match exactly. */
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;
562 arg = TREE_VALUE (actual);
563 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }
575 /* Extra arguments are valid, but will be ignored below, so we must
576 evaluate them here for side-effects. */
577 for (; actual; actual = TREE_CHAIN (actual))
578 expand_expr (TREE_VALUE (actual), const0_rtx,
579 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
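  /* Source-level illustration (hypothetical): if an old-style function
     declared as `int f (a) int a; { ... }' is called as `f (1, g ())',
     the extra argument `g ()' is ignored by the inlined body, but it
     must still be evaluated here for its side effects.  */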
581 /* Make a binding contour to keep inline cleanups called at
582 outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);
586 /* Expand the function arguments. Do this first so that any
587 new registers get created before we allocate the maps. */
589 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
590 arg_trees = (tree *) alloca (nargs * sizeof (tree));
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;
604 loc = RTVEC_ELT (arg_vector, i);
      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;
      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
642 /* If the parameter is not read-only, copy our argument through
643 a register. Also, we cannot use ARG_VALS[I] if it overlaps
644 TARGET in any way. In the inline function, they will likely
645 be two different pseudos, and `safe_from_p' will make all
646 sorts of smart assumptions about their not conflicting.
647 But if ARG_VALS[I] overlaps TARGET, these assumptions are
648 wrong, so put ARG_VALS[I] into a fresh register.
649 Don't worry about invisible references, since their stack
650 temps will never overlap the target. */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
657 /* ??? We must always copy a SUBREG into a REG, because it might
658 get substituted into an address, and not all ports correctly
659 handle SUBREGs in addresses. */
660 || (GET_CODE (arg_vals[i]) == SUBREG)))
661 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }
670 /* Allocate the structures we use to remap things. */
672 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
673 map->fndecl = fndecl;
675 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
676 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
678 /* We used to use alloca here, but the size of what it would try to
679 allocate would occasionally cause it to exceed the stack limit and
680 cause unpredictable core dumps. */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
683 map->label_map = real_label_map;
685 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
686 map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
687 bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
689 map->max_insnno = inl_max_uid;
691 map->integrating = 1;
693 /* const_equiv_varray maps pseudos in our routine to constants, so
694 it needs to be large enough for all our pseudos. This is the
695 number we are currently using plus the number in the called
696 routine, plus 15 for each arg, five to compute the virtual frame
697 pointer, and five for the return value. This should be enough
     for most cases.  We do not reference entries outside the range
     of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */
  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;
714 /* Record the current insn in case we have to set up pointers to frame
715 and argument memory blocks. If there are no insns yet, add a dummy
716 insn that can be used as an insertion point. */
717 map->insns_at_start = get_last_insn ();
718 if (map->insns_at_start == 0)
719 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
721 map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
722 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
727 current_function_outgoing_args_size = inl_f->outgoing_args_size;
729 /* If the inline function needs to make PIC references, that means
730 that this function's PIC offset table must be used. */
731 if (inl_f->uses_pic_offset_table)
732 current_function_uses_pic_offset_table = 1;
734 /* If this function needs a context, set it up. */
735 if (inl_f->needs_context)
736 static_chain_value = lookup_static_chain (fndecl);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
747 /* Process each argument. For each, set up things so that the function's
748 reference to the argument will refer to the argument being passed.
   We only replace REG with REG here.  Any simplifications are done
   via const_equiv_map.
752 We make two passes: In the first, we deal with parameters that will
753 be placed into registers, since we need to ensure that the allocated
754 register number fits in const_equiv_map. Then we store all non-register
755 parameters into their memory location. */
757 /* Don't try to free temp stack slots here, because we may put one of the
758 parameters into a temp stack slot. */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];
764 loc = RTVEC_ELT (arg_vector, i);
766 /* There are three cases, each handled separately. */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
770 /* This must be an object passed by invisible reference (it could
771 also be a variable-sized object, but we forbid inlining functions
772 with variable-sized arguments). COPY is the address of the
773 actual value (this computation will cause it to be copied). We
774 map that address for the register, noting the actual address as
775 an equivalent in case it can be substituted into the insns. */
	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }

	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
786 else if (GET_CODE (loc) == MEM)
788 /* This is the case of a parameter that lives in memory.
789 It will live in the block we allocate in the called routine's
790 frame that simulates the incoming argument area. Do nothing
791 now; we will call store_expr later. */
794 else if (GET_CODE (loc) == REG)
795 process_reg_param (map, loc, copy);
796 else if (GET_CODE (loc) == CONCAT)
798 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
799 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
800 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
801 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
803 process_reg_param (map, locreal, copyreal);
804 process_reg_param (map, locimag, copyimag);
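      /* Illustrative case: a parameter of type `__complex__ double' may
	 live in a CONCAT of two DFmode registers; the real and imaginary
	 halves are then mapped independently, exactly as two ordinary
	 register parameters would be.  */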
810 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
811 specially. This function can be called recursively, so we need to
812 save the previous value. */
813 inlining_previous = inlining;
816 /* Now do the parameters that will be placed in memory. */
  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
821 loc = RTVEC_ELT (arg_vector, i);
823 if (GET_CODE (loc) == MEM
824 /* Exclude case handled above. */
825 && ! (GET_CODE (XEXP (loc, 0)) == REG
826 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;
	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
836 subst_constants (&temp, NULL_RTX, map);
837 apply_change_group ();
838 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
839 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
844 /* Deal with the places that the function puts its result.
845 We are driven by what is placed into DECL_RESULT.
   Initially, we assume that we don't need any special handling for
   REG_FUNCTION_RETURN_VALUE_P.  */
850 map->inline_target = 0;
851 loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
856 else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();
871 /* Pass the function the address in which to return a structure
872 value. Note that a constructor can cause someone to call us
873 with STRUCTURE_VALUE_ADDR, but the initialization takes place
874 via the first parameter, rather than the struct return address.
876 We have two cases: If the address is a simple register
877 indirect, use the mapping mechanism to point that register to
878 our structure return address. Otherwise, store the structure
879 return value into the place that it will be referenced from. */
	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
883 temp = force_operand (structure_value_addr, NULL_RTX);
884 temp = force_reg (Pmode, temp);
885 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
887 if (CONSTANT_P (structure_value_addr)
888 || GET_CODE (structure_value_addr) == ADDRESSOF
889 || (GET_CODE (structure_value_addr) == PLUS
890 && (XEXP (structure_value_addr, 0)
891 == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
				      CONST_AGE_PARM);
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map);
	      subst_constants (&temp, NULL_RTX, map);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
915 /* The function returns an object in a register and we use the return
916 value. Set up our target for remapping. */
918 /* Machine mode function was declared to return. */
919 enum machine_mode departing_mode = TYPE_MODE (type);
920 /* (Possibly wider) machine mode it actually computes
921 (for the sake of callers that fail to declare it right).
922 We have to use the mode of the result's RTL, rather than
923 its type, since expand_function_start may have promoted it. */
924 enum machine_mode arriving_mode
925 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
928 /* Don't use MEMs as direct targets because on some machines
929 substituting a MEM for a REG makes invalid insns.
930 Let the combiner substitute the MEM if that is valid. */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}
951 /* If function's value was promoted before return,
952 avoid machine mode mismatch when we substitute INLINE_TARGET.
953 But TARGET is what we will return to the caller. */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (! TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					   GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;
973 /* Usually, the result value is the machine's return register.
974 Sometimes it may be a pseudo. Handle both cases. */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
983 /* Make a fresh binding contour that we can easily remove. Do this after
984 expanding our arguments so cleanups are properly scoped. */
  pushlevel (0);
  expand_start_bindings (0);
  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();
996 /* Clean up stack so that variables might have smaller offsets. */
997 do_pending_stack_adjust ();
999 /* Save a copy of the location of const_equiv_varray for
1000 mark_stores, called via note_stores. */
1001 global_const_equiv_varray = map->const_equiv_varray;
1003 /* If the called function does an alloca, save and restore the
1004 stack pointer around the call. This saves stack space, but
     also is required if this inline is being done between two
     pushlevels.  */
1007 if (inl_f->calls_alloca)
1008 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1010 /* Now copy the insns one by one. Do this in two passes, first the insns and
1011 then their REG_NOTES, just like save_for_inline. */
1013 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1015 for (insn = insns; insn; insn = NEXT_INSN (insn))
1017 rtx copy, pattern, set;
1019 map->orig_asm_operands_vector = 0;
      switch (GET_CODE (insn))
	{
	case INSN:
1024 pattern = PATTERN (insn);
1025 set = single_set (insn);
1027 if (GET_CODE (pattern) == USE
1028 && GET_CODE (XEXP (pattern, 0)) == REG
1029 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1030 /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;
1035 /* If the inline fn needs eh context, make sure that
1036 the current fn has one. */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();
1041 /* Ignore setting a function value that we don't want to use. */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;
		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
1062 /* If the source and destination are the same and it
1063 has a note on it, keep the insn. */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	      else
		break;
	    }
1071 /* If this is setting the static chain rtx, omit it. */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;
1079 /* If this is setting the static chain pseudo, set it from
1080 the value we want to give it instead. */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */
#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);
#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);
	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
	  copy = emit_call_insn (pattern);
1172 /* Because the USAGE information potentially contains objects other
1173 than hard registers, we need to copy it. */
1174 CALL_INSN_FUNCTION_USAGE (copy)
1175 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);
	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
1201 /* It is important to discard function-end and function-beg notes,
1202 so we have only one of each in the current function.
1203 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1204 deleted these in the copy used for continuing compilation,
1205 not the copy used for inlining). */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }
1267 if (local_return_label)
1268 emit_label (local_return_label);
1270 /* Restore the stack pointer if we saved it above. */
1271 if (inl_f->calls_alloca)
1272 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
1279 inline_function_decl = fndecl;
1280 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1281 integrate_decl_tree (inl_f->original_decl_initial, 0, map);
1282 inline_function_decl = 0;
1284 /* End the scope containing the copied formal parameter variables
1285 and copied LABEL_DECLs. */
1287 expand_end_bindings (getdecls (), 1, 1);
1288 block = poplevel (1, 1, 0);
1289 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1290 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1293 /* Must mark the line number note after inlined functions as a repeat, so
1294 that the test coverage code can avoid counting the call twice. This
1295 just tells the code to ignore the immediately following line note, since
1296 there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
1300 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
1302 emit_line_note (input_filename, lineno);
1304 /* If the function returns a BLKmode object in a register, copy it
1305 out of the temp register into a BLKmode memory object. */
1306 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1307 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1308 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }
  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  inlining = inlining_previous;

  return target;
}
1328 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1329 push all of those decls and give each one the corresponding home. */
static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1347 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1348 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1349 here, but that's going to require some more work. */
1350 /* DECL_INCOMING_RTL (decl) = ?; */
1351 /* These args would always appear unused, if not for this. */
1352 TREE_USED (decl) = 1;
1353 /* Prevent warning for shadowing with these. */
1354 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
      pushdecl (decl);
    }
}
1366 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1367 current function a tree of contexts isomorphic to the one that is given.
1369 LEVEL indicates how far down into the BLOCK tree is the node we are
1370 currently traversing. It is always zero except for recursive calls.
1372 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1373 registers used in the DECL_RTL field should be remapped. If it is zero,
1374 no mapping is necessary. */
static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t;
  tree node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();
      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      pushdecl (d);
    }
1415 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1416 integrate_decl_tree (t, level + 1, map);
  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
	{
	  TREE_USED (node) = TREE_USED (let);
	  BLOCK_ABSTRACT_ORIGIN (node) = let;
	}
    }
}
1429 /* Create a new copy of an rtx.
1430 Recursively copies the operands of the rtx,
1431 except for those few rtx codes that are sharable.
1433 We always return an rtx that is similar to that incoming rtx, with the
1434 exception of possibly changing a REG to a SUBREG or vice versa. No
1435 rtl is ever emitted.
1437 Handle constants that need to be placed in the constant pool by
1438 calling `force_const_mem'. */
rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
1461 /* If the stack pointer register shows up, it must be part of
1462 stack-adjustments (*not* because we eliminated the frame pointer!).
1463 Small hard registers are returned as-is. Pseudo-registers
1464 go through their `reg_map'. */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
1470 /* Some hard registers are also mapped,
1471 but others are not translated. */
1472 if (map->reg_map[regno] != 0)
1473 return map->reg_map[regno];
1475 /* If this is the virtual frame pointer, make space in current
1476 function's stack frame for the stack frame of the inline function.
1478 Copy the address of this area into a pseudo. Map
1479 virtual_stack_vars_rtx to this pseudo and set up a constant
1480 equivalence for it to be the address. This will substitute the
1481 address into insns where it can be substituted and use the new
1482 pseudo where it can't. */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1488 #ifdef FRAME_GROWS_DOWNWARD
1489 /* In this case, virtual_stack_vars_rtx points to one byte
1490 higher than the top of the frame area. So make sure we
1491 allocate a big enough chunk to keep the frame pointer
1492 aligned like a real one. */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
1498 #ifdef FRAME_GROWS_DOWNWARD
1499 /* In this case, virtual_stack_vars_rtx points to one byte
1500 higher than the top of the frame area. So compute the offset
1501 to one byte higher than our substitute frame. */
	      loc = plus_constant (loc, size);
#endif
1504 map->reg_map[regno] = temp
1505 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1507 #ifdef STACK_BOUNDARY
1508 mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif
1512 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
		   || (map->integrating
		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
			   == orig)))
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
1532 /* When arguments grow downward, the virtual incoming
1533 args pointer points to the top of the argument block,
1534 so the remapped location better do the same. */
1535 #ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
1538 map->reg_map[regno] = temp
1539 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1541 #ifdef STACK_BOUNDARY
1542 mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif
1546 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (GET_MODE (map->inline_target) != BLKmode
		       && mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1586 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1587 if (GET_CODE (copy) == SUBREG)
1588 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1589 SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	{
	  rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

	  if (GET_MODE (retval) == GET_MODE (orig))
	    return retval;
	  else
	    return gen_rtx_SUBREG (GET_MODE (orig), retval,
				   (SUBREG_WORD (orig) %
				    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
				     / (unsigned) UNITS_PER_WORD)));
	}
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_WORD (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0), map),
				0, ADDRESSOF_DECL (orig));
1610 regno = ADDRESSOF_REGNO (orig);
1611 if (map->reg_map[regno])
1612 regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1633 to (use foo) if the original insn didn't have a subreg.
1634 Removing the subreg distorts the VAX movstrhi pattern
1635 by changing the mode of an operand. */
1636 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1637 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1638 copy = SUBREG_REG (copy);
1639 return gen_rtx_fmt_e (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1643 = LABEL_PRESERVE_P (orig);
1644 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
    case LABEL_REF:
      copy = gen_rtx_LABEL_REF (mode,
1648 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1649 : get_label_from_map (map,
1650 CODE_LABEL_NUMBER (XEXP (orig, 0))));
1651 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1653 /* The fact that this label was previously nonlocal does not mean
1654 it still is, so we must check if it is within the range of
1655 this function's labels. */
1656 LABEL_REF_NONLOCAL_P (copy)
1657 = (LABEL_REF_NONLOCAL_P (orig)
1658 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1659 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1661 /* If we have made a nonlocal label local, it means that this
1662 inlined call will be referring to our nonlocal goto handler.
1663 So make sure we create one for this block; we normally would
1664 not since this is not otherwise considered a "call". */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
1676 /* Symbols which represent the address of a label stored in the constant
1677 pool must be modified to point to a constant pool entry for the
1678 remapped label. Otherwise, symbols are returned unchanged. */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = inlining ? inlining : current_function;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);

	  if (inlining)
	    {
	      rtx temp = force_const_mem (const_mode,
					  copy_rtx_and_substitute (constant,
								   map));
1689 /* Legitimizing the address here is incorrect.
1691 Since we had a SYMBOL_REF before, we can assume it is valid
1692 to have one in this position in the insn.
1694 Also, change_address may create new registers. These
1695 registers will not have valid reg_map entries. This can
1696 cause try_constants() to fail because assumes that all
1697 registers in the rtx have valid reg_map entries, and it may
1698 end up replacing one of these new registers with junk. */
1700 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1701 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1704 temp = XEXP (temp, 0);
1706 #ifdef POINTERS_EXTEND_UNSIGNED
1707 if (GET_MODE (temp) != GET_MODE (orig))
		temp = convert_memory_address (GET_MODE (orig), temp);
#endif
	      return temp;
	    }
	  else if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem (GET_MODE (orig),
					  copy_rtx_and_substitute (constant,
								   map)),
			 0);
	}
      else if (SYMBOL_REF_NEED_ADJUST (orig))
	{
	  eif_eh_map = map;
	  return rethrow_symbol_map (orig,
				     expand_inline_function_eh_labelmap);
	}

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
1744 /* Make new constant pool entry for a constant
1745 that was in the pool of the inline function. */
      if (RTX_INTEGRATED_P (orig))
	abort ();
      break;

    case ASM_OPERANDS:
1751 /* If a single asm insn contains multiple output operands
1752 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1753 We must make sure that the copied insn continues to share it. */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = map->copy_asm_operands_vector;
	  XVEC (copy, 4) = map->copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;

    case CALL:
1770 /* This is given special treatment because the first
1771 operand of a CALL is a (MEM ...) which may get
1772 forced into a register for cse. This is undesirable
1773 if function-address cse isn't wanted or if we won't do cse. */
1774 #ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return gen_rtx_CALL (GET_MODE (orig),
			     gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
					  copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
								   map)),
			     copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
1790 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1791 Adjust the setting by the offset of the area we made.
1792 If the nonlocal goto is into the current function,
1793 this will result in unnecessarily bad code, but should work. */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig), map),
				loc_offset),
			       NULL_RTX));
	}
      break;

    case MEM:
      if (inlining
	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
	{
	  enum machine_mode const_mode
	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
	  rtx constant
	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));

	  constant = copy_rtx_and_substitute (constant, map);
1824 /* If this was an address of a constant pool entry that itself
1825 had to be placed in the constant pool, it might not be a
1826 valid address. So the recursive call might have turned it
1827 into a register. In that case, it isn't a constant any
1828 more, so return it. This has the potential of changing a
1829 MEM into a REG, but we'll assume that it safe. */
1830 if (! CONSTANT_P (constant))
1832 return validize_mem (force_const_mem (const_mode, constant));
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_COPY_ATTRIBUTES (copy, orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);

      /* If doing function inlining, this MEM might not be const in the
	 function that it is being inlined into, and thus may not be
	 unchanging after function inlining.  Constant pool references are
	 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
	 when they are needed.  */
      if (! map->integrating)
	RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;
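      /* E.g. (a sketch, not from the original sources): a MEM referring
	 to the inline function's pool entry for 3.14159 is re-expressed
	 above, via force_const_mem, as an entry in the current
	 function's own constant pool.  */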
    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
	  break;

	case 'u':
	  /* Change any references to old insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
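/* A minimal usage sketch (illustrative, not part of the original file):
   the shape of the insn-copying loop in a caller such as
   expand_inline_function or the loop unroller.  INSNS and MAP are
   assumed to have been set up by that caller.  */
#if 0
static void
copy_insn_chain_sketch (insns, map)
     rtx insns;
     struct inline_remap *map;
{
  register rtx insn, copy;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN)
      {
	/* Remap registers, labels and constant pool references...  */
	copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map));
	map->insn_map[INSN_UID (insn)] = copy;

	/* ...then substitute and record known constants.  */
	try_constants (copy, map);
      }
}
#endif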
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show that we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif
  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* The following clause is a hack to handle the case where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
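/* Illustrative sketch (not from the original sources): how a recorded
   equivalence is typically consulted later.  REGNO is assumed to be a
   pseudo register set by a copied insn.  This mirrors the REG case of
   subst_constants below.  */
#if 0
static rtx
known_constant_sketch (map, regno)
     struct inline_remap *map;
     int regno;
{
  struct const_equiv_data *p;

  if ((size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
	  p->rtx != 0)
      && p->age >= map->const_age)
    return p->rtx;		/* Equivalence still valid at this age.  */

  return NULL_RTX;
}
#endif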
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used
   to update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a
   SET is a constant, for further propagation, than actually inserting
   constants into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */
static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif
    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;
    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      {
	int regno = REGNO (x);
	struct const_equiv_data *p;

	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	    && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	    && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		p->rtx != 0)
	    && p->age >= map->const_age)
	  validate_change (insn, loc, p->rtx, 1);
      }
      return;
    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.
	     Instead, see what is inside, try to form the new SUBREG and
	     see if that is valid.  We handle two cases: extracting a
	     full word in an integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;
    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map);
		subst_constants (&XEXP (*dest_loc, 2), insn, map);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider
	   mode.  In that case, we are really setting the underlying
	   register to the source converted to the mode of DEST.  So
	   indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }
      }
      return;
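      /* An example of the equivalence recorded above (sketch, not from
	 the original sources): copying "(set (reg 60) (plus (reg 58)
	 (const_int 8)))" where reg 58 is the virtual stack variables
	 register records reg 60 as equivalent to that PLUS, so later
	 references to reg 60 can be rewritten as a frame address.  */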
    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  subst_constants (&XEXP (x, i), insn, map);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 't':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      int j;

	      for (j = 0; j < XVECLEN (x, i); j++)
		subst_constants (&XVECEXP (x, i, j), insn, map);
	    }
	  break;

	default:
	  abort ();
	}
    }
  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);

      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
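  /* E.g. (sketch, not from the original sources):
     (plus:SI (const_int 4) (reg:SI 58)) becomes
     (plus:SI (reg:SI 58) (const_int 4)), the canonical RTL form that
     recog and the simplifiers below expect.  */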
  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      if (op0_mode == MAX_MACHINE_MODE)
	abort ();
      new = simplify_unary_operation (code, GET_MODE (x),
				      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	if (op_mode == VOIDmode)
	  op_mode = GET_MODE (XEXP (x, 1));
	new = simplify_relational_operation (code, op_mode,
					     XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	  new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						 GET_MODE (x)));
#endif
	break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
				       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      if (op0_mode == MAX_MACHINE_MODE)
	abort ();
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					XEXP (x, 0), XEXP (x, 1),
					XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
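/* E.g. (sketch, not from the original sources): for a copied insn
   "(set (reg:SI 65) (reg:SI 66))", note_stores hands mark_stores the
   destination (reg:SI 65) and entry 65 of global_const_equiv_varray is
   invalidated; a SUBREG store to a hard register invalidates every hard
   register it covers.  */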
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree of which
   it is the root, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
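/* E.g. (sketch, not from the original sources): for
   "inline int sq (int x) { return x * x; }", the FUNCTION_DECL for sq,
   its PARM_DECL x, and the BLOCK for its body all end up with abstract
   origins pointing to themselves, giving the debugger one abstract
   instance that out-of-line and inlined copies can refer back to.  */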
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
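/* Typical caller pattern (a sketch, assuming a DWARF-style debug
   writer): the decl tree is marked abstract just while the abstract
   instance is emitted, then unmarked:

	set_decl_abstract_flags (decl, 1);
	... emit debugging info for the abstract instance of DECL ...
	set_decl_abstract_flags (decl, 0);
*/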
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *curf = current_function;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  current_function = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent storage.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */
  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function = curf;
  current_function_decl = curf ? curf->decl : 0;
}
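/* Usage sketch (illustrative only; the real driver lives in toplev.c and
   the front ends): at end of compilation, each deferred inline that is
   still needed is sent through output_inline_function.  The chain walk
   and the "still needed" test here are assumptions, not the actual
   driver logic.  */
#if 0
static void
flush_deferred_inlines_sketch (inlines)
     tree inlines;
{
  register tree decl;

  for (decl = inlines; decl; decl = TREE_CHAIN (decl))
    if (! TREE_ASM_WRITTEN (decl) && TREE_ADDRESSABLE (decl))
      output_inline_function (decl);
}
#endif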