1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-config.h"
29 #include "insn-flags.h"
32 #include "integrate.h"
37 #define obstack_chunk_alloc xmalloc
38 #define obstack_chunk_free free
40 extern struct obstack *function_maybepermanent_obstack;
42 extern tree pushdecl ();
43 extern tree poplevel ();
45 /* Similar, but round to the next highest integer that meets the alignment. */
47 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
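/* Worked example of the rounding above: with an ALIGN of 8,
   CEIL_ROUND (13, 8) yields 16 and CEIL_ROUND (16, 8) yields 16;
   values already at a multiple of the alignment are unchanged.
   The mask trick requires ALIGN to be a power of two.  */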
49 /* Default max number of insns a function can have and still be inline.
50 This is overridden on RISC machines. */
51 #ifndef INTEGRATE_THRESHOLD
52 #define INTEGRATE_THRESHOLD(DECL) \
53 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
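/* For example, a function taking two arguments gets a default budget of
   8 * (8 + 2) = 80 insns; each additional parameter raises the limit by 8.  */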
56 /* Save any constant pool constants in an insn. */
57 static void save_constants ();
59 /* Note when parameter registers are the destination of a SET. */
60 static void note_modified_parmregs ();
62 /* Copy an rtx for save_for_inline_copying. */
63 static rtx copy_for_inline ();
65 /* Make copies of MEMs in DECL_RTLs. */
66 static void copy_decl_rtls ();
68 static tree copy_decl_tree ();
70 /* Return the constant equivalent of a given rtx, or 0 if none. */
71 static rtx const_equiv ();
73 static void integrate_parm_decls ();
74 static void integrate_decl_tree ();
76 static void subst_constants ();
77 static rtx fold_out_const_cc0 ();
79 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
80 is safe and reasonable to integrate into other functions.
81 Nonzero means value is a warning message with a single %s
82 for the function's name. */
85 function_cannot_inline_p (fndecl)
89 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
90 int max_insns = INTEGRATE_THRESHOLD (fndecl);
91 register int ninsns = 0;
94 /* No inlines with varargs. `grokdeclarator' gives a warning
95 message about that if `inline' is specified. This code
96 is put in to catch the volunteers. */
97 if ((last && TREE_VALUE (last) != void_type_node)
98 || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
99 && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
100 "__builtin_va_alist")))
101 return "varargs function cannot be inline";
103 if (current_function_calls_alloca)
104 return "function using alloca cannot be inline";
106 if (current_function_contains_functions)
107 return "function with nested functions cannot be inline";
109 /* This restriction may be eliminated sometime soon. But for now, don't
110 worry about remapping the static chain. */
111 if (current_function_needs_context)
112 return "nested function cannot be inline";
114 /* If it's not even close, don't even look. */
115 if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
116 return "function too large to be inline";
#if 0
119 /* Large stacks are OK now that inlined functions can share them. */
120 /* Don't inline functions with large stack usage,
121 since they can make other recursive functions burn up stack. */
122 if (!TREE_INLINE (fndecl) && get_frame_size () > 100)
123 return "function stack frame for inlining";
#endif
127 /* Don't inline functions which do not specify a function prototype and
128 have BLKmode argument or take the address of a parameter. */
129 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
131 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
132 TREE_ADDRESSABLE (parms) = 1;
133 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
134 return "no prototype, and parameter address used; cannot be inline";
138 /* We can't inline functions that return structures
139 the old-fashioned PCC way, copying into a static block. */
140 if (current_function_returns_pcc_struct)
141 return "inline functions not supported for this return value type";
143 /* We can't inline functions that return structures of varying size. */
144 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
145 return "function with varying-size return value cannot be inline";
147 /* Cannot inline a function with a varying size argument. */
148 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
149 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
150 return "function with varying-size parameter cannot be inline";
152 if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns)
154 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
155 insn = NEXT_INSN (insn))
157 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
161 if (ninsns >= max_insns)
162 return "function too large to be inline";
165 /* We cannot inline this function if forced_labels is non-zero. This
166 implies that a label in this function was used as an initializer.
167 Because labels can not be duplicated, all labels in the function
168 will be renamed when it is inlined. However, there is no way to find
169 and fix all variables initialized with addresses of labels in this
170 function, hence inlining is impossible. */
173 return "function with label addresses used in initializers cannot inline";
178 /* Variables used within save_for_inline. */
180 /* Mapping from old pseudo-register to new pseudo-registers.
181 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
182 It is allocated in `save_for_inline' and `expand_inline_function',
183 and deallocated on exit from each of those routines. */
186 /* Mapping from old code-labels to new code-labels.
187 The first element of this map is label_map[min_labelno].
188 It is allocated in `save_for_inline' and `expand_inline_function',
189 and deallocated on exit from each of those routines. */
190 static rtx *label_map;
192 /* Mapping from old insn uid's to copied insns.
193 It is allocated in `save_for_inline' and `expand_inline_function',
194 and deallocated on exit from each of those routines. */
195 static rtx *insn_map;
197 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
198 Zero for a reg that isn't a parm's home.
199 Only reg numbers less than max_parm_reg are mapped here. */
200 static tree *parmdecl_map;
202 /* Keep track of first pseudo-register beyond those that are parms. */
203 static int max_parm_reg;
205 /* When an insn is being copied by copy_for_inline,
206 this is nonzero if we have copied an ASM_OPERANDS.
207 In that case, it is the original input-operand vector. */
208 static rtvec orig_asm_operands_vector;
210 /* When an insn is being copied by copy_for_inline,
211 this is nonzero if we have copied an ASM_OPERANDS.
212 In that case, it is the copied input-operand vector. */
213 static rtvec copy_asm_operands_vector;
215 /* Likewise, this is the copied constraints vector. */
216 static rtvec copy_asm_constraints_vector;
218 /* In save_for_inline, nonzero if past the parm-initialization insns. */
219 static int in_nonparm_insns;
221 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
222 needed to save FNDECL's insns and info for future inline expansion. */
225 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
232 int function_flags, i;
236 /* Compute the values of any flags we must restore when inlining this. */
239 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
240 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
241 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
242 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
243 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
244 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
245 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
246 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
247 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
248 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
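/* The FUNCTION_FLAGS_* values are distinct single bits and each of the
   globals above is either 0 or 1, so the multiplications simply set or
   clear the corresponding bit in FUNCTION_FLAGS.  A consumer of the saved
   header can later recover a flag with a mask, as expand_inline_function
   does below for the PIC flag; an illustrative (not compiled) sketch: */
#if 0
  if (function_flags & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;
#endif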
250 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
251 bzero (parmdecl_map, max_parm_reg * sizeof (tree));
252 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
254 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
256 parms = TREE_CHAIN (parms), i++)
258 rtx p = DECL_RTL (parms);
260 if (GET_CODE (p) == MEM && copy)
262 /* Copy the rtl so that modifications of the addresses
263 later in compilation won't affect this arg_vector.
264 Virtual register instantiation can screw the address of the rtx. */
266 rtx new = copy_rtx (p);
268 /* Don't leave the old copy anywhere in this decl. */
269 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
270 || (GET_CODE (DECL_RTL (parms)) == MEM
271 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
272 && (XEXP (DECL_RTL (parms), 0)
273 == XEXP (DECL_INCOMING_RTL (parms), 0))))
274 DECL_INCOMING_RTL (parms) = new;
275 DECL_RTL (parms) = new;
278 RTVEC_ELT (arg_vector, i) = p;
280 if (GET_CODE (p) == REG)
281 parmdecl_map[REGNO (p)] = parms;
282 /* This flag is cleared later
283 if the function ever modifies the value of the parm. */
284 TREE_READONLY (parms) = 1;
287 /* Assume we start out in the insns that set up the parameters. */
288 in_nonparm_insns = 0;
290 /* The list of DECL_SAVED_INSNS starts off with a header which
291 contains the following information:
293 the first insn of the function (not including the insns that copy
294 parameters into registers).
295 the first parameter insn of the function,
296 the first label used by that function,
297 the last label used by that function,
298 the highest register number used for parameters,
299 the total number of registers used,
300 the size of the incoming stack area for parameters,
301 the number of bytes popped on return,
the stack slot list,
303 some flags that are used to restore compiler globals,
304 the value of current_function_outgoing_args_size,
305 the original argument vector,
306 and the original DECL_INITIAL. */
308 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
309 max_parm_reg, max_reg,
310 current_function_args_size,
311 current_function_pops_args,
312 stack_slot_list, function_flags,
313 current_function_outgoing_args_size,
314 arg_vector, (rtx) DECL_INITIAL (fndecl));
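/* The fields of this header are read back by the accessor macros used
   later in this file (FIRST_FUNCTION_INSN, FIRST_PARM_INSN, FIRST_LABELNO,
   LAST_LABELNO, MAX_REGNUM, FUNCTION_ARGS_SIZE, FUNCTION_FLAGS,
   OUTGOING_ARGS_SIZE, ORIGINAL_ARG_VECTOR and ORIGINAL_DECL_INITIAL)
   when the function is expanded inline.  */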
317 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
318 things that must be done to make FNDECL expandable as an inline function.
319 HEAD contains the chain of insns to which FNDECL will expand. */
322 finish_inline (fndecl, head)
326 NEXT_INSN (head) = get_first_nonparm_insn ();
327 FIRST_PARM_INSN (head) = get_insns ();
328 DECL_SAVED_INSNS (fndecl) = head;
329 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
330 TREE_INLINE (fndecl) = 1;
333 /* Make the insns and PARM_DECLs of the current function permanent
334 and record other information in DECL_SAVED_INSNS to allow inlining
335 of this function in subsequent calls.
337 This function is called when we are going to immediately compile
338 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
339 modified by the compilation process, so we copy all of them to
340 new storage and consider the new insns to be the insn chain to be
344 save_for_inline_copying (fndecl)
347 rtx first_insn, last_insn, insn;
349 int max_labelno, min_labelno, i, len;
352 rtx first_nonparm_insn;
354 /* Make and emit a return-label if we have not already done so.
355 Do this before recording the bounds on label numbers. */
357 if (return_label == 0)
359 return_label = gen_label_rtx ();
360 emit_label (return_label);
363 /* Get some bounds on the labels and registers used. */
365 max_labelno = max_label_num ();
366 min_labelno = get_first_label_num ();
367 max_reg = max_reg_num ();
369 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
370 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
371 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
372 for the parms, prior to elimination of virtual registers.
373 These values are needed for substituting parms properly. */
375 max_parm_reg = max_parm_reg_num ();
376 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
378 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
380 if (current_function_uses_const_pool)
382 /* Replace any constant pool references with the actual constant. We
383 will put the constants back in the copy made below. */
384 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
385 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
387 save_constants (&PATTERN (insn));
388 if (REG_NOTES (insn))
389 save_constants (&REG_NOTES (insn));
392 /* Clear out the constant pool so that we can recreate it with the
393 copied constants below. */
394 init_const_rtx_hash_table ();
395 clear_const_double_mem ();
398 max_uid = INSN_UID (head);
400 /* We have now allocated all that needs to be allocated permanently
401 on the rtx obstack. Set our high-water mark, so that we
402 can free the rest of this when the time comes. */
406 /* Copy the chain insns of this function.
407 Install the copied chain as the insns of this function,
408 for continued compilation;
409 the original chain is recorded as the DECL_SAVED_INSNS
410 for inlining future calls. */
412 /* If there are insns that copy parms from the stack into pseudo registers,
413 those insns are not copied. `expand_inline_function' must
414 emit the correct code to handle such things. */
417 if (GET_CODE (insn) != NOTE)
419 first_insn = rtx_alloc (NOTE);
420 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
421 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
422 INSN_UID (first_insn) = INSN_UID (insn);
423 PREV_INSN (first_insn) = NULL;
424 NEXT_INSN (first_insn) = NULL;
425 last_insn = first_insn;
427 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
428 Make these new rtx's now, and install them in regno_reg_rtx, so they
429 will be the official pseudo-reg rtx's for the rest of compilation. */
431 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
433 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
434 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
435 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
436 regno_reg_rtx[i], len);
438 bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
439 regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
440 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
442 /* Likewise each label rtx must have a unique rtx as its copy. */
444 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
445 label_map -= min_labelno;
447 for (i = min_labelno; i < max_labelno; i++)
448 label_map[i] = gen_label_rtx ();
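/* Biasing the pointer by min_labelno above lets the map be indexed
   directly by CODE_LABEL_NUMBER: if min_labelno is, say, 5, then
   label_map[5] refers to element 0 of the block obtained from alloca.
   The same trick is used for map->label_map in expand_inline_function.  */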
450 /* Record the mapping of old insns to copied insns. */
452 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
453 bzero (insn_map, max_uid * sizeof (rtx));
455 /* Get the insn which signals the end of parameter setup code. */
456 first_nonparm_insn = get_first_nonparm_insn ();
458 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
459 (the former occurs when a variable has its address taken)
460 since these may be shared and can be changed by virtual
461 register instantiation. DECL_RTL values for our arguments
462 have already been copied by initialize_for_inline. */
463 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
464 if (GET_CODE (regno_reg_rtx[i]) == MEM)
465 XEXP (regno_reg_rtx[i], 0)
466 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
468 /* Copy the tree of subblocks of the function, and the decls in them.
469 We will use the copy for compiling this function, then restore the original
470 subblocks and decls for use when inlining this function.
472 Several parts of the compiler modify BLOCK trees. In particular,
473 instantiate_virtual_regs will instantiate any virtual regs
474 mentioned in the DECL_RTLs of the decls, and loop
475 unrolling will replicate any BLOCK trees inside an unrolled loop.
477 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
478 which we will use for inlining. The rtl might even contain pseudoregs
479 whose space has been freed. */
481 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
483 /* Now copy each DECL_RTL which is a MEM,
484 so it is safe to modify their addresses. */
485 copy_decl_rtls (DECL_INITIAL (fndecl));
487 /* Now copy the chain of insns. Do this in two passes. The first pass copies
488 each insn and its body; the second pass copies the REG_NOTES. This is because
489 a REG_NOTE may have a forward pointer to another insn. */
491 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
493 orig_asm_operands_vector = 0;
495 if (insn == first_nonparm_insn)
496 in_nonparm_insns = 1;
498 switch (GET_CODE (insn))
501 /* No need to keep these. */
502 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
505 copy = rtx_alloc (NOTE);
506 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
507 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
513 copy = rtx_alloc (GET_CODE (insn));
514 PATTERN (copy) = copy_for_inline (PATTERN (insn));
515 INSN_CODE (copy) = -1;
516 LOG_LINKS (copy) = NULL;
517 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
521 copy = label_map[CODE_LABEL_NUMBER (insn)];
522 LABEL_NAME (copy) = LABEL_NAME (insn);
526 copy = rtx_alloc (BARRIER);
532 INSN_UID (copy) = INSN_UID (insn);
533 insn_map[INSN_UID (insn)] = copy;
534 NEXT_INSN (last_insn) = copy;
535 PREV_INSN (copy) = last_insn;
539 /* Now copy the REG_NOTES. */
540 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
541 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
542 && insn_map[INSN_UID(insn)])
543 REG_NOTES (insn_map[INSN_UID (insn)])
544 = copy_for_inline (REG_NOTES (insn));
546 NEXT_INSN (last_insn) = NULL;
548 finish_inline (fndecl, head);
550 set_new_first_and_last_insn (first_insn, last_insn);
553 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
556 copy_decl_tree (block)
559 tree t, vars, subblocks;
561 vars = copy_list (BLOCK_VARS (block));
564 /* Process all subblocks. */
565 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
567 tree copy = copy_decl_tree (t);
568 TREE_CHAIN (copy) = subblocks;
572 t = copy_node (block);
573 BLOCK_VARS (t) = vars;
574 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
578 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
581 copy_decl_rtls (block)
586 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
587 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
588 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
590 /* Process all subblocks. */
591 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
595 /* Make the insns and PARM_DECLs of the current function permanent
596 and record other information in DECL_SAVED_INSNS to allow inlining
597 of this function in subsequent calls.
599 This routine need not copy any insns because we are not going
600 to immediately compile the insns in the insn chain. There
601 are two cases when we would compile the insns for FNDECL:
602 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
603 be output at the end of other compilation, because somebody took
604 its address. In the first case, the insns of FNDECL are copied
605 as it is expanded inline, so FNDECL's saved insns are not
606 modified. In the second case, FNDECL is used for the last time,
607 so modifying the rtl is not a problem.
609 ??? Actually, we do not verify that FNDECL is not inline expanded
610 by other functions which must also be written down at the end
611 of compilation. We could set flag_no_inline to nonzero when
612 the time comes to write down such functions. */
615 save_for_inline_nocopy (fndecl)
621 int max_labelno, min_labelno, i, len;
624 rtx first_nonparm_insn;
627 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
628 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
629 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
630 for the parms, prior to elimination of virtual registers.
631 These values are needed for substituting parms properly. */
633 max_parm_reg = max_parm_reg_num ();
634 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
636 /* Make and emit a return-label if we have not already done so. */
638 if (return_label == 0)
640 return_label = gen_label_rtx ();
641 emit_label (return_label);
644 head = initialize_for_inline (fndecl, get_first_label_num (),
645 max_label_num (), max_reg_num (), 0);
647 /* If there are insns that copy parms from the stack into pseudo registers,
648 those insns are not copied. `expand_inline_function' must
649 emit the correct code to handle such things. */
652 if (GET_CODE (insn) != NOTE)
655 /* Get the insn which signals the end of parameter setup code. */
656 first_nonparm_insn = get_first_nonparm_insn ();
658 /* Now just scan the chain of insns to see what happens to our
659 PARM_DECLs. If a PARM_DECL is used but never modified, we
660 can substitute its rtl directly when expanding inline (and
661 perform constant folding when its incoming value is constant).
662 Otherwise, we have to copy its value into a new register and track
663 the new register's life. */
665 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
667 if (insn == first_nonparm_insn)
668 in_nonparm_insns = 1;
670 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
672 if (current_function_uses_const_pool)
674 /* Replace any constant pool references with the actual constant.
675 We will put the constant back if we need to write the
676 function out after all. */
677 save_constants (&PATTERN (insn));
678 if (REG_NOTES (insn))
679 save_constants (&REG_NOTES (insn));
682 /* Record what interesting things happen to our parameters. */
683 note_stores (PATTERN (insn), note_modified_parmregs);
687 /* We have now allocated all that needs to be allocated permanently
688 on the rtx obstack. Set our high-water mark, so that we
689 can free the rest of this when the time comes. */
693 finish_inline (fndecl, head);
696 /* Given PX, a pointer into an insn, search for references to the constant
697 pool. Replace each with a CONST that has the mode of the original
698 constant, contains the constant, and has RTX_INTEGRATED_P set.
699 Similarly, constant pool addresses not enclosed in a MEM are replaced
700 with an ADDRESS rtx which also gives the constant, mode, and has
701 RTX_INTEGRATED_P set. */
713 /* If this is a CONST_DOUBLE, don't try to fix things up in
714 CONST_DOUBLE_MEM, because this is an infinite recursion. */
715 if (GET_CODE (x) == CONST_DOUBLE)
717 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
718 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
720 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
721 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
722 RTX_INTEGRATED_P (new) = 1;
724 /* If the MEM was in a different mode than the constant (perhaps we
725 were only looking at the low-order part), surround it with a
726 SUBREG so we can save both modes. */
728 if (GET_MODE (x) != const_mode)
730 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
731 RTX_INTEGRATED_P (new) = 1;
735 save_constants (&XEXP (*px, 0));
737 else if (GET_CODE (x) == SYMBOL_REF
738 && CONSTANT_POOL_ADDRESS_P (x))
740 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
741 save_constants (&XEXP (*px, 0));
742 RTX_INTEGRATED_P (*px) = 1;
747 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
748 int len = GET_RTX_LENGTH (GET_CODE (x));
750 for (i = len-1; i >= 0; i--)
755 for (j = 0; j < XVECLEN (x, i); j++)
756 save_constants (&XVECEXP (x, i, j));
760 if (XEXP (x, i) == 0)
764 /* Hack tail-recursion here. */
768 save_constants (&XEXP (x, i));
775 /* Note whether a parameter is modified or not. */
778 note_modified_parmregs (reg, x)
782 if (GET_CODE (reg) == REG && in_nonparm_insns
783 && REGNO (reg) < max_parm_reg
784 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
785 && parmdecl_map[REGNO (reg)] != 0)
786 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
789 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
790 according to `reg_map' and `label_map'. The original rtl insns
791 will be saved for inlining; this is used to make a copy
792 which is used to finish compiling the inline function itself.
794 If we find a "saved" constant pool entry, one which was replaced with
795 the value of the constant, convert it back to a constant pool entry.
796 Since the pool wasn't touched, this should simply restore the old address.
799 All other kinds of rtx are copied except those that can never be
800 changed during compilation. */
803 copy_for_inline (orig)
806 register rtx x = orig;
808 register enum rtx_code code;
809 register char *format_ptr;
816 /* These types may be freely shared. */
828 /* We have to make a new CONST_DOUBLE to ensure that we account for
829 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
830 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
834 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
835 return immed_real_const_1 (d, GET_MODE (x));
838 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
842 /* Get constant pool entry for constant in the pool. */
843 if (RTX_INTEGRATED_P (x))
844 return validize_mem (force_const_mem (GET_MODE (x),
845 copy_for_inline (XEXP (x, 0))));
849 /* Get constant pool entry, but access in different mode. */
850 if (RTX_INTEGRATED_P (x))
853 = force_const_mem (GET_MODE (SUBREG_REG (x)),
854 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
856 PUT_MODE (new, GET_MODE (x));
857 return validize_mem (new);
862 /* If not special for the constant pool, this is an error. Else get the constant pool address. */
864 if (! RTX_INTEGRATED_P (x))
abort ();
867 return XEXP (force_const_mem (GET_MODE (x),
868 copy_for_inline (XEXP (x, 0))), 0);
871 /* If a single asm insn contains multiple output operands
872 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
873 We must make sure that the copied insn continues to share it. */
874 if (orig_asm_operands_vector == XVEC (orig, 3))
876 x = rtx_alloc (ASM_OPERANDS);
877 XSTR (x, 0) = XSTR (orig, 0);
878 XSTR (x, 1) = XSTR (orig, 1);
879 XINT (x, 2) = XINT (orig, 2);
880 XVEC (x, 3) = copy_asm_operands_vector;
881 XVEC (x, 4) = copy_asm_constraints_vector;
882 XSTR (x, 5) = XSTR (orig, 5);
883 XINT (x, 6) = XINT (orig, 6);
889 /* A MEM is usually allowed to be shared if its address is constant
890 or is a constant plus one of the special registers.
892 We do not allow sharing of addresses that are either a special
893 register or the sum of a constant and a special register because
894 it is possible for unshare_all_rtl to copy the address, into memory
895 that won't be saved. Although the MEM can safely be shared, and
896 won't be copied there, the address itself cannot be shared, and may need to be copied.
899 There are also two exceptions with constants: The first is if the
900 constant is a LABEL_REF or the sum of the LABEL_REF
901 and an integer. This case can happen if we have an inline
902 function that supplies a constant operand to the call of another
903 inline function that uses it in a switch statement. In this case,
904 we will be replacing the LABEL_REF, so we have to replace this MEM as well.
907 The second case is if we have a (const (plus (address ..) ...)).
908 In that case we need to put back the address of the constant pool entry. */
911 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
912 && GET_CODE (XEXP (x, 0)) != LABEL_REF
913 && ! (GET_CODE (XEXP (x, 0)) == CONST
914 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
915 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
917 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
924 /* Must point to the new insn. */
925 return gen_rtx (LABEL_REF, GET_MODE (orig),
926 label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
930 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
931 return reg_map [REGNO (x)];
936 /* If a parm that gets modified lives in a pseudo-reg,
937 clear its TREE_READONLY to prevent certain optimizations. */
939 rtx dest = SET_DEST (x);
941 while (GET_CODE (dest) == STRICT_LOW_PART
942 || GET_CODE (dest) == ZERO_EXTRACT
943 || GET_CODE (dest) == SUBREG)
944 dest = XEXP (dest, 0);
946 if (GET_CODE (dest) == REG
947 && REGNO (dest) < max_parm_reg
948 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
949 && parmdecl_map[REGNO (dest)] != 0
950 /* The insn to load an arg pseudo from a stack slot
951 does not count as modifying it. */
953 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
957 #if 0 /* This is a good idea, but here is the wrong place for it. */
958 /* Arrange that CONST_INTs always appear as the second operand
959 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
960 always appear as the first. */
962 if (GET_CODE (XEXP (x, 0)) == CONST_INT
963 || (XEXP (x, 1) == frame_pointer_rtx
964 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
965 && XEXP (x, 1) == arg_pointer_rtx)))
968 XEXP (x, 0) = XEXP (x, 1);
975 /* Replace this rtx with a copy of itself. */
977 x = rtx_alloc (code);
978 bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
979 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
981 /* Now scan the subexpressions recursively.
982 We can store any replaced subexpressions directly into X
983 since we know X is not shared! Any vectors in X
984 must be copied if X was copied. */
986 format_ptr = GET_RTX_FORMAT (code);
988 for (i = 0; i < GET_RTX_LENGTH (code); i++)
990 switch (*format_ptr++)
993 XEXP (x, i) = copy_for_inline (XEXP (x, i));
997 /* Change any references to old-insns to point to the
998 corresponding copied insns. */
999 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1003 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1007 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1008 for (j = 0; j < XVECLEN (x, i); j++)
1010 = copy_for_inline (XVECEXP (x, i, j));
1016 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1018 orig_asm_operands_vector = XVEC (orig, 3);
1019 copy_asm_operands_vector = XVEC (x, 3);
1020 copy_asm_constraints_vector = XVEC (x, 4);
1026 /* Unfortunately, we need a global copy of const_equiv map for communication
1027 with a function called from note_stores. Be *very* careful that this
1028 is used properly in the presence of recursion. */
1030 rtx *global_const_equiv_map;
1032 #define FIXED_BASE_PLUS_P(X) \
1033 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1034 && GET_CODE (XEXP (X, 0)) == REG \
1035 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1036 && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
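/* For example, (plus (reg:SI VIRTUAL_STACK_VARS_REGNUM) (const_int 4))
   satisfies FIXED_BASE_PLUS_P, while a PLUS whose register operand is an
   ordinary pseudo does not, since only the virtual registers fall in the
   FIRST_VIRTUAL_REGISTER..LAST_VIRTUAL_REGISTER range tested above.  */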
1038 /* Integrate the procedure defined by FNDECL. Note that this function
1039 may wind up calling itself. Since the static variables are not
1040 reentrant, we do not assign them until after the possibility
1041 of recursion is eliminated.
1043 If IGNORE is nonzero, do not produce a value.
1044 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1047 (rtx)-1 if we could not substitute the function
1048 0 if we substituted it and it does not produce a value
1049 else an rtx for where the value is stored. */
1052 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1057 rtx structure_value_addr;
1059 tree formal, actual;
1060 rtx header = DECL_SAVED_INSNS (fndecl);
1061 rtx insns = FIRST_FUNCTION_INSN (header);
1062 rtx parm_insns = FIRST_PARM_INSN (header);
1068 int min_labelno = FIRST_LABELNO (header);
1069 int max_labelno = LAST_LABELNO (header);
1071 rtx local_return_label = 0;
1074 struct inline_remap *map;
1076 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1078 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1079 max_regno = MAX_REGNUM (header) + 3;
1080 if (max_regno < FIRST_PSEUDO_REGISTER)
1083 nargs = list_length (DECL_ARGUMENTS (fndecl));
1085 /* We expect PARMS to have the right length; don't crash if not. */
1086 if (list_length (parms) != nargs)
1088 /* Also check that the parm types match. Since the appropriate
1089 conversions or default promotions have already been applied,
1090 the machine modes should match exactly. */
1091 for (formal = DECL_ARGUMENTS (fndecl),
1094 formal = TREE_CHAIN (formal),
1095 actual = TREE_CHAIN (actual))
1097 tree arg = TREE_VALUE (actual);
1098 enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1099 if (mode != TYPE_MODE (TREE_TYPE (arg)))
1101 /* If they are block mode, the types should match exactly.
1102 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1103 which could happen if the parameter has incomplete type. */
1104 if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
1108 /* Make a binding contour to keep inline cleanups called at
1109 outer function-scope level from looking like they are shadowing
1110 parameter declarations. */
1113 /* Make a fresh binding contour that we can easily remove. */
1115 expand_start_bindings (0);
1116 if (GET_CODE (parm_insns) == NOTE
1117 && NOTE_LINE_NUMBER (parm_insns) > 0)
1118 emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));
1120 /* Expand the function arguments. Do this first so that any
1121 new registers get created before we allocate the maps. */
1123 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1124 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1126 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1128 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1130 /* Actual parameter, converted to the type of the argument within the function. */
1132 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1133 /* Mode of the variable used within the function. */
1134 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1135 /* Where parameter is located in the function. */
1138 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1141 loc = RTVEC_ELT (arg_vector, i);
1143 /* If this is an object passed by invisible reference, we copy the
1144 object into a stack slot and save its address. If this will go
1145 into memory, we do nothing now. Otherwise, we just expand the argument. */
1147 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1148 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1150 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
1151 rtx stack_slot = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1);
1153 store_expr (arg, stack_slot, 0);
1155 arg_vals[i] = XEXP (stack_slot, 0);
1157 else if (GET_CODE (loc) != MEM)
1158 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1162 if (arg_vals[i] != 0
1163 && (! TREE_READONLY (formal)
1164 /* If the parameter is not read-only, copy our argument through
1165 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1166 TARGET in any way. In the inline function, they will likely
1167 be two different pseudos, and `safe_from_p' will make all
1168 sorts of smart assumptions about their not conflicting.
1169 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1170 wrong, so put ARG_VALS[I] into a fresh register. */
1172 && (GET_CODE (arg_vals[i]) == REG
1173 || GET_CODE (arg_vals[i]) == SUBREG
1174 || GET_CODE (arg_vals[i]) == MEM)
1175 && reg_overlap_mentioned_p (arg_vals[i], target))))
1176 arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]);
1179 /* Allocate the structures we use to remap things. */
1181 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1182 map->fndecl = fndecl;
1184 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1185 bzero (map->reg_map, max_regno * sizeof (rtx));
1187 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1188 map->label_map -= min_labelno;
1190 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1191 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1192 map->min_insnno = 0;
1193 map->max_insnno = INSN_UID (header);
1195 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1196 be large enough for all our pseudos. This is the number we are currently
1197 using plus the number in the called routine, plus 15 for each arg,
1198 five to compute the virtual frame pointer, and five for the return value.
1199 This should be enough for most cases. We do not reference entries
1200 outside the range of the map.
1202 ??? These numbers are quite arbitrary and were obtained by
1203 experimentation. At some point, we should try to allocate the
1204 table after all the parameters are set up so we can more accurately
1205 estimate the number of pseudos we will need. */
1207 map->const_equiv_map_size
1208 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1210 map->const_equiv_map
1211 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1212 bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));
1215 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1216 bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
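/* As an illustration with made-up numbers: inlining a two-argument
   function when the caller currently has 100 regs (max_reg_num ()) and
   the callee used 40 pseudos beyond FIRST_PSEUDO_REGISTER gives
   100 + 40 + 15*2 + 10 = 180 entries in const_equiv_map.  */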
1219 /* Record the current insn in case we have to set up pointers to frame
1220 and argument memory blocks. */
1221 map->insns_at_start = get_last_insn ();
1223 /* Update the outgoing argument size to allow for those in the inlined function. */
1225 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1226 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1228 /* If the inline function needs to make PIC references, that means
1229 that this function's PIC offset table must be used. */
1230 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1231 current_function_uses_pic_offset_table = 1;
1233 /* Process each argument. For each, set up things so that the function's
1234 reference to the argument will refer to the argument being passed.
1235 We only replace REG with REG here. Any simplifications are done
1236 via const_equiv_map.
1238 We make two passes: In the first, we deal with parameters that will
1239 be placed into registers, since we need to ensure that the allocated
1240 register number fits in const_equiv_map. Then we store all non-register
1241 parameters into their memory location. */
1243 for (i = 0; i < nargs; i++)
1245 rtx copy = arg_vals[i];
1247 loc = RTVEC_ELT (arg_vector, i);
1249 /* There are three cases, each handled separately. */
1250 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1251 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1253 /* This must be an object passed by invisible reference (it could
1254 also be a variable-sized object, but we forbid inlining functions
1255 with variable-sized arguments). COPY is the address of the
1256 actual value (this computation will cause it to be copied). We
1257 map that address for the register, noting the actual address as
1258 an equivalent in case it can be substituted into the insns. */
1260 if (GET_CODE (copy) != REG)
1262 temp = copy_addr_to_reg (copy);
1263 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1265 map->const_equiv_map[REGNO (temp)] = copy;
1266 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1270 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1272 else if (GET_CODE (loc) == MEM)
1274 /* This is the case of a parameter that lives in memory.
1275 It will live in the block we allocate in the called routine's
1276 frame that simulates the incoming argument area. Do nothing
1277 now; we will call store_expr later. */
1280 else if (GET_CODE (loc) == REG)
1282 /* This is the good case where the parameter is in a register.
1283 If it is read-only and our argument is a constant, set up the
1284 constant equivalence. */
1285 if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1287 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1288 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1290 map->const_equiv_map[REGNO (temp)] = copy;
1291 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1295 map->reg_map[REGNO (loc)] = copy;
1300 /* Free any temporaries we made setting up this parameter. */
1304 /* Now do the parameters that will be placed in memory. */
1306 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1307 formal; formal = TREE_CHAIN (formal), i++)
1309 rtx copy = arg_vals[i];
1311 loc = RTVEC_ELT (arg_vector, i);
1313 if (GET_CODE (loc) == MEM
1314 /* Exclude case handled above. */
1315 && ! (GET_CODE (XEXP (loc, 0)) == REG
1316 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1318 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1320 /* Compute the address in the area we reserved and store the value there. */
1322 temp = copy_rtx_and_substitute (loc, map);
1323 subst_constants (&temp, NULL_RTX, map);
1324 apply_change_group ();
1325 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1326 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1327 store_expr (arg_trees[i], temp, 0);
1329 /* Free any temporaries we made setting up this parameter. */
1334 /* Deal with the places that the function puts its result.
1335 We are driven by what is placed into DECL_RESULT.
1337 Initially, we assume that we don't have any special handling for
1338 REG_FUNCTION_RETURN_VALUE_P. */
1340 map->inline_target = 0;
1341 loc = DECL_RTL (DECL_RESULT (fndecl));
1342 if (TYPE_MODE (type) == VOIDmode)
1343 /* There is no return value to worry about. */
1345 else if (GET_CODE (loc) == MEM)
1347 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1350 /* Pass the function the address in which to return a structure value.
1351 Note that a constructor can cause someone to call us with
1352 STRUCTURE_VALUE_ADDR, but the initialization takes place
1353 via the first parameter, rather than the struct return address.
1355 We have two cases: If the address is a simple register indirect,
1356 use the mapping mechanism to point that register to our structure
1357 return address. Otherwise, store the structure return value into
1358 the place that it will be referenced from. */
1360 if (GET_CODE (XEXP (loc, 0)) == REG)
1362 temp = force_reg (Pmode, structure_value_addr);
1363 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1364 if (CONSTANT_P (structure_value_addr)
1365 || (GET_CODE (structure_value_addr) == PLUS
1366 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1367 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1369 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1370 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1375 temp = copy_rtx_and_substitute (loc, map);
1376 subst_constants (&temp, NULL_RTX, map);
1377 apply_change_group ();
1378 emit_move_insn (temp, structure_value_addr);
1382 /* We will ignore the result value, so don't look at its structure.
1383 Note that preparations for an aggregate return value
1384 do need to be made (above) even if it will be ignored. */
1386 else if (GET_CODE (loc) == REG)
1388 /* The function returns an object in a register and we use the return
1389 value. Set up our target for remapping. */
1391 /* Machine mode function was declared to return. */
1392 enum machine_mode departing_mode = TYPE_MODE (type);
1393 /* (Possibly wider) machine mode it actually computes
1394 (for the sake of callers that fail to declare it right). */
1395 enum machine_mode arriving_mode
1396 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1399 /* Don't use MEMs as direct targets because on some machines
1400 substituting a MEM for a REG makes invalid insns.
1401 Let the combiner substitute the MEM if that is valid. */
1402 if (target == 0 || GET_CODE (target) != REG
1403 || GET_MODE (target) != departing_mode)
1404 target = gen_reg_rtx (departing_mode);
1406 /* If function's value was promoted before return,
1407 avoid machine mode mismatch when we substitute INLINE_TARGET.
1408 But TARGET is what we will return to the caller. */
1409 if (arriving_mode != departing_mode)
1410 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1412 reg_to_map = target;
1414 /* Usually, the result value is the machine's return register.
1415 Sometimes it may be a pseudo. Handle both cases. */
1416 if (REG_FUNCTION_VALUE_P (loc))
1417 map->inline_target = reg_to_map;
1419 map->reg_map[REGNO (loc)] = reg_to_map;
1422 /* Make new label equivalences for the labels in the called function. */
1423 for (i = min_labelno; i < max_labelno; i++)
1424 map->label_map[i] = gen_label_rtx ();
1426 /* Perform postincrements before actually calling the function. */
1429 /* Clean up stack so that variables might have smaller offsets. */
1430 do_pending_stack_adjust ();
1432 /* Save a copy of the location of const_equiv_map for mark_stores, called via note_stores. */
1434 global_const_equiv_map = map->const_equiv_map;
1436 /* Now copy the insns one by one. Do this in two passes, first the insns and
1437 then their REG_NOTES, just like save_for_inline. */
1439 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1441 for (insn = insns; insn; insn = NEXT_INSN (insn))
1445 map->orig_asm_operands_vector = 0;
1447 switch (GET_CODE (insn))
1450 pattern = PATTERN (insn);
1452 if (GET_CODE (pattern) == USE
1453 && GET_CODE (XEXP (pattern, 0)) == REG
1454 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1455 /* The (USE (REG n)) at return from the function should
1456 be ignored since we are changing (REG n) into inline_target. */
1460 /* Ignore setting a function value that we don't want to use. */
1461 if (map->inline_target == 0
1462 && GET_CODE (pattern) == SET
1463 && GET_CODE (SET_DEST (pattern)) == REG
1464 && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
1466 if (volatile_refs_p (SET_SRC (pattern)))
1468 /* If we must not delete the source,
1469 load it into a new temporary. */
1470 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1471 SET_DEST (PATTERN (copy))
1472 = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (copy))));
1478 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1479 /* REG_NOTES will be copied later. */
1482 /* If this insn is setting CC0, it may need to look at
1483 the insn that uses CC0 to see what type of insn it is.
1484 In that case, the call to recog via validate_change will
1485 fail. So don't substitute constants here. Instead,
1486 do it when we emit the following insn.
1488 For example, see the pyr.md file. That machine has signed and
1489 unsigned compares. The compare patterns must check the
1490 following branch insn to see what kind of compare to emit.
1493 If the previous insn set CC0, substitute constants on it as well. */
1495 if (sets_cc0_p (PATTERN (copy)) != 0)
1500 try_constants (cc0_insn, map);
1502 try_constants (copy, map);
1505 try_constants (copy, map);
1510 if (GET_CODE (PATTERN (insn)) == RETURN)
1512 if (local_return_label == 0)
1513 local_return_label = gen_label_rtx ();
1514 pattern = gen_jump (local_return_label);
1517 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1519 copy = emit_jump_insn (pattern);
1523 try_constants (cc0_insn, map);
1526 try_constants (copy, map);
1528 /* If this used to be a conditional jump insn whose branch
1529 direction is now known, we must do something special. */
1530 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1533 /* The previous insn set cc0 for us. So delete it. */
1534 delete_insn (PREV_INSN (copy));
1537 /* If this is now a no-op, delete it. */
1538 if (map->last_pc_value == pc_rtx)
1544 /* Otherwise, this is unconditional jump so we must put a
1545 BARRIER after it. We could do some dead code elimination
1546 here, but jump.c will do it just as well. */
1552 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1553 copy = emit_call_insn (pattern);
1557 try_constants (cc0_insn, map);
1560 try_constants (copy, map);
1562 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1563 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1564 map->const_equiv_map[i] = 0;
1568 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1569 LABEL_NAME (copy) = LABEL_NAME (insn);
1574 copy = emit_barrier ();
1578 /* It is important to discard function-end and function-beg notes,
1579 so we have only one of each in the current function.
1580 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1581 deleted these in the copy used for continuing compilation,
1582 not the copy used for inlining). */
1583 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1584 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1585 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1586 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1597 RTX_INTEGRATED_P (copy) = 1;
1599 map->insn_map[INSN_UID (insn)] = copy;
1602 /* Now copy the REG_NOTES. */
1603 for (insn = insns; insn; insn = NEXT_INSN (insn))
1604 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1605 && map->insn_map[INSN_UID (insn)])
1606 REG_NOTES (map->insn_map[INSN_UID (insn)])
1607 = copy_rtx_and_substitute (REG_NOTES (insn), map);
1609 if (local_return_label)
1610 emit_label (local_return_label);
1612 /* Make copies of the decls of the symbols in the inline function, so that
1613 the copies of the variables get declared in the current function. Set
1614 up things so that lookup_static_chain knows to interpret registers
1615 in SAVE_EXPRs for TYPE_SIZEs as local. */
1617 inline_function_decl = fndecl;
1618 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map, 0);
1619 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1620 inline_function_decl = 0;
1622 /* End the scope containing the copied formal parameter variables. */
1624 expand_end_bindings (getdecls (), 1, 1);
1627 emit_line_note (input_filename, lineno);
1629 if (structure_value_addr)
1630 return gen_rtx (MEM, TYPE_MODE (type),
1631 memory_address (TYPE_MODE (type), structure_value_addr));
1635 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1636 push all of those decls and give each one the corresponding home. */
1639 integrate_parm_decls (args, map, arg_vector)
1641 struct inline_remap *map;
1647 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1649 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1652 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1654 /* These args would always appear unused, if not for this. */
1655 TREE_USED (decl) = 1;
1656 /* Prevent warning for shadowing with these. */
1657 DECL_FROM_INLINE (decl) = 1;
1659 /* Fully instantiate the address with the equivalent form so that the
1660 debugging information contains the actual register, instead of the
1661 virtual register. Do this by not passing an insn to subst_constants. */
1663 subst_constants (&new_decl_rtl, NULL_RTX, map);
1664 apply_change_group ();
1665 DECL_RTL (decl) = new_decl_rtl;
1669 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1670 current function a tree of contexts isomorphic to the one that is given.
1672 LEVEL indicates how far down into the BLOCK tree is the node we are
1673 currently traversing. It is always zero for the initial call.
1675 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1676 registers used in the DECL_RTL field should be remapped. If it is zero,
1677 no mapping is necessary.
1679 FUNCTIONBODY indicates whether the top level block tree corresponds to
1680 a function body. This is identical in meaning to the functionbody
1681 argument of poplevel. */
1684 integrate_decl_tree (let, level, map, functionbody)
1687 struct inline_remap *map;
1694 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1696 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1697 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1698 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1699 if (! functionbody && DECL_RTL (t) != 0)
1701 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1702 /* Fully instantiate the address with the equivalent form so that the
1703 debugging information contains the actual register, instead of the
1704 virtual register. Do this by not passing an insn to subst_constants. */
1706 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1707 apply_change_group ();
1709 else if (DECL_RTL (t))
1710 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
1711 TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
1712 TREE_STATIC (d) = TREE_STATIC (t);
1713 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1714 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1715 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1716 TREE_READONLY (d) = TREE_READONLY (t);
1717 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1718 /* These args would always appear unused, if not for this. */
TREE_USED (d) = 1;
1720 /* Prevent warning for shadowing with these. */
1721 DECL_FROM_INLINE (d) = 1;
1725 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1726 integrate_decl_tree (t, level + 1, map, functionbody);
1728 node = poplevel (level > 0, 0, level == 0 && functionbody);
1730 TREE_USED (node) = TREE_USED (let);
1733 /* Create a new copy of an rtx.
1734 Recursively copies the operands of the rtx,
1735 except for those few rtx codes that are sharable.
1737 We always return an rtx that is similar to that incoming rtx, with the
1738 exception of possibly changing a REG to a SUBREG or vice versa. No
1739 rtl is ever emitted.
1741 Handle constants that need to be placed in the constant pool by
1742 calling `force_const_mem'. */
1745 copy_rtx_and_substitute (orig, map)
1747 struct inline_remap *map;
1749 register rtx copy, temp;
1751 register RTX_CODE code;
1752 register enum machine_mode mode;
1753 register char *format_ptr;
1759 code = GET_CODE (orig);
1760 mode = GET_MODE (orig);
1765 /* If the stack pointer register shows up, it must be part of
1766 stack-adjustments (*not* because we eliminated the frame pointer!).
1767 Small hard registers are returned as-is. Pseudo-registers
1768 go through their `reg_map'. */
1769 regno = REGNO (orig);
1770 if (regno <= LAST_VIRTUAL_REGISTER)
1772 /* Some hard registers are also mapped,
1773 but others are not translated. */
1774 if (map->reg_map[regno] != 0)
1775 return map->reg_map[regno];
1777 /* If this is the virtual frame pointer, make space in current
1778 function's stack frame for the stack frame of the inline function.
1780 Copy the address of this area into a pseudo. Map
1781 virtual_stack_vars_rtx to this pseudo and set up a constant
1782 equivalence for it to be the address. This will substitute the
1783 address into insns where it can be substituted and use the new
1784 pseudo where it can't. */
1785 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1788 int size = DECL_FRAME_SIZE (map->fndecl);
1792 loc = assign_stack_temp (BLKmode, size, 1);
1793 loc = XEXP (loc, 0);
1794 #ifdef FRAME_GROWS_DOWNWARD
1795 /* In this case, virtual_stack_vars_rtx points to one byte
1796 higher than the top of the frame area. So compute the offset
1797 to one byte higher than our substitute frame.
1798 Keep the fake frame pointer aligned like a real one. */
1799 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1800 loc = plus_constant (loc, rounded);
1802 map->reg_map[regno] = temp = force_operand (loc, NULL_RTX);
1803 map->const_equiv_map[REGNO (temp)] = loc;
1804 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1806 seq = gen_sequence ();
1808 emit_insn_after (seq, map->insns_at_start);
1811 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1813 /* Do the same for a block to contain any arguments referenced in memory. */
1816 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1819 loc = assign_stack_temp (BLKmode, size, 1);
1820 loc = XEXP (loc, 0);
1821 map->reg_map[regno] = temp = force_operand (loc, NULL_RTX);
1822 map->const_equiv_map[REGNO (temp)] = loc;
1823 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1825 seq = gen_sequence ();
1827 emit_insn_after (seq, map->insns_at_start);
1830 else if (REG_FUNCTION_VALUE_P (orig))
1832 /* This is a reference to the function return value. If
1833 the function doesn't have a return value, error. If the
1834 mode doesn't agree, make a SUBREG. */
1835 if (map->inline_target == 0)
1836 /* Must be unrolling loops or replicating code if we
1837 reach here, so return the register unchanged. */
1839 else if (mode != GET_MODE (map->inline_target))
1840 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1842 return map->inline_target;
1846 if (map->reg_map[regno] == NULL)
1848 map->reg_map[regno] = gen_reg_rtx (mode);
1849 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1850 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1851 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1852 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1854 return map->reg_map[regno];
1857 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1858 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1859 if (GET_CODE (copy) == SUBREG)
1860 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1861 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1863 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1864 SUBREG_WORD (orig));
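/* For instance (hypothetical word numbers), if ORIG is
   (subreg:M (reg A) 2) and the copy of (reg A) comes back as
   (subreg (reg B) 1), the word offsets simply add, giving
   (subreg:M (reg B) 3) rather than a nested SUBREG.  */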
1868 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo)) to (use foo). */
1870 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1871 if (GET_CODE (copy) == SUBREG)
1872 copy = SUBREG_REG (copy);
1873 return gen_rtx (code, VOIDmode, copy);
1876 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1877 = LABEL_PRESERVE_P (orig);
1878 return map->label_map[CODE_LABEL_NUMBER (orig)];
1881 copy = rtx_alloc (LABEL_REF);
1882 PUT_MODE (copy, mode);
1883 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1884 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1893 /* Symbols which represent the address of a label stored in the constant
1894 pool must be modified to point to a constant pool entry for the
1895 remapped label. Otherwise, symbols are returned unchanged. */
1896 if (CONSTANT_POOL_ADDRESS_P (orig))
1898 rtx constant = get_pool_constant (orig);
1899 if (GET_CODE (constant) == LABEL_REF)
1901 copy = rtx_alloc (LABEL_REF);
1902 PUT_MODE (copy, mode);
1904 = map->label_map[CODE_LABEL_NUMBER (XEXP (constant, 0))];
1905 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1906 copy = force_const_mem (Pmode, copy);
1907 return XEXP (copy, 0);
1913 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1914 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1915 duplicate of a CONST_DOUBLE we have already seen. */
1916 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1920 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1921 return immed_real_const_1 (d, GET_MODE (orig));
1924 return immed_double_const (CONST_DOUBLE_LOW (orig),
1925 CONST_DOUBLE_HIGH (orig), VOIDmode);
1928 /* Make new constant pool entry for a constant
1929 that was in the pool of the inline function. */
1930 if (RTX_INTEGRATED_P (orig))
1932 /* If this was an address of a constant pool entry that itself
1933 had to be placed in the constant pool, it might not be a
1934 valid address. So the recursive call below might turn it
1935 into a register. In that case, it isn't a constant any
1936 more, so return it. This has the potential of changing a
1937 MEM into a REG, but we'll assume that it is safe. */
1938 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
1939 if (! CONSTANT_P (temp))
1941 return validize_mem (force_const_mem (GET_MODE (orig), temp));
1946 /* If from constant pool address, make new constant pool entry and
1947 return its address. */
1948 if (! RTX_INTEGRATED_P (orig))
1951 temp = force_const_mem (GET_MODE (orig),
1952 copy_rtx_and_substitute (XEXP (orig, 0), map));
1955 /* Legitimizing the address here is incorrect.
1957 The only ADDRESS rtx's that can reach here are ones created by
1958 save_constants. Hence the operand of the ADDRESS is always legal
1959 in this position of the instruction, since the original rtx without
1960 the ADDRESS was legal.
1962 The reason we don't legitimize the address here is that on the
1963 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
1964 This code forces the operand of the address to a register, which
1965 fails because we cannot take the HIGH part of a register.
1967 Also, change_address may create new registers. These registers
1968 will not have valid reg_map entries. This can cause try_constants()
1969 to fail because it assumes that all registers in the rtx have valid
1970 reg_map entries, and it may end up replacing one of these new
1971 registers with junk. */
1973 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1974 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1977 return XEXP (temp, 0);
1980 /* If a single asm insn contains multiple output operands
1981 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1982 We must make sure that the copied insn continues to share it. */
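/* The shared vectors are recorded in the map the first time an
   ASM_OPERANDS from this insn is copied (see the bookkeeping near the
   end of this function); reusing map->copy_asm_operands_vector here
   keeps the copies sharing a single operand vector, as the originals
   did.  */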
1983 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1985 copy = rtx_alloc (ASM_OPERANDS);
1986 XSTR (copy, 0) = XSTR (orig, 0);
1987 XSTR (copy, 1) = XSTR (orig, 1);
1988 XINT (copy, 2) = XINT (orig, 2);
1989 XVEC (copy, 3) = map->copy_asm_operands_vector;
1990 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1991 XSTR (copy, 5) = XSTR (orig, 5);
1992 XINT (copy, 6) = XINT (orig, 6);
1998 /* This is given special treatment because the first
1999 operand of a CALL is a (MEM ...) which may get
2000 forced into a register for cse. This is undesirable
2001 if function-address cse isn't wanted or if we won't do cse. */
2002 #ifndef NO_FUNCTION_CSE
2003 if (! (optimize && ! flag_no_function_cse))
2005 return gen_rtx (CALL, GET_MODE (orig),
2006 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2007 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2008 copy_rtx_and_substitute (XEXP (orig, 1), map));
2012 /* Must be ifdefed out for loop unrolling to work. */
2018 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2020 If the nonlocal goto is into the current function,
2021 this will result in unnecessarily bad code, but should work. */
2022 if (SET_DEST (orig) == virtual_stack_vars_rtx
2023 || SET_DEST (orig) == virtual_incoming_args_rtx)
2024 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2025 copy_rtx_and_substitute (SET_SRC (orig), map));
2029 copy = rtx_alloc (MEM);
2030 PUT_MODE (copy, mode);
2031 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2032 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2033 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2034 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2038 copy = rtx_alloc (code);
2039 PUT_MODE (copy, mode);
2040 copy->in_struct = orig->in_struct;
2041 copy->volatil = orig->volatil;
2042 copy->unchanging = orig->unchanging;
2044 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2046 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2048 switch (*format_ptr++)
2054 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2058 /* Change any references to old insns to point to the
2059 corresponding copied insns. */
2060 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2064 XVEC (copy, i) = XVEC (orig, i);
2065 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2067 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2068 for (j = 0; j < XVECLEN (copy, i); j++)
2069 XVECEXP (copy, i, j)
2070 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2075 XWINT (copy, i) = XWINT (orig, i);
2079 XINT (copy, i) = XINT (orig, i);
2083 XSTR (copy, i) = XSTR (orig, i);
2091 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2093 map->orig_asm_operands_vector = XVEC (orig, 3);
2094 map->copy_asm_operands_vector = XVEC (copy, 3);
2095 map->copy_asm_constraints_vector = XVEC (copy, 4);
2101 /* Substitute known constant values into INSN, if that is valid. */
2104 try_constants (insn, map)
2106 struct inline_remap *map;
2111 subst_constants (&PATTERN (insn), insn, map);
2113 /* Apply the changes if they are valid; otherwise discard them. */
2114 apply_change_group ();
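/* Each replacement proposed by subst_constants was queued with
   validate_change in group mode; apply_change_group re-recognizes the
   insn and either keeps all of the queued changes or discards them as a
   unit.  */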
2116 /* Show we don't know the value of anything stored or clobbered. */
2117 note_stores (PATTERN (insn), mark_stores);
2118 map->last_pc_value = 0;
2120 map->last_cc0_value = 0;
2123 /* Set up any constant equivalences made in this insn. */
2124 for (i = 0; i < map->num_sets; i++)
2126 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2128 int regno = REGNO (map->equiv_sets[i].dest);
2130 if (map->const_equiv_map[regno] == 0
2131 /* The following clause is a hack to make the case work where GNU C++
2132 reassigns a variable in order to make cse work right. */
2133 || ! rtx_equal_p (map->const_equiv_map[regno],
2134 map->equiv_sets[i].equiv))
2136 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2137 map->const_age_map[regno] = map->const_age;
2140 else if (map->equiv_sets[i].dest == pc_rtx)
2141 map->last_pc_value = map->equiv_sets[i].equiv;
2143 else if (map->equiv_sets[i].dest == cc0_rtx)
2144 map->last_cc0_value = map->equiv_sets[i].equiv;
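/* For instance (hypothetical register number), an insn containing
   (set (reg 105) (const_int 5)) leaves equiv_sets[] holding
   dest = (reg 105) and equiv = (const_int 5), so the loop above records
   const_equiv_map[105] = (const_int 5) for use in later insns.  */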
2149 /* Substitute known constants for pseudo regs in the contents of LOC,
2150 which are part of INSN.
2151 If INSN is zero, the substitution should always be done (this is used to
2153 These changes are taken out by try_constants if the result is not valid.
2155 Note that we are more concerned with determining when the result of a SET
2156 is a constant, for further propagation, than actually inserting constants
2157 into insns; cse will do the latter task better.
2159 This function is also used to adjust the addresses of items previously addressed
2160 via the virtual stack variable or virtual incoming arguments registers. */
2163 subst_constants (loc, insn, map)
2166 struct inline_remap *map;
2170 register enum rtx_code code;
2171 register char *format_ptr;
2172 int num_changes = num_validated_changes ();
2174 enum machine_mode op0_mode;
2176 code = GET_CODE (x);
2191 validate_change (insn, loc, map->last_cc0_value, 1);
2197 /* The only thing we can do with a USE or CLOBBER is possibly do
2198 some substitutions in a MEM within it. */
2199 if (GET_CODE (XEXP (x, 0)) == MEM)
2200 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2204 /* Substitute for parms and known constants. Don't replace
2205 hard regs used as user variables with constants. */
2207 int regno = REGNO (x);
2209 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2210 && regno < map->const_equiv_map_size
2211 && map->const_equiv_map[regno] != 0
2212 && map->const_age_map[regno] >= map->const_age)
2213 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2218 /* SUBREG is ordinary, but don't make nested SUBREGs; try to simplify the result where possible. */
2221 rtx inner = SUBREG_REG (x);
2224 /* We can't call subst_constants on &SUBREG_REG (x) because any
2225 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2226 see what is inside, try to form the new SUBREG and see if that is
2227 valid. We handle two cases: extracting a full word in an
2228 integral mode and extracting the low part. */
2229 subst_constants (&inner, NULL_RTX, map);
2231 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2232 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2233 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2234 new = operand_subword (inner, SUBREG_WORD (x), 0,
2235 GET_MODE (SUBREG_REG (x)));
2237 if (new == 0 && subreg_lowpart_p (x))
2238 new = gen_lowpart_common (GET_MODE (x), inner);
2241 validate_change (insn, loc, new, 1);
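/* For instance (hypothetical values), if INNER has been reduced to
   (const_int 0x12345678) and X is a lowpart (subreg:HI ...), then
   gen_lowpart_common yields (const_int 0x5678), and that is what gets
   proposed as the replacement.  */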
2247 subst_constants (&XEXP (x, 0), insn, map);
2249 /* If a memory address got spoiled, change it back. */
2250 if (insn != 0 && num_validated_changes () != num_changes
2251 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2252 cancel_changes (num_changes);
2257 /* Substitute constants in our source, and in any arguments to a
2258 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2260 rtx *dest_loc = &SET_DEST (x);
2261 rtx dest = *dest_loc;
2264 subst_constants (&SET_SRC (x), insn, map);
2267 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2268 || GET_CODE (*dest_loc) == SIGN_EXTRACT
2269 || GET_CODE (*dest_loc) == SUBREG
2270 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2272 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2274 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2275 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2277 dest_loc = &XEXP (*dest_loc, 0);
2280 /* Check for the case where DEST is a SUBREG, both it and the underlying
2281 register are no wider than one word, and the SUBREG has the wider mode.
2282 In that case, we are really setting the underlying register to the
2283 source converted to the mode of DEST, so indicate that. */
2284 if (GET_CODE (dest) == SUBREG
2285 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2286 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2287 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2288 <= GET_MODE_SIZE (GET_MODE (dest)))
2289 && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2290 src = tem, dest = SUBREG_REG (dest);
2292 /* If storing a recognizable value, save it for later recording. */
2293 if ((map->num_sets < MAX_RECOG_OPERANDS)
2294 && (CONSTANT_P (src)
2295 || (GET_CODE (src) == PLUS
2296 && GET_CODE (XEXP (src, 0)) == REG
2297 && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2298 && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2299 && CONSTANT_P (XEXP (src, 1)))
2300 || GET_CODE (src) == COMPARE
2305 && (src == pc_rtx || GET_CODE (src) == RETURN
2306 || GET_CODE (src) == LABEL_REF))))
2308 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2309 it will cause us to save the COMPARE with any constants
2310 substituted, which is what we want for later. */
2311 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2312 map->equiv_sets[map->num_sets++].dest = dest;
2319 format_ptr = GET_RTX_FORMAT (code);
2321 /* If the first operand is an expression, save its mode for later. */
2322 if (*format_ptr == 'e')
2323 op0_mode = GET_MODE (XEXP (x, 0));
2325 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2327 switch (*format_ptr++)
2334 subst_constants (&XEXP (x, i), insn, map);
2344 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2347 for (j = 0; j < XVECLEN (x, i); j++)
2348 subst_constants (&XVECEXP (x, i, j), insn, map);
2357 /* If this is a commutative operation, move a constant to the second
2358 operand unless the second operand is already a CONST_INT. */
2359 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2360 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2362 rtx tem = XEXP (x, 0);
2363 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2364 validate_change (insn, &XEXP (x, 1), tem, 1);
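/* For instance (hypothetical register number),
   (plus:SI (const_int 4) (reg 65)) is put into canonical order as
   (plus:SI (reg 65) (const_int 4)); the CONST_INT test above keeps the
   existing order when the second operand is already a CONST_INT, e.g.
   when both operands are constant.  */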
2367 /* Simplify the expression in case we put in some constants. */
2368 switch (GET_RTX_CLASS (code))
2371 new = simplify_unary_operation (code, GET_MODE (x),
2372 XEXP (x, 0), op0_mode);
2377 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2378 if (op_mode == VOIDmode)
2379 op_mode = GET_MODE (XEXP (x, 1));
2380 new = simplify_relational_operation (code, op_mode,
2381 XEXP (x, 0), XEXP (x, 1));
2382 #ifdef FLOAT_STORE_FLAG_VALUE
2383 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2384 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2385 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2392 new = simplify_binary_operation (code, GET_MODE (x),
2393 XEXP (x, 0), XEXP (x, 1));
2398 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2399 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2404 validate_change (insn, loc, new, 1);
2407 /* Show that registers modified no longer contain known constants. We are
2408 called from note_stores with parts of the new insn. */
2411 mark_stores (dest, x)
2415 if (GET_CODE (dest) == SUBREG)
2416 dest = SUBREG_REG (dest);
2418 if (GET_CODE (dest) == REG)
2419 global_const_equiv_map[REGNO (dest)] = 0;
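/* Note that a store through a SUBREG invalidates the equivalence
   recorded for the entire underlying register; once part of the
   register has changed, the recorded constant is no longer
   trustworthy.  */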
2422 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2423 pointed to by PX, they represent constants in the constant pool.
2424 Replace these with a new memory reference obtained from force_const_mem.
2425 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2426 address of a constant pool entry. Replace them with the address of
2427 a new constant pool entry obtained from force_const_mem. */
2430 restore_constants (px)
2440 if (GET_CODE (x) == CONST_DOUBLE)
2442 /* We have to make a new CONST_DOUBLE to ensure that we account for
2443 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2444 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2448 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2449 *px = immed_real_const_1 (d, GET_MODE (x));
2452 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2456 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2458 restore_constants (&XEXP (x, 0));
2459 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2461 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2463 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2464 rtx new = XEXP (SUBREG_REG (x), 0);
2466 restore_constants (&new);
2467 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2468 PUT_MODE (new, GET_MODE (x));
2469 *px = validize_mem (new);
2471 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2473 restore_constants (&XEXP (x, 0));
2474 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2478 fmt = GET_RTX_FORMAT (GET_CODE (x));
2479 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2484 for (j = 0; j < XVECLEN (x, i); j++)
2485 restore_constants (&XVECEXP (x, i, j));
2489 restore_constants (&XEXP (x, i));
2496 /* Output the assembly language code for the function FNDECL
2497 from its DECL_SAVED_INSNS. Used for inline functions that are output
2498 at the end of compilation instead of where they appeared in the source. */
2501 output_inline_function (fndecl)
2504 rtx head = DECL_SAVED_INSNS (fndecl);
2507 temporary_allocation ();
2509 current_function_decl = fndecl;
2511 /* This call is only used to initialize global variables. */
2512 init_function_start (fndecl, "lossage", 1);
2514 /* Redo parameter determinations in case the FUNCTION_...
2515 macros took machine-specific actions that need to be redone. */
2516 assign_parms (fndecl, 1);
2518 /* Set stack frame size. */
2519 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2521 restore_reg_data (FIRST_PARM_INSN (head));
2523 stack_slot_list = STACK_SLOT_LIST (head);
2525 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2526 current_function_calls_alloca = 1;
2528 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2529 current_function_calls_setjmp = 1;
2531 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2532 current_function_calls_longjmp = 1;
2534 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2535 current_function_returns_struct = 1;
2537 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2538 current_function_returns_pcc_struct = 1;
2540 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2541 current_function_needs_context = 1;
2543 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2544 current_function_has_nonlocal_label = 1;
2546 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2547 current_function_returns_pointer = 1;
2549 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2550 current_function_uses_const_pool = 1;
2552 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2553 current_function_uses_pic_offset_table = 1;
2555 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2556 current_function_pops_args = POPS_ARGS (head);
2558 /* There is no need to output a return label again. */
2561 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2563 /* Find last insn and rebuild the constant pool. */
2564 for (last = FIRST_PARM_INSN (head);
2565 NEXT_INSN (last); last = NEXT_INSN (last))
2567 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2569 restore_constants (&PATTERN (last));
2570 restore_constants (&REG_NOTES (last));
2574 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2575 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2577 /* Compile this function all the way down to assembly code. */
2578 rest_of_compilation (fndecl);
2580 current_function_decl = 0;
2582 permanent_allocation ();