1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-config.h"
29 #include "insn-flags.h"
32 #include "integrate.h"
37 #define obstack_chunk_alloc xmalloc
38 #define obstack_chunk_free free
39 extern int xmalloc ();
42 extern struct obstack *function_maybepermanent_obstack;
44 extern tree pushdecl ();
45 extern tree poplevel ();
47 /* Similar, but round to the next highest integer that meets the
   alignment.  */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 #define INTEGRATE_THRESHOLD(DECL) \
55 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
58 /* Save any constant pool constants in an insn. */
59 static void save_constants ();
61 /* Note when parameter registers are the destination of a SET. */
62 static void note_modified_parmregs ();
64 /* Copy an rtx for save_for_inline_copying. */
65 static rtx copy_for_inline ();
67 /* Make copies of MEMs in DECL_RTLs. */
68 static void copy_decl_rtls ();
70 static tree copy_decl_tree ();
72 /* Return the constant equivalent of a given rtx, or 0 if none. */
73 static rtx const_equiv ();
75 static void integrate_parm_decls ();
76 static void integrate_decl_tree ();
78 static void subst_constants ();
79 static rtx fold_out_const_cc0 ();
81 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
82 is safe and reasonable to integrate into other functions.
83 Nonzero means value is a warning message with a single %s
84 for the function's name. */
/* Return zero if FNDECL (a FUNCTION_DECL) is safe and reasonable to inline,
   else a warning-message format string containing one %s for the function's
   name.
   NOTE(review): extraction dropped lines from this chunk (the embedded
   original line numbers jump, e.g. 87->91); the return type, the K&R
   parameter declaration for FNDECL, declarations of `parms' and `insn',
   and several braces are not visible here.  */
87 function_cannot_inline_p (fndecl)
91 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
92 int max_insns = INTEGRATE_THRESHOLD (fndecl);
93 register int ninsns = 0;
96 /* No inlines with varargs.  `grokdeclarator' gives a warning
97 message about that if `inline' is specified.  This code
98 is put in to catch the volunteers.  */
/* A trailing arg type that is not void_type_node means "...";
   a first parm named __builtin_va_alist means old-style varargs.  */
99 if ((last && TREE_VALUE (last) != void_type_node)
100 || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
101 && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
102 "__builtin_va_alist")))
103 return "varargs function cannot be inline";
105 if (current_function_calls_alloca)
106 return "function using alloca cannot be inline";
108 if (current_function_contains_functions)
109 return "function with nested functions cannot be inline";
111 /* This restriction may be eliminated sometime soon.  But for now, don't
112 worry about remapping the static chain.  */
113 if (current_function_needs_context)
114 return "nested function cannot be inline";
116 /* If it's not even close, don't even look.  */
117 if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
118 return "function too large to be inline";
121 /* Large stacks are OK now that inlined functions can share them.  */
122 /* Don't inline functions with large stack usage,
123 since they can make other recursive functions burn up stack.  */
124 if (!TREE_INLINE (fndecl) && get_frame_size () > 100)
125 return "function stack frame for inlining";
129 /* Don't inline functions which do not specify a function prototype and
130 have BLKmode argument or take the address of a parameter.  */
131 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
/* A BLKmode parm must live in memory, so mark it addressable.  */
133 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
134 TREE_ADDRESSABLE (parms) = 1;
/* LAST == NULL_TREE here means no prototype was supplied.  */
135 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
136 return "no prototype, and parameter address used; cannot be inline";
140 /* We can't inline functions that return structures
141 the old-fashioned PCC way, copying into a static block.  */
142 if (current_function_returns_pcc_struct)
143 return "inline functions not supported for this return value type";
145 /* We can't inline functions that return structures of varying size.
   (int_size_in_bytes yields a negative value for varying size.)  */
146 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
147 return "function with varying-size return value cannot be inline";
149 /* Cannot inline a function with a varying size argument.  */
150 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
151 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
152 return "function with varying-size parameter cannot be inline";
/* For a function not explicitly marked inline, count the real insns
   and refuse if there are too many.  */
154 if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns)
156 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
157 insn = NEXT_INSN (insn))
/* Only rtx's of class 'i' are real insns; notes, labels and barriers
   don't count.  NOTE(review): the increment of NINSNS was on a line
   dropped by extraction.  */
159 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
163 if (ninsns >= max_insns)
164 return "function too large to be inline";
167 /* We cannot inline this function if forced_labels is non-zero.  This
168 implies that a label in this function was used as an initializer.
169 Because labels can not be duplicated, all labels in the function
170 will be renamed when it is inlined.  However, there is no way to find
171 and fix all variables initialized with addresses of labels in this
172 function, hence inlining is impossible.  */
175 return "function with label addresses used in initializers cannot inline";
180 /* Variables used within save_for_inline. */
182 /* Mapping from old pseudo-register to new pseudo-registers.
183 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
184 It is allocated in `save_for_inline' and `expand_inline_function',
185 and deallocated on exit from each of those routines. */
188 /* Mapping from old code-labels to new code-labels.
189 The first element of this map is label_map[min_labelno].
190 It is allocated in `save_for_inline' and `expand_inline_function',
191 and deallocated on exit from each of those routines. */
192 static rtx *label_map;
194 /* Mapping from old insn uid's to copied insns.
195 It is allocated in `save_for_inline' and `expand_inline_function',
196 and deallocated on exit from each of those routines. */
197 static rtx *insn_map;
199 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
200 Zero for a reg that isn't a parm's home.
201 Only reg numbers less than max_parm_reg are mapped here. */
202 static tree *parmdecl_map;
204 /* Keep track of first pseudo-register beyond those that are parms. */
205 static int max_parm_reg;
207 /* When an insn is being copied by copy_for_inline,
208 this is nonzero if we have copied an ASM_OPERANDS.
209 In that case, it is the original input-operand vector. */
210 static rtvec orig_asm_operands_vector;
212 /* When an insn is being copied by copy_for_inline,
213 this is nonzero if we have copied an ASM_OPERANDS.
214 In that case, it is the copied input-operand vector. */
215 static rtvec copy_asm_operands_vector;
217 /* Likewise, this is the copied constraints vector. */
218 static rtvec copy_asm_constraints_vector;
220 /* In save_for_inline, nonzero if past the parm-initialization insns. */
221 static int in_nonparm_insns;
223 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
224 needed to save FNDECL's insns and info for future inline expansion. */
/* Subroutine shared by `save_for_inline_{copying,nocopy}'.  Computes the
   function-flags word, snapshots the parameter DECL_RTLs into ARG_VECTOR
   (copying MEM rtl when COPY is nonzero), fills PARMDECL_MAP, and returns
   the inline-header rtx that heads FNDECL's saved insn chain.
   NOTE(review): the K&R parameter declarations, most local declarations
   (`arg_vector', `parms'), and some braces were dropped by extraction.  */
227 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
234 int function_flags, i;
238 /* Compute the values of any flags we must restore when inlining this.  */
/* NOTE(review): the lvalue of this assignment ("function_flags =") is on
   a dropped line; each term below contributes one distinct flag bit.  */
241 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
242 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
243 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
244 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
245 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
246 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
247 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
248 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
249 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
250 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
252 /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
253 bzero (parmdecl_map, max_parm_reg * sizeof (tree));
254 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
/* Record the unmodified rtl home of each parameter, in argument order.  */
256 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
258 parms = TREE_CHAIN (parms), i++)
260 rtx p = DECL_RTL (parms);
262 if (GET_CODE (p) == MEM && copy)
263 /* Copy the rtl so that modifications of the address
264 later in compilation won't affect this arg_vector.
265 Virtual register instantiation can screw the address.  */
266 DECL_RTL (parms) = copy_rtx (p);
269 RTVEC_ELT (arg_vector, i) = p;
/* For a parm living in a pseudo-reg, remember its PARM_DECL so that
   note_modified_parmregs can clear TREE_READONLY if it is stored into.  */
271 if (GET_CODE (p) == REG)
272 parmdecl_map[REGNO (p)] = parms;
/* Assume the parm is unmodified until a store proves otherwise.  */
273 TREE_READONLY (parms) = 1;
276 /* Assume we start out in the insns that set up the parameters.  */
277 in_nonparm_insns = 0;
279 /* The list of DECL_SAVED_INSNS, starts off with a header which
280 contains the following information:
282 the first insn of the function (not including the insns that copy
283 parameters into registers).
284 the first parameter insn of the function,
285 the first label used by that function,
286 the last label used by that function,
287 the highest register number used for parameters,
288 the total number of registers used,
289 the size of the incoming stack area for parameters,
290 the number of bytes popped on return,
292 some flags that are used to restore compiler globals,
293 the value of current_function_outgoing_args_size,
294 the original argument vector,
295 and the original DECL_INITIAL.  */
297 return gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
298 max_parm_reg, max_reg,
299 current_function_args_size,
300 current_function_pops_args,
301 stack_slot_list, function_flags,
302 current_function_outgoing_args_size,
303 arg_vector, (rtx) DECL_INITIAL (fndecl));
306 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
307 things that must be done to make FNDECL expandable as an inline function.
308 HEAD contains the chain of insns to which FNDECL will expand. */
/* Subroutine for `save_for_inline_{copying,nocopy}'.  Finishes making
   FNDECL expandable inline: link the saved-insn header HEAD into the insn
   chain, record HEAD and the frame size on FNDECL, and mark FNDECL inline.
   NOTE(review): the return type, K&R parameter declarations and braces
   were dropped by extraction.  */
311 finish_inline (fndecl, head)
315 NEXT_INSN (head) = get_first_nonparm_insn ();
316 FIRST_PARM_INSN (head) = get_insns ();
317 DECL_SAVED_INSNS (fndecl) = head;
318 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
319 TREE_INLINE (fndecl) = 1;
322 /* Make the insns and PARM_DECLs of the current function permanent
323 and record other information in DECL_SAVED_INSNS to allow inlining
324 of this function in subsequent calls.
326 This function is called when we are going to immediately compile
327 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
328 modified by the compilation process, so we copy all of them to
329 new storage and consider the new insns to be the insn chain to be
/* Make the insns and PARM_DECLs of the current function permanent and
   record other information in DECL_SAVED_INSNS to allow inlining of this
   function in subsequent calls; copy the whole insn chain so continued
   compilation of FNDECL cannot corrupt the saved original.
   FIX(review): the REG_NOTES address-of was mis-encoded as the character
   `(R)' (an "&REG" -> "&reg;" entity mangle); restored to
   `save_constants (&REG_NOTES (insn));' to match the `&PATTERN' call above.
   NOTE(review): extraction dropped many lines here (declarations, braces,
   switch `case' labels); the embedded original line numbers mark the gaps.  */
333 save_for_inline_copying (fndecl)
336 rtx first_insn, last_insn, insn;
338 int max_labelno, min_labelno, i, len;
341 rtx first_nonparm_insn;
343 /* Make and emit a return-label if we have not already done so.
344 Do this before recording the bounds on label numbers.  */
346 if (return_label == 0)
348 return_label = gen_label_rtx ();
349 emit_label (return_label);
352 /* Get some bounds on the labels and registers used.  */
354 max_labelno = max_label_num ();
355 min_labelno = get_first_label_num ();
356 max_reg = max_reg_num ();
358 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
359 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
360 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
361 for the parms, prior to elimination of virtual registers.
362 These values are needed for substituting parms properly.  */
364 max_parm_reg = max_parm_reg_num ();
365 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
/* Last argument 1 = copy parameter MEM rtl into the arg vector.  */
367 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
369 if (current_function_uses_const_pool)
371 /* Replace any constant pool references with the actual constant.  We
372 will put the constants back in the copy made below.  */
373 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
374 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
376 save_constants (&PATTERN (insn));
377 if (REG_NOTES (insn))
378 save_constants (&REG_NOTES (insn));
381 /* Clear out the constant pool so that we can recreate it with the
382 copied constants below.  */
383 init_const_rtx_hash_table ();
384 clear_const_double_mem ();
387 max_uid = INSN_UID (head);
389 /* We have now allocated all that needs to be allocated permanently
390 on the rtx obstack.  Set our high-water mark, so that we
391 can free the rest of this when the time comes.  */
395 /* Copy the chain insns of this function.
396 Install the copied chain as the insns of this function,
397 for continued compilation;
398 the original chain is recorded as the DECL_SAVED_INSNS
399 for inlining future calls.  */
401 /* If there are insns that copy parms from the stack into pseudo registers,
402 those insns are not copied.  `expand_inline_function' must
403 emit the correct code to handle such things.  */
406 if (GET_CODE (insn) != NOTE)
/* Start the copied chain with a NOTE cloned from the original's first insn.  */
408 first_insn = rtx_alloc (NOTE);
409 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
410 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
411 INSN_UID (first_insn) = INSN_UID (insn);
412 PREV_INSN (first_insn) = NULL;
413 NEXT_INSN (first_insn) = NULL;
414 last_insn = first_insn;
416 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
417 Make these new rtx's now, and install them in regno_reg_rtx, so they
418 will be the official pseudo-reg rtx's for the rest of compilation.  */
420 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
/* Size of a REG rtx, including its operand array.  */
422 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
423 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
424 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
425 regno_reg_rtx[i], len);
/* bcopy (src, dst, n): install the fresh copies as the official regs.  */
427 bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
428 regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
429 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
431 /* Likewise each label rtx must have a unique rtx as its copy.  */
433 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
/* Bias the map so it can be indexed directly by label number.  */
434 label_map -= min_labelno;
436 for (i = min_labelno; i < max_labelno; i++)
437 label_map[i] = gen_label_rtx ();
439 /* Record the mapping of old insns to copied insns.  */
441 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
442 bzero (insn_map, max_uid * sizeof (rtx));
444 /* Get the insn which signals the end of parameter setup code.  */
445 first_nonparm_insn = get_first_nonparm_insn ();
447 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
448 (the former occurs when a variable has its address taken)
449 since these may be shared and can be changed by virtual
450 register instantiation.  DECL_RTL values for our arguments
451 have already been copied by initialize_for_inline.  */
452 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
453 if (GET_CODE (regno_reg_rtx[i]) == MEM)
454 XEXP (regno_reg_rtx[i], 0)
455 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
457 /* Copy the tree of subblocks of the function, and the decls in them.
458 We will use the copy for compiling this function, then restore the original
459 subblocks and decls for use when inlining this function.
461 Several parts of the compiler modify BLOCK trees.  In particular,
462 instantiate_virtual_regs will instantiate any virtual regs
463 mentioned in the DECL_RTLs of the decls, and loop
464 unrolling will replicate any BLOCK trees inside an unrolled loop.
466 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
467 which we will use for inlining.  The rtl might even contain pseudoregs
468 whose space has been freed.  */
470 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
472 /* Now copy each DECL_RTL which is a MEM,
473 so it is safe to modify their addresses.  */
474 copy_decl_rtls (DECL_INITIAL (fndecl));
476 /* Now copy the chain of insns.  Do this twice.  The first copy the insn
477 itself and its body.  The second time copy of REG_NOTES.  This is because
478 a REG_NOTE may have a forward pointer to another insn.  */
480 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
482 orig_asm_operands_vector = 0;
484 if (insn == first_nonparm_insn)
485 in_nonparm_insns = 1;
/* NOTE(review): the `case' labels of this switch (NOTE, INSN/JUMP_INSN/
   CALL_INSN, CODE_LABEL, BARRIER, default) were dropped by extraction.  */
487 switch (GET_CODE (insn))
490 /* No need to keep these.  */
491 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
494 copy = rtx_alloc (NOTE);
495 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
496 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
502 copy = rtx_alloc (GET_CODE (insn));
503 PATTERN (copy) = copy_for_inline (PATTERN (insn));
504 INSN_CODE (copy) = -1;
505 LOG_LINKS (copy) = NULL;
506 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
510 copy = label_map[CODE_LABEL_NUMBER (insn)];
514 copy = rtx_alloc (BARRIER);
/* Common tail: link the copy onto the new chain and record the mapping.  */
520 INSN_UID (copy) = INSN_UID (insn);
521 insn_map[INSN_UID (insn)] = copy;
522 NEXT_INSN (last_insn) = copy;
523 PREV_INSN (copy) = last_insn;
527 /* Now copy the REG_NOTES.  */
528 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
529 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
530 && insn_map[INSN_UID(insn)])
531 REG_NOTES (insn_map[INSN_UID (insn)])
532 = copy_for_inline (REG_NOTES (insn));
534 NEXT_INSN (last_insn) = NULL;
536 finish_inline (fndecl, head);
/* Continue compiling FNDECL from the copied chain.  */
538 set_new_first_and_last_insn (first_insn, last_insn);
541 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
/* Make a copy of the entire tree of blocks BLOCK, and return it.
   NOTE(review): the return type (`static tree'), K&R parameter declaration,
   the initialization of `subblocks', and the final `return' were dropped by
   extraction.  */
544 copy_decl_tree (block)
547 tree t, vars, subblocks;
/* Copy the list of variables declared directly in BLOCK.  */
549 vars = copy_list (BLOCK_VARS (block));
552 /* Process all subblocks.  */
/* Recursively copy each subblock, consing the copies in reverse order.  */
553 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
555 tree copy = copy_decl_tree (t);
556 TREE_CHAIN (copy) = subblocks;
/* Copy the block node itself and attach the copied vars and
   (re-reversed, so original order) subblocks.  */
560 t = copy_node (block);
561 BLOCK_VARS (t) = vars;
562 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
566 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
/* Copy DECL_RTLs in all decls in the given BLOCK node: any DECL_RTL that
   is a MEM gets a fresh copy via copy_for_inline, so later address
   modification cannot clobber the saved original.  Recurses over subblocks.
   NOTE(review): the K&R parameter declaration, locals, braces, and the
   recursive call in the second loop body were dropped by extraction.  */
569 copy_decl_rtls (block)
574 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
575 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
576 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
578 /* Process all subblocks.  */
579 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
583 /* Make the insns and PARM_DECLs of the current function permanent
584 and record other information in DECL_SAVED_INSNS to allow inlining
585 of this function in subsequent calls.
587 This routine need not copy any insns because we are not going
588 to immediately compile the insns in the insn chain. There
589 are two cases when we would compile the insns for FNDECL:
590 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
591 be output at the end of other compilation, because somebody took
592 its address. In the first case, the insns of FNDECL are copied
593 as it is expanded inline, so FNDECL's saved insns are not
594 modified. In the second case, FNDECL is used for the last time,
595 so modifying the rtl is not a problem.
597 ??? Actually, we do not verify that FNDECL is not inline expanded
598 by other functions which must also be written down at the end
599 of compilation. We could set flag_no_inline to nonzero when
600 the time comes to write down such functions. */
/* Record FNDECL's insns in DECL_SAVED_INSNS for later inlining, WITHOUT
   copying the chain (the insns will not be compiled further from here,
   so they may be used in place).  Also scans the chain to note which
   parameter pseudos are modified.
   FIX(review): the REG_NOTES address-of was mis-encoded as the character
   `(R)' (an "&REG" -> "&reg;" entity mangle); restored to
   `save_constants (&REG_NOTES (insn));' to match the `&PATTERN' call above.
   NOTE(review): extraction dropped declarations, braces, and the statement
   between the comment at original line 640 and line 643.  */
603 save_for_inline_nocopy (fndecl)
609 int max_labelno, min_labelno, i, len;
612 rtx first_nonparm_insn;
615 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
616 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
617 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
618 for the parms, prior to elimination of virtual registers.
619 These values are needed for substituting parms properly.  */
621 max_parm_reg = max_parm_reg_num ();
622 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
624 /* Make and emit a return-label if we have not already done so.  */
626 if (return_label == 0)
628 return_label = gen_label_rtx ();
629 emit_label (return_label);
/* Last argument 0 = do NOT copy parameter MEM rtl (nocopy variant).  */
632 head = initialize_for_inline (fndecl, get_first_label_num (),
633 max_label_num (), max_reg_num (), 0);
635 /* If there are insns that copy parms from the stack into pseudo registers,
636 those insns are not copied.  `expand_inline_function' must
637 emit the correct code to handle such things.  */
640 if (GET_CODE (insn) != NOTE)
643 /* Get the insn which signals the end of parameter setup code.  */
644 first_nonparm_insn = get_first_nonparm_insn ();
646 /* Now just scan the chain of insns to see what happens to our
647 PARM_DECLs.  If a PARM_DECL is used but never modified, we
648 can substitute its rtl directly when expanding inline (and
649 perform constant folding when its incoming value is constant).
650 Otherwise, we have to copy its value into a new register and track
651 the new register's life.  */
653 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
655 if (insn == first_nonparm_insn)
656 in_nonparm_insns = 1;
658 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
660 if (current_function_uses_const_pool)
662 /* Replace any constant pool references with the actual constant.
663 We will put the constant back if we need to write the
664 function out after all.  */
665 save_constants (&PATTERN (insn));
666 if (REG_NOTES (insn))
667 save_constants (&REG_NOTES (insn));
670 /* Record what interesting things happen to our parameters.  */
671 note_stores (PATTERN (insn), note_modified_parmregs);
675 /* We have now allocated all that needs to be allocated permanently
676 on the rtx obstack.  Set our high-water mark, so that we
677 can free the rest of this when the time comes.  */
681 finish_inline (fndecl, head);
684 /* Given PX, a pointer into an insn, search for references to the constant
685 pool. Replace each with a CONST that has the mode of the original
686 constant, contains the constant, and has RTX_INTEGRATED_P set.
687 Similarly, constant pool addresses not enclosed in a MEM are replaced
688 with an ADDRESS rtx which also gives the constant, mode, and has
689 RTX_INTEGRATED_P set. */
/* Body of save_constants (see the comment block above): *PX is a pointer
   into an insn; replace constant-pool references reached through it with
   CONST (or ADDRESS) rtx's carrying the actual constant, marked
   RTX_INTEGRATED_P.  NOTE(review): the function header (`static void
   save_constants (px) rtx *px;'), the fetch of X from *PX, locals `i'/`j',
   and several braces were dropped by extraction.  */
701 /* If this is a CONST_DOUBLE, don't try to fix things up in
702 CONST_DOUBLE_MEM, because this is an infinite recursion.  */
703 if (GET_CODE (x) == CONST_DOUBLE)
/* A MEM whose address is a constant-pool SYMBOL_REF: replace with the
   pool constant itself, wrapped in CONST.  */
705 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
706 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
708 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
709 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
710 RTX_INTEGRATED_P (new) = 1;
712 /* If the MEM was in a different mode than the constant (perhaps we
713 were only looking at the low-order part), surround it with a
714 SUBREG so we can save both modes.  */
716 if (GET_MODE (x) != const_mode)
718 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
719 RTX_INTEGRATED_P (new) = 1;
/* Recurse in case the constant itself contains pool references.  */
723 save_constants (&XEXP (*px, 0));
/* A bare constant-pool address (not inside a MEM): use ADDRESS.  */
725 else if (GET_CODE (x) == SYMBOL_REF
726 && CONSTANT_POOL_ADDRESS_P (x))
728 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
729 save_constants (&XEXP (*px, 0));
730 RTX_INTEGRATED_P (*px) = 1;
/* Default case: walk X's operands per its rtx format string.  */
735 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
736 int len = GET_RTX_LENGTH (GET_CODE (x));
738 for (i = len-1; i >= 0; i--)
/* Vector operand ('E' format): recurse on each element.  */
743 for (j = 0; j < XVECLEN (x, i); j++)
744 save_constants (&XVECEXP (x, i, j));
/* Expression operand ('e' format): skip null, else recurse.  */
748 if (XEXP (x, i) == 0)
752 /* Hack tail-recursion here.  */
756 save_constants (&XEXP (x, i));
763 /* Note whether a parameter is modified or not. */
/* note_stores callback: REG is being stored into (X is the SET, unused).
   If REG is a pseudo holding a parameter and we are past the
   parm-initialization insns, clear the parm's TREE_READONLY so it is not
   treated as unmodified.  NOTE(review): the `static void' return type,
   K&R parameter declarations and braces were dropped by extraction.  */
766 note_modified_parmregs (reg, x)
770 if (GET_CODE (reg) == REG && in_nonparm_insns
771 && REGNO (reg) < max_parm_reg
772 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
773 && parmdecl_map[REGNO (reg)] != 0)
774 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
777 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
778 according to `reg_map' and `label_map'. The original rtl insns
779 will be saved for inlining; this is used to make a copy
780 which is used to finish compiling the inline function itself.
782 If we find a "saved" constant pool entry, one which was replaced with
783 the value of the constant, convert it back to a constant pool entry.
784 Since the pool wasn't touched, this should simply restore the old
787 All other kinds of rtx are copied except those that can never be
788 changed during compilation. */
/* Copy the rtx ORIG recursively for save_for_inline_copying, remapping
   pseudo-regs via `reg_map', labels via `label_map', and insn references
   via `insn_map'; restores "saved" constant-pool entries (see
   save_constants) back into the pool.
   NOTE(review): extraction dropped this function's return type, K&R
   parameter declaration, locals, braces, and — throughout the big switch —
   the `case' labels themselves; each orphaned fragment below belongs to a
   case whose label is on a dropped line.  */
791 copy_for_inline (orig)
794 register rtx x = orig;
796 register enum rtx_code code;
797 register char *format_ptr;
804 /* These types may be freely shared.  */
/* (case CONST_DOUBLE, presumably — label dropped.)  */
816 /* We have to make a new CONST_DOUBLE to ensure that we account for
817 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
818 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
822 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
823 return immed_real_const_1 (d, GET_MODE (x));
826 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
/* (case CONST — label dropped.)  */
830 /* Get constant pool entry for constant in the pool.  */
831 if (RTX_INTEGRATED_P (x))
832 return validize_mem (force_const_mem (GET_MODE (x),
833 copy_for_inline (XEXP (x, 0))));
/* (case SUBREG — label dropped.)  */
837 /* Get constant pool entry, but access in different mode.  */
838 if (RTX_INTEGRATED_P (x))
841 = force_const_mem (GET_MODE (SUBREG_REG (x)),
842 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
844 PUT_MODE (new, GET_MODE (x));
845 return validize_mem (new);
/* (case ADDRESS — label dropped.)  */
850 /* If not special for constant pool error.  Else get constant pool
address.  */
852 if (! RTX_INTEGRATED_P (x))
855 return XEXP (force_const_mem (GET_MODE (x),
856 copy_for_inline (XEXP (x, 0))), 0);
/* (case ASM_OPERANDS — label dropped.)  */
859 /* If a single asm insn contains multiple output operands
860 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
861 We must make sure that the copied insn continues to share it.  */
862 if (orig_asm_operands_vector == XVEC (orig, 3))
864 x = rtx_alloc (ASM_OPERANDS);
865 XSTR (x, 0) = XSTR (orig, 0);
866 XSTR (x, 1) = XSTR (orig, 1);
867 XINT (x, 2) = XINT (orig, 2);
868 XVEC (x, 3) = copy_asm_operands_vector;
869 XVEC (x, 4) = copy_asm_constraints_vector;
870 XSTR (x, 5) = XSTR (orig, 5);
871 XINT (x, 6) = XINT (orig, 6);
/* (case MEM — label dropped.)  */
877 /* A MEM is usually allowed to be shared if its address is constant
878 or is a constant plus one of the special registers.
880 We do not allow sharing of addresses that are either a special
881 register or the sum of a constant and a special register because
882 it is possible for unshare_all_rtl to copy the address, into memory
883 that won't be saved.  Although the MEM can safely be shared, and
884 won't be copied there, the address itself cannot be shared, and may
885 need to be copied.
887 There are also two exceptions with constants: The first is if the
888 constant is a LABEL_REF or the sum of the LABEL_REF
889 and an integer.  This case can happen if we have an inline
890 function that supplies a constant operand to the call of another
891 inline function that uses it in a switch statement.  In this case,
892 we will be replacing the LABEL_REF, so we have to replace this MEM
893 as well.
895 The second case is if we have a (const (plus (address ..) ...)).
896 In that case we need to put back the address of the constant pool
897 entry.  */
899 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
900 && GET_CODE (XEXP (x, 0)) != LABEL_REF
901 && ! (GET_CODE (XEXP (x, 0)) == CONST
902 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
903 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
905 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
/* (case LABEL_REF — label dropped.)  */
912 /* Must point to the new insn.  */
913 return gen_rtx (LABEL_REF, GET_MODE (orig),
914 label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
/* (case REG — label dropped.)  Pseudos above the virtuals are remapped;
   hard and virtual regs are shared.  */
918 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
919 return reg_map [REGNO (x)];
/* (case SET — label dropped.)  */
924 /* If a parm that gets modified lives in a pseudo-reg,
925 clear its TREE_READONLY to prevent certain optimizations.  */
927 rtx dest = SET_DEST (x);
/* Strip wrappers to find the register actually being stored into.  */
929 while (GET_CODE (dest) == STRICT_LOW_PART
930 || GET_CODE (dest) == ZERO_EXTRACT
931 || GET_CODE (dest) == SUBREG)
932 dest = XEXP (dest, 0);
934 if (GET_CODE (dest) == REG
935 && REGNO (dest) < max_parm_reg
936 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
937 && parmdecl_map[REGNO (dest)] != 0
938 /* The insn to load an arg pseudo from a stack slot
939 does not count as modifying it.  */
941 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
945 #if 0 /* This is a good idea, but here is the wrong place for it.  */
946 /* Arrange that CONST_INTs always appear as the second operand
947 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
948 always appear as the first.  */
950 if (GET_CODE (XEXP (x, 0)) == CONST_INT
951 || (XEXP (x, 1) == frame_pointer_rtx
952 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
953 && XEXP (x, 1) == arg_pointer_rtx)))
956 XEXP (x, 0) = XEXP (x, 1);
963 /* Replace this rtx with a copy of itself.  */
/* Shallow-copy the rtx header and operand slots; operands are then
   replaced in place below since the copy is unshared.  */
965 x = rtx_alloc (code);
966 bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
967 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
969 /* Now scan the subexpressions recursively.
970 We can store any replaced subexpressions directly into X
971 since we know X is not shared!  Any vectors in X
972 must be copied if X was copied.  */
974 format_ptr = GET_RTX_FORMAT (code);
/* NOTE(review): the `case' labels of this format switch ('e', 'u', 'E',
   etc.) were dropped by extraction.  */
976 for (i = 0; i < GET_RTX_LENGTH (code); i++)
978 switch (*format_ptr++)
981 XEXP (x, i) = copy_for_inline (XEXP (x, i));
985 /* Change any references to old-insns to point to the
986 corresponding copied insns.  */
987 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
991 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
995 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
996 for (j = 0; j < XVECLEN (x, i); j++)
998 = copy_for_inline (XVECEXP (x, i, j));
/* Remember the shared ASM_OPERANDS vectors so later ASM_OPERANDS
   in the same insn reuse the same copies (see top of function).  */
1004 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1006 orig_asm_operands_vector = XVEC (orig, 3);
1007 copy_asm_operands_vector = XVEC (x, 3);
1008 copy_asm_constraints_vector = XVEC (x, 4);
1014 /* Unfortunately, we need a global copy of const_equiv map for communication
1015 with a function called from note_stores. Be *very* careful that this
1016 is used properly in the presence of recursion. */
1018 rtx *global_const_equiv_map;
1020 #define FIXED_BASE_PLUS_P(X) \
1021 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1022 && GET_CODE (XEXP (X, 0)) == REG \
1023 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1024 && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
1026 /* Integrate the procedure defined by FNDECL. Note that this function
1027 may wind up calling itself. Since the static variables are not
1028 reentrant, we do not assign them until after the possibility
1029 of recursion is eliminated.
1031 If IGNORE is nonzero, do not produce a value.
1032 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1035 (rtx)-1 if we could not substitute the function
1036 0 if we substituted it and it does not produce a value
1037 else an rtx for where the value is stored. */
1040 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1045 rtx structure_value_addr;
1047 tree formal, actual;
1048 rtx header = DECL_SAVED_INSNS (fndecl);
1049 rtx insns = FIRST_FUNCTION_INSN (header);
1050 rtx parm_insns = FIRST_PARM_INSN (header);
1056 int min_labelno = FIRST_LABELNO (header);
1057 int max_labelno = LAST_LABELNO (header);
1059 rtx local_return_label = 0;
1062 struct inline_remap *map;
1064 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1066 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1067 max_regno = MAX_REGNUM (header) + 3;
1068 if (max_regno < FIRST_PSEUDO_REGISTER)
1071 nargs = list_length (DECL_ARGUMENTS (fndecl));
1073 /* We expect PARMS to have the right length; don't crash if not. */
1074 if (list_length (parms) != nargs)
1076 /* Also check that the parms type match. Since the appropriate
1077 conversions or default promotions have already been applied,
1078 the machine modes should match exactly. */
1079 for (formal = DECL_ARGUMENTS (fndecl),
1082 formal = TREE_CHAIN (formal),
1083 actual = TREE_CHAIN (actual))
1085 tree arg = TREE_VALUE (actual);
1086 enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1087 if (mode != TYPE_MODE (TREE_TYPE (arg)))
1089 /* If they are block mode, the types should match exactly.
1090 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1091 which could happen if the parameter has incomplete type. */
1092 if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
1096 /* Make a binding contour to keep inline cleanups called at
1097 outer function-scope level from looking like they are shadowing
1098 parameter declarations. */
1101 /* Make a fresh binding contour that we can easily remove. */
1103 expand_start_bindings (0);
1104 if (GET_CODE (parm_insns) == NOTE
1105 && NOTE_LINE_NUMBER (parm_insns) > 0)
1106 emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));
1108 /* Expand the function arguments. Do this first so that any
1109 new registers get created before we allocate the maps. */
1111 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1112 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1114 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1116 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1118 /* Actual parameter, converted to the type of the argument within the
1120 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1121 /* Mode of the variable used within the function. */
1122 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1123 /* Where parameter is located in the function. */
1126 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1129 loc = RTVEC_ELT (arg_vector, i);
1131 /* If this is an object passed by invisible reference, we copy the
1132 object into a stack slot and save its address. If this will go
1133 into memory, we do nothing now. Otherwise, we just expand the
1135 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1136 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1138 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
1139 rtx stack_slot = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1);
1141 store_expr (arg, stack_slot, 0);
1143 arg_vals[i] = XEXP (stack_slot, 0);
1145 else if (GET_CODE (loc) != MEM)
1146 arg_vals[i] = expand_expr (arg, 0, mode, EXPAND_SUM);
1150 if (arg_vals[i] != 0
1151 && (! TREE_READONLY (formal)
1152 /* If the parameter is not read-only, copy our argument through
1153 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1154 TARGET in any way. In the inline function, they will likely
1155 be two different pseudos, and `safe_from_p' will make all
1156 sorts of smart assumptions about their not conflicting.
1157 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1158 wrong, so put ARG_VALS[I] into a fresh register. */
1160 && (GET_CODE (arg_vals[i]) == REG
1161 || GET_CODE (arg_vals[i]) == SUBREG
1162 || GET_CODE (arg_vals[i]) == MEM)
1163 && reg_overlap_mentioned_p (arg_vals[i], target))))
1164 arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]);
1167 /* Allocate the structures we use to remap things. */
1169 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1170 map->fndecl = fndecl;
1172 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1173 bzero (map->reg_map, max_regno * sizeof (rtx));
1175 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1176 map->label_map -= min_labelno;
1178 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1179 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1180 map->min_insnno = 0;
1181 map->max_insnno = INSN_UID (header);
1183 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1184 be large enough for all our pseudos. This is the number we are currently
1185 using plus the number in the called routine, plus 15 for each arg,
1186 five to compute the virtual frame pointer, and five for the return value.
1187 This should be enough for most cases. We do not reference entries
1188 outside the range of the map.
1190 ??? These numbers are quite arbitrary and were obtained by
1191 experimentation. At some point, we should try to allocate the
1192 table after all the parameters are set up so we an more accurately
1193 estimate the number of pseudos we will need. */
1195 map->const_equiv_map_size
1196 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1198 map->const_equiv_map
1199 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1200 bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));
1203 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1204 bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
1207 /* Record the current insn in case we have to set up pointers to frame
1208 and argument memory blocks. */
1209 map->insns_at_start = get_last_insn ();
1211 /* Update the outgoing argument size to allow for those in the inlined
1213 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1214 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1216 /* If the inline function needs to make PIC references, that means
1217 that this function's PIC offset table must be used. */
1218 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1219 current_function_uses_pic_offset_table = 1;
1221 /* Process each argument. For each, set up things so that the function's
1222 reference to the argument will refer to the argument being passed.
1223 We only replace REG with REG here. Any simplifications are done
1224 via const_equiv_map.
1226 We make two passes: In the first, we deal with parameters that will
1227 be placed into registers, since we need to ensure that the allocated
1228 register number fits in const_equiv_map. Then we store all non-register
1229 parameters into their memory location. */
1231 for (i = 0; i < nargs; i++)
1233 rtx copy = arg_vals[i];
1235 loc = RTVEC_ELT (arg_vector, i);
1237 /* There are three cases, each handled separately. */
1238 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1239 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1241 /* This must be an object passed by invisible reference (it could
1242 also be a variable-sized object, but we forbid inlining functions
1243 with variable-sized arguments). COPY is the address of the
1244 actual value (this computation will cause it to be copied). We
1245 map that address for the register, noting the actual address as
1246 an equivalent in case it can be substituted into the insns. */
1248 if (GET_CODE (copy) != REG)
1250 temp = copy_addr_to_reg (copy);
1251 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1253 map->const_equiv_map[REGNO (temp)] = copy;
1254 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1258 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1260 else if (GET_CODE (loc) == MEM)
1262 /* This is the case of a parameter that lives in memory.
1263 It will live in the block we allocate in the called routine's
1264 frame that simulates the incoming argument area. Do nothing
1265 now; we will call store_expr later. */
1268 else if (GET_CODE (loc) == REG)
1270 /* This is the good case where the parameter is in a register.
1271 If it is read-only and our argument is a constant, set up the
1272 constant equivalence. */
1273 if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1275 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1276 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1278 map->const_equiv_map[REGNO (temp)] = copy;
1279 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1283 map->reg_map[REGNO (loc)] = copy;
1288 /* Free any temporaries we made setting up this parameter. */
1292 /* Now do the parameters that will be placed in memory. */
1294 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1295 formal; formal = TREE_CHAIN (formal), i++)
1297 rtx copy = arg_vals[i];
1299 loc = RTVEC_ELT (arg_vector, i);
1301 if (GET_CODE (loc) == MEM
1302 /* Exclude case handled above. */
1303 && ! (GET_CODE (XEXP (loc, 0)) == REG
1304 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1306 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1308 /* Compute the address in the area we reserved and store the
1310 temp = copy_rtx_and_substitute (loc, map);
1311 subst_constants (&temp, 0, map);
1312 apply_change_group ();
1313 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1314 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1315 store_expr (arg_trees[i], temp, 0);
1317 /* Free any temporaries we made setting up this parameter. */
1322 /* Deal with the places that the function puts its result.
1323 We are driven by what is placed into DECL_RESULT.
1325 Initially, we assume that we don't have anything special handling for
1326 REG_FUNCTION_RETURN_VALUE_P. */
1328 map->inline_target = 0;
1329 loc = DECL_RTL (DECL_RESULT (fndecl));
1330 if (TYPE_MODE (type) == VOIDmode)
1331 /* There is no return value to worry about. */
1333 else if (GET_CODE (loc) == MEM)
1335 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1338 /* Pass the function the address in which to return a structure value.
1339 Note that a constructor can cause someone to call us with
1340 STRUCTURE_VALUE_ADDR, but the initialization takes place
1341 via the first parameter, rather than the struct return address.
1343 We have two cases: If the address is a simple register indirect,
1344 use the mapping mechanism to point that register to our structure
1345 return address. Otherwise, store the structure return value into
1346 the place that it will be referenced from. */
1348 if (GET_CODE (XEXP (loc, 0)) == REG)
1350 temp = force_reg (Pmode, structure_value_addr);
1351 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1352 if (CONSTANT_P (structure_value_addr)
1353 || (GET_CODE (structure_value_addr) == PLUS
1354 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1355 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1357 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1358 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1363 temp = copy_rtx_and_substitute (loc, map);
1364 subst_constants (&temp, 0, map);
1365 apply_change_group ();
1366 emit_move_insn (temp, structure_value_addr);
1370 /* We will ignore the result value, so don't look at its structure.
1371 Note that preparations for an aggregate return value
1372 do need to be made (above) even if it will be ignored. */
1374 else if (GET_CODE (loc) == REG)
1376 /* The function returns an object in a register and we use the return
1377 value. Set up our target for remapping. */
1379 /* Machine mode function was declared to return. */
1380 enum machine_mode departing_mode = TYPE_MODE (type);
1381 /* (Possibly wider) machine mode it actually computes
1382 (for the sake of callers that fail to declare it right). */
1383 enum machine_mode arriving_mode
1384 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1387 /* Don't use MEMs as direct targets because on some machines
1388 substituting a MEM for a REG makes invalid insns.
1389 Let the combiner substitute the MEM if that is valid. */
1390 if (target == 0 || GET_CODE (target) != REG
1391 || GET_MODE (target) != departing_mode)
1392 target = gen_reg_rtx (departing_mode);
1394 /* If function's value was promoted before return,
1395 avoid machine mode mismatch when we substitute INLINE_TARGET.
1396 But TARGET is what we will return to the caller. */
1397 if (arriving_mode != departing_mode)
1398 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1400 reg_to_map = target;
1402 /* Usually, the result value is the machine's return register.
1403 Sometimes it may be a pseudo. Handle both cases. */
1404 if (REG_FUNCTION_VALUE_P (loc))
1405 map->inline_target = reg_to_map;
1407 map->reg_map[REGNO (loc)] = reg_to_map;
1410 /* Make new label equivalences for the labels in the called function. */
1411 for (i = min_labelno; i < max_labelno; i++)
1412 map->label_map[i] = gen_label_rtx ();
1414 /* Perform postincrements before actually calling the function. */
1417 /* Clean up stack so that variables might have smaller offsets. */
1418 do_pending_stack_adjust ();
1420 /* Save a copy of the location of const_equiv_map for mark_stores, called
1422 global_const_equiv_map = map->const_equiv_map;
1424 /* Now copy the insns one by one. Do this in two passes, first the insns and
1425 then their REG_NOTES, just like save_for_inline. */
1427 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1429 for (insn = insns; insn; insn = NEXT_INSN (insn))
1433 map->orig_asm_operands_vector = 0;
1435 switch (GET_CODE (insn))
1438 pattern = PATTERN (insn);
1440 if (GET_CODE (pattern) == USE
1441 && GET_CODE (XEXP (pattern, 0)) == REG
1442 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1443 /* The (USE (REG n)) at return from the function should
1444 be ignored since we are changing (REG n) into
1448 /* Ignore setting a function value that we don't want to use. */
1449 if (map->inline_target == 0
1450 && GET_CODE (pattern) == SET
1451 && GET_CODE (SET_DEST (pattern)) == REG
1452 && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
1455 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1456 /* REG_NOTES will be copied later. */
1459 /* If this insn is setting CC0, it may need to look at
1460 the insn that uses CC0 to see what type of insn it is.
1461 In that case, the call to recog via validate_change will
1462 fail. So don't substitute constants here. Instead,
1463 do it when we emit the following insn.
1465 For example, see the pyr.md file. That machine has signed and
1466 unsigned compares. The compare patterns must check the
1467 following branch insn to see which what kind of compare to
1470 If the previous insn set CC0, substitute constants on it as
1472 if (sets_cc0_p (PATTERN (copy)) != 0)
1477 try_constants (cc0_insn, map);
1479 try_constants (copy, map);
1482 try_constants (copy, map);
1487 if (GET_CODE (PATTERN (insn)) == RETURN)
1489 if (local_return_label == 0)
1490 local_return_label = gen_label_rtx ();
1491 pattern = gen_jump (local_return_label);
1494 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1496 copy = emit_jump_insn (pattern);
1500 try_constants (cc0_insn, map);
1503 try_constants (copy, map);
1505 /* If this used to be a conditional jump insn but whose branch
1506 direction is now know, we must do something special. */
1507 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1510 /* The previous insn set cc0 for us. So delete it. */
1511 delete_insn (PREV_INSN (copy));
1514 /* If this is now a no-op, delete it. */
1515 if (map->last_pc_value == pc_rtx)
1521 /* Otherwise, this is unconditional jump so we must put a
1522 BARRIER after it. We could do some dead code elimination
1523 here, but jump.c will do it just as well. */
1529 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1530 copy = emit_call_insn (pattern);
1534 try_constants (cc0_insn, map);
1537 try_constants (copy, map);
1539 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1540 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1541 map->const_equiv_map[i] = 0;
1545 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1550 copy = emit_barrier ();
1554 /* It is important to discard function-end and function-beg notes,
1555 so we have only one of each in the current function.
1556 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1557 deleted these in the copy used for continuing compilation,
1558 not the copy used for inlining). */
1559 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1560 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1561 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1562 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1573 RTX_INTEGRATED_P (copy) = 1;
1575 map->insn_map[INSN_UID (insn)] = copy;
1578 /* Now copy the REG_NOTES. */
1579 for (insn = insns; insn; insn = NEXT_INSN (insn))
1580 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1581 && map->insn_map[INSN_UID (insn)])
1582 REG_NOTES (map->insn_map[INSN_UID (insn)])
1583 = copy_rtx_and_substitute (REG_NOTES (insn), map);
1585 if (local_return_label)
1586 emit_label (local_return_label);
1588 /* Make copies of the decls of the symbols in the inline function, so that
1589 the copies of the variables get declared in the current function. Set
1590 up things so that lookup_static_chain knows that to interpret registers
1591 in SAVE_EXPRs for TYPE_SIZEs as local. */
1593 inline_function_decl = fndecl;
1594 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map, 0);
1595 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1596 inline_function_decl = 0;
1598 /* End the scope containing the copied formal parameter variables. */
1600 expand_end_bindings (getdecls (), 1, 1);
1603 emit_line_note (input_filename, lineno);
1605 if (structure_value_addr)
1606 return gen_rtx (MEM, TYPE_MODE (type),
1607 memory_address (TYPE_MODE (type), structure_value_addr));
1611 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1612 push all of those decls and give each one the corresponding home. */
/* NOTE(review): elided listing -- some declarations and lines are missing
   between the embedded original line numbers. */
1615 integrate_parm_decls (args, map, arg_vector)
1617 struct inline_remap *map;
1623 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1625 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1628 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1630 /* These args would always appear unused, if not for this. */
1631 TREE_USED (decl) = 1;
1632 /* Prevent warning for shadowing with these. */
1633 DECL_FROM_INLINE (decl) = 1;
1635 /* Fully instantiate the address with the equivalent form so that the
1636 debugging information contains the actual register, instead of the
1637 virtual register. Do this by not passing an insn to
1639 subst_constants (&new_decl_rtl, 0, map);
1640 apply_change_group ();
1641 DECL_RTL (decl) = new_decl_rtl;
1645 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1646 current function a tree of contexts isomorphic to the one that is given.
1648 LEVEL indicates how far down into the BLOCK tree is the node we are
1649 currently traversing. It is always zero for the initial call.
1651 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1652 registers used in the DECL_RTL field should be remapped. If it is zero,
1653 no mapping is necessary.
1655 FUNCTIONBODY indicates whether the top level block tree corresponds to
1656 a function body. This is identical in meaning to the functionbody
1657 argument of poplevel. */
/* NOTE(review): elided listing -- some lines are missing between the
   embedded original line numbers (e.g. the pushlevel call and locals). */
1660 integrate_decl_tree (let, level, map, functionbody)
1663 struct inline_remap *map;
1670 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1672 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1673 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1674 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1675 if (! functionbody && DECL_RTL (t) != 0)
1677 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1678 /* Fully instantiate the address with the equivalent form so that the
1679 debugging information contains the actual register, instead of the
1680 virtual register. Do this by not passing an insn to
1682 subst_constants (&DECL_RTL (d), 0, map);
1683 apply_change_group ();
1685 else if (DECL_RTL (t))
1686 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
1687 TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
1688 TREE_STATIC (d) = TREE_STATIC (t);
1689 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1690 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1691 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1692 TREE_READONLY (d) = TREE_READONLY (t);
1693 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1694 /* These args would always appear unused, if not for this. */
1696 /* Prevent warning for shadowing with these. */
1697 DECL_FROM_INLINE (d) = 1;
1701 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1702 integrate_decl_tree (t, level + 1, map, functionbody);
1704 node = poplevel (level > 0, 0, level == 0 && functionbody);
1706 TREE_USED (node) = TREE_USED (let);
1709 /* Create a new copy of an rtx.
1710 Recursively copies the operands of the rtx,
1711 except for those few rtx codes that are sharable.
1713 We always return an rtx that is similar to that incoming rtx, with the
1714 exception of possibly changing a REG to a SUBREG or vice versa. No
1715 rtl is ever emitted.
1717 Handle constants that need to be placed in the constant pool by
1718 calling `force_const_mem'. */
/* NOTE(review): this listing is elided -- gaps in the embedded original
   line numbers mean case labels, braces and some statements of the big
   switch below are missing from this excerpt. */
1721 copy_rtx_and_substitute (orig, map)
1723 struct inline_remap *map;
1725 register rtx copy, temp;
1727 register RTX_CODE code;
1728 register enum machine_mode mode;
1729 register char *format_ptr;
1735 code = GET_CODE (orig);
1736 mode = GET_MODE (orig);
1741 /* If the stack pointer register shows up, it must be part of
1742 stack-adjustments (*not* because we eliminated the frame pointer!).
1743 Small hard registers are returned as-is. Pseudo-registers
1744 go through their `reg_map'. */
1745 regno = REGNO (orig);
1746 if (regno <= LAST_VIRTUAL_REGISTER)
1748 /* Some hard registers are also mapped,
1749 but others are not translated. */
1750 if (map->reg_map[regno] != 0)
1751 return map->reg_map[regno];
1753 /* If this is the virtual frame pointer, make space in current
1754 function's stack frame for the stack frame of the inline function.
1756 Copy the address of this area into a pseudo. Map
1757 virtual_stack_vars_rtx to this pseudo and set up a constant
1758 equivalence for it to be the address. This will substitute the
1759 address into insns where it can be substituted and use the new
1760 pseudo where it can't. */
1761 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1764 int size = DECL_FRAME_SIZE (map->fndecl);
1768 loc = assign_stack_temp (BLKmode, size, 1);
1769 loc = XEXP (loc, 0);
1770 #ifdef FRAME_GROWS_DOWNWARD
1771 /* In this case, virtual_stack_vars_rtx points to one byte
1772 higher than the top of the frame area. So compute the offset
1773 to one byte higher than our substitute frame.
1774 Keep the fake frame pointer aligned like a real one. */
1775 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1776 loc = plus_constant (loc, rounded);
1778 map->reg_map[regno] = temp = force_operand (loc, 0);
1779 map->const_equiv_map[REGNO (temp)] = loc;
1780 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1782 seq = gen_sequence ();
1784 emit_insn_after (seq, map->insns_at_start);
1787 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1789 /* Do the same for a block to contain any arguments referenced
1792 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1795 loc = assign_stack_temp (BLKmode, size, 1);
1796 loc = XEXP (loc, 0);
1797 map->reg_map[regno] = temp = force_operand (loc, 0);
1798 map->const_equiv_map[REGNO (temp)] = loc;
1799 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1801 seq = gen_sequence ();
1803 emit_insn_after (seq, map->insns_at_start);
1806 else if (REG_FUNCTION_VALUE_P (orig))
1808 /* This is a reference to the function return value. If
1809 the function doesn't have a return value, error. If the
1810 mode doesn't agree, make a SUBREG. */
1811 if (map->inline_target == 0)
1812 /* Must be unrolling loops or replicating code if we
1813 reach here, so return the register unchanged. */
1815 else if (mode != GET_MODE (map->inline_target))
1816 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1818 return map->inline_target;
1822 if (map->reg_map[regno] == NULL)
1824 map->reg_map[regno] = gen_reg_rtx (mode);
1825 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1826 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1827 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1828 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1830 return map->reg_map[regno];
1833 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1834 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1835 if (GET_CODE (copy) == SUBREG)
1836 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1837 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1839 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1840 SUBREG_WORD (orig));
1844 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1846 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1847 if (GET_CODE (copy) == SUBREG)
1848 copy = SUBREG_REG (copy);
1849 return gen_rtx (code, VOIDmode, copy);
1852 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1853 = LABEL_PRESERVE_P (orig);
1854 return map->label_map[CODE_LABEL_NUMBER (orig)];
1857 copy = rtx_alloc (LABEL_REF);
1858 PUT_MODE (copy, mode);
1859 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1860 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1870 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1871 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1872 duplicate of a CONST_DOUBLE we have already seen. */
1873 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1877 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1878 return immed_real_const_1 (d, GET_MODE (orig));
1881 return immed_double_const (CONST_DOUBLE_LOW (orig),
1882 CONST_DOUBLE_HIGH (orig), VOIDmode);
1885 /* Make new constant pool entry for a constant
1886 that was in the pool of the inline function. */
1887 if (RTX_INTEGRATED_P (orig))
1889 /* If this was an address of a constant pool entry that itself
1890 had to be placed in the constant pool, it might not be a
1891 valid address. So the recursive call below might turn it
1892 into a register. In that case, it isn't a constant any
1893 more, so return it. This has the potential of changing a
1894 MEM into a REG, but we'll assume that it is safe. */
1895 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
1896 if (! CONSTANT_P (temp))
1898 return validize_mem (force_const_mem (GET_MODE (orig), temp));
1903 /* If from constant pool address, make new constant pool entry and
1904 return its address. */
1905 if (! RTX_INTEGRATED_P (orig))
1908 temp = force_const_mem (GET_MODE (orig),
1909 copy_rtx_and_substitute (XEXP (orig, 0), map));
1912 /* Legitimizing the address here is incorrect.
1914 The only ADDRESS rtx's that can reach here are ones created by
1915 save_constants. Hence the operand of the ADDRESS is always legal
1916 in this position of the instruction, since the original rtx without
1917 the ADDRESS was legal.
1919 The reason we don't legitimize the address here is that on the
1920 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
1921 This code forces the operand of the address to a register, which
1922 fails because we can not take the HIGH part of a register.
1924 Also, change_address may create new registers. These registers
1925 will not have valid reg_map entries. This can cause try_constants()
1926 to fail because it assumes that all registers in the rtx have valid
1927 reg_map entries, and it may end up replacing one of these new
1928 registers with junk. */
1930 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1931 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1934 return XEXP (temp, 0);
1937 /* If a single asm insn contains multiple output operands
1938 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1939 We must make sure that the copied insn continues to share it. */
1940 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1942 copy = rtx_alloc (ASM_OPERANDS);
1943 XSTR (copy, 0) = XSTR (orig, 0);
1944 XSTR (copy, 1) = XSTR (orig, 1);
1945 XINT (copy, 2) = XINT (orig, 2);
1946 XVEC (copy, 3) = map->copy_asm_operands_vector;
1947 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1948 XSTR (copy, 5) = XSTR (orig, 5);
1949 XINT (copy, 6) = XINT (orig, 6);
1955 /* This is given special treatment because the first
1956 operand of a CALL is a (MEM ...) which may get
1957 forced into a register for cse. This is undesirable
1958 if function-address cse isn't wanted or if we won't do cse. */
1959 #ifndef NO_FUNCTION_CSE
1960 if (! (optimize && ! flag_no_function_cse))
1962 return gen_rtx (CALL, GET_MODE (orig),
1963 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
1964 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
1965 copy_rtx_and_substitute (XEXP (orig, 1), map));
1969 /* Must be ifdefed out for loop unrolling to work. */
1975 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1977 If the nonlocal goto is into the current function,
1978 this will result in unnecessarily bad code, but should work. */
1979 if (SET_DEST (orig) == virtual_stack_vars_rtx
1980 || SET_DEST (orig) == virtual_incoming_args_rtx)
1981 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
1982 copy_rtx_and_substitute (SET_SRC (orig), map));
1986 copy = rtx_alloc (MEM);
1987 PUT_MODE (copy, mode);
1988 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
1989 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
1990 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
1991 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
1995 copy = rtx_alloc (code);
1996 PUT_MODE (copy, mode);
1997 copy->in_struct = orig->in_struct;
1998 copy->volatil = orig->volatil;
1999 copy->unchanging = orig->unchanging;
2001 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2003 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2005 switch (*format_ptr++)
2011 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2015 /* Change any references to old-insns to point to the
2016 corresponding copied insns. */
2017 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2021 XVEC (copy, i) = XVEC (orig, i);
2022 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2024 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2025 for (j = 0; j < XVECLEN (copy, i); j++)
2026 XVECEXP (copy, i, j)
2027 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2032 XINT (copy, i) = XINT (orig, i);
2036 XSTR (copy, i) = XSTR (orig, i);
2044 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2046 map->orig_asm_operands_vector = XVEC (orig, 3);
2047 map->copy_asm_operands_vector = XVEC (copy, 3);
2048 map->copy_asm_constraints_vector = XVEC (copy, 4);
2054 /* Substitute known constant values into INSN, if that is valid. */
/* NOTE(review): elided listing -- some lines (declarations, braces) are
   missing between the embedded original line numbers. */
2057 try_constants (insn, map)
2059 struct inline_remap *map;
2064 subst_constants (&PATTERN (insn), insn, map);
2066 /* Apply the changes if they are valid; otherwise discard them. */
2067 apply_change_group ();
2069 /* Show we don't know the value of anything stored or clobbered. */
2070 note_stores (PATTERN (insn), mark_stores);
2071 map->last_pc_value = 0;
2073 map->last_cc0_value = 0;
2076 /* Set up any constant equivalences made in this insn. */
2077 for (i = 0; i < map->num_sets; i++)
2079 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2081 int regno = REGNO (map->equiv_sets[i].dest);
2083 if (map->const_equiv_map[regno] == 0
2084 /* Following clause is a hack to make case work where GNU C++
2085 reassigns a variable to make cse work right. */
2086 || ! rtx_equal_p (map->const_equiv_map[regno],
2087 map->equiv_sets[i].equiv)
2089 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2090 map->const_age_map[regno] = map->const_age;
2093 else if (map->equiv_sets[i].dest == pc_rtx)
2094 map->last_pc_value = map->equiv_sets[i].equiv;
2096 else if (map->equiv_sets[i].dest == cc0_rtx)
2097 map->last_cc0_value = map->equiv_sets[i].equiv;
2102 /* Substitute known constants for pseudo regs in the contents of LOC,
2103    which are part of INSN.
2104    If INSN is zero, the substitution should always be done (this is used to
2106    These changes are taken out by try_constants if the result is not valid.
2108    Note that we are more concerned with determining when the result of a SET
2109    is a constant, for further propagation, than actually inserting constants
2110    into insns; cse will do the latter task better.
2112    This function is also used to adjust address of items previously addressed
2113    via the virtual stack variable or virtual incoming arguments registers. */
/* NOTE(review): this excerpt is elided — the `rtx *loc; rtx insn;'
   parameter declarations, the `switch (code)' statement, its case
   labels, and various braces/locals are missing.  The section
   comments below reconstruct the case structure from the surviving
   code; confirm against the full source.  */
2116 subst_constants (loc, insn, map)
2119      struct inline_remap *map;
2123   register enum rtx_code code;
2124   register char *format_ptr;
/* Remember how many changes were already queued so a spoiled memory
   address (below) can be rolled back to exactly this point.  */
2125   int num_changes = num_validated_changes ();
2127   enum machine_mode op0_mode;
2129   code = GET_CODE (x);
/* cc0 case (presumably): substitute the last known cc0 value.  */
2144       validate_change (insn, loc, map->last_cc0_value, 1);
2150       /* The only thing we can do with a USE or CLOBBER is possibly do
2151 	 some substitutions in a MEM within it.  */
2152       if (GET_CODE (XEXP (x, 0)) == MEM)
2153 	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2157       /* Substitute for parms and known constants.  Don't replace
2158 	 hard regs used as user variables with constants.  */
2160 	int regno = REGNO (x);
/* Only substitute when an equivalence is recorded and still fresh
   (its age is not older than the current const_age).  */
2162 	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2163 	    && regno < map->const_equiv_map_size
2164 	    && map->const_equiv_map[regno] != 0
2165 	    && map->const_age_map[regno] >= map->const_age)
2166 	  validate_change (insn, loc, map->const_equiv_map[regno], 1);
2171       /* SUBREG is ordinary, but don't make nested SUBREGs and try to simplify
2174 	rtx inner = SUBREG_REG (x);
2177 	/* We can't call subst_constants on &SUBREG_REG (x) because any
2178 	   constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2179 	   see what is inside, try to form the new SUBREG and see if that is
2180 	   valid.  We handle two cases: extracting a full word in an
2181 	   integral mode and extracting the low part.  */
2182 	subst_constants (&inner, 0, map);
/* Case 1: SUBREG extracts a full word of an integral-mode value.  */
2184 	if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2185 	    && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2186 	    && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2187 	  new = operand_subword (inner, SUBREG_WORD (x), 0,
2188 				 GET_MODE (SUBREG_REG (x)));
/* Case 2: SUBREG is the low part; try a lowpart extraction.  */
2190 	if (new == 0 && subreg_lowpart_p (x))
2191 	  new = gen_lowpart_common (GET_MODE (x), inner);
2194 	  validate_change (insn, loc, new, 1);
/* MEM case (presumably): substitute within the address.  */
2200       subst_constants (&XEXP (x, 0), insn, map);
2202       /* If a memory address got spoiled, change it back.  */
2203       if (insn != 0 && num_validated_changes () != num_changes
2204 	  && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2205 	cancel_changes (num_changes);
2210       /* Substitute constants in our source, and in any arguments to a
2211 	 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2213 	rtx *dest_loc = &SET_DEST (x);
2214 	rtx dest = *dest_loc;
2217 	subst_constants (&SET_SRC (x), insn, map);
/* Walk through wrappers around the real destination; only the
   position/length operands of a ZERO_EXTRACT may take constants.  */
2220 	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2221 	       || GET_CODE (*dest_loc) == SIGN_EXTRACT
2222 	       || GET_CODE (*dest_loc) == SUBREG
2223 	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2225 	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2227 		subst_constants (&XEXP (*dest_loc, 1), insn, map);
2228 		subst_constants (&XEXP (*dest_loc, 2), insn, map);
2230 	    dest_loc = &XEXP (*dest_loc, 0);
2233 	/* Check for the case of DEST a SUBREG, both it and the underlying
2234 	   register are less than one word, and the SUBREG has the wider mode.
2235 	   In the case, we are really setting the underlying register to the
2236 	   source converted to the mode of DEST.  So indicate that.  */
2237 	if (GET_CODE (dest) == SUBREG
2238 	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2239 	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2240 	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2241 		<= GET_MODE_SIZE (GET_MODE (dest)))
2242 	    && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2243 	  src = tem, dest = SUBREG_REG (dest);
2245 	/* If storing a recognizable value save it for later recording.  */
/* Recognizable values: constants, virtual-register-plus-constant
   addresses, COMPAREs (for cc-setting), and jump targets.  */
2246 	if ((map->num_sets < MAX_RECOG_OPERANDS)
2247 	    && (CONSTANT_P (src)
2248 		|| (GET_CODE (src) == PLUS
2249 		    && GET_CODE (XEXP (src, 0)) == REG
2250 		    && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2251 		    && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2252 		    && CONSTANT_P (XEXP (src, 1)))
2253 		|| GET_CODE (src) == COMPARE
2258 		    && (src == pc_rtx || GET_CODE (src) == RETURN
2259 			|| GET_CODE (src) == LABEL_REF))))
2261 	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
2262 	       it will cause us to save the COMPARE with any constants
2263 	       substituted, which is what we want for later.  */
2264 	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2265 	    map->equiv_sets[map->num_sets++].dest = dest;
/* Default: recurse over the sub-expressions per the rtx format.  */
2272   format_ptr = GET_RTX_FORMAT (code);
2274   /* If the first operand is an expression, save its mode for later.  */
2275   if (*format_ptr == 'e')
2276     op0_mode = GET_MODE (XEXP (x, 0));
2278   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2280     switch (*format_ptr++)
2287 	subst_constants (&XEXP (x, i), insn, map);
2296 	if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2299 	    for (j = 0; j < XVECLEN (x, i); j++)
2300 	      subst_constants (&XVECEXP (x, i, j), insn, map);
2309   /* If this is a commutative operation, move a constant to the second
2310      operand unless the second operand is already a CONST_INT.  */
2311   if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2312       && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2314       rtx tem = XEXP (x, 0);
2315       validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2316       validate_change (insn, &XEXP (x, 1), tem, 1);
2319   /* Simplify the expression in case we put in some constants.  */
2320   switch (GET_RTX_CLASS (code))
2323       new = simplify_unary_operation (code, GET_MODE (x),
2324 				      XEXP (x, 0), op0_mode);
/* Comparisons: a VOIDmode first operand (e.g. a constant) means the
   mode must be taken from the second operand instead.  */
2329 	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2330 	if (op_mode == VOIDmode)
2331 	  op_mode = GET_MODE (XEXP (x, 1));
2332 	new = simplify_relational_operation (code, op_mode,
2333 					     XEXP (x, 0), XEXP (x, 1));
2339       new = simplify_binary_operation (code, GET_MODE (x),
2340 				       XEXP (x, 0), XEXP (x, 1));
2345       new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2346 					XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2351     validate_change (insn, loc, new, 1);
2354 /* Show that registers modified no longer contain known constants.  We are
2355    called from note_stores with parts of the new insn.
   DEST is the stored-to location; a store to any part of a register
   (via SUBREG) invalidates the whole register's recorded equivalence
   in global_const_equiv_map.  X (the store itself) is unused here but
   required by the note_stores callback interface.  */
2358 mark_stores (dest, x)
/* NOTE(review): the `rtx dest, x;' parameter declarations and braces
   are elided from this excerpt.  */
2362   if (GET_CODE (dest) == SUBREG)
2363     dest = SUBREG_REG (dest);
2365   if (GET_CODE (dest) == REG)
2366     global_const_equiv_map[REGNO (dest)] = 0;
2369 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2370    pointed to by PX, they represent constants in the constant pool.
2371    Replace these with a new memory reference obtained from force_const_mem.
2372    Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2373    address of a constant pool entry.  Replace them with the address of
2374    a new constant pool entry obtained from force_const_mem.  */
2377 restore_constants (px)
/* NOTE(review): the `rtx *px;' parameter declaration, locals
   (x, fmt, i, j), and various braces/else-arms are elided from this
   excerpt.  */
2387   if (GET_CODE (x) == CONST_DOUBLE)
2389       /* We have to make a new CONST_DOUBLE to ensure that we account for
2390 	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
2391       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2395 	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2396 	  *px = immed_real_const_1 (d, GET_MODE (x));
/* Non-float CONST_DOUBLE: rebuild from the raw low/high words.  */
2399 	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
/* Marked CONST: fix up its operand, then re-enter it in the current
   function's constant pool as a valid MEM.  */
2403   else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2405       restore_constants (&XEXP (x, 0));
2406       *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2408   else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2410       /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
2411       rtx new = XEXP (SUBREG_REG (x), 0);
2413       restore_constants (&new);
/* Pool the constant in the inner mode, then relabel the MEM with the
   outer (SUBREG) mode.  */
2414       new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2415       PUT_MODE (new, GET_MODE (x));
2416       *px = validize_mem (new);
/* Marked ADDRESS: replace with the address of a fresh pool entry.  */
2418   else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2420       restore_constants (&XEXP (x, 0));
2421       *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
/* Otherwise recurse over sub-expressions per the rtx format string.  */
2425       fmt = GET_RTX_FORMAT (GET_CODE (x));
2426       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2431 	      for (j = 0; j < XVECLEN (x, i); j++)
2432 		restore_constants (&XVECEXP (x, i, j));
2436 	      restore_constants (&XEXP (x, i));
2443 /* Output the assembly language code for the function FNDECL
2444    from its DECL_SAVED_INSNS.  Used for inline functions that are output
2445    at end of compilation instead of where they came in the source.
   Rebuilds enough per-function compiler state (parms, frame size,
   function flags, constant pool) from the saved insn chain, then runs
   the normal back end via rest_of_compilation.  */
2448 output_inline_function (fndecl)
/* NOTE(review): the `tree fndecl;' parameter declaration, the `rtx last;'
   local, braces, and scattered statements are elided from this
   excerpt.  */
2451   rtx head = DECL_SAVED_INSNS (fndecl);
/* Allocate in the temporary obstack while compiling this function;
   matched by permanent_allocation at the end.  */
2454   temporary_allocation ();
2456   current_function_decl = fndecl;
2458   /* This call is only used to initialize global variables.  */
2459   init_function_start (fndecl, "lossage", 1);
2461   /* Redo parameter determinations in case the FUNCTION_...
2462      macros took machine-specific actions that need to be redone.  */
2463   assign_parms (fndecl, 1);
2465   /* Set stack frame size.  */
2466   assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2468   restore_reg_data (FIRST_PARM_INSN (head));
2470   stack_slot_list = STACK_SLOT_LIST (head);
/* Re-derive the current_function_* flag variables from the flag word
   saved in the insn header.  */
2472   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2473     current_function_calls_alloca = 1;
2475   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2476     current_function_calls_setjmp = 1;
2478   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2479     current_function_calls_longjmp = 1;
2481   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2482     current_function_returns_struct = 1;
2484   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2485     current_function_returns_pcc_struct = 1;
2487   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2488     current_function_needs_context = 1;
2490   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2491     current_function_has_nonlocal_label = 1;
2493   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2494     current_function_returns_pointer = 1;
2496   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2497     current_function_uses_const_pool = 1;
2499   if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2500     current_function_uses_pic_offset_table = 1;
2502   current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2503   current_function_pops_args = POPS_ARGS (head);
2505   /* There is no need to output a return label again.  */
2508   expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2510   /* Find last insn and rebuild the constant pool.  */
2511   for (last = FIRST_PARM_INSN (head);
2512        NEXT_INSN (last); last = NEXT_INSN (last))
/* Only real insns ('i' class) carry patterns and notes to fix up.  */
2514       if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2516 	  restore_constants (&PATTERN (last));
/* NOTE(review): "®_NOTES" on the next line is an encoding-mangled
   "&REG_NOTES" (the "&REG" prefix was read as the "&reg;" entity) —
   restore before compiling.  */
2517 	  restore_constants (®_NOTES (last));
2521   set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2522   set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2524   /* Compile this function all the way down to assembly code.  */
2525   rest_of_compilation (fndecl);
2527   current_function_decl = 0;
/* Return to permanent allocation now that this function is done.  */
2529   permanent_allocation ();