1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
29 #include "insn-config.h"
30 #include "insn-flags.h"
34 #include "integrate.h"
40 #define obstack_chunk_alloc xmalloc
41 #define obstack_chunk_free free
43 extern struct obstack *function_maybepermanent_obstack;
45 extern tree pushdecl ();
46 extern tree poplevel ();
/* Round VALUE up to the next highest integer that meets the alignment ALIGN
   (which must be a power of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
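/* For illustration: with a power-of-two ALIGN, CEIL_ROUND (5, 4) == 8,
   CEIL_ROUND (8, 4) == 8, and CEIL_ROUND (0, 4) == 0; adding ALIGN-1 and
   then masking away the low-order bits rounds VALUE up to the next
   alignment boundary.  */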
52 /* Default max number of insns a function can have and still be inline.
53 This is overridden on RISC machines. */
54 #ifndef INTEGRATE_THRESHOLD
55 #define INTEGRATE_THRESHOLD(DECL) \
56 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
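/* For example, a function with two formal parameters gets a default
   threshold of 8 * (8 + 2) = 80 insns; the limit grows with the number of
   arguments, presumably because the call overhead saved by inlining grows
   with them as well.  */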
59 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
60 static void finish_inline PROTO((tree, rtx));
61 static void adjust_copied_decl_tree PROTO((tree));
62 static tree copy_decl_list PROTO((tree));
63 static tree copy_decl_tree PROTO((tree));
64 static void copy_decl_rtls PROTO((tree));
65 static void save_constants PROTO((rtx *));
66 static void note_modified_parmregs PROTO((rtx, rtx));
67 static rtx copy_for_inline PROTO((rtx));
68 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
69 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
70 static void save_constants_in_decl_trees PROTO ((tree));
71 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
72 static void restore_constants PROTO((rtx *));
73 static void set_block_origin_self PROTO((tree));
74 static void set_decl_origin_self PROTO((tree));
75 static void set_block_abstract_flags PROTO((tree, int));
77 void set_decl_abstract_flags PROTO((tree, int));
79 /* Returns the Ith entry in the label_map contained in MAP. If the
80 Ith entry has not yet been set, it is assumed to be a fresh label.
81 Essentially, we use this function to perform a lazy initialization
82 of label_map, thereby avoiding huge memory explosions when the
83 label_map gets very large. */
rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];
  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();
  return x;
}
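/* A sketch of the intended use (the exact call sites may differ): when a
   copied insn mentions a label, the copier asks for

     get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0)))

   so a fresh CODE_LABEL is allocated only for labels the inlined body
   actually references, rather than for every label in the map's range.  */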
98 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
99 is safe and reasonable to integrate into other functions.
100 Nonzero means value is a warning message with a single %s
101 for the function's name. */
char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;
114 /* No inlines with varargs. `grokdeclarator' gives a warning
115 message about that if `inline' is specified. This code
     is put in to catch the volunteers.  */
117 if ((last && TREE_VALUE (last) != void_type_node)
118 || current_function_varargs)
119 return "varargs function cannot be inline";
121 if (current_function_calls_alloca)
122 return "function using alloca cannot be inline";
124 if (current_function_contains_functions)
125 return "function with nested functions cannot be inline";
  /* If it's not even close, don't even look.  */
128 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
129 return "function too large to be inline";
132 /* Don't inline functions which do not specify a function prototype and
     have a BLKmode argument or take the address of a parameter.  */
134 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
136 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
137 TREE_ADDRESSABLE (parms) = 1;
138 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
139 return "no prototype, and parameter address used; cannot be inline";
143 /* We can't inline functions that return structures
144 the old-fashioned PCC way, copying into a static block. */
145 if (current_function_returns_pcc_struct)
146 return "inline functions not supported for this return value type";
148 /* We can't inline functions that return BLKmode structures in registers. */
149 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
150 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
151 return "inline functions not supported for this return value type";
153 /* We can't inline functions that return structures of varying size. */
154 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
155 return "function with varying-size return value cannot be inline";
157 /* Cannot inline a function with a varying size argument or one that
158 receives a transparent union. */
159 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
161 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
162 return "function with varying-size parameter cannot be inline";
163 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
164 return "function with transparent unit parameter cannot be inline";
167 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
169 for (ninsns = 0, insn = get_first_nonparm_insn ();
170 insn && ninsns < max_insns;
171 insn = NEXT_INSN (insn))
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	ninsns++;
175 if (ninsns >= max_insns)
176 return "function too large to be inline";
179 /* We cannot inline this function if forced_labels is non-zero. This
180 implies that a label in this function was used as an initializer.
181 Because labels can not be duplicated, all labels in the function
182 will be renamed when it is inlined. However, there is no way to find
183 and fix all variables initialized with addresses of labels in this
184 function, hence inlining is impossible. */
187 return "function with label addresses used in initializers cannot inline";
189 /* We cannot inline a nested function that jumps to a nonlocal label. */
190 if (current_function_has_nonlocal_goto)
191 return "function with nonlocal goto cannot be inline";
193 /* This is a hack, until the inliner is taught about eh regions at
194 the start of the function. */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    ;
201 if (insn && GET_CODE (insn) == NOTE
202 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
203 return "function with complex parameters cannot be inline";
206 /* We can't inline functions that return a PARALLEL rtx. */
207 result = DECL_RTL (DECL_RESULT (fndecl));
208 if (result && GET_CODE (result) == PARALLEL)
209 return "inline functions not supported for this return value type";
214 /* Variables used within save_for_inline. */
216 /* Mapping from old pseudo-register to new pseudo-registers.
217 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
218 It is allocated in `save_for_inline' and `expand_inline_function',
219 and deallocated on exit from each of those routines. */
222 /* Mapping from old code-labels to new code-labels.
223 The first element of this map is label_map[min_labelno].
224 It is allocated in `save_for_inline' and `expand_inline_function',
225 and deallocated on exit from each of those routines. */
226 static rtx *label_map;
228 /* Mapping from old insn uid's to copied insns.
229 It is allocated in `save_for_inline' and `expand_inline_function',
230 and deallocated on exit from each of those routines. */
231 static rtx *insn_map;
233 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
234 Zero for a reg that isn't a parm's home.
235 Only reg numbers less than max_parm_reg are mapped here. */
236 static tree *parmdecl_map;
238 /* Keep track of first pseudo-register beyond those that are parms. */
239 extern int max_parm_reg;
240 extern rtx *parm_reg_stack_loc;
242 /* When an insn is being copied by copy_for_inline,
243 this is nonzero if we have copied an ASM_OPERANDS.
244 In that case, it is the original input-operand vector. */
245 static rtvec orig_asm_operands_vector;
247 /* When an insn is being copied by copy_for_inline,
248 this is nonzero if we have copied an ASM_OPERANDS.
249 In that case, it is the copied input-operand vector. */
250 static rtvec copy_asm_operands_vector;
252 /* Likewise, this is the copied constraints vector. */
253 static rtvec copy_asm_constraints_vector;
255 /* In save_for_inline, nonzero if past the parm-initialization insns. */
256 static int in_nonparm_insns;
258 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
259 needed to save FNDECL's insns and info for future inline expansion. */
262 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
269 int function_flags, i;
273 /* Compute the values of any flags we must restore when inlining this. */
276 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
277 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
278 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
279 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
280 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
281 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
282 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
283 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
284 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
285 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
287 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
288 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
289 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
291 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
293 parms = TREE_CHAIN (parms), i++)
295 rtx p = DECL_RTL (parms);
297 /* If we have (mem (addressof (mem ...))), use the inner MEM since
298 otherwise the copy_rtx call below will not unshare the MEM since
299 it shares ADDRESSOF. */
300 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
301 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
302 p = XEXP (XEXP (p, 0), 0);
304 if (GET_CODE (p) == MEM && copy)
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can modify the address
	     in place.  */
310 rtx new = copy_rtx (p);
312 /* Don't leave the old copy anywhere in this decl. */
313 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
314 || (GET_CODE (DECL_RTL (parms)) == MEM
315 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
316 && (XEXP (DECL_RTL (parms), 0)
317 == XEXP (DECL_INCOMING_RTL (parms), 0))))
318 DECL_INCOMING_RTL (parms) = new;
319 DECL_RTL (parms) = new;
322 RTVEC_ELT (arg_vector, i) = p;
324 if (GET_CODE (p) == REG)
325 parmdecl_map[REGNO (p)] = parms;
326 else if (GET_CODE (p) == CONCAT)
328 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
329 rtx pimag = gen_imagpart (GET_MODE (preal), p);
331 if (GET_CODE (preal) == REG)
332 parmdecl_map[REGNO (preal)] = parms;
333 if (GET_CODE (pimag) == REG)
334 parmdecl_map[REGNO (pimag)] = parms;
337 /* This flag is cleared later
338 if the function ever modifies the value of the parm. */
339 TREE_READONLY (parms) = 1;
342 /* Assume we start out in the insns that set up the parameters. */
343 in_nonparm_insns = 0;
  /* The list of DECL_SAVED_INSNS starts off with a header which
346 contains the following information:
348 the first insn of the function (not including the insns that copy
349 parameters into registers).
350 the first parameter insn of the function,
351 the first label used by that function,
352 the last label used by that function,
353 the highest register number used for parameters,
354 the total number of registers used,
355 the size of the incoming stack area for parameters,
356 the number of bytes popped on return,
358 the labels that are forced to exist,
359 some flags that are used to restore compiler globals,
360 the value of current_function_outgoing_args_size,
361 the original argument vector,
362 the original DECL_INITIAL,
363 and pointers to the table of pseudo regs, pointer flags, and alignment. */
365 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
366 max_parm_reg, max_reg,
367 current_function_args_size,
368 current_function_pops_args,
369 stack_slot_list, forced_labels, function_flags,
370 current_function_outgoing_args_size,
371 arg_vector, (rtx) DECL_INITIAL (fndecl),
372 (rtvec) regno_reg_rtx, regno_pointer_flag,
374 (rtvec) parm_reg_stack_loc);
377 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
378 things that must be done to make FNDECL expandable as an inline function.
379 HEAD contains the chain of insns to which FNDECL will expand. */
382 finish_inline (fndecl, head)
386 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
387 FIRST_PARM_INSN (head) = get_insns ();
388 DECL_SAVED_INSNS (fndecl) = head;
389 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
392 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
393 they all point to the new (copied) rtxs. */
396 adjust_copied_decl_tree (block)
399 register tree subblock;
400 register rtx original_end;
402 original_end = BLOCK_END_NOTE (block);
405 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
406 NOTE_SOURCE_FILE (original_end) = 0;
409 /* Process all subblocks. */
410 for (subblock = BLOCK_SUBBLOCKS (block);
412 subblock = TREE_CHAIN (subblock))
413 adjust_copied_decl_tree (subblock);
416 /* Make the insns and PARM_DECLs of the current function permanent
417 and record other information in DECL_SAVED_INSNS to allow inlining
418 of this function in subsequent calls.
420 This function is called when we are going to immediately compile
421 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
422 modified by the compilation process, so we copy all of them to
423 new storage and consider the new insns to be the insn chain to be
424 compiled. Our caller (rest_of_compilation) saves the original
425 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
427 /* ??? The nonlocal_label list should be adjusted also. However, since
428 a function that contains a nested function never gets inlined currently,
429 the nonlocal_label list will always be empty, so we don't worry about
433 save_for_inline_copying (fndecl)
436 rtx first_insn, last_insn, insn;
438 int max_labelno, min_labelno, i, len;
441 rtx first_nonparm_insn;
443 rtx *new_parm_reg_stack_loc;
446 /* Make and emit a return-label if we have not already done so.
447 Do this before recording the bounds on label numbers. */
449 if (return_label == 0)
451 return_label = gen_label_rtx ();
452 emit_label (return_label);
455 /* Get some bounds on the labels and registers used. */
457 max_labelno = max_label_num ();
458 min_labelno = get_first_label_num ();
459 max_reg = max_reg_num ();
461 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
462 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
464 for the parms, prior to elimination of virtual registers.
465 These values are needed for substituting parms properly. */
467 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
469 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
471 if (current_function_uses_const_pool)
473 /* Replace any constant pool references with the actual constant. We
474 will put the constants back in the copy made below. */
475 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
476 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
478 save_constants (&PATTERN (insn));
479 if (REG_NOTES (insn))
480 save_constants (®_NOTES (insn));
  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
485 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
487 /* Clear out the constant pool so that we can recreate it with the
488 copied constants below. */
489 init_const_rtx_hash_table ();
490 clear_const_double_mem ();
493 max_uid = INSN_UID (head);
495 /* We have now allocated all that needs to be allocated permanently
496 on the rtx obstack. Set our high-water mark, so that we
497 can free the rest of this when the time comes. */
  /* Copy the chain of insns of this function.
502 Install the copied chain as the insns of this function,
503 for continued compilation;
504 the original chain is recorded as the DECL_SAVED_INSNS
505 for inlining future calls. */
507 /* If there are insns that copy parms from the stack into pseudo registers,
508 those insns are not copied. `expand_inline_function' must
509 emit the correct code to handle such things. */
512 if (GET_CODE (insn) != NOTE)
514 first_insn = rtx_alloc (NOTE);
515 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
516 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
517 INSN_UID (first_insn) = INSN_UID (insn);
518 PREV_INSN (first_insn) = NULL;
519 NEXT_INSN (first_insn) = NULL;
520 last_insn = first_insn;
522 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
523 Make these new rtx's now, and install them in regno_reg_rtx, so they
524 will be the official pseudo-reg rtx's for the rest of compilation. */
526 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
528 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
529 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
530 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
531 regno_reg_rtx[i], len);
533 regno_reg_rtx = reg_map;
535 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
536 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
537 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
538 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
539 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
541 /* Likewise each label rtx must have a unique rtx as its copy. */
543 /* We used to use alloca here, but the size of what it would try to
544 allocate would occasionally cause it to exceed the stack limit and
545 cause unpredictable core dumps. Some examples were > 2Mb in size. */
546 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
548 for (i = min_labelno; i < max_labelno; i++)
549 label_map[i] = gen_label_rtx ();
551 /* Likewise for parm_reg_stack_slot. */
552 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
553 for (i = 0; i < max_parm_reg; i++)
554 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
556 parm_reg_stack_loc = new_parm_reg_stack_loc;
558 /* Record the mapping of old insns to copied insns. */
560 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
561 bzero ((char *) insn_map, max_uid * sizeof (rtx));
563 /* Get the insn which signals the end of parameter setup code. */
564 first_nonparm_insn = get_first_nonparm_insn ();
566 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
567 (the former occurs when a variable has its address taken)
568 since these may be shared and can be changed by virtual
569 register instantiation. DECL_RTL values for our arguments
570 have already been copied by initialize_for_inline. */
571 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
572 if (GET_CODE (regno_reg_rtx[i]) == MEM)
573 XEXP (regno_reg_rtx[i], 0)
574 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
  /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
     contained in it.  */
578 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
579 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
580 max_parm_reg * sizeof (rtx));
581 parm_reg_stack_loc = new2;
582 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
583 if (parm_reg_stack_loc[i])
584 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
586 /* Copy the tree of subblocks of the function, and the decls in them.
587 We will use the copy for compiling this function, then restore the original
588 subblocks and decls for use when inlining this function.
590 Several parts of the compiler modify BLOCK trees. In particular,
591 instantiate_virtual_regs will instantiate any virtual regs
592 mentioned in the DECL_RTLs of the decls, and loop
593 unrolling will replicate any BLOCK trees inside an unrolled loop.
595 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
596 which we will use for inlining. The rtl might even contain pseudoregs
597 whose space has been freed. */
599 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
600 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
602 /* Now copy each DECL_RTL which is a MEM,
603 so it is safe to modify their addresses. */
604 copy_decl_rtls (DECL_INITIAL (fndecl));
606 /* The fndecl node acts as its own progenitor, so mark it as such. */
607 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
  /* Now copy the chain of insns.  Do this in two passes: the first time,
     copy each insn and its body; the second time, copy the REG_NOTES.  This
     is because a REG_NOTE may have a forward pointer to another insn.  */
613 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
615 orig_asm_operands_vector = 0;
617 if (insn == first_nonparm_insn)
618 in_nonparm_insns = 1;
620 switch (GET_CODE (insn))
623 /* No need to keep these. */
624 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
627 copy = rtx_alloc (NOTE);
628 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
629 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
630 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
633 NOTE_SOURCE_FILE (insn) = (char *) copy;
634 NOTE_SOURCE_FILE (copy) = 0;
636 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
637 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
	      /* We have to forward these both to match the new exception
		 region.  */
641 NOTE_BLOCK_NUMBER (copy)
642 = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
645 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
651 copy = rtx_alloc (GET_CODE (insn));
653 if (GET_CODE (insn) == CALL_INSN)
654 CALL_INSN_FUNCTION_USAGE (copy)
655 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
657 PATTERN (copy) = copy_for_inline (PATTERN (insn));
658 INSN_CODE (copy) = -1;
659 LOG_LINKS (copy) = NULL_RTX;
660 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
664 copy = label_map[CODE_LABEL_NUMBER (insn)];
665 LABEL_NAME (copy) = LABEL_NAME (insn);
669 copy = rtx_alloc (BARRIER);
675 INSN_UID (copy) = INSN_UID (insn);
676 insn_map[INSN_UID (insn)] = copy;
677 NEXT_INSN (last_insn) = copy;
678 PREV_INSN (copy) = last_insn;
682 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
684 /* Now copy the REG_NOTES. */
685 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
686 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
687 && insn_map[INSN_UID(insn)])
688 REG_NOTES (insn_map[INSN_UID (insn)])
689 = copy_for_inline (REG_NOTES (insn));
691 NEXT_INSN (last_insn) = NULL;
693 finish_inline (fndecl, head);
695 /* Make new versions of the register tables. */
696 new = (char *) savealloc (regno_pointer_flag_length);
697 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
698 new1 = (char *) savealloc (regno_pointer_flag_length);
699 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
701 regno_pointer_flag = new;
702 regno_pointer_align = new1;
704 set_new_first_and_last_insn (first_insn, last_insn);
710 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
711 For example, this can copy a list made of TREE_LIST nodes. While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
713 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
714 point to the corresponding (abstract) original node. */
717 copy_decl_list (list)
721 register tree prev, next;
726 head = prev = copy_node (list);
727 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
728 DECL_ABSTRACT_ORIGIN (head) = list;
729 next = TREE_CHAIN (list);
734 copy = copy_node (next);
735 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
736 DECL_ABSTRACT_ORIGIN (copy) = next;
737 TREE_CHAIN (prev) = copy;
739 next = TREE_CHAIN (next);
744 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
747 copy_decl_tree (block)
750 tree t, vars, subblocks;
752 vars = copy_decl_list (BLOCK_VARS (block));
755 /* Process all subblocks. */
756 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
758 tree copy = copy_decl_tree (t);
759 TREE_CHAIN (copy) = subblocks;
763 t = copy_node (block);
764 BLOCK_VARS (t) = vars;
765 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
766 /* If the BLOCK being cloned is already marked as having been instantiated
767 from something else, then leave that `origin' marking alone. Otherwise,
768 mark the clone as having originated from the BLOCK we are cloning. */
769 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
770 BLOCK_ABSTRACT_ORIGIN (t) = block;
774 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
777 copy_decl_rtls (block)
782 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
783 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
784 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
786 /* Process all subblocks. */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
791 /* Make the insns and PARM_DECLs of the current function permanent
792 and record other information in DECL_SAVED_INSNS to allow inlining
793 of this function in subsequent calls.
795 This routine need not copy any insns because we are not going
796 to immediately compile the insns in the insn chain. There
797 are two cases when we would compile the insns for FNDECL:
798 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
799 be output at the end of other compilation, because somebody took
800 its address. In the first case, the insns of FNDECL are copied
801 as it is expanded inline, so FNDECL's saved insns are not
802 modified. In the second case, FNDECL is used for the last time,
803 so modifying the rtl is not a problem.
805 We don't have to worry about FNDECL being inline expanded by
806 other functions which are written at the end of compilation
807 because flag_no_inline is turned on when we begin writing
808 functions at the end of compilation. */
811 save_for_inline_nocopy (fndecl)
816 rtx first_nonparm_insn;
818 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
819 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
821 for the parms, prior to elimination of virtual registers.
822 These values are needed for substituting parms properly. */
824 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
826 /* Make and emit a return-label if we have not already done so. */
828 if (return_label == 0)
830 return_label = gen_label_rtx ();
831 emit_label (return_label);
834 head = initialize_for_inline (fndecl, get_first_label_num (),
835 max_label_num (), max_reg_num (), 0);
837 /* If there are insns that copy parms from the stack into pseudo registers,
838 those insns are not copied. `expand_inline_function' must
839 emit the correct code to handle such things. */
842 if (GET_CODE (insn) != NOTE)
845 /* Get the insn which signals the end of parameter setup code. */
846 first_nonparm_insn = get_first_nonparm_insn ();
848 /* Now just scan the chain of insns to see what happens to our
849 PARM_DECLs. If a PARM_DECL is used but never modified, we
850 can substitute its rtl directly when expanding inline (and
851 perform constant folding when its incoming value is constant).
852 Otherwise, we have to copy its value into a new register and track
853 the new register's life. */
855 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
857 if (insn == first_nonparm_insn)
858 in_nonparm_insns = 1;
860 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
862 if (current_function_uses_const_pool)
864 /* Replace any constant pool references with the actual constant.
865 We will put the constant back if we need to write the
866 function out after all. */
867 save_constants (&PATTERN (insn));
868 if (REG_NOTES (insn))
869 save_constants (®_NOTES (insn));
872 /* Record what interesting things happen to our parameters. */
873 note_stores (PATTERN (insn), note_modified_parmregs);
  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
879 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
881 /* We have now allocated all that needs to be allocated permanently
882 on the rtx obstack. Set our high-water mark, so that we
883 can free the rest of this when the time comes. */
887 finish_inline (fndecl, head);
890 /* Given PX, a pointer into an insn, search for references to the constant
891 pool. Replace each with a CONST that has the mode of the original
892 constant, contains the constant, and has RTX_INTEGRATED_P set.
893 Similarly, constant pool addresses not enclosed in a MEM are replaced
894 with an ADDRESS and CONST rtx which also gives the constant, its
895 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
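/* A rough illustration of the transformation (the rtl shapes are sketched,
   not taken from an actual dump): a pool reference such as

     (mem:DF (symbol_ref:SI ("*.LC0")))

   is rewritten as roughly

     (const:DF (const_double ...))		with RTX_INTEGRATED_P set,

   and a bare pool SYMBOL_REF becomes (address:SI (const:DF ...)), so the
   constant itself travels with the saved insns instead of an index into a
   pool that is about to be rebuilt.  */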
  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because doing so would recurse forever.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
911 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
912 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
914 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
915 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
916 RTX_INTEGRATED_P (new) = 1;
918 /* If the MEM was in a different mode than the constant (perhaps we
919 were only looking at the low-order part), surround it with a
920 SUBREG so we can save both modes. */
922 if (GET_MODE (x) != const_mode)
924 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
925 RTX_INTEGRATED_P (new) = 1;
929 save_constants (&XEXP (*px, 0));
931 else if (GET_CODE (x) == SYMBOL_REF
932 && CONSTANT_POOL_ADDRESS_P (x))
934 *px = gen_rtx_ADDRESS (GET_MODE (x),
935 gen_rtx_CONST (get_pool_mode (x),
936 get_pool_constant (x)));
937 save_constants (&XEXP (*px, 0));
938 RTX_INTEGRATED_P (*px) = 1;
943 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
944 int len = GET_RTX_LENGTH (GET_CODE (x));
946 for (i = len-1; i >= 0; i--)
951 for (j = 0; j < XVECLEN (x, i); j++)
952 save_constants (&XVECEXP (x, i, j));
956 if (XEXP (x, i) == 0)
960 /* Hack tail-recursion here. */
964 save_constants (&XEXP (x, i));
971 /* Note whether a parameter is modified or not. */
974 note_modified_parmregs (reg, x)
978 if (GET_CODE (reg) == REG && in_nonparm_insns
979 && REGNO (reg) < max_parm_reg
980 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
981 && parmdecl_map[REGNO (reg)] != 0)
982 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
985 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
986 according to `reg_map' and `label_map'. The original rtl insns
987 will be saved for inlining; this is used to make a copy
988 which is used to finish compiling the inline function itself.
990 If we find a "saved" constant pool entry, one which was replaced with
991 the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.
995 All other kinds of rtx are copied except those that can never be
996 changed during compilation. */
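/* As a concrete sketch of the remapping (illustrative): a pseudo such as
   (reg:SI 42) is replaced by reg_map[42], a LABEL_REF to a label whose
   CODE_LABEL_NUMBER is 7 comes back referring to label_map[7], and freely
   shared things such as CONST_INT, hard registers and the virtual registers
   are returned unchanged.  */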
999 copy_for_inline (orig)
1002 register rtx x = orig;
1005 register enum rtx_code code;
1006 register char *format_ptr;
1011 code = GET_CODE (x);
1013 /* These types may be freely shared. */
1025 /* We have to make a new CONST_DOUBLE to ensure that we account for
1026 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1027 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1031 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1032 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1035 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1039 /* Get constant pool entry for constant in the pool. */
1040 if (RTX_INTEGRATED_P (x))
1041 return validize_mem (force_const_mem (GET_MODE (x),
1042 copy_for_inline (XEXP (x, 0))));
1046 /* Get constant pool entry, but access in different mode. */
1047 if (RTX_INTEGRATED_P (x))
1049 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1050 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1052 PUT_MODE (new, GET_MODE (x));
1053 return validize_mem (new);
      /* If this is not a saved constant pool address, something is wrong;
	 otherwise recover the constant pool address.  */
1060 if (! RTX_INTEGRATED_P (x))
1063 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1064 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1065 new = XEXP (new, 0);
1067 #ifdef POINTERS_EXTEND_UNSIGNED
1068 if (GET_MODE (new) != GET_MODE (x))
1069 new = convert_memory_address (GET_MODE (x), new);
1075 /* If a single asm insn contains multiple output operands
1076 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1077 We must make sure that the copied insn continues to share it. */
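      /* For example (a hypothetical asm, not taken from the sources):

	   asm ("..." : "=r" (a), "=r" (b) : "r" (c));

	 expands to a PARALLEL of two SETs whose ASM_OPERANDS share the input
	 vector in operand 3, and the copies must keep sharing it.  */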
1078 if (orig_asm_operands_vector == XVEC (orig, 3))
1080 x = rtx_alloc (ASM_OPERANDS);
1081 x->volatil = orig->volatil;
1082 XSTR (x, 0) = XSTR (orig, 0);
1083 XSTR (x, 1) = XSTR (orig, 1);
1084 XINT (x, 2) = XINT (orig, 2);
1085 XVEC (x, 3) = copy_asm_operands_vector;
1086 XVEC (x, 4) = copy_asm_constraints_vector;
1087 XSTR (x, 5) = XSTR (orig, 5);
1088 XINT (x, 6) = XINT (orig, 6);
1094 /* A MEM is usually allowed to be shared if its address is constant
1095 or is a constant plus one of the special registers.
1097 We do not allow sharing of addresses that are either a special
1098 register or the sum of a constant and a special register because
1099 it is possible for unshare_all_rtl to copy the address, into memory
1100 that won't be saved. Although the MEM can safely be shared, and
     won't be copied there, the address itself cannot be shared, and may
     need to be copied.
1104 There are also two exceptions with constants: The first is if the
1105 constant is a LABEL_REF or the sum of the LABEL_REF
1106 and an integer. This case can happen if we have an inline
1107 function that supplies a constant operand to the call of another
1108 inline function that uses it in a switch statement. In this case,
     we will be replacing the LABEL_REF, so we have to replace this MEM
     as well.
1112 The second case is if we have a (const (plus (address ..) ...)).
     In that case we need to put back the address of the constant pool
     entry.  */
1116 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1117 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1118 && ! (GET_CODE (XEXP (x, 0)) == CONST
1119 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1120 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1122 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1128 /* If this is a non-local label, just make a new LABEL_REF.
1129 Otherwise, use the new label as well. */
1130 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1131 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1132 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1133 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1134 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1138 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1139 return reg_map [REGNO (x)];
1144 /* If a parm that gets modified lives in a pseudo-reg,
1145 clear its TREE_READONLY to prevent certain optimizations. */
1147 rtx dest = SET_DEST (x);
1149 while (GET_CODE (dest) == STRICT_LOW_PART
1150 || GET_CODE (dest) == ZERO_EXTRACT
1151 || GET_CODE (dest) == SUBREG)
1152 dest = XEXP (dest, 0);
1154 if (GET_CODE (dest) == REG
1155 && REGNO (dest) < max_parm_reg
1156 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1157 && parmdecl_map[REGNO (dest)] != 0
1158 /* The insn to load an arg pseudo from a stack slot
1159 does not count as modifying it. */
1160 && in_nonparm_insns)
1161 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1165 #if 0 /* This is a good idea, but here is the wrong place for it. */
1166 /* Arrange that CONST_INTs always appear as the second operand
1167 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1168 always appear as the first. */
1170 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1171 || (XEXP (x, 1) == frame_pointer_rtx
1172 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1173 && XEXP (x, 1) == arg_pointer_rtx)))
1175 rtx t = XEXP (x, 0);
1176 XEXP (x, 0) = XEXP (x, 1);
1185 /* Replace this rtx with a copy of itself. */
1187 x = rtx_alloc (code);
1188 bcopy ((char *) orig, (char *) x,
1189 (sizeof (*x) - sizeof (x->fld)
1190 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1192 /* Now scan the subexpressions recursively.
1193 We can store any replaced subexpressions directly into X
1194 since we know X is not shared! Any vectors in X
1195 must be copied if X was copied. */
1197 format_ptr = GET_RTX_FORMAT (code);
1199 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1201 switch (*format_ptr++)
1204 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1208 /* Change any references to old-insns to point to the
1209 corresponding copied insns. */
1210 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1214 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1218 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1219 for (j = 0; j < XVECLEN (x, i); j++)
1221 = copy_for_inline (XVECEXP (x, i, j));
1227 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1229 orig_asm_operands_vector = XVEC (orig, 3);
1230 copy_asm_operands_vector = XVEC (x, 3);
1231 copy_asm_constraints_vector = XVEC (x, 4);
1237 /* Unfortunately, we need a global copy of const_equiv map for communication
1238 with a function called from note_stores. Be *very* careful that this
1239 is used properly in the presence of recursion. */
1241 rtx *global_const_equiv_map;
1242 int global_const_equiv_map_size;
1244 #define FIXED_BASE_PLUS_P(X) \
1245 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1246 && GET_CODE (XEXP (X, 0)) == REG \
1247 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1248 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
1250 /* Integrate the procedure defined by FNDECL. Note that this function
1251 may wind up calling itself. Since the static variables are not
1252 reentrant, we do not assign them until after the possibility
1253 of recursion is eliminated.
1255 If IGNORE is nonzero, do not produce a value.
1256 Otherwise store the value in TARGET if it is nonzero and that is convenient.
   The value returned is:
   (rtx)-1 if we could not substitute the function
1260 0 if we substituted it and it does not produce a value
1261 else an rtx for where the value is stored. */
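/* A sketch of how a caller is expected to use this (the names here are
   hypothetical; the real call sites live elsewhere in the compiler):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
				    TREE_TYPE (exp), structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... fall back to emitting an ordinary call ...
     else
       ... TEMP is the value, or 0 if no value was produced ...  */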
1264 expand_inline_function (fndecl, parms, target, ignore, type,
1265 structure_value_addr)
1270 rtx structure_value_addr;
1272 tree formal, actual, block;
1273 rtx header = DECL_SAVED_INSNS (fndecl);
1274 rtx insns = FIRST_FUNCTION_INSN (header);
1275 rtx parm_insns = FIRST_PARM_INSN (header);
1281 int min_labelno = FIRST_LABELNO (header);
1282 int max_labelno = LAST_LABELNO (header);
1284 rtx local_return_label = 0;
1288 struct inline_remap *map;
1290 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1291 rtx static_chain_value = 0;
1293 /* The pointer used to track the true location of the memory used
1294 for MAP->LABEL_MAP. */
1295 rtx *real_label_map = 0;
1297 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1298 max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();
1302 nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parms' types match and that sufficient arguments were
1305 passed. Since the appropriate conversions or default promotions have
1306 already been applied, the machine modes should match exactly. */
1308 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1310 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1313 enum machine_mode mode;
      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;
1318 arg = TREE_VALUE (actual);
1319 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1321 if (mode != TYPE_MODE (TREE_TYPE (arg))
1322 /* If they are block mode, the types should match exactly.
1323 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1324 which could happen if the parameter has incomplete type. */
1326 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1327 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1328 return (rtx) (HOST_WIDE_INT) -1;
1331 /* Extra arguments are valid, but will be ignored below, so we must
1332 evaluate them here for side-effects. */
1333 for (; actual; actual = TREE_CHAIN (actual))
1334 expand_expr (TREE_VALUE (actual), const0_rtx,
1335 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1337 /* Make a binding contour to keep inline cleanups called at
1338 outer function-scope level from looking like they are shadowing
1339 parameter declarations. */
1342 /* Expand the function arguments. Do this first so that any
1343 new registers get created before we allocate the maps. */
1345 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1346 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1348 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1350 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
1354 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1355 /* Mode of the variable used within the function. */
1356 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1360 loc = RTVEC_ELT (arg_vector, i);
1362 /* If this is an object passed by invisible reference, we copy the
1363 object into a stack slot and save its address. If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
1366 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1367 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1371 int_size_in_bytes (TREE_TYPE (arg)), 1);
1372 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1374 store_expr (arg, stack_slot, 0);
1376 arg_vals[i] = XEXP (stack_slot, 0);
1379 else if (GET_CODE (loc) != MEM)
1381 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
1384 arg_vals[i] = convert_modes (GET_MODE (loc),
1385 TYPE_MODE (TREE_TYPE (arg)),
1386 expand_expr (arg, NULL_RTX, mode,
1388 TREE_UNSIGNED (TREE_TYPE (formal)));
1390 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1395 if (arg_vals[i] != 0
1396 && (! TREE_READONLY (formal)
1397 /* If the parameter is not read-only, copy our argument through
1398 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1399 TARGET in any way. In the inline function, they will likely
1400 be two different pseudos, and `safe_from_p' will make all
1401 sorts of smart assumptions about their not conflicting.
1402 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1403 wrong, so put ARG_VALS[I] into a fresh register.
1404 Don't worry about invisible references, since their stack
1405 temps will never overlap the target. */
1408 && (GET_CODE (arg_vals[i]) == REG
1409 || GET_CODE (arg_vals[i]) == SUBREG
1410 || GET_CODE (arg_vals[i]) == MEM)
1411 && reg_overlap_mentioned_p (arg_vals[i], target))
1412 /* ??? We must always copy a SUBREG into a REG, because it might
1413 get substituted into an address, and not all ports correctly
1414 handle SUBREGs in addresses. */
1415 || (GET_CODE (arg_vals[i]) == SUBREG)))
1416 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1418 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1419 && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
1420 mark_reg_pointer (arg_vals[i],
1421 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1425 /* Allocate the structures we use to remap things. */
1427 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1428 map->fndecl = fndecl;
1430 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1431 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1433 /* We used to use alloca here, but the size of what it would try to
1434 allocate would occasionally cause it to exceed the stack limit and
1435 cause unpredictable core dumps. */
1437 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1438 map->label_map = real_label_map;
1440 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1441 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1442 map->min_insnno = 0;
1443 map->max_insnno = INSN_UID (header);
1445 map->integrating = 1;
1447 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1448 be large enough for all our pseudos. This is the number we are currently
1449 using plus the number in the called routine, plus 15 for each arg,
1450 five to compute the virtual frame pointer, and five for the return value.
1451 This should be enough for most cases. We do not reference entries
1452 outside the range of the map.
1454 ??? These numbers are quite arbitrary and were obtained by
1455 experimentation. At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
1457 estimate the number of pseudos we will need. */
1459 map->const_equiv_map_size
1460 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
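  /* For example (the figures are illustrative only), inlining a 3-argument
     callee into a function that currently has about 200 registers, where
     the callee's body used about 80, sizes the table at roughly
     200 + 80 + 15*3 + 10 = 335 entries.  */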
1462 map->const_equiv_map
1463 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1464 bzero ((char *) map->const_equiv_map,
1465 map->const_equiv_map_size * sizeof (rtx));
1468 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1469 bzero ((char *) map->const_age_map,
1470 map->const_equiv_map_size * sizeof (unsigned));
1473 /* Record the current insn in case we have to set up pointers to frame
1474 and argument memory blocks. If there are no insns yet, add a dummy
1475 insn that can be used as an insertion point. */
1476 map->insns_at_start = get_last_insn ();
1477 if (map->insns_at_start == 0)
1478 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1480 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1481 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
1485 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1486 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1488 /* If the inline function needs to make PIC references, that means
1489 that this function's PIC offset table must be used. */
1490 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1491 current_function_uses_pic_offset_table = 1;
1493 /* If this function needs a context, set it up. */
1494 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1495 static_chain_value = lookup_static_chain (fndecl);
1497 if (GET_CODE (parm_insns) == NOTE
1498 && NOTE_LINE_NUMBER (parm_insns) > 0)
1500 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1501 NOTE_LINE_NUMBER (parm_insns));
1503 RTX_INTEGRATED_P (note) = 1;
1506 /* Process each argument. For each, set up things so that the function's
1507 reference to the argument will refer to the argument being passed.
1508 We only replace REG with REG here. Any simplifications are done
1509 via const_equiv_map.
1511 We make two passes: In the first, we deal with parameters that will
1512 be placed into registers, since we need to ensure that the allocated
1513 register number fits in const_equiv_map. Then we store all non-register
1514 parameters into their memory location. */
1516 /* Don't try to free temp stack slots here, because we may put one of the
1517 parameters into a temp stack slot. */
1519 for (i = 0; i < nargs; i++)
1521 rtx copy = arg_vals[i];
1523 loc = RTVEC_ELT (arg_vector, i);
      /* There are four cases, each handled separately.  */
1526 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1527 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1529 /* This must be an object passed by invisible reference (it could
1530 also be a variable-sized object, but we forbid inlining functions
1531 with variable-sized arguments). COPY is the address of the
1532 actual value (this computation will cause it to be copied). We
1533 map that address for the register, noting the actual address as
1534 an equivalent in case it can be substituted into the insns. */
1536 if (GET_CODE (copy) != REG)
1538 temp = copy_addr_to_reg (copy);
1539 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1540 && REGNO (temp) < map->const_equiv_map_size)
1542 map->const_equiv_map[REGNO (temp)] = copy;
1543 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1547 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1549 else if (GET_CODE (loc) == MEM)
1551 /* This is the case of a parameter that lives in memory.
1552 It will live in the block we allocate in the called routine's
1553 frame that simulates the incoming argument area. Do nothing
1554 now; we will call store_expr later. */
1557 else if (GET_CODE (loc) == REG)
1559 /* This is the good case where the parameter is in a register.
1560 If it is read-only and our argument is a constant, set up the
1561 constant equivalence.
1563 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1564 that flag set if it is a register.
1566 Also, don't allow hard registers here; they might not be valid
1567 when substituted into insns. */
1569 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1570 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1571 && ! REG_USERVAR_P (copy))
1572 || (GET_CODE (copy) == REG
1573 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1575 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1576 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1577 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1578 && REGNO (temp) < map->const_equiv_map_size)
1580 map->const_equiv_map[REGNO (temp)] = copy;
1581 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1585 map->reg_map[REGNO (loc)] = copy;
1587 else if (GET_CODE (loc) == CONCAT)
1589 /* This is the good case where the parameter is in a
1590 pair of separate pseudos.
1591 If it is read-only and our argument is a constant, set up the
1592 constant equivalence.
1594 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1595 that flag set if it is a register.
1597 Also, don't allow hard registers here; they might not be valid
1598 when substituted into insns. */
1599 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1600 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1601 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1602 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1604 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1605 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1606 && ! REG_USERVAR_P (copyreal))
1607 || (GET_CODE (copyreal) == REG
1608 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1610 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1611 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1612 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1613 && REGNO (temp) < map->const_equiv_map_size)
1615 map->const_equiv_map[REGNO (temp)] = copyreal;
1616 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1620 map->reg_map[REGNO (locreal)] = copyreal;
1622 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1623 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1624 && ! REG_USERVAR_P (copyimag))
1625 || (GET_CODE (copyimag) == REG
1626 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1628 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1629 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1630 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1631 && REGNO (temp) < map->const_equiv_map_size)
1633 map->const_equiv_map[REGNO (temp)] = copyimag;
1634 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1638 map->reg_map[REGNO (locimag)] = copyimag;
1644 /* Now do the parameters that will be placed in memory. */
1646 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1647 formal; formal = TREE_CHAIN (formal), i++)
1649 loc = RTVEC_ELT (arg_vector, i);
1651 if (GET_CODE (loc) == MEM
1652 /* Exclude case handled above. */
1653 && ! (GET_CODE (XEXP (loc, 0)) == REG
1654 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1656 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1657 DECL_SOURCE_LINE (formal));
1659 RTX_INTEGRATED_P (note) = 1;
	  /* Compute the address in the area we reserved and store the
	     value there.  */
1663 temp = copy_rtx_and_substitute (loc, map);
1664 subst_constants (&temp, NULL_RTX, map);
1665 apply_change_group ();
1666 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1667 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1668 store_expr (arg_trees[i], temp, 0);
1672 /* Deal with the places that the function puts its result.
1673 We are driven by what is placed into DECL_RESULT.
     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */
1678 map->inline_target = 0;
1679 loc = DECL_RTL (DECL_RESULT (fndecl));
1680 if (TYPE_MODE (type) == VOIDmode)
1681 /* There is no return value to worry about. */
1683 else if (GET_CODE (loc) == MEM)
1685 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1688 /* Pass the function the address in which to return a structure value.
1689 Note that a constructor can cause someone to call us with
1690 STRUCTURE_VALUE_ADDR, but the initialization takes place
1691 via the first parameter, rather than the struct return address.
1693 We have two cases: If the address is a simple register indirect,
1694 use the mapping mechanism to point that register to our structure
1695 return address. Otherwise, store the structure return value into
1696 the place that it will be referenced from. */
1698 if (GET_CODE (XEXP (loc, 0)) == REG)
1700 temp = force_reg (Pmode,
1701 force_operand (structure_value_addr, NULL_RTX));
1702 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1703 if ((CONSTANT_P (structure_value_addr)
1704 || GET_CODE (structure_value_addr) == ADDRESSOF
1705 || (GET_CODE (structure_value_addr) == PLUS
1706 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1707 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1708 && REGNO (temp) < map->const_equiv_map_size)
1710 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1711 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1716 temp = copy_rtx_and_substitute (loc, map);
1717 subst_constants (&temp, NULL_RTX, map);
1718 apply_change_group ();
1719 emit_move_insn (temp, structure_value_addr);
1723 /* We will ignore the result value, so don't look at its structure.
1724 Note that preparations for an aggregate return value
1725 do need to be made (above) even if it will be ignored. */
1727 else if (GET_CODE (loc) == REG)
1729 /* The function returns an object in a register and we use the return
1730 value. Set up our target for remapping. */
      /* Machine mode the function was declared to return.  */
1733 enum machine_mode departing_mode = TYPE_MODE (type);
1734 /* (Possibly wider) machine mode it actually computes
1735 (for the sake of callers that fail to declare it right).
1736 We have to use the mode of the result's RTL, rather than
1737 its type, since expand_function_start may have promoted it. */
1738 enum machine_mode arriving_mode
1739 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
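      /* For instance (illustrative): a function declared to return `short'
	 has departing_mode HImode, but if the target promotes return values
	 the result rtl may already be SImode, making arriving_mode SImode;
	 the SUBREG handling below reconciles the two.  */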
1742 /* Don't use MEMs as direct targets because on some machines
1743 substituting a MEM for a REG makes invalid insns.
1744 Let the combiner substitute the MEM if that is valid. */
1745 if (target == 0 || GET_CODE (target) != REG
1746 || GET_MODE (target) != departing_mode)
1747 target = gen_reg_rtx (departing_mode);
1749 /* If function's value was promoted before return,
1750 avoid machine mode mismatch when we substitute INLINE_TARGET.
1751 But TARGET is what we will return to the caller. */
1752 if (arriving_mode != departing_mode)
1754 /* Avoid creating a paradoxical subreg wider than
1755 BITS_PER_WORD, since that is illegal. */
1756 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1758 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1759 GET_MODE_BITSIZE (arriving_mode)))
1760 /* Maybe could be handled by using convert_move () ? */
1762 reg_to_map = gen_reg_rtx (arriving_mode);
1763 target = gen_lowpart (departing_mode, reg_to_map);
1766 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1769 reg_to_map = target;
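/* A minimal illustration (assuming a 32-bit word; the modes are hypothetical):
   for a function declared to return `short', departing_mode is HImode, but if
   expand_function_start promoted the value to SImode then arriving_mode is
   SImode. SImode still fits in one word, so the SUBREG branch above applies:
   reg_to_map becomes (subreg:SI TARGET 0), while TARGET itself keeps HImode
   for our caller. */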
1771 /* Usually, the result value is the machine's return register.
1772 Sometimes it may be a pseudo. Handle both cases. */
1773 if (REG_FUNCTION_VALUE_P (loc))
1774 map->inline_target = reg_to_map;
1776 map->reg_map[REGNO (loc)] = reg_to_map;
1781 /* Make a fresh binding contour that we can easily remove. Do this after
1782 expanding our arguments so cleanups are properly scoped. */
1784 expand_start_bindings (0);
1786 /* Make new label equivalences for the labels in the called function. */
1787 for (i = min_labelno; i < max_labelno; i++)
1788 map->label_map[i] = NULL_RTX;
1790 /* Perform postincrements before actually calling the function. */
1793 /* Clean up stack so that variables might have smaller offsets. */
1794 do_pending_stack_adjust ();
1796 /* Save a copy of the location of const_equiv_map for mark_stores, called via note_stores. */
1798 global_const_equiv_map = map->const_equiv_map;
1799 global_const_equiv_map_size = map->const_equiv_map_size;
1801 /* If the called function does an alloca, save and restore the
1802 stack pointer around the call. This saves stack space, but
1803 also is required if this inline is being done between two pushes. */
1805 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1806 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1808 /* Now copy the insns one by one. Do this in two passes, first the insns and
1809 then their REG_NOTES, just like save_for_inline. */
1811 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1813 for (insn = insns; insn; insn = NEXT_INSN (insn))
1815 rtx copy, pattern, set;
1817 map->orig_asm_operands_vector = 0;
1819 switch (GET_CODE (insn))
1822 pattern = PATTERN (insn);
1823 set = single_set (insn);
1825 if (GET_CODE (pattern) == USE
1826 && GET_CODE (XEXP (pattern, 0)) == REG
1827 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1828 /* The (USE (REG n)) at return from the function should
1829 be ignored since we are changing (REG n) into inline_target. */
1833 /* If the inline fn needs eh context, make sure that
1834 the current fn has one. */
1835 if (GET_CODE (pattern) == USE
1836 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1839 /* Ignore setting a function value that we don't want to use. */
1840 if (map->inline_target == 0
1842 && GET_CODE (SET_DEST (set)) == REG
1843 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1845 if (volatile_refs_p (SET_SRC (set)))
1849 /* If we must not delete the source,
1850 load it into a new temporary. */
1851 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1853 new_set = single_set (copy);
1858 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1860 /* If the source and destination are the same and it
1861 has a note on it, keep the insn. */
1862 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1863 && REG_NOTES (insn) != 0)
1864 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1869 /* If this is setting the static chain rtx, omit it. */
1870 else if (static_chain_value != 0
1872 && GET_CODE (SET_DEST (set)) == REG
1873 && rtx_equal_p (SET_DEST (set),
1874 static_chain_incoming_rtx))
1877 /* If this is setting the static chain pseudo, set it from
1878 the value we want to give it instead. */
1879 else if (static_chain_value != 0
1881 && rtx_equal_p (SET_SRC (set),
1882 static_chain_incoming_rtx))
1884 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1886 copy = emit_move_insn (newdest, static_chain_value);
1887 static_chain_value = 0;
1890 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1891 /* REG_NOTES will be copied later. */
1894 /* If this insn is setting CC0, it may need to look at
1895 the insn that uses CC0 to see what type of insn it is.
1896 In that case, the call to recog via validate_change will
1897 fail. So don't substitute constants here. Instead,
1898 do it when we emit the following insn.
1900 For example, see the pyr.md file. That machine has signed and
1901 unsigned compares. The compare patterns must check the
1902 following branch insn to see what kind of compare to emit.
1905 If the previous insn set CC0, substitute constants on it as well. */
1907 if (sets_cc0_p (PATTERN (copy)) != 0)
1912 try_constants (cc0_insn, map);
1914 try_constants (copy, map);
1917 try_constants (copy, map);
1922 if (GET_CODE (PATTERN (insn)) == RETURN
1923 || (GET_CODE (PATTERN (insn)) == PARALLEL
1924 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1926 if (local_return_label == 0)
1927 local_return_label = gen_label_rtx ();
1928 pattern = gen_jump (local_return_label);
1931 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1933 copy = emit_jump_insn (pattern);
1937 try_constants (cc0_insn, map);
1940 try_constants (copy, map);
1942 /* If this used to be a conditional jump insn whose branch
1943 direction is now known, we must do something special. */
1944 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1947 /* The previous insn set cc0 for us. So delete it. */
1948 delete_insn (PREV_INSN (copy));
1951 /* If this is now a no-op, delete it. */
1952 if (map->last_pc_value == pc_rtx)
1958 /* Otherwise, this is an unconditional jump so we must put a
1959 BARRIER after it. We could do some dead code elimination
1960 here, but jump.c will do it just as well. */
1966 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1967 copy = emit_call_insn (pattern);
1969 /* Because the USAGE information potentially contains objects other
1970 than hard registers, we need to copy it. */
1971 CALL_INSN_FUNCTION_USAGE (copy)
1972 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1976 try_constants (cc0_insn, map);
1979 try_constants (copy, map);
1981 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1982 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1983 map->const_equiv_map[i] = 0;
1988 emit_label (get_label_from_map(map,
1989 CODE_LABEL_NUMBER (insn)));
1990 LABEL_NAME (copy) = LABEL_NAME (insn);
1995 copy = emit_barrier ();
1999 /* It is important to discard function-end and function-beg notes,
2000 so we have only one of each in the current function.
2001 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2002 deleted these in the copy used for continuing compilation,
2003 not the copy used for inlining). */
2004 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2005 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2006 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2008 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
2009 if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2010 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2013 get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2015 /* We have to forward these both to match the new exception region. */
2017 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2030 RTX_INTEGRATED_P (copy) = 1;
2032 map->insn_map[INSN_UID (insn)] = copy;
2035 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2036 from parameters can be substituted in. These are the only ones that
2037 are valid across the entire function. */
2039 for (insn = insns; insn; insn = NEXT_INSN (insn))
2040 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2041 && map->insn_map[INSN_UID (insn)]
2042 && REG_NOTES (insn))
2044 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2045 /* We must also do subst_constants, in case one of our parameters
2046 has const type and constant value. */
2047 subst_constants (&tem, NULL_RTX, map);
2048 apply_change_group ();
2049 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2052 if (local_return_label)
2053 emit_label (local_return_label);
2055 /* Restore the stack pointer if we saved it above. */
2056 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2057 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2059 /* Make copies of the decls of the symbols in the inline function, so that
2060 the copies of the variables get declared in the current function. Set
2061 up things so that lookup_static_chain knows to interpret registers
2062 in SAVE_EXPRs for TYPE_SIZEs as local. */
2064 inline_function_decl = fndecl;
2065 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2066 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2067 inline_function_decl = 0;
2069 /* End the scope containing the copied formal parameter variables
2070 and copied LABEL_DECLs. */
2072 expand_end_bindings (getdecls (), 1, 1);
2073 block = poplevel (1, 1, 0);
2074 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2075 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2078 /* Must mark the line number note after inlined functions as a repeat, so
2079 that the test coverage code can avoid counting the call twice. This
2080 just tells the code to ignore the immediately following line note, since
2081 there already exists a copy of this note before the expanded inline call.
2082 This line number note is still needed for debugging though, so we can't delete it. */
2084 if (flag_test_coverage)
2085 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2087 emit_line_note (input_filename, lineno);
2089 if (structure_value_addr)
2091 target = gen_rtx_MEM (TYPE_MODE (type),
2092 memory_address (TYPE_MODE (type),
2093 structure_value_addr));
2094 MEM_IN_STRUCT_P (target) = 1;
2097 /* Make sure we free the things we explicitly allocated with xmalloc. */
2099 free (real_label_map);
2104 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2105 push all of those decls and give each one the corresponding home. */
2108 integrate_parm_decls (args, map, arg_vector)
2110 struct inline_remap *map;
2116 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2118 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2121 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2123 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2124 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2125 here, but that's going to require some more work. */
2126 /* DECL_INCOMING_RTL (decl) = ?; */
2127 /* These args would always appear unused, if not for this. */
2128 TREE_USED (decl) = 1;
2129 /* Prevent warning for shadowing with these. */
2130 DECL_ABSTRACT_ORIGIN (decl) = tail;
2132 /* Fully instantiate the address with the equivalent form so that the
2133 debugging information contains the actual register, instead of the
2134 virtual register. Do this by not passing an insn to subst_constants. */
2136 subst_constants (&new_decl_rtl, NULL_RTX, map);
2137 apply_change_group ();
2138 DECL_RTL (decl) = new_decl_rtl;
2142 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2143 current function a tree of contexts isomorphic to the one that is given.
2145 LEVEL indicates how far down into the BLOCK tree is the node we are
2146 currently traversing. It is always zero except for recursive calls.
2148 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2149 registers used in the DECL_RTL field should be remapped. If it is zero,
2150 no mapping is necessary. */
2153 integrate_decl_tree (let, level, map)
2156 struct inline_remap *map;
2163 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2167 push_obstacks_nochange ();
2168 saveable_allocation ();
2172 if (DECL_RTL (t) != 0)
2174 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2175 /* Fully instantiate the address with the equivalent form so that the
2176 debugging information contains the actual register, instead of the
2177 virtual register. Do this by not passing an insn to subst_constants. */
2179 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2180 apply_change_group ();
2182 /* These args would always appear unused, if not for this. */
2184 /* Prevent warning for shadowing with these. */
2185 DECL_ABSTRACT_ORIGIN (d) = t;
2187 if (DECL_LANG_SPECIFIC (d))
2193 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2194 integrate_decl_tree (t, level + 1, map);
2198 node = poplevel (1, 0, 0);
2201 TREE_USED (node) = TREE_USED (let);
2202 BLOCK_ABSTRACT_ORIGIN (node) = let;
2207 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2208 through save_constants. */
2211 save_constants_in_decl_trees (let)
2216 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2217 if (DECL_RTL (t) != 0)
2218 save_constants (&DECL_RTL (t));
2220 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2221 save_constants_in_decl_trees (t);
2224 /* Create a new copy of an rtx.
2225 Recursively copies the operands of the rtx,
2226 except for those few rtx codes that are sharable.
2228 We always return an rtx that is similar to the incoming rtx, with the
2229 exception of possibly changing a REG to a SUBREG or vice versa. No
2230 rtl is ever emitted.
2232 Handle constants that need to be placed in the constant pool by
2233 calling `force_const_mem'. */
2236 copy_rtx_and_substitute (orig, map)
2238 struct inline_remap *map;
2240 register rtx copy, temp;
2242 register RTX_CODE code;
2243 register enum machine_mode mode;
2244 register char *format_ptr;
2250 code = GET_CODE (orig);
2251 mode = GET_MODE (orig);
2256 /* If the stack pointer register shows up, it must be part of
2257 stack-adjustments (*not* because we eliminated the frame pointer!).
2258 Small hard registers are returned as-is. Pseudo-registers
2259 go through their `reg_map'. */
2260 regno = REGNO (orig);
2261 if (regno <= LAST_VIRTUAL_REGISTER)
2263 /* Some hard registers are also mapped,
2264 but others are not translated. */
2265 if (map->reg_map[regno] != 0)
2266 return map->reg_map[regno];
2268 /* If this is the virtual frame pointer, make space in current
2269 function's stack frame for the stack frame of the inline function.
2271 Copy the address of this area into a pseudo. Map
2272 virtual_stack_vars_rtx to this pseudo and set up a constant
2273 equivalence for it to be the address. This will substitute the
2274 address into insns where it can be substituted and use the new
2275 pseudo where it can't. */
2276 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2279 int size = DECL_FRAME_SIZE (map->fndecl);
2281 #ifdef FRAME_GROWS_DOWNWARD
2282 /* In this case, virtual_stack_vars_rtx points to one byte
2283 higher than the top of the frame area. So make sure we
2284 allocate a big enough chunk to keep the frame pointer
2285 aligned like a real one. */
2286 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
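/* For instance (hypothetical numbers): with an inline function whose frame
   size is 20 bytes and BIGGEST_ALIGNMENT of 64 bits, this computes
   CEIL_ROUND (20, 8) = (20 + 7) & ~7 = 24, so the block allocated below is
   padded to 24 bytes and the substitute frame stays aligned like a real one. */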
2289 loc = assign_stack_temp (BLKmode, size, 1);
2290 loc = XEXP (loc, 0);
2291 #ifdef FRAME_GROWS_DOWNWARD
2292 /* In this case, virtual_stack_vars_rtx points to one byte
2293 higher than the top of the frame area. So compute the offset
2294 to one byte higher than our substitute frame. */
2295 loc = plus_constant (loc, size);
2297 map->reg_map[regno] = temp
2298 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2300 #ifdef STACK_BOUNDARY
2301 mark_reg_pointer (map->reg_map[regno],
2302 STACK_BOUNDARY / BITS_PER_UNIT);
2305 if (REGNO (temp) < map->const_equiv_map_size)
2307 map->const_equiv_map[REGNO (temp)] = loc;
2308 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2311 seq = gen_sequence ();
2313 emit_insn_after (seq, map->insns_at_start);
2316 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2318 /* Do the same for a block to contain any arguments referenced in memory. */
2321 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2324 loc = assign_stack_temp (BLKmode, size, 1);
2325 loc = XEXP (loc, 0);
2326 /* When arguments grow downward, the virtual incoming
2327 args pointer points to the top of the argument block,
2328 so the remapped location better do the same. */
2329 #ifdef ARGS_GROW_DOWNWARD
2330 loc = plus_constant (loc, size);
2332 map->reg_map[regno] = temp
2333 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2335 #ifdef STACK_BOUNDARY
2336 mark_reg_pointer (map->reg_map[regno],
2337 STACK_BOUNDARY / BITS_PER_UNIT);
2340 if (REGNO (temp) < map->const_equiv_map_size)
2342 map->const_equiv_map[REGNO (temp)] = loc;
2343 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2346 seq = gen_sequence ();
2348 emit_insn_after (seq, map->insns_at_start);
2351 else if (REG_FUNCTION_VALUE_P (orig))
2353 /* This is a reference to the function return value. If
2354 the function doesn't have a return value, error. If the
2355 mode doesn't agree, make a SUBREG. */
2356 if (map->inline_target == 0)
2357 /* Must be unrolling loops or replicating code if we
2358 reach here, so return the register unchanged. */
2360 else if (mode != GET_MODE (map->inline_target))
2361 return gen_lowpart (mode, map->inline_target);
2363 return map->inline_target;
2367 if (map->reg_map[regno] == NULL)
2369 map->reg_map[regno] = gen_reg_rtx (mode);
2370 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2371 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2372 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2373 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2375 if (map->regno_pointer_flag[regno])
2376 mark_reg_pointer (map->reg_map[regno],
2377 map->regno_pointer_align[regno]);
2379 return map->reg_map[regno];
2382 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2383 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2384 if (GET_CODE (copy) == SUBREG)
2385 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2386 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2387 else if (GET_CODE (copy) == CONCAT)
2388 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2390 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2391 SUBREG_WORD (orig));
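/* Purely illustrative (made-up register numbers and modes): if ORIG is
   (subreg:HI (reg:DI 70) 1) and reg 70 was itself remapped to
   (subreg:DI (reg:TI 90) 2), the nested SUBREG above is flattened to
   (subreg:HI (reg:TI 90) 3); the word offsets simply add. */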
2394 copy = gen_rtx_ADDRESSOF (mode,
2395 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2396 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2397 regno = ADDRESSOF_REGNO (orig);
2398 if (map->reg_map[regno])
2399 regno = REGNO (map->reg_map[regno]);
2400 else if (regno > LAST_VIRTUAL_REGISTER)
2402 temp = XEXP (orig, 0);
2403 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2404 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2405 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2406 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2407 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2409 if (map->regno_pointer_flag[regno])
2410 mark_reg_pointer (map->reg_map[regno],
2411 map->regno_pointer_align[regno]);
2412 regno = REGNO (map->reg_map[regno]);
2414 ADDRESSOF_REGNO (copy) = regno;
2419 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2420 to (use foo) if the original insn didn't have a subreg.
2421 Removing the subreg distorts the VAX movstrhi pattern
2422 by changing the mode of an operand. */
2423 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2424 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2425 copy = SUBREG_REG (copy);
2426 return gen_rtx_fmt_e (code, VOIDmode, copy);
2429 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2430 = LABEL_PRESERVE_P (orig);
2431 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2434 copy = gen_rtx_LABEL_REF (mode,
2435 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2436 : get_label_from_map (map,
2437 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2438 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2440 /* The fact that this label was previously nonlocal does not mean
2441 it still is, so we must check if it is within the range of
2442 this function's labels. */
2443 LABEL_REF_NONLOCAL_P (copy)
2444 = (LABEL_REF_NONLOCAL_P (orig)
2445 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2446 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2448 /* If we have made a nonlocal label local, it means that this
2449 inlined call will be referring to our nonlocal goto handler.
2450 So make sure we create one for this block; we normally would
2451 not since this is not otherwise considered a "call". */
2452 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2453 function_call_count++;
2463 /* Symbols which represent the address of a label stored in the constant
2464 pool must be modified to point to a constant pool entry for the
2465 remapped label. Otherwise, symbols are returned unchanged. */
2466 if (CONSTANT_POOL_ADDRESS_P (orig))
2468 rtx constant = get_pool_constant (orig);
2469 if (GET_CODE (constant) == LABEL_REF)
2470 return XEXP (force_const_mem (GET_MODE (orig),
2471 copy_rtx_and_substitute (constant,
2479 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2480 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2481 duplicate of a CONST_DOUBLE we have already seen. */
2482 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2486 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2487 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2490 return immed_double_const (CONST_DOUBLE_LOW (orig),
2491 CONST_DOUBLE_HIGH (orig), VOIDmode);
2494 /* Make new constant pool entry for a constant
2495 that was in the pool of the inline function. */
2496 if (RTX_INTEGRATED_P (orig))
2498 /* If this was an address of a constant pool entry that itself
2499 had to be placed in the constant pool, it might not be a
2500 valid address. So the recursive call below might turn it
2501 into a register. In that case, it isn't a constant any
2502 more, so return it. This has the potential of changing a
2503 MEM into a REG, but we'll assume that it is safe. */
2504 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2505 if (! CONSTANT_P (temp))
2507 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2512 /* If from constant pool address, make new constant pool entry and
2513 return its address. */
2514 if (! RTX_INTEGRATED_P (orig))
2518 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2519 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2523 /* Legitimizing the address here is incorrect.
2525 The only ADDRESS rtx's that can reach here are ones created by
2526 save_constants. Hence the operand of the ADDRESS is always valid
2527 in this position of the instruction, since the original rtx without
2528 the ADDRESS was valid.
2530 The reason we don't legitimize the address here is that on the
2531 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2532 This code forces the operand of the address to a register, which
2533 fails because we can not take the HIGH part of a register.
2535 Also, change_address may create new registers. These registers
2536 will not have valid reg_map entries. This can cause try_constants()
2537 to fail because it assumes that all registers in the rtx have valid
2538 reg_map entries, and it may end up replacing one of these new
2539 registers with junk. */
2541 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2542 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2545 temp = XEXP (temp, 0);
2547 #ifdef POINTERS_EXTEND_UNSIGNED
2548 if (GET_MODE (temp) != GET_MODE (orig))
2549 temp = convert_memory_address (GET_MODE (orig), temp);
2555 /* If a single asm insn contains multiple output operands
2556 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2557 We must make sure that the copied insn continues to share it. */
2558 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2560 copy = rtx_alloc (ASM_OPERANDS);
2561 copy->volatil = orig->volatil;
2562 XSTR (copy, 0) = XSTR (orig, 0);
2563 XSTR (copy, 1) = XSTR (orig, 1);
2564 XINT (copy, 2) = XINT (orig, 2);
2565 XVEC (copy, 3) = map->copy_asm_operands_vector;
2566 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2567 XSTR (copy, 5) = XSTR (orig, 5);
2568 XINT (copy, 6) = XINT (orig, 6);
2574 /* This is given special treatment because the first
2575 operand of a CALL is a (MEM ...) which may get
2576 forced into a register for cse. This is undesirable
2577 if function-address cse isn't wanted or if we won't do cse. */
2578 #ifndef NO_FUNCTION_CSE
2579 if (! (optimize && ! flag_no_function_cse))
2581 return gen_rtx_CALL (GET_MODE (orig),
2582 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2583 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2584 copy_rtx_and_substitute (XEXP (orig, 1), map));
2588 /* Must be ifdefed out for loop unrolling to work. */
2594 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2595 Adjust the setting by the offset of the area we made.
2596 If the nonlocal goto is into the current function,
2597 this will result in unnecessarily bad code, but should work. */
2598 if (SET_DEST (orig) == virtual_stack_vars_rtx
2599 || SET_DEST (orig) == virtual_incoming_args_rtx)
2601 /* In case a translation hasn't occurred already, make one now. */
2602 rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
2603 rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2604 rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2605 HOST_WIDE_INT loc_offset
2606 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2608 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2611 (copy_rtx_and_substitute (SET_SRC (orig), map),
2618 copy = rtx_alloc (MEM);
2619 PUT_MODE (copy, mode);
2620 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2621 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2622 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2624 /* If doing function inlining, this MEM might not be const in the
2625 function that it is being inlined into, and thus may not be
2626 unchanging after function inlining. Constant pool references are
2627 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits. */
2629 if (! map->integrating)
2630 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2638 copy = rtx_alloc (code);
2639 PUT_MODE (copy, mode);
2640 copy->in_struct = orig->in_struct;
2641 copy->volatil = orig->volatil;
2642 copy->unchanging = orig->unchanging;
2644 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2646 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2648 switch (*format_ptr++)
2651 XEXP (copy, i) = XEXP (orig, i);
2655 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2659 /* Change any references to old-insns to point to the
2660 corresponding copied insns. */
2661 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2665 XVEC (copy, i) = XVEC (orig, i);
2666 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2668 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2669 for (j = 0; j < XVECLEN (copy, i); j++)
2670 XVECEXP (copy, i, j)
2671 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2676 XWINT (copy, i) = XWINT (orig, i);
2680 XINT (copy, i) = XINT (orig, i);
2684 XSTR (copy, i) = XSTR (orig, i);
2692 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2694 map->orig_asm_operands_vector = XVEC (orig, 3);
2695 map->copy_asm_operands_vector = XVEC (copy, 3);
2696 map->copy_asm_constraints_vector = XVEC (copy, 4);
2702 /* Substitute known constant values into INSN, if that is valid. */
2705 try_constants (insn, map)
2707 struct inline_remap *map;
2712 subst_constants (&PATTERN (insn), insn, map);
2714 /* Apply the changes if they are valid; otherwise discard them. */
2715 apply_change_group ();
2717 /* Show we don't know the value of anything stored or clobbered. */
2718 note_stores (PATTERN (insn), mark_stores);
2719 map->last_pc_value = 0;
2721 map->last_cc0_value = 0;
2724 /* Set up any constant equivalences made in this insn. */
2725 for (i = 0; i < map->num_sets; i++)
2727 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2729 int regno = REGNO (map->equiv_sets[i].dest);
2731 if (regno < map->const_equiv_map_size
2732 && (map->const_equiv_map[regno] == 0
2733 /* The following clause is a hack to make the case work where GNU C++
2734 reassigns a variable to make cse work right. */
2735 || ! rtx_equal_p (map->const_equiv_map[regno],
2736 map->equiv_sets[i].equiv)))
2738 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2739 map->const_age_map[regno] = map->const_age;
2742 else if (map->equiv_sets[i].dest == pc_rtx)
2743 map->last_pc_value = map->equiv_sets[i].equiv;
2745 else if (map->equiv_sets[i].dest == cc0_rtx)
2746 map->last_cc0_value = map->equiv_sets[i].equiv;
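/* Sketch of the effect (the pseudo number is hypothetical): copying an insn
   such as (set (reg:SI 104) (const_int 3)) records dest = (reg:SI 104) and
   equiv = (const_int 3) in equiv_sets; the loop above then enters the
   constant into const_equiv_map[104], stamped with the current const_age,
   so later copied insns may have (reg:SI 104) replaced by (const_int 3)
   until const_age advances past that stamp. */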
2751 /* Substitute known constants for pseudo regs in the contents of LOC,
2752 which are part of INSN.
2753 If INSN is zero, the substitution should always be done (this is used to update DECL_RTL).
2755 These changes are taken out by try_constants if the result is not valid.
2757 Note that we are more concerned with determining when the result of a SET
2758 is a constant, for further propagation, than actually inserting constants
2759 into insns; cse will do the latter task better.
2761 This function is also used to adjust the addresses of items previously addressed
2762 via the virtual stack variable or virtual incoming arguments registers. */
2765 subst_constants (loc, insn, map)
2768 struct inline_remap *map;
2772 register enum rtx_code code;
2773 register char *format_ptr;
2774 int num_changes = num_validated_changes ();
2776 enum machine_mode op0_mode;
2778 code = GET_CODE (x);
2793 validate_change (insn, loc, map->last_cc0_value, 1);
2799 /* The only thing we can do with a USE or CLOBBER is possibly do
2800 some substitutions in a MEM within it. */
2801 if (GET_CODE (XEXP (x, 0)) == MEM)
2802 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2806 /* Substitute for parms and known constants. Don't replace
2807 hard regs used as user variables with constants. */
2809 int regno = REGNO (x);
2811 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2812 && regno < map->const_equiv_map_size
2813 && map->const_equiv_map[regno] != 0
2814 && map->const_age_map[regno] >= map->const_age)
2815 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2820 /* SUBREG applied to something other than a reg
2821 should be treated as ordinary, since that must
2822 be a special hack and we don't know how to treat it specially.
2823 Consider for example mulsidi3 in m68k.md.
2824 Ordinary SUBREG of a REG needs this special treatment. */
2825 if (GET_CODE (SUBREG_REG (x)) == REG)
2827 rtx inner = SUBREG_REG (x);
2830 /* We can't call subst_constants on &SUBREG_REG (x) because any
2831 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2832 see what is inside, try to form the new SUBREG and see if that is
2833 valid. We handle two cases: extracting a full word in an
2834 integral mode and extracting the low part. */
2835 subst_constants (&inner, NULL_RTX, map);
2837 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2838 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2839 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2840 new = operand_subword (inner, SUBREG_WORD (x), 0,
2841 GET_MODE (SUBREG_REG (x)));
2843 cancel_changes (num_changes);
2844 if (new == 0 && subreg_lowpart_p (x))
2845 new = gen_lowpart_common (GET_MODE (x), inner);
2848 validate_change (insn, loc, new, 1);
2855 subst_constants (&XEXP (x, 0), insn, map);
2857 /* If a memory address got spoiled, change it back. */
2858 if (insn != 0 && num_validated_changes () != num_changes
2859 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2860 cancel_changes (num_changes);
2865 /* Substitute constants in our source, and in any arguments to a
2866 complex (e.g., ZERO_EXTRACT) destination, but not in the destination itself. */
2868 rtx *dest_loc = &SET_DEST (x);
2869 rtx dest = *dest_loc;
2872 subst_constants (&SET_SRC (x), insn, map);
2875 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2876 || GET_CODE (*dest_loc) == SUBREG
2877 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2879 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2881 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2882 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2884 dest_loc = &XEXP (*dest_loc, 0);
2887 /* Do substitute in the address of a destination in memory. */
2888 if (GET_CODE (*dest_loc) == MEM)
2889 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2891 /* Check for the case where DEST is a SUBREG, both it and the underlying
2892 register are no wider than one word, and the SUBREG has the wider mode.
2893 In that case, we are really setting the underlying register to the
2894 source converted to the mode of DEST. So indicate that. */
2895 if (GET_CODE (dest) == SUBREG
2896 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2897 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2898 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2899 <= GET_MODE_SIZE (GET_MODE (dest)))
2900 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2902 src = tem, dest = SUBREG_REG (dest);
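/* Illustrative example (hypothetical pseudo number): given
   (set (subreg:SI (reg:QI 101) 0) (const_int 65)), the test above rewrites
   DEST to (reg:QI 101) and SRC to the constant taken in QImode, so the
   equivalence recorded below applies to the underlying register rather than
   to the SUBREG. */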
2904 /* If storing a recognizable value, save it for later recording. */
2905 if ((map->num_sets < MAX_RECOG_OPERANDS)
2906 && (CONSTANT_P (src)
2907 || (GET_CODE (src) == REG
2908 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2909 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2910 || (GET_CODE (src) == PLUS
2911 && GET_CODE (XEXP (src, 0)) == REG
2912 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2913 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2914 && CONSTANT_P (XEXP (src, 1)))
2915 || GET_CODE (src) == COMPARE
2920 && (src == pc_rtx || GET_CODE (src) == RETURN
2921 || GET_CODE (src) == LABEL_REF))))
2923 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2924 it will cause us to save the COMPARE with any constants
2925 substituted, which is what we want for later. */
2926 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2927 map->equiv_sets[map->num_sets++].dest = dest;
2936 format_ptr = GET_RTX_FORMAT (code);
2938 /* If the first operand is an expression, save its mode for later. */
2939 if (*format_ptr == 'e')
2940 op0_mode = GET_MODE (XEXP (x, 0));
2942 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2944 switch (*format_ptr++)
2951 subst_constants (&XEXP (x, i), insn, map);
2961 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2964 for (j = 0; j < XVECLEN (x, i); j++)
2965 subst_constants (&XVECEXP (x, i, j), insn, map);
2974 /* If this is a commutative operation, move a constant to the second
2975 operand unless the second operand is already a CONST_INT. */
2976 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2977 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2979 rtx tem = XEXP (x, 0);
2980 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2981 validate_change (insn, &XEXP (x, 1), tem, 1);
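/* For example, (plus:SI (const_int 4) (reg:SI 100)) is rewritten here as
   (plus:SI (reg:SI 100) (const_int 4)) (the register number is illustrative),
   restoring the canonical form in which a constant appears as the second
   operand of a commutative operation. */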
2984 /* Simplify the expression in case we put in some constants. */
2985 switch (GET_RTX_CLASS (code))
2988 new = simplify_unary_operation (code, GET_MODE (x),
2989 XEXP (x, 0), op0_mode);
2994 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2995 if (op_mode == VOIDmode)
2996 op_mode = GET_MODE (XEXP (x, 1));
2997 new = simplify_relational_operation (code, op_mode,
2998 XEXP (x, 0), XEXP (x, 1));
2999 #ifdef FLOAT_STORE_FLAG_VALUE
3000 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3001 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3002 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3010 new = simplify_binary_operation (code, GET_MODE (x),
3011 XEXP (x, 0), XEXP (x, 1));
3016 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3017 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3022 validate_change (insn, loc, new, 1);
3025 /* Show that the registers modified no longer contain known constants. We are
3026 called from note_stores with parts of the new insn. */
3029 mark_stores (dest, x)
3034 enum machine_mode mode;
3036 /* DEST is always the innermost thing set, except in the case of
3037 SUBREGs of hard registers. */
3039 if (GET_CODE (dest) == REG)
3040 regno = REGNO (dest), mode = GET_MODE (dest);
3041 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3043 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3044 mode = GET_MODE (SUBREG_REG (dest));
3049 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3050 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3053 /* Ignore virtual stack var or virtual arg register since those
3054 are handled separately. */
3055 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3056 && regno != VIRTUAL_STACK_VARS_REGNUM)
3057 for (i = regno; i <= last_reg; i++)
3058 if (i < global_const_equiv_map_size)
3059 global_const_equiv_map[i] = 0;
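/* A small illustration (assuming HARD_REGNO_NREGS reports two registers for a
   DImode value on the target): a store of a DImode value into hard register 4
   gives last_reg == 5, so both global_const_equiv_map[4] and
   global_const_equiv_map[5] are invalidated; a store into a pseudo clears only
   its own entry. */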
3063 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3064 pointed to by PX, they represent constants in the constant pool.
3065 Replace these with a new memory reference obtained from force_const_mem.
3066 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3067 address of a constant pool entry. Replace them with the address of
3068 a new constant pool entry obtained from force_const_mem. */
3071 restore_constants (px)
3081 if (GET_CODE (x) == CONST_DOUBLE)
3083 /* We have to make a new CONST_DOUBLE to ensure that we account for
3084 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3085 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3089 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3090 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3093 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3097 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3099 restore_constants (&XEXP (x, 0));
3100 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3102 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3104 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3105 rtx new = XEXP (SUBREG_REG (x), 0);
3107 restore_constants (&new);
3108 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3109 PUT_MODE (new, GET_MODE (x));
3110 *px = validize_mem (new);
3112 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3114 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3115 XEXP (XEXP (x, 0), 0)),
3118 #ifdef POINTERS_EXTEND_UNSIGNED
3119 if (GET_MODE (new) != GET_MODE (x))
3120 new = convert_memory_address (GET_MODE (x), new);
3127 fmt = GET_RTX_FORMAT (GET_CODE (x));
3128 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3133 for (j = 0; j < XVECLEN (x, i); j++)
3134 restore_constants (&XVECEXP (x, i, j));
3138 restore_constants (&XEXP (x, i));
3145 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3146 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3147 that it points to the node itself, thus indicating that the node is its
3148 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3149 the given node is NULL, recursively descend the decl/block tree which
3150 it is the root of, and for each other ..._DECL or BLOCK node contained
3151 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3152 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3153 values to point to themselves. */
3156 set_block_origin_self (stmt)
3159 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3161 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3164 register tree local_decl;
3166 for (local_decl = BLOCK_VARS (stmt);
3167 local_decl != NULL_TREE;
3168 local_decl = TREE_CHAIN (local_decl))
3169 set_decl_origin_self (local_decl); /* Potential recursion. */
3173 register tree subblock;
3175 for (subblock = BLOCK_SUBBLOCKS (stmt);
3176 subblock != NULL_TREE;
3177 subblock = BLOCK_CHAIN (subblock))
3178 set_block_origin_self (subblock); /* Recurse. */
3183 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3184 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3185 node so that it points to the node itself, thus indicating that the
3186 node represents its own (abstract) origin. Additionally, if the
3187 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3188 the decl/block tree of which the given node is the root, and for
3189 each other ..._DECL or BLOCK node contained therein whose
3190 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3191 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3192 point to themselves. */
3195 set_decl_origin_self (decl)
3198 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3200 DECL_ABSTRACT_ORIGIN (decl) = decl;
3201 if (TREE_CODE (decl) == FUNCTION_DECL)
3205 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3206 DECL_ABSTRACT_ORIGIN (arg) = arg;
3207 if (DECL_INITIAL (decl) != NULL_TREE
3208 && DECL_INITIAL (decl) != error_mark_node)
3209 set_block_origin_self (DECL_INITIAL (decl));
3214 /* Given a pointer to some BLOCK node, and a boolean value to set the
3215 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3216 the given block, and for all local decls and all local sub-blocks
3217 (recursively) which are contained therein. */
3220 set_block_abstract_flags (stmt, setting)
3222 register int setting;
3224 register tree local_decl;
3225 register tree subblock;
3227 BLOCK_ABSTRACT (stmt) = setting;
3229 for (local_decl = BLOCK_VARS (stmt);
3230 local_decl != NULL_TREE;
3231 local_decl = TREE_CHAIN (local_decl))
3232 set_decl_abstract_flags (local_decl, setting);
3234 for (subblock = BLOCK_SUBBLOCKS (stmt);
3235 subblock != NULL_TREE;
3236 subblock = BLOCK_CHAIN (subblock))
3237 set_block_abstract_flags (subblock, setting);
3240 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3241 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3242 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3243 set the abstract flags for all of the parameters, local vars, local
3244 blocks and sub-blocks (recursively) to the same setting. */
3247 set_decl_abstract_flags (decl, setting)
3249 register int setting;
3251 DECL_ABSTRACT (decl) = setting;
3252 if (TREE_CODE (decl) == FUNCTION_DECL)
3256 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3257 DECL_ABSTRACT (arg) = setting;
3258 if (DECL_INITIAL (decl) != NULL_TREE
3259 && DECL_INITIAL (decl) != error_mark_node)
3260 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3264 /* Output the assembly language code for the function FNDECL
3265 from its DECL_SAVED_INSNS. Used for inline functions that are output
3266 at the end of compilation instead of where they appeared in the source. */
3269 output_inline_function (fndecl)
3275 /* Things we allocate from here on are part of this function, not permanent. */
3277 temporary_allocation ();
3279 head = DECL_SAVED_INSNS (fndecl);
3280 current_function_decl = fndecl;
3282 /* This call is only used to initialize global variables. */
3283 init_function_start (fndecl, "lossage", 1);
3285 /* Redo parameter determinations in case the FUNCTION_...
3286 macros took machine-specific actions that need to be redone. */
3287 assign_parms (fndecl, 1);
3289 /* Set stack frame size. */
3290 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3292 /* The first is a bit of a lie (the array may be larger), but doesn't
3293 matter too much and it isn't worth saving the actual bound. */
3294 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3295 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3296 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3297 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3298 max_parm_reg = MAX_PARMREG (head);
3299 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3301 stack_slot_list = STACK_SLOT_LIST (head);
3302 forced_labels = FORCED_LABELS (head);
3304 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3305 current_function_calls_alloca = 1;
3307 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3308 current_function_calls_setjmp = 1;
3310 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3311 current_function_calls_longjmp = 1;
3313 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3314 current_function_returns_struct = 1;
3316 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3317 current_function_returns_pcc_struct = 1;
3319 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3320 current_function_needs_context = 1;
3322 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3323 current_function_has_nonlocal_label = 1;
3325 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3326 current_function_returns_pointer = 1;
3328 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3329 current_function_uses_const_pool = 1;
3331 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3332 current_function_uses_pic_offset_table = 1;
3334 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3335 current_function_pops_args = POPS_ARGS (head);
3337 /* This is the only thing that the expand_function_end call that used to be here
3338 actually did, and that call can cause problems. */
3339 immediate_size_expand--;
3341 /* Find last insn and rebuild the constant pool. */
3342 for (last = FIRST_PARM_INSN (head);
3343 NEXT_INSN (last); last = NEXT_INSN (last))
3345 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3347 restore_constants (&PATTERN (last));
3348 restore_constants (®_NOTES (last));
3352 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3353 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3355 /* We must have already output DWARF debugging information for the
3356 original (abstract) inline function declaration/definition, so
3357 we want to make sure that the debugging information we generate
3358 for this special instance of the inline function refers back to
3359 the information we already generated. To make sure that happens,
3360 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3361 node (and for all of the local ..._DECL nodes which are its children)
3362 so that they all point to themselves. */
3364 set_decl_origin_self (fndecl);
3366 /* We're not deferring this any longer. */
3367 DECL_DEFER_OUTPUT (fndecl) = 0;
3369 /* We can't inline this anymore. */
3370 DECL_INLINE (fndecl) = 0;
3372 /* Compile this function all the way down to assembly code. */
3373 rest_of_compilation (fndecl);
3375 current_function_decl = 0;