1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "insn-config.h"
31 #include "insn-flags.h"
34 #include "integrate.h"
40 #define obstack_chunk_alloc xmalloc
41 #define obstack_chunk_free free
43 extern struct obstack *function_maybepermanent_obstack;
45 extern tree pushdecl ();
46 extern tree poplevel ();
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
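/* For example, CEIL_ROUND (13, 8) is 16.  The mask trick requires ALIGN
   to be a power of two.  */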
52 /* Default max number of insns a function can have and still be inline.
53 This is overridden on RISC machines. */
54 #ifndef INTEGRATE_THRESHOLD
55 #define INTEGRATE_THRESHOLD(DECL) \
56 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
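/* For example, a function with two parameters gets a default budget of
   8 * (8 + 2) = 80 insns under this definition.  */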
59 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
60 static void finish_inline PROTO((tree, rtx));
61 static void adjust_copied_decl_tree PROTO((tree));
62 static tree copy_decl_list PROTO((tree));
63 static tree copy_decl_tree PROTO((tree));
64 static void copy_decl_rtls PROTO((tree));
65 static void save_constants PROTO((rtx *));
66 static void note_modified_parmregs PROTO((rtx, rtx));
67 static rtx copy_for_inline PROTO((rtx));
68 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
69 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
70 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
71 static void restore_constants PROTO((rtx *));
72 static void set_block_origin_self PROTO((tree));
73 static void set_decl_origin_self PROTO((tree));
74 static void set_block_abstract_flags PROTO((tree, int));
76 void set_decl_abstract_flags PROTO((tree, int));
78 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
79 is safe and reasonable to integrate into other functions.
80 Nonzero means value is a warning message with a single %s
81 for the function's name. */
84 function_cannot_inline_p (fndecl)
88 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
89 int max_insns = INTEGRATE_THRESHOLD (fndecl);
90 register int ninsns = 0;
/* No inlines with varargs.  `grokdeclarator' gives a warning
   message about that if `inline' is specified.  This code
   is put in to catch the volunteers.  */
96 if ((last && TREE_VALUE (last) != void_type_node)
97 || current_function_varargs)
98 return "varargs function cannot be inline";
100 if (current_function_calls_alloca)
101 return "function using alloca cannot be inline";
103 if (current_function_contains_functions)
104 return "function with nested functions cannot be inline";
/* If it's not even close, don't even look.  */
107 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
108 return "function too large to be inline";
/* Don't inline functions which do not specify a function prototype and
   have a BLKmode argument or take the address of a parameter.  */
113 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
115 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
116 TREE_ADDRESSABLE (parms) = 1;
117 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
118 return "no prototype, and parameter address used; cannot be inline";
122 /* We can't inline functions that return structures
123 the old-fashioned PCC way, copying into a static block. */
124 if (current_function_returns_pcc_struct)
125 return "inline functions not supported for this return value type";
127 /* We can't inline functions that return BLKmode structures in registers. */
128 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
129 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
130 return "inline functions not supported for this return value type";
132 /* We can't inline functions that return structures of varying size. */
133 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
134 return "function with varying-size return value cannot be inline";
136 /* Cannot inline a function with a varying size argument or one that
137 receives a transparent union. */
138 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
140 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
141 return "function with varying-size parameter cannot be inline";
142 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
143 return "function with transparent unit parameter cannot be inline";
146 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
148 for (ninsns = 0, insn = get_first_nonparm_insn ();
149 insn && ninsns < max_insns;
150 insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
  ninsns++;
154 if (ninsns >= max_insns)
155 return "function too large to be inline";
158 /* We cannot inline this function if forced_labels is non-zero. This
159 implies that a label in this function was used as an initializer.
160 Because labels can not be duplicated, all labels in the function
161 will be renamed when it is inlined. However, there is no way to find
162 and fix all variables initialized with addresses of labels in this
   function, hence inlining is impossible.  */
if (forced_labels)
  return "function with label addresses used in initializers cannot inline";
168 /* We cannot inline a nested function that jumps to a nonlocal label. */
169 if (current_function_has_nonlocal_goto)
170 return "function with nonlocal goto cannot be inline";
175 /* Variables used within save_for_inline. */
177 /* Mapping from old pseudo-register to new pseudo-registers.
178 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
179 It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;
183 /* Mapping from old code-labels to new code-labels.
184 The first element of this map is label_map[min_labelno].
185 It is allocated in `save_for_inline' and `expand_inline_function',
186 and deallocated on exit from each of those routines. */
187 static rtx *label_map;
189 /* Mapping from old insn uid's to copied insns.
190 It is allocated in `save_for_inline' and `expand_inline_function',
191 and deallocated on exit from each of those routines. */
192 static rtx *insn_map;
194 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
195 Zero for a reg that isn't a parm's home.
196 Only reg numbers less than max_parm_reg are mapped here. */
197 static tree *parmdecl_map;
199 /* Keep track of first pseudo-register beyond those that are parms. */
200 static int max_parm_reg;
202 /* When an insn is being copied by copy_for_inline,
203 this is nonzero if we have copied an ASM_OPERANDS.
204 In that case, it is the original input-operand vector. */
205 static rtvec orig_asm_operands_vector;
207 /* When an insn is being copied by copy_for_inline,
208 this is nonzero if we have copied an ASM_OPERANDS.
209 In that case, it is the copied input-operand vector. */
210 static rtvec copy_asm_operands_vector;
212 /* Likewise, this is the copied constraints vector. */
213 static rtvec copy_asm_constraints_vector;
215 /* In save_for_inline, nonzero if past the parm-initialization insns. */
216 static int in_nonparm_insns;
218 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
219 needed to save FNDECL's insns and info for future inline expansion. */
222 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
229 int function_flags, i;
233 /* Compute the values of any flags we must restore when inlining this. */
236 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
237 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
238 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
239 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
240 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
241 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
242 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
243 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
244 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
245 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
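/* Each current_function_* value above is either 0 or 1, and the
   FUNCTION_FLAGS_* values are distinct bits, so this sum amounts to
   OR-ing the selected flag bits together.  */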
247 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
248 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
249 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
for (parms = DECL_ARGUMENTS (fndecl), i = 0;
     parms;
     parms = TREE_CHAIN (parms), i++)
255 rtx p = DECL_RTL (parms);
257 if (GET_CODE (p) == MEM && copy)
259 /* Copy the rtl so that modifications of the addresses
260 later in compilation won't affect this arg_vector.
   Virtual register instantiation can screw the address
   of the MEM.  */
263 rtx new = copy_rtx (p);
265 /* Don't leave the old copy anywhere in this decl. */
266 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
267 || (GET_CODE (DECL_RTL (parms)) == MEM
268 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
269 && (XEXP (DECL_RTL (parms), 0)
270 == XEXP (DECL_INCOMING_RTL (parms), 0))))
271 DECL_INCOMING_RTL (parms) = new;
272 DECL_RTL (parms) = new;
275 RTVEC_ELT (arg_vector, i) = p;
277 if (GET_CODE (p) == REG)
278 parmdecl_map[REGNO (p)] = parms;
279 else if (GET_CODE (p) == CONCAT)
281 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
282 rtx pimag = gen_imagpart (GET_MODE (preal), p);
284 if (GET_CODE (preal) == REG)
285 parmdecl_map[REGNO (preal)] = parms;
286 if (GET_CODE (pimag) == REG)
287 parmdecl_map[REGNO (pimag)] = parms;
290 /* This flag is cleared later
291 if the function ever modifies the value of the parm. */
292 TREE_READONLY (parms) = 1;
295 /* Assume we start out in the insns that set up the parameters. */
296 in_nonparm_insns = 0;
/* The list of DECL_SAVED_INSNS starts off with a header which
299 contains the following information:
301 the first insn of the function (not including the insns that copy
302 parameters into registers).
303 the first parameter insn of the function,
304 the first label used by that function,
305 the last label used by that function,
306 the highest register number used for parameters,
307 the total number of registers used,
308 the size of the incoming stack area for parameters,
309 the number of bytes popped on return,
311 some flags that are used to restore compiler globals,
312 the value of current_function_outgoing_args_size,
313 the original argument vector,
314 the original DECL_INITIAL,
   and pointers to the table of pseudo regs, pointer flags, and alignment.  */
317 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
318 max_parm_reg, max_reg,
319 current_function_args_size,
320 current_function_pops_args,
321 stack_slot_list, forced_labels, function_flags,
322 current_function_outgoing_args_size,
323 arg_vector, (rtx) DECL_INITIAL (fndecl),
324 (rtvec) regno_reg_rtx, regno_pointer_flag,
325 regno_pointer_align);
328 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
329 things that must be done to make FNDECL expandable as an inline function.
330 HEAD contains the chain of insns to which FNDECL will expand. */
333 finish_inline (fndecl, head)
337 NEXT_INSN (head) = get_first_nonparm_insn ();
338 FIRST_PARM_INSN (head) = get_insns ();
339 DECL_SAVED_INSNS (fndecl) = head;
340 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
343 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
344 they all point to the new (copied) rtxs. */
347 adjust_copied_decl_tree (block)
350 register tree subblock;
351 register rtx original_end;
353 original_end = BLOCK_END_NOTE (block);
if (original_end)
  {
    BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
    NOTE_SOURCE_FILE (original_end) = 0;
  }
360 /* Process all subblocks. */
361 for (subblock = BLOCK_SUBBLOCKS (block);
363 subblock = TREE_CHAIN (subblock))
364 adjust_copied_decl_tree (subblock);
367 /* Make the insns and PARM_DECLs of the current function permanent
368 and record other information in DECL_SAVED_INSNS to allow inlining
369 of this function in subsequent calls.
371 This function is called when we are going to immediately compile
372 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
373 modified by the compilation process, so we copy all of them to
374 new storage and consider the new insns to be the insn chain to be
375 compiled. Our caller (rest_of_compilation) saves the original
376 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
378 /* ??? The nonlocal_label list should be adjusted also. However, since
379 a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */
384 save_for_inline_copying (fndecl)
387 rtx first_insn, last_insn, insn;
389 int max_labelno, min_labelno, i, len;
392 rtx first_nonparm_insn;
396 /* Make and emit a return-label if we have not already done so.
397 Do this before recording the bounds on label numbers. */
399 if (return_label == 0)
401 return_label = gen_label_rtx ();
402 emit_label (return_label);
405 /* Get some bounds on the labels and registers used. */
407 max_labelno = max_label_num ();
408 min_labelno = get_first_label_num ();
409 max_reg = max_reg_num ();
411 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
412 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
413 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
414 for the parms, prior to elimination of virtual registers.
415 These values are needed for substituting parms properly. */
417 max_parm_reg = max_parm_reg_num ();
418 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
420 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
422 if (current_function_uses_const_pool)
424 /* Replace any constant pool references with the actual constant. We
425 will put the constants back in the copy made below. */
426 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
427 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
429 save_constants (&PATTERN (insn));
430 if (REG_NOTES (insn))
431 save_constants (®_NOTES (insn));
434 /* Clear out the constant pool so that we can recreate it with the
435 copied constants below. */
436 init_const_rtx_hash_table ();
437 clear_const_double_mem ();
440 max_uid = INSN_UID (head);
442 /* We have now allocated all that needs to be allocated permanently
443 on the rtx obstack. Set our high-water mark, so that we
444 can free the rest of this when the time comes. */
/* Copy the chain of insns of this function.
449 Install the copied chain as the insns of this function,
450 for continued compilation;
451 the original chain is recorded as the DECL_SAVED_INSNS
452 for inlining future calls. */
454 /* If there are insns that copy parms from the stack into pseudo registers,
455 those insns are not copied. `expand_inline_function' must
456 emit the correct code to handle such things. */
insn = get_insns ();
if (GET_CODE (insn) != NOTE)
  abort ();
461 first_insn = rtx_alloc (NOTE);
462 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
463 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
464 INSN_UID (first_insn) = INSN_UID (insn);
465 PREV_INSN (first_insn) = NULL;
466 NEXT_INSN (first_insn) = NULL;
467 last_insn = first_insn;
469 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
470 Make these new rtx's now, and install them in regno_reg_rtx, so they
471 will be the official pseudo-reg rtx's for the rest of compilation. */
473 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
475 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
476 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
477 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
478 regno_reg_rtx[i], len);
480 bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
481 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
482 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
484 /* Likewise each label rtx must have a unique rtx as its copy. */
486 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
487 label_map -= min_labelno;
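/* Biasing the pointer by min_labelno lets label_map be indexed directly
   by label number.  */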
489 for (i = min_labelno; i < max_labelno; i++)
490 label_map[i] = gen_label_rtx ();
492 /* Record the mapping of old insns to copied insns. */
494 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
495 bzero ((char *) insn_map, max_uid * sizeof (rtx));
497 /* Get the insn which signals the end of parameter setup code. */
498 first_nonparm_insn = get_first_nonparm_insn ();
500 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
501 (the former occurs when a variable has its address taken)
502 since these may be shared and can be changed by virtual
503 register instantiation. DECL_RTL values for our arguments
504 have already been copied by initialize_for_inline. */
505 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
506 if (GET_CODE (regno_reg_rtx[i]) == MEM)
507 XEXP (regno_reg_rtx[i], 0)
508 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
510 /* Copy the tree of subblocks of the function, and the decls in them.
511 We will use the copy for compiling this function, then restore the original
512 subblocks and decls for use when inlining this function.
514 Several parts of the compiler modify BLOCK trees. In particular,
515 instantiate_virtual_regs will instantiate any virtual regs
516 mentioned in the DECL_RTLs of the decls, and loop
517 unrolling will replicate any BLOCK trees inside an unrolled loop.
519 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
520 which we will use for inlining. The rtl might even contain pseudoregs
521 whose space has been freed. */
523 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
524 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
526 /* Now copy each DECL_RTL which is a MEM,
527 so it is safe to modify their addresses. */
528 copy_decl_rtls (DECL_INITIAL (fndecl));
530 /* The fndecl node acts as its own progenitor, so mark it as such. */
531 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
/* Now copy the chain of insns.  Do this twice.  The first time, copy the
   insn itself and its body.  The second time, copy the REG_NOTES.  This is
   because a REG_NOTE may have a forward pointer to another insn.  */
537 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
539 orig_asm_operands_vector = 0;
541 if (insn == first_nonparm_insn)
542 in_nonparm_insns = 1;
544 switch (GET_CODE (insn))
547 /* No need to keep these. */
548 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
551 copy = rtx_alloc (NOTE);
552 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
553 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
554 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
else
  {
    NOTE_SOURCE_FILE (insn) = (char *) copy;
    NOTE_SOURCE_FILE (copy) = 0;
  }
560 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
566 copy = rtx_alloc (GET_CODE (insn));
568 if (GET_CODE (insn) == CALL_INSN)
569 CALL_INSN_FUNCTION_USAGE (copy) =
570 copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
572 PATTERN (copy) = copy_for_inline (PATTERN (insn));
573 INSN_CODE (copy) = -1;
574 LOG_LINKS (copy) = NULL_RTX;
575 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
579 copy = label_map[CODE_LABEL_NUMBER (insn)];
580 LABEL_NAME (copy) = LABEL_NAME (insn);
584 copy = rtx_alloc (BARRIER);
590 INSN_UID (copy) = INSN_UID (insn);
591 insn_map[INSN_UID (insn)] = copy;
592 NEXT_INSN (last_insn) = copy;
PREV_INSN (copy) = last_insn;
last_insn = copy;
597 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
599 /* Now copy the REG_NOTES. */
600 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
601 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
602 && insn_map[INSN_UID(insn)])
603 REG_NOTES (insn_map[INSN_UID (insn)])
604 = copy_for_inline (REG_NOTES (insn));
606 NEXT_INSN (last_insn) = NULL;
608 finish_inline (fndecl, head);
610 /* Make new versions of the register tables. */
611 new = (char *) savealloc (regno_pointer_flag_length);
612 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
613 new1 = (char *) savealloc (regno_pointer_flag_length);
614 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
615 new2 = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
616 bcopy ((char *) regno_reg_rtx, (char *) new2,
617 regno_pointer_flag_length * sizeof (rtx));
619 regno_pointer_flag = new;
620 regno_pointer_align = new1;
621 regno_reg_rtx = new2;
623 set_new_first_and_last_insn (first_insn, last_insn);
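/* From here on, compilation continues on the copied insn chain; the
   original chain stays attached to DECL_SAVED_INSNS for use when this
   function is inlined.  */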
626 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
627 For example, this can copy a list made of TREE_LIST nodes. While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
629 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
630 point to the corresponding (abstract) original node. */
633 copy_decl_list (list)
637 register tree prev, next;
642 head = prev = copy_node (list);
643 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
644 DECL_ABSTRACT_ORIGIN (head) = list;
645 next = TREE_CHAIN (list);
650 copy = copy_node (next);
651 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
652 DECL_ABSTRACT_ORIGIN (copy) = next;
653 TREE_CHAIN (prev) = copy;
655 next = TREE_CHAIN (next);
660 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
663 copy_decl_tree (block)
666 tree t, vars, subblocks;
668 vars = copy_decl_list (BLOCK_VARS (block));
671 /* Process all subblocks. */
672 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
674 tree copy = copy_decl_tree (t);
675 TREE_CHAIN (copy) = subblocks;
679 t = copy_node (block);
680 BLOCK_VARS (t) = vars;
681 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
682 /* If the BLOCK being cloned is already marked as having been instantiated
683 from something else, then leave that `origin' marking alone. Otherwise,
684 mark the clone as having originated from the BLOCK we are cloning. */
685 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
686 BLOCK_ABSTRACT_ORIGIN (t) = block;
690 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
693 copy_decl_rtls (block)
698 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
699 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
700 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
702 /* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
  copy_decl_rtls (t);
707 /* Make the insns and PARM_DECLs of the current function permanent
708 and record other information in DECL_SAVED_INSNS to allow inlining
709 of this function in subsequent calls.
711 This routine need not copy any insns because we are not going
712 to immediately compile the insns in the insn chain. There
713 are two cases when we would compile the insns for FNDECL:
714 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
715 be output at the end of other compilation, because somebody took
716 its address. In the first case, the insns of FNDECL are copied
717 as it is expanded inline, so FNDECL's saved insns are not
718 modified. In the second case, FNDECL is used for the last time,
719 so modifying the rtl is not a problem.
721 We don't have to worry about FNDECL being inline expanded by
722 other functions which are written at the end of compilation
723 because flag_no_inline is turned on when we begin writing
724 functions at the end of compilation. */
727 save_for_inline_nocopy (fndecl)
732 rtx first_nonparm_insn;
734 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
735 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
736 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
737 for the parms, prior to elimination of virtual registers.
738 These values are needed for substituting parms properly. */
740 max_parm_reg = max_parm_reg_num ();
741 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
743 /* Make and emit a return-label if we have not already done so. */
745 if (return_label == 0)
747 return_label = gen_label_rtx ();
748 emit_label (return_label);
751 head = initialize_for_inline (fndecl, get_first_label_num (),
752 max_label_num (), max_reg_num (), 0);
754 /* If there are insns that copy parms from the stack into pseudo registers,
755 those insns are not copied. `expand_inline_function' must
756 emit the correct code to handle such things. */
insn = get_insns ();
if (GET_CODE (insn) != NOTE)
  abort ();
762 /* Get the insn which signals the end of parameter setup code. */
763 first_nonparm_insn = get_first_nonparm_insn ();
765 /* Now just scan the chain of insns to see what happens to our
766 PARM_DECLs. If a PARM_DECL is used but never modified, we
767 can substitute its rtl directly when expanding inline (and
768 perform constant folding when its incoming value is constant).
769 Otherwise, we have to copy its value into a new register and track
770 the new register's life. */
772 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
774 if (insn == first_nonparm_insn)
775 in_nonparm_insns = 1;
777 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
779 if (current_function_uses_const_pool)
781 /* Replace any constant pool references with the actual constant.
782 We will put the constant back if we need to write the
783 function out after all. */
784 save_constants (&PATTERN (insn));
785 if (REG_NOTES (insn))
786 save_constants (®_NOTES (insn));
789 /* Record what interesting things happen to our parameters. */
790 note_stores (PATTERN (insn), note_modified_parmregs);
794 /* We have now allocated all that needs to be allocated permanently
795 on the rtx obstack. Set our high-water mark, so that we
796 can free the rest of this when the time comes. */
800 finish_inline (fndecl, head);
803 /* Given PX, a pointer into an insn, search for references to the constant
804 pool. Replace each with a CONST that has the mode of the original
805 constant, contains the constant, and has RTX_INTEGRATED_P set.
806 Similarly, constant pool addresses not enclosed in a MEM are replaced
807 with an ADDRESS rtx which also gives the constant, mode, and has
808 RTX_INTEGRATED_P set. */
/* If this is a CONST_DOUBLE, don't try to fix things up in
   CONST_DOUBLE_MEM, because that would cause infinite recursion.  */
if (GET_CODE (x) == CONST_DOUBLE)
  return;
824 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
825 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
827 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
828 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
829 RTX_INTEGRATED_P (new) = 1;
831 /* If the MEM was in a different mode than the constant (perhaps we
832 were only looking at the low-order part), surround it with a
833 SUBREG so we can save both modes. */
if (GET_MODE (x) != const_mode)
  {
    new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
    RTX_INTEGRATED_P (new) = 1;
  }

*px = new;
save_constants (&XEXP (*px, 0));
844 else if (GET_CODE (x) == SYMBOL_REF
845 && CONSTANT_POOL_ADDRESS_P (x))
847 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
848 save_constants (&XEXP (*px, 0));
849 RTX_INTEGRATED_P (*px) = 1;
854 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
855 int len = GET_RTX_LENGTH (GET_CODE (x));
857 for (i = len-1; i >= 0; i--)
862 for (j = 0; j < XVECLEN (x, i); j++)
863 save_constants (&XVECEXP (x, i, j));
867 if (XEXP (x, i) == 0)
871 /* Hack tail-recursion here. */
875 save_constants (&XEXP (x, i));
882 /* Note whether a parameter is modified or not. */
885 note_modified_parmregs (reg, x)
889 if (GET_CODE (reg) == REG && in_nonparm_insns
890 && REGNO (reg) < max_parm_reg
891 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
892 && parmdecl_map[REGNO (reg)] != 0)
893 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
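/* Clearing TREE_READONLY records that the parm is modified, so
   expand_inline_function will not substitute the caller's argument rtl
   for it directly.  */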
896 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
897 according to `reg_map' and `label_map'. The original rtl insns
898 will be saved for inlining; this is used to make a copy
899 which is used to finish compiling the inline function itself.
901 If we find a "saved" constant pool entry, one which was replaced with
902 the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.
906 All other kinds of rtx are copied except those that can never be
907 changed during compilation. */
910 copy_for_inline (orig)
913 register rtx x = orig;
915 register enum rtx_code code;
916 register char *format_ptr;
923 /* These types may be freely shared. */
935 /* We have to make a new CONST_DOUBLE to ensure that we account for
936 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
937 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
941 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
942 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
945 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
949 /* Get constant pool entry for constant in the pool. */
950 if (RTX_INTEGRATED_P (x))
951 return validize_mem (force_const_mem (GET_MODE (x),
952 copy_for_inline (XEXP (x, 0))));
956 /* Get constant pool entry, but access in different mode. */
957 if (RTX_INTEGRATED_P (x))
rtx new
  = force_const_mem (GET_MODE (SUBREG_REG (x)),
961 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
963 PUT_MODE (new, GET_MODE (x));
964 return validize_mem (new);
/* This should be a saved constant pool address; anything else is an
   error.  Otherwise, make a new constant pool entry for the constant
   and return its address.  */
if (! RTX_INTEGRATED_P (x))
  abort ();
974 return XEXP (force_const_mem (GET_MODE (x),
975 copy_for_inline (XEXP (x, 0))), 0);
978 /* If a single asm insn contains multiple output operands
979 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
980 We must make sure that the copied insn continues to share it. */
981 if (orig_asm_operands_vector == XVEC (orig, 3))
983 x = rtx_alloc (ASM_OPERANDS);
984 x->volatil = orig->volatil;
985 XSTR (x, 0) = XSTR (orig, 0);
986 XSTR (x, 1) = XSTR (orig, 1);
987 XINT (x, 2) = XINT (orig, 2);
988 XVEC (x, 3) = copy_asm_operands_vector;
989 XVEC (x, 4) = copy_asm_constraints_vector;
990 XSTR (x, 5) = XSTR (orig, 5);
991 XINT (x, 6) = XINT (orig, 6);
997 /* A MEM is usually allowed to be shared if its address is constant
998 or is a constant plus one of the special registers.
1000 We do not allow sharing of addresses that are either a special
1001 register or the sum of a constant and a special register because
   it is possible for unshare_all_rtl to copy the address into memory
   that won't be saved.  Although the MEM can safely be shared, and
   won't be copied there, the address itself cannot be shared, and may
   need to be copied.
1007 There are also two exceptions with constants: The first is if the
1008 constant is a LABEL_REF or the sum of the LABEL_REF
1009 and an integer. This case can happen if we have an inline
1010 function that supplies a constant operand to the call of another
1011 inline function that uses it in a switch statement. In this case,
   we will be replacing the LABEL_REF, so we have to replace this MEM
   as well.
1015 The second case is if we have a (const (plus (address ..) ...)).
   In that case we need to put back the address of the constant pool
   symbol.  */
1019 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1020 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1021 && ! (GET_CODE (XEXP (x, 0)) == CONST
1022 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1023 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1025 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1031 /* If this is a non-local label, just make a new LABEL_REF.
1032 Otherwise, use the new label as well. */
1033 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1034 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1035 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1036 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1037 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1041 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1042 return reg_map [REGNO (x)];
1047 /* If a parm that gets modified lives in a pseudo-reg,
1048 clear its TREE_READONLY to prevent certain optimizations. */
1050 rtx dest = SET_DEST (x);
1052 while (GET_CODE (dest) == STRICT_LOW_PART
1053 || GET_CODE (dest) == ZERO_EXTRACT
1054 || GET_CODE (dest) == SUBREG)
1055 dest = XEXP (dest, 0);
1057 if (GET_CODE (dest) == REG
1058 && REGNO (dest) < max_parm_reg
1059 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1060 && parmdecl_map[REGNO (dest)] != 0
1061 /* The insn to load an arg pseudo from a stack slot
1062 does not count as modifying it. */
1063 && in_nonparm_insns)
1064 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1068 #if 0 /* This is a good idea, but here is the wrong place for it. */
1069 /* Arrange that CONST_INTs always appear as the second operand
1070 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1071 always appear as the first. */
1073 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1074 || (XEXP (x, 1) == frame_pointer_rtx
1075 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1076 && XEXP (x, 1) == arg_pointer_rtx)))
1078 rtx t = XEXP (x, 0);
1079 XEXP (x, 0) = XEXP (x, 1);
1086 /* Replace this rtx with a copy of itself. */
1088 x = rtx_alloc (code);
1089 bcopy ((char *) orig, (char *) x,
1090 (sizeof (*x) - sizeof (x->fld)
1091 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
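/* The size copied above is the rtx header plus one operand slot for each
   of CODE's operands, i.e. the entire node.  */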
1093 /* Now scan the subexpressions recursively.
1094 We can store any replaced subexpressions directly into X
1095 since we know X is not shared! Any vectors in X
1096 must be copied if X was copied. */
1098 format_ptr = GET_RTX_FORMAT (code);
1100 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1102 switch (*format_ptr++)
1105 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1109 /* Change any references to old-insns to point to the
1110 corresponding copied insns. */
1111 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1115 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1119 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1120 for (j = 0; j < XVECLEN (x, i); j++)
1122 = copy_for_inline (XVECEXP (x, i, j));
1128 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1130 orig_asm_operands_vector = XVEC (orig, 3);
1131 copy_asm_operands_vector = XVEC (x, 3);
1132 copy_asm_constraints_vector = XVEC (x, 4);
1138 /* Unfortunately, we need a global copy of const_equiv map for communication
1139 with a function called from note_stores. Be *very* careful that this
1140 is used properly in the presence of recursion. */
1142 rtx *global_const_equiv_map;
1143 int global_const_equiv_map_size;
1145 #define FIXED_BASE_PLUS_P(X) \
1146 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1147 && GET_CODE (XEXP (X, 0)) == REG \
1148 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1149 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
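/* That is, FIXED_BASE_PLUS_P matches rtl such as
   (plus (reg virtual-stack-vars) (const_int N)): a constant offset from
   one of the virtual registers that stand for fixed base addresses.  */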
1151 /* Integrate the procedure defined by FNDECL. Note that this function
1152 may wind up calling itself. Since the static variables are not
1153 reentrant, we do not assign them until after the possibility
1154 of recursion is eliminated.
1156 If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
1160 (rtx)-1 if we could not substitute the function
1161 0 if we substituted it and it does not produce a value
1162 else an rtx for where the value is stored. */
1165 expand_inline_function (fndecl, parms, target, ignore, type,
1166 structure_value_addr)
1171 rtx structure_value_addr;
1173 tree formal, actual, block;
1174 rtx header = DECL_SAVED_INSNS (fndecl);
1175 rtx insns = FIRST_FUNCTION_INSN (header);
1176 rtx parm_insns = FIRST_PARM_INSN (header);
1182 int min_labelno = FIRST_LABELNO (header);
1183 int max_labelno = LAST_LABELNO (header);
1185 rtx local_return_label = 0;
1189 struct inline_remap *map;
1191 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1192 rtx static_chain_value = 0;
1194 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1195 max_regno = MAX_REGNUM (header) + 3;
if (max_regno < FIRST_PSEUDO_REGISTER)
  abort ();
1199 nargs = list_length (DECL_ARGUMENTS (fndecl));
1201 /* Check that the parms type match and that sufficient arguments were
1202 passed. Since the appropriate conversions or default promotions have
1203 already been applied, the machine modes should match exactly. */
1205 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1207 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1210 enum machine_mode mode;
if (actual == 0)
  return (rtx) (HOST_WIDE_INT) -1;
1215 arg = TREE_VALUE (actual);
1216 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1218 if (mode != TYPE_MODE (TREE_TYPE (arg))
1219 /* If they are block mode, the types should match exactly.
1220 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1221 which could happen if the parameter has incomplete type. */
1222 || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
1223 return (rtx) (HOST_WIDE_INT) -1;
1226 /* Extra arguments are valid, but will be ignored below, so we must
1227 evaluate them here for side-effects. */
1228 for (; actual; actual = TREE_CHAIN (actual))
1229 expand_expr (TREE_VALUE (actual), const0_rtx,
1230 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1232 /* Make a binding contour to keep inline cleanups called at
1233 outer function-scope level from looking like they are shadowing
1234 parameter declarations. */
1237 /* Make a fresh binding contour that we can easily remove. */
1239 expand_start_bindings (0);
1241 /* Expand the function arguments. Do this first so that any
1242 new registers get created before we allocate the maps. */
1244 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1245 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1247 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1249 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1251 /* Actual parameter, converted to the type of the argument within the
1253 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1254 /* Mode of the variable used within the function. */
1255 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1259 loc = RTVEC_ELT (arg_vector, i);
1261 /* If this is an object passed by invisible reference, we copy the
1262 object into a stack slot and save its address. If this will go
   into memory, we do nothing now.  Otherwise, we just expand the
   argument.  */
1265 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1266 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
rtx stack_slot
  = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1270 int_size_in_bytes (TREE_TYPE (arg)), 1);
1271 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1273 store_expr (arg, stack_slot, 0);
1275 arg_vals[i] = XEXP (stack_slot, 0);
1278 else if (GET_CODE (loc) != MEM)
1280 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
/* The mode of LOC and ARG can differ if LOC was a variable
1282 that had its mode promoted via PROMOTED_MODE. */
1283 arg_vals[i] = convert_modes (GET_MODE (loc),
1284 TYPE_MODE (TREE_TYPE (arg)),
1285 expand_expr (arg, NULL_RTX, mode,
1287 TREE_UNSIGNED (TREE_TYPE (formal)));
else
  arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1294 if (arg_vals[i] != 0
1295 && (! TREE_READONLY (formal)
1296 /* If the parameter is not read-only, copy our argument through
1297 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1298 TARGET in any way. In the inline function, they will likely
1299 be two different pseudos, and `safe_from_p' will make all
1300 sorts of smart assumptions about their not conflicting.
1301 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1302 wrong, so put ARG_VALS[I] into a fresh register.
1303 Don't worry about invisible references, since their stack
1304 temps will never overlap the target. */
1307 && (GET_CODE (arg_vals[i]) == REG
1308 || GET_CODE (arg_vals[i]) == SUBREG
1309 || GET_CODE (arg_vals[i]) == MEM)
1310 && reg_overlap_mentioned_p (arg_vals[i], target))
1311 /* ??? We must always copy a SUBREG into a REG, because it might
1312 get substituted into an address, and not all ports correctly
1313 handle SUBREGs in addresses. */
1314 || (GET_CODE (arg_vals[i]) == SUBREG)))
1315 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1317 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1318 && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
1319 mark_reg_pointer (arg_vals[i],
1320 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1324 /* Allocate the structures we use to remap things. */
1326 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1327 map->fndecl = fndecl;
1329 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1330 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1332 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1333 map->label_map -= min_labelno;
1335 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1336 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1337 map->min_insnno = 0;
1338 map->max_insnno = INSN_UID (header);
1340 map->integrating = 1;
1342 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1343 be large enough for all our pseudos. This is the number we are currently
1344 using plus the number in the called routine, plus 15 for each arg,
1345 five to compute the virtual frame pointer, and five for the return value.
1346 This should be enough for most cases. We do not reference entries
1347 outside the range of the map.
1349 ??? These numbers are quite arbitrary and were obtained by
1350 experimentation. At some point, we should try to allocate the
   table after all the parameters are set up so we can more accurately
1352 estimate the number of pseudos we will need. */
1354 map->const_equiv_map_size
1355 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
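/* The trailing 10 here is the five pseudos for the virtual frame pointer
   plus the five for the return value mentioned above.  */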
1357 map->const_equiv_map
1358 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1359 bzero ((char *) map->const_equiv_map,
1360 map->const_equiv_map_size * sizeof (rtx));
map->const_age_map
  = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1364 bzero ((char *) map->const_age_map,
1365 map->const_equiv_map_size * sizeof (unsigned));
1368 /* Record the current insn in case we have to set up pointers to frame
1369 and argument memory blocks. */
1370 map->insns_at_start = get_last_insn ();
1372 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1373 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
/* Update the outgoing argument size to allow for those in the inlined
   function.  */
1377 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1378 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1380 /* If the inline function needs to make PIC references, that means
1381 that this function's PIC offset table must be used. */
1382 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1383 current_function_uses_pic_offset_table = 1;
1385 /* If this function needs a context, set it up. */
1386 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1387 static_chain_value = lookup_static_chain (fndecl);
1389 if (GET_CODE (parm_insns) == NOTE
1390 && NOTE_LINE_NUMBER (parm_insns) > 0)
1392 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1393 NOTE_LINE_NUMBER (parm_insns));
1395 RTX_INTEGRATED_P (note) = 1;
1398 /* Process each argument. For each, set up things so that the function's
1399 reference to the argument will refer to the argument being passed.
1400 We only replace REG with REG here. Any simplifications are done
1401 via const_equiv_map.
1403 We make two passes: In the first, we deal with parameters that will
1404 be placed into registers, since we need to ensure that the allocated
1405 register number fits in const_equiv_map. Then we store all non-register
1406 parameters into their memory location. */
1408 /* Don't try to free temp stack slots here, because we may put one of the
1409 parameters into a temp stack slot. */
1411 for (i = 0; i < nargs; i++)
1413 rtx copy = arg_vals[i];
1415 loc = RTVEC_ELT (arg_vector, i);
1417 /* There are three cases, each handled separately. */
1418 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1419 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1421 /* This must be an object passed by invisible reference (it could
1422 also be a variable-sized object, but we forbid inlining functions
1423 with variable-sized arguments). COPY is the address of the
1424 actual value (this computation will cause it to be copied). We
1425 map that address for the register, noting the actual address as
1426 an equivalent in case it can be substituted into the insns. */
1428 if (GET_CODE (copy) != REG)
1430 temp = copy_addr_to_reg (copy);
1431 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1432 && REGNO (temp) < map->const_equiv_map_size)
1434 map->const_equiv_map[REGNO (temp)] = copy;
1435 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1439 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1441 else if (GET_CODE (loc) == MEM)
1443 /* This is the case of a parameter that lives in memory.
1444 It will live in the block we allocate in the called routine's
1445 frame that simulates the incoming argument area. Do nothing
1446 now; we will call store_expr later. */
1449 else if (GET_CODE (loc) == REG)
1451 /* This is the good case where the parameter is in a register.
1452 If it is read-only and our argument is a constant, set up the
1453 constant equivalence.
1455 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1456 that flag set if it is a register.
1458 Also, don't allow hard registers here; they might not be valid
1459 when substituted into insns. */
1461 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1462 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1463 && ! REG_USERVAR_P (copy))
1464 || (GET_CODE (copy) == REG
1465 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1467 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1468 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1469 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1470 && REGNO (temp) < map->const_equiv_map_size)
1472 map->const_equiv_map[REGNO (temp)] = copy;
1473 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1477 map->reg_map[REGNO (loc)] = copy;
1479 else if (GET_CODE (loc) == CONCAT)
1481 /* This is the good case where the parameter is in a
1482 pair of separate pseudos.
1483 If it is read-only and our argument is a constant, set up the
1484 constant equivalence.
1486 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1487 that flag set if it is a register.
1489 Also, don't allow hard registers here; they might not be valid
1490 when substituted into insns. */
1491 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1492 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1493 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1494 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1496 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1497 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1498 && ! REG_USERVAR_P (copyreal))
1499 || (GET_CODE (copyreal) == REG
1500 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1502 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1503 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1504 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1505 && REGNO (temp) < map->const_equiv_map_size)
1507 map->const_equiv_map[REGNO (temp)] = copyreal;
1508 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1512 map->reg_map[REGNO (locreal)] = copyreal;
1514 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1515 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1516 && ! REG_USERVAR_P (copyimag))
1517 || (GET_CODE (copyimag) == REG
1518 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1520 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1521 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1522 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1523 && REGNO (temp) < map->const_equiv_map_size)
1525 map->const_equiv_map[REGNO (temp)] = copyimag;
1526 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1530 map->reg_map[REGNO (locimag)] = copyimag;
1536 /* Now do the parameters that will be placed in memory. */
1538 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1539 formal; formal = TREE_CHAIN (formal), i++)
1541 loc = RTVEC_ELT (arg_vector, i);
1543 if (GET_CODE (loc) == MEM
1544 /* Exclude case handled above. */
1545 && ! (GET_CODE (XEXP (loc, 0)) == REG
1546 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1548 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1549 DECL_SOURCE_LINE (formal));
1551 RTX_INTEGRATED_P (note) = 1;
/* Compute the address in the area we reserved and store the
   value there.  */
1555 temp = copy_rtx_and_substitute (loc, map);
1556 subst_constants (&temp, NULL_RTX, map);
1557 apply_change_group ();
1558 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1559 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1560 store_expr (arg_trees[i], temp, 0);
1564 /* Deal with the places that the function puts its result.
1565 We are driven by what is placed into DECL_RESULT.
   Initially, we assume that we don't have any special handling for
   REG_FUNCTION_RETURN_VALUE_P.  */
1570 map->inline_target = 0;
1571 loc = DECL_RTL (DECL_RESULT (fndecl));
1572 if (TYPE_MODE (type) == VOIDmode)
1573 /* There is no return value to worry about. */
1575 else if (GET_CODE (loc) == MEM)
if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
  abort ();
1580 /* Pass the function the address in which to return a structure value.
1581 Note that a constructor can cause someone to call us with
1582 STRUCTURE_VALUE_ADDR, but the initialization takes place
1583 via the first parameter, rather than the struct return address.
1585 We have two cases: If the address is a simple register indirect,
1586 use the mapping mechanism to point that register to our structure
1587 return address. Otherwise, store the structure return value into
1588 the place that it will be referenced from. */
1590 if (GET_CODE (XEXP (loc, 0)) == REG)
1592 temp = force_reg (Pmode, structure_value_addr);
1593 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1594 if ((CONSTANT_P (structure_value_addr)
1595 || (GET_CODE (structure_value_addr) == PLUS
1596 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1597 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1598 && REGNO (temp) < map->const_equiv_map_size)
1600 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1601 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1606 temp = copy_rtx_and_substitute (loc, map);
1607 subst_constants (&temp, NULL_RTX, map);
1608 apply_change_group ();
1609 emit_move_insn (temp, structure_value_addr);
1613 /* We will ignore the result value, so don't look at its structure.
1614 Note that preparations for an aggregate return value
1615 do need to be made (above) even if it will be ignored. */
1617 else if (GET_CODE (loc) == REG)
1619 /* The function returns an object in a register and we use the return
1620 value. Set up our target for remapping. */
1622 /* Machine mode function was declared to return. */
1623 enum machine_mode departing_mode = TYPE_MODE (type);
1624 /* (Possibly wider) machine mode it actually computes
1625 (for the sake of callers that fail to declare it right). */
1626 enum machine_mode arriving_mode
1627 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1630 /* Don't use MEMs as direct targets because on some machines
1631 substituting a MEM for a REG makes invalid insns.
1632 Let the combiner substitute the MEM if that is valid. */
1633 if (target == 0 || GET_CODE (target) != REG
1634 || GET_MODE (target) != departing_mode)
1635 target = gen_reg_rtx (departing_mode);
1637 /* If function's value was promoted before return,
1638 avoid machine mode mismatch when we substitute INLINE_TARGET.
1639 But TARGET is what we will return to the caller. */
1640 if (arriving_mode != departing_mode)
1641 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
else
  reg_to_map = target;
1645 /* Usually, the result value is the machine's return register.
1646 Sometimes it may be a pseudo. Handle both cases. */
1647 if (REG_FUNCTION_VALUE_P (loc))
1648 map->inline_target = reg_to_map;
else
  map->reg_map[REGNO (loc)] = reg_to_map;
1653 /* Make new label equivalences for the labels in the called function. */
1654 for (i = min_labelno; i < max_labelno; i++)
1655 map->label_map[i] = gen_label_rtx ();
1657 /* Perform postincrements before actually calling the function. */
1660 /* Clean up stack so that variables might have smaller offsets. */
1661 do_pending_stack_adjust ();
/* Save a copy of the location of const_equiv_map for mark_stores, called
   via note_stores.  */
1665 global_const_equiv_map = map->const_equiv_map;
1666 global_const_equiv_map_size = map->const_equiv_map_size;
1668 /* If the called function does an alloca, save and restore the
1669 stack pointer around the call. This saves stack space, but
   also is required if this inline is being done between two
   pushes.  */
1672 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1673 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1675 /* Now copy the insns one by one. Do this in two passes, first the insns and
1676 then their REG_NOTES, just like save_for_inline. */
1678 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1680 for (insn = insns; insn; insn = NEXT_INSN (insn))
1682 rtx copy, pattern, set;
1684 map->orig_asm_operands_vector = 0;
1686 switch (GET_CODE (insn))
1689 pattern = PATTERN (insn);
1690 set = single_set (insn);
1692 if (GET_CODE (pattern) == USE
1693 && GET_CODE (XEXP (pattern, 0)) == REG
1694 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1695 /* The (USE (REG n)) at return from the function should
   be ignored since we are changing (REG n) into
   inline_target.  */
break;
1700 /* Ignore setting a function value that we don't want to use. */
1701 if (map->inline_target == 0
1703 && GET_CODE (SET_DEST (set)) == REG
1704 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1706 if (volatile_refs_p (SET_SRC (set)))
1710 /* If we must not delete the source,
1711 load it into a new temporary. */
1712 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1714 new_set = single_set (copy);
SET_DEST (new_set)
  = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1721 /* If the source and destination are the same and it
1722 has a note on it, keep the insn. */
1723 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1724 && REG_NOTES (insn) != 0)
1725 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1730 /* If this is setting the static chain rtx, omit it. */
1731 else if (static_chain_value != 0
1733 && GET_CODE (SET_DEST (set)) == REG
1734 && rtx_equal_p (SET_DEST (set),
1735 static_chain_incoming_rtx))
1738 /* If this is setting the static chain pseudo, set it from
1739 the value we want to give it instead. */
1740 else if (static_chain_value != 0
1742 && rtx_equal_p (SET_SRC (set),
1743 static_chain_incoming_rtx))
1745 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1747 copy = emit_move_insn (newdest, static_chain_value);
1748 static_chain_value = 0;
1751 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1752 /* REG_NOTES will be copied later. */
1755 /* If this insn is setting CC0, it may need to look at
1756 the insn that uses CC0 to see what type of insn it is.
1757 In that case, the call to recog via validate_change will
1758 fail. So don't substitute constants here. Instead,
1759 do it when we emit the following insn.
1761 For example, see the pyr.md file. That machine has signed and
1762 unsigned compares. The compare patterns must check the
   following branch insn to see what kind of compare to
   emit.

   If the previous insn set CC0, substitute constants on it as
   well.  */
1768 if (sets_cc0_p (PATTERN (copy)) != 0)
1773 try_constants (cc0_insn, map);
1775 try_constants (copy, map);
1778 try_constants (copy, map);
1783 if (GET_CODE (PATTERN (insn)) == RETURN)
1785 if (local_return_label == 0)
1786 local_return_label = gen_label_rtx ();
1787 pattern = gen_jump (local_return_label);
else
  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1792 copy = emit_jump_insn (pattern);
1796 try_constants (cc0_insn, map);
1799 try_constants (copy, map);
1801 /* If this used to be a conditional jump insn whose branch
1802 direction is now known, we must do something special. */
1803 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1806 /* The previous insn set cc0 for us. So delete it. */
1807 delete_insn (PREV_INSN (copy));
1810 /* If this is now a no-op, delete it. */
1811 if (map->last_pc_value == pc_rtx)
1817 /* Otherwise, this is now an unconditional jump, so we must put a
1818 BARRIER after it. We could do some dead code elimination
1819 here, but jump.c will do it just as well. */
1825 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1826 copy = emit_call_insn (pattern);
1828 /* Because the USAGE information potentially contains objects other
1829 than hard registers, we need to copy it. */
1830 CALL_INSN_FUNCTION_USAGE (copy) =
1831 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1835 try_constants (cc0_insn, map);
1838 try_constants (copy, map);
1840 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1841 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1842 map->const_equiv_map[i] = 0;
1846 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1847 LABEL_NAME (copy) = LABEL_NAME (insn);
1852 copy = emit_barrier ();
1856 /* It is important to discard function-end and function-beg notes,
1857 so we have only one of each in the current function.
1858 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1859 deleted these in the copy used for continuing compilation,
1860 not the copy used for inlining). */
1861 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1862 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1863 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1864 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1875 RTX_INTEGRATED_P (copy) = 1;
1877 map->insn_map[INSN_UID (insn)] = copy;
1880 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1881 from parameters can be substituted in. These are the only ones that
1882 are valid across the entire function. */
1884 for (insn = insns; insn; insn = NEXT_INSN (insn))
1885 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1886 && map->insn_map[INSN_UID (insn)]
1887 && REG_NOTES (insn))
1889 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1890 /* We must also do subst_constants, in case one of our parameters
1891 has const type and constant value. */
1892 subst_constants (&tem, NULL_RTX, map);
1893 apply_change_group ();
1894 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1897 if (local_return_label)
1898 emit_label (local_return_label);
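/* Every RETURN in the inline body was rewritten above as a jump to
   LOCAL_RETURN_LABEL, so all of the copied return points rejoin the
   calling function here.  */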
1900 /* Restore the stack pointer if we saved it above. */
1901 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1902 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1904 /* Make copies of the decls of the symbols in the inline function, so that
1905 the copies of the variables get declared in the current function. Set
1906 up things so that lookup_static_chain knows to interpret registers
1907 in SAVE_EXPRs for TYPE_SIZEs as local. */
1909 inline_function_decl = fndecl;
1910 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1911 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1912 inline_function_decl = 0;
1914 /* End the scope containing the copied formal parameter variables
1915 and copied LABEL_DECLs. */
1917 expand_end_bindings (getdecls (), 1, 1);
1918 block = poplevel (1, 1, 0);
1919 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1920 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1922 emit_line_note (input_filename, lineno);
1924 if (structure_value_addr)
1926 target = gen_rtx (MEM, TYPE_MODE (type),
1927 memory_address (TYPE_MODE (type), structure_value_addr));
1928 MEM_IN_STRUCT_P (target) = 1;
1933 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1934 push all of those decls and give each one the corresponding home. */
1937 integrate_parm_decls (args, map, arg_vector)
1939 struct inline_remap *map;
1945 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1947 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1950 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1952 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1953 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1954 here, but that's going to require some more work. */
1955 /* DECL_INCOMING_RTL (decl) = ?; */
1956 /* These args would always appear unused, if not for this. */
1957 TREE_USED (decl) = 1;
1958 /* Prevent warning for shadowing with these. */
1959 DECL_ABSTRACT_ORIGIN (decl) = tail;
1961 /* Fully instantiate the address with the equivalent form so that the
1962 debugging information contains the actual register, instead of the
1963 virtual register. Do this by not passing an insn to subst_constants. */
1965 subst_constants (&new_decl_rtl, NULL_RTX, map);
1966 apply_change_group ();
1967 DECL_RTL (decl) = new_decl_rtl;
1971 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1972 current function a tree of contexts isomorphic to the one that is given.
1974 LEVEL indicates how far down into the BLOCK tree is the node we are
1975 currently traversing. It is always zero except for recursive calls.
1977 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1978 registers used in the DECL_RTL field should be remapped. If it is zero,
1979 no mapping is necessary. */
1982 integrate_decl_tree (let, level, map)
1985 struct inline_remap *map;
1992 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1996 push_obstacks_nochange ();
1997 saveable_allocation ();
2001 if (DECL_RTL (t) != 0)
2003 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2004 /* Fully instantiate the address with the equivalent form so that the
2005 debugging information contains the actual register, instead of the
2006 virtual register. Do this by not passing an insn to subst_constants. */
2008 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2009 apply_change_group ();
2011 /* These args would always appear unused, if not for this. */
2013 /* Prevent warning for shadowing with these. */
2014 DECL_ABSTRACT_ORIGIN (d) = t;
2016 if (DECL_LANG_SPECIFIC (d))
2022 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2023 integrate_decl_tree (t, level + 1, map);
2027 node = poplevel (1, 0, 0);
2030 TREE_USED (node) = TREE_USED (let);
2031 BLOCK_ABSTRACT_ORIGIN (node) = let;
2036 /* Create a new copy of an rtx.
2037 Recursively copies the operands of the rtx,
2038 except for those few rtx codes that are sharable.
2040 We always return an rtx that is similar to that incoming rtx, with the
2041 exception of possibly changing a REG to a SUBREG or vice versa. No
2042 rtl is ever emitted.
2044 Handle constants that need to be placed in the constant pool by
2045 calling `force_const_mem'. */
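/* A minimal usage sketch (illustrative only; the register number 60 is an
   arbitrary example, not taken from this file):

     copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map));

   A pseudo such as (reg:SI 60) appearing in the inline function's body is
   rewritten through map->reg_map into a register of the current function,
   while the few sharable rtx codes are returned unchanged.  */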
2048 copy_rtx_and_substitute (orig, map)
2050 struct inline_remap *map;
2052 register rtx copy, temp;
2054 register RTX_CODE code;
2055 register enum machine_mode mode;
2056 register char *format_ptr;
2062 code = GET_CODE (orig);
2063 mode = GET_MODE (orig);
2068 /* If the stack pointer register shows up, it must be part of
2069 stack-adjustments (*not* because we eliminated the frame pointer!).
2070 Small hard registers are returned as-is. Pseudo-registers
2071 go through their `reg_map'. */
2072 regno = REGNO (orig);
2073 if (regno <= LAST_VIRTUAL_REGISTER)
2075 /* Some hard registers are also mapped,
2076 but others are not translated. */
2077 if (map->reg_map[regno] != 0)
2078 return map->reg_map[regno];
2080 /* If this is the virtual frame pointer, make space in current
2081 function's stack frame for the stack frame of the inline function.
2083 Copy the address of this area into a pseudo. Map
2084 virtual_stack_vars_rtx to this pseudo and set up a constant
2085 equivalence for it to be the address. This will substitute the
2086 address into insns where it can be substituted and use the new
2087 pseudo where it can't. */
2088 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2091 int size = DECL_FRAME_SIZE (map->fndecl);
2095 loc = assign_stack_temp (BLKmode, size, 1);
2096 loc = XEXP (loc, 0);
2097 #ifdef FRAME_GROWS_DOWNWARD
2098 /* In this case, virtual_stack_vars_rtx points to one byte
2099 higher than the top of the frame area. So compute the offset
2100 to one byte higher than our substitute frame.
2101 Keep the fake frame pointer aligned like a real one. */
2102 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2103 loc = plus_constant (loc, rounded);
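/* Worked example (assuming BIGGEST_ALIGNMENT / BITS_PER_UNIT is 8 on the
   target): for a 20-byte inline frame,
     CEIL_ROUND (20, 8) == ((20 + 7) & ~7) == 24,
   so the substitute frame pointer stays 8-byte aligned like a real one.  */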
2105 map->reg_map[regno] = temp
2106 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2108 #ifdef STACK_BOUNDARY
2109 mark_reg_pointer (map->reg_map[regno],
2110 STACK_BOUNDARY / BITS_PER_UNIT);
2113 if (REGNO (temp) < map->const_equiv_map_size)
2115 map->const_equiv_map[REGNO (temp)] = loc;
2116 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2119 seq = gen_sequence ();
2121 emit_insn_after (seq, map->insns_at_start);
2124 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2126 /* Do the same for a block to contain any arguments referenced in memory. */
2129 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2132 loc = assign_stack_temp (BLKmode, size, 1);
2133 loc = XEXP (loc, 0);
2134 /* When arguments grow downward, the virtual incoming
2135 args pointer points to the top of the argument block,
2136 so the remapped location better do the same. */
2137 #ifdef ARGS_GROW_DOWNWARD
2138 loc = plus_constant (loc, size);
2140 map->reg_map[regno] = temp
2141 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2143 #ifdef STACK_BOUNDARY
2144 mark_reg_pointer (map->reg_map[regno],
2145 STACK_BOUNDARY / BITS_PER_UNIT);
2148 if (REGNO (temp) < map->const_equiv_map_size)
2150 map->const_equiv_map[REGNO (temp)] = loc;
2151 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2154 seq = gen_sequence ();
2156 emit_insn_after (seq, map->insns_at_start);
2159 else if (REG_FUNCTION_VALUE_P (orig))
2161 /* This is a reference to the function return value. If
2162 the function doesn't have a return value, error. If the
2163 mode doesn't agree, make a SUBREG. */
2164 if (map->inline_target == 0)
2165 /* Must be unrolling loops or replicating code if we
2166 reach here, so return the register unchanged. */
2168 else if (mode != GET_MODE (map->inline_target))
2169 return gen_lowpart (mode, map->inline_target);
2171 return map->inline_target;
2175 if (map->reg_map[regno] == NULL)
2177 map->reg_map[regno] = gen_reg_rtx (mode);
2178 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2179 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2180 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2181 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2183 if (map->regno_pointer_flag[regno])
2184 mark_reg_pointer (map->reg_map[regno],
2185 map->regno_pointer_align[regno]);
2187 return map->reg_map[regno];
2190 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2191 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2192 if (GET_CODE (copy) == SUBREG)
2193 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2194 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2195 else if (GET_CODE (copy) == CONCAT)
2196 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2198 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2199 SUBREG_WORD (orig));
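/* Illustration of the nested-SUBREG case above (the register numbers are
   made up): if ORIG is (subreg:SI (reg:DI 60) 1) and reg 60 is already
   mapped to (subreg:DI (reg:TI 70) 2), the result is
   (subreg:SI (reg:TI 70) 3); the word offsets simply add and no SUBREG of
   a SUBREG is created.  */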
2203 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2204 to (use foo) if the original insn didn't have a subreg.
2205 Removing the subreg distorts the VAX movstrhi pattern
2206 by changing the mode of an operand. */
2207 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2208 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2209 copy = SUBREG_REG (copy);
2210 return gen_rtx (code, VOIDmode, copy);
2213 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2214 = LABEL_PRESERVE_P (orig);
2215 return map->label_map[CODE_LABEL_NUMBER (orig)];
2218 copy = gen_rtx (LABEL_REF, mode,
2219 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2220 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2221 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2223 /* The fact that this label was previously nonlocal does not mean
2224 it still is, so we must check if it is within the range of
2225 this function's labels. */
2226 LABEL_REF_NONLOCAL_P (copy)
2227 = (LABEL_REF_NONLOCAL_P (orig)
2228 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2229 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2231 /* If we have made a nonlocal label local, it means that this
2232 inlined call will be referring to our nonlocal goto handler.
2233 So make sure we create one for this block; we normally would
2234 not since this is not otherwise considered a "call". */
2235 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2236 function_call_count++;
2246 /* Symbols which represent the address of a label stored in the constant
2247 pool must be modified to point to a constant pool entry for the
2248 remapped label. Otherwise, symbols are returned unchanged. */
2249 if (CONSTANT_POOL_ADDRESS_P (orig))
2251 rtx constant = get_pool_constant (orig);
2252 if (GET_CODE (constant) == LABEL_REF)
2253 return XEXP (force_const_mem (Pmode,
2254 copy_rtx_and_substitute (constant,
2262 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2263 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2264 duplicate of a CONST_DOUBLE we have already seen. */
2265 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2269 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2270 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2273 return immed_double_const (CONST_DOUBLE_LOW (orig),
2274 CONST_DOUBLE_HIGH (orig), VOIDmode);
2277 /* Make new constant pool entry for a constant
2278 that was in the pool of the inline function. */
2279 if (RTX_INTEGRATED_P (orig))
2281 /* If this was an address of a constant pool entry that itself
2282 had to be placed in the constant pool, it might not be a
2283 valid address. So the recursive call below might turn it
2284 into a register. In that case, it isn't a constant any
2285 more, so return it. This has the potential of changing a
2286 MEM into a REG, but we'll assume that it is safe. */
2287 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2288 if (! CONSTANT_P (temp))
2290 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2295 /* If from constant pool address, make new constant pool entry and
2296 return its address. */
2297 if (! RTX_INTEGRATED_P (orig))
2300 temp = force_const_mem (GET_MODE (orig),
2301 copy_rtx_and_substitute (XEXP (orig, 0), map));
2304 /* Legitimizing the address here is incorrect.
2306 The only ADDRESS rtx's that can reach here are ones created by
2307 save_constants. Hence the operand of the ADDRESS is always valid
2308 in this position of the instruction, since the original rtx without
2309 the ADDRESS was valid.
2311 The reason we don't legitimize the address here is that on the
2312 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2313 This code forces the operand of the address to a register, which
2314 fails because we can not take the HIGH part of a register.
2316 Also, change_address may create new registers. These registers
2317 will not have valid reg_map entries. This can cause try_constants()
2318 to fail because it assumes that all registers in the rtx have valid
2319 reg_map entries, and it may end up replacing one of these new
2320 registers with junk. */
2322 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2323 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2326 return XEXP (temp, 0);
2329 /* If a single asm insn contains multiple output operands
2330 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2331 We must make sure that the copied insn continues to share it. */
2332 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2334 copy = rtx_alloc (ASM_OPERANDS);
2335 copy->volatil = orig->volatil;
2336 XSTR (copy, 0) = XSTR (orig, 0);
2337 XSTR (copy, 1) = XSTR (orig, 1);
2338 XINT (copy, 2) = XINT (orig, 2);
2339 XVEC (copy, 3) = map->copy_asm_operands_vector;
2340 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2341 XSTR (copy, 5) = XSTR (orig, 5);
2342 XINT (copy, 6) = XINT (orig, 6);
2348 /* This is given special treatment because the first
2349 operand of a CALL is a (MEM ...) which may get
2350 forced into a register for cse. This is undesirable
2351 if function-address cse isn't wanted or if we won't do cse. */
2352 #ifndef NO_FUNCTION_CSE
2353 if (! (optimize && ! flag_no_function_cse))
2355 return gen_rtx (CALL, GET_MODE (orig),
2356 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2357 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2358 copy_rtx_and_substitute (XEXP (orig, 1), map));
2362 /* Must be ifdefed out for loop unrolling to work. */
2368 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2370 If the nonlocal goto is into the current function,
2371 this will result in unnecessarily bad code, but should work. */
2372 if (SET_DEST (orig) == virtual_stack_vars_rtx
2373 || SET_DEST (orig) == virtual_incoming_args_rtx)
2374 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2375 copy_rtx_and_substitute (SET_SRC (orig), map));
2379 copy = rtx_alloc (MEM);
2380 PUT_MODE (copy, mode);
2381 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2382 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2383 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2385 /* If doing function inlining, this MEM might not be const in the
2386 function that it is being inlined into, and thus may not be
2387 unchanging after function inlining. Constant pool references are
2388 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits. */
2390 if (! map->integrating)
2391 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2396 copy = rtx_alloc (code);
2397 PUT_MODE (copy, mode);
2398 copy->in_struct = orig->in_struct;
2399 copy->volatil = orig->volatil;
2400 copy->unchanging = orig->unchanging;
2402 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2404 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2406 switch (*format_ptr++)
2412 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2416 /* Change any references to old-insns to point to the
2417 corresponding copied insns. */
2418 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2422 XVEC (copy, i) = XVEC (orig, i);
2423 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2425 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2426 for (j = 0; j < XVECLEN (copy, i); j++)
2427 XVECEXP (copy, i, j)
2428 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2433 XWINT (copy, i) = XWINT (orig, i);
2437 XINT (copy, i) = XINT (orig, i);
2441 XSTR (copy, i) = XSTR (orig, i);
2449 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2451 map->orig_asm_operands_vector = XVEC (orig, 3);
2452 map->copy_asm_operands_vector = XVEC (copy, 3);
2453 map->copy_asm_constraints_vector = XVEC (copy, 4);
2459 /* Substitute known constant values into INSN, if that is valid. */
2462 try_constants (insn, map)
2464 struct inline_remap *map;
2469 subst_constants (&PATTERN (insn), insn, map);
2471 /* Apply the changes if they are valid; otherwise discard them. */
2472 apply_change_group ();
2474 /* Show we don't know the value of anything stored or clobbered. */
2475 note_stores (PATTERN (insn), mark_stores);
2476 map->last_pc_value = 0;
2478 map->last_cc0_value = 0;
2481 /* Set up any constant equivalences made in this insn. */
2482 for (i = 0; i < map->num_sets; i++)
2484 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2486 int regno = REGNO (map->equiv_sets[i].dest);
2488 if (regno < map->const_equiv_map_size
2489 && (map->const_equiv_map[regno] == 0
2490 /* The following clause is a hack to make the case work where GNU C++
2491 reassigns a variable to make cse work right. */
2492 || ! rtx_equal_p (map->const_equiv_map[regno],
2493 map->equiv_sets[i].equiv)))
2495 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2496 map->const_age_map[regno] = map->const_age;
2499 else if (map->equiv_sets[i].dest == pc_rtx)
2500 map->last_pc_value = map->equiv_sets[i].equiv;
2502 else if (map->equiv_sets[i].dest == cc0_rtx)
2503 map->last_cc0_value = map->equiv_sets[i].equiv;
2508 /* Substitute known constants for pseudo regs in the contents of LOC,
2509 which are part of INSN.
2510 If INSN is zero, the substitution should always be done (this is used to update DECL_RTL).
2512 These changes are taken out by try_constants if the result is not valid.
2514 Note that we are more concerned with determining when the result of a SET
2515 is a constant, for further propagation, than actually inserting constants
2516 into insns; cse will do the latter task better.
2518 This function is also used to adjust the addresses of items previously addressed
2519 via the virtual stack variable or virtual incoming arguments registers. */
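/* Small worked example (the pseudo number and the constant are arbitrary
   illustrations): if map->const_equiv_map[60] holds (const_int 4) with a
   recent enough const_age, then scanning
     (set (reg:SI 61) (plus:SI (reg:SI 60) (reg:SI 62)))
   queues a validate_change replacing (reg:SI 60) with (const_int 4); the
   change only survives if apply_change_group in try_constants accepts the
   resulting insn.  */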
2522 subst_constants (loc, insn, map)
2525 struct inline_remap *map;
2529 register enum rtx_code code;
2530 register char *format_ptr;
2531 int num_changes = num_validated_changes ();
2533 enum machine_mode op0_mode;
2535 code = GET_CODE (x);
2550 validate_change (insn, loc, map->last_cc0_value, 1);
2556 /* The only thing we can do with a USE or CLOBBER is possibly do
2557 some substitutions in a MEM within it. */
2558 if (GET_CODE (XEXP (x, 0)) == MEM)
2559 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2563 /* Substitute for parms and known constants. Don't replace
2564 hard regs used as user variables with constants. */
2566 int regno = REGNO (x);
2568 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2569 && regno < map->const_equiv_map_size
2570 && map->const_equiv_map[regno] != 0
2571 && map->const_age_map[regno] >= map->const_age)
2572 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2577 /* SUBREG applied to something other than a reg
2578 should be treated as ordinary, since that must
2579 be a special hack and we don't know how to treat it specially.
2580 Consider for example mulsidi3 in m68k.md.
2581 Ordinary SUBREG of a REG needs this special treatment. */
2582 if (GET_CODE (SUBREG_REG (x)) == REG)
2584 rtx inner = SUBREG_REG (x);
2587 /* We can't call subst_constants on &SUBREG_REG (x) because any
2588 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2589 see what is inside, try to form the new SUBREG and see if that is
2590 valid. We handle two cases: extracting a full word in an
2591 integral mode and extracting the low part. */
2592 subst_constants (&inner, NULL_RTX, map);
2594 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2595 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2596 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2597 new = operand_subword (inner, SUBREG_WORD (x), 0,
2598 GET_MODE (SUBREG_REG (x)));
2600 if (new == 0 && subreg_lowpart_p (x))
2601 new = gen_lowpart_common (GET_MODE (x), inner);
2604 validate_change (insn, loc, new, 1);
2611 subst_constants (&XEXP (x, 0), insn, map);
2613 /* If a memory address got spoiled, change it back. */
2614 if (insn != 0 && num_validated_changes () != num_changes
2615 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2616 cancel_changes (num_changes);
2621 /* Substitute constants in our source, and in any arguments to a
2622 complex (e.g., ZERO_EXTRACT) destination, but not in the destination itself.
2624 rtx *dest_loc = &SET_DEST (x);
2625 rtx dest = *dest_loc;
2628 subst_constants (&SET_SRC (x), insn, map);
2631 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2632 || GET_CODE (*dest_loc) == SUBREG
2633 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2635 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2637 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2638 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2640 dest_loc = &XEXP (*dest_loc, 0);
2643 /* Do substitute in the address of a destination in memory. */
2644 if (GET_CODE (*dest_loc) == MEM)
2645 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2647 /* Check for the case where DEST is a SUBREG, both it and the underlying
2648 register are no larger than one word, and the SUBREG has the wider mode.
2649 In that case, we are really setting the underlying register to the
2650 source converted to the mode of DEST. So indicate that. */
2651 if (GET_CODE (dest) == SUBREG
2652 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2653 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2654 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2655 <= GET_MODE_SIZE (GET_MODE (dest)))
2656 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2658 src = tem, dest = SUBREG_REG (dest);
2660 /* If storing a recognizable value, save it for later recording. */
2661 if ((map->num_sets < MAX_RECOG_OPERANDS)
2662 && (CONSTANT_P (src)
2663 || (GET_CODE (src) == REG
2664 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2665 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2666 || (GET_CODE (src) == PLUS
2667 && GET_CODE (XEXP (src, 0)) == REG
2668 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2669 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2670 && CONSTANT_P (XEXP (src, 1)))
2671 || GET_CODE (src) == COMPARE
2676 && (src == pc_rtx || GET_CODE (src) == RETURN
2677 || GET_CODE (src) == LABEL_REF))))
2679 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2680 it will cause us to save the COMPARE with any constants
2681 substituted, which is what we want for later. */
2682 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2683 map->equiv_sets[map->num_sets++].dest = dest;
2690 format_ptr = GET_RTX_FORMAT (code);
2692 /* If the first operand is an expression, save its mode for later. */
2693 if (*format_ptr == 'e')
2694 op0_mode = GET_MODE (XEXP (x, 0));
2696 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2698 switch (*format_ptr++)
2705 subst_constants (&XEXP (x, i), insn, map);
2715 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2718 for (j = 0; j < XVECLEN (x, i); j++)
2719 subst_constants (&XVECEXP (x, i, j), insn, map);
2728 /* If this is a commutative operation, move a constant to the second
2729 operand unless the second operand is already a CONST_INT. */
2730 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2731 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2733 rtx tem = XEXP (x, 0);
2734 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2735 validate_change (insn, &XEXP (x, 1), tem, 1);
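/* For instance, (plus:SI (const_int 4) (reg:SI 60)) -- which can arise once
   a parameter pseudo has been replaced by its constant equivalent -- is
   queued to become (plus:SI (reg:SI 60) (const_int 4)), the canonical
   operand order.  (The register number is only an example.)  */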
2738 /* Simplify the expression in case we put in some constants. */
2739 switch (GET_RTX_CLASS (code))
2742 new = simplify_unary_operation (code, GET_MODE (x),
2743 XEXP (x, 0), op0_mode);
2748 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2749 if (op_mode == VOIDmode)
2750 op_mode = GET_MODE (XEXP (x, 1));
2751 new = simplify_relational_operation (code, op_mode,
2752 XEXP (x, 0), XEXP (x, 1));
2753 #ifdef FLOAT_STORE_FLAG_VALUE
2754 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2755 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2756 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2764 new = simplify_binary_operation (code, GET_MODE (x),
2765 XEXP (x, 0), XEXP (x, 1));
2770 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2771 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2776 validate_change (insn, loc, new, 1);
2779 /* Show that registers modified no longer contain known constants. We are
2780 called from note_stores with parts of the new insn. */
2783 mark_stores (dest, x)
2788 enum machine_mode mode;
2790 /* DEST is always the innermost thing set, except in the case of
2791 SUBREGs of hard registers. */
2793 if (GET_CODE (dest) == REG)
2794 regno = REGNO (dest), mode = GET_MODE (dest);
2795 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2797 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2798 mode = GET_MODE (SUBREG_REG (dest));
2803 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2804 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2807 for (i = regno; i <= last_reg; i++)
2808 if (i < global_const_equiv_map_size)
2809 global_const_equiv_map[i] = 0;
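/* Concretely (machine-dependent example): on a target where
   HARD_REGNO_NREGS (2, DImode) is 2, a store to (reg:DI 2) clears the
   recorded equivalences for hard registers 2 and 3, while a store to a
   pseudo register clears only that one entry.  */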
2813 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2814 pointed to by PX, they represent constants in the constant pool.
2815 Replace these with a new memory reference obtained from force_const_mem.
2816 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2817 address of a constant pool entry. Replace them with the address of
2818 a new constant pool entry obtained from force_const_mem. */
2821 restore_constants (px)
2831 if (GET_CODE (x) == CONST_DOUBLE)
2833 /* We have to make a new CONST_DOUBLE to ensure that we account for
2834 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2835 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2839 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2840 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
2843 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2847 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2849 restore_constants (&XEXP (x, 0));
2850 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2852 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2854 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2855 rtx new = XEXP (SUBREG_REG (x), 0);
2857 restore_constants (&new);
2858 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2859 PUT_MODE (new, GET_MODE (x));
2860 *px = validize_mem (new);
2862 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2864 restore_constants (&XEXP (x, 0));
2865 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2869 fmt = GET_RTX_FORMAT (GET_CODE (x));
2870 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2875 for (j = 0; j < XVECLEN (x, i); j++)
2876 restore_constants (&XVECEXP (x, i, j));
2880 restore_constants (&XEXP (x, i));
2887 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2888 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2889 that it points to the node itself, thus indicating that the node is its
2890 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2891 the given node is NULL, recursively descend the decl/block tree which
2892 it is the root of, and for each other ..._DECL or BLOCK node contained
2893 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2894 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2895 values to point to themselves. */
2898 set_block_origin_self (stmt)
2901 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2903 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2906 register tree local_decl;
2908 for (local_decl = BLOCK_VARS (stmt);
2909 local_decl != NULL_TREE;
2910 local_decl = TREE_CHAIN (local_decl))
2911 set_decl_origin_self (local_decl); /* Potential recursion. */
2915 register tree subblock;
2917 for (subblock = BLOCK_SUBBLOCKS (stmt);
2918 subblock != NULL_TREE;
2919 subblock = BLOCK_CHAIN (subblock))
2920 set_block_origin_self (subblock); /* Recurse. */
2925 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2926 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2927 node so that it points to the node itself, thus indicating that the
2928 node represents its own (abstract) origin. Additionally, if the
2929 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2930 the decl/block tree of which the given node is the root, and for
2931 each other ..._DECL or BLOCK node contained therein whose
2932 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2933 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2934 point to themselves. */
2937 set_decl_origin_self (decl)
2940 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2942 DECL_ABSTRACT_ORIGIN (decl) = decl;
2943 if (TREE_CODE (decl) == FUNCTION_DECL)
2947 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2948 DECL_ABSTRACT_ORIGIN (arg) = arg;
2949 if (DECL_INITIAL (decl) != NULL_TREE
2950 && DECL_INITIAL (decl) != error_mark_node)
2951 set_block_origin_self (DECL_INITIAL (decl));
2956 /* Given a pointer to some BLOCK node, and a boolean value to set the
2957 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2958 the given block, and for all local decls and all local sub-blocks
2959 (recursively) which are contained therein. */
2962 set_block_abstract_flags (stmt, setting)
2964 register int setting;
2966 register tree local_decl;
2967 register tree subblock;
2969 BLOCK_ABSTRACT (stmt) = setting;
2971 for (local_decl = BLOCK_VARS (stmt);
2972 local_decl != NULL_TREE;
2973 local_decl = TREE_CHAIN (local_decl))
2974 set_decl_abstract_flags (local_decl, setting);
2976 for (subblock = BLOCK_SUBBLOCKS (stmt);
2977 subblock != NULL_TREE;
2978 subblock = BLOCK_CHAIN (subblock))
2979 set_block_abstract_flags (subblock, setting);
2982 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2983 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2984 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2985 set the abstract flags for all of the parameters, local vars, local
2986 blocks and sub-blocks (recursively) to the same setting. */
2989 set_decl_abstract_flags (decl, setting)
2991 register int setting;
2993 DECL_ABSTRACT (decl) = setting;
2994 if (TREE_CODE (decl) == FUNCTION_DECL)
2998 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2999 DECL_ABSTRACT (arg) = setting;
3000 if (DECL_INITIAL (decl) != NULL_TREE
3001 && DECL_INITIAL (decl) != error_mark_node)
3002 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3006 /* Output the assembly language code for the function FNDECL
3007 from its DECL_SAVED_INSNS. Used for inline functions that are output
3008 at the end of compilation instead of where they appeared in the source. */
3011 output_inline_function (fndecl)
3016 int save_flag_no_inline = flag_no_inline;
3018 if (output_bytecode)
3020 warning ("`inline' ignored for bytecode output");
3024 /* Things we allocate from here on are part of this function, not permanent allocations. */
3026 temporary_allocation ();
3028 head = DECL_SAVED_INSNS (fndecl);
3029 current_function_decl = fndecl;
3031 /* This call is only used to initialize global variables. */
3032 init_function_start (fndecl, "lossage", 1);
3034 /* Redo parameter determinations in case the FUNCTION_...
3035 macros took machine-specific actions that need to be redone. */
3036 assign_parms (fndecl, 1);
3038 /* Set stack frame size. */
3039 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3041 /* The first is a bit of a lie (the array may be larger), but doesn't
3042 matter too much and it isn't worth saving the actual bound. */
3043 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3044 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3045 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3046 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3048 stack_slot_list = STACK_SLOT_LIST (head);
3049 forced_labels = FORCED_LABELS (head);
3051 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3052 current_function_calls_alloca = 1;
3054 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3055 current_function_calls_setjmp = 1;
3057 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3058 current_function_calls_longjmp = 1;
3060 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3061 current_function_returns_struct = 1;
3063 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3064 current_function_returns_pcc_struct = 1;
3066 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3067 current_function_needs_context = 1;
3069 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3070 current_function_has_nonlocal_label = 1;
3072 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3073 current_function_returns_pointer = 1;
3075 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3076 current_function_uses_const_pool = 1;
3078 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3079 current_function_uses_pic_offset_table = 1;
3081 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3082 current_function_pops_args = POPS_ARGS (head);
3084 /* This is the only thing the expand_function_end call that used to be here
3085 actually does, and that call can cause problems. */
3086 immediate_size_expand--;
3088 /* Find last insn and rebuild the constant pool. */
3089 for (last = FIRST_PARM_INSN (head);
3090 NEXT_INSN (last); last = NEXT_INSN (last))
3092 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3094 restore_constants (&PATTERN (last));
3095 restore_constants (&REG_NOTES (last));
3099 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3100 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3102 /* We must have already output DWARF debugging information for the
3103 original (abstract) inline function declaration/definition, so
3104 we want to make sure that the debugging information we generate
3105 for this special instance of the inline function refers back to
3106 the information we already generated. To make sure that happens,
3107 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3108 node (and for all of the local ..._DECL nodes which are its children)
3109 so that they all point to themselves. */
3111 set_decl_origin_self (fndecl);
3113 /* We're not deferring this any longer. */
3114 DECL_DEFER_OUTPUT (fndecl) = 0;
3116 /* Integrating function calls isn't safe anymore, so turn on flag_no_inline. */
3120 /* Compile this function all the way down to assembly code. */
3121 rest_of_compilation (fndecl);
3123 /* Reset flag_no_inline to its original value. */
3124 flag_no_inline = save_flag_no_inline;
3126 current_function_decl = 0;