/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that is not greater than the value.  Avoid using division in case the
   value is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
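
/* Worked example (not in the original source): with ALIGN == 8, a
   power of two, the masking arithmetic gives
       FLOOR_ROUND (13, 8)  ==   8     CEIL_ROUND (13, 8)  == 16
       FLOOR_ROUND (-13, 8) == -16     CEIL_ROUND (-13, 8) == -8
   so negative values round toward negative infinity, which is what a
   downward-growing frame offset needs and what `/' would not give.  */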
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*save_machine_status) PARAMS ((struct function *));
void (*restore_machine_status) PARAMS ((struct function *));
void (*mark_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
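
/* Illustrative sketch (not part of the original source): expansion of a
   single statement typically brackets its temporaries like

       push_temp_slots ();
       slot = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
       ... emit RTL that stores into and reads from `slot' ...
       free_temp_slots ();
       pop_temp_slots ();

   after which the slot's memory may be handed out again, unless KEEP
   or a containing ({...}) grouping preserved it at an outer level.  */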
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
					       HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, int, int, struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
				    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
					  rtx, int, struct hash_table *));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, int, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PARAMS ((tree, int));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, int *));
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
					  struct hash_table *));
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
							 struct hash_table *,
							 hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
static void mark_function_chain PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? cfun
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  cfun = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
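
/* Illustrative usage (not part of the original source): a language
   front end compiling a nested function brackets it like

       push_function_context ();
       ... set up and expand the nested function's body ...
       pop_function_context ();

   so the outer function's `struct function' state is saved on
   outer_function_chain and restored afterwards.  */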
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != cfun)
    push_obstacks (function->function_obstack,
		   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request we cannot satisfy given the preferred
     stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != cfun)
    pop_obstacks ();

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
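
/* Example calls (illustrative only):

       rtx r = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   allocates a word-sized slot aligned according to SImode, while
   passing ALIGN == -1 would use BIGGEST_ALIGNMENT and round SIZE up
   to a multiple of it.  */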
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	align = BIGGEST_ALIGNMENT;
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
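
/* Illustrative summary of KEEP (not in the original source), following
   the comment before assign_stack_temp_for_type:

       assign_stack_temp (mode, size, 0)   dies at end of statement
       assign_stack_temp (mode, size, 1)   survives free_temp_slots
       assign_stack_temp (mode, size, 2)   lifetime bounded by
                                           CLEANUP_POINT_EXPRs
       assign_stack_temp (mode, size, 3)   allocated at var_temp_slot_level
                                           (e.g. for a SAVE_EXPR)  */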
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
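
/* Illustrative use (not in the original source): for a BLKmode
   aggregate, assign_temp must return addressable stack memory,

       rtx t = assign_temp (type, 0, 1, 0);

   whereas for a scalar with MEMORY_REQUIRED == 0 it simply returns a
   (possibly promoted) pseudo register.  */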
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
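
/* Worked example (illustrative): two free BLKmode slots with
   P->base_offset == 0, P->full_size == 16 and Q->base_offset == 16,
   Q->full_size == 8 satisfy
   P->base_offset + P->full_size == Q->base_offset, so Q is merged
   into P, leaving one 24-byte slot that later requests can reuse or
   split.  */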
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
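
/* Example (illustrative): if OLD is (plus:SI (reg:SI 100) (const_int 4))
   and no slot matches OLD itself, while NEW is (reg:SI 101) holding the
   same sum, the function recurses on (reg 100) and (const_int 4),
   registering NEW as an alias of whichever temp slot (reg 100) is
   already known to address.  */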
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
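
/* Illustrative before/after (not in the original source): if DECL_RTL
   of a variable was the pseudo (reg:SI 42) and its address is taken,
   after put_var_into_stack the same rtx has been rewritten in place to
   something like

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -4)))

   and every insn emitted so far that mentioned (reg 42) is corrected
   by fixup_var_refs (immediately for a local variable, or queued on
   fixup_var_refs_queue for an outer function).  */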
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
/* Fix up all references to VAR, a MEM that used to be a pseudo register
   with mode PROMOTED_MODE, throughout the insns of the current function
   and all pending sequences.  */

static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  while (note)
	    {
	      if (GET_CODE (note) != INSN_LIST)
		XEXP (note, 0)
		  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	      note = XEXP (note, 1);
	    }
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register const char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = gen_sequence ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */
		  start_sequence ();
		  force_operand (sub, y);
		  seq = gen_sequence ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */
	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */
      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */
	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */
	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode
		    = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);
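
		  /* Worked example (illustrative, not in the original
		     source): extracting at bit POS == 25 from a 4-byte
		     SImode MEM when extzv wants a QImode operand: OFFSET
		     starts as 25 / 8 == 3; when bytes and bits are
		     numbered from opposite ends it becomes
		     4 - 1 - 3 == 0; POS %= 8 then leaves bit 1 within
		     the chosen byte.  */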
1971 newmem = gen_rtx_MEM (wanted_mode,
1972 plus_constant (XEXP (tem, 0), offset));
1973 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1974 MEM_COPY_ATTRIBUTES (newmem, tem);
1976 /* Make the change and see if the insn remains valid. */
1977 INSN_CODE (insn) = -1;
1978 XEXP (x, 0) = newmem;
1979 XEXP (x, 2) = GEN_INT (pos);
1981 if (recog_memoized (insn) >= 0)
1984 /* Otherwise, restore old position. XEXP (x, 0) will be
1986 XEXP (x, 2) = old_pos;
1990 /* If we get here, the bitfield extract insn can't accept a memory
1991 reference. Copy the input into a register. */
1993 tem1 = gen_reg_rtx (GET_MODE (tem));
1994 emit_insn_before (gen_move_insn (tem1, tem), insn);
2001 if (SUBREG_REG (x) == var)
2003 /* If this is a special SUBREG made because VAR was promoted
2004 from a wider mode, replace it with VAR and call ourself
2005 recursively, this time saying that the object previously
2006 had its current mode (by virtue of the SUBREG). */
2008 if (SUBREG_PROMOTED_VAR_P (x))
2011 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2015 /* If this SUBREG makes VAR wider, it has become a paradoxical
2016 SUBREG with VAR in memory, but these aren't allowed at this
2017 stage of the compilation. So load VAR into a pseudo and take
2018 a SUBREG of that pseudo. */
2019 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2021 replacement = find_fixup_replacement (replacements, var);
2022 if (replacement->new == 0)
2023 replacement->new = gen_reg_rtx (GET_MODE (var));
2024 SUBREG_REG (x) = replacement->new;
2028 /* See if we have already found a replacement for this SUBREG.
2029 If so, use it. Otherwise, make a MEM and see if the insn
2030 is recognized. If not, or if we should force MEM into a register,
2031 make a pseudo for this SUBREG. */
2032 replacement = find_fixup_replacement (replacements, x);
2033 if (replacement->new)
2035 *loc = replacement->new;
2039 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2041 INSN_CODE (insn) = -1;
2042 if (! flag_force_mem && recog_memoized (insn) >= 0)
2045 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
      case SET:
	/* First do special simplification of bit-field references.  */
	if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	    || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	  optimize_bit_field (x, insn, NULL_PTR);
	if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	    || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	  optimize_bit_field (x, insn, NULL_PTR);

	/* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	   into a register and then store it back out.  */
	if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	    && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	    && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	    && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
		> GET_MODE_SIZE (GET_MODE (var))))
	  {
	    replacement = find_fixup_replacement (replacements, var);
	    if (replacement->new == 0)
	      replacement->new = gen_reg_rtx (GET_MODE (var));

	    SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	    emit_insn_after (gen_move_insn (var, replacement->new), insn);
	  }

	/* If SET_DEST is now a paradoxical SUBREG, put the result of this
	   insn into a pseudo and store the low part of the pseudo into VAR.  */
	if (GET_CODE (SET_DEST (x)) == SUBREG
	    && SUBREG_REG (SET_DEST (x)) == var
	    && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		> GET_MODE_SIZE (GET_MODE (var))))
	  {
	    SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	    emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							      tem)),
			     insn);
	    break;
	  }
	{
	  rtx dest = SET_DEST (x);
	  rtx src = SET_SRC (x);
	  rtx outerdest = dest;

	  while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
		 || GET_CODE (dest) == SIGN_EXTRACT
		 || GET_CODE (dest) == ZERO_EXTRACT)
	    dest = XEXP (dest, 0);

	  if (GET_CODE (src) == SUBREG)
	    src = XEXP (src, 0);

	  /* If VAR does not appear at the top level of the SET
	     just scan the lower levels of the tree.  */

	  if (src != var && dest != var)
	    break;

	  /* We will need to rerecognize this insn.  */
	  INSN_CODE (insn) = -1;

	  if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	    {
	      /* Since this case will return, ensure we fixup all the
		 operands here.  */
	      fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
				insn, replacements);
	      fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
				insn, replacements);
	      fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
				insn, replacements);
	      tem = XEXP (outerdest, 0);

	      /* Clean up (SUBREG:SI (MEM:mode ...) 0)
		 that may appear inside a ZERO_EXTRACT.
		 This was legitimate when the MEM was a REG.  */
	      if (GET_CODE (tem) == SUBREG
		  && SUBREG_REG (tem) == var)
		tem = fixup_memory_subreg (tem, insn, 0);

	      tem = fixup_stack_1 (tem, insn);

	      if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		  && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		  && ! mode_dependent_address_p (XEXP (tem, 0))
		  && ! MEM_VOLATILE_P (tem))
		{
		  enum machine_mode wanted_mode;
		  enum machine_mode is_mode = GET_MODE (tem);
		  HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		  wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;

		  /* If we have a narrower mode, we can do something.  */
		  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		    {
		      HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		      rtx old_pos = XEXP (outerdest, 2);
		      rtx newmem;

		      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
			offset = (GET_MODE_SIZE (is_mode)
				  - GET_MODE_SIZE (wanted_mode) - offset);

		      pos %= GET_MODE_BITSIZE (wanted_mode);

		      newmem = gen_rtx_MEM (wanted_mode,
					    plus_constant (XEXP (tem, 0),
							   offset));
		      RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		      MEM_COPY_ATTRIBUTES (newmem, tem);

		      /* Make the change and see if the insn remains valid.  */
		      INSN_CODE (insn) = -1;
		      XEXP (outerdest, 0) = newmem;
		      XEXP (outerdest, 2) = GEN_INT (pos);

		      if (recog_memoized (insn) >= 0)
			return;

		      /* Otherwise, restore old position.  XEXP (x, 0) will be
			 restored later.  */
		      XEXP (outerdest, 2) = old_pos;
		    }
		}

	      /* If we get here, the bit-field store doesn't allow memory
		 or isn't located at a constant position.  Load the value into
		 a register, do the store, and put it back into memory.  */

	      tem1 = gen_reg_rtx (GET_MODE (tem));
	      emit_insn_before (gen_move_insn (tem1, tem), insn);
	      emit_insn_after (gen_move_insn (tem, tem1), insn);
	      XEXP (outerdest, 0) = tem1;
	      return;
	    }
	  /* STRICT_LOW_PART is a no-op on memory references
	     and it can cause combinations to be unrecognizable,
	     so eliminate it.  */

	  if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	    SET_DEST (x) = XEXP (SET_DEST (x), 0);

	  /* A valid insn to copy VAR into or out of a register
	     must be left alone, to avoid an infinite loop here.
	     If the reference to VAR is by a subreg, fix that up,
	     since SUBREG is not valid for a memref.
	     Also fix up the address of the stack slot.

	     Note that we must not try to recognize the insn until
	     after we know that we have valid addresses and no
	     (subreg (mem ...) ...) constructs, since these interfere
	     with determining the validity of the insn.  */

	  if ((SET_SRC (x) == var
	       || (GET_CODE (SET_SRC (x)) == SUBREG
		   && SUBREG_REG (SET_SRC (x)) == var))
	      && (GET_CODE (SET_DEST (x)) == REG
		  || (GET_CODE (SET_DEST (x)) == SUBREG
		      && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	      && GET_MODE (var) == promoted_mode
	      && x == single_set (insn))
	    {
	      rtx pat;

	      replacement = find_fixup_replacement (replacements, SET_SRC (x));
	      if (replacement->new)
		SET_SRC (x) = replacement->new;
	      else if (GET_CODE (SET_SRC (x)) == SUBREG)
		SET_SRC (x) = replacement->new
		  = fixup_memory_subreg (SET_SRC (x), insn, 0);
	      else
		SET_SRC (x) = replacement->new
		  = fixup_stack_1 (SET_SRC (x), insn);

	      if (recog_memoized (insn) >= 0)
		return;

	      /* INSN is not valid, but we know that we want to
		 copy SET_SRC (x) to SET_DEST (x) in some way.  So
		 we generate the move and see whether it requires more
		 than one insn.  If it does, we emit those insns and
		 delete INSN.  Otherwise, we can just replace the pattern
		 of INSN; we have already verified above that INSN has
		 no other function than to do X.  */

	      pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	      if (GET_CODE (pat) == SEQUENCE)
		{
		  emit_insn_after (pat, insn);
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;
		}
	      else
		PATTERN (insn) = pat;

	      return;
	    }
	  if ((SET_DEST (x) == var
	       || (GET_CODE (SET_DEST (x)) == SUBREG
		   && SUBREG_REG (SET_DEST (x)) == var))
	      && (GET_CODE (SET_SRC (x)) == REG
		  || (GET_CODE (SET_SRC (x)) == SUBREG
		      && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	      && GET_MODE (var) == promoted_mode
	      && x == single_set (insn))
	    {
	      rtx pat;

	      if (GET_CODE (SET_DEST (x)) == SUBREG)
		SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	      else
		SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	      if (recog_memoized (insn) >= 0)
		return;

	      pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	      if (GET_CODE (pat) == SEQUENCE)
		{
		  emit_insn_after (pat, insn);
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;
		}
	      else
		PATTERN (insn) = pat;

	      return;
	    }
	  /* Otherwise, storing into VAR must be handled specially
	     by storing into a temporary and copying that into VAR
	     with a new insn after this one.  Note that this case
	     will be used when storing into a promoted scalar since
	     the insn will now have different modes on the input
	     and output and hence will be invalid (except for the case
	     of setting it to a constant, which does not need any
	     change if it is valid).  We generate extra code in that case,
	     but combine.c will eliminate it.  */

	  if (dest == var)
	    {
	      rtx temp;
	      rtx fixeddest = SET_DEST (x);

	      /* STRICT_LOW_PART can be discarded, around a MEM.  */
	      if (GET_CODE (fixeddest) == STRICT_LOW_PART)
		fixeddest = XEXP (fixeddest, 0);
	      /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	      if (GET_CODE (fixeddest) == SUBREG)
		{
		  fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		  promoted_mode = GET_MODE (fixeddest);
		}
	      else
		fixeddest = fixup_stack_1 (fixeddest, insn);

	      temp = gen_reg_rtx (promoted_mode);

	      emit_insn_after (gen_move_insn (fixeddest,
					      gen_lowpart (GET_MODE (fixeddest),
							   temp)),
			       insn);

	      SET_DEST (x) = temp;
	    }
	}
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      else if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */
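
/* Illustrative example (not from the original sources): on a 32-bit
   little-endian target with 4-byte words, this routine would rewrite

     (subreg:SI (mem:DI addr) 1)

   as (mem:SI (plus addr (const_int 4))), provided the adjusted address
   is valid in SImode.  */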
static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

  if (BYTES_BIG_ENDIAN)
    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);

  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL is as in fixup_memory_subreg.  */

static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
     register rtx x;
     rtx insn;
     int uncritical;
{
  register enum rtx_code code;
  register const char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
      else if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
	}
    }
  return x;
}
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */
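
/* A hedged sketch of the transformation (addresses illustrative only):
   given a stack reference whose displacement is out of range, such as

     (mem:SI (plus (reg fp) (const_int 40000)))

   the sum fp+40000 is computed into a fresh pseudo R before INSN and
   the reference becomes (mem:SI (reg R)).  */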
static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register const char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;

	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */
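
/* As an illustrative sketch of the optimization (not from the original
   sources): an aligned byte-sized extraction such as

     (set (reg:SI r) (zero_extract:SI (mem:SI addr)
				      (const_int 8) (const_int 8)))

   can be rewritten as a plain load of the byte at addr+1 in QImode,
   converted to the destination mode by convert_move below.  */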
static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  start_sequence ();
	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			 <= UNITS_PER_WORD))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
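
/* Worked example (illustrative figures only): with ACCUMULATE_OUTGOING_ARGS,
   16 bytes of outgoing argument space, a REG_PARM_STACK_SPACE of 8 with no
   OUTGOING_REG_PARM_STACK_SPACE, and a STACK_POINTER_OFFSET of 0, dynamic
   allocations would begin 16 + 8 + 0 = 24 bytes above the stack pointer.  */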
/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif

/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.  */
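
/* The RTL produced below has the shape (sketch only):

     (mem:M (addressof:P (reg:M n) decl))

   where M is DECL_MODE (DECL) and P is Pmode; the inner pseudo is kept
   so the ADDRESSOF can later be resolved either back to a register or
   to a real stack slot.  */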
rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
			     REGNO (reg), decl);

  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  Likewise for unchanging.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  XEXP (reg, 0) = r;
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);

  return reg;
}
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */

void
flush_addressof (decl)
     tree decl;
{
  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == MEM
      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}

/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */

static void
put_addressof_into_stack (r, ht)
     rtx r;
     struct hash_table *ht;
{
  tree decl = ADDRESSOF_DECL (r);
  rtx reg = XEXP (r, 0);

  if (GET_CODE (reg) != REG)
    abort ();

  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
		      ADDRESSOF_REGNO (r),
		      TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
}
/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
   corresponding (ADDRESSOF (REG ...)) and value is a substitution for
   the whole pattern.  List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
   enough in complex cases, e.g. when some field values can be
   extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
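
/* Layout sketch of these replacement lists (illustrative): each entry
   uses two EXPR_LIST nodes, a key followed by its value, so a list of
   two entries looks like

     (expr_list KEY1 (expr_list VAL1 (expr_list KEY2 (expr_list VAL2 nil))))

   which is why the loops below advance with XEXP (XEXP (tem, 1), 1).  */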
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If the function returns FALSE then the replacement could not
   be made.  */

static boolean
purge_addressof_1 (loc, insn, force, store, ht)
     rtx *loc;
     rtx insn;
     int force, store;
     struct hash_table *ht;
{
  rtx x;
  RTX_CODE code;
  int i, j;
  const char *fmt;
  boolean result = true;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return true;

  code = GET_CODE (x);

  /* If we don't return in any of the cases below, we will recurse inside
     the RTX, which will normally result in any ADDRESSOF being forced into
     memory.  */

  if (code == SET)
    {
      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
      return result;
    }
  else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
    {
      /* We must create a copy of the rtx because it was created by
	 overwriting a REG rtx which is always shared.  */
      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
      rtx insns;

      if (validate_change (insn, loc, sub, 0)
	  || validate_replace_rtx (x, sub, insn))
	return true;

      start_sequence ();
      sub = force_operand (sub, NULL_RTX);
      if (! validate_change (insn, loc, sub, 0)
	  && ! validate_replace_rtx (x, sub, insn))
	abort ();

      insns = gen_sequence ();
      end_sequence ();
      emit_insn_before (insns, insn);
      return true;
    }
  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
    {
      rtx sub = XEXP (XEXP (x, 0), 0);
      rtx sub2;

      if (GET_CODE (sub) == MEM)
	{
	  sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
	  MEM_COPY_ATTRIBUTES (sub2, sub);
	  RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
	  sub = sub2;
	}
      else if (GET_CODE (sub) == REG
	       && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	/* Leave the MEM alone here; the ADDRESSOF inside it will be
	   forced into the stack when the subexpressions are scanned.  */
	;
      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
	{
	  int size_x, size_sub;

	  if (!insn)
	    {
	      /* When processing REG_NOTES look at the list of
		 replacements done on the insn to find the register that X
		 was replaced by.  */
	      rtx tem;

	      for (tem = purge_bitfield_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (x, XEXP (tem, 0)))
		  {
		    *loc = XEXP (XEXP (tem, 1), 0);
		    return true;
		  }

	      /* See comment for purge_addressof_replacements.  */
	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    rtx z = XEXP (XEXP (tem, 1), 0);

		    if (GET_MODE (x) == GET_MODE (z)
			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
		      abort ();

		    /* It can happen that the note may speak of things
		       in a wider (or just different) mode than the
		       code did.  This is especially true of
		       REG_RETVAL.  */

		    if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
		      z = SUBREG_REG (z);

		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
			&& (GET_MODE_SIZE (GET_MODE (x))
			    > GET_MODE_SIZE (GET_MODE (z))))
		      {
			/* This can occur as a result of invalid
			   pointer casts, e.g. float f; ...
			   *(long long int *)&f.
			   ??? We could emit a warning here, but
			   without a line number that wouldn't be
			   very helpful.  */
			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
		      }
		    else
		      z = gen_lowpart (GET_MODE (x), z);

		    *loc = z;
		    return true;
		  }

	      /* Sometimes we may not be able to find the replacement.  For
		 example when the original insn was a MEM in a wider mode,
		 and the note is part of a sign extension of a narrowed
		 version of that MEM.  Gcc testcase compile/990829-1.c can
		 generate an example of this situation.  Rather than complain
		 we return false, which will prompt our caller to remove the
		 offending note.  */
	      return false;
	    }

	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));

	  /* Don't even consider working with paradoxical subregs,
	     or the moral equivalent seen here.  */
	  if (size_x <= size_sub
	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
	    {
	      /* Do a bitfield insertion to mirror what would happen
		 in memory.  */

	      rtx val, seq;

	      if (store)
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = gen_reg_rtx (GET_MODE (x));
		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }
		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);

		  start_sequence ();
		  store_bit_field (sub, size_x, 0, GET_MODE (x),
				   val, GET_MODE_SIZE (GET_MODE (sub)),
				   GET_MODE_SIZE (GET_MODE (sub)));

		  /* Make sure to unshare any shared rtl that store_bit_field
		     might have created.  */
		  unshare_all_rtl_again (get_insns ());

		  seq = gen_sequence ();
		  end_sequence ();
		  p = emit_insn_after (seq, insn);
		  if (NEXT_INSN (insn))
		    compute_insns_for_mem (NEXT_INSN (insn),
					   p ? NEXT_INSN (p) : NULL_RTX,
					   ht);
		}
	      else
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
					   GET_MODE (x), GET_MODE (x),
					   GET_MODE_SIZE (GET_MODE (sub)),
					   GET_MODE_SIZE (GET_MODE (sub)));

		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);
		}

	      /* Remember the replacement so that the same one can be done
		 on the REG_NOTES.  */
	      purge_bitfield_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, x,
				     gen_rtx_EXPR_LIST
				     (VOIDmode, val,
				      purge_bitfield_addressof_replacements));

	      /* We replaced with a reg -- all done.  */
	      return true;
	    }
	}
      else if (validate_change (insn, loc, sub, 0))
	{
	  /* Remember the replacement so that the same one can be done
	     on the REG_NOTES.  */
	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
	    {
	      rtx tem;

	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    XEXP (XEXP (tem, 1), 0) = sub;
		    return true;
		  }
	      purge_addressof_replacements
		= gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
			   gen_rtx_EXPR_LIST (VOIDmode, sub,
					      purge_addressof_replacements));
	      return true;
	    }
	  goto restart;
	}
    give_up:;
      /* else give up and put it into the stack */
    }
  else if (code == ADDRESSOF)
    {
      put_addressof_into_stack (x, ht);
      return true;
    }
  else if (code == SET)
    {
      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
      return result;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
    }

  return result;
}
/* Return a new hash table entry in HT.  */

static struct hash_entry *
insns_for_mem_newfunc (he, ht, k)
     struct hash_entry *he;
     struct hash_table *ht;
     hash_table_key k ATTRIBUTE_UNUSED;
{
  struct insns_for_mem_entry *ifmhe;

  if (he)
    return he;

  ifmhe = ((struct insns_for_mem_entry *)
	   hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
  ifmhe->insns = NULL_RTX;

  return &ifmhe->he;
}

/* Return a hash value for K, a REG.  */

static unsigned long
insns_for_mem_hash (k)
     hash_table_key k;
{
  /* K is really a RTX.  Just use the address as the hash value.  */
  return (unsigned long) k;
}

/* Return non-zero if K1 and K2 (two REGs) are the same.  */

static boolean
insns_for_mem_comp (k1, k2)
     hash_table_key k1;
     hash_table_key k2;
{
  return k1 == k2;
}
struct insns_for_mem_walk_info {
  /* The hash table that we are using to record which INSNs use which
     MEMs.  */
  struct hash_table *ht;

  /* The INSN we are currently processing.  */
  rtx insn;

  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
     to find the insns that use the REGs in the ADDRESSOFs.  */
  int pass;
};

/* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
   that might be used in an ADDRESSOF expression, record this INSN in
   the hash table given by DATA (which is really a pointer to an
   insns_for_mem_walk_info structure).  */

static int
insns_for_mem_walk (r, data)
     rtx *r;
     void *data;
{
  struct insns_for_mem_walk_info *ifmwi
    = (struct insns_for_mem_walk_info *) data;

  if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
      && GET_CODE (XEXP (*r, 0)) == REG)
    hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
  else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
    {
      /* Lookup this MEM in the hashtable, creating it if necessary.  */
      struct insns_for_mem_entry *ifme
	= (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
						      *r,
						      /*create=*/0,
						      /*copy=*/0);

      /* If we have not already recorded this INSN, do so now.  Since
	 we process the INSNs in order, we know that if we have
	 recorded it it must be at the front of the list.  */
      if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
	{
	  /* We do the allocation on the same obstack as is used for
	     the hash table since this memory will not be used once
	     the hash table is deallocated.  */
	  push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
	  ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
					   ifme->insns);
	  pop_obstacks ();
	}
    }

  return 0;
}
/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
   which REGs in HT.  */

static void
compute_insns_for_mem (insns, last_insn, ht)
     rtx insns;
     rtx last_insn;
     struct hash_table *ht;
{
  rtx insn;
  struct insns_for_mem_walk_info ifmwi;

  ifmwi.ht = ht;
  for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
    for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  ifmwi.insn = insn;
	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
	}
}
/* Helper function for purge_addressof called through for_each_rtx.
   Returns true iff the rtl is an ADDRESSOF.  */

static int
is_addressof (rtl, data)
     rtx *rtl;
     void *data ATTRIBUTE_UNUSED;
{
  return GET_CODE (*rtl) == ADDRESSOF;
}
/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
   stack.  */

void
purge_addressof (insns)
     rtx insns;
{
  rtx insn;
  struct hash_table ht;

  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
     requires a fixup pass over the instruction stream to correct
     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
     mentioned in very many instructions.  So, we speed up the process
     by pre-calculating which REGs occur in which INSNs; that allows
     us to perform the fixup passes much more quickly.  */
  hash_table_init (&ht,
		   insns_for_mem_newfunc,
		   insns_for_mem_hash,
		   insns_for_mem_comp);
  compute_insns_for_mem (insns, NULL_RTX, &ht);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	if (! purge_addressof_1 (&PATTERN (insn), insn,
				 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
	  /* If we could not replace the ADDRESSOFs in the insn,
	     something is wrong.  */
	  abort ();

	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
	  {
	    /* If we could not replace the ADDRESSOFs in the insn's notes,
	       we can just remove the offending notes instead.  */
	    rtx note;

	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      {
		/* If we find a REG_RETVAL note then the insn is a libcall.
		   Such insns must have REG_EQUAL notes as well, in order
		   for later passes of the compiler to work.  So it is not
		   safe to delete the notes here, and instead we abort.  */
		if (REG_NOTE_KIND (note) == REG_RETVAL)
		  abort ();
		if (for_each_rtx (&note, is_addressof, NULL))
		  remove_note (insn, note);
	      }
	  }
      }

  hash_table_free (&ht);
  purge_bitfield_addressof_replacements = 0;
  purge_addressof_replacements = 0;
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;
  int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET;
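
  /* For example (illustrative only): from here on, an address such as
     (plus (reg virtual-stack-vars) (const_int 8)) can be rewritten by
     instantiate_virtual_regs_1 below as frame_pointer_rtx plus
     var_offset + 8, and similarly for the other virtual registers.  */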
  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_SAVED_INSNS (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (let, valid_only)
     tree let;
     int valid_only;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
		      valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     HOST_WIDE_INT size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   insns.  */
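
/* A concrete instance (illustrative, assuming STARTING_FRAME_OFFSET is
   -16 on the target): the address

     (plus:SI (reg virtual-stack-vars) (const_int 8))

   becomes (plus:SI (reg fp) (const_int -8)); both constants are folded
   by plus_constant, so usually no extra insn is needed.  */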
static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;
      else if (SET_DEST (x) == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = - cfa_offset;

      if (new)
	{
	  rtx src = SET_SRC (x);

	  instantiate_virtual_regs_1 (&src, NULL_RTX, 0);

	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (src) != REG)
	    temp = force_operand (src, NULL_RTX);
	  else
	    temp = src;
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  if (! validate_change (object, &SET_SRC (x), temp, 0)
	      || ! validate_change (object, &SET_DEST (x), new, 0))
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;
    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old, new_offset;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else if (inner == virtual_cfa_rtx)
		new = arg_pointer_rtx, offset = cfa_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
	    }
	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else if (XEXP (x, 0) == virtual_cfa_rtx)
	    new = arg_pointer_rtx, offset = cfa_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  new_offset = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with just
	     the register.  */
	  if (new_offset == const0_rtx
	      && validate_change (object, loc, new, 0))
	    return 1;

	  /* Next try to replace the register and new offset.
	     There are two changes to validate here and we can't assume that
	     in the case of old offset equals new just changing the register
	     will yield a valid insn.  In the interests of a little efficiency,
	     however, we only call validate change once (we don't queue up the
	     changes and then call apply_change_group).  */

	  old = XEXP (x, 0);
	  if (offset == 0
	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
	      : (XEXP (x, 0) = new,
		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      XEXP (x, 0) = new;
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new_offset), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  XEXP (x, 0) = old;
		  new = gen_rtx_PLUS (Pmode, new, new_offset);

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}
      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;
    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of decls.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 process the copy.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  || temp == hard_frame_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  One case where this could
	     happen is in the case of a USE or CLOBBER reference, but we
	     take care of that below.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}
      /* Fall through to generic unary operation case.  */
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
	 go ahead and make the invalid one, but do it to a copy.  For a REG,
	 just make the recursive call, since there's no chance of a problem.  */

      if ((GET_CODE (XEXP (x, 0)) == MEM
	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
					  0))
	  || (GET_CODE (XEXP (x, 0)) == REG
	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
	return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      goto restart;
    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;
      else if (x == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = cfa_offset;
      else
	return 1;

      temp = plus_constant (new, offset);
      if (!validate_change (object, loc, temp, 0))
	{
	  if (! extra_insns)
	    return 0;

	  start_sequence ();
	  temp = force_operand (temp, NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  if (! validate_change (object, loc, temp, 0)
	      && ! validate_replace_rtx (x, temp, object))
	    abort ();
	}
      return 1;

    case ADDRESSOF:
      if (GET_CODE (XEXP (x, 0)) == REG)
	return 1;
      else if (GET_CODE (XEXP (x, 0)) == MEM)
	{
	  /* If we have a (addressof (mem ..)), do any instantiation inside
	     since we know we'll be making the inside valid when we finally
	     remove the ADDRESSOF.  */
	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
	  return 1;
	}
      break;

    default:
      break;
    }
  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	    return 0;
	}
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					    extra_insns))
	    return 0;
    }

  return 1;
}
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

void
delete_handlers ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
	 prevent jump_optimize from deleting it.
	 Also permit deletion of the nonlocal labels themselves
	 if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  tree t, last_t;

	  LABEL_PRESERVE_P (insn) = 0;

	  /* Remove it from the nonlocal_label list, to avoid confusing
	     flow.  */
	  for (t = nonlocal_labels, last_t = 0; t;
	       last_t = t, t = TREE_CHAIN (t))
	    if (DECL_RTL (TREE_VALUE (t)) == insn)
	      break;
	  if (t)
	    {
	      if (! last_t)
		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
	      else
		TREE_CHAIN (last_t) = TREE_CHAIN (t);
	    }
	}
      if (GET_CODE (insn) == INSN)
	{
	  int can_delete = 0;
	  rtx t;

	  for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
	    if (reg_mentioned_p (t, PATTERN (insn)))
	      {
		can_delete = 1;
		break;
	      }
	  if (can_delete
	      || (nonlocal_goto_stack_level != 0
		  && reg_mentioned_p (nonlocal_goto_stack_level,
				      PATTERN (insn))))
	    delete_insn (insn);
	}
    }
}

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}

/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
   Crash if there is none.  */

rtx
get_first_block_beg ()
{
  register rtx searcher;
  register rtx insn = get_first_nonparm_insn ();

  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
    if (GET_CODE (searcher) == NOTE
	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
      return searcher;

  abort ();	/* Invalid call to this function.  (See comments above.)  */
  return 0;
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
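
/* For instance (illustrative): on most targets a type like
   `struct { char c[64]; }' is returned in memory, making this return 1,
   while an `int' comes back in a call-clobbered register and the
   function returns 0.  */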
int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;
  tree type;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
    type = exp;
  else
    type = TREE_TYPE (exp);

  if (RETURN_IN_MEMORY (type))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.  */
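
/* For example (illustrative): with PROMOTE_FUNCTION_ARGS, a `short'
   parameter may arrive sign- or zero-extended in a full-word register;
   the code below therefore tracks both the mode of the declared type
   (nominal_mode) and the mode the value actually arrives in
   (promoted_mode).  */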
void
assign_parms (fndecl)
     tree fndecl;
{
  register tree parm;
  register rtx entry_parm = 0;
  register rtx stack_parm = 0;
  CUMULATIVE_ARGS args_so_far;
  enum machine_mode promoted_mode, passed_mode;
  enum machine_mode nominal_mode, promoted_nominal_mode;
  int unsignedp;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
#ifdef SETUP_INCOMING_VARARGS
  int varargs_setup = 0;
#endif
  rtx conversion_insns = 0;
  struct args_size alignment_pad;

  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && fnargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
		     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));

  current_function_stdarg = stdarg;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;
4086 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4088 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4089 struct args_size stack_offset;
4090 struct args_size arg_size;
4091 int passed_pointer = 0;
4092 int did_conversion = 0;
4093 tree passed_type = DECL_ARG_TYPE (parm);
4094 tree nominal_type = TREE_TYPE (parm);
4097 /* Set LAST_NAMED if this is last named arg before some
4099 int last_named = ((TREE_CHAIN (parm) == 0
4100 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4101 && (stdarg || current_function_varargs));
4102 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4103 most machines, if this is a varargs/stdarg function, then we treat
4104 the last named arg as if it were anonymous too. */
4105 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4107 if (TREE_TYPE (parm) == error_mark_node
4108 /* This can happen after weird syntax errors
4109 or if an enum type is defined among the parms. */
4110 || TREE_CODE (parm) != PARM_DECL
4111 || passed_type == NULL)
4113 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4114 = gen_rtx_MEM (BLKmode, const0_rtx);
4115 TREE_USED (parm) = 1;
4119 /* For varargs.h function, save info about regs and stack space
4120 used by the individual args, not including the va_alist arg. */
4121 if (hide_last_arg && last_named)
4122 current_function_args_info = args_so_far;
4124 /* Find mode of arg as it is passed, and mode of arg
4125 as it should be during execution of this function. */
4126 passed_mode = TYPE_MODE (passed_type);
4127 nominal_mode = TYPE_MODE (nominal_type);
4129 /* If the parm's mode is VOID, its value doesn't matter,
4130 so avoid the usual things like emit_move_insn that could crash. */
4131 if (nominal_mode == VOIDmode)
4133 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4134 continue;
4137 /* If the parm is to be passed as a transparent union, use the
4138 type of the first field for the tests below. We have already
4139 verified that the modes are the same. */
4140 if (DECL_TRANSPARENT_UNION (parm)
4141 || TYPE_TRANSPARENT_UNION (passed_type))
4142 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4144 /* See if this arg was passed by invisible reference. It is if
4145 it is an object whose size depends on the contents of the
4146 object itself or if the machine requires these objects be passed
4147 that way. */
4149 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4150 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4151 || TREE_ADDRESSABLE (passed_type)
4152 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4153 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4154 passed_type, named_arg)
4155 #endif
4156 )
4158 passed_type = nominal_type = build_pointer_type (passed_type);
4159 passed_pointer = 1;
4160 passed_mode = nominal_mode = Pmode;
4163 promoted_mode = passed_mode;
4165 #ifdef PROMOTE_FUNCTION_ARGS
4166 /* Compute the mode in which the arg is actually extended to. */
4167 unsignedp = TREE_UNSIGNED (passed_type);
4168 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4169 #endif
4171 /* Let machine desc say which reg (if any) the parm arrives in.
4172 0 means it arrives on the stack. */
4173 #ifdef FUNCTION_INCOMING_ARG
4174 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4175 passed_type, named_arg);
4176 #else
4177 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4178 passed_type, named_arg);
4179 #endif
4181 if (entry_parm == 0)
4182 promoted_mode = passed_mode;
4184 #ifdef SETUP_INCOMING_VARARGS
4185 /* If this is the last named parameter, do any required setup for
4186 varargs or stdargs. We need to know about the case of this being an
4187 addressable type, in which case we skip the registers it
4188 would have arrived in.
4190 For stdargs, LAST_NAMED will be set for two parameters, the one that
4191 is actually the last named, and the dummy parameter. We only
4192 want to do this action once.
4194 Also, indicate when RTL generation is to be suppressed. */
4195 if (last_named && !varargs_setup)
4197 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4198 current_function_pretend_args_size, 0);
4199 varargs_setup = 1;
4201 #endif
4203 /* Determine parm's home in the stack,
4204 in case it arrives in the stack or we should pretend it did.
4206 Compute the stack position and rtx where the argument arrives
4209 There is one complexity here: If this was a parameter that would
4210 have been passed in registers, but wasn't only because it is
4211 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4212 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4213 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4214 0 as it was the previous time. */
4216 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4217 locate_and_pad_parm (promoted_mode, passed_type,
4218 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4219 1,
4220 #else
4221 #ifdef FUNCTION_INCOMING_ARG
4222 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4223 passed_type,
4224 pretend_named) != 0,
4225 #else
4226 FUNCTION_ARG (args_so_far, promoted_mode,
4227 passed_type,
4228 pretend_named) != 0,
4229 #endif
4230 #endif
4231 fndecl, &stack_args_size, &stack_offset, &arg_size,
4232 &alignment_pad);
4235 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4237 if (offset_rtx == const0_rtx)
4238 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4239 else
4240 stack_parm = gen_rtx_MEM (promoted_mode,
4241 gen_rtx_PLUS (Pmode,
4242 internal_arg_pointer,
4243 offset_rtx));
4245 /* If this is a memory ref that contains aggregate components,
4246 mark it as such for cse and loop optimize. Likewise if it
4247 is readonly. */
4248 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4249 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4250 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4253 /* If this parameter was passed both in registers and in the stack,
4254 use the copy on the stack. */
4255 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4256 entry_parm = 0;
4258 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4259 /* If this parm was passed part in regs and part in memory,
4260 pretend it arrived entirely in memory
4261 by pushing the register-part onto the stack.
4263 In the special case of a DImode or DFmode that is split,
4264 we could put it together in a pseudoreg directly,
4265 but for now that's not worth bothering with. */
4267 if (entry_parm)
4269 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4270 passed_type, named_arg);
4272 if (nregs > 0)
4274 current_function_pretend_args_size
4275 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4276 / (PARM_BOUNDARY / BITS_PER_UNIT)
4277 * (PARM_BOUNDARY / BITS_PER_UNIT));
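/* Worked example of the rounding above (values illustrative only):
   with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits (8 bytes),
   nregs == 3 gives 12 bytes of register data, and
   ((12 + 8 - 1) / 8) * 8 == 16, i.e. the pretend size is 12 rounded
   up to the next parm boundary.  */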
4279 /* Handle calls that pass values in multiple non-contiguous
4280 locations. The Irix 6 ABI has examples of this. */
4281 if (GET_CODE (entry_parm) == PARALLEL)
4282 emit_group_store (validize_mem (stack_parm), entry_parm,
4283 int_size_in_bytes (TREE_TYPE (parm)),
4284 (TYPE_ALIGN (TREE_TYPE (parm))
4285 / BITS_PER_UNIT));
4286 else
4287 move_block_from_reg (REGNO (entry_parm),
4288 validize_mem (stack_parm), nregs,
4289 int_size_in_bytes (TREE_TYPE (parm)));
4291 entry_parm = stack_parm;
4294 #endif
4296 /* If we didn't decide this parm came in a register,
4297 by default it came on the stack. */
4298 if (entry_parm == 0)
4299 entry_parm = stack_parm;
4301 /* Record permanently how this parm was passed. */
4302 DECL_INCOMING_RTL (parm) = entry_parm;
4304 /* If there is actually space on the stack for this parm,
4305 count it in stack_args_size; otherwise set stack_parm to 0
4306 to indicate there is no preallocated stack slot for the parm. */
4308 if (entry_parm == stack_parm
4309 || (GET_CODE (entry_parm) == PARALLEL
4310 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4311 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4312 /* On some machines, even if a parm value arrives in a register
4313 there is still an (uninitialized) stack slot allocated for it.
4315 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4316 whether this parameter already has a stack slot allocated,
4317 because an arg block exists only if current_function_args_size
4318 is larger than some threshold, and we haven't calculated that
4319 yet. So, for now, we just assume that stack slots never exist
4320 in this case. */
4321 || REG_PARM_STACK_SPACE (fndecl) > 0
4322 #endif
4323 )
4325 stack_args_size.constant += arg_size.constant;
4326 if (arg_size.var)
4327 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4329 else
4330 /* No stack slot was pushed for this parm. */
4331 stack_parm = 0;
4333 /* Update info on where next arg arrives in registers. */
4335 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4336 passed_type, named_arg);
4338 /* If we can't trust the parm stack slot to be aligned enough
4339 for its ultimate type, don't use that slot after entry.
4340 We'll make another stack slot, if we need one. */
4342 int thisparm_boundary
4343 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4345 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4346 stack_parm = 0;
4349 /* If parm was passed in memory, and we need to convert it on entry,
4350 don't store it back in that same slot. */
4351 if (entry_parm != 0
4352 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4353 stack_parm = 0;
4356 /* Now adjust STACK_PARM to the mode and precise location
4357 where this parameter should live during execution,
4358 if we discover that it must live in the stack during execution.
4359 To make debuggers happier on big-endian machines, we store
4360 the value in the last bytes of the space available. */
4362 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4363 && stack_parm != 0)
4367 if (BYTES_BIG_ENDIAN
4368 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4369 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4370 - GET_MODE_SIZE (nominal_mode));
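/* Example of the big-endian adjustment above (modes hypothetical):
   a value passed as SImode (4 bytes) but declared HImode (2 bytes)
   gets stack_offset.constant += 4 - 2 == 2, so it is addressed in
   the last 2 bytes of its 4-byte slot.  */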
4372 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4373 if (offset_rtx == const0_rtx)
4374 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4375 else
4376 stack_parm = gen_rtx_MEM (nominal_mode,
4377 gen_rtx_PLUS (Pmode,
4378 internal_arg_pointer,
4379 offset_rtx));
4381 /* If this is a memory ref that contains aggregate components,
4382 mark it as such for cse and loop optimize. */
4383 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4387 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4388 in the mode in which it arrives.
4389 STACK_PARM is an RTX for a stack slot where the parameter can live
4390 during the function (in case we want to put it there).
4391 STACK_PARM is 0 if no stack slot was pushed for it.
4393 Now output code if necessary to convert ENTRY_PARM to
4394 the type in which this function declares it,
4395 and store that result in an appropriate place,
4396 which may be a pseudo reg, may be STACK_PARM,
4397 or may be a local stack slot if STACK_PARM is 0.
4399 Set DECL_RTL to that place. */
4401 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4403 /* If a BLKmode arrives in registers, copy it to a stack slot.
4404 Handle calls that pass values in multiple non-contiguous
4405 locations. The Irix 6 ABI has examples of this. */
4406 if (GET_CODE (entry_parm) == REG
4407 || GET_CODE (entry_parm) == PARALLEL)
4409 int size_stored
4410 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4411 UNITS_PER_WORD);
4413 /* Note that we will be storing an integral number of words.
4414 So we have to be careful to ensure that we allocate an
4415 integral number of words. We do this below in the
4416 assign_stack_local if space was not allocated in the argument
4417 list. If it was, this will not work if PARM_BOUNDARY is not
4418 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4419 if it becomes a problem. */
4421 if (stack_parm == 0)
4423 stack_parm
4424 = assign_stack_local (GET_MODE (entry_parm),
4425 size_stored, 0);
4427 /* If this is a memory ref that contains aggregate
4428 components, mark it as such for cse and loop optimize. */
4429 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4432 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4433 abort ();
4435 if (TREE_READONLY (parm))
4436 RTX_UNCHANGING_P (stack_parm) = 1;
4438 /* Handle calls that pass values in multiple non-contiguous
4439 locations. The Irix 6 ABI has examples of this. */
4440 if (GET_CODE (entry_parm) == PARALLEL)
4441 emit_group_store (validize_mem (stack_parm), entry_parm,
4442 int_size_in_bytes (TREE_TYPE (parm)),
4443 (TYPE_ALIGN (TREE_TYPE (parm))
4444 / BITS_PER_UNIT));
4445 else
4446 move_block_from_reg (REGNO (entry_parm),
4447 validize_mem (stack_parm),
4448 size_stored / UNITS_PER_WORD,
4449 int_size_in_bytes (TREE_TYPE (parm)));
4451 DECL_RTL (parm) = stack_parm;
4453 else if (! ((! optimize
4454 && ! DECL_REGISTER (parm)
4455 && ! DECL_INLINE (fndecl))
4456 /* layout_decl may set this. */
4457 || TREE_ADDRESSABLE (parm)
4458 || TREE_SIDE_EFFECTS (parm)
4459 /* If -ffloat-store specified, don't put explicit
4460 float variables into registers. */
4461 || (flag_float_store
4462 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4463 /* Always assign pseudo to structure return or item passed
4464 by invisible reference. */
4465 || passed_pointer || parm == function_result_decl)
4467 /* Store the parm in a pseudoregister during the function, but we
4468 may need to do it in a wider mode. */
4470 register rtx parmreg;
4471 int regno, regnoi = 0, regnor = 0;
4473 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4475 promoted_nominal_mode
4476 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4478 parmreg = gen_reg_rtx (promoted_nominal_mode);
4479 mark_user_reg (parmreg);
4481 /* If this was an item that we received a pointer to, set DECL_RTL
4482 appropriately. */
4483 if (passed_pointer)
4485 DECL_RTL (parm)
4486 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4487 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4489 else
4490 DECL_RTL (parm) = parmreg;
4492 /* Copy the value into the register. */
4493 if (nominal_mode != passed_mode
4494 || promoted_nominal_mode != promoted_mode)
4496 int save_tree_used;
4497 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4498 mode, by the caller. We now have to convert it to
4499 NOMINAL_MODE, if different. However, PARMREG may be in
4500 a different mode than NOMINAL_MODE if it is being stored
4501 promoted.
4503 If ENTRY_PARM is a hard register, it might be in a register
4504 not valid for operating in its mode (e.g., an odd-numbered
4505 register for a DFmode). In that case, moves are the only
4506 thing valid, so we can't do a convert from there. This
4507 occurs when the calling sequence allows such misaligned
4508 usages.
4510 In addition, the conversion may involve a call, which could
4511 clobber parameters which haven't been copied to pseudo
4512 registers yet. Therefore, we must first copy the parm to
4513 a pseudo reg here, and save the conversion until after all
4514 parameters have been moved. */
4516 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4518 emit_move_insn (tempreg, validize_mem (entry_parm));
4520 push_to_sequence (conversion_insns);
4521 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4523 /* TREE_USED gets set erroneously during expand_assignment. */
4524 save_tree_used = TREE_USED (parm);
4525 expand_assignment (parm,
4526 make_tree (nominal_type, tempreg), 0, 0);
4527 TREE_USED (parm) = save_tree_used;
4528 conversion_insns = get_insns ();
4529 did_conversion = 1;
4530 end_sequence ();
4532 else
4533 emit_move_insn (parmreg, validize_mem (entry_parm));
4535 /* If we were passed a pointer but the actual value
4536 can safely live in a register, put it in one. */
4537 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4538 && ! ((! optimize
4539 && ! DECL_REGISTER (parm)
4540 && ! DECL_INLINE (fndecl))
4541 /* layout_decl may set this. */
4542 || TREE_ADDRESSABLE (parm)
4543 || TREE_SIDE_EFFECTS (parm)
4544 /* If -ffloat-store specified, don't put explicit
4545 float variables into registers. */
4546 || (flag_float_store
4547 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4549 /* We can't use nominal_mode, because it will have been set to
4550 Pmode above. We must use the actual mode of the parm. */
4551 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4552 mark_user_reg (parmreg);
4553 emit_move_insn (parmreg, DECL_RTL (parm));
4554 DECL_RTL (parm) = parmreg;
4555 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4556 now the address of the parm. */
4559 #ifdef FUNCTION_ARG_CALLEE_COPIES
4560 /* If we are passed an arg by reference and it is our responsibility
4561 to make a copy, do it now.
4562 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4563 original argument, so we must recreate them in the call to
4564 FUNCTION_ARG_CALLEE_COPIES. */
4565 /* ??? Later add code to handle the case that if the argument isn't
4566 modified, don't do the copy. */
4568 else if (passed_pointer
4569 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4570 TYPE_MODE (DECL_ARG_TYPE (parm)),
4571 DECL_ARG_TYPE (parm),
4572 named_arg)
4573 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4575 rtx copy;
4576 tree type = DECL_ARG_TYPE (parm);
4578 /* This sequence may involve a library call perhaps clobbering
4579 registers that haven't been copied to pseudos yet. */
4581 push_to_sequence (conversion_insns);
4583 if (TYPE_SIZE (type) == 0
4584 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4585 /* This is a variable sized object. */
4586 copy = gen_rtx_MEM (BLKmode,
4587 allocate_dynamic_stack_space
4588 (expr_size (parm), NULL_RTX,
4589 TYPE_ALIGN (type)));
4590 else
4591 copy = assign_stack_temp (TYPE_MODE (type),
4592 int_size_in_bytes (type), 1);
4593 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4594 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4596 store_expr (parm, copy, 0);
4597 emit_move_insn (parmreg, XEXP (copy, 0));
4598 if (current_function_check_memory_usage)
4599 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4600 XEXP (copy, 0), Pmode,
4601 GEN_INT (int_size_in_bytes (type)),
4602 TYPE_MODE (sizetype),
4603 GEN_INT (MEMORY_USE_RW),
4604 TYPE_MODE (integer_type_node));
4605 conversion_insns = get_insns ();
4606 did_conversion = 1;
4607 end_sequence ();
4609 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4611 /* In any case, record the parm's desired stack location
4612 in case we later discover it must live in the stack.
4614 If it is a COMPLEX value, store the stack location for both
4615 halves. */
4617 if (GET_CODE (parmreg) == CONCAT)
4618 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4619 else
4620 regno = REGNO (parmreg);
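/* Illustration (register numbers hypothetical): a complex parm in
   (concat:SC (reg:SF 53) (reg:SF 54)) yields regno == 54, the larger
   part, so the table growth below covers both constituent regs.  */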
4622 if (regno >= max_parm_reg)
4624 rtx *new;
4625 int old_max_parm_reg = max_parm_reg;
4627 /* It's slow to expand this one register at a time,
4628 but it's also rare and we need max_parm_reg to be
4629 precisely correct. */
4630 max_parm_reg = regno + 1;
4631 new = (rtx *) xrealloc (parm_reg_stack_loc,
4632 max_parm_reg * sizeof (rtx));
4633 bzero ((char *) (new + old_max_parm_reg),
4634 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4635 parm_reg_stack_loc = new;
4638 if (GET_CODE (parmreg) == CONCAT)
4640 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4642 regnor = REGNO (gen_realpart (submode, parmreg));
4643 regnoi = REGNO (gen_imagpart (submode, parmreg));
4645 if (stack_parm != 0)
4647 parm_reg_stack_loc[regnor]
4648 = gen_realpart (submode, stack_parm);
4649 parm_reg_stack_loc[regnoi]
4650 = gen_imagpart (submode, stack_parm);
4652 else
4654 parm_reg_stack_loc[regnor] = 0;
4655 parm_reg_stack_loc[regnoi] = 0;
4658 else
4659 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4661 /* Mark the register as eliminable if we did no conversion
4662 and it was copied from memory at a fixed offset,
4663 and the arg pointer was not copied to a pseudo-reg.
4664 If the arg pointer is a pseudo reg or the offset formed
4665 an invalid address, such memory-equivalences
4666 as we make here would screw up life analysis for it. */
4667 if (nominal_mode == passed_mode
4668 && ! did_conversion
4669 && stack_parm != 0
4670 && GET_CODE (stack_parm) == MEM
4671 && stack_offset.var == 0
4672 && reg_mentioned_p (virtual_incoming_args_rtx,
4673 XEXP (stack_parm, 0)))
4675 rtx linsn = get_last_insn ();
4676 rtx sinsn, set;
4678 /* Mark complex types separately. */
4679 if (GET_CODE (parmreg) == CONCAT)
4680 /* Scan backwards for the set of the real and
4681 imaginary parts. */
4682 for (sinsn = linsn; sinsn != 0;
4683 sinsn = prev_nonnote_insn (sinsn))
4685 set = single_set (sinsn);
4686 if (set != 0
4687 && SET_DEST (set) == regno_reg_rtx [regnoi])
4688 REG_NOTES (sinsn)
4689 = gen_rtx_EXPR_LIST (REG_EQUIV,
4690 parm_reg_stack_loc[regnoi],
4691 REG_NOTES (sinsn));
4692 else if (set != 0
4693 && SET_DEST (set) == regno_reg_rtx [regnor])
4694 REG_NOTES (sinsn)
4695 = gen_rtx_EXPR_LIST (REG_EQUIV,
4696 parm_reg_stack_loc[regnor],
4697 REG_NOTES (sinsn));
4699 else if ((set = single_set (linsn)) != 0
4700 && SET_DEST (set) == parmreg)
4701 REG_NOTES (linsn)
4702 = gen_rtx_EXPR_LIST (REG_EQUIV,
4703 stack_parm, REG_NOTES (linsn));
4706 /* For pointer data type, suggest pointer register. */
4707 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4708 mark_reg_pointer (parmreg,
4709 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4710 / BITS_PER_UNIT));
4712 else
4714 /* Value must be stored in the stack slot STACK_PARM
4715 during function execution. */
4717 if (promoted_mode != nominal_mode)
4719 /* Conversion is required. */
4720 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4722 emit_move_insn (tempreg, validize_mem (entry_parm));
4724 push_to_sequence (conversion_insns);
4725 entry_parm = convert_to_mode (nominal_mode, tempreg,
4726 TREE_UNSIGNED (TREE_TYPE (parm)));
4727 if (stack_parm)
4729 /* ??? This may need a big-endian conversion on sparc64. */
4730 stack_parm = change_address (stack_parm, nominal_mode,
4731 NULL_RTX);
4733 conversion_insns = get_insns ();
4734 did_conversion = 1;
4735 end_sequence ();
4738 if (entry_parm != stack_parm)
4740 if (stack_parm == 0)
4742 stack_parm
4743 = assign_stack_local (GET_MODE (entry_parm),
4744 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4745 /* If this is a memory ref that contains aggregate components,
4746 mark it as such for cse and loop optimize. */
4747 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4750 if (promoted_mode != nominal_mode)
4752 push_to_sequence (conversion_insns);
4753 emit_move_insn (validize_mem (stack_parm),
4754 validize_mem (entry_parm));
4755 conversion_insns = get_insns ();
4756 end_sequence ();
4758 else
4759 emit_move_insn (validize_mem (stack_parm),
4760 validize_mem (entry_parm));
4762 if (current_function_check_memory_usage)
4764 push_to_sequence (conversion_insns);
4765 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4766 XEXP (stack_parm, 0), Pmode,
4767 GEN_INT (GET_MODE_SIZE (GET_MODE
4768 (entry_parm))),
4769 TYPE_MODE (sizetype),
4770 GEN_INT (MEMORY_USE_RW),
4771 TYPE_MODE (integer_type_node));
4773 conversion_insns = get_insns ();
4774 end_sequence ();
4776 DECL_RTL (parm) = stack_parm;
4779 /* If this "parameter" was the place where we are receiving the
4780 function's incoming structure pointer, set up the result. */
4781 if (parm == function_result_decl)
4783 tree result = DECL_RESULT (fndecl);
4784 tree restype = TREE_TYPE (result);
4786 DECL_RTL (result)
4787 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4789 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4790 AGGREGATE_TYPE_P (restype));
4793 if (TREE_THIS_VOLATILE (parm))
4794 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4795 if (TREE_READONLY (parm))
4796 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4799 /* Output all parameter conversion instructions (possibly including calls)
4800 now that all parameters have been copied out of hard registers. */
4801 emit_insns (conversion_insns);
4803 last_parm_insn = get_last_insn ();
4805 current_function_args_size = stack_args_size.constant;
4807 /* Adjust function incoming argument size for alignment and
4808 minimum size. */
4810 #ifdef REG_PARM_STACK_SPACE
4811 #ifndef MAYBE_REG_PARM_STACK_SPACE
4812 current_function_args_size = MAX (current_function_args_size,
4813 REG_PARM_STACK_SPACE (fndecl));
4814 #endif
4815 #endif
4817 #ifdef STACK_BOUNDARY
4818 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4820 current_function_args_size
4821 = ((current_function_args_size + STACK_BYTES - 1)
4822 / STACK_BYTES) * STACK_BYTES;
4823 #endif
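/* Worked example: with STACK_BOUNDARY == 64, STACK_BYTES is 8 and an
   args size of 13 becomes ((13 + 8 - 1) / 8) * 8 == 16.  */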
4825 #ifdef ARGS_GROW_DOWNWARD
4826 current_function_arg_offset_rtx
4827 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4828 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4829 size_int (-stack_args_size.constant)),
4830 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4831 #else
4832 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4833 #endif
4835 /* See how many bytes, if any, of its args a function should try to pop
4836 on return. */
4838 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4839 current_function_args_size);
4841 /* For stdarg.h function, save info about
4842 regs and stack space used by the named args. */
4845 current_function_args_info = args_so_far;
4847 /* Set the rtx used for the function return value. Put this in its
4848 own variable so any optimizers that need this information don't have
4849 to include tree.h. Do this here so it gets done when an inlined
4850 function gets output. */
4852 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4855 /* Indicate whether REGNO is an incoming argument to the current function
4856 that was promoted to a wider mode. If so, return the RTX for the
4857 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4858 that REGNO is promoted from and whether the promotion was signed or
4859 unsigned. */
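/* Illustrative use (hypothetical target): if a signed char argument
   was promoted and arrives in (reg:SI 5), then
   promoted_input_arg (5, &mode, &unsignedp) returns that reg and
   sets mode to QImode and unsignedp to 0.  */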
4861 #ifdef PROMOTE_FUNCTION_ARGS
4863 rtx
4864 promoted_input_arg (regno, pmode, punsignedp)
4865 int regno;
4866 enum machine_mode *pmode;
4867 int *punsignedp;
4869 tree arg;
4871 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4872 arg = TREE_CHAIN (arg))
4873 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4874 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4875 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4877 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4878 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4880 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4881 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4882 && mode != DECL_MODE (arg))
4884 *pmode = DECL_MODE (arg);
4885 *punsignedp = unsignedp;
4886 return DECL_INCOMING_RTL (arg);
4890 return 0;
4893 #endif
4895 /* Compute the size and offset from the start of the stacked arguments for a
4896 parm passed in mode PASSED_MODE and with type TYPE.
4898 INITIAL_OFFSET_PTR points to the current offset into the stacked
4899 arguments.
4901 The starting offset and size for this parm are returned in *OFFSET_PTR
4902 and *ARG_SIZE_PTR, respectively.
4904 IN_REGS is non-zero if the argument will be passed in registers. It will
4905 never be set if REG_PARM_STACK_SPACE is not defined.
4907 FNDECL is the function in which the argument was defined.
4909 There are two types of rounding that are done. The first, controlled by
4910 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4911 list to be aligned to the specific boundary (in bits). This rounding
4912 affects the initial and starting offsets, but not the argument size.
4914 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4915 optionally rounds the size of the parm to PARM_BOUNDARY. The
4916 initial offset is not affected by this rounding, while the size always
4917 is and the starting offset may be. */
4919 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4920 initial_offset_ptr is positive because locate_and_pad_parm's
4921 callers pass in the total size of args so far as
4922 initial_offset_ptr. arg_size_ptr is always positive. */
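/* Worked example (hypothetical 32-bit target, args growing upward):
   for a 6-byte BLKmode parm with FUNCTION_ARG_BOUNDARY of 32 bits and
   an initial offset of 10, the offset is first rounded up to 12; with
   padding enabled the size is rounded up to 8, so this parm occupies
   bytes 12..19 and the next offset is 20.  */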
4924 void
4925 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4926 initial_offset_ptr, offset_ptr, arg_size_ptr,
4927 alignment_pad)
4928 enum machine_mode passed_mode;
4929 tree type;
4930 int in_regs ATTRIBUTE_UNUSED;
4931 tree fndecl ATTRIBUTE_UNUSED;
4932 struct args_size *initial_offset_ptr;
4933 struct args_size *offset_ptr;
4934 struct args_size *arg_size_ptr;
4935 struct args_size *alignment_pad;
4938 tree sizetree
4939 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4940 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4941 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4943 #ifdef REG_PARM_STACK_SPACE
4944 /* If we have found a stack parm before we reach the end of the
4945 area reserved for registers, skip that area. */
4946 if (! in_regs)
4948 int reg_parm_stack_space = 0;
4950 #ifdef MAYBE_REG_PARM_STACK_SPACE
4951 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4952 #else
4953 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4954 #endif
4955 if (reg_parm_stack_space > 0)
4957 if (initial_offset_ptr->var)
4959 initial_offset_ptr->var
4960 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4961 size_int (reg_parm_stack_space));
4962 initial_offset_ptr->constant = 0;
4964 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4965 initial_offset_ptr->constant = reg_parm_stack_space;
4968 #endif /* REG_PARM_STACK_SPACE */
4970 arg_size_ptr->var = 0;
4971 arg_size_ptr->constant = 0;
4973 #ifdef ARGS_GROW_DOWNWARD
4974 if (initial_offset_ptr->var)
4976 offset_ptr->constant = 0;
4977 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4978 initial_offset_ptr->var);
4980 else
4982 offset_ptr->constant = - initial_offset_ptr->constant;
4983 offset_ptr->var = 0;
4985 if (where_pad != none
4986 && (TREE_CODE (sizetree) != INTEGER_CST
4987 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4988 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4989 SUB_PARM_SIZE (*offset_ptr, sizetree);
4990 if (where_pad != downward)
4991 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
4992 if (initial_offset_ptr->var)
4994 arg_size_ptr->var = size_binop (MINUS_EXPR,
4995 size_binop (MINUS_EXPR,
4996 integer_zero_node,
4997 initial_offset_ptr->var),
4998 offset_ptr->var);
5000 else
5002 arg_size_ptr->constant = (- initial_offset_ptr->constant
5003 - offset_ptr->constant);
5005 #else /* !ARGS_GROW_DOWNWARD */
5006 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5007 *offset_ptr = *initial_offset_ptr;
5009 #ifdef PUSH_ROUNDING
5010 if (passed_mode != BLKmode)
5011 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5012 #endif
5014 /* Pad_below needs the pre-rounded size to know how much to pad below
5015 so this must be done before rounding up. */
5016 if (where_pad == downward
5017 /* However, BLKmode args passed in regs have their padding done elsewhere.
5018 The stack slot must be able to hold the entire register. */
5019 && !(in_regs && passed_mode == BLKmode))
5020 pad_below (offset_ptr, passed_mode, sizetree);
5022 if (where_pad != none
5023 && (TREE_CODE (sizetree) != INTEGER_CST
5024 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5025 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5027 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5028 #endif /* ARGS_GROW_DOWNWARD */
5031 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5032 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
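/* Example: a boundary of 64 bits is 8 bytes, so an offset constant of
   20 becomes CEIL_ROUND (20, 8) == 24, or FLOOR_ROUND (-20, 8) == -24
   when ARGS_GROW_DOWNWARD; the 4 bytes added are recorded in
   *ALIGNMENT_PAD when the boundary exceeds PARM_BOUNDARY and
   STACK_BOUNDARY.  */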
5034 static void
5035 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5036 struct args_size *offset_ptr;
5037 int boundary;
5038 struct args_size *alignment_pad;
5040 tree save_var = NULL_TREE;
5041 HOST_WIDE_INT save_constant = 0;
5043 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5045 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5047 save_var = offset_ptr->var;
5048 save_constant = offset_ptr->constant;
5051 alignment_pad->var = NULL_TREE;
5052 alignment_pad->constant = 0;
5054 if (boundary > BITS_PER_UNIT)
5056 if (offset_ptr->var)
5058 offset_ptr->var =
5059 #ifdef ARGS_GROW_DOWNWARD
5060 round_down
5061 #else
5062 round_up
5063 #endif
5064 (ARGS_SIZE_TREE (*offset_ptr),
5065 boundary / BITS_PER_UNIT);
5066 offset_ptr->constant = 0; /*?*/
5067 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5068 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5072 offset_ptr->constant =
5073 #ifdef ARGS_GROW_DOWNWARD
5074 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5075 #else
5076 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5077 #endif
5078 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5079 alignment_pad->constant = offset_ptr->constant - save_constant;
5084 #ifndef ARGS_GROW_DOWNWARD
5085 static void
5086 pad_below (offset_ptr, passed_mode, sizetree)
5087 struct args_size *offset_ptr;
5088 enum machine_mode passed_mode;
5089 tree sizetree;
5091 if (passed_mode != BLKmode)
5093 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5094 offset_ptr->constant
5095 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5096 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5097 - GET_MODE_SIZE (passed_mode));
5099 else
5101 if (TREE_CODE (sizetree) != INTEGER_CST
5102 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5104 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5105 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5107 ADD_PARM_SIZE (*offset_ptr, s2);
5108 SUB_PARM_SIZE (*offset_ptr, sizetree);
5114 #ifdef ARGS_GROW_DOWNWARD
5115 static tree
5116 round_down (value, divisor)
5117 tree value;
5118 int divisor;
5120 return size_binop (MULT_EXPR,
5121 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5122 size_int (divisor));
5124 #endif
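/* Example: round_down applied to a size tree of 19 with divisor 8
   computes (19 floor-div 8) * 8 == 16, the tree-level analogue of the
   FLOOR_ROUND macro.  */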
5126 /* Walk the tree of blocks describing the binding levels within a function
5127 and warn about uninitialized variables.
5128 This is done after calling flow_analysis and before global_alloc
5129 clobbers the pseudo-regs to hard regs. */
5131 void
5132 uninitialized_vars_warning (block)
5133 tree block;
5135 register tree decl, sub;
5136 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5138 if (warn_uninitialized
5139 && TREE_CODE (decl) == VAR_DECL
5140 /* These warnings are unreliable for aggregates
5141 because assigning the fields one by one can fail to convince
5142 flow.c that the entire aggregate was initialized.
5143 Unions are troublesome because members may be shorter. */
5144 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5145 && DECL_RTL (decl) != 0
5146 && GET_CODE (DECL_RTL (decl)) == REG
5147 /* Global optimizations can make it difficult to determine if a
5148 particular variable has been initialized. However, a VAR_DECL
5149 with a nonzero DECL_INITIAL had an initializer, so do not
5150 claim it is potentially uninitialized.
5152 We do not care about the actual value in DECL_INITIAL, so we do
5153 not worry that it may be a dangling pointer. */
5154 && DECL_INITIAL (decl) == NULL_TREE
5155 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5156 warning_with_decl (decl,
5157 "`%s' might be used uninitialized in this function");
5158 if (extra_warnings
5159 && TREE_CODE (decl) == VAR_DECL
5160 && DECL_RTL (decl) != 0
5161 && GET_CODE (DECL_RTL (decl)) == REG
5162 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5163 warning_with_decl (decl,
5164 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5166 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5167 uninitialized_vars_warning (sub);
5170 /* Do the appropriate part of uninitialized_vars_warning
5171 but for arguments instead of local variables. */
5173 void
5174 setjmp_args_warning ()
5176 register tree decl;
5177 for (decl = DECL_ARGUMENTS (current_function_decl);
5178 decl; decl = TREE_CHAIN (decl))
5179 if (DECL_RTL (decl) != 0
5180 && GET_CODE (DECL_RTL (decl)) == REG
5181 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5182 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5185 /* If this function call setjmp, put all vars into the stack
5186 unless they were declared `register'. */
5188 void
5189 setjmp_protect (block)
5190 tree block;
5192 register tree decl, sub;
5193 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5194 if ((TREE_CODE (decl) == VAR_DECL
5195 || TREE_CODE (decl) == PARM_DECL)
5196 && DECL_RTL (decl) != 0
5197 && (GET_CODE (DECL_RTL (decl)) == REG
5198 || (GET_CODE (DECL_RTL (decl)) == MEM
5199 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5200 /* If this variable came from an inline function, it must be
5201 that its life doesn't overlap the setjmp. If there was a
5202 setjmp in the function, it would already be in memory. We
5203 must exclude such variables because their DECL_RTL might be
5204 set to strange things such as virtual_stack_vars_rtx. */
5205 && ! DECL_FROM_INLINE (decl)
5207 #ifdef NON_SAVING_SETJMP
5208 /* If longjmp doesn't restore the registers,
5209 don't put anything in them. */
5210 NON_SAVING_SETJMP ||
5211 #endif
5213 ! DECL_REGISTER (decl)))
5214 put_var_into_stack (decl);
5215 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5216 setjmp_protect (sub);
5219 /* Like the previous function, but for args instead of local variables. */
5221 void
5222 setjmp_protect_args ()
5224 register tree decl;
5225 for (decl = DECL_ARGUMENTS (current_function_decl);
5226 decl; decl = TREE_CHAIN (decl))
5227 if ((TREE_CODE (decl) == VAR_DECL
5228 || TREE_CODE (decl) == PARM_DECL)
5229 && DECL_RTL (decl) != 0
5230 && (GET_CODE (DECL_RTL (decl)) == REG
5231 || (GET_CODE (DECL_RTL (decl)) == MEM
5232 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5234 /* If longjmp doesn't restore the registers,
5235 don't put anything in them. */
5236 #ifdef NON_SAVING_SETJMP
5237 NON_SAVING_SETJMP ||
5238 #endif
5240 ! DECL_REGISTER (decl)))
5241 put_var_into_stack (decl);
5244 /* Return the context-pointer register corresponding to DECL,
5245 or 0 if it does not need one. */
5247 rtx
5248 lookup_static_chain (decl)
5249 tree decl;
5251 tree context = decl_function_context (decl);
5252 tree link;
5254 if (context == 0
5255 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5256 return 0;
5258 /* We treat inline_function_decl as an alias for the current function
5259 because that is the inline function whose vars, types, etc.
5260 are being merged into the current function.
5261 See expand_inline_function. */
5262 if (context == current_function_decl || context == inline_function_decl)
5263 return virtual_stack_vars_rtx;
5265 for (link = context_display; link; link = TREE_CHAIN (link))
5266 if (TREE_PURPOSE (link) == context)
5267 return RTL_EXPR_RTL (TREE_VALUE (link));
5269 abort ();
5272 /* Convert a stack slot address ADDR for variable VAR
5273 (from a containing function)
5274 into an address valid in this function (using a static chain). */
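/* Sketch of the decomposition below (displacement hypothetical): an
   address such as (plus (reg) (const_int 12)) splits into BASEREG and
   DISPLACEMENT == 12; BASEREG is rebased against the containing
   function's frame and the displacement is then re-applied.  */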
5276 rtx
5277 fix_lexical_addr (addr, var)
5278 rtx addr;
5279 tree var;
5281 rtx basereg;
5282 HOST_WIDE_INT displacement;
5283 tree context = decl_function_context (var);
5284 struct function *fp;
5287 /* If this is the present function, we need not do anything. */
5288 if (context == current_function_decl || context == inline_function_decl)
5289 return addr;
5291 for (fp = outer_function_chain; fp; fp = fp->next)
5292 if (fp->decl == context)
5293 break;
5295 if (fp == 0)
5296 abort ();
5298 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5299 addr = XEXP (XEXP (addr, 0), 0);
5301 /* Decode given address as base reg plus displacement. */
5302 if (GET_CODE (addr) == REG)
5303 basereg = addr, displacement = 0;
5304 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5305 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5306 else
5307 abort ();
5309 /* We accept vars reached via the containing function's
5310 incoming arg pointer and via its stack variables pointer. */
5311 if (basereg == fp->internal_arg_pointer)
5313 /* If reached via arg pointer, get the arg pointer value
5314 out of that function's stack frame.
5316 There are two cases: If a separate ap is needed, allocate a
5317 slot in the outer function for it and dereference it that way.
5318 This is correct even if the real ap is actually a pseudo.
5319 Otherwise, just adjust the offset from the frame pointer to
5322 #ifdef NEED_SEPARATE_AP
5325 if (fp->x_arg_pointer_save_area == 0)
5326 fp->x_arg_pointer_save_area
5327 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5329 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5330 addr = memory_address (Pmode, addr);
5332 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5333 #else
5334 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5335 base = lookup_static_chain (var);
5336 #endif
5339 else if (basereg == virtual_stack_vars_rtx)
5341 /* This is the same code as lookup_static_chain, duplicated here to
5342 avoid an extra call to decl_function_context. */
5345 for (link = context_display; link; link = TREE_CHAIN (link))
5346 if (TREE_PURPOSE (link) == context)
5348 base = RTL_EXPR_RTL (TREE_VALUE (link));
5356 /* Use same offset, relative to appropriate static chain or argument
5358 return plus_constant (base, displacement);
5361 /* Return the address of the trampoline for entering nested fn FUNCTION.
5362 If necessary, allocate a trampoline (in the stack frame)
5363 and emit rtl to initialize its contents (at entry to this function). */
5365 rtx
5366 trampoline_address (function)
5367 tree function;
5369 rtx tramp;
5370 tree link;
5371 tree rtlexp;
5372 struct function *fp;
5373 tree fn_context;
5375 /* Find an existing trampoline and return it. */
5376 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5377 if (TREE_PURPOSE (link) == function)
5378 return
5379 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5381 for (fp = outer_function_chain; fp; fp = fp->next)
5382 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5383 if (TREE_PURPOSE (link) == function)
5385 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5386 function);
5387 return round_trampoline_addr (tramp);
5390 /* None exists; we must make one. */
5392 /* Find the `struct function' for the function containing FUNCTION. */
5394 fn_context = decl_function_context (function);
5395 if (fn_context != current_function_decl
5396 && fn_context != inline_function_decl)
5397 for (fp = outer_function_chain; fp; fp = fp->next)
5398 if (fp->decl == fn_context)
5399 break;
5401 /* Allocate run-time space for this trampoline
5402 (usually in the defining function's stack frame). */
5403 #ifdef ALLOCATE_TRAMPOLINE
5404 tramp = ALLOCATE_TRAMPOLINE (fp);
5405 #else
5406 /* If rounding needed, allocate extra space
5407 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5408 #ifdef TRAMPOLINE_ALIGNMENT
5409 #define TRAMPOLINE_REAL_SIZE \
5410 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5411 #else
5412 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5413 #endif
5414 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5415 fp ? fp : cfun);
5416 #endif
5418 /* Record the trampoline for reuse and note it for later initialization
5419 by expand_function_end. */
5420 if (fp != 0)
5422 push_obstacks (fp->function_maybepermanent_obstack,
5423 fp->function_maybepermanent_obstack);
5424 rtlexp = make_node (RTL_EXPR);
5425 RTL_EXPR_RTL (rtlexp) = tramp;
5426 fp->x_trampoline_list = tree_cons (function, rtlexp,
5427 fp->x_trampoline_list);
5428 pop_obstacks ();
5430 else
5432 /* Make the RTL_EXPR node temporary, not momentary, so that the
5433 trampoline_list doesn't become garbage. */
5434 int momentary = suspend_momentary ();
5435 rtlexp = make_node (RTL_EXPR);
5436 resume_momentary (momentary);
5438 RTL_EXPR_RTL (rtlexp) = tramp;
5439 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5442 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5443 return round_trampoline_addr (tramp);
5446 /* Given a trampoline address,
5447 round it to multiple of TRAMPOLINE_ALIGNMENT. */
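/* Example of the rounding performed here: with TRAMPOLINE_ALIGNMENT
   of 64 bits (8 bytes), an address of 1003 becomes
   (1003 + 7) & -8 == 1008.  */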
5449 static rtx
5450 round_trampoline_addr (tramp)
5451 rtx tramp;
5453 #ifdef TRAMPOLINE_ALIGNMENT
5454 /* Round address up to desired boundary. */
5455 rtx temp = gen_reg_rtx (Pmode);
5456 temp = expand_binop (Pmode, add_optab, tramp,
5457 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5458 temp, 0, OPTAB_LIB_WIDEN);
5459 tramp = expand_binop (Pmode, and_optab, temp,
5460 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5461 temp, 0, OPTAB_LIB_WIDEN);
5462 #endif
5464 return tramp;
5466 /* The functions identify_blocks and reorder_blocks provide a way to
5467 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5468 duplicate portions of the RTL code. Call identify_blocks before
5469 changing the RTL, and call reorder_blocks after. */
5471 /* Put all this function's BLOCK nodes including those that are chained
5472 onto the first block into a vector, and return it.
5473 Also store in each NOTE for the beginning or end of a block
5474 the index of that block in the vector.
5475 The arguments are BLOCK, the chain of top-level blocks of the function,
5476 and INSNS, the insn chain of the function. */
5478 void
5479 identify_blocks (block, insns)
5480 tree block;
5481 rtx insns;
5483 int n_blocks;
5484 tree *block_vector;
5485 tree *block_stack;
5486 int depth = 0;
5487 int current_block_number = 1;
5488 rtx insn;
5490 if (block == 0)
5491 return;
5493 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5494 depth-first order. */
5495 n_blocks = all_blocks (block, 0);
5496 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5497 all_blocks (block, block_vector);
5499 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5501 for (insn = insns; insn; insn = NEXT_INSN (insn))
5502 if (GET_CODE (insn) == NOTE)
5504 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5506 tree b;
5508 /* If there are more block notes than BLOCKs, something
5509 is badly wrong. */
5510 if (current_block_number == n_blocks)
5511 abort ();
5513 b = block_vector[current_block_number++];
5514 NOTE_BLOCK (insn) = b;
5515 block_stack[depth++] = b;
5517 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5519 if (depth == 0)
5520 /* There are more NOTE_INSN_BLOCK_ENDs than
5521 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5522 abort ();
5524 NOTE_BLOCK (insn) = block_stack[--depth];
5528 /* In whole-function mode, we might not have seen the whole function
5529 yet, so we might not use up all the blocks. */
5530 if (n_blocks != current_block_number
5531 && !cfun->x_whole_function_mode_p)
5532 abort ();
5534 free (block_vector);
5535 free (block_stack);
5538 /* Given a revised instruction chain, rebuild the tree structure of
5539 BLOCK nodes to correspond to the new order of RTL. The new block
5540 tree is inserted below TOP_BLOCK. Returns the current top-level
5541 block. */
5543 tree
5544 reorder_blocks (block, insns)
5545 tree block;
5546 rtx insns;
5548 tree current_block = block;
5549 rtx insn;
5551 if (block == NULL_TREE)
5552 return NULL_TREE;
5554 /* Prune the old trees away, so that it doesn't get in the way. */
5555 BLOCK_SUBBLOCKS (current_block) = 0;
5556 BLOCK_CHAIN (current_block) = 0;
5558 for (insn = insns; insn; insn = NEXT_INSN (insn))
5559 if (GET_CODE (insn) == NOTE)
5561 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5563 tree block = NOTE_BLOCK (insn);
5564 /* If we have seen this block before, copy it. */
5565 if (TREE_ASM_WRITTEN (block))
5566 block = copy_node (block);
5567 BLOCK_SUBBLOCKS (block) = 0;
5568 TREE_ASM_WRITTEN (block) = 1;
5569 BLOCK_SUPERCONTEXT (block) = current_block;
5570 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5571 BLOCK_SUBBLOCKS (current_block) = block;
5572 current_block = block;
5574 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5576 BLOCK_SUBBLOCKS (current_block)
5577 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5578 current_block = BLOCK_SUPERCONTEXT (current_block);
5582 BLOCK_SUBBLOCKS (current_block)
5583 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5584 return current_block;
5587 /* Reverse the order of elements in the chain T of blocks,
5588 and return the new head of the chain (old last element). */
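/* For instance, a chain B1 -> B2 -> B3 comes back as B3 -> B2 -> B1,
   with B1 ending the reversed list.  */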
5590 tree
5591 blocks_nreverse (t)
5592 tree t;
5594 register tree prev = 0, decl, next;
5595 for (decl = t; decl; decl = next)
5597 next = BLOCK_CHAIN (decl);
5598 BLOCK_CHAIN (decl) = prev;
5599 prev = decl;
5601 return prev;
5604 /* Count the subblocks of the list starting with BLOCK, and list them
5605 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5606 blocks. */
5608 static int
5609 all_blocks (block, vector)
5610 tree block;
5611 tree *vector;
5613 int n_blocks = 0;
5615 while (block)
5617 TREE_ASM_WRITTEN (block) = 0;
5619 /* Record this block. */
5620 if (vector)
5621 vector[n_blocks] = block;
5623 ++n_blocks;
5625 /* Record the subblocks, and their subblocks... */
5626 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5627 vector ? vector + n_blocks : 0);
5628 block = BLOCK_CHAIN (block);
5630 return n_blocks;
5634 /* Allocate a function structure and reset its contents to the defaults. */
5635 static void
5636 prepare_function_start ()
5638 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5640 init_stmt_for_function ();
5641 init_eh_for_function ();
5643 cse_not_expected = ! optimize;
5645 /* Caller save not needed yet. */
5646 caller_save_needed = 0;
5648 /* No stack slots have been made yet. */
5649 stack_slot_list = 0;
5651 current_function_has_nonlocal_label = 0;
5652 current_function_has_nonlocal_goto = 0;
5654 /* There is no stack slot for handling nonlocal gotos. */
5655 nonlocal_goto_handler_slots = 0;
5656 nonlocal_goto_stack_level = 0;
5658 /* No labels have been declared for nonlocal use. */
5659 nonlocal_labels = 0;
5660 nonlocal_goto_handler_labels = 0;
5662 /* No function calls so far in this function. */
5663 function_call_count = 0;
5665 /* No parm regs have been allocated.
5666 (This is important for output_inline_function.) */
5667 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5669 /* Initialize the RTL mechanism. */
5670 init_emit ();
5672 /* Initialize the queue of pending postincrement and postdecrements,
5673 and some other info in expr.c. */
5674 init_expr ();
5676 /* We haven't done register allocation yet. */
5677 reg_renumber = 0;
5679 init_varasm_status (cfun);
5681 /* Clear out data used for inlining. */
5682 cfun->inlinable = 0;
5683 cfun->original_decl_initial = 0;
5684 cfun->original_arg_vector = 0;
5686 cfun->stack_alignment_needed = 0;
5688 /* Set if a call to setjmp is seen. */
5689 current_function_calls_setjmp = 0;
5691 /* Set if a call to longjmp is seen. */
5692 current_function_calls_longjmp = 0;
5694 current_function_calls_alloca = 0;
5695 current_function_contains_functions = 0;
5696 current_function_is_leaf = 0;
5697 current_function_sp_is_unchanging = 0;
5698 current_function_uses_only_leaf_regs = 0;
5699 current_function_has_computed_jump = 0;
5700 current_function_is_thunk = 0;
5702 current_function_returns_pcc_struct = 0;
5703 current_function_returns_struct = 0;
5704 current_function_epilogue_delay_list = 0;
5705 current_function_uses_const_pool = 0;
5706 current_function_uses_pic_offset_table = 0;
5707 current_function_cannot_inline = 0;
5709 /* We have not yet needed to make a label to jump to for tail-recursion. */
5710 tail_recursion_label = 0;
5712 /* We haven't had a need to make a save area for ap yet. */
5713 arg_pointer_save_area = 0;
5715 /* No stack slots allocated yet. */
5716 frame_offset = 0;
5718 /* No SAVE_EXPRs in this function yet. */
5719 save_expr_regs = 0;
5721 /* No RTL_EXPRs in this function yet. */
5722 rtl_expr_chain = 0;
5724 /* Set up to allocate temporaries. */
5725 init_temp_slots ();
5727 /* Indicate that we need to distinguish between the return value of the
5728 present function and the return value of a function being called. */
5729 rtx_equal_function_value_matters = 1;
5731 /* Indicate that we have not instantiated virtual registers yet. */
5732 virtuals_instantiated = 0;
5734 /* Indicate we have no need of a frame pointer yet. */
5735 frame_pointer_needed = 0;
5737 /* By default assume not varargs or stdarg. */
5738 current_function_varargs = 0;
5739 current_function_stdarg = 0;
5741 /* We haven't made any trampolines for this function yet. */
5742 trampoline_list = 0;
5744 init_pending_stack_adjust ();
5745 inhibit_defer_pop = 0;
5747 current_function_outgoing_args_size = 0;
5749 if (init_lang_status)
5750 (*init_lang_status) (cfun);
5751 if (init_machine_status)
5752 (*init_machine_status) (cfun);
5755 /* Initialize the rtl expansion mechanism so that we can do simple things
5756 like generate sequences. This is used to provide a context during global
5757 initialization of some passes. */
5758 void
5759 init_dummy_function_start ()
5761 prepare_function_start ();
5764 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5765 and initialize static variables for generating RTL for the statements
5766 of the function. */
5768 void
5769 init_function_start (subr, filename, line)
5770 tree subr;
5771 char *filename;
5772 int line;
5774 prepare_function_start ();
5776 /* Remember this function for later. */
5777 cfun->next_global = all_functions;
5778 all_functions = cfun;
5780 current_function_name = (*decl_printable_name) (subr, 2);
5783 /* Nonzero if this is a nested function that uses a static chain. */
5785 current_function_needs_context
5786 = (decl_function_context (current_function_decl) != 0
5787 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5789 /* Within function body, compute a type's size as soon it is laid out. */
5790 immediate_size_expand++;
5792 /* Prevent ever trying to delete the first instruction of a function.
5793 Also tell final how to output a linenum before the function prologue.
5794 Note linenums could be missing, e.g. when compiling a Java .class file. */
5796 emit_line_note (filename, line);
5798 /* Make sure first insn is a note even if we don't want linenums.
5799 This makes sure the first insn will never be deleted.
5800 Also, final expects a note to appear there. */
5801 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5803 /* Set flags used by final.c. */
5804 if (aggregate_value_p (DECL_RESULT (subr)))
5806 #ifdef PCC_STATIC_STRUCT_RETURN
5807 current_function_returns_pcc_struct = 1;
5808 #endif
5809 current_function_returns_struct = 1;
5812 /* Warn if this value is an aggregate type,
5813 regardless of which calling convention we are using for it. */
5814 if (warn_aggregate_return
5815 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5816 warning ("function returns an aggregate");
5818 current_function_returns_pointer
5819 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5822 /* Make sure all values used by the optimization passes have sane
5823 defaults. */
5824 void
5825 init_function_for_compilation ()
5828 /* No prologue/epilogue insns yet. */
5829 prologue = epilogue = 0;
5832 /* Indicate that the current function uses extra args
5833 not explicitly mentioned in the argument list in any fashion. */
5835 void
5836 mark_varargs ()
5838 current_function_varargs = 1;
5841 /* Expand a call to __main at the beginning of a possible main function. */
5843 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5844 #undef HAS_INIT_SECTION
5845 #define HAS_INIT_SECTION
5846 #endif
5848 void
5849 expand_main_function ()
5851 #if !defined (HAS_INIT_SECTION)
5852 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5853 VOIDmode, 0);
5854 #endif /* not HAS_INIT_SECTION */
5857 extern struct obstack permanent_obstack;
5859 /* Start the RTL for a new function, and set variables used for
5861 SUBR is the FUNCTION_DECL node.
5862 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5863 the function's parameters, which must be run at any return statement. */
5865 void
5866 expand_function_start (subr, parms_have_cleanups)
5867 tree subr;
5868 int parms_have_cleanups;
5870 tree tem;
5872 rtx last_ptr = NULL_RTX;
5874 /* Make sure volatile mem refs aren't considered
5875 valid operands of arithmetic insns. */
5876 init_recog_no_volatile ();
5878 /* Set this before generating any memory accesses. */
5879 current_function_check_memory_usage
5880 = (flag_check_memory_usage
5881 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5883 current_function_instrument_entry_exit
5884 = (flag_instrument_function_entry_exit
5885 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5887 current_function_limit_stack
5888 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5890 /* If function gets a static chain arg, store it in the stack frame.
5891 Do this first, so it gets the first stack slot offset. */
5892 if (current_function_needs_context)
5894 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5896 /* Delay copying static chain if it is not a register to avoid
5897 conflicts with regs used for parameters. */
5898 if (! SMALL_REGISTER_CLASSES
5899 || GET_CODE (static_chain_incoming_rtx) == REG)
5900 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5903 /* If the parameters of this function need cleaning up, get a label
5904 for the beginning of the code which executes those cleanups. This must
5905 be done before doing anything with return_label. */
5906 if (parms_have_cleanups)
5907 cleanup_label = gen_label_rtx ();
5911 /* Make the label for return statements to jump to, if this machine
5912 does not have a one-instruction return and uses an epilogue,
5913 or if it returns a structure, or if it has parm cleanups. */
5914 #ifdef HAVE_return
5915 if (cleanup_label == 0 && HAVE_return
5916 && ! current_function_instrument_entry_exit
5917 && ! current_function_returns_pcc_struct
5918 && ! (current_function_returns_struct && ! optimize))
5919 return_label = 0;
5920 else
5921 return_label = gen_label_rtx ();
5922 #else
5923 return_label = gen_label_rtx ();
5924 #endif
5926 /* Initialize rtx used to return the value. */
5927 /* Do this before assign_parms so that we copy the struct value address
5928 before any library calls that assign parms might generate. */
5930 /* Decide whether to return the value in memory or in a register. */
5931 if (aggregate_value_p (DECL_RESULT (subr)))
5933 /* Returning something that won't go in a register. */
5934 register rtx value_address = 0;
5936 #ifdef PCC_STATIC_STRUCT_RETURN
5937 if (current_function_returns_pcc_struct)
5939 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5940 value_address = assemble_static_space (size);
5942 else
5943 #endif
5945 /* Expect to be passed the address of a place to store the value.
5946 If it is passed as an argument, assign_parms will take care of
5947 it. */
5948 if (struct_value_incoming_rtx)
5950 value_address = gen_reg_rtx (Pmode);
5951 emit_move_insn (value_address, struct_value_incoming_rtx);
5954 if (value_address)
5956 DECL_RTL (DECL_RESULT (subr))
5957 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5958 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5959 AGGREGATE_TYPE_P (TREE_TYPE
5960 (DECL_RESULT (subr))));
5964 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5965 /* If return mode is void, this decl rtl should not be used. */
5966 DECL_RTL (DECL_RESULT (subr)) = 0;
5967 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5969 /* If function will end with cleanup code for parms,
5970 compute the return values into a pseudo reg,
5971 which we will copy into the true return register
5972 after the cleanups are done. */
5974 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5976 #ifdef PROMOTE_FUNCTION_RETURN
5977 tree type = TREE_TYPE (DECL_RESULT (subr));
5978 int unsignedp = TREE_UNSIGNED (type);
5980 mode = promote_mode (type, mode, &unsignedp, 1);
5981 #endif
5983 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5986 /* Scalar, returned in a register. */
5988 #ifdef FUNCTION_OUTGOING_VALUE
5989 DECL_RTL (DECL_RESULT (subr))
5990 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5991 #else
5992 DECL_RTL (DECL_RESULT (subr))
5993 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5994 #endif
5996 /* Mark this reg as the function's return value. */
5997 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5999 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6000 /* Needed because we may need to move this to memory
6001 in case it's a named return value whose address is taken. */
6002 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6006 /* Initialize rtx for parameters and local variables.
6007 In some cases this requires emitting insns. */
6009 assign_parms (subr);
6011 /* Copy the static chain now if it wasn't a register. The delay is to
6012 avoid conflicts with the parameter passing registers. */
6014 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6015 if (GET_CODE (static_chain_incoming_rtx) != REG)
6016 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6018 /* The following was moved from init_function_start.
6019 The move is supposed to make sdb output more accurate. */
6020 /* Indicate the beginning of the function body,
6021 as opposed to parm setup. */
6022 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6024 if (GET_CODE (get_last_insn ()) != NOTE)
6025 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6026 parm_birth_insn = get_last_insn ();
6028 context_display = 0;
6029 if (current_function_needs_context)
6031 /* Fetch static chain values for containing functions. */
6032 tem = decl_function_context (current_function_decl);
6033 /* Copy the static chain pointer into a pseudo. If we have
6034 small register classes, copy the value from memory if
6035 static_chain_incoming_rtx is a REG. */
6036 if (tem)
6038 /* If the static chain originally came in a register, put it back
6039 there, then move it out in the next insn. The reason for
6040 this peculiar code is to satisfy function integration. */
6041 if (SMALL_REGISTER_CLASSES
6042 && GET_CODE (static_chain_incoming_rtx) == REG)
6043 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6044 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6047 while (tem)
6049 tree rtlexp = make_node (RTL_EXPR);
6051 RTL_EXPR_RTL (rtlexp) = last_ptr;
6052 context_display = tree_cons (tem, rtlexp, context_display);
6053 tem = decl_function_context (tem);
6054 if (tem == 0)
6055 break;
6056 /* Chain thru stack frames, assuming pointer to next lexical frame
6057 is found at the place we always store it. */
6058 #ifdef FRAME_GROWS_DOWNWARD
6059 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6061 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6062 memory_address (Pmode,
6065 /* If we are not optimizing, ensure that we know that this
6066 piece of context is live over the entire function. */
6068 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
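
  /* Illustrative note (not part of the original source): for doubly
     nested functions such as

	int outer (void)
	{
	  int x;
	  int mid (void)
	  {
	    int inner (void) { return x; }
	    return inner ();
	  }
	  return mid ();
	}

     the loop above starts from the incoming static chain and repeatedly
     loads the saved pointer to the next enclosing lexical frame,
     recording one RTL_EXPR per containing function on context_display
     so that references to `x' from `inner' can be resolved.  */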
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
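
  /* Illustrative note (not part of the original source): with
     -finstrument-functions the libcall emitted above is the moral
     equivalent of beginning the function body with

	__cyg_profile_func_enter (this_fn, call_site);

     where this_fn stands for the address of the current function and
     call_site for its return address; the exact callee rtx comes from
     profile_function_entry_libfunc.  */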
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }
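
  /* Illustrative note (not part of the original source): the pending
     sizes evaluated here come from variable-sized parameter types, e.g.

	void f (int n, int a[n][n]);

     where the size expression for `a' depends on `n' and must be
     computed once at function entry.  */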
  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  free (cfun);
  cfun = 0;
}
/* Emit CODE for each register of the return value.  Useful values for
   CODE are USE and CLOBBER.  */

void
diddle_return_value (code)
     enum rtx_code code;
{
  tree decl_result = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl_result);

  if (return_reg == 0)
    return;

  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    {
      /* Use hard_function_value to avoid creating a reference to a BLKmode
	 register in the USE/CLOBBER insn.  */
      return_reg = hard_function_value (TREE_TYPE (decl_result),
					current_function_decl, 1);
      REG_FUNCTION_VALUE_P (return_reg) = 1;
      emit_insn (gen_rtx_fmt_e (code, VOIDmode, return_reg));
    }
  else if (GET_CODE (return_reg) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (return_reg, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

	  if (GET_CODE (x) == REG
	      && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    emit_insn (gen_rtx_fmt_e (code, VOIDmode, x));
	}
    }
}
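
/* Illustrative note (not part of the original source): for an int-valued
   function on a machine whose return register is hard reg 0,
   diddle_return_value (USE) emits an insn whose pattern is roughly

	(use (reg:SI 0))

   keeping the return register live to the end of the function, while
   diddle_return_value (CLOBBER) emits a (clobber (reg:SI 0)) marking
   its contents as undefined.  */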
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();
#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;

      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
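
  /* Illustrative note (not part of the original source): the pattern
     just used is the standard idiom in this file for generating insns
     out of line and splicing them in at an earlier point:

	start_sequence ();
	... emit insns ...
	seq = gen_sequence ();
	end_sequence ();
	emit_insn_before (seq, where);

     here placing the argument-pointer store just before the
     tail-recursion reentry point.  */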
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();

	  ggc_add_rtx_root (&initial_trampoline, 1);
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
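
  /* Illustrative note (not part of the original source): trampolines are
     needed when the address of a nested function escapes, e.g.

	void g (void (*) (void));
	void f (int x)
	{
	  void h (void) { x++; }
	  g (h);
	}

     Passing `h' to `g' requires a small block of code on f's stack that
     loads f's frame into the static chain register and jumps to h; the
     insns built here initialize that block from the target's trampoline
     template, and they run at entry to f.  */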
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = gen_sequence ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
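
  /* Illustrative note (not part of the original source): the probe above
     roughly touches the stack from STACK_CHECK_PROTECT to
     STACK_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE bytes beyond the
     stack pointer, in the direction of stack growth, so that any callee
     whose frame fits in STACK_CHECK_MAX_FRAME_SIZE cannot silently run
     off the end of the stack.  One probe at entry covers every call in
     the body, hence the `break' after the first CALL_INSN found.  */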
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
	 they are not propagated live to the rest of the function.  This
	 can only happen with functions that drop through; if there had
	 been a return statement, there would have either been a return
	 rtx, or a jump to the return label.  */
      diddle_return_value (CLOBBER);

      emit_label (return_label);
    }
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = NULL_RTX;

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a
	 temporary pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
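
  /* Illustrative note (not part of the original source): when parms have
     cleanups, `return x;' in the body stores x into the pseudo created
     in expand_function_start; only here, after the cleanups have run, is
     that pseudo copied into the hard return register, e.g. (reg:SI 0) on
     a machine whose FUNCTION_VALUE for int is hard reg 0.  */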
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
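
/* Illustrative note (not part of the original source): the result is a
   zero-terminated vector of insn UIDs, so a prologue recorded from a
   three-insn sequence with UIDs 12, 13 and 14 is the array

	{ 12, 13, 14, 0 }

   which `contains' below scans with `for (j = 0; vec[j]; j++)'.  This
   relies on 0 never being the UID of a recorded insn.  */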
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;

      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
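
/* Illustrative note (not part of the original source): passes that must
   not disturb prologue or epilogue insns can guard on this predicate,
   e.g.

	if (! prologue_epilogue_contains (insn))
	  ... transform insn ...

   `contains' counts matches rather than returning a flag so that
   reposition_prologue_and_epilogue_notes below can decrement its
   remaining-insn count correctly when INSN is a SEQUENCE holding several
   of the recorded insns.  */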
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      /* If the exit block has no non-fake predecessors, we don't
	 need an epilogue.  Furthermore, only pay attention to the
	 fallthru predecessors; if (conditional) return insns were
	 generated, by definition we do not need to emit epilogue
	 insns.  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if ((e->flags & EDGE_FAKE) == 0
	    && (e->flags & EDGE_FALLTHRU) != 0)
	  break;
      if (e == NULL)
	goto epilogue_done;

      /* We can't handle multiple epilogues -- if one is needed,
	 we won't be able to place it multiple times.

	 ??? Fix epilogue expanders to not assume they are the
	 last thing done compiling the function.  Either that
	 or copy_rtx each insn.

	 ??? Blah, it's not a simple expression to assert that
	 we've exactly one fallthru exit edge.  */

      bb = e->src;
      tail = bb->end;

      /* ??? If the last insn of the basic block is a jump, then we
	 are creating a new basic block.  Wimp out and leave these
	 insns outside any block.  */
      if (GET_CODE (tail) == JUMP_INSN)
	bb = 0;
      {
	rtx prev, seq, first_use;

	/* Move the USE insns at the end of a function onto a list.  */
	prev = tail;
	if (GET_CODE (prev) == BARRIER
	    || GET_CODE (prev) == NOTE)
	  prev = prev_nonnote_insn (prev);

	first_use = 0;
	if (prev
	    && GET_CODE (prev) == INSN
	    && GET_CODE (PATTERN (prev)) == USE)
	  {
	    /* If the end of the block is the use, grab hold of something
	       else so that we emit barriers etc in the right place.  */
	    if (prev == tail)
	      {
		do
		  tail = PREV_INSN (tail);
		while (GET_CODE (tail) == INSN
		       && GET_CODE (PATTERN (tail)) == USE);
	      }

	    do
	      {
		rtx use = prev;

		prev = prev_nonnote_insn (prev);

		remove_insn (use);
		if (first_use)
		  {
		    NEXT_INSN (use) = first_use;
		    PREV_INSN (first_use) = use;
		  }
		else
		  NEXT_INSN (use) = NULL_RTX;
		first_use = use;
	      }
	    while (prev
		   && GET_CODE (prev) == INSN
		   && GET_CODE (PATTERN (prev)) == USE);
	  }

	/* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	   epilogue insns, the USE insns at the end of a function,
	   the jump insn that returns, and then a BARRIER.  */

	if (GET_CODE (tail) != BARRIER)
	  {
	    prev = next_nonnote_insn (tail);
	    if (!prev || GET_CODE (prev) != BARRIER)
	      emit_barrier_after (tail);
	  }
	seq = gen_epilogue ();
	prev = tail;
	tail = emit_jump_insn_after (seq, tail);

	/* Insert the USE insns immediately before the return insn, which
	   must be the last instruction emitted in the sequence.  */
	if (first_use)
	  emit_insns_before (first_use, tail);
	emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	/* Update the tail of the basic block.  */
	if (bb)
	  bb->end = tail;

	/* Retain a map of the epilogue insns.  */
	epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
epilogue_done:
#endif

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;

		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}
      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
	ggc_mark_rtx (q->modified);
	q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}
/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
	(*mark_machine_status) (f);
      if (mark_lang_status)
	(*mark_lang_status) (f);

      if (f->original_arg_vector)
	ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
	ggc_mark_tree (f->original_decl_initial);
    }
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
		mark_function_chain);
}