/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
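
/* A worked example, assuming two's-complement arithmetic and ALIGN a
   power of two:  FLOOR_ROUND (-13, 8) == -13 & ~7 == -16, the nearest
   multiple of 8 below -13, while CEIL_ROUND (13, 8) == (13 + 7) & ~7
   == 16, the nearest multiple of 8 above 13.  */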

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
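
/* For example, in a GNU C statement expression such as

       x = ({ struct S tmp = f (); g (&tmp); tmp; });

   the value of the final statement may live in a temporary slot.  That
   slot must survive past the end of the statement, so it is treated as
   if it had been allocated at the previous nesting level.  */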

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
					int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int, int, int,
				      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
				  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
				    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int, struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
					struct hash_table *));
static int is_addressof PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? cfun
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  cfun = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != cfun)
    push_obstacks (function->function_obstack,
		   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != cfun)
    pop_obstacks ();

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
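
/* Example uses (a sketch; the modes and sizes are illustrative only):

       rtx r1 = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
       rtx r2 = assign_stack_local (BLKmode, 32, -1);

   R1 is aligned according to DImode; R2 gets BIGGEST_ALIGNMENT and its
   size is rounded up to a multiple of that alignment.  */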

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
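
/* Example (a sketch): for a small scalar type this normally yields a
   pseudo register, while passing MEMORY_REQUIRED forces a stack slot:

       rtx t1 = assign_temp (integer_type_node, 0, 0, 0);
       rtx t2 = assign_temp (integer_type_node, 0, 1, 0);

   T1 is usually a REG; T2 is addressable stack memory.  */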

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if both OLD and NEW are a PLUS and if
     there is a register in common between them.  If so, try a recursive
     call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS || GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
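
/* For instance, with OLD == (plus R1 R2) and NEW == (plus R1 R3), no
   slot may be recorded for OLD itself, but the matching operands R1
   let the code above retry with OLD == R2 and NEW == R3, in case R2
   is a recorded temp-slot address.  */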

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
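
/* The intended save/restore pattern (a sketch):

       int saved_level = get_target_temp_slot_level ();
       ...
       set_target_temp_slot_level (saved_level);  */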

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}

/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     (if it is not) can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  while (note)
	    {
	      if (GET_CODE (note) != INSN_LIST)
		XEXP (note, 0)
		  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	      note = XEXP (note, 1);
	    }
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}

/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */
1766 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1768 enum machine_mode promoted_mode;
1771 struct fixup_replacement **replacements;
1774 register rtx x = *loc;
1775 RTX_CODE code = GET_CODE (x);
1776 register const char *fmt;
1777 register rtx tem, tem1;
1778 struct fixup_replacement *replacement;
1783 if (XEXP (x, 0) == var)
1785 /* Prevent sharing of rtl that might lose. */
1786 rtx sub = copy_rtx (XEXP (var, 0));
1788 if (! validate_change (insn, loc, sub, 0))
1790 rtx y = gen_reg_rtx (GET_MODE (sub));
1793 /* We should be able to replace with a register or all is lost.
1794 Note that we can't use validate_change to verify this, since
1795 we're not caring for replacing all dups simultaneously. */
1796 if (! validate_replace_rtx (*loc, y, insn))
1799 /* Careful! First try to recognize a direct move of the
1800 value, mimicking how things are done in gen_reload wrt
1801 PLUS. Consider what happens when insn is a conditional
1802 move instruction and addsi3 clobbers flags. */
1805 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1806 seq = gen_sequence ();
1809 if (recog_memoized (new_insn) < 0)
1811 /* That failed. Fall back on force_operand and hope. */
1814 force_operand (sub, y);
1815 seq = gen_sequence ();
1820 /* Don't separate setter from user. */
1821 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1822 insn = PREV_INSN (insn);
1825 emit_insn_before (seq, insn);
1833 /* If we already have a replacement, use it. Otherwise,
1834 try to fix up this address in case it is invalid. */
1836 replacement = find_fixup_replacement (replacements, var);
1837 if (replacement->new)
1839 *loc = replacement->new;
1843 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1845 /* Unless we are forcing memory to register or we changed the mode,
1846 we can leave things the way they are if the insn is valid. */
1848 INSN_CODE (insn) = -1;
1849 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1850 && recog_memoized (insn) >= 0)
1853 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1857 /* If X contains VAR, we need to unshare it here so that we update
1858 each occurrence separately. But all identical MEMs in one insn
1859 must be replaced with the same rtx because of the possibility of
1862 if (reg_mentioned_p (var, x))
1864 replacement = find_fixup_replacement (replacements, x);
1865 if (replacement->new == 0)
1866 replacement->new = copy_most_rtx (x, var);
1868 *loc = x = replacement->new;
1884 /* Note that in some cases those types of expressions are altered
1885 by optimize_bit_field, and do not survive to get here. */
1886 if (XEXP (x, 0) == var
1887 || (GET_CODE (XEXP (x, 0)) == SUBREG
1888 && SUBREG_REG (XEXP (x, 0)) == var))
1890 /* Get TEM as a valid MEM in the mode presently in the insn.
1892 We don't worry about the possibility of MATCH_DUP here; it
1893 is highly unlikely and would be tricky to handle. */
1896 if (GET_CODE (tem) == SUBREG)
1898 if (GET_MODE_BITSIZE (GET_MODE (tem))
1899 > GET_MODE_BITSIZE (GET_MODE (var)))
1901 replacement = find_fixup_replacement (replacements, var);
1902 if (replacement->new == 0)
1903 replacement->new = gen_reg_rtx (GET_MODE (var));
1904 SUBREG_REG (tem) = replacement->new;
1907 tem = fixup_memory_subreg (tem, insn, 0);
1910 tem = fixup_stack_1 (tem, insn);
1912 /* Unless we want to load from memory, get TEM into the proper mode
1913 for an extract from memory. This can only be done if the
1914 extract is at a constant position and length. */
1916 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1917 && GET_CODE (XEXP (x, 2)) == CONST_INT
1918 && ! mode_dependent_address_p (XEXP (tem, 0))
1919 && ! MEM_VOLATILE_P (tem))
1921 enum machine_mode wanted_mode = VOIDmode;
1922 enum machine_mode is_mode = GET_MODE (tem);
1923 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1926 if (GET_CODE (x) == ZERO_EXTRACT)
1929 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1930 if (wanted_mode == VOIDmode)
1931 wanted_mode = word_mode;
1935 if (GET_CODE (x) == SIGN_EXTRACT)
1937 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1938 if (wanted_mode == VOIDmode)
1939 wanted_mode = word_mode;
1942 /* If we have a narrower mode, we can do something. */
1943 if (wanted_mode != VOIDmode
1944 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1946 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1947 rtx old_pos = XEXP (x, 2);
1950 /* If the bytes and bits are counted differently, we
1951 must adjust the offset. */
1952 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1953 offset = (GET_MODE_SIZE (is_mode)
1954 - GET_MODE_SIZE (wanted_mode) - offset);
1956 pos %= GET_MODE_BITSIZE (wanted_mode);
1958 newmem = gen_rtx_MEM (wanted_mode,
1959 plus_constant (XEXP (tem, 0), offset));
1960 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1961 MEM_COPY_ATTRIBUTES (newmem, tem);
1963 /* Make the change and see if the insn remains valid. */
1964 INSN_CODE (insn) = -1;
1965 XEXP (x, 0) = newmem;
1966 XEXP (x, 2) = GEN_INT (pos);
1968 if (recog_memoized (insn) >= 0)
1971 /* Otherwise, restore old position. XEXP (x, 0) will be
1973 XEXP (x, 2) = old_pos;
1977 /* If we get here, the bitfield extract insn can't accept a memory
1978 reference. Copy the input into a register. */
1980 tem1 = gen_reg_rtx (GET_MODE (tem));
1981 emit_insn_before (gen_move_insn (tem1, tem), insn);
1988 if (SUBREG_REG (x) == var)
1990 /* If this is a special SUBREG made because VAR was promoted
1991 from a wider mode, replace it with VAR and call ourself
1992 recursively, this time saying that the object previously
1993 had its current mode (by virtue of the SUBREG). */
1995 if (SUBREG_PROMOTED_VAR_P (x))
1998 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2002 /* If this SUBREG makes VAR wider, it has become a paradoxical
2003 SUBREG with VAR in memory, but these aren't allowed at this
2004 stage of the compilation. So load VAR into a pseudo and take
2005 a SUBREG of that pseudo. */
2006 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2008 replacement = find_fixup_replacement (replacements, var);
2009 if (replacement->new == 0)
2010 replacement->new = gen_reg_rtx (GET_MODE (var));
2011 SUBREG_REG (x) = replacement->new;
2015 /* See if we have already found a replacement for this SUBREG.
2016 If so, use it. Otherwise, make a MEM and see if the insn
2017 is recognized. If not, or if we should force MEM into a register,
2018 make a pseudo for this SUBREG. */
2019 replacement = find_fixup_replacement (replacements, x);
2020 if (replacement->new)
2022 *loc = replacement->new;
2026 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2028 INSN_CODE (insn) = -1;
2029 if (! flag_force_mem && recog_memoized (insn) >= 0)
2032 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2038 /* First do special simplification of bit-field references. */
2039 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2040 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2041 optimize_bit_field (x, insn, 0);
2042 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2043 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2044 optimize_bit_field (x, insn, NULL_PTR);
2046 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2047 into a register and then store it back out. */
2048 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2049 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2050 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2051 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2052 > GET_MODE_SIZE (GET_MODE (var))))
2054 replacement = find_fixup_replacement (replacements, var);
2055 if (replacement->new == 0)
2056 replacement->new = gen_reg_rtx (GET_MODE (var));
2058 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2059 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2062 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2063 insn into a pseudo and store the low part of the pseudo into VAR. */
2064 if (GET_CODE (SET_DEST (x)) == SUBREG
2065 && SUBREG_REG (SET_DEST (x)) == var
2066 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2067 > GET_MODE_SIZE (GET_MODE (var))))
2069 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2070 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2077 rtx dest = SET_DEST (x);
2078 rtx src = SET_SRC (x);
2080 rtx outerdest = dest;
2083 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2084 || GET_CODE (dest) == SIGN_EXTRACT
2085 || GET_CODE (dest) == ZERO_EXTRACT)
2086 dest = XEXP (dest, 0);
2088 if (GET_CODE (src) == SUBREG)
2089 src = XEXP (src, 0);
2091 /* If VAR does not appear at the top level of the SET,
2092 just scan the lower levels of the tree. */
2094 if (src != var && dest != var)
2097 /* We will need to rerecognize this insn. */
2098 INSN_CODE (insn) = -1;
2101 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2103 /* Since this case will return, ensure we fix up all the operands here.  */
2105 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2106 insn, replacements);
2107 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2108 insn, replacements);
2109 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2110 insn, replacements);
2112 tem = XEXP (outerdest, 0);
2114 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2115 that may appear inside a ZERO_EXTRACT.
2116 This was legitimate when the MEM was a REG. */
2117 if (GET_CODE (tem) == SUBREG
2118 && SUBREG_REG (tem) == var)
2119 tem = fixup_memory_subreg (tem, insn, 0);
2121 tem = fixup_stack_1 (tem, insn);
2123 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2124 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2125 && ! mode_dependent_address_p (XEXP (tem, 0))
2126 && ! MEM_VOLATILE_P (tem))
2128 enum machine_mode wanted_mode;
2129 enum machine_mode is_mode = GET_MODE (tem);
2130 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2132 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2133 if (wanted_mode == VOIDmode)
2134 wanted_mode = word_mode;
2136 /* If we have a narrower mode, we can do something. */
2137 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2139 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2140 rtx old_pos = XEXP (outerdest, 2);
2143 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2144 offset = (GET_MODE_SIZE (is_mode)
2145 - GET_MODE_SIZE (wanted_mode) - offset);
2147 pos %= GET_MODE_BITSIZE (wanted_mode);
2149 newmem = gen_rtx_MEM (wanted_mode,
2150 plus_constant (XEXP (tem, 0),
2152 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2153 MEM_COPY_ATTRIBUTES (newmem, tem);
2155 /* Make the change and see if the insn remains valid. */
2156 INSN_CODE (insn) = -1;
2157 XEXP (outerdest, 0) = newmem;
2158 XEXP (outerdest, 2) = GEN_INT (pos);
2160 if (recog_memoized (insn) >= 0)
2163 /* Otherwise, restore old position.  XEXP (outerdest, 0) will be restored later.  */
2165 XEXP (outerdest, 2) = old_pos;
2169 /* If we get here, the bit-field store can't accept a memory reference
2170 or the field isn't located at a constant position.  Load the value into
2171 a register, do the store, and put it back into memory. */
2173 tem1 = gen_reg_rtx (GET_MODE (tem));
2174 emit_insn_before (gen_move_insn (tem1, tem), insn);
2175 emit_insn_after (gen_move_insn (tem, tem1), insn);
2176 XEXP (outerdest, 0) = tem1;
2181 /* STRICT_LOW_PART is a no-op on memory references
2182 and it can cause combinations to be unrecognizable, so eliminate it.  */
2185 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2186 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2188 /* A valid insn to copy VAR into or out of a register
2189 must be left alone, to avoid an infinite loop here.
2190 If the reference to VAR is by a subreg, fix that up,
2191 since SUBREG is not valid for a memref.
2192 Also fix up the address of the stack slot.
2194 Note that we must not try to recognize the insn until
2195 after we know that we have valid addresses and no
2196 (subreg (mem ...) ...) constructs, since these interfere
2197 with determining the validity of the insn. */
2199 if ((SET_SRC (x) == var
2200 || (GET_CODE (SET_SRC (x)) == SUBREG
2201 && SUBREG_REG (SET_SRC (x)) == var))
2202 && (GET_CODE (SET_DEST (x)) == REG
2203 || (GET_CODE (SET_DEST (x)) == SUBREG
2204 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2205 && GET_MODE (var) == promoted_mode
2206 && x == single_set (insn))
2210 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2211 if (replacement->new)
2212 SET_SRC (x) = replacement->new;
2213 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2214 SET_SRC (x) = replacement->new
2215 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2217 SET_SRC (x) = replacement->new
2218 = fixup_stack_1 (SET_SRC (x), insn);
2220 if (recog_memoized (insn) >= 0)
2223 /* INSN is not valid, but we know that we want to
2224 copy SET_SRC (x) to SET_DEST (x) in some way. So
2225 we generate the move and see whether it requires more
2226 than one insn. If it does, we emit those insns and
2227 delete INSN.  Otherwise, we can just replace the pattern
2228 of INSN; we have already verified above that INSN has
2229 no other function than to do X.  */
2231 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2232 if (GET_CODE (pat) == SEQUENCE)
2234 emit_insn_after (pat, insn);
2235 PUT_CODE (insn, NOTE);
2236 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2237 NOTE_SOURCE_FILE (insn) = 0;
2240 PATTERN (insn) = pat;
2245 if ((SET_DEST (x) == var
2246 || (GET_CODE (SET_DEST (x)) == SUBREG
2247 && SUBREG_REG (SET_DEST (x)) == var))
2248 && (GET_CODE (SET_SRC (x)) == REG
2249 || (GET_CODE (SET_SRC (x)) == SUBREG
2250 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2251 && GET_MODE (var) == promoted_mode
2252 && x == single_set (insn))
2256 if (GET_CODE (SET_DEST (x)) == SUBREG)
2257 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2259 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2261 if (recog_memoized (insn) >= 0)
2264 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2265 if (GET_CODE (pat) == SEQUENCE)
2267 emit_insn_after (pat, insn);
2268 PUT_CODE (insn, NOTE);
2269 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2270 NOTE_SOURCE_FILE (insn) = 0;
2273 PATTERN (insn) = pat;
2278 /* Otherwise, storing into VAR must be handled specially
2279 by storing into a temporary and copying that into VAR
2280 with a new insn after this one. Note that this case
2281 will be used when storing into a promoted scalar since
2282 the insn will now have different modes on the input
2283 and output and hence will be invalid (except for the case
2284 of setting it to a constant, which does not need any
2285 change if it is valid). We generate extra code in that case,
2286 but combine.c will eliminate it. */
2291 rtx fixeddest = SET_DEST (x);
2293 /* STRICT_LOW_PART can be discarded around a MEM.  */
2294 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2295 fixeddest = XEXP (fixeddest, 0);
2296 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2297 if (GET_CODE (fixeddest) == SUBREG)
2299 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2300 promoted_mode = GET_MODE (fixeddest);
2303 fixeddest = fixup_stack_1 (fixeddest, insn);
2305 temp = gen_reg_rtx (promoted_mode);
2307 emit_insn_after (gen_move_insn (fixeddest,
2308 gen_lowpart (GET_MODE (fixeddest),
2312 SET_DEST (x) = temp;
2320 /* Nothing special about this RTX; fix its operands. */
2322 fmt = GET_RTX_FORMAT (code);
2323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2326 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2330 for (j = 0; j < XVECLEN (x, i); j++)
2331 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2332 insn, replacements);
2337 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2338 return an rtx (MEM:m1 newaddr) which is equivalent.
2339 If any insns must be emitted to compute NEWADDR, put them before INSN.
2341 UNCRITICAL nonzero means accept paradoxical subregs.
2342 This is used for subregs found inside REG_NOTES. */
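/* As an illustration, on a BYTES_BIG_ENDIAN machine with 4-byte words,
(subreg:HI (mem:SI addr) 0) denotes the low-order halfword, which sits
at the higher address, so it becomes (mem:HI (plus addr 2)).  */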
2345 fixup_memory_subreg (x, insn, uncritical)
2350 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2351 rtx addr = XEXP (SUBREG_REG (x), 0);
2352 enum machine_mode mode = GET_MODE (x);
2355 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2356 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2360 if (BYTES_BIG_ENDIAN)
2361 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2362 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2363 addr = plus_constant (addr, offset);
2364 if (!flag_force_addr && memory_address_p (mode, addr))
2365 /* Shortcut if no insns need be emitted. */
2366 return change_address (SUBREG_REG (x), mode, addr);
2368 result = change_address (SUBREG_REG (x), mode, addr);
2369 emit_insn_before (gen_sequence (), insn);
2374 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2375 Replace subexpressions of X in place.
2376 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2377 Otherwise return X, with its contents possibly altered.
2379 If any insns must be emitted to compute NEWADDR, put them before INSN.
2381 UNCRITICAL is as in fixup_memory_subreg. */
2384 walk_fixup_memory_subreg (x, insn, uncritical)
2389 register enum rtx_code code;
2390 register const char *fmt;
2396 code = GET_CODE (x);
2398 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2399 return fixup_memory_subreg (x, insn, uncritical);
2401 /* Nothing special about this RTX; fix its operands. */
2403 fmt = GET_RTX_FORMAT (code);
2404 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2407 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2411 for (j = 0; j < XVECLEN (x, i); j++)
2413 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2419 /* For each memory ref within X, if it refers to a stack slot
2420 with an out of range displacement, put the address in a temp register
2421 (emitting new insns before INSN to load these registers)
2422 and alter the memory ref to use that register.
2423 Replace each such MEM rtx with a copy, to avoid clobberage. */
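/* For instance, on a target whose displacements are limited, a reference
such as (mem:SI (plus (reg fp) (const_int 40000))) would be rewritten so
the sum is first computed into a temporary register and the MEM then
addresses through that register.  */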
2426 fixup_stack_1 (x, insn)
2431 register RTX_CODE code = GET_CODE (x);
2432 register const char *fmt;
2436 register rtx ad = XEXP (x, 0);
2437 /* If we have the address of a stack slot but it's not valid
2438 (displacement is too large), compute the sum in a register. */
2439 if (GET_CODE (ad) == PLUS
2440 && GET_CODE (XEXP (ad, 0)) == REG
2441 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2442 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2443 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2444 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2445 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2447 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2448 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2449 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2450 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2453 if (memory_address_p (GET_MODE (x), ad))
2457 temp = copy_to_reg (ad);
2458 seq = gen_sequence ();
2460 emit_insn_before (seq, insn);
2461 return change_address (x, VOIDmode, temp);
2466 fmt = GET_RTX_FORMAT (code);
2467 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2470 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2474 for (j = 0; j < XVECLEN (x, i); j++)
2475 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2481 /* Optimization: a bit-field instruction whose field
2482 happens to be a byte or halfword in memory
2483 can be changed to a move instruction.
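For example, with little-endian byte and bit numbering, an 8-bit field
at bit position 8 of (mem:SI addr) is simply the byte
(mem:QI (plus addr 1)).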
2485 We call here when INSN is an insn to examine or store into a bit-field.
2486 BODY is the SET-rtx to be altered.
2488 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2489 (Currently this is called only from function.c, and EQUIV_MEM is always 0.)  */
2493 optimize_bit_field (body, insn, equiv_mem)
2498 register rtx bitfield;
2501 enum machine_mode mode;
2503 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2504 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2505 bitfield = SET_DEST (body), destflag = 1;
2507 bitfield = SET_SRC (body), destflag = 0;
2509 /* First check that the field being stored has constant size and position
2510 and is in fact a byte or halfword suitably aligned. */
2512 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2513 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2514 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2516 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2518 register rtx memref = 0;
2520 /* Now check that the containing word is memory, not a register,
2521 and that it is safe to change the machine mode. */
2523 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2524 memref = XEXP (bitfield, 0);
2525 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2527 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2528 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2529 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2530 memref = SUBREG_REG (XEXP (bitfield, 0));
2531 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2533 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2534 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2537 && ! mode_dependent_address_p (XEXP (memref, 0))
2538 && ! MEM_VOLATILE_P (memref))
2540 /* Now adjust the address, first for any subreg'ing
2541 that we are now getting rid of,
2542 and then for which byte of the word is wanted. */
2544 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2547 /* Adjust OFFSET to count bits from low-address byte. */
2548 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2549 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2550 - offset - INTVAL (XEXP (bitfield, 1)));
2552 /* Adjust OFFSET to count bytes from low-address byte. */
2553 offset /= BITS_PER_UNIT;
2554 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2556 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2557 if (BYTES_BIG_ENDIAN)
2558 offset -= (MIN (UNITS_PER_WORD,
2559 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2560 - MIN (UNITS_PER_WORD,
2561 GET_MODE_SIZE (GET_MODE (memref))));
2565 memref = change_address (memref, mode,
2566 plus_constant (XEXP (memref, 0), offset));
2567 insns = get_insns ();
2569 emit_insns_before (insns, insn);
2571 /* Store this memory reference where
2572 we found the bit field reference. */
2576 validate_change (insn, &SET_DEST (body), memref, 1);
2577 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2579 rtx src = SET_SRC (body);
2580 while (GET_CODE (src) == SUBREG
2581 && SUBREG_WORD (src) == 0)
2582 src = SUBREG_REG (src);
2583 if (GET_MODE (src) != GET_MODE (memref))
2584 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2585 validate_change (insn, &SET_SRC (body), src, 1);
2587 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2588 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2589 /* This shouldn't happen because anything that didn't have
2590 one of these modes should have got converted explicitly
2591 and then referenced through a subreg.
2592 This is so because the original bit-field was
2593 handled by agg_mode and so its tree structure had
2594 the same mode that memref now has. */
2599 rtx dest = SET_DEST (body);
2601 while (GET_CODE (dest) == SUBREG
2602 && SUBREG_WORD (dest) == 0
2603 && (GET_MODE_CLASS (GET_MODE (dest))
2604 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2605 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2607 dest = SUBREG_REG (dest);
2609 validate_change (insn, &SET_DEST (body), dest, 1);
2611 if (GET_MODE (dest) == GET_MODE (memref))
2612 validate_change (insn, &SET_SRC (body), memref, 1);
2615 /* Convert the mem ref to the destination mode. */
2616 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2619 convert_move (newreg, memref,
2620 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2624 validate_change (insn, &SET_SRC (body), newreg, 1);
2628 /* See if we can convert this extraction or insertion into
2629 a simple move insn. We might not be able to do so if this
2630 was, for example, part of a PARALLEL.
2632 If we succeed, write out any needed conversions. If we fail,
2633 it is hard to guess why we failed, so don't do anything
2634 special; just let the optimization be suppressed. */
2636 if (apply_change_group () && seq)
2637 emit_insns_before (seq, insn);
2642 /* These routines are responsible for converting virtual register references
2643 to the actual hard register references once RTL generation is complete.
2645 The following five variables are used for communication between the
2646 routines. They contain the offsets of the virtual registers from their
2647 respective hard registers. */
2649 static int in_arg_offset;
2650 static int var_offset;
2651 static int dynamic_offset;
2652 static int out_arg_offset;
2653 static int cfa_offset;
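/* Once computed, virtual_incoming_args_rtx is replaced by
arg_pointer_rtx plus in_arg_offset, virtual_stack_vars_rtx by
frame_pointer_rtx plus var_offset, virtual_stack_dynamic_rtx by
stack_pointer_rtx plus dynamic_offset, virtual_outgoing_args_rtx by
stack_pointer_rtx plus out_arg_offset, and virtual_cfa_rtx by
arg_pointer_rtx plus cfa_offset; see instantiate_virtual_regs_1 below.  */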
2655 /* On most machines, the stack pointer register is equivalent to the bottom of the stack.  */
2658 #ifndef STACK_POINTER_OFFSET
2659 #define STACK_POINTER_OFFSET 0
2662 /* If not defined, pick an appropriate default for the offset of dynamically
2663 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2664 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2666 #ifndef STACK_DYNAMIC_OFFSET
2668 #ifdef ACCUMULATE_OUTGOING_ARGS
2669 /* The bottom of the stack points to the actual arguments. If
2670 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2671 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2672 stack space for register parameters is not pushed by the caller, but
2673 rather part of the fixed stack areas and hence not included in
2674 `current_function_outgoing_args_size'. Nevertheless, we must allow
2675 for it when allocating stack dynamic objects. */
2677 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2678 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2679 (current_function_outgoing_args_size \
2680 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2683 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2684 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2688 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2692 /* On a few machines, the CFA coincides with the arg pointer. */
2694 #ifndef ARG_POINTER_CFA_OFFSET
2695 #define ARG_POINTER_CFA_OFFSET 0
2699 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2700 its address taken. DECL is the decl for the object stored in the
2701 register, for later use if we do need to force REG into the stack.
2702 REG is overwritten by the MEM, as in put_reg_into_stack.  */
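/* After this call REG itself has become (mem (addressof (reg N) ...)),
where (reg N) is a fresh pseudo in REG's old mode; purge_addressof later
either rewrites such MEMs back into register references or forces them
into real stack slots.  */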
2705 gen_mem_addressof (reg, decl)
2709 tree type = TREE_TYPE (decl);
2710 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2713 /* If the original REG was a user-variable, then so is the REG whose
2714 address is being taken. Likewise for unchanging. */
2715 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2716 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2718 PUT_CODE (reg, MEM);
2719 PUT_MODE (reg, DECL_MODE (decl));
2721 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2722 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2723 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2725 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2726 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2731 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2735 flush_addressof (decl)
2738 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2739 && DECL_RTL (decl) != 0
2740 && GET_CODE (DECL_RTL (decl)) == MEM
2741 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2742 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2743 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2747 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2750 put_addressof_into_stack (r, ht)
2752 struct hash_table *ht;
2754 tree decl = ADDRESSOF_DECL (r);
2755 rtx reg = XEXP (r, 0);
2757 if (GET_CODE (reg) != REG)
2760 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2761 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2762 ADDRESSOF_REGNO (r),
2763 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2766 /* List of replacements made below in purge_addressof_1 when creating
2767 bitfield insertions. */
2768 static rtx purge_bitfield_addressof_replacements;
2770 /* List of replacements made below in purge_addressof_1 for patterns
2771 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2772 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2773 the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS alone is not
2774 sufficient in complex cases, e.g. when some field values can be
2775 extracted by using a MEM with a narrower mode. */
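/* Each entry occupies two EXPR_LIST nodes: the first holds the key and
the second holds the replacement rtx plus the link to the next entry.  */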
2776 static rtx purge_addressof_replacements;
2778 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2779 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2780 the stack.  If the function returns FALSE then the replacement could not be completed.  */
2784 purge_addressof_1 (loc, insn, force, store, ht)
2788 struct hash_table *ht;
2794 boolean result = true;
2796 /* Re-start here to avoid recursion in common cases. */
2803 code = GET_CODE (x);
2805 /* If we don't return in any of the cases below, we will recurse inside
2806 the RTX, which will normally result in any ADDRESSOF being forced into memory.  */
2810 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2811 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2815 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2817 /* We must create a copy of the rtx because it was created by
2818 overwriting a REG rtx which is always shared. */
2819 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2822 if (validate_change (insn, loc, sub, 0)
2823 || validate_replace_rtx (x, sub, insn))
2827 sub = force_operand (sub, NULL_RTX);
2828 if (! validate_change (insn, loc, sub, 0)
2829 && ! validate_replace_rtx (x, sub, insn))
2832 insns = gen_sequence ();
2834 emit_insn_before (insns, insn);
2838 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2840 rtx sub = XEXP (XEXP (x, 0), 0);
2843 if (GET_CODE (sub) == MEM)
2845 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2846 MEM_COPY_ATTRIBUTES (sub2, sub);
2847 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2850 else if (GET_CODE (sub) == REG
2851 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2853 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2855 int size_x, size_sub;
2859 /* When processing REG_NOTES look at the list of
2860 replacements done on the insn to find the register that X was replaced by.  */
2864 for (tem = purge_bitfield_addressof_replacements;
2866 tem = XEXP (XEXP (tem, 1), 1))
2867 if (rtx_equal_p (x, XEXP (tem, 0)))
2869 *loc = XEXP (XEXP (tem, 1), 0);
2873 /* See comment for purge_addressof_replacements. */
2874 for (tem = purge_addressof_replacements;
2876 tem = XEXP (XEXP (tem, 1), 1))
2877 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2879 rtx z = XEXP (XEXP (tem, 1), 0);
2881 if (GET_MODE (x) == GET_MODE (z)
2882 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2883 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2886 /* It can happen that the note may speak of things
2887 in a wider (or just different) mode than the
2888 code did. This is especially true of
2891 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2894 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2895 && (GET_MODE_SIZE (GET_MODE (x))
2896 > GET_MODE_SIZE (GET_MODE (z))))
2898 /* This can occur as a result of invalid
2899 pointer casts, e.g. float f; ...
2900 *(long long int *)&f.
2901 ??? We could emit a warning here, but
2902 without a line number that wouldn't be very useful.  */
2904 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2907 z = gen_lowpart (GET_MODE (x), z);
2913 /* Sometimes we may not be able to find the replacement. For
2914 example when the original insn was a MEM in a wider mode,
2915 and the note is part of a sign extension of a narrowed
2916 version of that MEM. Gcc testcase compile/990829-1.c can
2917 generate an example of this situation.  Rather than complain
2918 we return false, which will prompt our caller to remove the offending note.  */
2923 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2924 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2926 /* Don't even consider working with paradoxical subregs,
2927 or the moral equivalent seen here. */
2928 if (size_x <= size_sub
2929 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2931 /* Do a bitfield insertion to mirror what would happen
2938 rtx p = PREV_INSN (insn);
2941 val = gen_reg_rtx (GET_MODE (x));
2942 if (! validate_change (insn, loc, val, 0))
2944 /* Discard the current sequence and put the
2945 ADDRESSOF on the stack.  */
2949 seq = gen_sequence ();
2951 emit_insn_before (seq, insn);
2952 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2956 store_bit_field (sub, size_x, 0, GET_MODE (x),
2957 val, GET_MODE_SIZE (GET_MODE (sub)),
2958 GET_MODE_SIZE (GET_MODE (sub)));
2960 /* Make sure to unshare any shared rtl that store_bit_field
2961 might have created. */
2962 for (p = get_insns(); p; p = NEXT_INSN (p))
2964 reset_used_flags (PATTERN (p));
2965 reset_used_flags (REG_NOTES (p));
2966 reset_used_flags (LOG_LINKS (p));
2968 unshare_all_rtl (get_insns ());
2970 seq = gen_sequence ();
2972 p = emit_insn_after (seq, insn);
2973 if (NEXT_INSN (insn))
2974 compute_insns_for_mem (NEXT_INSN (insn),
2975 p ? NEXT_INSN (p) : NULL_RTX,
2980 rtx p = PREV_INSN (insn);
2983 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2984 GET_MODE (x), GET_MODE (x),
2985 GET_MODE_SIZE (GET_MODE (sub)),
2986 GET_MODE_SIZE (GET_MODE (sub)));
2988 if (! validate_change (insn, loc, val, 0))
2990 /* Discard the current sequence and put the
2991 ADDRESSOF on the stack.  */
2996 seq = gen_sequence ();
2998 emit_insn_before (seq, insn);
2999 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3003 /* Remember the replacement so that the same one can be done
3004 on the REG_NOTES. */
3005 purge_bitfield_addressof_replacements
3006 = gen_rtx_EXPR_LIST (VOIDmode, x,
3009 purge_bitfield_addressof_replacements));
3011 /* We replaced with a reg -- all done. */
3016 else if (validate_change (insn, loc, sub, 0))
3018 /* Remember the replacement so that the same one can be done
3019 on the REG_NOTES. */
3020 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3024 for (tem = purge_addressof_replacements;
3026 tem = XEXP (XEXP (tem, 1), 1))
3027 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3029 XEXP (XEXP (tem, 1), 0) = sub;
3032 purge_addressof_replacements
3033 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3034 gen_rtx_EXPR_LIST (VOIDmode, sub,
3035 purge_addressof_replacements));
3041 /* else give up and put it into the stack */
3044 else if (code == ADDRESSOF)
3046 put_addressof_into_stack (x, ht);
3049 else if (code == SET)
3051 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3052 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3056 /* Scan all subexpressions. */
3057 fmt = GET_RTX_FORMAT (code);
3058 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3061 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3062 else if (*fmt == 'E')
3063 for (j = 0; j < XVECLEN (x, i); j++)
3064 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3070 /* Return a new hash table entry in HT. */
3072 static struct hash_entry *
3073 insns_for_mem_newfunc (he, ht, k)
3074 struct hash_entry *he;
3075 struct hash_table *ht;
3076 hash_table_key k ATTRIBUTE_UNUSED;
3078 struct insns_for_mem_entry *ifmhe;
3082 ifmhe = ((struct insns_for_mem_entry *)
3083 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3084 ifmhe->insns = NULL_RTX;
3089 /* Return a hash value for K, a REG. */
3091 static unsigned long
3092 insns_for_mem_hash (k)
3095 /* K is really an RTX.  Just use the address as the hash value.  */
3096 return (unsigned long) k;
3099 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3102 insns_for_mem_comp (k1, k2)
3109 struct insns_for_mem_walk_info {
3110 /* The hash table that we are using to record which INSNs use which
3112 struct hash_table *ht;
3114 /* The INSN we are currently processing.  */
3117 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3118 to find the insns that use the REGs in the ADDRESSOFs. */
3122 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3123 that might be used in an ADDRESSOF expression, record this INSN in
3124 the hash table given by DATA (which is really a pointer to an
3125 insns_for_mem_walk_info structure). */
3128 insns_for_mem_walk (r, data)
3132 struct insns_for_mem_walk_info *ifmwi
3133 = (struct insns_for_mem_walk_info *) data;
3135 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3136 && GET_CODE (XEXP (*r, 0)) == REG)
3137 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3138 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3140 /* Look up this REG in the hash table, creating an entry if necessary.  */
3141 struct insns_for_mem_entry *ifme
3142 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3147 /* If we have not already recorded this INSN, do so now. Since
3148 we process the INSNs in order, we know that if we have
3149 recorded it, it must be at the front of the list.  */
3150 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3152 /* We do the allocation on the same obstack as is used for
3153 the hash table since this memory will not be used once
3154 the hash table is deallocated. */
3155 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3156 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3165 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3166 which REGs in HT. */
3169 compute_insns_for_mem (insns, last_insn, ht)
3172 struct hash_table *ht;
3175 struct insns_for_mem_walk_info ifmwi;
3178 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3179 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3180 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3183 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3187 /* Helper function for purge_addressof called through for_each_rtx.
3188 Returns true iff the rtl is an ADDRESSOF. */
3190 is_addressof (rtl, data)
3192 void * data ATTRIBUTE_UNUSED;
3194 return GET_CODE (* rtl) == ADDRESSOF;
3197 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3198 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3202 purge_addressof (insns)
3206 struct hash_table ht;
3208 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3209 requires a fixup pass over the instruction stream to correct
3210 INSNs that depended on the REG being a REG, and not a MEM. But,
3211 these fixup passes are slow.  Furthermore, most MEMs are not
3212 mentioned in very many instructions. So, we speed up the process
3213 by pre-calculating which REGs occur in which INSNs; that allows
3214 us to perform the fixup passes much more quickly. */
3215 hash_table_init (&ht,
3216 insns_for_mem_newfunc,
3218 insns_for_mem_comp);
3219 compute_insns_for_mem (insns, NULL_RTX, &ht);
3221 for (insn = insns; insn; insn = NEXT_INSN (insn))
3222 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3223 || GET_CODE (insn) == CALL_INSN)
3225 if (! purge_addressof_1 (&PATTERN (insn), insn,
3226 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3227 /* If we could not replace the ADDRESSOFs in the insn,
3228 something is wrong. */
3231 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3233 /* If we could not replace the ADDRESSOFs in the insn's notes,
3234 we can just remove the offending notes instead. */
3237 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3239 /* If we find a REG_RETVAL note then the insn is a libcall.
3240 Such insns must have REG_EQUAL notes as well, in order
3241 for later passes of the compiler to work. So it is not
3242 safe to delete the notes here, and instead we abort. */
3243 if (REG_NOTE_KIND (note) == REG_RETVAL)
3245 if (for_each_rtx (& note, is_addressof, NULL))
3246 remove_note (insn, note);
3252 hash_table_free (&ht);
3253 purge_bitfield_addressof_replacements = 0;
3254 purge_addressof_replacements = 0;
3257 /* Pass through the INSNS of function FNDECL and convert virtual register
3258 references to hard register references. */
3261 instantiate_virtual_regs (fndecl, insns)
3268 /* Compute the offsets to use for this function. */
3269 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3270 var_offset = STARTING_FRAME_OFFSET;
3271 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3272 out_arg_offset = STACK_POINTER_OFFSET;
3273 cfa_offset = ARG_POINTER_CFA_OFFSET;
3275 /* Scan all variables and parameters of this function. For each that is
3276 in memory, instantiate all virtual registers if the result is a valid
3277 address. If not, we do it later. That will handle most uses of virtual
3278 regs on many machines. */
3279 instantiate_decls (fndecl, 1);
3281 /* Initialize recognition, indicating that volatile is OK. */
3284 /* Scan through all the insns, instantiating every virtual register still present.  */
3286 for (insn = insns; insn; insn = NEXT_INSN (insn))
3287 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3288 || GET_CODE (insn) == CALL_INSN)
3290 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3291 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3294 /* Instantiate the stack slots for the parm registers, for later use in
3295 addressof elimination. */
3296 for (i = 0; i < max_parm_reg; ++i)
3297 if (parm_reg_stack_loc[i])
3298 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3300 /* Now instantiate the remaining register equivalences for debugging info.
3301 These will not be valid addresses. */
3302 instantiate_decls (fndecl, 0);
3304 /* Indicate that, from now on, assign_stack_local should use
3305 frame_pointer_rtx. */
3306 virtuals_instantiated = 1;
3309 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3310 all virtual registers in their DECL_RTL's.
3312 If VALID_ONLY, do this only if the resulting address is still valid.
3313 Otherwise, always do it. */
3316 instantiate_decls (fndecl, valid_only)
3322 if (DECL_SAVED_INSNS (fndecl))
3323 /* When compiling an inline function, the obstack used for
3324 rtl allocation is the maybepermanent_obstack. Calling
3325 `resume_temporary_allocation' switches us back to that
3326 obstack while we process this function's parameters. */
3327 resume_temporary_allocation ();
3329 /* Process all parameters of the function. */
3330 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3332 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3334 instantiate_decl (DECL_RTL (decl), size, valid_only);
3336 /* If the parameter was promoted, then the incoming RTL mode may be
3337 larger than the declared type size.  We must use the larger of the two sizes.  */
3339 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3340 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3343 /* Now process all variables defined in the function or its subblocks. */
3344 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3346 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3348 /* Save all rtl allocated for this function by raising the
3349 high-water mark on the maybepermanent_obstack. */
3351 /* All further rtl allocation is now done in the current_obstack. */
3352 rtl_in_current_obstack ();
3356 /* Subroutine of instantiate_decls: Process all decls in the given
3357 BLOCK node and all its subblocks. */
3360 instantiate_decls_1 (let, valid_only)
3366 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3367 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3370 /* Process all subblocks. */
3371 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3372 instantiate_decls_1 (t, valid_only);
3375 /* Subroutine of the preceding procedures: Given RTL representing a
3376 decl and the size of the object, do any instantiation required.
3378 If VALID_ONLY is non-zero, it means that the RTL should only be
3379 changed if the new address is valid. */
3382 instantiate_decl (x, size, valid_only)
3387 enum machine_mode mode;
3390 /* If this is not a MEM, no need to do anything. Similarly if the
3391 address is a constant or a register that is not a virtual register. */
3393 if (x == 0 || GET_CODE (x) != MEM)
3397 if (CONSTANT_P (addr)
3398 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3399 || (GET_CODE (addr) == REG
3400 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3401 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3404 /* If we should only do this if the address is valid, copy the address.
3405 We need to do this so we can undo any changes that might make the
3406 address invalid.  This copy is unfortunate, but probably can't be avoided.  */
3410 addr = copy_rtx (addr);
3412 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3416 /* Now verify that the resulting address is valid for every integer or
3417 floating-point mode up to and including SIZE bytes long. We do this
3418 since the object might be accessed in any mode and frame addresses are shared.  */
3421 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3422 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3423 mode = GET_MODE_WIDER_MODE (mode))
3424 if (! memory_address_p (mode, addr))
3427 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3428 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3429 mode = GET_MODE_WIDER_MODE (mode))
3430 if (! memory_address_p (mode, addr))
3434 /* Put back the address now that we have updated it and we either know
3435 it is valid or we don't care whether it is valid. */
3440 /* Given a pointer to a piece of rtx and an optional pointer to the
3441 containing object, instantiate any virtual registers present in it.
3443 If EXTRA_INSNS, we always do the replacement and generate
3444 any extra insns before OBJECT.  If it is zero, we do nothing if replacement is not valid.
3447 Return 1 if we either had nothing to do or if we were able to do the
3448 needed replacement. Return 0 otherwise; we only return zero if
3449 EXTRA_INSNS is zero.
3451 We first try some simple transformations to avoid the creation of extra pseudos.  */
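/* For example, (plus:SI (reg virtual_stack_vars) (const_int 4)) is
rewritten in place as (plus:SI (reg frame_pointer) (const_int C)),
C being var_offset + 4, when the containing insn still recognizes;
otherwise the sum is computed into a fresh pseudo before OBJECT.  */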
3455 instantiate_virtual_regs_1 (loc, object, extra_insns)
3463 HOST_WIDE_INT offset = 0;
3469 /* Re-start here to avoid recursion in common cases. */
3476 code = GET_CODE (x);
3478 /* Check for some special cases. */
3495 /* We are allowed to set the virtual registers. This means that
3496 the actual register should receive the source minus the
3497 appropriate offset. This is used, for example, in the handling
3498 of non-local gotos. */
3499 if (SET_DEST (x) == virtual_incoming_args_rtx)
3500 new = arg_pointer_rtx, offset = - in_arg_offset;
3501 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3502 new = frame_pointer_rtx, offset = - var_offset;
3503 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3504 new = stack_pointer_rtx, offset = - dynamic_offset;
3505 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3506 new = stack_pointer_rtx, offset = - out_arg_offset;
3507 else if (SET_DEST (x) == virtual_cfa_rtx)
3508 new = arg_pointer_rtx, offset = - cfa_offset;
3512 rtx src = SET_SRC (x);
3514 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3516 /* The only valid sources here are PLUS or REG. Just do
3517 the simplest possible thing to handle them. */
3518 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3522 if (GET_CODE (src) != REG)
3523 temp = force_operand (src, NULL_RTX);
3526 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3530 emit_insns_before (seq, object);
3533 if (! validate_change (object, &SET_SRC (x), temp, 0)
3540 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3545 /* Handle special case of virtual register plus constant. */
3546 if (CONSTANT_P (XEXP (x, 1)))
3548 rtx old, new_offset;
3550 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3551 if (GET_CODE (XEXP (x, 0)) == PLUS)
3553 rtx inner = XEXP (XEXP (x, 0), 0);
3555 if (inner == virtual_incoming_args_rtx)
3556 new = arg_pointer_rtx, offset = in_arg_offset;
3557 else if (inner == virtual_stack_vars_rtx)
3558 new = frame_pointer_rtx, offset = var_offset;
3559 else if (inner == virtual_stack_dynamic_rtx)
3560 new = stack_pointer_rtx, offset = dynamic_offset;
3561 else if (inner == virtual_outgoing_args_rtx)
3562 new = stack_pointer_rtx, offset = out_arg_offset;
3563 else if (inner == virtual_cfa_rtx)
3564 new = arg_pointer_rtx, offset = cfa_offset;
3571 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3573 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3576 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3577 new = arg_pointer_rtx, offset = in_arg_offset;
3578 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3579 new = frame_pointer_rtx, offset = var_offset;
3580 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3581 new = stack_pointer_rtx, offset = dynamic_offset;
3582 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3583 new = stack_pointer_rtx, offset = out_arg_offset;
3584 else if (XEXP (x, 0) == virtual_cfa_rtx)
3585 new = arg_pointer_rtx, offset = cfa_offset;
3588 /* We know the second operand is a constant. Unless the
3589 first operand is a REG (which has already been checked),
3590 it needs to be checked. */
3591 if (GET_CODE (XEXP (x, 0)) != REG)
3599 new_offset = plus_constant (XEXP (x, 1), offset);
3601 /* If the new constant is zero, try to replace the sum with just the register.  */
3603 if (new_offset == const0_rtx
3604 && validate_change (object, loc, new, 0))
3607 /* Next try to replace the register and new offset.
3608 There are two changes to validate here and we can't assume that
3609 in the case where the old offset equals the new, just changing the register
3610 will yield a valid insn. In the interests of a little efficiency,
3611 however, we only call validate change once (we don't queue up the
3612 changes and then call apply_change_group). */
3616 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3617 : (XEXP (x, 0) = new,
3618 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3626 /* Otherwise copy the new constant into a register and replace
3627 the constant with that register.  */
3628 temp = gen_reg_rtx (Pmode);
3630 if (validate_change (object, &XEXP (x, 1), temp, 0))
3631 emit_insn_before (gen_move_insn (temp, new_offset), object);
3634 /* If that didn't work, replace this expression with a
3635 register containing the sum. */
3638 new = gen_rtx_PLUS (Pmode, new, new_offset);
3641 temp = force_operand (new, NULL_RTX);
3645 emit_insns_before (seq, object);
3646 if (! validate_change (object, loc, temp, 0)
3647 && ! validate_replace_rtx (x, temp, object))
3655 /* Fall through to generic two-operand expression case. */
3661 case DIV: case UDIV:
3662 case MOD: case UMOD:
3663 case AND: case IOR: case XOR:
3664 case ROTATERT: case ROTATE:
3665 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3667 case GE: case GT: case GEU: case GTU:
3668 case LE: case LT: case LEU: case LTU:
3669 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3670 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3675 /* Most cases of MEM that convert to valid addresses have already been
3676 handled by our scan of decls. The only special handling we
3677 need here is to make a copy of the rtx to ensure it isn't being
3678 shared if we have to change it to a pseudo.
3680 If the rtx is a simple reference to an address via a virtual register,
3681 it can potentially be shared. In such cases, first try to make it
3682 a valid address, which can also be shared. Otherwise, copy it and
3685 First check for common cases that need no processing. These are
3686 usually due to instantiation already being done on a previous instance of a shared rtx.  */
3690 if (CONSTANT_ADDRESS_P (temp)
3691 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3692 || temp == arg_pointer_rtx
3694 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3695 || temp == hard_frame_pointer_rtx
3697 || temp == frame_pointer_rtx)
3700 if (GET_CODE (temp) == PLUS
3701 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3702 && (XEXP (temp, 0) == frame_pointer_rtx
3703 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3704 || XEXP (temp, 0) == hard_frame_pointer_rtx
3706 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3707 || XEXP (temp, 0) == arg_pointer_rtx
3712 if (temp == virtual_stack_vars_rtx
3713 || temp == virtual_incoming_args_rtx
3714 || (GET_CODE (temp) == PLUS
3715 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3716 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3717 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3719 /* This MEM may be shared. If the substitution can be done without
3720 the need to generate new pseudos, we want to do it in place
3721 so all copies of the shared rtx benefit. The call below will
3722 only make substitutions if the resulting address is still valid.
3725 Note that we cannot pass X as the object in the recursive call
3726 since the insn being processed may not allow all valid
3727 addresses.  However, if we were not passed an object, we can
3728 only modify X without copying it if X will have a valid address.
3731 ??? Also note that this can still lose if OBJECT is an insn that
3732 has fewer restrictions on an address than some other insn.
3733 In that case, we will modify the shared address. This case
3734 doesn't seem very likely, though. One case where this could
3735 happen is in the case of a USE or CLOBBER reference, but we
3736 take care of that below. */
3738 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3739 object ? object : x, 0))
3742 /* Otherwise make a copy and process that copy. We copy the entire
3743 RTL expression since it might be a PLUS which could also be shared.  */
3745 *loc = x = copy_rtx (x);
3748 /* Fall through to generic unary operation case. */
3750 case STRICT_LOW_PART:
3752 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3753 case SIGN_EXTEND: case ZERO_EXTEND:
3754 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3755 case FLOAT: case FIX:
3756 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3760 /* These cases either have just one operand or we know that we need not
3761 check the rest of the operands. */
3767 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3768 go ahead and make the invalid one, but do it to a copy. For a REG,
3769 just make the recursive call, since there's no chance of a problem. */
3771 if ((GET_CODE (XEXP (x, 0)) == MEM
3772 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3774 || (GET_CODE (XEXP (x, 0)) == REG
3775 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3778 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3783 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3784 in front of this insn and substitute the temporary. */
3785 if (x == virtual_incoming_args_rtx)
3786 new = arg_pointer_rtx, offset = in_arg_offset;
3787 else if (x == virtual_stack_vars_rtx)
3788 new = frame_pointer_rtx, offset = var_offset;
3789 else if (x == virtual_stack_dynamic_rtx)
3790 new = stack_pointer_rtx, offset = dynamic_offset;
3791 else if (x == virtual_outgoing_args_rtx)
3792 new = stack_pointer_rtx, offset = out_arg_offset;
3793 else if (x == virtual_cfa_rtx)
3794 new = arg_pointer_rtx, offset = cfa_offset;
3798 temp = plus_constant (new, offset);
3799 if (!validate_change (object, loc, temp, 0))
3805 temp = force_operand (temp, NULL_RTX);
3809 emit_insns_before (seq, object);
3810 if (! validate_change (object, loc, temp, 0)
3811 && ! validate_replace_rtx (x, temp, object))
3819 if (GET_CODE (XEXP (x, 0)) == REG)
3822 else if (GET_CODE (XEXP (x, 0)) == MEM)
3824 /* If we have a (addressof (mem ..)), do any instantiation inside
3825 since we know we'll be making the inside valid when we finally
3826 remove the ADDRESSOF. */
3827 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3836 /* Scan all subexpressions. */
3837 fmt = GET_RTX_FORMAT (code);
3838 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3841 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3844 else if (*fmt == 'E')
3845 for (j = 0; j < XVECLEN (x, i); j++)
3846 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3853 /* Optimization: assuming this function does not receive nonlocal gotos,
3854 delete the handlers for such, as well as the insns to establish
3855 and disestablish them. */
3861 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3863 /* Delete the handler by turning off the flag that would
3864 prevent jump_optimize from deleting it.
3865 Also permit deletion of the nonlocal labels themselves
3866 if nothing local refers to them. */
3867 if (GET_CODE (insn) == CODE_LABEL)
3871 LABEL_PRESERVE_P (insn) = 0;
3873 /* Remove it from the nonlocal_label list, to avoid confusing flow.  */
3875 for (t = nonlocal_labels, last_t = 0; t;
3876 last_t = t, t = TREE_CHAIN (t))
3877 if (DECL_RTL (TREE_VALUE (t)) == insn)
3882 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3884 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3887 if (GET_CODE (insn) == INSN)
3891 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3892 if (reg_mentioned_p (t, PATTERN (insn)))
3898 || (nonlocal_goto_stack_level != 0
3899 && reg_mentioned_p (nonlocal_goto_stack_level,
3906 /* Output a USE for any register use in RTL.
3907 This is used with -noreg to mark the extent of lifespan
3908 of any registers used in a user-visible variable's DECL_RTL. */
3914 if (GET_CODE (rtl) == REG)
3915 /* This is a register variable. */
3916 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3917 else if (GET_CODE (rtl) == MEM
3918 && GET_CODE (XEXP (rtl, 0)) == REG
3919 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3920 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3921 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3922 /* This is a variable-sized structure. */
3923 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3926 /* Like use_variable except that it outputs the USEs after INSN
3927 instead of at the end of the insn-chain. */
3930 use_variable_after (rtl, insn)
3933 if (GET_CODE (rtl) == REG)
3934 /* This is a register variable. */
3935 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3936 else if (GET_CODE (rtl) == MEM
3937 && GET_CODE (XEXP (rtl, 0)) == REG
3938 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3939 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3940 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3941 /* This is a variable-sized structure. */
3942 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3948 return max_parm_reg;
3951 /* Return the first insn following those generated by `assign_parms'. */
3954 get_first_nonparm_insn ()
3957 return NEXT_INSN (last_parm_insn);
3958 return get_insns ();
3961 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3962 Crash if there is none. */
3965 get_first_block_beg ()
3967 register rtx searcher;
3968 register rtx insn = get_first_nonparm_insn ();
3970 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3971 if (GET_CODE (searcher) == NOTE
3972 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3975 abort (); /* Invalid call to this function. (See comments above.) */
3979 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3980 This means a type for which function calls must pass an address to the
3981 function or get an address back from the function.
3982 EXP may be a type node or an expression (whose type is tested). */
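/* For instance, a structure too large for the target's return registers
makes this function return 1, and the caller is then given a hidden
address argument to store the result through; see assign_parms below.  */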
3985 aggregate_value_p (exp)
3988 int i, regno, nregs;
3991 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3994 type = TREE_TYPE (exp);
3996 if (RETURN_IN_MEMORY (type))
3998 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3999 and thus can't be returned in registers. */
4000 if (TREE_ADDRESSABLE (type))
4002 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4004 /* Make sure we have suitable call-clobbered regs to return
4005 the value in; if not, we must return it in memory. */
4006 reg = hard_function_value (type, 0, 0);
4008 /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK.  */
4010 if (GET_CODE (reg) != REG)
4013 regno = REGNO (reg);
4014 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4015 for (i = 0; i < nregs; i++)
4016 if (! call_used_regs[regno + i])
4021 /* Assign RTL expressions to the function's parameters.
4022 This may involve copying them into registers and using
4023 those registers as the RTL for them. */
4026 assign_parms (fndecl)
4030 register rtx entry_parm = 0;
4031 register rtx stack_parm = 0;
4032 CUMULATIVE_ARGS args_so_far;
4033 enum machine_mode promoted_mode, passed_mode;
4034 enum machine_mode nominal_mode, promoted_nominal_mode;
4036 /* Total space needed so far for args on the stack,
4037 given as a constant and a tree-expression. */
4038 struct args_size stack_args_size;
4039 tree fntype = TREE_TYPE (fndecl);
4040 tree fnargs = DECL_ARGUMENTS (fndecl);
4041 /* This is used for the arg pointer when referring to stack args. */
4042 rtx internal_arg_pointer;
4043 /* This is a dummy PARM_DECL that we use for the function result if
4044 the function returns a structure. */
4045 tree function_result_decl = 0;
4046 #ifdef SETUP_INCOMING_VARARGS
4047 int varargs_setup = 0;
4049 rtx conversion_insns = 0;
4050 struct args_size alignment_pad;
4052 /* Nonzero if the last arg is named `__builtin_va_alist',
4053 which is used on some machines for old-fashioned non-ANSI varargs.h;
4054 this should be stuck onto the stack as if it had arrived there. */
4056 = (current_function_varargs
4058 && (parm = tree_last (fnargs)) != 0
4060 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4061 "__builtin_va_alist")));
4063 /* Nonzero if function takes extra anonymous args.
4064 This means the last named arg must be on the stack
4065 right before the anonymous ones. */
4067 = (TYPE_ARG_TYPES (fntype) != 0
4068 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4069 != void_type_node));
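/* For example, `int f (int, ...)' has an argument type list that does
not end in void_type_node, so STDARG is nonzero; the list for
`int f (int)' ends in void_type_node and is not treated as variadic.  */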
4071 current_function_stdarg = stdarg;
4073 /* If the reg that the virtual arg pointer will be translated into is
4074 not a fixed reg or is the stack pointer, make a copy of the virtual
4075 arg pointer, and address parms via the copy. The frame pointer is
4076 considered fixed even though it is not marked as such.
4078 The second time through, simply use ap to avoid generating rtx. */
4080 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4081 || ! (fixed_regs[ARG_POINTER_REGNUM]
4082 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4083 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4085 internal_arg_pointer = virtual_incoming_args_rtx;
4086 current_function_internal_arg_pointer = internal_arg_pointer;
4088 stack_args_size.constant = 0;
4089 stack_args_size.var = 0;
4091 /* If struct value address is treated as the first argument, make it so. */
4092 if (aggregate_value_p (DECL_RESULT (fndecl))
4093 && ! current_function_returns_pcc_struct
4094 && struct_value_incoming_rtx == 0)
4096 tree type = build_pointer_type (TREE_TYPE (fntype));
4098 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4100 DECL_ARG_TYPE (function_result_decl) = type;
4101 TREE_CHAIN (function_result_decl) = fnargs;
4102 fnargs = function_result_decl;
4105 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4106 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4108 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4109 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4111 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4114 /* We haven't yet found an argument that we must push and pretend the caller did.  */
4116 current_function_pretend_args_size = 0;
4118 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4120 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4121 struct args_size stack_offset;
4122 struct args_size arg_size;
4123 int passed_pointer = 0;
4124 int did_conversion = 0;
4125 tree passed_type = DECL_ARG_TYPE (parm);
4126 tree nominal_type = TREE_TYPE (parm);
4129 /* Set LAST_NAMED if this is the last named arg before some anonymous args.  */
4131 int last_named = ((TREE_CHAIN (parm) == 0
4132 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4133 && (stdarg || current_function_varargs));
4134 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4135 most machines, if this is a varargs/stdarg function, then we treat
4136 the last named arg as if it were anonymous too. */
4137 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4139 if (TREE_TYPE (parm) == error_mark_node
4140 /* This can happen after weird syntax errors
4141 or if an enum type is defined among the parms. */
4142 || TREE_CODE (parm) != PARM_DECL
4143 || passed_type == NULL)
4144 {
4145 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4146 = gen_rtx_MEM (BLKmode, const0_rtx);
4147 TREE_USED (parm) = 1;
4148 continue;
4149 }
4151 /* For varargs.h function, save info about regs and stack space
4152 used by the individual args, not including the va_alist arg. */
4153 if (hide_last_arg && last_named)
4154 current_function_args_info = args_so_far;
4156 /* Find mode of arg as it is passed, and mode of arg
4157 as it should be during execution of this function. */
4158 passed_mode = TYPE_MODE (passed_type);
4159 nominal_mode = TYPE_MODE (nominal_type);
4161 /* If the parm's mode is VOID, its value doesn't matter,
4162 and avoid the usual things like emit_move_insn that could crash. */
4163 if (nominal_mode == VOIDmode)
4164 {
4165 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4166 continue;
4167 }
4169 /* If the parm is to be passed as a transparent union, use the
4170 type of the first field for the tests below. We have already
4171 verified that the modes are the same. */
4172 if (DECL_TRANSPARENT_UNION (parm)
4173 || TYPE_TRANSPARENT_UNION (passed_type))
4174 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4176 /* See if this arg was passed by invisible reference. It is if
4177 it is an object whose size depends on the contents of the
4178 object itself or if the machine requires these objects be passed
4179 that way.  */
4181 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4182 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4183 || TREE_ADDRESSABLE (passed_type)
4184 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4185 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4186 passed_type, named_arg)
4187 #endif
4188 )
4189 {
4190 passed_type = nominal_type = build_pointer_type (passed_type);
4191 passed_pointer = 1;
4192 passed_mode = nominal_mode = Pmode;
4193 }
4195 promoted_mode = passed_mode;
4197 #ifdef PROMOTE_FUNCTION_ARGS
4198 /* Compute the mode to which the arg is actually extended.  */
4199 unsignedp = TREE_UNSIGNED (passed_type);
4200 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4201 #endif
4203 /* Let machine desc say which reg (if any) the parm arrives in.
4204 0 means it arrives on the stack. */
4205 #ifdef FUNCTION_INCOMING_ARG
4206 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4207 passed_type, named_arg);
4208 #else
4209 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4210 passed_type, named_arg);
4211 #endif
4213 if (entry_parm == 0)
4214 promoted_mode = passed_mode;
4216 #ifdef SETUP_INCOMING_VARARGS
4217 /* If this is the last named parameter, do any required setup for
4218 varargs or stdargs. We need to know about the case of this being an
4219 addressable type, in which case we skip the registers it
4220 would have arrived in.
4222 For stdargs, LAST_NAMED will be set for two parameters, the one that
4223 is actually the last named, and the dummy parameter. We only
4224 want to do this action once.
4226 Also, indicate when RTL generation is to be suppressed. */
4227 if (last_named && !varargs_setup)
4228 {
4229 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4230 current_function_pretend_args_size, 0);
4231 varargs_setup = 1;
4232 }
4233 #endif
4235 /* Determine parm's home in the stack,
4236 in case it arrives in the stack or we should pretend it did.
4238 Compute the stack position and rtx where the argument arrives
4239 and its size.
4241 There is one complexity here: If this was a parameter that would
4242 have been passed in registers, but wasn't only because it is
4243 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4244 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4245 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4246 0 as it was the previous time. */
4248 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4249 locate_and_pad_parm (promoted_mode, passed_type,
4250 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4251 1,
4252 #else
4253 #ifdef FUNCTION_INCOMING_ARG
4254 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4255 passed_type,
4256 pretend_named) != 0,
4257 #else
4258 FUNCTION_ARG (args_so_far, promoted_mode,
4259 passed_type,
4260 pretend_named) != 0,
4261 #endif
4262 #endif
4263 fndecl, &stack_args_size, &stack_offset, &arg_size,
4264 &alignment_pad);
4267 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4269 if (offset_rtx == const0_rtx)
4270 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4271 else
4272 stack_parm = gen_rtx_MEM (promoted_mode,
4273 gen_rtx_PLUS (Pmode,
4274 internal_arg_pointer,
4275 offset_rtx));
4277 /* If this is a memory ref that contains aggregate components,
4278 mark it as such for cse and loop optimize.  Likewise if it
4279 is readonly.  */
4280 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4281 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4282 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4285 /* If this parameter was passed both in registers and in the stack,
4286 use the copy on the stack. */
4287 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4288 entry_parm = 0;
4290 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4291 /* If this parm was passed part in regs and part in memory,
4292 pretend it arrived entirely in memory
4293 by pushing the register-part onto the stack.
4295 In the special case of a DImode or DFmode that is split,
4296 we could put it together in a pseudoreg directly,
4297 but for now that's not worth bothering with. */
4299 if (entry_parm)
4300 {
4301 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4302 passed_type, named_arg);
4304 if (nregs > 0)
4305 {
4306 current_function_pretend_args_size
4307 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4308 / (PARM_BOUNDARY / BITS_PER_UNIT)
4309 * (PARM_BOUNDARY / BITS_PER_UNIT));
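/* Editorial example (assumed target values, not from the original
   sources): with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits
   (8 bytes), nregs == 3 gives a 12-byte register part, and
   (12 + 8 - 1) / 8 * 8 rounds the pretend-args size up to 16 bytes.  */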
4311 /* Handle calls that pass values in multiple non-contiguous
4312 locations. The Irix 6 ABI has examples of this. */
4313 if (GET_CODE (entry_parm) == PARALLEL)
4314 emit_group_store (validize_mem (stack_parm), entry_parm,
4315 int_size_in_bytes (TREE_TYPE (parm)),
4316 (TYPE_ALIGN (TREE_TYPE (parm))
4317 / BITS_PER_UNIT));
4318 else
4319 move_block_from_reg (REGNO (entry_parm),
4320 validize_mem (stack_parm), nregs,
4321 int_size_in_bytes (TREE_TYPE (parm)));
4323 entry_parm = stack_parm;
4324 }
4325 }
4326 #endif
4328 /* If we didn't decide this parm came in a register,
4329 by default it came on the stack. */
4330 if (entry_parm == 0)
4331 entry_parm = stack_parm;
4333 /* Record permanently how this parm was passed. */
4334 DECL_INCOMING_RTL (parm) = entry_parm;
4336 /* If there is actually space on the stack for this parm,
4337 count it in stack_args_size; otherwise set stack_parm to 0
4338 to indicate there is no preallocated stack slot for the parm. */
4340 if (entry_parm == stack_parm
4341 || (GET_CODE (entry_parm) == PARALLEL
4342 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4343 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4344 /* On some machines, even if a parm value arrives in a register
4345 there is still an (uninitialized) stack slot allocated for it.
4347 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4348 whether this parameter already has a stack slot allocated,
4349 because an arg block exists only if current_function_args_size
4350 is larger than some threshold, and we haven't calculated that
4351 yet.  So, for now, we just assume that stack slots never exist
4352 in this case.  */
4353 || REG_PARM_STACK_SPACE (fndecl) > 0
4354 #endif
4355 )
4356 {
4357 stack_args_size.constant += arg_size.constant;
4358 if (arg_size.var)
4359 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4360 }
4361 else
4362 /* No stack slot was pushed for this parm.  */
4363 stack_parm = 0;
4365 /* Update info on where next arg arrives in registers. */
4367 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4368 passed_type, named_arg);
4370 /* If we can't trust the parm stack slot to be aligned enough
4371 for its ultimate type, don't use that slot after entry.
4372 We'll make another stack slot, if we need one. */
4373 {
4374 int thisparm_boundary
4375 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4377 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4378 stack_parm = 0;
4379 }
4381 /* If parm was passed in memory, and we need to convert it on entry,
4382 don't store it back in that same slot. */
4383 if (entry_parm == stack_parm
4384 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4385 stack_parm = 0;
4388 /* Now adjust STACK_PARM to the mode and precise location
4389 where this parameter should live during execution,
4390 if we discover that it must live in the stack during execution.
4391 To make debuggers happier on big-endian machines, we store
4392 the value in the last bytes of the space available. */
4394 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4399 if (BYTES_BIG_ENDIAN
4400 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4401 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4402 - GET_MODE_SIZE (nominal_mode));
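/* Editorial example (assumed target values, not from the original
   sources): on a big-endian machine with UNITS_PER_WORD == 4, a parm
   passed as SImode (4 bytes) but declared HImode (2 bytes) has its
   offset bumped by 4 - 2 == 2, so STACK_PARM addresses the last two
   bytes of the slot, where a big-endian store of the wide value
   leaves the data.  */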
4404 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4405 if (offset_rtx == const0_rtx)
4406 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4407 else
4408 stack_parm = gen_rtx_MEM (nominal_mode,
4409 gen_rtx_PLUS (Pmode,
4410 internal_arg_pointer,
4411 offset_rtx));
4413 /* If this is a memory ref that contains aggregate components,
4414 mark it as such for cse and loop optimize. */
4415 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4416 }
4419 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4420 in the mode in which it arrives.
4421 STACK_PARM is an RTX for a stack slot where the parameter can live
4422 during the function (in case we want to put it there).
4423 STACK_PARM is 0 if no stack slot was pushed for it.
4425 Now output code if necessary to convert ENTRY_PARM to
4426 the type in which this function declares it,
4427 and store that result in an appropriate place,
4428 which may be a pseudo reg, may be STACK_PARM,
4429 or may be a local stack slot if STACK_PARM is 0.
4431 Set DECL_RTL to that place. */
4433 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4435 /* If a BLKmode arrives in registers, copy it to a stack slot.
4436 Handle calls that pass values in multiple non-contiguous
4437 locations. The Irix 6 ABI has examples of this. */
4438 if (GET_CODE (entry_parm) == REG
4439 || GET_CODE (entry_parm) == PARALLEL)
4440 {
4441 int size_stored
4442 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4443 UNITS_PER_WORD);
4445 /* Note that we will be storing an integral number of words.
4446 So we have to be careful to ensure that we allocate an
4447 integral number of words. We do this below in the
4448 assign_stack_local if space was not allocated in the argument
4449 list. If it was, this will not work if PARM_BOUNDARY is not
4450 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4451 if it becomes a problem. */
4453 if (stack_parm == 0)
4454 {
4455 stack_parm
4456 = assign_stack_local (GET_MODE (entry_parm),
4457 size_stored, 0);
4459 /* If this is a memory ref that contains aggregate
4460 components, mark it as such for cse and loop optimize. */
4461 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4462 }
4464 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4465 abort ();
4467 if (TREE_READONLY (parm))
4468 RTX_UNCHANGING_P (stack_parm) = 1;
4470 /* Handle calls that pass values in multiple non-contiguous
4471 locations. The Irix 6 ABI has examples of this. */
4472 if (GET_CODE (entry_parm) == PARALLEL)
4473 emit_group_store (validize_mem (stack_parm), entry_parm,
4474 int_size_in_bytes (TREE_TYPE (parm)),
4475 (TYPE_ALIGN (TREE_TYPE (parm))
4476 / BITS_PER_UNIT));
4477 else
4478 move_block_from_reg (REGNO (entry_parm),
4479 validize_mem (stack_parm),
4480 size_stored / UNITS_PER_WORD,
4481 int_size_in_bytes (TREE_TYPE (parm)));
4483 DECL_RTL (parm) = stack_parm;
4485 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4486 && ! DECL_INLINE (fndecl))
4487 /* layout_decl may set this. */
4488 || TREE_ADDRESSABLE (parm)
4489 || TREE_SIDE_EFFECTS (parm)
4490 /* If -ffloat-store specified, don't put explicit
4491 float variables into registers. */
4492 || (flag_float_store
4493 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4494 /* Always assign pseudo to structure return or item passed
4495 by invisible reference. */
4496 || passed_pointer || parm == function_result_decl)
4497 {
4498 /* Store the parm in a pseudoregister during the function, but we
4499 may need to do it in a wider mode. */
4501 register rtx parmreg;
4502 int regno, regnoi = 0, regnor = 0;
4504 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4506 promoted_nominal_mode
4507 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4509 parmreg = gen_reg_rtx (promoted_nominal_mode);
4510 mark_user_reg (parmreg);
4512 /* If this was an item that we received a pointer to, set DECL_RTL
4513 appropriately.  */
4514 if (passed_pointer)
4515 {
4516 DECL_RTL (parm)
4517 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4518 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4519 }
4520 else
4521 DECL_RTL (parm) = parmreg;
4523 /* Copy the value into the register. */
4524 if (nominal_mode != passed_mode
4525 || promoted_nominal_mode != promoted_mode)
4528 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4529 mode, by the caller. We now have to convert it to
4530 NOMINAL_MODE, if different. However, PARMREG may be in
4531 a different mode than NOMINAL_MODE if it is being stored
4532 promoted.
4534 If ENTRY_PARM is a hard register, it might be in a register
4535 not valid for operating in its mode (e.g., an odd-numbered
4536 register for a DFmode). In that case, moves are the only
4537 thing valid, so we can't do a convert from there. This
4538 occurs when the calling sequence allows such misaligned
4539 usages.
4541 In addition, the conversion may involve a call, which could
4542 clobber parameters which haven't been copied to pseudo
4543 registers yet. Therefore, we must first copy the parm to
4544 a pseudo reg here, and save the conversion until after all
4545 parameters have been moved. */
4547 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4549 emit_move_insn (tempreg, validize_mem (entry_parm));
4551 push_to_sequence (conversion_insns);
4552 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4554 /* TREE_USED gets set erroneously during expand_assignment. */
4555 save_tree_used = TREE_USED (parm);
4556 expand_assignment (parm,
4557 make_tree (nominal_type, tempreg), 0, 0);
4558 TREE_USED (parm) = save_tree_used;
4559 conversion_insns = get_insns ();
4560 end_sequence ();
4561 did_conversion = 1;
4562 }
4563 else
4564 emit_move_insn (parmreg, validize_mem (entry_parm));
4566 /* If we were passed a pointer but the actual value
4567 can safely live in a register, put it in one. */
4568 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4569 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4570 && ! DECL_INLINE (fndecl))
4571 /* layout_decl may set this. */
4572 || TREE_ADDRESSABLE (parm)
4573 || TREE_SIDE_EFFECTS (parm)
4574 /* If -ffloat-store specified, don't put explicit
4575 float variables into registers. */
4576 || (flag_float_store
4577 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4579 /* We can't use nominal_mode, because it will have been set to
4580 Pmode above. We must use the actual mode of the parm. */
4581 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4582 mark_user_reg (parmreg);
4583 emit_move_insn (parmreg, DECL_RTL (parm));
4584 DECL_RTL (parm) = parmreg;
4585 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4586 now the parm.  */
4587 stack_parm = 0;
4588 }
4589 #ifdef FUNCTION_ARG_CALLEE_COPIES
4590 /* If we are passed an arg by reference and it is our responsibility
4591 to make a copy, do it now.
4592 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4593 original argument, so we must recreate them in the call to
4594 FUNCTION_ARG_CALLEE_COPIES. */
4595 /* ??? Later add code to handle the case that if the argument isn't
4596 modified, don't do the copy. */
4598 else if (passed_pointer
4599 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4600 TYPE_MODE (DECL_ARG_TYPE (parm)),
4601 DECL_ARG_TYPE (parm),
4602 named_arg)
4603 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4604 {
4605 rtx copy = 0;
4606 tree type = DECL_ARG_TYPE (parm);
4608 /* This sequence may involve a library call perhaps clobbering
4609 registers that haven't been copied to pseudos yet. */
4611 push_to_sequence (conversion_insns);
4613 if (TYPE_SIZE (type) == 0
4614 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4615 /* This is a variable sized object. */
4616 copy = gen_rtx_MEM (BLKmode,
4617 allocate_dynamic_stack_space
4618 (expr_size (parm), NULL_RTX,
4619 TYPE_ALIGN (type)));
4620 else
4621 copy = assign_stack_temp (TYPE_MODE (type),
4622 int_size_in_bytes (type), 1);
4623 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4624 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4626 store_expr (parm, copy, 0);
4627 emit_move_insn (parmreg, XEXP (copy, 0));
4628 if (current_function_check_memory_usage)
4629 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4630 XEXP (copy, 0), Pmode,
4631 GEN_INT (int_size_in_bytes (type)),
4632 TYPE_MODE (sizetype),
4633 GEN_INT (MEMORY_USE_RW),
4634 TYPE_MODE (integer_type_node));
4635 conversion_insns = get_insns ();
4636 end_sequence ();
4637 did_conversion = 1;
4638 }
4639 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4641 /* In any case, record the parm's desired stack location
4642 in case we later discover it must live in the stack.
4644 If it is a COMPLEX value, store the stack location for both
4645 halves.  */
4647 if (GET_CODE (parmreg) == CONCAT)
4648 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4649 else
4650 regno = REGNO (parmreg);
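/* Editorial example (hypothetical pseudo numbers, not from the
   original sources): a complex parm whose CONCAT holds the real part
   in pseudo 53 and the imaginary part in pseudo 54 yields
   regno == MAX (53, 54) == 54, guaranteeing the parm_reg_stack_loc
   table grown below covers both halves.  */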
4652 if (regno >= max_parm_reg)
4653 {
4654 rtx *new;
4655 int old_max_parm_reg = max_parm_reg;
4657 /* It's slow to expand this one register at a time,
4658 but it's also rare and we need max_parm_reg to be
4659 precisely correct. */
4660 max_parm_reg = regno + 1;
4661 new = (rtx *) xrealloc (parm_reg_stack_loc,
4662 max_parm_reg * sizeof (rtx));
4663 bzero ((char *) (new + old_max_parm_reg),
4664 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4665 parm_reg_stack_loc = new;
4666 }
4668 if (GET_CODE (parmreg) == CONCAT)
4670 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4672 regnor = REGNO (gen_realpart (submode, parmreg));
4673 regnoi = REGNO (gen_imagpart (submode, parmreg));
4675 if (stack_parm != 0)
4676 {
4677 parm_reg_stack_loc[regnor]
4678 = gen_realpart (submode, stack_parm);
4679 parm_reg_stack_loc[regnoi]
4680 = gen_imagpart (submode, stack_parm);
4681 }
4682 else
4683 {
4684 parm_reg_stack_loc[regnor] = 0;
4685 parm_reg_stack_loc[regnoi] = 0;
4686 }
4687 }
4688 else
4689 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4691 /* Mark the register as eliminable if we did no conversion
4692 and it was copied from memory at a fixed offset,
4693 and the arg pointer was not copied to a pseudo-reg.
4694 If the arg pointer is a pseudo reg or the offset formed
4695 an invalid address, such memory-equivalences
4696 as we make here would screw up life analysis for it. */
4697 if (nominal_mode == passed_mode
4698 && ! did_conversion
4699 && stack_parm != 0
4700 && GET_CODE (stack_parm) == MEM
4701 && stack_offset.var == 0
4702 && reg_mentioned_p (virtual_incoming_args_rtx,
4703 XEXP (stack_parm, 0)))
4704 {
4705 rtx linsn = get_last_insn ();
4706 rtx sinsn, set;
4708 /* Mark complex types separately. */
4709 if (GET_CODE (parmreg) == CONCAT)
4710 /* Scan backwards for the set of the real and
4711 imaginary parts.  */
4712 for (sinsn = linsn; sinsn != 0;
4713 sinsn = prev_nonnote_insn (sinsn))
4714 {
4715 set = single_set (sinsn);
4716 if (set != 0
4717 && SET_DEST (set) == regno_reg_rtx [regnoi])
4718 REG_NOTES (sinsn)
4719 = gen_rtx_EXPR_LIST (REG_EQUIV,
4720 parm_reg_stack_loc[regnoi],
4721 REG_NOTES (sinsn));
4722 else if (set != 0
4723 && SET_DEST (set) == regno_reg_rtx [regnor])
4724 REG_NOTES (sinsn)
4725 = gen_rtx_EXPR_LIST (REG_EQUIV,
4726 parm_reg_stack_loc[regnor],
4727 REG_NOTES (sinsn));
4728 }
4729 else if ((set = single_set (linsn)) != 0
4730 && SET_DEST (set) == parmreg)
4731 REG_NOTES (linsn)
4732 = gen_rtx_EXPR_LIST (REG_EQUIV,
4733 stack_parm, REG_NOTES (linsn));
4734 }
4736 /* For pointer data type, suggest pointer register. */
4737 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4738 mark_reg_pointer (parmreg,
4739 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4740 / BITS_PER_UNIT));
4741 }
4743 else
4744 /* Value must be stored in the stack slot STACK_PARM
4745 during function execution.  */
4746 {
4747 if (promoted_mode != nominal_mode)
4749 /* Conversion is required. */
4750 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4752 emit_move_insn (tempreg, validize_mem (entry_parm));
4754 push_to_sequence (conversion_insns);
4755 entry_parm = convert_to_mode (nominal_mode, tempreg,
4756 TREE_UNSIGNED (TREE_TYPE (parm)));
4758 if (stack_parm)
4759 /* ??? This may need a big-endian conversion on sparc64.  */
4760 stack_parm = change_address (stack_parm, nominal_mode,
4761 NULL_RTX);
4763 conversion_insns = get_insns ();
4764 did_conversion = 1;
4765 end_sequence ();
4766 }
4768 if (entry_parm != stack_parm)
4769 {
4770 if (stack_parm == 0)
4771 {
4772 stack_parm
4773 = assign_stack_local (GET_MODE (entry_parm),
4774 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4775 /* If this is a memory ref that contains aggregate components,
4776 mark it as such for cse and loop optimize. */
4777 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4778 }
4780 if (promoted_mode != nominal_mode)
4782 push_to_sequence (conversion_insns);
4783 emit_move_insn (validize_mem (stack_parm),
4784 validize_mem (entry_parm));
4785 conversion_insns = get_insns ();
4786 end_sequence ();
4787 }
4788 else
4789 emit_move_insn (validize_mem (stack_parm),
4790 validize_mem (entry_parm));
4791 }
4792 if (current_function_check_memory_usage)
4793 {
4794 push_to_sequence (conversion_insns);
4795 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4796 XEXP (stack_parm, 0), Pmode,
4797 GEN_INT (GET_MODE_SIZE (GET_MODE
4798 (stack_parm))),
4799 TYPE_MODE (sizetype),
4800 GEN_INT (MEMORY_USE_RW),
4801 TYPE_MODE (integer_type_node));
4803 conversion_insns = get_insns ();
4804 end_sequence ();
4805 }
4806 DECL_RTL (parm) = stack_parm;
4809 /* If this "parameter" was the place where we are receiving the
4810 function's incoming structure pointer, set up the result. */
4811 if (parm == function_result_decl)
4812 {
4813 tree result = DECL_RESULT (fndecl);
4814 tree restype = TREE_TYPE (result);
4816 DECL_RTL (result)
4817 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4819 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4820 AGGREGATE_TYPE_P (restype));
4821 }
4823 if (TREE_THIS_VOLATILE (parm))
4824 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4825 if (TREE_READONLY (parm))
4826 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4829 /* Output all parameter conversion instructions (possibly including calls)
4830 now that all parameters have been copied out of hard registers. */
4831 emit_insns (conversion_insns);
4833 last_parm_insn = get_last_insn ();
4835 current_function_args_size = stack_args_size.constant;
4837 /* Adjust function incoming argument size for alignment and
4838 minimum size.  */
4840 #ifdef REG_PARM_STACK_SPACE
4841 #ifndef MAYBE_REG_PARM_STACK_SPACE
4842 current_function_args_size = MAX (current_function_args_size,
4843 REG_PARM_STACK_SPACE (fndecl));
4844 #endif
4845 #endif
4847 #ifdef STACK_BOUNDARY
4848 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4850 current_function_args_size
4851 = ((current_function_args_size + STACK_BYTES - 1)
4852 / STACK_BYTES) * STACK_BYTES;
4853 #endif
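/* Editorial example (assumed STACK_BOUNDARY of 64 bits, not from the
   original sources): STACK_BYTES is 8, so an args size of 13 bytes is
   rounded by the expression above to (13 + 8 - 1) / 8 * 8 == 16.  */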
4855 #ifdef ARGS_GROW_DOWNWARD
4856 current_function_arg_offset_rtx
4857 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4858 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4859 size_int (-stack_args_size.constant)),
4860 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4861 #else
4862 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4863 #endif
4865 /* See how many bytes, if any, of its args a function should try to pop
4866 on return.  */
4868 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4869 current_function_args_size);
4871 /* For stdarg.h function, save info about
4872 regs and stack space used by the named args. */
4875 current_function_args_info = args_so_far;
4877 /* Set the rtx used for the function return value. Put this in its
4878 own variable so any optimizers that need this information don't have
4879 to include tree.h. Do this here so it gets done when an inlined
4880 function gets output. */
4882 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4885 /* Indicate whether REGNO is an incoming argument to the current function
4886 that was promoted to a wider mode. If so, return the RTX for the
4887 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4888 that REGNO is promoted from and whether the promotion was signed or
4889 unsigned.  */
4891 #ifdef PROMOTE_FUNCTION_ARGS
4893 rtx
4894 promoted_input_arg (regno, pmode, punsignedp)
4895 int regno;
4896 enum machine_mode *pmode;
4897 int *punsignedp;
4898 {
4899 tree arg;
4901 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4902 arg = TREE_CHAIN (arg))
4903 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4904 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4905 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4906 {
4907 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4908 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4910 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4911 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4912 && mode != DECL_MODE (arg))
4914 *pmode = DECL_MODE (arg);
4915 *punsignedp = unsignedp;
4916 return DECL_INCOMING_RTL (arg);
4917 }
4918 }
4920 return 0;
4921 }
4922 #endif
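/* Editorial usage sketch (hypothetical caller, not part of this file):
   a pass that sees hard register REGNO live at function entry can ask
   whether it holds a promoted argument and recover the narrow mode and
   signedness of the promotion.  */
#if 0
static int
arg_is_promoted_p (regno)
     int regno;
{
  enum machine_mode narrow_mode;
  int arg_unsignedp;

  /* Nonzero return means REGNO carries an argument widened from
     NARROW_MODE; ARG_UNSIGNEDP tells zero- from sign-extension.  */
  return promoted_input_arg (regno, &narrow_mode, &arg_unsignedp) != 0;
}
#endif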
4925 /* Compute the size and offset from the start of the stacked arguments for a
4926 parm passed in mode PASSED_MODE and with type TYPE.
4928 INITIAL_OFFSET_PTR points to the current offset into the stacked
4929 arguments.
4931 The starting offset and size for this parm are returned in *OFFSET_PTR
4932 and *ARG_SIZE_PTR, respectively.
4934 IN_REGS is non-zero if the argument will be passed in registers. It will
4935 never be set if REG_PARM_STACK_SPACE is not defined.
4937 FNDECL is the function in which the argument was defined.
4939 There are two types of rounding that are done. The first, controlled by
4940 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4941 list to be aligned to the specific boundary (in bits). This rounding
4942 affects the initial and starting offsets, but not the argument size.
4944 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4945 optionally rounds the size of the parm to PARM_BOUNDARY. The
4946 initial offset is not affected by this rounding, while the size always
4947 is and the starting offset may be. */
4949 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4950 initial_offset_ptr is positive because locate_and_pad_parm's
4951 callers pass in the total size of args so far as
4952 initial_offset_ptr.  arg_size_ptr is always positive.  */
4954 void
4955 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4956 initial_offset_ptr, offset_ptr, arg_size_ptr,
4957 alignment_pad)
4958 enum machine_mode passed_mode;
4959 tree type;
4960 int in_regs;
4961 tree fndecl ATTRIBUTE_UNUSED;
4962 struct args_size *initial_offset_ptr;
4963 struct args_size *offset_ptr;
4964 struct args_size *arg_size_ptr;
4965 struct args_size *alignment_pad;
4966 {
4968 tree sizetree
4969 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4970 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4971 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4973 #ifdef REG_PARM_STACK_SPACE
4974 /* If we have found a stack parm before we reach the end of the
4975 area reserved for registers, skip that area. */
4976 if (! in_regs)
4977 {
4978 int reg_parm_stack_space = 0;
4980 #ifdef MAYBE_REG_PARM_STACK_SPACE
4981 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4982 #else
4983 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4984 #endif
4985 if (reg_parm_stack_space > 0)
4987 if (initial_offset_ptr->var)
4989 initial_offset_ptr->var
4990 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4991 size_int (reg_parm_stack_space));
4992 initial_offset_ptr->constant = 0;
4994 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4995 initial_offset_ptr->constant = reg_parm_stack_space;
4998 #endif /* REG_PARM_STACK_SPACE */
5000 arg_size_ptr->var = 0;
5001 arg_size_ptr->constant = 0;
5003 #ifdef ARGS_GROW_DOWNWARD
5004 if (initial_offset_ptr->var)
5005 {
5006 offset_ptr->constant = 0;
5007 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5008 initial_offset_ptr->var);
5009 }
5010 else
5011 {
5012 offset_ptr->constant = - initial_offset_ptr->constant;
5013 offset_ptr->var = 0;
5014 }
5015 if (where_pad != none
5016 && (TREE_CODE (sizetree) != INTEGER_CST
5017 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5018 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5019 SUB_PARM_SIZE (*offset_ptr, sizetree);
5020 if (where_pad != downward)
5021 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5022 if (initial_offset_ptr->var)
5023 {
5024 arg_size_ptr->var = size_binop (MINUS_EXPR,
5025 size_binop (MINUS_EXPR,
5026 integer_zero_node,
5027 initial_offset_ptr->var),
5028 offset_ptr->var);
5029 }
5030 else
5031 {
5032 arg_size_ptr->constant = (- initial_offset_ptr->constant
5033 - offset_ptr->constant);
5034 }
5035 #else /* !ARGS_GROW_DOWNWARD */
5036 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5037 *offset_ptr = *initial_offset_ptr;
5039 #ifdef PUSH_ROUNDING
5040 if (passed_mode != BLKmode)
5041 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5042 #endif
5044 /* Pad_below needs the pre-rounded size to know how much to pad below
5045 so this must be done before rounding up. */
5046 if (where_pad == downward
5047 /* However, BLKmode args passed in regs have their padding done elsewhere.
5048 The stack slot must be able to hold the entire register. */
5049 && !(in_regs && passed_mode == BLKmode))
5050 pad_below (offset_ptr, passed_mode, sizetree);
5052 if (where_pad != none
5053 && (TREE_CODE (sizetree) != INTEGER_CST
5054 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5055 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5057 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5058 #endif /* ARGS_GROW_DOWNWARD */
5059 }
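/* Editorial example (assumed target values, args growing upward, not
   from the original sources): a parm whose FUNCTION_ARG_BOUNDARY is
   64 bits arriving when the args so far total 4 bytes first has its
   offset padded up to 8 (alignment_pad.constant == 4); a 1-byte
   QImode parm with PARM_BOUNDARY == 32 keeps its offset but has its
   recorded size rounded up to 4 bytes.  */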
5061 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5062 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5064 static void
5065 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5066 struct args_size *offset_ptr;
5067 int boundary;
5068 struct args_size *alignment_pad;
5069 {
5070 tree save_var = NULL_TREE;
5071 HOST_WIDE_INT save_constant;
5073 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5075 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5076 {
5077 save_var = offset_ptr->var;
5078 save_constant = offset_ptr->constant;
5079 }
5081 alignment_pad->var = NULL_TREE;
5082 alignment_pad->constant = 0;
5084 if (boundary > BITS_PER_UNIT)
5085 {
5086 if (offset_ptr->var)
5087 {
5088 offset_ptr->var =
5089 #ifdef ARGS_GROW_DOWNWARD
5090 round_down
5091 #else
5092 round_up
5093 #endif
5094 (ARGS_SIZE_TREE (*offset_ptr),
5095 boundary / BITS_PER_UNIT);
5096 offset_ptr->constant = 0; /*?*/
5097 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5098 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5099 }
5100 else
5101 {
5102 offset_ptr->constant =
5103 #ifdef ARGS_GROW_DOWNWARD
5104 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5105 #else
5106 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5107 #endif
5108 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5109 alignment_pad->constant = offset_ptr->constant - save_constant;
5110 }
5111 }
5112 }
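/* Editorial example (assumed boundary of 64 bits, not from the
   original sources): boundary_in_bytes is 8, so a constant offset of
   13 becomes CEIL_ROUND (13, 8) == 16 when args grow upward, while a
   downward-growing offset of -13 becomes FLOOR_ROUND (-13, 8) == -16.  */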
5114 #ifndef ARGS_GROW_DOWNWARD
5115 static void
5116 pad_below (offset_ptr, passed_mode, sizetree)
5117 struct args_size *offset_ptr;
5118 enum machine_mode passed_mode;
5119 tree sizetree;
5120 {
5121 if (passed_mode != BLKmode)
5122 {
5123 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5124 offset_ptr->constant
5125 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5126 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5127 - GET_MODE_SIZE (passed_mode));
5128 }
5129 else
5130 {
5131 if (TREE_CODE (sizetree) != INTEGER_CST
5132 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5134 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5135 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5137 ADD_PARM_SIZE (*offset_ptr, s2);
5138 SUB_PARM_SIZE (*offset_ptr, sizetree);
5139 }
5140 }
5141 }
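/* Editorial example (assumed PARM_BOUNDARY of 64 bits, not from the
   original sources): a downward-padded SImode parm (32 bits) gets its
   offset advanced by (32 + 63) / 64 * 64 / 8 - 4 == 4 bytes, so the
   value sits in the upper half of its 8-byte slot.  */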
5144 #ifdef ARGS_GROW_DOWNWARD
5145 static tree
5146 round_down (value, divisor)
5147 tree value;
5148 int divisor;
5149 {
5150 return size_binop (MULT_EXPR,
5151 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5152 size_int (divisor));
5153 }
5154 #endif
5156 /* Walk the tree of blocks describing the binding levels within a function
5157 and warn about uninitialized variables.
5158 This is done after calling flow_analysis and before global_alloc
5159 clobbers the pseudo-regs to hard regs. */
5161 void
5162 uninitialized_vars_warning (block)
5163 tree block;
5164 {
5165 register tree decl, sub;
5166 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5168 if (warn_uninitialized
5169 && TREE_CODE (decl) == VAR_DECL
5170 /* These warnings are unreliable for aggregates
5171 because assigning the fields one by one can fail to convince
5172 flow.c that the entire aggregate was initialized.
5173 Unions are troublesome because members may be shorter. */
5174 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5175 && DECL_RTL (decl) != 0
5176 && GET_CODE (DECL_RTL (decl)) == REG
5177 /* Global optimizations can make it difficult to determine if a
5178 particular variable has been initialized. However, a VAR_DECL
5179 with a nonzero DECL_INITIAL had an initializer, so do not
5180 claim it is potentially uninitialized.
5182 We do not care about the actual value in DECL_INITIAL, so we do
5183 not worry that it may be a dangling pointer. */
5184 && DECL_INITIAL (decl) == NULL_TREE
5185 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5186 warning_with_decl (decl,
5187 "`%s' might be used uninitialized in this function");
5188 if (extra_warnings
5189 && TREE_CODE (decl) == VAR_DECL
5190 && DECL_RTL (decl) != 0
5191 && GET_CODE (DECL_RTL (decl)) == REG
5192 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5193 warning_with_decl (decl,
5194 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5196 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5197 uninitialized_vars_warning (sub);
5200 /* Do the appropriate part of uninitialized_vars_warning
5201 but for arguments instead of local variables. */
5203 void
5204 setjmp_args_warning ()
5205 {
5206 register tree decl;
5207 for (decl = DECL_ARGUMENTS (current_function_decl);
5208 decl; decl = TREE_CHAIN (decl))
5209 if (DECL_RTL (decl) != 0
5210 && GET_CODE (DECL_RTL (decl)) == REG
5211 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5212 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5215 /* If this function calls setjmp, put all vars into the stack
5216 unless they were declared `register'. */
5218 void
5219 setjmp_protect (block)
5220 tree block;
5221 {
5222 register tree decl, sub;
5223 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5224 if ((TREE_CODE (decl) == VAR_DECL
5225 || TREE_CODE (decl) == PARM_DECL)
5226 && DECL_RTL (decl) != 0
5227 && (GET_CODE (DECL_RTL (decl)) == REG
5228 || (GET_CODE (DECL_RTL (decl)) == MEM
5229 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5230 /* If this variable came from an inline function, it must be
5231 that its life doesn't overlap the setjmp. If there was a
5232 setjmp in the function, it would already be in memory. We
5233 must exclude such variables because their DECL_RTL might be
5234 set to strange things such as virtual_stack_vars_rtx. */
5235 && ! DECL_FROM_INLINE (decl)
5236 && (
5237 #ifdef NON_SAVING_SETJMP
5238 /* If longjmp doesn't restore the registers,
5239 don't put anything in them.  */
5240 NON_SAVING_SETJMP
5241 ||
5242 #endif
5243 ! DECL_REGISTER (decl)))
5244 put_var_into_stack (decl);
5245 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5246 setjmp_protect (sub);
5249 /* Like the previous function, but for args instead of local variables. */
5251 void
5252 setjmp_protect_args ()
5253 {
5254 register tree decl;
5255 for (decl = DECL_ARGUMENTS (current_function_decl);
5256 decl; decl = TREE_CHAIN (decl))
5257 if ((TREE_CODE (decl) == VAR_DECL
5258 || TREE_CODE (decl) == PARM_DECL)
5259 && DECL_RTL (decl) != 0
5260 && (GET_CODE (DECL_RTL (decl)) == REG
5261 || (GET_CODE (DECL_RTL (decl)) == MEM
5262 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5263 && (
5264 /* If longjmp doesn't restore the registers,
5265 don't put anything in them. */
5266 #ifdef NON_SAVING_SETJMP
5267 NON_SAVING_SETJMP
5268 ||
5269 #endif
5270 ! DECL_REGISTER (decl)))
5271 put_var_into_stack (decl);
5274 /* Return the context-pointer register corresponding to DECL,
5275 or 0 if it does not need one. */
5277 rtx
5278 lookup_static_chain (decl)
5279 tree decl;
5280 {
5281 tree context = decl_function_context (decl);
5282 tree link;
5284 if (context == 0
5285 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5286 return 0;
5288 /* We treat inline_function_decl as an alias for the current function
5289 because that is the inline function whose vars, types, etc.
5290 are being merged into the current function.
5291 See expand_inline_function. */
5292 if (context == current_function_decl || context == inline_function_decl)
5293 return virtual_stack_vars_rtx;
5295 for (link = context_display; link; link = TREE_CHAIN (link))
5296 if (TREE_PURPOSE (link) == context)
5297 return RTL_EXPR_RTL (TREE_VALUE (link));
5299 abort ();
5300 }
5302 /* Convert a stack slot address ADDR for variable VAR
5303 (from a containing function)
5304 into an address valid in this function (using a static chain). */
5306 rtx
5307 fix_lexical_addr (addr, var)
5308 rtx addr;
5309 tree var;
5310 {
5311 rtx basereg;
5312 HOST_WIDE_INT displacement;
5313 tree context = decl_function_context (var);
5314 struct function *fp;
5315 rtx base = 0;
5317 /* If this is the present function, we need not do anything. */
5318 if (context == current_function_decl || context == inline_function_decl)
5319 return addr;
5321 for (fp = outer_function_chain; fp; fp = fp->next)
5322 if (fp->decl == context)
5323 break;
5325 if (fp == 0)
5326 abort ();
5328 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5329 addr = XEXP (XEXP (addr, 0), 0);
5331 /* Decode given address as base reg plus displacement. */
5332 if (GET_CODE (addr) == REG)
5333 basereg = addr, displacement = 0;
5334 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5335 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5336 else
5337 abort ();
5339 /* We accept vars reached via the containing function's
5340 incoming arg pointer and via its stack variables pointer. */
5341 if (basereg == fp->internal_arg_pointer)
5343 /* If reached via arg pointer, get the arg pointer value
5344 out of that function's stack frame.
5346 There are two cases: If a separate ap is needed, allocate a
5347 slot in the outer function for it and dereference it that way.
5348 This is correct even if the real ap is actually a pseudo.
5349 Otherwise, just adjust the offset from the frame pointer to
5350 compensate.  */
5352 #ifdef NEED_SEPARATE_AP
5353 rtx addr;
5355 if (fp->x_arg_pointer_save_area == 0)
5356 fp->x_arg_pointer_save_area
5357 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5359 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5360 addr = memory_address (Pmode, addr);
5362 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5363 #else
5364 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5365 base = lookup_static_chain (var);
5366 #endif
5367 }
5369 else if (basereg == virtual_stack_vars_rtx)
5371 /* This is the same code as lookup_static_chain, duplicated here to
5372 avoid an extra call to decl_function_context.  */
5373 tree link;
5375 for (link = context_display; link; link = TREE_CHAIN (link))
5376 if (TREE_PURPOSE (link) == context)
5377 {
5378 base = RTL_EXPR_RTL (TREE_VALUE (link));
5379 break;
5380 }
5381 }
5383 if (base == 0)
5384 abort ();
5386 /* Use same offset, relative to appropriate static chain or argument
5387 pointer.  */
5388 return plus_constant (base, displacement);
5389 }
5391 /* Return the address of the trampoline for entering nested fn FUNCTION.
5392 If necessary, allocate a trampoline (in the stack frame)
5393 and emit rtl to initialize its contents (at entry to this function). */
5395 rtx
5396 trampoline_address (function)
5397 tree function;
5398 {
5399 tree link;
5400 tree rtlexp;
5401 rtx tramp;
5402 struct function *fp;
5403 tree fn_context;
5405 /* Find an existing trampoline and return it. */
5406 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5407 if (TREE_PURPOSE (link) == function)
5408 return
5409 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5411 for (fp = outer_function_chain; fp; fp = fp->next)
5412 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5413 if (TREE_PURPOSE (link) == function)
5414 {
5415 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5416 function);
5417 return round_trampoline_addr (tramp);
5418 }
5420 /* None exists; we must make one. */
5422 /* Find the `struct function' for the function containing FUNCTION. */
5424 fn_context = decl_function_context (function);
5425 if (fn_context != current_function_decl
5426 && fn_context != inline_function_decl)
5427 for (fp = outer_function_chain; fp; fp = fp->next)
5428 if (fp->decl == fn_context)
5429 break;
5431 /* Allocate run-time space for this trampoline
5432 (usually in the defining function's stack frame). */
5433 #ifdef ALLOCATE_TRAMPOLINE
5434 tramp = ALLOCATE_TRAMPOLINE (fp);
5435 #else
5436 /* If rounding needed, allocate extra space
5437 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5438 #ifdef TRAMPOLINE_ALIGNMENT
5439 #define TRAMPOLINE_REAL_SIZE \
5440 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5441 #else
5442 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5443 #endif
5444 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5445 fp ? fp : cfun);
5446 #endif
5448 /* Record the trampoline for reuse and note it for later initialization
5449 by expand_function_end. */
5450 if (fp != 0)
5451 {
5452 push_obstacks (fp->function_maybepermanent_obstack,
5453 fp->function_maybepermanent_obstack);
5454 rtlexp = make_node (RTL_EXPR);
5455 RTL_EXPR_RTL (rtlexp) = tramp;
5456 fp->x_trampoline_list = tree_cons (function, rtlexp,
5457 fp->x_trampoline_list);
5458 pop_obstacks ();
5459 }
5460 else
5461 {
5462 /* Make the RTL_EXPR node temporary, not momentary, so that the
5463 trampoline_list doesn't become garbage. */
5464 int momentary = suspend_momentary ();
5465 rtlexp = make_node (RTL_EXPR);
5466 resume_momentary (momentary);
5468 RTL_EXPR_RTL (rtlexp) = tramp;
5469 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5470 }
5472 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5473 return round_trampoline_addr (tramp);
5474 }
5476 /* Given a trampoline address,
5477 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5479 static rtx
5480 round_trampoline_addr (tramp)
5481 rtx tramp;
5482 {
5483 #ifdef TRAMPOLINE_ALIGNMENT
5484 /* Round address up to desired boundary. */
5485 rtx temp = gen_reg_rtx (Pmode);
5486 temp = expand_binop (Pmode, add_optab, tramp,
5487 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5488 temp, 0, OPTAB_LIB_WIDEN);
5489 tramp = expand_binop (Pmode, and_optab, temp,
5490 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5491 temp, 0, OPTAB_LIB_WIDEN);
5492 #endif
5493 return tramp;
5494 }
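/* Editorial example (assumed TRAMPOLINE_ALIGNMENT of 128 bits, i.e.
   16 bytes; not from the original sources): an address of 0x1004 is
   rounded by the add/and pair above to (0x1004 + 15) & -16 == 0x1010.  */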
5496 /* The functions identify_blocks and reorder_blocks provide a way to
5497 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5498 duplicate portions of the RTL code. Call identify_blocks before
5499 changing the RTL, and call reorder_blocks after. */
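/* Editorial usage sketch (hypothetical optimizer, not part of this
   file): first call identify_blocks (block, get_insns ()) so each
   BLOCK_BEG/BLOCK_END note remembers its BLOCK; after duplicating or
   reshuffling the insns, call block = reorder_blocks (block,
   get_insns ()) to rebuild the BLOCK tree to match the new note
   order.  */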
5501 /* Put all this function's BLOCK nodes including those that are chained
5502 onto the first block into a vector, and return it.
5503 Also store in each NOTE for the beginning or end of a block
5504 the index of that block in the vector.
5505 The arguments are BLOCK, the chain of top-level blocks of the function,
5506 and INSNS, the insn chain of the function. */
5508 void
5509 identify_blocks (block, insns)
5510 tree block;
5511 rtx insns;
5512 {
5513 int n_blocks;
5514 tree *block_vector;
5515 tree *block_stack;
5516 int depth = 0;
5517 int current_block_number = 1;
5518 rtx insn;
5520 if (block == 0)
5521 return;
5523 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5524 depth-first order. */
5525 n_blocks = all_blocks (block, 0);
5526 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5527 all_blocks (block, block_vector);
5529 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5531 for (insn = insns; insn; insn = NEXT_INSN (insn))
5532 if (GET_CODE (insn) == NOTE)
5533 {
5534 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5535 {
5536 tree b;
5538 /* If there are more block notes than BLOCKs, something
5539 is badly wrong.  */
5540 if (current_block_number == n_blocks)
5541 abort ();
5543 b = block_vector[current_block_number++];
5544 NOTE_BLOCK (insn) = b;
5545 block_stack[depth++] = b;
5546 }
5547 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5548 {
5549 if (depth == 0)
5550 /* There are more NOTE_INSN_BLOCK_ENDs than
5551 NOTE_INSN_BLOCK_BEGs.  Something is badly wrong.  */
5552 abort ();
5554 NOTE_BLOCK (insn) = block_stack[--depth];
5555 }
5556 }
5558 /* In whole-function mode, we might not have seen the whole function
5559 yet, so we might not use up all the blocks. */
5560 if (n_blocks != current_block_number
5561 && !cfun->x_whole_function_mode_p)
5562 abort ();
5564 free (block_vector);
5565 free (block_stack);
5566 }
5568 /* Given a revised instruction chain, rebuild the tree structure of
5569 BLOCK nodes to correspond to the new order of RTL. The new block
5570 tree is inserted below TOP_BLOCK.  Returns the current top-level
5571 block.  */
5573 tree
5574 reorder_blocks (block, insns)
5575 tree block;
5576 rtx insns;
5577 {
5578 tree current_block = block;
5579 rtx insn;
5581 if (block == NULL_TREE)
5582 return NULL_TREE;
5584 /* Prune the old trees away, so that it doesn't get in the way. */
5585 BLOCK_SUBBLOCKS (current_block) = 0;
5586 BLOCK_CHAIN (current_block) = 0;
5588 for (insn = insns; insn; insn = NEXT_INSN (insn))
5589 if (GET_CODE (insn) == NOTE)
5591 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5593 tree block = NOTE_BLOCK (insn);
5594 /* If we have seen this block before, copy it. */
5595 if (TREE_ASM_WRITTEN (block))
5596 block = copy_node (block);
5597 BLOCK_SUBBLOCKS (block) = 0;
5598 TREE_ASM_WRITTEN (block) = 1;
5599 BLOCK_SUPERCONTEXT (block) = current_block;
5600 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5601 BLOCK_SUBBLOCKS (current_block) = block;
5602 current_block = block;
5604 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5606 BLOCK_SUBBLOCKS (current_block)
5607 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5608 current_block = BLOCK_SUPERCONTEXT (current_block);
5612 BLOCK_SUBBLOCKS (current_block)
5613 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5614 return current_block;
5617 /* Reverse the order of elements in the chain T of blocks,
5618 and return the new head of the chain (old last element). */
5620 static tree
5621 blocks_nreverse (t)
5622 tree t;
5623 {
5624 register tree prev = 0, decl, next;
5625 for (decl = t; decl; decl = next)
5626 {
5627 next = BLOCK_CHAIN (decl);
5628 BLOCK_CHAIN (decl) = prev;
5629 prev = decl;
5630 }
5631 return prev;
5632 }
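/* Editorial example (not from the original sources): a BLOCK_CHAIN of
   A -> B -> C comes back from blocks_nreverse as C -> B -> A, with C
   returned as the new head; the links are redirected in place, as in
   any singly-linked list reversal.  */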
5634 /* Count the subblocks of the list starting with BLOCK, and list them
5635 all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
5636 blocks.  If VECTOR is 0, do not store the blocks into the vector.
5637 Return the number of blocks.  */
5638 static int
5639 all_blocks (block, vector)
5640 tree block;
5641 tree *vector;
5642 {
5643 int n_blocks = 0;
5645 while (block)
5646 {
5647 TREE_ASM_WRITTEN (block) = 0;
5649 /* Record this block. */
5650 if (vector)
5651 vector[n_blocks] = block;
5653 ++n_blocks;
5655 /* Record the subblocks, and their subblocks... */
5656 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5657 vector ? vector + n_blocks : 0);
5658 block = BLOCK_CHAIN (block);
5659 }
5661 return n_blocks;
5662 }
5664 /* Allocate a function structure and reset its contents to the defaults. */
5665 static void
5666 prepare_function_start ()
5667 {
5668 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5670 init_stmt_for_function ();
5671 init_eh_for_function ();
5673 cse_not_expected = ! optimize;
5675 /* Caller save not needed yet. */
5676 caller_save_needed = 0;
5678 /* No stack slots have been made yet. */
5679 stack_slot_list = 0;
5681 current_function_has_nonlocal_label = 0;
5682 current_function_has_nonlocal_goto = 0;
5684 /* There is no stack slot for handling nonlocal gotos. */
5685 nonlocal_goto_handler_slots = 0;
5686 nonlocal_goto_stack_level = 0;
5688 /* No labels have been declared for nonlocal use. */
5689 nonlocal_labels = 0;
5690 nonlocal_goto_handler_labels = 0;
5692 /* No function calls so far in this function. */
5693 function_call_count = 0;
5695 /* No parm regs have been allocated.
5696 (This is important for output_inline_function.) */
5697 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5699 /* Initialize the RTL mechanism.  */
5700 init_emit ();
5702 /* Initialize the queue of pending postincrement and postdecrements,
5703 and some other info in expr.c.  */
5704 init_expr ();
5706 /* We haven't done register allocation yet.  */
5707 reg_renumber = 0;
5709 init_varasm_status (cfun);
5711 /* Clear out data used for inlining. */
5712 cfun->inlinable = 0;
5713 cfun->original_decl_initial = 0;
5714 cfun->original_arg_vector = 0;
5716 cfun->stack_alignment_needed = 0;
5718 /* Set if a call to setjmp is seen. */
5719 current_function_calls_setjmp = 0;
5721 /* Set if a call to longjmp is seen. */
5722 current_function_calls_longjmp = 0;
5724 current_function_calls_alloca = 0;
5725 current_function_contains_functions = 0;
5726 current_function_is_leaf = 0;
5727 current_function_sp_is_unchanging = 0;
5728 current_function_uses_only_leaf_regs = 0;
5729 current_function_has_computed_jump = 0;
5730 current_function_is_thunk = 0;
5732 current_function_returns_pcc_struct = 0;
5733 current_function_returns_struct = 0;
5734 current_function_epilogue_delay_list = 0;
5735 current_function_uses_const_pool = 0;
5736 current_function_uses_pic_offset_table = 0;
5737 current_function_cannot_inline = 0;
5739 /* We have not yet needed to make a label to jump to for tail-recursion. */
5740 tail_recursion_label = 0;
5742 /* We haven't had a need to make a save area for ap yet. */
5743 arg_pointer_save_area = 0;
5745 /* No stack slots allocated yet.  */
5746 frame_offset = 0;
5748 /* No SAVE_EXPRs in this function yet.  */
5749 save_expr_regs = 0;
5751 /* No RTL_EXPRs in this function yet.  */
5752 rtl_expr_chain = 0;
5754 /* Set up to allocate temporaries.  */
5755 init_temp_slots ();
5757 /* Indicate that we need to distinguish between the return value of the
5758 present function and the return value of a function being called. */
5759 rtx_equal_function_value_matters = 1;
5761 /* Indicate that we have not instantiated virtual registers yet. */
5762 virtuals_instantiated = 0;
5764 /* Indicate we have no need of a frame pointer yet. */
5765 frame_pointer_needed = 0;
5767 /* By default assume not varargs or stdarg. */
5768 current_function_varargs = 0;
5769 current_function_stdarg = 0;
5771 /* We haven't made any trampolines for this function yet. */
5772 trampoline_list = 0;
5774 init_pending_stack_adjust ();
5775 inhibit_defer_pop = 0;
5777 current_function_outgoing_args_size = 0;
5779 if (init_lang_status)
5780 (*init_lang_status) (cfun);
5781 if (init_machine_status)
5782 (*init_machine_status) (cfun);
5783 }
5785 /* Initialize the rtl expansion mechanism so that we can do simple things
5786 like generate sequences. This is used to provide a context during global
5787 initialization of some passes. */
5788 void
5789 init_dummy_function_start ()
5790 {
5791 prepare_function_start ();
5792 }
5794 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5795 and initialize static variables for generating RTL for the statements
5796 of the function.  */
5798 void
5799 init_function_start (subr, filename, line)
5800 tree subr;
5801 char *filename;
5802 int line;
5803 {
5804 prepare_function_start ();
5806 /* Remember this function for later. */
5807 cfun->next_global = all_functions;
5808 all_functions = cfun;
5810 current_function_name = (*decl_printable_name) (subr, 2);
5813 /* Nonzero if this is a nested function that uses a static chain. */
5815 current_function_needs_context
5816 = (decl_function_context (current_function_decl) != 0
5817 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5819 /* Within function body, compute a type's size as soon as it is laid out.  */
5820 immediate_size_expand++;
5822 /* Prevent ever trying to delete the first instruction of a function.
5823 Also tell final how to output a linenum before the function prologue.
5824 Note linenums could be missing, e.g. when compiling a Java .class file. */
5825 if (line > 0)
5826 emit_line_note (filename, line);
5828 /* Make sure first insn is a note even if we don't want linenums.
5829 This makes sure the first insn will never be deleted.
5830 Also, final expects a note to appear there. */
5831 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5833 /* Set flags used by final.c. */
5834 if (aggregate_value_p (DECL_RESULT (subr)))
5836 #ifdef PCC_STATIC_STRUCT_RETURN
5837 current_function_returns_pcc_struct = 1;
5838 #endif
5839 current_function_returns_struct = 1;
5840 }
5842 /* Warn if this value is an aggregate type,
5843 regardless of which calling convention we are using for it. */
5844 if (warn_aggregate_return
5845 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5846 warning ("function returns an aggregate");
5848 current_function_returns_pointer
5849 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5850 }
5852 /* Make sure all values used by the optimization passes have sane
5853 defaults.  */
5854 void
5855 init_function_for_compilation ()
5856 {
5857 reg_renumber = 0;
5858 /* No prologue/epilogue insns yet.  */
5859 prologue = epilogue = 0;
5860 }
5862 /* Indicate that the current function uses extra args
5863 not explicitly mentioned in the argument list in any fashion. */
5865 void
5866 mark_varargs ()
5867 {
5868 current_function_varargs = 1;
5869 }
5871 /* Expand a call to __main at the beginning of a possible main function. */
5873 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5874 #undef HAS_INIT_SECTION
5875 #define HAS_INIT_SECTION
5876 #endif
5878 void
5879 expand_main_function ()
5880 {
5881 #if !defined (HAS_INIT_SECTION)
5882 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5883 VOIDmode, 0);
5884 #endif /* not HAS_INIT_SECTION */
5885 }
5887 extern struct obstack permanent_obstack;
5889 /* Start the RTL for a new function, and set variables used for
5890 emitting RTL.
5891 SUBR is the FUNCTION_DECL node.
5892 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5893 the function's parameters, which must be run at any return statement. */
5895 void
5896 expand_function_start (subr, parms_have_cleanups)
5897 tree subr;
5898 int parms_have_cleanups;
5899 {
5900 tree tem;
5902 rtx last_ptr = NULL_RTX;
5904 /* Make sure volatile mem refs aren't considered
5905 valid operands of arithmetic insns. */
5906 init_recog_no_volatile ();
5908 /* Set this before generating any memory accesses. */
5909 current_function_check_memory_usage
5910 = (flag_check_memory_usage
5911 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5913 current_function_instrument_entry_exit
5914 = (flag_instrument_function_entry_exit
5915 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5917 current_function_limit_stack
5918 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5920 /* If function gets a static chain arg, store it in the stack frame.
5921 Do this first, so it gets the first stack slot offset. */
5922 if (current_function_needs_context)
5923 {
5924 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5926 /* Delay copying static chain if it is not a register to avoid
5927 conflicts with regs used for parameters. */
5928 if (! SMALL_REGISTER_CLASSES
5929 || GET_CODE (static_chain_incoming_rtx) == REG)
5930 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5931 }
5933 /* If the parameters of this function need cleaning up, get a label
5934 for the beginning of the code which executes those cleanups. This must
5935 be done before doing anything with return_label. */
5936 if (parms_have_cleanups)
5937 cleanup_label = gen_label_rtx ();
5938 else
5939 cleanup_label = 0;
5941 /* Make the label for return statements to jump to, if this machine
5942 does not have a one-instruction return and uses an epilogue,
5943 or if it returns a structure, or if it has parm cleanups.  */
5944 #ifdef HAVE_return
5945 if (cleanup_label == 0 && HAVE_return
5946 && ! current_function_instrument_entry_exit
5947 && ! current_function_returns_pcc_struct
5948 && ! (current_function_returns_struct && ! optimize))
5949 return_label = 0;
5950 else
5951 return_label = gen_label_rtx ();
5952 #else
5953 return_label = gen_label_rtx ();
5954 #endif
5956 /* Initialize rtx used to return the value. */
5957 /* Do this before assign_parms so that we copy the struct value address
5958 before any library calls that assign parms might generate. */
5960 /* Decide whether to return the value in memory or in a register. */
5961 if (aggregate_value_p (DECL_RESULT (subr)))
5962 {
5963 /* Returning something that won't go in a register.  */
5964 register rtx value_address = 0;
5966 #ifdef PCC_STATIC_STRUCT_RETURN
5967 if (current_function_returns_pcc_struct)
5968 {
5969 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5970 value_address = assemble_static_space (size);
5971 }
5972 else
5973 #endif
5974 {
5975 /* Expect to be passed the address of a place to store the value.
5976 If it is passed as an argument, assign_parms will take care of
5977 it.  */
5978 if (struct_value_incoming_rtx)
5979 {
5980 value_address = gen_reg_rtx (Pmode);
5981 emit_move_insn (value_address, struct_value_incoming_rtx);
5982 }
5983 }
5984 if (value_address)
5985 {
5986 DECL_RTL (DECL_RESULT (subr))
5987 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5988 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5989 AGGREGATE_TYPE_P (TREE_TYPE
5990 (DECL_RESULT
5991 (subr))));
5992 }
5993 }
5994 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5995 /* If return mode is void, this decl rtl should not be used. */
5996 DECL_RTL (DECL_RESULT (subr)) = 0;
5997 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5998 {
5999 /* If function will end with cleanup code for parms,
6000 compute the return values into a pseudo reg,
6001 which we will copy into the true return register
6002 after the cleanups are done. */
6004 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6006 #ifdef PROMOTE_FUNCTION_RETURN
6007 tree type = TREE_TYPE (DECL_RESULT (subr));
6008 int unsignedp = TREE_UNSIGNED (type);
6010 mode = promote_mode (type, mode, &unsignedp, 1);
6011 #endif
6013 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6014 }
6015 else
6016 /* Scalar, returned in a register.  */
6017 {
6018 #ifdef FUNCTION_OUTGOING_VALUE
6019 DECL_RTL (DECL_RESULT (subr))
6020 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6021 #else
6022 DECL_RTL (DECL_RESULT (subr))
6023 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6024 #endif
6026 /* Mark this reg as the function's return value. */
6027 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6028 {
6029 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6030 /* Needed because we may need to move this to memory
6031 in case it's a named return value whose address is taken. */
6032 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6033 }
6034 }
6036 /* Initialize rtx for parameters and local variables.
6037 In some cases this requires emitting insns. */
6039 assign_parms (subr);
6041 /* Copy the static chain now if it wasn't a register. The delay is to
6042 avoid conflicts with the parameter passing registers. */
6044 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6045 if (GET_CODE (static_chain_incoming_rtx) != REG)
6046 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6048 /* The following was moved from init_function_start.
6049 The move is supposed to make sdb output more accurate. */
6050 /* Indicate the beginning of the function body,
6051 as opposed to parm setup. */
6052 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6054 /* If doing stupid allocation, mark parms as born here. */
6056 if (GET_CODE (get_last_insn ()) != NOTE)
6057 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6058 parm_birth_insn = get_last_insn ();
6059 if (obey_regdecls)
6060 {
6061 int i;
6062 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6063 use_variable (regno_reg_rtx[i]);
6065 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6066 use_variable (current_function_internal_arg_pointer);
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode,
							       last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }
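  /* An illustration of what the loop above handles, using GNU C
     nested functions:

	 int outer (int x)
	 {
	   int middle (void)
	     {
	       int inner (void) { return x; }
	       return inner ();
	     }
	   return middle ();
	 }

     When compiling `inner', each pass through the loop records one
     enclosing context (`middle', then `outer') in CONTEXT_DISPLAY and
     chains LAST_PTR up one lexical frame.  */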
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}
/* Emit CODE for each register of the return value.  Useful values for
   code are USE and CLOBBER.  */

void
diddle_return_value (code)
     enum rtx_code code;
{
  tree decl_result = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl_result);

  if (return_reg)
    {
      if (GET_CODE (return_reg) == REG
	  && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
	{
	  /* Use hard_function_value to avoid creating a reference to a
	     BLKmode register in the USE/CLOBBER insn.  */
	  return_reg = hard_function_value (TREE_TYPE (decl_result),
					    current_function_decl, 1);
	  REG_FUNCTION_VALUE_P (return_reg) = 1;
	  emit_insn (gen_rtx_fmt_e (code, VOIDmode, return_reg));
	}
      else if (GET_CODE (return_reg) == PARALLEL)
	{
	  int i;

	  for (i = 0; i < XVECLEN (return_reg, 0); i++)
	    {
	      rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

	      if (GET_CODE (x) == REG
		  && REGNO (x) < FIRST_PSEUDO_REGISTER)
		emit_insn (gen_rtx_fmt_e (code, VOIDmode, x));
	    }
	}
    }
}
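/* For a machine whose FUNCTION_VALUE for `int' is (reg:SI 0), a call
   diddle_return_value (CLOBBER) emits roughly

	(clobber (reg/i:SI 0 %r0))

   before the return label, so flow analysis does not see a stale value
   propagated live out of a function that merely drops through.  (The
   register name is illustrative only.)  */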
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();

	  ggc_add_rtx_root (&initial_trampoline, 1);
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
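  /* Trampolines come into play for code such as

	 void call_twice (void (*fn) (void));
	 void f (int x)
	 {
	   void g (void) { x++; }
	   call_twice (g);
	 }

     where the address of the nested function `g' escapes.  Taking that
     address forces a small block of code (the trampoline) onto the
     runtime stack; it loads the static chain for `f''s frame and jumps
     to `g'.  */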
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
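  /* Note that no probe is emitted for a function without calls: only a
     call can push another frame, so the cushion of STACK_CHECK_PROTECT
     plus STACK_CHECK_MAX_FRAME_SIZE bytes is verified up front only
     when a CALL_INSN is found.  */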
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }
  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
	 they are not propagated live to the rest of the function.  This
	 can only happen with functions that drop through; if there had
	 been a return statement, there would have either been a return
	 rtx, or a jump to the return label.  */
      diddle_return_value (CLOBBER);

      emit_label (return_label);
    }
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = NULL_RTX;
    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
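  /* Rationale, by example: in

	 int f (int n) { char *p = alloca (n); return g (p); }

     the stack pointer at exit differs from its value at entry by an
     amount known only at run time, so the entry value is saved just
     after the parms are born and restored just before the epilogue.  */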
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a
	 temporary pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
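  /* The copy just emitted looks something like

	 (set (reg/i:SI 0 %r0) (reg:SI 105))
	 (use (reg/i:SI 0 %r0))

     where pseudo 105 held the value across the parm cleanups; the USE
     keeps the hard register live until the epilogue.  (Register and
     pseudo numbers here are illustrative.)  */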
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
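/* The returned vector is zero-terminated: for a SEQUENCE of three insns
   with UIDs 11, 12 and 13 it holds { 11, 12, 13, 0 }, and for a single
   insn with UID 7 it holds { 7, 0 }.  contains () below relies on the
   trailing zero to find the end.  (The UID values are illustrative.)  */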
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      if (optimize)
	{
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    goto epilogue_done;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we've exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;
	}
      {
	rtx prev, seq, first_use;

	/* Move the USE insns at the end of a function onto a list.  */
	prev = tail;
	if (GET_CODE (prev) == BARRIER
	    || GET_CODE (prev) == NOTE)
	  prev = prev_nonnote_insn (prev);

	first_use = 0;
	if (prev
	    && GET_CODE (prev) == INSN
	    && GET_CODE (PATTERN (prev)) == USE)
	  {
	    /* If the end of the block is the use, grab hold of something
	       else so that we emit barriers etc in the right place.  */
	    if (prev == tail)
	      {
		do
		  tail = PREV_INSN (tail);
		while (GET_CODE (tail) == INSN
		       && GET_CODE (PATTERN (tail)) == USE);
	      }

	    do
	      {
		rtx use = prev;
		prev = prev_nonnote_insn (prev);

		remove_insn (use);
		if (first_use)
		  {
		    NEXT_INSN (use) = first_use;
		    PREV_INSN (first_use) = use;
		  }
		else
		  NEXT_INSN (use) = NULL_RTX;
		first_use = use;
	      }
	    while (prev
		   && GET_CODE (prev) == INSN
		   && GET_CODE (PATTERN (prev)) == USE);
	  }
	/* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	   epilogue insns, the USE insns at the end of a function,
	   the jump insn that returns, and then a BARRIER.  */

	if (GET_CODE (tail) != BARRIER)
	  {
	    prev = next_nonnote_insn (tail);
	    if (!prev || GET_CODE (prev) != BARRIER)
	      emit_barrier_after (tail);
	  }

	seq = gen_epilogue ();
	prev = tail;
	tail = emit_jump_insn_after (seq, tail);

	/* Insert the USE insns immediately before the return insn, which
	   must be the last instruction emitted in the sequence.  */
	if (first_use)
	  emit_insns_before (first_use, tail);
	emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	/* Update the tail of the basic block.  */
	if (bb)
	  bb->end = tail;

	/* Retain a map of the epilogue insns.  */
	epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
#endif

epilogue_done:

  if (inserted)
    commit_edge_insertions ();
}
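/* Note on the two prologue insertion strategies above: when the CFG is
   available, the prologue sequence is queued on the lone successor edge
   of ENTRY_BLOCK_PTR and materialized by commit_edge_insertions ();
   otherwise (no successor, e.g. an empty unoptimized function) the
   insns are simply emitted after F, the first insn of the function.  */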
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}
      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
	ggc_mark_rtx (q->modified);
	q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}
/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
	(*mark_machine_status) (f);
      if (mark_lang_status)
	(*mark_lang_status) (f);

      if (f->original_arg_vector)
	ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
	ggc_mark_tree (f->original_decl_initial);
    }
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
		mark_function_chain);
}
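/* Registering ALL_FUNCTIONS as a GC root this way means that at each
   collection the callback walks the global chain of struct function,
   so every function still on the chain, and everything reachable from
   it via mark_function_chain, survives ggc_collect.  */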