/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */
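/* For illustration (not part of the original file): in

     int g (void) { int i = 0; int *p = &i; return *p; }

   the front end may only discover at `&i' that `i' needs a home in
   memory.  put_var_into_stack then rewrites i's DECL_RTL from the
   pseudo register to a stack slot and fixes up the insns already
   emitted for `i'.  */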
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
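/* For illustration (not part of the original file): with ALIGN == 8,
   FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16; with a
   negative frame offset, FLOOR_ROUND (-3, 8) == -8 and
   CEIL_ROUND (-3, 8) == 0.  A plain -3 / 8 could round toward zero or
   toward negative infinity depending on the C implementation, which is
   exactly the ambiguity the bit masks above avoid.  */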
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
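/* For illustration (not part of the original file): in the GNU C
   statement expression

     struct S x = ({ struct S tmp = make_s (); tmp; });

   the memory temporary holding the grouping's result is created one
   nesting level down but must survive into the enclosing statement;
   preserve_temp_slots implements the "pretend it was allocated at the
   previous level" rule described above.  */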
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
                                        int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int,
                                      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
                                  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int, struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                        struct hash_table *));
static int is_addressof PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
                      ? cfun
                      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  cfun = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
                      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != cfun)
    push_obstacks (function->function_obstack,
                   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we cannot satisfy with the expected alignment of the
     stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != cfun)
    pop_obstacks ();

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
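/* For illustration (not part of the original file): typical calls,
   one per ALIGN convention documented above.

     rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     rtx b = assign_stack_local (BLKmode, 32, -1);
     rtx c = assign_stack_local (BLKmode, 32, 64);

   `a' is aligned as SImode requires, `b' to BIGGEST_ALIGNMENT with its
   size rounded up to a multiple of that, and `c' to a 64-bit boundary.
   Each result is a MEM whose address is based on virtual_stack_vars_rtx
   until virtual registers have been instantiated.  */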
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (!flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
          /* We can't split slots if -fstrict-aliasing because the
             information about the alias set for the new slot will be
             lost.  */
          && !flag_strict_aliasing)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        align = BIGGEST_ALIGNMENT;

      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = frame_offset - frame_offset_old;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif

      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
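/* For illustration (not part of the original file):

     rtx t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   returns a DImode slot (possibly reusing a free one of suitable size,
   mode and alignment) that the next free_temp_slots () call releases;
   passing KEEP == 1 instead would keep it live until its nesting level
   is popped.  */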
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
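/* For illustration (not part of the original file): two free BLKmode
   slots occupying [base_offset, base_offset + full_size) ranges of
   [0, 16) and [16, 24) satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge into
   a single 24-byte slot, which a later assign_stack_temp of up to 24
   bytes can reuse instead of growing the frame.  */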
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location.
     If so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
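/* For illustration (not part of the original file): if OLD is
   (plus (reg 100) (const_int 8)) and NEW is
   (plus (reg 200) (const_int 8)), the matching CONST_INT operands
   reduce the problem to update_temp_slot_address ((reg 100), (reg 200)),
   which records the new register as another known address for the same
   slot.  */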
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     the upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
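/* For illustration (not part of the original file): callers typically
   bracket statement expansion as

     push_temp_slots ();
     ... assign_stack_temp (SImode, 4, 0); ...
     free_temp_slots ();
     pop_temp_slots ();

   where free_temp_slots releases the statement's own temporaries at the
   current level and pop_temp_slots closes the level itself.  */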
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
                        stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
                                0);
          end_sequence ();
        }
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
                        0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
                   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* Remember the notes in case we delete the insn.  */
          note = REG_NOTES (insn);

          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && (XEXP (PATTERN (insn), 0) == var
                  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
                      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                          || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.
             Similarly if this is storing VAR from a register from which
             it was loaded in the previous insn.  This will occur
             when an ADDRESSOF was made for an arglist slot.  */
          else if (toplevel
                   && (set = single_set (insn)) != 0
                   && SET_DEST (set) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && (rtx_equal_p (SET_SRC (set), var)
                       || (GET_CODE (SET_SRC (set)) == REG
                           && (prev = prev_nonnote_insn (insn)) != 0
                           && (prev_set = single_set (prev)) != 0
                           && SET_DEST (prev_set) == SET_SRC (set)
                           && rtx_equal_p (SET_SRC (prev_set), var))))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     address computations to make the address of VAR valid
                     (if it is not) can be placed between the CALL_INSN and
                     INSN.

                     To make sure this doesn't happen, we record the destination
                     of the CALL_INSN and see if the next insn uses both that
                     and VAR.  */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          while (note)
            {
              if (GET_CODE (note) != INSN_LIST)
                XEXP (note, 0)
                  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
              note = XEXP (note, 1);
            }
        }

      if (!ht)
        insn = next;
      else if (insn_list)
        {
          insn = XEXP (insn_list, 0);
          insn_list = XEXP (insn_list, 1);
        }
      else
        insn = NULL_RTX;
    }
}
1765 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1766 See if the rtx expression at *LOC in INSN needs to be changed.
1768 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1769 contain a list of original rtx's and replacements. If we find that we need
1770 to modify this insn by replacing a memory reference with a pseudo or by
1771 making a new MEM to implement a SUBREG, we consult that list to see if
1772 we have already chosen a replacement. If none has already been allocated,
1773 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1774 or the SUBREG, as appropriate, to the pseudo. */
1777 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1779 enum machine_mode promoted_mode;
1782 struct fixup_replacement **replacements;
1785 register rtx x = *loc;
1786 RTX_CODE code = GET_CODE (x);
1787 register const char *fmt;
1788 register rtx tem, tem1;
1789 struct fixup_replacement *replacement;
1794 if (XEXP (x, 0) == var)
1796 /* Prevent sharing of rtl that might lose. */
1797 rtx sub = copy_rtx (XEXP (var, 0));
1799 if (! validate_change (insn, loc, sub, 0))
1801 rtx y = gen_reg_rtx (GET_MODE (sub));
1804 /* We should be able to replace with a register or all is lost.
1805 Note that we can't use validate_change to verify this, since
1806 we're not caring for replacing all dups simultaneously. */
1807 if (! validate_replace_rtx (*loc, y, insn))
1810 /* Careful! First try to recognize a direct move of the
1811 value, mimicking how things are done in gen_reload wrt
1812 PLUS. Consider what happens when insn is a conditional
1813 move instruction and addsi3 clobbers flags. */
1816 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1817 seq = gen_sequence ();
1820 if (recog_memoized (new_insn) < 0)
1822 /* That failed. Fall back on force_operand and hope. */
1825 force_operand (sub, y);
1826 seq = gen_sequence ();
1831 /* Don't separate setter from user. */
1832 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1833 insn = PREV_INSN (insn);
1836 emit_insn_before (seq, insn);
1844 /* If we already have a replacement, use it. Otherwise,
1845 try to fix up this address in case it is invalid. */
1847 replacement = find_fixup_replacement (replacements, var);
1848 if (replacement->new)
1850 *loc = replacement->new;
1854 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1856 /* Unless we are forcing memory to register or we changed the mode,
1857 we can leave things the way they are if the insn is valid. */
1859 INSN_CODE (insn) = -1;
1860 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1861 && recog_memoized (insn) >= 0)
1864 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1868 /* If X contains VAR, we need to unshare it here so that we update
1869 each occurrence separately. But all identical MEMs in one insn
1870 must be replaced with the same rtx because of the possibility of
1873 if (reg_mentioned_p (var, x))
1875 replacement = find_fixup_replacement (replacements, x);
1876 if (replacement->new == 0)
1877 replacement->new = copy_most_rtx (x, var);
1879 *loc = x = replacement->new;
1895 /* Note that in some cases those types of expressions are altered
1896 by optimize_bit_field, and do not survive to get here. */
1897 if (XEXP (x, 0) == var
1898 || (GET_CODE (XEXP (x, 0)) == SUBREG
1899 && SUBREG_REG (XEXP (x, 0)) == var))
1901 /* Get TEM as a valid MEM in the mode presently in the insn.
1903 We don't worry about the possibility of MATCH_DUP here; it
1904 is highly unlikely and would be tricky to handle. */
1907 if (GET_CODE (tem) == SUBREG)
1909 if (GET_MODE_BITSIZE (GET_MODE (tem))
1910 > GET_MODE_BITSIZE (GET_MODE (var)))
1912 replacement = find_fixup_replacement (replacements, var);
1913 if (replacement->new == 0)
1914 replacement->new = gen_reg_rtx (GET_MODE (var));
1915 SUBREG_REG (tem) = replacement->new;
1918 tem = fixup_memory_subreg (tem, insn, 0);
1921 tem = fixup_stack_1 (tem, insn);
1923 /* Unless we want to load from memory, get TEM into the proper mode
1924 for an extract from memory. This can only be done if the
1925 extract is at a constant position and length. */
1927 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1928 && GET_CODE (XEXP (x, 2)) == CONST_INT
1929 && ! mode_dependent_address_p (XEXP (tem, 0))
1930 && ! MEM_VOLATILE_P (tem))
1932 enum machine_mode wanted_mode = VOIDmode;
1933 enum machine_mode is_mode = GET_MODE (tem);
1934 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1937 if (GET_CODE (x) == ZERO_EXTRACT)
1940 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1941 if (wanted_mode == VOIDmode)
1942 wanted_mode = word_mode;
1946 if (GET_CODE (x) == SIGN_EXTRACT)
1948 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1949 if (wanted_mode == VOIDmode)
1950 wanted_mode = word_mode;
1953 /* If we have a narrower mode, we can do something. */
1954 if (wanted_mode != VOIDmode
1955 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1957 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1958 rtx old_pos = XEXP (x, 2);
1961 /* If the bytes and bits are counted differently, we
1962 must adjust the offset. */
1963 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1964 offset = (GET_MODE_SIZE (is_mode)
1965 - GET_MODE_SIZE (wanted_mode) - offset);
1967 pos %= GET_MODE_BITSIZE (wanted_mode);
1969 newmem = gen_rtx_MEM (wanted_mode,
1970 plus_constant (XEXP (tem, 0), offset));
1971 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1972 MEM_COPY_ATTRIBUTES (newmem, tem);
1974 /* Make the change and see if the insn remains valid. */
1975 INSN_CODE (insn) = -1;
1976 XEXP (x, 0) = newmem;
1977 XEXP (x, 2) = GEN_INT (pos);
1979 if (recog_memoized (insn) >= 0)
1982 /* Otherwise, restore old position. XEXP (x, 0) will be
1984 XEXP (x, 2) = old_pos;
1988 /* If we get here, the bitfield extract insn can't accept a memory
1989 reference. Copy the input into a register. */
1991 tem1 = gen_reg_rtx (GET_MODE (tem));
1992 emit_insn_before (gen_move_insn (tem1, tem), insn);
1999 if (SUBREG_REG (x) == var)
2001 /* If this is a special SUBREG made because VAR was promoted
2002 from a wider mode, replace it with VAR and call ourself
2003 recursively, this time saying that the object previously
2004 had its current mode (by virtue of the SUBREG). */
2006 if (SUBREG_PROMOTED_VAR_P (x))
2009 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2013 /* If this SUBREG makes VAR wider, it has become a paradoxical
2014 SUBREG with VAR in memory, but these aren't allowed at this
2015 stage of the compilation. So load VAR into a pseudo and take
2016 a SUBREG of that pseudo. */
2017 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2019 replacement = find_fixup_replacement (replacements, var);
2020 if (replacement->new == 0)
2021 replacement->new = gen_reg_rtx (GET_MODE (var));
2022 SUBREG_REG (x) = replacement->new;
2026 /* See if we have already found a replacement for this SUBREG.
2027 If so, use it. Otherwise, make a MEM and see if the insn
2028 is recognized. If not, or if we should force MEM into a register,
2029 make a pseudo for this SUBREG. */
2030 replacement = find_fixup_replacement (replacements, x);
2031 if (replacement->new)
2033 *loc = replacement->new;
2037 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2039 INSN_CODE (insn) = -1;
2040 if (! flag_force_mem && recog_memoized (insn) >= 0)
2043 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2049 /* First do special simplification of bit-field references. */
2050 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2051 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2052 optimize_bit_field (x, insn, NULL_PTR);
2053 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2054 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2055 optimize_bit_field (x, insn, NULL_PTR);
2057 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2058 into a register and then store it back out. */
2059 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2060 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2061 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2062 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2063 > GET_MODE_SIZE (GET_MODE (var))))
2065 replacement = find_fixup_replacement (replacements, var);
2066 if (replacement->new == 0)
2067 replacement->new = gen_reg_rtx (GET_MODE (var));
2069 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2070 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2073 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2074 insn into a pseudo and store the low part of the pseudo into VAR. */
2075 if (GET_CODE (SET_DEST (x)) == SUBREG
2076 && SUBREG_REG (SET_DEST (x)) == var
2077 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2078 > GET_MODE_SIZE (GET_MODE (var))))
2080 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2081 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2082 tem)),
2083 insn);
2088 rtx dest = SET_DEST (x);
2089 rtx src = SET_SRC (x);
2091 rtx outerdest = dest;
2094 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2095 || GET_CODE (dest) == SIGN_EXTRACT
2096 || GET_CODE (dest) == ZERO_EXTRACT)
2097 dest = XEXP (dest, 0);
2099 if (GET_CODE (src) == SUBREG)
2100 src = XEXP (src, 0);
2102 /* If VAR does not appear at the top level of the SET
2103 just scan the lower levels of the tree. */
2105 if (src != var && dest != var)
2108 /* We will need to rerecognize this insn. */
2109 INSN_CODE (insn) = -1;
2112 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2114 /* Since this case will return, ensure we fixup all the
2115 operands here. */
2116 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2117 insn, replacements);
2118 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2119 insn, replacements);
2120 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2121 insn, replacements);
2123 tem = XEXP (outerdest, 0);
2125 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2126 that may appear inside a ZERO_EXTRACT.
2127 This was legitimate when the MEM was a REG. */
2128 if (GET_CODE (tem) == SUBREG
2129 && SUBREG_REG (tem) == var)
2130 tem = fixup_memory_subreg (tem, insn, 0);
2132 tem = fixup_stack_1 (tem, insn);
2134 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2135 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2136 && ! mode_dependent_address_p (XEXP (tem, 0))
2137 && ! MEM_VOLATILE_P (tem))
2139 enum machine_mode wanted_mode;
2140 enum machine_mode is_mode = GET_MODE (tem);
2141 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2143 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2144 if (wanted_mode == VOIDmode)
2145 wanted_mode = word_mode;
2147 /* If we have a narrower mode, we can do something. */
2148 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2150 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2151 rtx old_pos = XEXP (outerdest, 2);
2154 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2155 offset = (GET_MODE_SIZE (is_mode)
2156 - GET_MODE_SIZE (wanted_mode) - offset);
2158 pos %= GET_MODE_BITSIZE (wanted_mode);
2160 newmem = gen_rtx_MEM (wanted_mode,
2161 plus_constant (XEXP (tem, 0),
2162 offset));
2163 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2164 MEM_COPY_ATTRIBUTES (newmem, tem);
2166 /* Make the change and see if the insn remains valid. */
2167 INSN_CODE (insn) = -1;
2168 XEXP (outerdest, 0) = newmem;
2169 XEXP (outerdest, 2) = GEN_INT (pos);
2171 if (recog_memoized (insn) >= 0)
2174 /* Otherwise, restore old position. XEXP (x, 0) will be
2175 restored later. */
2176 XEXP (outerdest, 2) = old_pos;
2180 /* If we get here, the bit-field store doesn't allow memory
2181 or isn't located at a constant position. Load the value into
2182 a register, do the store, and put it back into memory. */
2184 tem1 = gen_reg_rtx (GET_MODE (tem));
2185 emit_insn_before (gen_move_insn (tem1, tem), insn);
2186 emit_insn_after (gen_move_insn (tem, tem1), insn);
2187 XEXP (outerdest, 0) = tem1;
2192 /* STRICT_LOW_PART is a no-op on memory references
2193 and it can cause combinations to be unrecognizable,
2194 so eliminate it. */
2196 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2197 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2199 /* A valid insn to copy VAR into or out of a register
2200 must be left alone, to avoid an infinite loop here.
2201 If the reference to VAR is by a subreg, fix that up,
2202 since SUBREG is not valid for a memref.
2203 Also fix up the address of the stack slot.
2205 Note that we must not try to recognize the insn until
2206 after we know that we have valid addresses and no
2207 (subreg (mem ...) ...) constructs, since these interfere
2208 with determining the validity of the insn. */
2210 if ((SET_SRC (x) == var
2211 || (GET_CODE (SET_SRC (x)) == SUBREG
2212 && SUBREG_REG (SET_SRC (x)) == var))
2213 && (GET_CODE (SET_DEST (x)) == REG
2214 || (GET_CODE (SET_DEST (x)) == SUBREG
2215 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2216 && GET_MODE (var) == promoted_mode
2217 && x == single_set (insn))
2221 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2222 if (replacement->new)
2223 SET_SRC (x) = replacement->new;
2224 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2225 SET_SRC (x) = replacement->new
2226 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2227 else
2228 SET_SRC (x) = replacement->new
2229 = fixup_stack_1 (SET_SRC (x), insn);
2231 if (recog_memoized (insn) >= 0)
2234 /* INSN is not valid, but we know that we want to
2235 copy SET_SRC (x) to SET_DEST (x) in some way. So
2236 we generate the move and see whether it requires more
2237 than one insn. If it does, we emit those insns and
2238 delete INSN. Otherwise, we can just replace the pattern
2239 of INSN; we have already verified above that INSN has
2240 no other function than to do X. */
2242 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2243 if (GET_CODE (pat) == SEQUENCE)
2245 emit_insn_after (pat, insn);
2246 PUT_CODE (insn, NOTE);
2247 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2248 NOTE_SOURCE_FILE (insn) = 0;
2250 else
2251 PATTERN (insn) = pat;
2256 if ((SET_DEST (x) == var
2257 || (GET_CODE (SET_DEST (x)) == SUBREG
2258 && SUBREG_REG (SET_DEST (x)) == var))
2259 && (GET_CODE (SET_SRC (x)) == REG
2260 || (GET_CODE (SET_SRC (x)) == SUBREG
2261 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2262 && GET_MODE (var) == promoted_mode
2263 && x == single_set (insn))
2267 if (GET_CODE (SET_DEST (x)) == SUBREG)
2268 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2269 else
2270 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2272 if (recog_memoized (insn) >= 0)
2275 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2276 if (GET_CODE (pat) == SEQUENCE)
2278 emit_insn_after (pat, insn);
2279 PUT_CODE (insn, NOTE);
2280 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2281 NOTE_SOURCE_FILE (insn) = 0;
2283 else
2284 PATTERN (insn) = pat;
2289 /* Otherwise, storing into VAR must be handled specially
2290 by storing into a temporary and copying that into VAR
2291 with a new insn after this one. Note that this case
2292 will be used when storing into a promoted scalar since
2293 the insn will now have different modes on the input
2294 and output and hence will be invalid (except for the case
2295 of setting it to a constant, which does not need any
2296 change if it is valid). We generate extra code in that case,
2297 but combine.c will eliminate it. */
2302 rtx fixeddest = SET_DEST (x);
2304 /* STRICT_LOW_PART can be discarded, around a MEM. */
2305 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2306 fixeddest = XEXP (fixeddest, 0);
2307 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2308 if (GET_CODE (fixeddest) == SUBREG)
2310 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2311 promoted_mode = GET_MODE (fixeddest);
2314 fixeddest = fixup_stack_1 (fixeddest, insn);
2316 temp = gen_reg_rtx (promoted_mode);
2318 emit_insn_after (gen_move_insn (fixeddest,
2319 gen_lowpart (GET_MODE (fixeddest),
2320 temp)),
2321 insn);
2323 SET_DEST (x) = temp;
2331 /* Nothing special about this RTX; fix its operands. */
2333 fmt = GET_RTX_FORMAT (code);
2334 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2336 if (fmt[i] == 'e')
2337 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2338 else if (fmt[i] == 'E')
2341 for (j = 0; j < XVECLEN (x, i); j++)
2342 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2343 insn, replacements);
2348 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2349 return an rtx (MEM:m1 newaddr) which is equivalent.
2350 If any insns must be emitted to compute NEWADDR, put them before INSN.
2352 UNCRITICAL nonzero means accept paradoxical subregs.
2353 This is used for subregs found inside REG_NOTES. */
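/* An illustrative example of the rewrite this function performs,
   assuming a target with 4-byte words: (subreg:QI (mem:SI addr) 0)
   names the low-order byte of the word, so it becomes (mem:QI addr)
   on a little-endian target but (mem:QI (plus addr (const_int 3)))
   on a big-endian one, where the low-order byte lives at the
   highest address. */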
2356 fixup_memory_subreg (x, insn, uncritical)
2361 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2362 rtx addr = XEXP (SUBREG_REG (x), 0);
2363 enum machine_mode mode = GET_MODE (x);
2366 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2367 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2368 && ! uncritical)
2369 abort ();
2371 if (BYTES_BIG_ENDIAN)
2372 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2373 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2374 addr = plus_constant (addr, offset);
2375 if (!flag_force_addr && memory_address_p (mode, addr))
2376 /* Shortcut if no insns need be emitted. */
2377 return change_address (SUBREG_REG (x), mode, addr);
2378 start_sequence ();
2379 result = change_address (SUBREG_REG (x), mode, addr);
2380 emit_insn_before (gen_sequence (), insn);
2381 end_sequence ();
2382 return result;
2385 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2386 Replace subexpressions of X in place.
2387 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2388 Otherwise return X, with its contents possibly altered.
2390 If any insns must be emitted to compute NEWADDR, put them before INSN.
2392 UNCRITICAL is as in fixup_memory_subreg. */
2395 walk_fixup_memory_subreg (x, insn, uncritical)
2400 register enum rtx_code code;
2401 register const char *fmt;
2407 code = GET_CODE (x);
2409 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2410 return fixup_memory_subreg (x, insn, uncritical);
2412 /* Nothing special about this RTX; fix its operands. */
2414 fmt = GET_RTX_FORMAT (code);
2415 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2417 if (fmt[i] == 'e')
2418 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2419 else if (fmt[i] == 'E')
2422 for (j = 0; j < XVECLEN (x, i); j++)
2423 XVECEXP (x, i, j)
2424 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2430 /* For each memory ref within X, if it refers to a stack slot
2431 with an out of range displacement, put the address in a temp register
2432 (emitting new insns before INSN to load these registers)
2433 and alter the memory ref to use that register.
2434 Replace each such MEM rtx with a copy, to avoid clobberage. */
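/* A sketch of the effect (addresses and displacement illustrative):
   if (mem:SI (plus (reg fp) (const_int 100000))) fails
   memory_address_p because the displacement is out of range, the sum
   is computed into a fresh pseudo by insns emitted before INSN and
   the reference becomes (mem:SI (reg pseudo)). */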
2437 fixup_stack_1 (x, insn)
2442 register RTX_CODE code = GET_CODE (x);
2443 register const char *fmt;
2447 register rtx ad = XEXP (x, 0);
2448 /* If we have address of a stack slot but it's not valid
2449 (displacement is too large), compute the sum in a register. */
2450 if (GET_CODE (ad) == PLUS
2451 && GET_CODE (XEXP (ad, 0)) == REG
2452 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2453 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2454 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2455 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2456 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2457 #endif
2458 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2459 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2460 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2461 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2464 if (memory_address_p (GET_MODE (x), ad))
2465 return x;
2467 start_sequence ();
2468 temp = copy_to_reg (ad);
2469 seq = gen_sequence ();
2470 end_sequence ();
2471 emit_insn_before (seq, insn);
2472 return change_address (x, VOIDmode, temp);
2477 fmt = GET_RTX_FORMAT (code);
2478 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2480 if (fmt[i] == 'e')
2481 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2482 else if (fmt[i] == 'E')
2485 for (j = 0; j < XVECLEN (x, i); j++)
2486 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2492 /* Optimization: a bit-field instruction whose field
2493 happens to be a byte or halfword in memory
2494 can be changed to a move instruction.
2496 We call here when INSN is an insn to examine or store into a bit-field.
2497 BODY is the SET-rtx to be altered.
2499 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2500 (Currently this is called only from function.c, and EQUIV_MEM
2501 is always 0.) */
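/* For instance (assuming bits are numbered to match bytes on the
   target): (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
   names a byte-aligned eight-bit field, so the extraction can be
   rewritten as a simple QImode load from
   (mem:QI (plus addr (const_int 1))). */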
2504 optimize_bit_field (body, insn, equiv_mem)
2509 register rtx bitfield;
2512 enum machine_mode mode;
2514 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2515 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2516 bitfield = SET_DEST (body), destflag = 1;
2518 bitfield = SET_SRC (body), destflag = 0;
2520 /* First check that the field being stored has constant size and position
2521 and is in fact a byte or halfword suitably aligned. */
2523 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2524 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2525 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2526 != BLKmode)
2527 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2529 register rtx memref = 0;
2531 /* Now check that the containing word is memory, not a register,
2532 and that it is safe to change the machine mode. */
2534 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2535 memref = XEXP (bitfield, 0);
2536 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2538 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2539 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2540 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2541 memref = SUBREG_REG (XEXP (bitfield, 0));
2542 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2544 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2545 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2548 && ! mode_dependent_address_p (XEXP (memref, 0))
2549 && ! MEM_VOLATILE_P (memref))
2551 /* Now adjust the address, first for any subreg'ing
2552 that we are now getting rid of,
2553 and then for which byte of the word is wanted. */
2555 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2558 /* Adjust OFFSET to count bits from low-address byte. */
2559 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2560 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2561 - offset - INTVAL (XEXP (bitfield, 1)));
2563 /* Adjust OFFSET to count bytes from low-address byte. */
2564 offset /= BITS_PER_UNIT;
2565 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2567 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2568 if (BYTES_BIG_ENDIAN)
2569 offset -= (MIN (UNITS_PER_WORD,
2570 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2571 - MIN (UNITS_PER_WORD,
2572 GET_MODE_SIZE (GET_MODE (memref))));
2574 start_sequence ();
2576 memref = change_address (memref, mode,
2577 plus_constant (XEXP (memref, 0), offset));
2578 insns = get_insns ();
2579 end_sequence ();
2580 emit_insns_before (insns, insn);
2582 /* Store this memory reference where
2583 we found the bit field reference. */
2587 validate_change (insn, &SET_DEST (body), memref, 1);
2588 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2590 rtx src = SET_SRC (body);
2591 while (GET_CODE (src) == SUBREG
2592 && SUBREG_WORD (src) == 0)
2593 src = SUBREG_REG (src);
2594 if (GET_MODE (src) != GET_MODE (memref))
2595 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2596 validate_change (insn, &SET_SRC (body), src, 1);
2598 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2599 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2600 /* This shouldn't happen because anything that didn't have
2601 one of these modes should have got converted explicitly
2602 and then referenced through a subreg.
2603 This is so because the original bit-field was
2604 handled by agg_mode and so its tree structure had
2605 the same mode that memref now has. */
2606 abort ();
2610 rtx dest = SET_DEST (body);
2612 while (GET_CODE (dest) == SUBREG
2613 && SUBREG_WORD (dest) == 0
2614 && (GET_MODE_CLASS (GET_MODE (dest))
2615 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2616 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2617 <= UNITS_PER_WORD))
2618 dest = SUBREG_REG (dest);
2620 validate_change (insn, &SET_DEST (body), dest, 1);
2622 if (GET_MODE (dest) == GET_MODE (memref))
2623 validate_change (insn, &SET_SRC (body), memref, 1);
2626 /* Convert the mem ref to the destination mode. */
2627 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2630 convert_move (newreg, memref,
2631 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2635 validate_change (insn, &SET_SRC (body), newreg, 1);
2639 /* See if we can convert this extraction or insertion into
2640 a simple move insn. We might not be able to do so if this
2641 was, for example, part of a PARALLEL.
2643 If we succeed, write out any needed conversions. If we fail,
2644 it is hard to guess why we failed, so don't do anything
2645 special; just let the optimization be suppressed. */
2647 if (apply_change_group () && seq)
2648 emit_insns_before (seq, insn);
2653 /* These routines are responsible for converting virtual register references
2654 to the actual hard register references once RTL generation is complete.
2656 The following five variables are used for communication between the
2657 routines. They contain the offsets of the virtual registers from their
2658 respective hard registers. */
2660 static int in_arg_offset;
2661 static int var_offset;
2662 static int dynamic_offset;
2663 static int out_arg_offset;
2664 static int cfa_offset;
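/* As an illustration of the mapping these offsets drive (the value 8
   below is made up): if FIRST_PARM_OFFSET yields 8 for the function,
   a use of (plus virtual_incoming_args_rtx (const_int 4)) is
   instantiated as (plus (arg pointer) (const_int 12)), folding
   in_arg_offset into the displacement. */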
2666 /* On most machines, the stack pointer register is equivalent to the bottom
2667 of the stack. */
2669 #ifndef STACK_POINTER_OFFSET
2670 #define STACK_POINTER_OFFSET 0
2673 /* If not defined, pick an appropriate default for the offset of dynamically
2674 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2675 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2677 #ifndef STACK_DYNAMIC_OFFSET
2679 #ifdef ACCUMULATE_OUTGOING_ARGS
2680 /* The bottom of the stack points to the actual arguments. If
2681 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2682 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2683 stack space for register parameters is not pushed by the caller, but
2684 rather part of the fixed stack areas and hence not included in
2685 `current_function_outgoing_args_size'. Nevertheless, we must allow
2686 for it when allocating stack dynamic objects. */
2688 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2689 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2690 (current_function_outgoing_args_size \
2691 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2694 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2695 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2699 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
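/* A worked example with made-up numbers: under ACCUMULATE_OUTGOING_ARGS,
   with 16 bytes of outgoing arguments, no REG_PARM_STACK_SPACE, and a
   STACK_POINTER_OFFSET of 0, STACK_DYNAMIC_OFFSET is 16, so references
   through virtual_stack_dynamic_rtx translate to the stack pointer
   plus 16, just past the outgoing-argument area. */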
2703 /* On a few machines, the CFA coincides with the arg pointer. */
2705 #ifndef ARG_POINTER_CFA_OFFSET
2706 #define ARG_POINTER_CFA_OFFSET 0
2710 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2711 its address taken. DECL is the decl for the object stored in the
2712 register, for later use if we do need to force REG into the stack.
2713 REG is overwritten by the MEM like in put_reg_into_stack. */
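/* Illustrative before/after (register numbers made up, and Pmode
   assumed to be SImode): if DECL lives in (reg:SI 42) and its address
   is taken, the REG is rewritten in place as
   (mem:SI (addressof:SI (reg:SI 43) 42 <decl>)), so every existing
   reference now goes through a MEM whose ADDRESSOF can later be
   purged or forced into a real stack slot. */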
2716 gen_mem_addressof (reg, decl)
2720 tree type = TREE_TYPE (decl);
2721 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2724 /* If the original REG was a user-variable, then so is the REG whose
2725 address is being taken. Likewise for unchanging. */
2726 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2727 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2729 PUT_CODE (reg, MEM);
2730 PUT_MODE (reg, DECL_MODE (decl));
2731 XEXP (reg, 0) = r;
2732 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2733 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2734 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2736 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2737 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2739 return reg;
2742 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2746 flush_addressof (decl)
2749 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2750 && DECL_RTL (decl) != 0
2751 && GET_CODE (DECL_RTL (decl)) == MEM
2752 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2753 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2754 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2758 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2761 put_addressof_into_stack (r, ht)
2763 struct hash_table *ht;
2765 tree decl = ADDRESSOF_DECL (r);
2766 rtx reg = XEXP (r, 0);
2768 if (GET_CODE (reg) != REG)
2769 abort ();
2771 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2772 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2773 ADDRESSOF_REGNO (r),
2774 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2777 /* List of replacements made below in purge_addressof_1 when creating
2778 bitfield insertions. */
2779 static rtx purge_bitfield_addressof_replacements;
2781 /* List of replacements made below in purge_addressof_1 for patterns
2782 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2783 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2784 the whole pattern. List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2785 enough in complex cases, e.g. when some field values can be
2786 extracted by using a MEM with a narrower mode. */
2787 static rtx purge_addressof_replacements;
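/* Both lists are chains of EXPR_LIST nodes pairing a key with its
   substitution. For example (register numbers invented), an entry
   recording that (addressof (reg 42)) was replaced by (reg 45) in an
   insn lets the identical rtx found later in a REG_NOTE be rewritten
   the same way instead of being recomputed. */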
2789 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2790 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2791 the stack. If the function returns FALSE then the replacement could not
2792 be completed. */
2795 purge_addressof_1 (loc, insn, force, store, ht)
2799 struct hash_table *ht;
2805 boolean result = true;
2807 /* Re-start here to avoid recursion in common cases. */
2814 code = GET_CODE (x);
2816 /* If we don't return in any of the cases below, we will recurse inside
2817 the RTX, which will normally result in any ADDRESSOF being forced into
2818 memory. */
2820 if (code == SET)
2821 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2822 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2823 return result;
2826 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2828 /* We must create a copy of the rtx because it was created by
2829 overwriting a REG rtx which is always shared. */
2830 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2833 if (validate_change (insn, loc, sub, 0)
2834 || validate_replace_rtx (x, sub, insn))
2838 sub = force_operand (sub, NULL_RTX);
2839 if (! validate_change (insn, loc, sub, 0)
2840 && ! validate_replace_rtx (x, sub, insn))
2843 insns = gen_sequence ();
2845 emit_insn_before (insns, insn);
2849 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2851 rtx sub = XEXP (XEXP (x, 0), 0);
2854 if (GET_CODE (sub) == MEM)
2856 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2857 MEM_COPY_ATTRIBUTES (sub2, sub);
2858 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2861 else if (GET_CODE (sub) == REG
2862 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2864 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2866 int size_x, size_sub;
2870 /* When processing REG_NOTES look at the list of
2871 replacements done on the insn to find the register that X
2872 was replaced by. */
2875 for (tem = purge_bitfield_addressof_replacements;
2876 tem;
2877 tem = XEXP (XEXP (tem, 1), 1))
2878 if (rtx_equal_p (x, XEXP (tem, 0)))
2880 *loc = XEXP (XEXP (tem, 1), 0);
2884 /* See comment for purge_addressof_replacements. */
2885 for (tem = purge_addressof_replacements;
2886 tem;
2887 tem = XEXP (XEXP (tem, 1), 1))
2888 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2890 rtx z = XEXP (XEXP (tem, 1), 0);
2892 if (GET_MODE (x) == GET_MODE (z)
2893 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2894 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2897 /* It can happen that the note may speak of things
2898 in a wider (or just different) mode than the
2899 code did. This is especially true of
2900 REG_RETVAL. */
2902 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2903 z = SUBREG_REG (z);
2905 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2906 && (GET_MODE_SIZE (GET_MODE (x))
2907 > GET_MODE_SIZE (GET_MODE (z))))
2909 /* This can occur as a result of invalid
2910 pointer casts, e.g. float f; ...
2911 *(long long int *)&f.
2912 ??? We could emit a warning here, but
2913 without a line number that wouldn't be
2914 useful. */
2915 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2917 else
2918 z = gen_lowpart (GET_MODE (x), z);
2924 /* Sometimes we may not be able to find the replacement. For
2925 example when the original insn was a MEM in a wider mode,
2926 and the note is part of a sign extension of a narrowed
2927 version of that MEM. Gcc testcase compile/990829-1.c can
2928 generate an example of this situation. Rather than complain
2929 we return false, which will prompt our caller to remove the
2930 offending note. */
2931 return false;
2934 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2935 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2937 /* Don't even consider working with paradoxical subregs,
2938 or the moral equivalent seen here. */
2939 if (size_x <= size_sub
2940 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2942 /* Do a bitfield insertion to mirror what would happen
2943 in memory. */
2949 rtx p = PREV_INSN (insn);
2952 val = gen_reg_rtx (GET_MODE (x));
2953 if (! validate_change (insn, loc, val, 0))
2955 /* Discard the current sequence and put the
2956 ADDRESSOF on stack. */
2960 seq = gen_sequence ();
2962 emit_insn_before (seq, insn);
2963 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2964 insn, ht);
2967 store_bit_field (sub, size_x, 0, GET_MODE (x),
2968 val, GET_MODE_SIZE (GET_MODE (sub)),
2969 GET_MODE_SIZE (GET_MODE (sub)));
2971 /* Make sure to unshare any shared rtl that store_bit_field
2972 might have created. */
2973 for (p = get_insns(); p; p = NEXT_INSN (p))
2975 reset_used_flags (PATTERN (p));
2976 reset_used_flags (REG_NOTES (p));
2977 reset_used_flags (LOG_LINKS (p));
2979 unshare_all_rtl (get_insns ());
2981 seq = gen_sequence ();
2983 p = emit_insn_after (seq, insn);
2984 if (NEXT_INSN (insn))
2985 compute_insns_for_mem (NEXT_INSN (insn),
2986 p ? NEXT_INSN (p) : NULL_RTX,
2987 ht);
2991 rtx p = PREV_INSN (insn);
2994 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2995 GET_MODE (x), GET_MODE (x),
2996 GET_MODE_SIZE (GET_MODE (sub)),
2997 GET_MODE_SIZE (GET_MODE (sub)));
2999 if (! validate_change (insn, loc, val, 0))
3001 /* Discard the current sequence and put the
3002 ADDRESSOF on stack. */
3007 seq = gen_sequence ();
3009 emit_insn_before (seq, insn);
3010 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3011 insn, ht);
3014 /* Remember the replacement so that the same one can be done
3015 on the REG_NOTES. */
3016 purge_bitfield_addressof_replacements
3017 = gen_rtx_EXPR_LIST (VOIDmode, x,
3020 purge_bitfield_addressof_replacements));
3022 /* We replaced with a reg -- all done. */
3027 else if (validate_change (insn, loc, sub, 0))
3029 /* Remember the replacement so that the same one can be done
3030 on the REG_NOTES. */
3031 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3035 for (tem = purge_addressof_replacements;
3036 tem;
3037 tem = XEXP (XEXP (tem, 1), 1))
3038 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3040 XEXP (XEXP (tem, 1), 0) = sub;
3043 purge_addressof_replacements
3044 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3045 gen_rtx_EXPR_LIST (VOIDmode, sub,
3046 purge_addressof_replacements));
3052 /* else give up and put it into the stack */
3055 else if (code == ADDRESSOF)
3057 put_addressof_into_stack (x, ht);
3058 return true;
3060 else if (code == SET)
3062 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3063 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3064 return result;
3067 /* Scan all subexpressions. */
3068 fmt = GET_RTX_FORMAT (code);
3069 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3071 if (*fmt == 'e')
3072 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3073 else if (*fmt == 'E')
3074 for (j = 0; j < XVECLEN (x, i); j++)
3075 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3081 /* Return a new hash table entry in HT. */
3083 static struct hash_entry *
3084 insns_for_mem_newfunc (he, ht, k)
3085 struct hash_entry *he;
3086 struct hash_table *ht;
3087 hash_table_key k ATTRIBUTE_UNUSED;
3089 struct insns_for_mem_entry *ifmhe;
3093 ifmhe = ((struct insns_for_mem_entry *)
3094 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3095 ifmhe->insns = NULL_RTX;
3100 /* Return a hash value for K, a REG. */
3102 static unsigned long
3103 insns_for_mem_hash (k)
3106 /* K is really a RTX. Just use the address as the hash value. */
3107 return (unsigned long) k;
3110 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3113 insns_for_mem_comp (k1, k2)
3120 struct insns_for_mem_walk_info {
3121 /* The hash table that we are using to record which INSNs use which
3122 MEMs. */
3123 struct hash_table *ht;
3125 /* The INSN we are currently processing. */
3128 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3129 to find the insns that use the REGs in the ADDRESSOFs. */
3133 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3134 that might be used in an ADDRESSOF expression, record this INSN in
3135 the hash table given by DATA (which is really a pointer to an
3136 insns_for_mem_walk_info structure). */
3139 insns_for_mem_walk (r, data)
3143 struct insns_for_mem_walk_info *ifmwi
3144 = (struct insns_for_mem_walk_info *) data;
3146 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3147 && GET_CODE (XEXP (*r, 0)) == REG)
3148 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3149 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3151 /* Lookup this MEM in the hashtable, creating it if necessary. */
3152 struct insns_for_mem_entry *ifme
3153 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3158 /* If we have not already recorded this INSN, do so now. Since
3159 we process the INSNs in order, we know that if we have
3160 recorded it it must be at the front of the list. */
3161 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3163 /* We do the allocation on the same obstack as is used for
3164 the hash table since this memory will not be used once
3165 the hash table is deallocated. */
3166 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3167 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3176 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3177 which REGs in HT. */
3180 compute_insns_for_mem (insns, last_insn, ht)
3183 struct hash_table *ht;
3186 struct insns_for_mem_walk_info ifmwi;
3189 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3190 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3191 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3194 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3198 /* Helper function for purge_addressof called through for_each_rtx.
3199 Returns true iff the rtl is an ADDRESSOF. */
3201 is_addressof (rtl, data)
3203 void * data ATTRIBUTE_UNUSED;
3205 return GET_CODE (* rtl) == ADDRESSOF;
3208 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3209 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3210 stack. */
3213 purge_addressof (insns)
3217 struct hash_table ht;
3219 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3220 requires a fixup pass over the instruction stream to correct
3221 INSNs that depended on the REG being a REG, and not a MEM. But,
3222 these fixup passes are slow. Furthermore, most MEMs are not
3223 mentioned in very many instructions. So, we speed up the process
3224 by pre-calculating which REGs occur in which INSNs; that allows
3225 us to perform the fixup passes much more quickly. */
3226 hash_table_init (&ht,
3227 insns_for_mem_newfunc,
3228 insns_for_mem_hash,
3229 insns_for_mem_comp);
3230 compute_insns_for_mem (insns, NULL_RTX, &ht);
3232 for (insn = insns; insn; insn = NEXT_INSN (insn))
3233 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3234 || GET_CODE (insn) == CALL_INSN)
3236 if (! purge_addressof_1 (&PATTERN (insn), insn,
3237 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3238 /* If we could not replace the ADDRESSOFs in the insn,
3239 something is wrong. */
3240 abort ();
3242 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3244 /* If we could not replace the ADDRESSOFs in the insn's notes,
3245 we can just remove the offending notes instead. */
3248 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3250 /* If we find a REG_RETVAL note then the insn is a libcall.
3251 Such insns must have REG_EQUAL notes as well, in order
3252 for later passes of the compiler to work. So it is not
3253 safe to delete the notes here, and instead we abort. */
3254 if (REG_NOTE_KIND (note) == REG_RETVAL)
3255 abort ();
3256 if (for_each_rtx (& note, is_addressof, NULL))
3257 remove_note (insn, note);
3263 hash_table_free (&ht);
3264 purge_bitfield_addressof_replacements = 0;
3265 purge_addressof_replacements = 0;
3268 /* Pass through the INSNS of function FNDECL and convert virtual register
3269 references to hard register references. */
3272 instantiate_virtual_regs (fndecl, insns)
3279 /* Compute the offsets to use for this function. */
3280 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3281 var_offset = STARTING_FRAME_OFFSET;
3282 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3283 out_arg_offset = STACK_POINTER_OFFSET;
3284 cfa_offset = ARG_POINTER_CFA_OFFSET;
3286 /* Scan all variables and parameters of this function. For each that is
3287 in memory, instantiate all virtual registers if the result is a valid
3288 address. If not, we do it later. That will handle most uses of virtual
3289 regs on many machines. */
3290 instantiate_decls (fndecl, 1);
3292 /* Initialize recognition, indicating that volatile is OK. */
3293 init_recog ();
3295 /* Scan through all the insns, instantiating every virtual register still
3296 present. */
3297 for (insn = insns; insn; insn = NEXT_INSN (insn))
3298 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3299 || GET_CODE (insn) == CALL_INSN)
3301 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3302 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3305 /* Instantiate the stack slots for the parm registers, for later use in
3306 addressof elimination. */
3307 for (i = 0; i < max_parm_reg; ++i)
3308 if (parm_reg_stack_loc[i])
3309 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3311 /* Now instantiate the remaining register equivalences for debugging info.
3312 These will not be valid addresses. */
3313 instantiate_decls (fndecl, 0);
3315 /* Indicate that, from now on, assign_stack_local should use
3316 frame_pointer_rtx. */
3317 virtuals_instantiated = 1;
3320 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3321 all virtual registers in their DECL_RTL's.
3323 If VALID_ONLY, do this only if the resulting address is still valid.
3324 Otherwise, always do it. */
3327 instantiate_decls (fndecl, valid_only)
3333 if (DECL_SAVED_INSNS (fndecl))
3334 /* When compiling an inline function, the obstack used for
3335 rtl allocation is the maybepermanent_obstack. Calling
3336 `resume_temporary_allocation' switches us back to that
3337 obstack while we process this function's parameters. */
3338 resume_temporary_allocation ();
3340 /* Process all parameters of the function. */
3341 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3343 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3345 instantiate_decl (DECL_RTL (decl), size, valid_only);
3347 /* If the parameter was promoted, then the incoming RTL mode may be
3348 larger than the declared type size. We must use the larger of
3349 the two sizes. */
3350 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3351 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3354 /* Now process all variables defined in the function or its subblocks. */
3355 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3357 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3359 /* Save all rtl allocated for this function by raising the
3360 high-water mark on the maybepermanent_obstack. */
3362 /* All further rtl allocation is now done in the current_obstack. */
3363 rtl_in_current_obstack ();
3367 /* Subroutine of instantiate_decls: Process all decls in the given
3368 BLOCK node and all its subblocks. */
3371 instantiate_decls_1 (let, valid_only)
3377 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3378 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3379 valid_only);
3381 /* Process all subblocks. */
3382 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3383 instantiate_decls_1 (t, valid_only);
3386 /* Subroutine of the preceding procedures: Given RTL representing a
3387 decl and the size of the object, do any instantiation required.
3389 If VALID_ONLY is non-zero, it means that the RTL should only be
3390 changed if the new address is valid. */
3393 instantiate_decl (x, size, valid_only)
3398 enum machine_mode mode;
3401 /* If this is not a MEM, no need to do anything. Similarly if the
3402 address is a constant or a register that is not a virtual register. */
3404 if (x == 0 || GET_CODE (x) != MEM)
3405 return;
3407 addr = XEXP (x, 0);
3408 if (CONSTANT_P (addr)
3409 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3410 || (GET_CODE (addr) == REG
3411 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3412 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3415 /* If we should only do this if the address is valid, copy the address.
3416 We need to do this so we can undo any changes that might make the
3417 address invalid. This copy is unfortunate, but probably can't be
3418 avoided. */
3420 if (valid_only)
3421 addr = copy_rtx (addr);
3423 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3427 /* Now verify that the resulting address is valid for every integer or
3428 floating-point mode up to and including SIZE bytes long. We do this
3429 since the object might be accessed in any mode and frame addresses
3430 are shared. */
3432 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3433 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3434 mode = GET_MODE_WIDER_MODE (mode))
3435 if (! memory_address_p (mode, addr))
3436 return;
3438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3439 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3440 mode = GET_MODE_WIDER_MODE (mode))
3441 if (! memory_address_p (mode, addr))
3442 return;
3445 /* Put back the address now that we have updated it and we either know
3446 it is valid or we don't care whether it is valid. */
3448 XEXP (x, 0) = addr;
3451 /* Given a pointer to a piece of rtx and an optional pointer to the
3452 containing object, instantiate any virtual registers present in it.
3454 If EXTRA_INSNS, we always do the replacement and generate
3455 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3456 is not valid.
3458 Return 1 if we either had nothing to do or if we were able to do the
3459 needed replacement. Return 0 otherwise; we only return zero if
3460 EXTRA_INSNS is zero.
3462 We first try some simple transformations to avoid the creation of extra
3466 instantiate_virtual_regs_1 (loc, object, extra_insns)
3474 HOST_WIDE_INT offset = 0;
3480 /* Re-start here to avoid recursion in common cases. */
3487 code = GET_CODE (x);
3489 /* Check for some special cases. */
3506 /* We are allowed to set the virtual registers. This means that
3507 the actual register should receive the source minus the
3508 appropriate offset. This is used, for example, in the handling
3509 of non-local gotos. */
3510 if (SET_DEST (x) == virtual_incoming_args_rtx)
3511 new = arg_pointer_rtx, offset = - in_arg_offset;
3512 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3513 new = frame_pointer_rtx, offset = - var_offset;
3514 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3515 new = stack_pointer_rtx, offset = - dynamic_offset;
3516 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3517 new = stack_pointer_rtx, offset = - out_arg_offset;
3518 else if (SET_DEST (x) == virtual_cfa_rtx)
3519 new = arg_pointer_rtx, offset = - cfa_offset;
3523 rtx src = SET_SRC (x);
3525 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3527 /* The only valid sources here are PLUS or REG. Just do
3528 the simplest possible thing to handle them. */
3529 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3530 abort ();
3533 if (GET_CODE (src) != REG)
3534 temp = force_operand (src, NULL_RTX);
3535 else
3536 temp = src;
3537 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3541 emit_insns_before (seq, object);
3544 if (! validate_change (object, &SET_SRC (x), temp, 0)
3551 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3556 /* Handle special case of virtual register plus constant. */
3557 if (CONSTANT_P (XEXP (x, 1)))
3559 rtx old, new_offset;
3561 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3562 if (GET_CODE (XEXP (x, 0)) == PLUS)
3564 rtx inner = XEXP (XEXP (x, 0), 0);
3566 if (inner == virtual_incoming_args_rtx)
3567 new = arg_pointer_rtx, offset = in_arg_offset;
3568 else if (inner == virtual_stack_vars_rtx)
3569 new = frame_pointer_rtx, offset = var_offset;
3570 else if (inner == virtual_stack_dynamic_rtx)
3571 new = stack_pointer_rtx, offset = dynamic_offset;
3572 else if (inner == virtual_outgoing_args_rtx)
3573 new = stack_pointer_rtx, offset = out_arg_offset;
3574 else if (inner == virtual_cfa_rtx)
3575 new = arg_pointer_rtx, offset = cfa_offset;
3582 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3583 extra_insns);
3584 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3587 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3588 new = arg_pointer_rtx, offset = in_arg_offset;
3589 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3590 new = frame_pointer_rtx, offset = var_offset;
3591 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3592 new = stack_pointer_rtx, offset = dynamic_offset;
3593 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3594 new = stack_pointer_rtx, offset = out_arg_offset;
3595 else if (XEXP (x, 0) == virtual_cfa_rtx)
3596 new = arg_pointer_rtx, offset = cfa_offset;
3599 /* We know the second operand is a constant. Unless the
3600 first operand is a REG (which has been already checked),
3601 it needs to be checked. */
3602 if (GET_CODE (XEXP (x, 0)) != REG)
3610 new_offset = plus_constant (XEXP (x, 1), offset);
3612 /* If the new constant is zero, try to replace the sum with just
3613 the register. */
3614 if (new_offset == const0_rtx
3615 && validate_change (object, loc, new, 0))
3618 /* Next try to replace the register and new offset.
3619 There are two changes to validate here and we can't assume that
3620 in the case of old offset equals new just changing the register
3621 will yield a valid insn. In the interests of a little efficiency,
3622 however, we only call validate change once (we don't queue up the
3623 changes and then call apply_change_group). */
3625 old = XEXP (x, 0);
3626 if (offset == 0
3627 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3628 : (XEXP (x, 0) = new,
3629 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3637 /* Otherwise copy the new constant into a register and replace
3638 constant with that register. */
3639 temp = gen_reg_rtx (Pmode);
3641 if (validate_change (object, &XEXP (x, 1), temp, 0))
3642 emit_insn_before (gen_move_insn (temp, new_offset), object);
3645 /* If that didn't work, replace this expression with a
3646 register containing the sum. */
3649 new = gen_rtx_PLUS (Pmode, new, new_offset);
3652 temp = force_operand (new, NULL_RTX);
3656 emit_insns_before (seq, object);
3657 if (! validate_change (object, loc, temp, 0)
3658 && ! validate_replace_rtx (x, temp, object))
3666 /* Fall through to generic two-operand expression case. */
3672 case DIV: case UDIV:
3673 case MOD: case UMOD:
3674 case AND: case IOR: case XOR:
3675 case ROTATERT: case ROTATE:
3676 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3678 case GE: case GT: case GEU: case GTU:
3679 case LE: case LT: case LEU: case LTU:
3680 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3681 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3686 /* Most cases of MEM that convert to valid addresses have already been
3687 handled by our scan of decls. The only special handling we
3688 need here is to make a copy of the rtx to ensure it isn't being
3689 shared if we have to change it to a pseudo.
3691 If the rtx is a simple reference to an address via a virtual register,
3692 it can potentially be shared. In such cases, first try to make it
3693 a valid address, which can also be shared. Otherwise, copy it and
3694 process the copy.
3696 First check for common cases that need no processing. These are
3697 usually due to instantiation already being done on a previous instance
3698 of a shared rtx. */
3700 temp = XEXP (x, 0);
3701 if (CONSTANT_ADDRESS_P (temp)
3702 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3703 || temp == arg_pointer_rtx
3704 #endif
3705 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3706 || temp == hard_frame_pointer_rtx
3707 #endif
3708 || temp == frame_pointer_rtx)
3709 return 1;
3711 if (GET_CODE (temp) == PLUS
3712 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3713 && (XEXP (temp, 0) == frame_pointer_rtx
3714 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3715 || XEXP (temp, 0) == hard_frame_pointer_rtx
3716 #endif
3717 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3718 || XEXP (temp, 0) == arg_pointer_rtx
3719 #endif
3720 ))
3721 return 1;
3723 if (temp == virtual_stack_vars_rtx
3724 || temp == virtual_incoming_args_rtx
3725 || (GET_CODE (temp) == PLUS
3726 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3727 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3728 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3730 /* This MEM may be shared. If the substitution can be done without
3731 the need to generate new pseudos, we want to do it in place
3732 so all copies of the shared rtx benefit. The call below will
3733 only make substitutions if the resulting address is still
3736 Note that we cannot pass X as the object in the recursive call
3737 since the insn being processed may not allow all valid
3738 addresses. However, if we were not passed an object, we can
3739 only modify X without copying it if X will have a valid
3740 address.
3742 ??? Also note that this can still lose if OBJECT is an insn that
3743 has fewer restrictions on an address than some other insn.
3744 In that case, we will modify the shared address. This case
3745 doesn't seem very likely, though. One case where this could
3746 happen is in the case of a USE or CLOBBER reference, but we
3747 take care of that below. */
3749 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3750 object ? object : x, 0))
3751 return 1;
3753 /* Otherwise make a copy and process that copy. We copy the entire
3754 RTL expression since it might be a PLUS which could also be
3755 shared. */
3756 *loc = x = copy_rtx (x);
3759 /* Fall through to generic unary operation case. */
3761 case STRICT_LOW_PART:
3763 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3764 case SIGN_EXTEND: case ZERO_EXTEND:
3765 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3766 case FLOAT: case FIX:
3767 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3771 /* These cases either have just one operand or we know that we need not
3772 check the rest of the operands. */
3778 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3779 go ahead and make the invalid one, but do it to a copy. For a REG,
3780 just make the recursive call, since there's no chance of a problem. */
3782 if ((GET_CODE (XEXP (x, 0)) == MEM
3783 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3784 0))
3785 || (GET_CODE (XEXP (x, 0)) == REG
3786 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3787 return 1;
3789 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3794 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3795 in front of this insn and substitute the temporary. */
3796 if (x == virtual_incoming_args_rtx)
3797 new = arg_pointer_rtx, offset = in_arg_offset;
3798 else if (x == virtual_stack_vars_rtx)
3799 new = frame_pointer_rtx, offset = var_offset;
3800 else if (x == virtual_stack_dynamic_rtx)
3801 new = stack_pointer_rtx, offset = dynamic_offset;
3802 else if (x == virtual_outgoing_args_rtx)
3803 new = stack_pointer_rtx, offset = out_arg_offset;
3804 else if (x == virtual_cfa_rtx)
3805 new = arg_pointer_rtx, offset = cfa_offset;
3809 temp = plus_constant (new, offset);
3810 if (!validate_change (object, loc, temp, 0))
3816 temp = force_operand (temp, NULL_RTX);
3820 emit_insns_before (seq, object);
3821 if (! validate_change (object, loc, temp, 0)
3822 && ! validate_replace_rtx (x, temp, object))
3830 if (GET_CODE (XEXP (x, 0)) == REG)
3831 return 1;
3833 else if (GET_CODE (XEXP (x, 0)) == MEM)
3835 /* If we have a (addressof (mem ..)), do any instantiation inside
3836 since we know we'll be making the inside valid when we finally
3837 remove the ADDRESSOF. */
3838 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3847 /* Scan all subexpressions. */
3848 fmt = GET_RTX_FORMAT (code);
3849 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3850 if (*fmt == 'e')
3851 {
3852 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3853 return 0;
3854 }
3855 else if (*fmt == 'E')
3856 for (j = 0; j < XVECLEN (x, i); j++)
3857 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3858 extra_insns))
3859 return 0;
3864 /* Optimization: assuming this function does not receive nonlocal gotos,
3865 delete the handlers for such, as well as the insns to establish
3866 and disestablish them. */
3872 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3874 /* Delete the handler by turning off the flag that would
3875 prevent jump_optimize from deleting it.
3876 Also permit deletion of the nonlocal labels themselves
3877 if nothing local refers to them. */
3878 if (GET_CODE (insn) == CODE_LABEL)
3882 LABEL_PRESERVE_P (insn) = 0;
3884 /* Remove it from the nonlocal_label list, to avoid confusing
3885 flow. */
3886 for (t = nonlocal_labels, last_t = 0; t;
3887 last_t = t, t = TREE_CHAIN (t))
3888 if (DECL_RTL (TREE_VALUE (t)) == insn)
3893 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3894 else
3895 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3898 if (GET_CODE (insn) == INSN)
3902 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3903 if (reg_mentioned_p (t, PATTERN (insn)))
3909 || (nonlocal_goto_stack_level != 0
3910 && reg_mentioned_p (nonlocal_goto_stack_level,
3911 PATTERN (insn))))
3912 delete_insn (insn);
3917 /* Output a USE for any register use in RTL.
3918 This is used with -noreg to mark the extent of lifespan
3919 of any registers used in a user-visible variable's DECL_RTL. */
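/* E.g. for a variable kept in (reg:SI 51) this emits (use (reg:SI 51))
   at the end of the insn chain, keeping the register live for the
   whole function; the register number is of course illustrative. */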
3925 if (GET_CODE (rtl) == REG)
3926 /* This is a register variable. */
3927 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3928 else if (GET_CODE (rtl) == MEM
3929 && GET_CODE (XEXP (rtl, 0)) == REG
3930 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3931 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3932 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3933 /* This is a variable-sized structure. */
3934 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3937 /* Like use_variable except that it outputs the USEs after INSN
3938 instead of at the end of the insn-chain. */
3941 use_variable_after (rtl, insn)
3944 if (GET_CODE (rtl) == REG)
3945 /* This is a register variable. */
3946 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3947 else if (GET_CODE (rtl) == MEM
3948 && GET_CODE (XEXP (rtl, 0)) == REG
3949 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3950 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3951 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3952 /* This is a variable-sized structure. */
3953 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3959 return max_parm_reg;
3962 /* Return the first insn following those generated by `assign_parms'. */
3965 get_first_nonparm_insn ()
3967 if (last_parm_insn)
3968 return NEXT_INSN (last_parm_insn);
3969 return get_insns ();
3972 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3973 Crash if there is none. */
3976 get_first_block_beg ()
3978 register rtx searcher;
3979 register rtx insn = get_first_nonparm_insn ();
3981 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3982 if (GET_CODE (searcher) == NOTE
3983 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3984 return searcher;
3986 abort (); /* Invalid call to this function. (See comments above.) */
3990 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3991 This means a type for which function calls must pass an address to the
3992 function or get an address back from the function.
3993 EXP may be a type node or an expression (whose type is tested). */
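/* For instance, a return type such as `struct { char buf[64]; }'
   normally satisfies RETURN_IN_MEMORY, so this predicate yields 1 and
   callers pass a hidden address for the result, whereas a plain `int'
   comes back in a register and yields 0. The exact outcome is
   target-dependent. */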
3996 aggregate_value_p (exp)
3999 int i, regno, nregs;
4002 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4003 type = exp;
4004 else
4005 type = TREE_TYPE (exp);
4007 if (RETURN_IN_MEMORY (type))
4008 return 1;
4009 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4010 and thus can't be returned in registers. */
4011 if (TREE_ADDRESSABLE (type))
4012 return 1;
4013 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4014 return 1;
4015 /* Make sure we have suitable call-clobbered regs to return
4016 the value in; if not, we must return it in memory. */
4017 reg = hard_function_value (type, 0, 0);
4019 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4020 it is OK. */
4021 if (GET_CODE (reg) != REG)
4022 return 0;
4024 regno = REGNO (reg);
4025 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4026 for (i = 0; i < nregs; i++)
4027 if (! call_used_regs[regno + i])
4028 return 1;
4030 return 0;
4032 /* Assign RTL expressions to the function's parameters.
4033 This may involve copying them into registers and using
4034 those registers as the RTL for them. */
4037 assign_parms (fndecl)
4041 register rtx entry_parm = 0;
4042 register rtx stack_parm = 0;
4043 CUMULATIVE_ARGS args_so_far;
4044 enum machine_mode promoted_mode, passed_mode;
4045 enum machine_mode nominal_mode, promoted_nominal_mode;
4047 /* Total space needed so far for args on the stack,
4048 given as a constant and a tree-expression. */
4049 struct args_size stack_args_size;
4050 tree fntype = TREE_TYPE (fndecl);
4051 tree fnargs = DECL_ARGUMENTS (fndecl);
4052 /* This is used for the arg pointer when referring to stack args. */
4053 rtx internal_arg_pointer;
4054 /* This is a dummy PARM_DECL that we used for the function result if
4055 the function returns a structure. */
4056 tree function_result_decl = 0;
4057 #ifdef SETUP_INCOMING_VARARGS
4058 int varargs_setup = 0;
4060 rtx conversion_insns = 0;
4061 struct args_size alignment_pad;
4063 /* Nonzero if the last arg is named `__builtin_va_alist',
4064 which is used on some machines for old-fashioned non-ANSI varargs.h;
4065 this should be stuck onto the stack as if it had arrived there. */
4066 int hide_last_arg
4067 = (current_function_varargs
4069 && (parm = tree_last (fnargs)) != 0
4070 && DECL_NAME (parm)
4071 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4072 "__builtin_va_alist")));
4074 /* Nonzero if function takes extra anonymous args.
4075 This means the last named arg must be on the stack
4076 right before the anonymous ones. */
4077 int stdarg
4078 = (TYPE_ARG_TYPES (fntype) != 0
4079 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4080 != void_type_node));
4082 current_function_stdarg = stdarg;
4084 /* If the reg that the virtual arg pointer will be translated into is
4085 not a fixed reg or is the stack pointer, make a copy of the virtual
4086 arg pointer, and address parms via the copy. The frame pointer is
4087 considered fixed even though it is not marked as such.
4089 The second time through, simply use ap to avoid generating rtx. */
4091 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4092 || ! (fixed_regs[ARG_POINTER_REGNUM]
4093 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4094 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4095 else
4096 internal_arg_pointer = virtual_incoming_args_rtx;
4097 current_function_internal_arg_pointer = internal_arg_pointer;
4099 stack_args_size.constant = 0;
4100 stack_args_size.var = 0;
4102 /* If struct value address is treated as the first argument, make it so. */
4103 if (aggregate_value_p (DECL_RESULT (fndecl))
4104 && ! current_function_returns_pcc_struct
4105 && struct_value_incoming_rtx == 0)
4107 tree type = build_pointer_type (TREE_TYPE (fntype));
4109 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4111 DECL_ARG_TYPE (function_result_decl) = type;
4112 TREE_CHAIN (function_result_decl) = fnargs;
4113 fnargs = function_result_decl;
4116 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4117 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4119 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4120 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4122 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4125 /* We haven't yet found an argument that we must push and pretend the
4126 caller did. */
4127 current_function_pretend_args_size = 0;
4129 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4131 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4132 struct args_size stack_offset;
4133 struct args_size arg_size;
4134 int passed_pointer = 0;
4135 int did_conversion = 0;
4136 tree passed_type = DECL_ARG_TYPE (parm);
4137 tree nominal_type = TREE_TYPE (parm);
4140 /* Set LAST_NAMED if this is the last named arg before some anonymous args. */
4142 int last_named = ((TREE_CHAIN (parm) == 0
4143 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4144 && (stdarg || current_function_varargs));
4145 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4146 most machines, if this is a varargs/stdarg function, then we treat
4147 the last named arg as if it were anonymous too. */
4148 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4150 if (TREE_TYPE (parm) == error_mark_node
4151 /* This can happen after weird syntax errors
4152 or if an enum type is defined among the parms. */
4153 || TREE_CODE (parm) != PARM_DECL
4154 || passed_type == NULL)
4156 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4157 = gen_rtx_MEM (BLKmode, const0_rtx);
4158 TREE_USED (parm) = 1;
4162 /* For varargs.h function, save info about regs and stack space
4163 used by the individual args, not including the va_alist arg. */
4164 if (hide_last_arg && last_named)
4165 current_function_args_info = args_so_far;
4167 /* Find mode of arg as it is passed, and mode of arg
4168 as it should be during execution of this function. */
4169 passed_mode = TYPE_MODE (passed_type);
4170 nominal_mode = TYPE_MODE (nominal_type);
4172 /* If the parm's mode is VOID, its value doesn't matter; avoid the
4173 usual things like emit_move_insn, which could crash. */
4174 if (nominal_mode == VOIDmode)
4176 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4180 /* If the parm is to be passed as a transparent union, use the
4181 type of the first field for the tests below. We have already
4182 verified that the modes are the same. */
4183 if (DECL_TRANSPARENT_UNION (parm)
4184 || TYPE_TRANSPARENT_UNION (passed_type))
4185 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4187 /* See if this arg was passed by invisible reference. It is if
4188 it is an object whose size depends on the contents of the
4189 object itself or if the machine requires these objects be passed that way. */
4192 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4193 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4194 || TREE_ADDRESSABLE (passed_type)
4195 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4196 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4197 passed_type, named_arg)
4201 passed_type = nominal_type = build_pointer_type (passed_type);
4203 passed_mode = nominal_mode = Pmode;
4206 promoted_mode = passed_mode;
4208 #ifdef PROMOTE_FUNCTION_ARGS
4209 /* Compute the mode to which the arg is actually extended. */
4210 unsignedp = TREE_UNSIGNED (passed_type);
4211 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4214 /* Let machine desc say which reg (if any) the parm arrives in.
4215 0 means it arrives on the stack. */
4216 #ifdef FUNCTION_INCOMING_ARG
4217 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4218 passed_type, named_arg);
4220 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4221 passed_type, named_arg);
4224 if (entry_parm == 0)
4225 promoted_mode = passed_mode;
4227 #ifdef SETUP_INCOMING_VARARGS
4228 /* If this is the last named parameter, do any required setup for
4229 varargs or stdargs. We need to know about the case of this being an
4230 addressable type, in which case we skip the registers it
4231 would have arrived in.
4233 For stdargs, LAST_NAMED will be set for two parameters, the one that
4234 is actually the last named, and the dummy parameter. We only
4235 want to do this action once.
4237 Also, indicate when RTL generation is to be suppressed. */
4238 if (last_named && !varargs_setup)
4240 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4241 current_function_pretend_args_size, 0);
4246 /* Determine parm's home in the stack,
4247 in case it arrives in the stack or we should pretend it did.
4249 Compute the stack position and rtx where the argument arrives and its size.
4252 There is one complexity here: If this was a parameter that would
4253 have been passed in registers, but wasn't, only because it is
4254 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4255 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4256 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4257 0 as it was the previous time. */
4259 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4260 locate_and_pad_parm (promoted_mode, passed_type,
4261 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4264 #ifdef FUNCTION_INCOMING_ARG
4265 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4267 pretend_named) != 0,
4269 FUNCTION_ARG (args_so_far, promoted_mode,
4271 pretend_named) != 0,
4274 fndecl, &stack_args_size, &stack_offset, &arg_size,
4278 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4280 if (offset_rtx == const0_rtx)
4281 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4283 stack_parm = gen_rtx_MEM (promoted_mode,
4284 gen_rtx_PLUS (Pmode,
4285 internal_arg_pointer,
4288 /* If this is a memory ref that contains aggregate components,
4289 mark it as such for cse and loop optimize. Likewise if it is read-only. */
4291 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4292 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4293 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4296 /* If this parameter was passed both in registers and in the stack,
4297 use the copy on the stack. */
4298 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4301 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4302 /* If this parm was passed part in regs and part in memory,
4303 pretend it arrived entirely in memory
4304 by pushing the register-part onto the stack.
4306 In the special case of a DImode or DFmode that is split,
4307 we could put it together in a pseudoreg directly,
4308 but for now that's not worth bothering with. */
4312 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4313 passed_type, named_arg);
4317 current_function_pretend_args_size
4318 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4319 / (PARM_BOUNDARY / BITS_PER_UNIT)
4320 * (PARM_BOUNDARY / BITS_PER_UNIT));
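/* The expression above is a ceiling-round of the register-passed bytes
to PARM_BOUNDARY. For example, assuming UNITS_PER_WORD == 4 and
PARM_BOUNDARY == 64 (8 bytes), nregs == 3 yields
((12 + 7) / 8) * 8 == 16 bytes of pretend args. */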
4322 /* Handle calls that pass values in multiple non-contiguous
4323 locations. The Irix 6 ABI has examples of this. */
4324 if (GET_CODE (entry_parm) == PARALLEL)
4325 emit_group_store (validize_mem (stack_parm), entry_parm,
4326 int_size_in_bytes (TREE_TYPE (parm)),
4327 (TYPE_ALIGN (TREE_TYPE (parm))
4330 move_block_from_reg (REGNO (entry_parm),
4331 validize_mem (stack_parm), nregs,
4332 int_size_in_bytes (TREE_TYPE (parm)));
4334 entry_parm = stack_parm;
4339 /* If we didn't decide this parm came in a register,
4340 by default it came on the stack. */
4341 if (entry_parm == 0)
4342 entry_parm = stack_parm;
4344 /* Record permanently how this parm was passed. */
4345 DECL_INCOMING_RTL (parm) = entry_parm;
4347 /* If there is actually space on the stack for this parm,
4348 count it in stack_args_size; otherwise set stack_parm to 0
4349 to indicate there is no preallocated stack slot for the parm. */
4351 if (entry_parm == stack_parm
4352 || (GET_CODE (entry_parm) == PARALLEL
4353 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4354 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4355 /* On some machines, even if a parm value arrives in a register
4356 there is still an (uninitialized) stack slot allocated for it.
4358 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4359 whether this parameter already has a stack slot allocated,
4360 because an arg block exists only if current_function_args_size
4361 is larger than some threshold, and we haven't calculated that
4362 yet. So, for now, we just assume that stack slots never exist in this case. */
4364 || REG_PARM_STACK_SPACE (fndecl) > 0
4368 stack_args_size.constant += arg_size.constant;
4370 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4373 /* No stack slot was pushed for this parm. */
4376 /* Update info on where next arg arrives in registers. */
4378 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4379 passed_type, named_arg);
4381 /* If we can't trust the parm stack slot to be aligned enough
4382 for its ultimate type, don't use that slot after entry.
4383 We'll make another stack slot, if we need one. */
4385 int thisparm_boundary
4386 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4388 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4392 /* If parm was passed in memory, and we need to convert it on entry,
4393 don't store it back in that same slot. */
4395 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4399 /* Now adjust STACK_PARM to the mode and precise location
4400 where this parameter should live during execution,
4401 if we discover that it must live in the stack during execution.
4402 To make debuggers happier on big-endian machines, we store
4403 the value in the last bytes of the space available. */
4405 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4410 if (BYTES_BIG_ENDIAN
4411 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4412 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4413 - GET_MODE_SIZE (nominal_mode));
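/* For example, on a 32-bit big-endian target where an HImode parm
arrives promoted in an SImode slot, this adds 4 - 2 == 2 so the parm
is addressed in the last two bytes of the slot, which is where
big-endian byte order actually stores it. */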
4415 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4416 if (offset_rtx == const0_rtx)
4417 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4419 stack_parm = gen_rtx_MEM (nominal_mode,
4420 gen_rtx_PLUS (Pmode,
4421 internal_arg_pointer,
4424 /* If this is a memory ref that contains aggregate components,
4425 mark it as such for cse and loop optimize. */
4426 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4430 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4431 in the mode in which it arrives.
4432 STACK_PARM is an RTX for a stack slot where the parameter can live
4433 during the function (in case we want to put it there).
4434 STACK_PARM is 0 if no stack slot was pushed for it.
4436 Now output code if necessary to convert ENTRY_PARM to
4437 the type in which this function declares it,
4438 and store that result in an appropriate place,
4439 which may be a pseudo reg, may be STACK_PARM,
4440 or may be a local stack slot if STACK_PARM is 0.
4442 Set DECL_RTL to that place. */
4444 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4446 /* If a BLKmode arrives in registers, copy it to a stack slot.
4447 Handle calls that pass values in multiple non-contiguous
4448 locations. The Irix 6 ABI has examples of this. */
4449 if (GET_CODE (entry_parm) == REG
4450 || GET_CODE (entry_parm) == PARALLEL)
4453 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4456 /* Note that we will be storing an integral number of words.
4457 So we have to be careful to ensure that we allocate an
4458 integral number of words. We do this below in the
4459 assign_stack_local if space was not allocated in the argument
4460 list. If it was, this will not work if PARM_BOUNDARY is not
4461 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4462 if it becomes a problem. */
4464 if (stack_parm == 0)
4467 = assign_stack_local (GET_MODE (entry_parm),
4470 /* If this is a memory ref that contains aggregate
4471 components, mark it as such for cse and loop optimize. */
4472 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4475 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4478 if (TREE_READONLY (parm))
4479 RTX_UNCHANGING_P (stack_parm) = 1;
4481 /* Handle calls that pass values in multiple non-contiguous
4482 locations. The Irix 6 ABI has examples of this. */
4483 if (GET_CODE (entry_parm) == PARALLEL)
4484 emit_group_store (validize_mem (stack_parm), entry_parm,
4485 int_size_in_bytes (TREE_TYPE (parm)),
4486 (TYPE_ALIGN (TREE_TYPE (parm))
4489 move_block_from_reg (REGNO (entry_parm),
4490 validize_mem (stack_parm),
4491 size_stored / UNITS_PER_WORD,
4492 int_size_in_bytes (TREE_TYPE (parm)));
4494 DECL_RTL (parm) = stack_parm;
4496 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4497 && ! DECL_INLINE (fndecl))
4498 /* layout_decl may set this. */
4499 || TREE_ADDRESSABLE (parm)
4500 || TREE_SIDE_EFFECTS (parm)
4501 /* If -ffloat-store specified, don't put explicit
4502 float variables into registers. */
4503 || (flag_float_store
4504 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4505 /* Always assign pseudo to structure return or item passed
4506 by invisible reference. */
4507 || passed_pointer || parm == function_result_decl)
4509 /* Store the parm in a pseudoregister during the function, but we
4510 may need to do it in a wider mode. */
4512 register rtx parmreg;
4513 int regno, regnoi = 0, regnor = 0;
4515 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4517 promoted_nominal_mode
4518 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4520 parmreg = gen_reg_rtx (promoted_nominal_mode);
4521 mark_user_reg (parmreg);
4523 /* If this was an item that we received a pointer to, set DECL_RTL appropriately. */
4528 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4529 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4532 DECL_RTL (parm) = parmreg;
4534 /* Copy the value into the register. */
4535 if (nominal_mode != passed_mode
4536 || promoted_nominal_mode != promoted_mode)
4539 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4540 mode, by the caller. We now have to convert it to
4541 NOMINAL_MODE, if different. However, PARMREG may be in
4542 a different mode than NOMINAL_MODE if it is being stored promoted.
4545 If ENTRY_PARM is a hard register, it might be in a register
4546 not valid for operating in its mode (e.g., an odd-numbered
4547 register for a DFmode). In that case, moves are the only
4548 thing valid, so we can't do a convert from there. This
4549 occurs when the calling sequence allows such misaligned usage.
4552 In addition, the conversion may involve a call, which could
4553 clobber parameters which haven't been copied to pseudo
4554 registers yet. Therefore, we must first copy the parm to
4555 a pseudo reg here, and save the conversion until after all
4556 parameters have been moved. */
4558 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4560 emit_move_insn (tempreg, validize_mem (entry_parm));
4562 push_to_sequence (conversion_insns);
4563 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4565 /* TREE_USED gets set erroneously during expand_assignment. */
4566 save_tree_used = TREE_USED (parm);
4567 expand_assignment (parm,
4568 make_tree (nominal_type, tempreg), 0, 0);
4569 TREE_USED (parm) = save_tree_used;
4570 conversion_insns = get_insns ();
4575 emit_move_insn (parmreg, validize_mem (entry_parm));
4577 /* If we were passed a pointer but the actual value
4578 can safely live in a register, put it in one. */
4579 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4580 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4581 && ! DECL_INLINE (fndecl))
4582 /* layout_decl may set this. */
4583 || TREE_ADDRESSABLE (parm)
4584 || TREE_SIDE_EFFECTS (parm)
4585 /* If -ffloat-store specified, don't put explicit
4586 float variables into registers. */
4587 || (flag_float_store
4588 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4590 /* We can't use nominal_mode, because it will have been set to
4591 Pmode above. We must use the actual mode of the parm. */
4592 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4593 mark_user_reg (parmreg);
4594 emit_move_insn (parmreg, DECL_RTL (parm));
4595 DECL_RTL (parm) = parmreg;
4596 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm. */
4600 #ifdef FUNCTION_ARG_CALLEE_COPIES
4601 /* If we are passed an arg by reference and it is our responsibility
4602 to make a copy, do it now.
4603 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4604 original argument, so we must recreate them in the call to
4605 FUNCTION_ARG_CALLEE_COPIES. */
4606 /* ??? Later add code so that, if the argument isn't modified, we
4607 don't do the copy. */
4609 else if (passed_pointer
4610 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4611 TYPE_MODE (DECL_ARG_TYPE (parm)),
4612 DECL_ARG_TYPE (parm),
4614 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4617 tree type = DECL_ARG_TYPE (parm);
4619 /* This sequence may involve a library call perhaps clobbering
4620 registers that haven't been copied to pseudos yet. */
4622 push_to_sequence (conversion_insns);
4624 if (TYPE_SIZE (type) == 0
4625 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4626 /* This is a variable sized object. */
4627 copy = gen_rtx_MEM (BLKmode,
4628 allocate_dynamic_stack_space
4629 (expr_size (parm), NULL_RTX,
4630 TYPE_ALIGN (type)));
4632 copy = assign_stack_temp (TYPE_MODE (type),
4633 int_size_in_bytes (type), 1);
4634 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4635 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4637 store_expr (parm, copy, 0);
4638 emit_move_insn (parmreg, XEXP (copy, 0));
4639 if (current_function_check_memory_usage)
4640 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4641 XEXP (copy, 0), Pmode,
4642 GEN_INT (int_size_in_bytes (type)),
4643 TYPE_MODE (sizetype),
4644 GEN_INT (MEMORY_USE_RW),
4645 TYPE_MODE (integer_type_node));
4646 conversion_insns = get_insns ();
4650 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4652 /* In any case, record the parm's desired stack location
4653 in case we later discover it must live in the stack.
4655 If it is a COMPLEX value, store the stack location for both halves. */
4658 if (GET_CODE (parmreg) == CONCAT)
4659 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4661 regno = REGNO (parmreg);
4663 if (regno >= max_parm_reg)
4666 int old_max_parm_reg = max_parm_reg;
4668 /* It's slow to expand this one register at a time,
4669 but it's also rare and we need max_parm_reg to be
4670 precisely correct. */
4671 max_parm_reg = regno + 1;
4672 new = (rtx *) xrealloc (parm_reg_stack_loc,
4673 max_parm_reg * sizeof (rtx));
4674 bzero ((char *) (new + old_max_parm_reg),
4675 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4676 parm_reg_stack_loc = new;
4679 if (GET_CODE (parmreg) == CONCAT)
4681 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4683 regnor = REGNO (gen_realpart (submode, parmreg));
4684 regnoi = REGNO (gen_imagpart (submode, parmreg));
4686 if (stack_parm != 0)
4688 parm_reg_stack_loc[regnor]
4689 = gen_realpart (submode, stack_parm);
4690 parm_reg_stack_loc[regnoi]
4691 = gen_imagpart (submode, stack_parm);
4695 parm_reg_stack_loc[regnor] = 0;
4696 parm_reg_stack_loc[regnoi] = 0;
4700 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4702 /* Mark the register as eliminable if we did no conversion
4703 and it was copied from memory at a fixed offset,
4704 and the arg pointer was not copied to a pseudo-reg.
4705 If the arg pointer is a pseudo reg or the offset formed
4706 an invalid address, such memory-equivalences
4707 as we make here would screw up life analysis for it. */
4708 if (nominal_mode == passed_mode
4711 && GET_CODE (stack_parm) == MEM
4712 && stack_offset.var == 0
4713 && reg_mentioned_p (virtual_incoming_args_rtx,
4714 XEXP (stack_parm, 0)))
4716 rtx linsn = get_last_insn ();
4719 /* Mark complex types separately. */
4720 if (GET_CODE (parmreg) == CONCAT)
4721 /* Scan backwards for the set of the real and imaginary parts. */
4723 for (sinsn = linsn; sinsn != 0;
4724 sinsn = prev_nonnote_insn (sinsn))
4726 set = single_set (sinsn);
4728 && SET_DEST (set) == regno_reg_rtx [regnoi])
4730 = gen_rtx_EXPR_LIST (REG_EQUIV,
4731 parm_reg_stack_loc[regnoi],
4734 && SET_DEST (set) == regno_reg_rtx [regnor])
4736 = gen_rtx_EXPR_LIST (REG_EQUIV,
4737 parm_reg_stack_loc[regnor],
4740 else if ((set = single_set (linsn)) != 0
4741 && SET_DEST (set) == parmreg)
4743 = gen_rtx_EXPR_LIST (REG_EQUIV,
4744 stack_parm, REG_NOTES (linsn));
4747 /* For pointer data type, suggest pointer register. */
4748 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4749 mark_reg_pointer (parmreg,
4750 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4755 /* Value must be stored in the stack slot STACK_PARM
4756 during function execution. */
4758 if (promoted_mode != nominal_mode)
4760 /* Conversion is required. */
4761 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4763 emit_move_insn (tempreg, validize_mem (entry_parm));
4765 push_to_sequence (conversion_insns);
4766 entry_parm = convert_to_mode (nominal_mode, tempreg,
4767 TREE_UNSIGNED (TREE_TYPE (parm)));
4770 /* ??? This may need a big-endian conversion on sparc64. */
4771 stack_parm = change_address (stack_parm, nominal_mode,
4774 conversion_insns = get_insns ();
4779 if (entry_parm != stack_parm)
4781 if (stack_parm == 0)
4784 = assign_stack_local (GET_MODE (entry_parm),
4785 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4786 /* If this is a memory ref that contains aggregate components,
4787 mark it as such for cse and loop optimize. */
4788 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4791 if (promoted_mode != nominal_mode)
4793 push_to_sequence (conversion_insns);
4794 emit_move_insn (validize_mem (stack_parm),
4795 validize_mem (entry_parm));
4796 conversion_insns = get_insns ();
4800 emit_move_insn (validize_mem (stack_parm),
4801 validize_mem (entry_parm));
4803 if (current_function_check_memory_usage)
4805 push_to_sequence (conversion_insns);
4806 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4807 XEXP (stack_parm, 0), Pmode,
4808 GEN_INT (GET_MODE_SIZE (GET_MODE
4810 TYPE_MODE (sizetype),
4811 GEN_INT (MEMORY_USE_RW),
4812 TYPE_MODE (integer_type_node));
4814 conversion_insns = get_insns ();
4817 DECL_RTL (parm) = stack_parm;
4820 /* If this "parameter" was the place where we are receiving the
4821 function's incoming structure pointer, set up the result. */
4822 if (parm == function_result_decl)
4824 tree result = DECL_RESULT (fndecl);
4825 tree restype = TREE_TYPE (result);
4828 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4830 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4831 AGGREGATE_TYPE_P (restype));
4834 if (TREE_THIS_VOLATILE (parm))
4835 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4836 if (TREE_READONLY (parm))
4837 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4840 /* Output all parameter conversion instructions (possibly including calls)
4841 now that all parameters have been copied out of hard registers. */
4842 emit_insns (conversion_insns);
4844 last_parm_insn = get_last_insn ();
4846 current_function_args_size = stack_args_size.constant;
4848 /* Adjust function incoming argument size for alignment and minimum size. */
4851 #ifdef REG_PARM_STACK_SPACE
4852 #ifndef MAYBE_REG_PARM_STACK_SPACE
4853 current_function_args_size = MAX (current_function_args_size,
4854 REG_PARM_STACK_SPACE (fndecl));
4858 #ifdef STACK_BOUNDARY
4859 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4861 current_function_args_size
4862 = ((current_function_args_size + STACK_BYTES - 1)
4863 / STACK_BYTES) * STACK_BYTES;
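/* For example, with STACK_BOUNDARY == 64 (STACK_BYTES == 8), an
argument block of 20 bytes rounds to ((20 + 7) / 8) * 8 == 24 bytes,
keeping the incoming stack aligned. */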
4866 #ifdef ARGS_GROW_DOWNWARD
4867 current_function_arg_offset_rtx
4868 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4869 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4870 size_int (-stack_args_size.constant)),
4871 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4873 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4876 /* See how many bytes, if any, of its args a function should try to pop on return. */
4879 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4880 current_function_args_size);
4882 /* For stdarg.h function, save info about
4883 regs and stack space used by the named args. */
4886 current_function_args_info = args_so_far;
4888 /* Set the rtx used for the function return value. Put this in its
4889 own variable so any optimizers that need this information don't have
4890 to include tree.h. Do this here so it gets done when an inlined
4891 function gets output. */
4893 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4896 /* Indicate whether REGNO is an incoming argument to the current function
4897 that was promoted to a wider mode. If so, return the RTX for the
4898 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4899 that REGNO is promoted from and whether the promotion was signed or unsigned. */
4902 #ifdef PROMOTE_FUNCTION_ARGS
4905 promoted_input_arg (regno, pmode, punsignedp)
4907 enum machine_mode *pmode;
4912 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4913 arg = TREE_CHAIN (arg))
4914 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4915 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4916 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4918 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4919 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4921 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4922 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4923 && mode != DECL_MODE (arg))
4925 *pmode = DECL_MODE (arg);
4926 *punsignedp = unsignedp;
4927 return DECL_INCOMING_RTL (arg);
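/* A hypothetical caller (illustration only): on a target that promotes
QImode args to SImode, a pass holding hard reg REGNO (x) can do

rtx in = promoted_input_arg (REGNO (x), &mode, &unsignedp);

and, when IN is nonzero, know the register already holds a value
extended from MODE, e.g. so a redundant extension can be deleted. */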
4936 /* Compute the size and offset from the start of the stacked arguments for a
4937 parm passed in mode PASSED_MODE and with type TYPE.
4939 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
4942 The starting offset and size for this parm are returned in *OFFSET_PTR
4943 and *ARG_SIZE_PTR, respectively.
4945 IN_REGS is non-zero if the argument will be passed in registers. It will
4946 never be set if REG_PARM_STACK_SPACE is not defined.
4948 FNDECL is the function in which the argument was defined.
4950 There are two types of rounding that are done. The first, controlled by
4951 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4952 list to be aligned to the specific boundary (in bits). This rounding
4953 affects the initial and starting offsets, but not the argument size.
4955 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4956 optionally rounds the size of the parm to PARM_BOUNDARY. The
4957 initial offset is not affected by this rounding, while the size always
4958 is and the starting offset may be. */
4960 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4961 initial_offset_ptr is positive because locate_and_pad_parm's
4962 callers pass in the total size of args so far as
4963 initial_offset_ptr. arg_size_ptr is always positive. */
4966 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4967 initial_offset_ptr, offset_ptr, arg_size_ptr,
4969 enum machine_mode passed_mode;
4972 tree fndecl ATTRIBUTE_UNUSED;
4973 struct args_size *initial_offset_ptr;
4974 struct args_size *offset_ptr;
4975 struct args_size *arg_size_ptr;
4976 struct args_size *alignment_pad;
4980 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4981 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4982 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4984 #ifdef REG_PARM_STACK_SPACE
4985 /* If we have found a stack parm before we reach the end of the
4986 area reserved for registers, skip that area. */
4989 int reg_parm_stack_space = 0;
4991 #ifdef MAYBE_REG_PARM_STACK_SPACE
4992 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4994 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4996 if (reg_parm_stack_space > 0)
4998 if (initial_offset_ptr->var)
5000 initial_offset_ptr->var
5001 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5002 size_int (reg_parm_stack_space));
5003 initial_offset_ptr->constant = 0;
5005 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5006 initial_offset_ptr->constant = reg_parm_stack_space;
5009 #endif /* REG_PARM_STACK_SPACE */
5011 arg_size_ptr->var = 0;
5012 arg_size_ptr->constant = 0;
5014 #ifdef ARGS_GROW_DOWNWARD
5015 if (initial_offset_ptr->var)
5017 offset_ptr->constant = 0;
5018 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5019 initial_offset_ptr->var);
5023 offset_ptr->constant = - initial_offset_ptr->constant;
5024 offset_ptr->var = 0;
5026 if (where_pad != none
5027 && (TREE_CODE (sizetree) != INTEGER_CST
5028 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5029 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5030 SUB_PARM_SIZE (*offset_ptr, sizetree);
5031 if (where_pad != downward)
5032 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5033 if (initial_offset_ptr->var)
5035 arg_size_ptr->var = size_binop (MINUS_EXPR,
5036 size_binop (MINUS_EXPR,
5038 initial_offset_ptr->var),
5043 arg_size_ptr->constant = (- initial_offset_ptr->constant
5044 - offset_ptr->constant);
5046 #else /* !ARGS_GROW_DOWNWARD */
5047 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5048 *offset_ptr = *initial_offset_ptr;
5050 #ifdef PUSH_ROUNDING
5051 if (passed_mode != BLKmode)
5052 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5055 /* Pad_below needs the pre-rounded size to know how much to pad below,
5056 so this must be done before rounding up. */
5057 if (where_pad == downward
5058 /* However, BLKmode args passed in regs have their padding done elsewhere.
5059 The stack slot must be able to hold the entire register. */
5060 && !(in_regs && passed_mode == BLKmode))
5061 pad_below (offset_ptr, passed_mode, sizetree);
5063 if (where_pad != none
5064 && (TREE_CODE (sizetree) != INTEGER_CST
5065 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5066 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5068 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5069 #endif /* ARGS_GROW_DOWNWARD */
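/* A worked example of the two roundings, under assumed parameters (no
particular target): PARM_BOUNDARY == 32, STACK_BOUNDARY == 32, a
DImode parm with FUNCTION_ARG_BOUNDARY == 64, args growing upward,
and *INITIAL_OFFSET_PTR == 4. pad_to_arg_alignment rounds the offset
4 up to 8 (the 64-bit boundary), so *OFFSET_PTR == 8 and
ALIGNMENT_PAD->constant == 4; the 8-byte size is already a multiple
of PARM_BOUNDARY, so *ARG_SIZE_PTR == 8 and the next parm would start
at offset 16. */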
5072 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5073 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5076 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5077 struct args_size *offset_ptr;
5079 struct args_size *alignment_pad;
5081 tree save_var = NULL_TREE;
5082 HOST_WIDE_INT save_constant = 0;
5084 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5086 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5088 save_var = offset_ptr->var;
5089 save_constant = offset_ptr->constant;
5092 alignment_pad->var = NULL_TREE;
5093 alignment_pad->constant = 0;
5095 if (boundary > BITS_PER_UNIT)
5097 if (offset_ptr->var)
5100 #ifdef ARGS_GROW_DOWNWARD
5105 (ARGS_SIZE_TREE (*offset_ptr),
5106 boundary / BITS_PER_UNIT);
5107 offset_ptr->constant = 0; /*?*/
5108 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5109 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5113 offset_ptr->constant =
5114 #ifdef ARGS_GROW_DOWNWARD
5115 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5117 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5119 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5120 alignment_pad->constant = offset_ptr->constant - save_constant;
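/* For example, when args grow downward the offset is negative and must
be rounded away from zero: FLOOR_ROUND (-12, 8) == -16, while the
upward-growing case uses CEIL_ROUND (12, 8) == 16. Either way the
parm lands on an 8-byte boundary. */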
5125 #ifndef ARGS_GROW_DOWNWARD
5127 pad_below (offset_ptr, passed_mode, sizetree)
5128 struct args_size *offset_ptr;
5129 enum machine_mode passed_mode;
5132 if (passed_mode != BLKmode)
5134 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5135 offset_ptr->constant
5136 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5137 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5138 - GET_MODE_SIZE (passed_mode));
5142 if (TREE_CODE (sizetree) != INTEGER_CST
5143 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5145 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5146 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5148 ADD_PARM_SIZE (*offset_ptr, s2);
5149 SUB_PARM_SIZE (*offset_ptr, sizetree);
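/* For example, assuming PARM_BOUNDARY == 32 and a QImode parm: the
slot is padded out to 32 bits, so the offset moves up by 4 - 1 == 3
bytes and the byte itself sits at the top of the padded word. */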
5155 #ifdef ARGS_GROW_DOWNWARD
5157 round_down (value, divisor)
5161 return size_binop (MULT_EXPR,
5162 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5163 size_int (divisor));
5167 /* Walk the tree of blocks describing the binding levels within a function
5168 and warn about uninitialized variables.
5169 This is done after calling flow_analysis and before global_alloc
5170 clobbers the pseudo-regs to hard regs. */
5173 uninitialized_vars_warning (block)
5176 register tree decl, sub;
5177 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5179 if (warn_uninitialized
5180 && TREE_CODE (decl) == VAR_DECL
5181 /* These warnings are unreliable for aggregates
5182 because assigning the fields one by one can fail to convince
5183 flow.c that the entire aggregate was initialized.
5184 Unions are troublesome because members may be shorter. */
5185 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5186 && DECL_RTL (decl) != 0
5187 && GET_CODE (DECL_RTL (decl)) == REG
5188 /* Global optimizations can make it difficult to determine if a
5189 particular variable has been initialized. However, a VAR_DECL
5190 with a nonzero DECL_INITIAL had an initializer, so do not
5191 claim it is potentially uninitialized.
5193 We do not care about the actual value in DECL_INITIAL, so we do
5194 not worry that it may be a dangling pointer. */
5195 && DECL_INITIAL (decl) == NULL_TREE
5196 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5197 warning_with_decl (decl,
5198 "`%s' might be used uninitialized in this function");
5200 && TREE_CODE (decl) == VAR_DECL
5201 && DECL_RTL (decl) != 0
5202 && GET_CODE (DECL_RTL (decl)) == REG
5203 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5204 warning_with_decl (decl,
5205 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5207 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5208 uninitialized_vars_warning (sub);
5211 /* Do the appropriate part of uninitialized_vars_warning
5212 but for arguments instead of local variables. */
5215 setjmp_args_warning ()
5218 for (decl = DECL_ARGUMENTS (current_function_decl);
5219 decl; decl = TREE_CHAIN (decl))
5220 if (DECL_RTL (decl) != 0
5221 && GET_CODE (DECL_RTL (decl)) == REG
5222 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5223 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5226 /* If this function calls setjmp, put all vars into the stack
5227 unless they were declared `register'. */
5230 setjmp_protect (block)
5233 register tree decl, sub;
5234 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5235 if ((TREE_CODE (decl) == VAR_DECL
5236 || TREE_CODE (decl) == PARM_DECL)
5237 && DECL_RTL (decl) != 0
5238 && (GET_CODE (DECL_RTL (decl)) == REG
5239 || (GET_CODE (DECL_RTL (decl)) == MEM
5240 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5241 /* If this variable came from an inline function, it must be
5242 that its life doesn't overlap the setjmp. If there was a
5243 setjmp in the function, it would already be in memory. We
5244 must exclude such variables because their DECL_RTL might be
5245 set to strange things such as virtual_stack_vars_rtx. */
5246 && ! DECL_FROM_INLINE (decl)
5248 #ifdef NON_SAVING_SETJMP
5249 /* If longjmp doesn't restore the registers,
5250 don't put anything in them. */
5254 ! DECL_REGISTER (decl)))
5255 put_var_into_stack (decl);
5256 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5257 setjmp_protect (sub);
5260 /* Like the previous function, but for args instead of local variables. */
5263 setjmp_protect_args ()
5266 for (decl = DECL_ARGUMENTS (current_function_decl);
5267 decl; decl = TREE_CHAIN (decl))
5268 if ((TREE_CODE (decl) == VAR_DECL
5269 || TREE_CODE (decl) == PARM_DECL)
5270 && DECL_RTL (decl) != 0
5271 && (GET_CODE (DECL_RTL (decl)) == REG
5272 || (GET_CODE (DECL_RTL (decl)) == MEM
5273 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5275 /* If longjmp doesn't restore the registers,
5276 don't put anything in them. */
5277 #ifdef NON_SAVING_SETJMP
5281 ! DECL_REGISTER (decl)))
5282 put_var_into_stack (decl);
5285 /* Return the context-pointer register corresponding to DECL,
5286 or 0 if it does not need one. */
5289 lookup_static_chain (decl)
5292 tree context = decl_function_context (decl);
5296 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5299 /* We treat inline_function_decl as an alias for the current function
5300 because that is the inline function whose vars, types, etc.
5301 are being merged into the current function.
5302 See expand_inline_function. */
5303 if (context == current_function_decl || context == inline_function_decl)
5304 return virtual_stack_vars_rtx;
5306 for (link = context_display; link; link = TREE_CHAIN (link))
5307 if (TREE_PURPOSE (link) == context)
5308 return RTL_EXPR_RTL (TREE_VALUE (link));
5313 /* Convert a stack slot address ADDR for variable VAR
5314 (from a containing function)
5315 into an address valid in this function (using a static chain). */
5318 fix_lexical_addr (addr, var)
5323 HOST_WIDE_INT displacement;
5324 tree context = decl_function_context (var);
5325 struct function *fp;
5328 /* If this is the present function, we need not do anything. */
5329 if (context == current_function_decl || context == inline_function_decl)
5332 for (fp = outer_function_chain; fp; fp = fp->next)
5333 if (fp->decl == context)
5339 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5340 addr = XEXP (XEXP (addr, 0), 0);
5342 /* Decode given address as base reg plus displacement. */
5343 if (GET_CODE (addr) == REG)
5344 basereg = addr, displacement = 0;
5345 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5346 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5350 /* We accept vars reached via the containing function's
5351 incoming arg pointer and via its stack variables pointer. */
5352 if (basereg == fp->internal_arg_pointer)
5354 /* If reached via arg pointer, get the arg pointer value
5355 out of that function's stack frame.
5357 There are two cases: If a separate ap is needed, allocate a
5358 slot in the outer function for it and dereference it that way.
5359 This is correct even if the real ap is actually a pseudo.
5360 Otherwise, just adjust the offset from the frame pointer to
5363 #ifdef NEED_SEPARATE_AP
5366 if (fp->x_arg_pointer_save_area == 0)
5367 fp->x_arg_pointer_save_area
5368 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5370 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5371 addr = memory_address (Pmode, addr);
5373 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5375 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5376 base = lookup_static_chain (var);
5380 else if (basereg == virtual_stack_vars_rtx)
5382 /* This is the same code as lookup_static_chain, duplicated here to
5383 avoid an extra call to decl_function_context. */
5386 for (link = context_display; link; link = TREE_CHAIN (link))
5387 if (TREE_PURPOSE (link) == context)
5389 base = RTL_EXPR_RTL (TREE_VALUE (link));
5397 /* Use same offset, relative to appropriate static chain or argument pointer. */
5399 return plus_constant (base, displacement);
5402 /* Return the address of the trampoline for entering nested fn FUNCTION.
5403 If necessary, allocate a trampoline (in the stack frame)
5404 and emit rtl to initialize its contents (at entry to this function). */
5407 trampoline_address (function)
5413 struct function *fp;
5416 /* Find an existing trampoline and return it. */
5417 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5418 if (TREE_PURPOSE (link) == function)
5420 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5422 for (fp = outer_function_chain; fp; fp = fp->next)
5423 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5424 if (TREE_PURPOSE (link) == function)
5426 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5428 return round_trampoline_addr (tramp);
5431 /* None exists; we must make one. */
5433 /* Find the `struct function' for the function containing FUNCTION. */
5435 fn_context = decl_function_context (function);
5436 if (fn_context != current_function_decl
5437 && fn_context != inline_function_decl)
5438 for (fp = outer_function_chain; fp; fp = fp->next)
5439 if (fp->decl == fn_context)
5442 /* Allocate run-time space for this trampoline
5443 (usually in the defining function's stack frame). */
5444 #ifdef ALLOCATE_TRAMPOLINE
5445 tramp = ALLOCATE_TRAMPOLINE (fp);
5447 /* If rounding is needed, allocate extra space
5448 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5449 #ifdef TRAMPOLINE_ALIGNMENT
5450 #define TRAMPOLINE_REAL_SIZE \
5451 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5453 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5455 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5459 /* Record the trampoline for reuse and note it for later initialization
5460 by expand_function_end. */
5463 push_obstacks (fp->function_maybepermanent_obstack,
5464 fp->function_maybepermanent_obstack);
5465 rtlexp = make_node (RTL_EXPR);
5466 RTL_EXPR_RTL (rtlexp) = tramp;
5467 fp->x_trampoline_list = tree_cons (function, rtlexp,
5468 fp->x_trampoline_list);
5473 /* Make the RTL_EXPR node temporary, not momentary, so that the
5474 trampoline_list doesn't become garbage. */
5475 int momentary = suspend_momentary ();
5476 rtlexp = make_node (RTL_EXPR);
5477 resume_momentary (momentary);
5479 RTL_EXPR_RTL (rtlexp) = tramp;
5480 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5483 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5484 return round_trampoline_addr (tramp);
5487 /* Given a trampoline address,
5488 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5491 round_trampoline_addr (tramp)
5494 #ifdef TRAMPOLINE_ALIGNMENT
5495 /* Round address up to desired boundary. */
5496 rtx temp = gen_reg_rtx (Pmode);
5497 temp = expand_binop (Pmode, add_optab, tramp,
5498 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5499 temp, 0, OPTAB_LIB_WIDEN);
5500 tramp = expand_binop (Pmode, and_optab, temp,
5501 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5502 temp, 0, OPTAB_LIB_WIDEN);
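/* For example, with TRAMPOLINE_ALIGNMENT == 64 the two binops compute
(tramp + 7) & -8 at run time: an address of 0x1005 becomes 0x1008,
while 0x1008 is left unchanged. */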
5507 /* The functions identify_blocks and reorder_blocks provide a way to
5508 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5509 duplicate portions of the RTL code. Call identify_blocks before
5510 changing the RTL, and call reorder_blocks after. */
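/* A minimal calling sketch (hypothetical pass, for illustration only):

identify_blocks (DECL_INITIAL (fndecl), get_insns ());
... reshuffle or duplicate insns, block notes included ...
DECL_INITIAL (fndecl)
= reorder_blocks (DECL_INITIAL (fndecl), get_insns ());

identify_blocks ties each block note to its BLOCK node;
reorder_blocks then rebuilds the BLOCK tree to match the
possibly-reordered notes. */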
5512 /* Put all this function's BLOCK nodes including those that are chained
5513 onto the first block into a vector, and return it.
5514 Also store in each NOTE for the beginning or end of a block
5515 the index of that block in the vector.
5516 The arguments are BLOCK, the chain of top-level blocks of the function,
5517 and INSNS, the insn chain of the function. */
5520 identify_blocks (block, insns)
5528 int current_block_number = 1;
5534 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5535 depth-first order. */
5536 n_blocks = all_blocks (block, 0);
5537 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5538 all_blocks (block, block_vector);
5540 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5542 for (insn = insns; insn; insn = NEXT_INSN (insn))
5543 if (GET_CODE (insn) == NOTE)
5545 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5549 /* If there are more block notes than BLOCKs, something is badly wrong. */
5551 if (current_block_number == n_blocks)
5554 b = block_vector[current_block_number++];
5555 NOTE_BLOCK (insn) = b;
5556 block_stack[depth++] = b;
5558 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5561 /* There are more NOTE_INSN_BLOCK_ENDs than
5562 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5565 NOTE_BLOCK (insn) = block_stack[--depth];
5569 /* In whole-function mode, we might not have seen the whole function
5570 yet, so we might not use up all the blocks. */
5571 if (n_blocks != current_block_number
5572 && !cfun->x_whole_function_mode_p)
5575 free (block_vector);
5579 /* Given a revised instruction chain, rebuild the tree structure of
5580 BLOCK nodes to correspond to the new order of RTL. The new block
5581 tree is inserted below TOP_BLOCK. Returns the current top-level block. */
5585 reorder_blocks (block, insns)
5589 tree current_block = block;
5592 if (block == NULL_TREE)
5595 /* Prune the old trees away, so that they don't get in the way. */
5596 BLOCK_SUBBLOCKS (current_block) = 0;
5597 BLOCK_CHAIN (current_block) = 0;
5599 for (insn = insns; insn; insn = NEXT_INSN (insn))
5600 if (GET_CODE (insn) == NOTE)
5602 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5604 tree block = NOTE_BLOCK (insn);
5605 /* If we have seen this block before, copy it. */
5606 if (TREE_ASM_WRITTEN (block))
5607 block = copy_node (block);
5608 BLOCK_SUBBLOCKS (block) = 0;
5609 TREE_ASM_WRITTEN (block) = 1;
5610 BLOCK_SUPERCONTEXT (block) = current_block;
5611 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5612 BLOCK_SUBBLOCKS (current_block) = block;
5613 current_block = block;
5615 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5617 BLOCK_SUBBLOCKS (current_block)
5618 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5619 current_block = BLOCK_SUPERCONTEXT (current_block);
5623 BLOCK_SUBBLOCKS (current_block)
5624 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5625 return current_block;
5628 /* Reverse the order of elements in the chain T of blocks,
5629 and return the new head of the chain (old last element). */
5635 register tree prev = 0, decl, next;
5636 for (decl = t; decl; decl = next)
5638 next = BLOCK_CHAIN (decl);
5639 BLOCK_CHAIN (decl) = prev;
5645 /* Count the subblocks of the list starting with BLOCK, and list them
5646 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all blocks. */
5650 all_blocks (block, vector)
5658 TREE_ASM_WRITTEN (block) = 0;
5660 /* Record this block. */
5662 vector[n_blocks] = block;
5666 /* Record the subblocks, and their subblocks... */
5667 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5668 vector ? vector + n_blocks : 0);
5669 block = BLOCK_CHAIN (block);
5675 /* Allocate a function structure and reset its contents to the defaults. */
5677 prepare_function_start ()
5679 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5681 init_stmt_for_function ();
5682 init_eh_for_function ();
5684 cse_not_expected = ! optimize;
5686 /* Caller save not needed yet. */
5687 caller_save_needed = 0;
5689 /* No stack slots have been made yet. */
5690 stack_slot_list = 0;
5692 current_function_has_nonlocal_label = 0;
5693 current_function_has_nonlocal_goto = 0;
5695 /* There is no stack slot for handling nonlocal gotos. */
5696 nonlocal_goto_handler_slots = 0;
5697 nonlocal_goto_stack_level = 0;
5699 /* No labels have been declared for nonlocal use. */
5700 nonlocal_labels = 0;
5701 nonlocal_goto_handler_labels = 0;
5703 /* No function calls so far in this function. */
5704 function_call_count = 0;
5706 /* No parm regs have been allocated.
5707 (This is important for output_inline_function.) */
5708 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5710 /* Initialize the RTL mechanism. */
5713 /* Initialize the queue of pending postincrements and postdecrements,
5714 and some other info in expr.c. */
5717 /* We haven't done register allocation yet. */
5720 init_varasm_status (cfun);
5722 /* Clear out data used for inlining. */
5723 cfun->inlinable = 0;
5724 cfun->original_decl_initial = 0;
5725 cfun->original_arg_vector = 0;
5727 cfun->stack_alignment_needed = 0;
5729 /* Set if a call to setjmp is seen. */
5730 current_function_calls_setjmp = 0;
5732 /* Set if a call to longjmp is seen. */
5733 current_function_calls_longjmp = 0;
5735 current_function_calls_alloca = 0;
5736 current_function_contains_functions = 0;
5737 current_function_is_leaf = 0;
5738 current_function_sp_is_unchanging = 0;
5739 current_function_uses_only_leaf_regs = 0;
5740 current_function_has_computed_jump = 0;
5741 current_function_is_thunk = 0;
5743 current_function_returns_pcc_struct = 0;
5744 current_function_returns_struct = 0;
5745 current_function_epilogue_delay_list = 0;
5746 current_function_uses_const_pool = 0;
5747 current_function_uses_pic_offset_table = 0;
5748 current_function_cannot_inline = 0;
5750 /* We have not yet needed to make a label to jump to for tail-recursion. */
5751 tail_recursion_label = 0;
5753 /* We haven't had a need to make a save area for ap yet. */
5754 arg_pointer_save_area = 0;
5756 /* No stack slots allocated yet. */
5759 /* No SAVE_EXPRs in this function yet. */
5762 /* No RTL_EXPRs in this function yet. */
5765 /* Set up to allocate temporaries. */
5768 /* Indicate that we need to distinguish between the return value of the
5769 present function and the return value of a function being called. */
5770 rtx_equal_function_value_matters = 1;
5772 /* Indicate that we have not instantiated virtual registers yet. */
5773 virtuals_instantiated = 0;
5775 /* Indicate we have no need of a frame pointer yet. */
5776 frame_pointer_needed = 0;
5778 /* By default assume not varargs or stdarg. */
5779 current_function_varargs = 0;
5780 current_function_stdarg = 0;
5782 /* We haven't made any trampolines for this function yet. */
5783 trampoline_list = 0;
5785 init_pending_stack_adjust ();
5786 inhibit_defer_pop = 0;
5788 current_function_outgoing_args_size = 0;
5790 if (init_lang_status)
5791 (*init_lang_status) (cfun);
5792 if (init_machine_status)
5793 (*init_machine_status) (cfun);
5796 /* Initialize the rtl expansion mechanism so that we can do simple things
5797 like generate sequences. This is used to provide a context during global
5798 initialization of some passes. */
5800 init_dummy_function_start ()
5802 prepare_function_start ();
5805 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5806 and initialize static variables for generating RTL for the statements of that function. */
5810 init_function_start (subr, filename, line)
5815 prepare_function_start ();
5817 /* Remember this function for later. */
5818 cfun->next_global = all_functions;
5819 all_functions = cfun;
5821 current_function_name = (*decl_printable_name) (subr, 2);
5824 /* Nonzero if this is a nested function that uses a static chain. */
5826 current_function_needs_context
5827 = (decl_function_context (current_function_decl) != 0
5828 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5830 /* Within function body, compute a type's size as soon as it is laid out. */
5831 immediate_size_expand++;
5833 /* Prevent ever trying to delete the first instruction of a function.
5834 Also tell final how to output a linenum before the function prologue.
5835 Note linenums could be missing, e.g. when compiling a Java .class file. */
5837 emit_line_note (filename, line);
5839 /* Make sure first insn is a note even if we don't want linenums.
5840 This makes sure the first insn will never be deleted.
5841 Also, final expects a note to appear there. */
5842 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5844 /* Set flags used by final.c. */
5845 if (aggregate_value_p (DECL_RESULT (subr)))
5847 #ifdef PCC_STATIC_STRUCT_RETURN
5848 current_function_returns_pcc_struct = 1;
5850 current_function_returns_struct = 1;
5853 /* Warn if this value is an aggregate type,
5854 regardless of which calling convention we are using for it. */
5855 if (warn_aggregate_return
5856 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5857 warning ("function returns an aggregate");
5859 current_function_returns_pointer
5860 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5863 /* Make sure all values used by the optimization passes have sane defaults. */
5866 init_function_for_compilation ()
5869 /* No prologue/epilogue insns yet. */
5870 prologue = epilogue = 0;
5873 /* Indicate that the current function uses extra args
5874 not explicitly mentioned in the argument list in any fashion. */
5879 current_function_varargs = 1;
5882 /* Expand a call to __main at the beginning of a possible main function. */
5884 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5885 #undef HAS_INIT_SECTION
5886 #define HAS_INIT_SECTION
5890 expand_main_function ()
5892 #if !defined (HAS_INIT_SECTION)
5893 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5895 #endif /* not HAS_INIT_SECTION */
5898 extern struct obstack permanent_obstack;
5900 /* Start the RTL for a new function, and set variables used for emitting RTL.
5902 SUBR is the FUNCTION_DECL node.
5903 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5904 the function's parameters, which must be run at any return statement. */
5907 expand_function_start (subr, parms_have_cleanups)
5909 int parms_have_cleanups;
5913 rtx last_ptr = NULL_RTX;
5915 /* Make sure volatile mem refs aren't considered
5916 valid operands of arithmetic insns. */
5917 init_recog_no_volatile ();
5919 /* Set this before generating any memory accesses. */
5920 current_function_check_memory_usage
5921 = (flag_check_memory_usage
5922 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5924 current_function_instrument_entry_exit
5925 = (flag_instrument_function_entry_exit
5926 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5928 current_function_limit_stack
5929 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5931 /* If function gets a static chain arg, store it in the stack frame.
5932 Do this first, so it gets the first stack slot offset. */
5933 if (current_function_needs_context)
5935 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5937 /* Delay copying static chain if it is not a register to avoid
5938 conflicts with regs used for parameters. */
5939 if (! SMALL_REGISTER_CLASSES
5940 || GET_CODE (static_chain_incoming_rtx) == REG)
5941 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5944 /* If the parameters of this function need cleaning up, get a label
5945 for the beginning of the code which executes those cleanups. This must
5946 be done before doing anything with return_label. */
5947 if (parms_have_cleanups)
5948 cleanup_label = gen_label_rtx ();
5952 /* Make the label for return statements to jump to, if this machine
5953 does not have a one-instruction return and uses an epilogue,
5954 or if it returns a structure, or if it has parm cleanups. */
5956 if (cleanup_label == 0 && HAVE_return
5957 && ! current_function_instrument_entry_exit
5958 && ! current_function_returns_pcc_struct
5959 && ! (current_function_returns_struct && ! optimize))
5962 return_label = gen_label_rtx ();
5964 return_label = gen_label_rtx ();
5967 /* Initialize rtx used to return the value. */
5968 /* Do this before assign_parms so that we copy the struct value address
5969 before any library calls that assign parms might generate. */
5971 /* Decide whether to return the value in memory or in a register. */
5972 if (aggregate_value_p (DECL_RESULT (subr)))
5974 /* Returning something that won't go in a register. */
5975 register rtx value_address = 0;
5977 #ifdef PCC_STATIC_STRUCT_RETURN
5978 if (current_function_returns_pcc_struct)
5980 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5981 value_address = assemble_static_space (size);
5986 /* Expect to be passed the address of a place to store the value.
5987 If it is passed as an argument, assign_parms will take care of it. */
5989 if (struct_value_incoming_rtx)
5991 value_address = gen_reg_rtx (Pmode);
5992 emit_move_insn (value_address, struct_value_incoming_rtx);
5997 DECL_RTL (DECL_RESULT (subr))
5998 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5999 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6000 AGGREGATE_TYPE_P (TREE_TYPE
6005 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6006 /* If return mode is void, this decl rtl should not be used. */
6007 DECL_RTL (DECL_RESULT (subr)) = 0;
6008 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6010 /* If function will end with cleanup code for parms,
6011 compute the return values into a pseudo reg,
6012 which we will copy into the true return register
6013 after the cleanups are done. */
6015 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6017 #ifdef PROMOTE_FUNCTION_RETURN
6018 tree type = TREE_TYPE (DECL_RESULT (subr));
6019 int unsignedp = TREE_UNSIGNED (type);
6021 mode = promote_mode (type, mode, &unsignedp, 1);
6024 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6027 /* Scalar, returned in a register. */
6029 #ifdef FUNCTION_OUTGOING_VALUE
6030 DECL_RTL (DECL_RESULT (subr))
6031 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6033 DECL_RTL (DECL_RESULT (subr))
6034 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6037 /* Mark this reg as the function's return value. */
6038 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6040 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6041 /* Needed because we may need to move this to memory
6042 in case it's a named return value whose address is taken. */
6043 DECL_REGISTER (DECL_RESULT (subr)) = 1;
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
         pointer into a pseudo.  If we have small register classes, copy
         the value from memory if static_chain_incoming_rtx is a REG.  If
         we do stupid register allocation, we use the stack address
         generated above.  */
      if (tem && ! obey_regdecls)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain through stack frames, assuming pointer to next lexical
             frame is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
                                               memory_address (Pmode,
                                                               last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                                save_expr_regs);
        }
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
                   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
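/* For orientation, a sketch (not from the original source) of the insn
   stream expand_function_start leaves behind for a simple function,
   before any body RTL is generated:

       [parm setup insns from assign_parms]
       NOTE_INSN_FUNCTION_BEG
       ...                          <- parm_birth_insn
       NOTE_INSN_DELETED            <- tail_recursion_reentry
       [pending-size computations]

   Later code keys off parm_birth_insn and tail_recursion_reentry when
   inserting insns "at entry", e.g. the trampoline initialization and
   stack probes emitted by expand_function_end below.  */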
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  free (cfun);
  cfun = 0;
}
/* Emit CODE for each register of the return value.  Useful values for
   CODE are USE and CLOBBER.  */

void
diddle_return_value (code)
     enum rtx_code code;
{
  tree decl_result = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl_result);

  if (return_reg)
    {
      if (GET_CODE (return_reg) == REG
          && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
        {
          /* Use hard_function_value to avoid creating a reference to a
             BLKmode register in the USE/CLOBBER insn.  */
          return_reg = hard_function_value (TREE_TYPE (decl_result),
                                            current_function_decl, 1);
          REG_FUNCTION_VALUE_P (return_reg) = 1;
          emit_insn (gen_rtx_fmt_e (code, VOIDmode, return_reg));
        }
      else if (GET_CODE (return_reg) == PARALLEL)
        {
          int i;

          for (i = 0; i < XVECLEN (return_reg, 0); i++)
            {
              rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

              if (GET_CODE (x) == REG
                  && REGNO (x) < FIRST_PSEUDO_REGISTER)
                emit_insn (gen_rtx_fmt_e (code, VOIDmode, x));
            }
        }
    }
}
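/* Illustrative sketch (assumed target, not from the original source): if
   the current function returns an int in hard register 0, then

       diddle_return_value (CLOBBER);

   emits the single insn (clobber (reg:SI 0)), and the USE form emits
   (use (reg:SI 0)).  For a value returned in a PARALLEL of several hard
   registers, one USE/CLOBBER is emitted per piece.  expand_function_end
   uses the CLOBBER form just before return_label so a drop-through path
   does not leave the return registers live.  */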
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();
#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
                      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();

          ggc_add_rtx_root (&initial_trampoline, 1);
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
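  /* Sketch of the emitted initialization (not from the original source):
     with a TRAMPOLINE_TEMPLATE the sequence amounts to a block copy of
     the assembled template into the stack slot, followed by whatever
     INITIALIZE_TRAMPOLINE expands to -- typically stores of the nested
     function's address and its static chain CONTEXT at fixed offsets
     within the copied code, roughly

         memcpy (tramp, template, TRAMPOLINE_SIZE);
         *(void **) (tramp + FN_OFFSET) = &function;
         *(void **) (tramp + CHAIN_OFFSET) = context;

     FN_OFFSET and CHAIN_OFFSET are hypothetical names; the actual layout
     is entirely up to the target macro.  */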
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = gen_sequence ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }
  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
         they are not propagated live to the rest of the function.  This
         can only happen with functions that drop through; if there had
         been a return statement, there would have either been a return
         rtx, or a jump to the return label.  */
      diddle_return_value (CLOBBER);

      emit_label (return_label);
    }
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = NULL_RTX;
    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);
        emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a
         temporary pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
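  /* Sketch of the RTL this emits (assumed register numbers, not from the
     original source): if cleanups forced the value into pseudo 105 and
     the target returns ints in hard register 0, we get

         (set (reg:SI 0) (reg:SI 105))
         (use (reg:SI 0))

     where the USE keeps the hard register live through to the epilogue.  */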
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
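/* Illustrative example (assumed UIDs, not from the original source): for
   a SEQUENCE of three insns with UIDs 40, 41 and 42, record_insns yields
   the zero-terminated vector

       { 40, 41, 42, 0 }

   which is what `contains' below walks when deciding whether an insn
   belongs to the recorded prologue or epilogue.  */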
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
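/* Example (assumed UIDs, not from the original source): with the vector
   { 40, 41, 42, 0 } above, a delay-slot SEQUENCE packaging insns 41 and
   42 makes contains return 2, a plain insn with UID 42 returns 1, and
   anything else returns 0.  This count is what lets
   reposition_prologue_and_epilogue_notes step its remaining-length
   counter down past scheduled prologue or epilogue insns.  */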
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
          inserted = 1;
        }
      else
        emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
         but via mark_regs_live_at_end, we could use insert_insn_on_edge
         and all of this ugliness would go away.  */

      /* If the exit block has no non-fake predecessors, we don't
         need an epilogue.  Furthermore, only pay attention to the
         fallthru predecessors; if (conditional) return insns were
         generated, by definition we do not need to emit epilogue
         insns.  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if ((e->flags & EDGE_FAKE) == 0
            && (e->flags & EDGE_FALLTHRU) != 0)
          break;
      if (e == NULL)
        goto epilogue_done;

      /* We can't handle multiple epilogues -- if one is needed,
         we won't be able to place it multiple times.

         ??? Fix epilogue expanders to not assume they are the
         last thing done compiling the function.  Either that
         or copy_rtx each insn.

         ??? Blah, it's not a simple expression to assert that
         we've exactly one fallthru exit edge.  */

      bb = e->src;
      tail = bb->end;

      /* ??? If the last insn of the basic block is a jump, then we
         are creating a new basic block.  Wimp out and leave these
         insns outside any block.  */
      if (GET_CODE (tail) == JUMP_INSN)
        bb = 0;

      {
        rtx prev, seq, first_use;

        /* Move the USE insns at the end of a function onto a list.  */
        prev = tail;
        if (GET_CODE (prev) == BARRIER
            || GET_CODE (prev) == NOTE)
          prev = prev_nonnote_insn (prev);

        first_use = 0;
        if (prev
            && GET_CODE (prev) == INSN
            && GET_CODE (PATTERN (prev)) == USE)
          {
            /* If the end of the block is the use, grab hold of something
               else so that we emit barriers etc in the right place.  */
            if (prev == tail)
              {
                do
                  tail = PREV_INSN (tail);
                while (GET_CODE (tail) == INSN
                       && GET_CODE (PATTERN (tail)) == USE);
              }

            do
              {
                rtx use = prev;
                prev = prev_nonnote_insn (prev);

                remove_insn (use);
                if (first_use)
                  {
                    NEXT_INSN (use) = first_use;
                    PREV_INSN (first_use) = use;
                  }
                else
                  NEXT_INSN (use) = NULL_RTX;
                first_use = use;
              }
            while (prev
                   && GET_CODE (prev) == INSN
                   && GET_CODE (PATTERN (prev)) == USE);
          }

        /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
           epilogue insns, the USE insns at the end of a function,
           the jump insn that returns, and then a BARRIER.  */

        if (GET_CODE (tail) != BARRIER)
          {
            prev = next_nonnote_insn (tail);
            if (!prev || GET_CODE (prev) != BARRIER)
              emit_barrier_after (tail);
          }

        seq = gen_epilogue ();
        prev = tail;
        tail = emit_jump_insn_after (seq, tail);

        /* Insert the USE insns immediately before the return insn, which
           must be the last instruction emitted in the sequence.  */
        if (first_use)
          emit_insns_before (first_use, tail);
        emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

        /* Update the tail of the basic block.  */
        if (bb)
          bb->end = tail;

        /* Retain a map of the epilogue insns.  */
        epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
}
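/* Resulting layout (sketch, not from the original source) for a function
   with both a prologue and an epilogue pattern:

       prologue insns
       NOTE_INSN_PROLOGUE_END
       ...function body...
       NOTE_INSN_EPILOGUE_BEG
       epilogue insns
       (use ...) insns for the return value
       return jump
       barrier

   reposition_prologue_and_epilogue_notes below exists because instruction
   and delayed-branch scheduling may later move insns across the two
   notes, so they have to be re-anchored to the recorded insns.  */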
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks-1) == insn)
                    BLOCK_HEAD (n_basic_blocks-1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
         i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
        ggc_mark_rtx (q->modified);
        q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}
/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
        (*mark_machine_status) (f);
      if (mark_lang_status)
        (*mark_lang_status) (f);

      if (f->original_arg_vector)
        ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
        ggc_mark_tree (f->original_decl_initial);
    }
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
                mark_function_chain);
}