/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
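
/* As a rough illustration (not part of this file), a front end drives
   these entry points in this order; the argument lists shown here are
   simplified assumptions, not the real prototypes:

       expand_function_start (fndecl, 0);        -- emit parameter setup
       ... expand each statement of the body ...
       expand_function_end (filename, line, 0);  -- emit the return sequence

   assign_stack_local and put_var_into_stack are then called on demand
   during that expansion, as described above.  */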
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc.  In
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
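
/* For example, with ALIGN == 8 (a power of two):
   FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (-13, 8) == -8, whereas
   -13 / 8 could legitimately round toward zero and give the wrong
   multiple.  FLOOR_ROUND (16, 8) == CEIL_ROUND (16, 8) == 16; an
   already-aligned value is left unchanged.  */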
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
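
/* For instance, in the GNU C statement expression

       x = ({ struct S s = f (); s.field; });

   the value of f () may live in a temporary slot; that slot must survive
   the end of the inner statement, so it is treated as if it had been
   allocated at the enclosing nesting level.  */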
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */
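
/* E.g., in a matching-constraint pattern such as (set (reg X) (plus (reg X)
   (const_int 4))), both occurrences of X must be rewritten to the same
   replacement rtx, or the insn would no longer satisfy its constraints.  */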
struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
					int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int, struct hash_table *));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int,
				       struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down		PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns	PROTO((rtx));
static int contains		PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1	PROTO((rtx *, rtx, int, int,
				       struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk   PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? current_function
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->decl = current_function_decl;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  current_function = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
		    queue->unsignedp, 0);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_emit_status (f);
  free_varasm_status (f);

  free (f->x_parm_reg_stack_loc);

  f->can_garbage_collect = 1;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != current_function)
    push_obstacks (function->function_obstack,
		   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
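
  /* E.g., allocating a 1-byte QImode value in a slot rounded up to 4 bytes
     on a big-endian target gives bigend_correction == 3, so the MEM ends
     up addressing the last byte of the slot rather than the first.  */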
  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == current_function && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != current_function)
    pop_obstacks ();

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, current_function);
}
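
/* For example, a port or front end needing a word-sized spill slot with
   the default alignment for its mode might do (illustrative only):

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and receive a MEM:SI whose address is frame-pointer relative (or
   virtual-stack-vars relative before instantiation).  */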
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = p;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	align = BIGGEST_ALIGNMENT;
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
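
      /* E.g., if the frame grows downward and frame_offset went from -16
	 to -24 for this allocation, the slot occupies [-24, -16) relative
	 to the soft frame pointer: base_offset is -24, full_size is 8.  */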
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
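
/* For instance, expanding a call that returns a struct in memory might do
   (illustrative only):

       temp = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   asking for addressable stack memory (MEMORY_REQUIRED == 1) that is
   freed by the next free_temp_slots unless preserved.  */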
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
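
/* E.g., a freed slot with base_offset 32 and full_size 16 and a freed slot
   with base_offset 48 and full_size 8 are adjacent, so they can be merged
   into one slot with base_offset 32 and full_size 24 for later reuse.  */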
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;

	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}
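
/* E.g., a slot with base_offset 16 and full_size 8 is found both by its
   own address rtx and by any (plus virtual_stack_vars_rtx (const_int N))
   with 16 <= N < 24, so addresses into the middle of the slot match.  */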
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   stay kept.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     a lower level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);
  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif
      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : current_function;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}
	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);
	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}

	      /* Also fix up any invalid exprs in the REG_NOTES of this insn.
		 But don't touch other insns referred to by reg-notes;
		 we will get them elsewhere.  */
	      while (note)
		{
		  if (GET_CODE (note) != INSN_LIST)
		    XEXP (note, 0)
		      = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
		  note = XEXP (note, 1);
		}
	    }
	}

      if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = next;
    }
}
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     rtx var;
     enum machine_mode promoted_mode;
     rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register const char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = gen_sequence ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  force_operand (sub, y);
		  seq = gen_sequence ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;
    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);
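
		  /* E.g., is_mode SImode (4 bytes), wanted_mode QImode
		     (1 byte), pos 8: offset is 8 / 8 == 1, but if bytes
		     and bits are numbered from opposite ends it becomes
		     4 - 1 - 1 == 2; pos then reduces to 8 % 8 == 0
		     within the narrower reference.  */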
		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_COPY_ATTRIBUTES (newmem, tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}
      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
1996 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1998 /* Since this case will return, ensure we fixup all the
2000 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2001 insn, replacements);
2002 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2003 insn, replacements);
2004 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2005 insn, replacements);
2007 tem = XEXP (outerdest, 0);
2009 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2010 that may appear inside a ZERO_EXTRACT.
2011 This was legitimate when the MEM was a REG. */
2012 if (GET_CODE (tem) == SUBREG
2013 && SUBREG_REG (tem) == var)
2014 tem = fixup_memory_subreg (tem, insn, 0);
2016 tem = fixup_stack_1 (tem, insn);
2018 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2019 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2020 && ! mode_dependent_address_p (XEXP (tem, 0))
2021 && ! MEM_VOLATILE_P (tem))
2023 enum machine_mode wanted_mode;
2024 enum machine_mode is_mode = GET_MODE (tem);
2025 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2027 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2028 if (wanted_mode == VOIDmode)
2029 wanted_mode = word_mode;
2031 /* If we have a narrower mode, we can do something. */
2032 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2034 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2035 rtx old_pos = XEXP (outerdest, 2);
2038 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2039 offset = (GET_MODE_SIZE (is_mode)
2040 - GET_MODE_SIZE (wanted_mode) - offset);
2042 pos %= GET_MODE_BITSIZE (wanted_mode);
2044 newmem = gen_rtx_MEM (wanted_mode,
2045 plus_constant (XEXP (tem, 0), offset));
2046 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2047 MEM_COPY_ATTRIBUTES (newmem, tem);
2049 /* Make the change and see if the insn remains valid. */
2050 INSN_CODE (insn) = -1;
2051 XEXP (outerdest, 0) = newmem;
2052 XEXP (outerdest, 2) = GEN_INT (pos);
2054 if (recog_memoized (insn) >= 0)
2057 /* Otherwise, restore old position. XEXP (x, 0) will be
2058 restored later. */
2059 XEXP (outerdest, 2) = old_pos;
2063 /* If we get here, the bit-field store doesn't allow memory
2064 or isn't located at a constant position. Load the value into
2065 a register, do the store, and put it back into memory. */
2067 tem1 = gen_reg_rtx (GET_MODE (tem));
2068 emit_insn_before (gen_move_insn (tem1, tem), insn);
2069 emit_insn_after (gen_move_insn (tem, tem1), insn);
2070 XEXP (outerdest, 0) = tem1;
2075 /* STRICT_LOW_PART is a no-op on memory references
2076 and it can cause combinations to be unrecognizable,
2077 so eliminate it. */
2079 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2080 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2082 /* A valid insn to copy VAR into or out of a register
2083 must be left alone, to avoid an infinite loop here.
2084 If the reference to VAR is by a subreg, fix that up,
2085 since SUBREG is not valid for a memref.
2086 Also fix up the address of the stack slot.
2088 Note that we must not try to recognize the insn until
2089 after we know that we have valid addresses and no
2090 (subreg (mem ...) ...) constructs, since these interfere
2091 with determining the validity of the insn. */
2093 if ((SET_SRC (x) == var
2094 || (GET_CODE (SET_SRC (x)) == SUBREG
2095 && SUBREG_REG (SET_SRC (x)) == var))
2096 && (GET_CODE (SET_DEST (x)) == REG
2097 || (GET_CODE (SET_DEST (x)) == SUBREG
2098 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2099 && GET_MODE (var) == promoted_mode
2100 && x == single_set (insn))
2104 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2105 if (replacement->new)
2106 SET_SRC (x) = replacement->new;
2107 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2108 SET_SRC (x) = replacement->new
2109 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2111 SET_SRC (x) = replacement->new
2112 = fixup_stack_1 (SET_SRC (x), insn);
2114 if (recog_memoized (insn) >= 0)
2117 /* INSN is not valid, but we know that we want to
2118 copy SET_SRC (x) to SET_DEST (x) in some way. So
2119 we generate the move and see whether it requires more
2120 than one insn. If it does, we emit those insns and
2121 delete INSN. Otherwise, we can just replace the pattern
2122 of INSN; we have already verified above that INSN has
2123 no other function than to do X. */
2125 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2126 if (GET_CODE (pat) == SEQUENCE)
2128 emit_insn_after (pat, insn);
2129 PUT_CODE (insn, NOTE);
2130 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2131 NOTE_SOURCE_FILE (insn) = 0;
2134 PATTERN (insn) = pat;
2139 if ((SET_DEST (x) == var
2140 || (GET_CODE (SET_DEST (x)) == SUBREG
2141 && SUBREG_REG (SET_DEST (x)) == var))
2142 && (GET_CODE (SET_SRC (x)) == REG
2143 || (GET_CODE (SET_SRC (x)) == SUBREG
2144 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2145 && GET_MODE (var) == promoted_mode
2146 && x == single_set (insn))
2150 if (GET_CODE (SET_DEST (x)) == SUBREG)
2151 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2153 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2155 if (recog_memoized (insn) >= 0)
2158 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2159 if (GET_CODE (pat) == SEQUENCE)
2161 emit_insn_after (pat, insn);
2162 PUT_CODE (insn, NOTE);
2163 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2164 NOTE_SOURCE_FILE (insn) = 0;
2167 PATTERN (insn) = pat;
2172 /* Otherwise, storing into VAR must be handled specially
2173 by storing into a temporary and copying that into VAR
2174 with a new insn after this one. Note that this case
2175 will be used when storing into a promoted scalar since
2176 the insn will now have different modes on the input
2177 and output and hence will be invalid (except for the case
2178 of setting it to a constant, which does not need any
2179 change if it is valid). We generate extra code in that case,
2180 but combine.c will eliminate it. */
2185 rtx fixeddest = SET_DEST (x);
2187 /* STRICT_LOW_PART can be discarded around a MEM. */
2188 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2189 fixeddest = XEXP (fixeddest, 0);
2190 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2191 if (GET_CODE (fixeddest) == SUBREG)
2193 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2194 promoted_mode = GET_MODE (fixeddest);
2197 fixeddest = fixup_stack_1 (fixeddest, insn);
2199 temp = gen_reg_rtx (promoted_mode);
2201 emit_insn_after (gen_move_insn (fixeddest,
2202 gen_lowpart (GET_MODE (fixeddest),
2206 SET_DEST (x) = temp;
2214 /* Nothing special about this RTX; fix its operands. */
2216 fmt = GET_RTX_FORMAT (code);
2217 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2220 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2224 for (j = 0; j < XVECLEN (x, i); j++)
2225 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2226 insn, replacements);
2231 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2232 return an rtx (MEM:m1 newaddr) which is equivalent.
2233 If any insns must be emitted to compute NEWADDR, put them before INSN.
2235 UNCRITICAL nonzero means accept paradoxical subregs.
2236 This is used for subregs found inside REG_NOTES. */
2239 fixup_memory_subreg (x, insn, uncritical)
2244 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2245 rtx addr = XEXP (SUBREG_REG (x), 0);
2246 enum machine_mode mode = GET_MODE (x);
2249 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2250 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2254 if (BYTES_BIG_ENDIAN)
2255 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2256 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2257 addr = plus_constant (addr, offset);
2258 if (!flag_force_addr && memory_address_p (mode, addr))
2259 /* Shortcut if no insns need be emitted. */
2260 return change_address (SUBREG_REG (x), mode, addr);
2262 result = change_address (SUBREG_REG (x), mode, addr);
2263 emit_insn_before (gen_sequence (), insn);
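/* Illustrative sketch (not part of the compiler proper): the byte-offset
   arithmetic used just above, modelled with plain integers.  The constants
   are stand-ins for UNITS_PER_WORD, GET_MODE_SIZE and BYTES_BIG_ENDIAN on a
   hypothetical 32-bit big-endian target; real targets supply their own.  */
static int
subreg_byte_offset_sketch (int subreg_word, int outer_size, int inner_size)
{
  const int units_per_word = 4;		/* assumed word size in bytes */
  const int bytes_big_endian = 1;	/* assumed endianness */
  int offset = subreg_word * units_per_word;

  /* On big-endian machines the narrow value lives in the high-address
     bytes of the word, so step past the leading pad bytes.  */
  if (bytes_big_endian)
    offset += ((inner_size < units_per_word ? inner_size : units_per_word)
	       - (outer_size < units_per_word ? outer_size : units_per_word));
  return offset;
}
/* E.g. (SUBREG:HI (MEM:SI addr) 0) on this imaginary target yields
   subreg_byte_offset_sketch (0, 2, 4) == 2, so the HImode MEM reads the
   low-order half of the SImode word at addr + 2.  */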
2268 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2269 Replace subexpressions of X in place.
2270 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2271 Otherwise return X, with its contents possibly altered.
2273 If any insns must be emitted to compute NEWADDR, put them before INSN.
2275 UNCRITICAL is as in fixup_memory_subreg. */
2278 walk_fixup_memory_subreg (x, insn, uncritical)
2283 register enum rtx_code code;
2284 register const char *fmt;
2290 code = GET_CODE (x);
2292 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2293 return fixup_memory_subreg (x, insn, uncritical);
2295 /* Nothing special about this RTX; fix its operands. */
2297 fmt = GET_RTX_FORMAT (code);
2298 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2301 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2305 for (j = 0; j < XVECLEN (x, i); j++)
2307 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2313 /* For each memory ref within X, if it refers to a stack slot
2314 with an out of range displacement, put the address in a temp register
2315 (emitting new insns before INSN to load these registers)
2316 and alter the memory ref to use that register.
2317 Replace each such MEM rtx with a copy, to avoid clobberage. */
2320 fixup_stack_1 (x, insn)
2325 register RTX_CODE code = GET_CODE (x);
2326 register const char *fmt;
2330 register rtx ad = XEXP (x, 0);
2331 /* If we have the address of a stack slot but it's not valid
2332 (displacement is too large), compute the sum in a register. */
2333 if (GET_CODE (ad) == PLUS
2334 && GET_CODE (XEXP (ad, 0)) == REG
2335 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2336 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2337 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2338 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2339 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2341 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2342 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2343 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2344 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2347 if (memory_address_p (GET_MODE (x), ad))
2351 temp = copy_to_reg (ad);
2352 seq = gen_sequence ();
2354 emit_insn_before (seq, insn);
2355 return change_address (x, VOIDmode, temp);
2360 fmt = GET_RTX_FORMAT (code);
2361 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2364 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2368 for (j = 0; j < XVECLEN (x, i); j++)
2369 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
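/* Illustrative sketch (with an invented displacement limit): the test that
   memory_address_p performs for us above, and the rewrite that follows when
   it fails.  On a machine with a signed 16-bit displacement field,

       (mem:SI (plus:SI (reg fp) (const_int 70000)))

   would be rejected; copy_to_reg then computes fp + 70000 into a temporary
   before INSN, and change_address rebuilds the MEM around that register.  */
static int
displacement_needs_fixup_sketch (long displacement)
{
  const long min_disp = -32768, max_disp = 32767;	/* assumed limits */
  return displacement < min_disp || displacement > max_disp;
}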
2375 /* Optimization: a bit-field instruction whose field
2376 happens to be a byte or halfword in memory
2377 can be changed to a move instruction.
2379 We call here when INSN is an insn to examine or store into a bit-field.
2380 BODY is the SET-rtx to be altered.
2382 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2383 (Currently this is called only from function.c, and EQUIV_MEM
2384 is always 0.) */
2387 optimize_bit_field (body, insn, equiv_mem)
2392 register rtx bitfield;
2395 enum machine_mode mode;
2397 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2398 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2399 bitfield = SET_DEST (body), destflag = 1;
2401 bitfield = SET_SRC (body), destflag = 0;
2403 /* First check that the field being stored has constant size and position
2404 and is in fact a byte or halfword suitably aligned. */
2406 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2407 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2408 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2409 != BLKmode)
2410 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2412 register rtx memref = 0;
2414 /* Now check that the containing word is memory, not a register,
2415 and that it is safe to change the machine mode. */
2417 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2418 memref = XEXP (bitfield, 0);
2419 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2420 && equiv_mem != 0)
2421 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2422 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2423 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2424 memref = SUBREG_REG (XEXP (bitfield, 0));
2425 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2426 && equiv_mem != 0
2427 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2428 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2431 && ! mode_dependent_address_p (XEXP (memref, 0))
2432 && ! MEM_VOLATILE_P (memref))
2434 /* Now adjust the address, first for any subreg'ing
2435 that we are now getting rid of,
2436 and then for which byte of the word is wanted. */
2438 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2441 /* Adjust OFFSET to count bits from low-address byte. */
2442 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2443 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2444 - offset - INTVAL (XEXP (bitfield, 1)));
2446 /* Adjust OFFSET to count bytes from low-address byte. */
2447 offset /= BITS_PER_UNIT;
2448 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2450 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2451 if (BYTES_BIG_ENDIAN)
2452 offset -= (MIN (UNITS_PER_WORD,
2453 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2454 - MIN (UNITS_PER_WORD,
2455 GET_MODE_SIZE (GET_MODE (memref))));
2459 memref = change_address (memref, mode,
2460 plus_constant (XEXP (memref, 0), offset));
2461 insns = get_insns ();
2463 emit_insns_before (insns, insn);
2465 /* Store this memory reference where
2466 we found the bit field reference. */
2470 validate_change (insn, &SET_DEST (body), memref, 1);
2471 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2473 rtx src = SET_SRC (body);
2474 while (GET_CODE (src) == SUBREG
2475 && SUBREG_WORD (src) == 0)
2476 src = SUBREG_REG (src);
2477 if (GET_MODE (src) != GET_MODE (memref))
2478 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2479 validate_change (insn, &SET_SRC (body), src, 1);
2481 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2482 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2483 /* This shouldn't happen because anything that didn't have
2484 one of these modes should have got converted explicitly
2485 and then referenced through a subreg.
2486 This is so because the original bit-field was
2487 handled by agg_mode and so its tree structure had
2488 the same mode that memref now has. */
2493 rtx dest = SET_DEST (body);
2495 while (GET_CODE (dest) == SUBREG
2496 && SUBREG_WORD (dest) == 0
2497 && (GET_MODE_CLASS (GET_MODE (dest))
2498 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2499 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2501 dest = SUBREG_REG (dest);
2503 validate_change (insn, &SET_DEST (body), dest, 1);
2505 if (GET_MODE (dest) == GET_MODE (memref))
2506 validate_change (insn, &SET_SRC (body), memref, 1);
2509 /* Convert the mem ref to the destination mode. */
2510 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2513 convert_move (newreg, memref,
2514 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2518 validate_change (insn, &SET_SRC (body), newreg, 1);
2522 /* See if we can convert this extraction or insertion into
2523 a simple move insn. We might not be able to do so if this
2524 was, for example, part of a PARALLEL.
2526 If we succeed, write out any needed conversions. If we fail,
2527 it is hard to guess why we failed, so don't do anything
2528 special; just let the optimization be suppressed. */
2530 if (apply_change_group () && seq)
2531 emit_insns_before (seq, insn);
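/* Illustrative sketch of the arithmetic above with plain integers: an
   aligned 8-bit field at bit position 16 of a little-endian SImode MEM is
   just the byte at offset 2, so the extraction or insertion degenerates to
   a move.  BITS_PER_UNIT is assumed to be 8 here.  */
static long
aligned_field_byte_offset_sketch (long bitpos)
{
  /* The code above has already verified that the field width is a byte
     or halfword and that bitpos is a multiple of that width.  */
  return bitpos / 8;
}
/* aligned_field_byte_offset_sketch (16) == 2: change_address narrows the
   SImode MEM to a QImode MEM at addr + 2.  */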
2536 /* These routines are responsible for converting virtual register references
2537 to the actual hard register references once RTL generation is complete.
2539 The following four variables are used for communication between the
2540 routines. They contain the offsets of the virtual registers from their
2541 respective hard registers. */
2543 static int in_arg_offset;
2544 static int var_offset;
2545 static int dynamic_offset;
2546 static int out_arg_offset;
2547 static int cfa_offset;
2549 /* On most machines, the stack pointer register is equivalent to the bottom
2550 of the stack. */
2552 #ifndef STACK_POINTER_OFFSET
2553 #define STACK_POINTER_OFFSET 0
2556 /* If not defined, pick an appropriate default for the offset of dynamically
2557 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2558 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2560 #ifndef STACK_DYNAMIC_OFFSET
2562 #ifdef ACCUMULATE_OUTGOING_ARGS
2563 /* The bottom of the stack points to the actual arguments. If
2564 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2565 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2566 stack space for register parameters is not pushed by the caller, but
2567 rather is part of the fixed stack areas and hence not included in
2568 `current_function_outgoing_args_size'. Nevertheless, we must allow
2569 for it when allocating stack dynamic objects. */
2571 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2572 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2573 (current_function_outgoing_args_size \
2574 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2577 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2578 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2582 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
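/* Illustrative arithmetic for the first arm above, with invented numbers:
   48 bytes of outgoing args, 16 bytes of REG_PARM_STACK_SPACE and a zero
   STACK_POINTER_OFFSET put dynamic allocations 64 bytes above the stack
   pointer.  */
static long
stack_dynamic_offset_sketch (long outgoing_args_size, long reg_parm_space,
			     long stack_pointer_offset)
{
  return outgoing_args_size + reg_parm_space + stack_pointer_offset;
}
/* stack_dynamic_offset_sketch (48, 16, 0) == 64.  */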
2586 /* On a few machines, the CFA coincides with the arg pointer. */
2588 #ifndef ARG_POINTER_CFA_OFFSET
2589 #define ARG_POINTER_CFA_OFFSET 0
2593 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2594 its address taken. DECL is the decl for the object stored in the
2595 register, for later use if we do need to force REG into the stack.
2596 REG is overwritten by the MEM as in put_reg_into_stack. */
2599 gen_mem_addressof (reg, decl)
2603 tree type = TREE_TYPE (decl);
2604 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2606 /* If the original REG was a user-variable, then so is the REG whose
2607 address is being taken. */
2608 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2610 PUT_CODE (reg, MEM);
2611 PUT_MODE (reg, DECL_MODE (decl));
2613 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2614 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2615 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2617 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2618 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
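/* Illustrative sketch of why gen_mem_addressof edits REG in place rather
   than building a fresh MEM: REG rtx are shared, so every insn holding a
   pointer to this node must observe the change at once.  Modelled with a
   made-up two-field node (the real rtx layout differs):  */
struct shared_node_sketch { int code; void *operand0; };

static void
retag_shared_node_sketch (struct shared_node_sketch *node, int mem_code,
			  void *address)
{
  /* After this, every holder of `node' sees a MEM; no insn has to be
     rescanned just to swap pointers.  */
  node->code = mem_code;
  node->operand0 = address;
}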
2623 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2626 flush_addressof (decl)
2629 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2630 && DECL_RTL (decl) != 0
2631 && GET_CODE (DECL_RTL (decl)) == MEM
2632 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2633 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2634 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2637 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2640 put_addressof_into_stack (r, ht)
2642 struct hash_table *ht;
2644 tree decl = ADDRESSOF_DECL (r);
2645 rtx reg = XEXP (r, 0);
2647 if (GET_CODE (reg) != REG)
2650 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2651 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2652 ADDRESSOF_REGNO (r),
2653 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2656 /* List of replacements made below in purge_addressof_1 when creating
2657 bitfield insertions. */
2658 static rtx purge_bitfield_addressof_replacements;
2660 /* List of replacements made below in purge_addressof_1 for patterns
2661 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2662 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2663 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2664 enough in complex cases, e.g. when some field values can be
2665 extracted by using a MEM with a narrower mode. */
2666 static rtx purge_addressof_replacements;
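/* Illustrative sketch of the list layout used by both replacement lists
   above: keys and values alternate along one EXPR_LIST chain, i.e.
   (expr_list KEY (expr_list VALUE (expr_list KEY' ...))), so lookups step
   two links at a time.  Plain-C model with a stand-in node type:  */
struct expr_list_sketch { void *datum; struct expr_list_sketch *next; };

static void *
lookup_replacement_sketch (struct expr_list_sketch *list, void *key)
{
  /* Pairs are always complete, so list->next is non-null whenever
     list itself is.  */
  for (; list; list = list->next->next)
    if (list->datum == key)
      return list->next->datum;
  return 0;
}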
2668 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2669 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2670 the stack. */
2673 purge_addressof_1 (loc, insn, force, store, ht)
2677 struct hash_table *ht;
2684 /* Re-start here to avoid recursion in common cases. */
2691 code = GET_CODE (x);
2693 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2696 /* We must create a copy of the rtx because it was created by
2697 overwriting a REG rtx which is always shared. */
2698 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2700 if (validate_change (insn, loc, sub, 0)
2701 || validate_replace_rtx (x, sub, insn))
2705 sub = force_operand (sub, NULL_RTX);
2706 if (! validate_change (insn, loc, sub, 0)
2707 && ! validate_replace_rtx (x, sub, insn))
2710 insns = gen_sequence ();
2712 emit_insn_before (insns, insn);
2715 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2717 rtx sub = XEXP (XEXP (x, 0), 0);
2720 if (GET_CODE (sub) == MEM)
2722 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2723 MEM_COPY_ATTRIBUTES (sub2, sub);
2724 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2728 if (GET_CODE (sub) == REG
2729 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2731 put_addressof_into_stack (XEXP (x, 0), ht);
2734 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2736 int size_x, size_sub;
2740 /* When processing REG_NOTES look at the list of
2741 replacements done on the insn to find the register that X
2742 was replaced by. */
2745 for (tem = purge_bitfield_addressof_replacements;
2747 tem = XEXP (XEXP (tem, 1), 1))
2748 if (rtx_equal_p (x, XEXP (tem, 0)))
2750 *loc = XEXP (XEXP (tem, 1), 0);
2754 /* See comment for purge_addressof_replacements. */
2755 for (tem = purge_addressof_replacements;
2757 tem = XEXP (XEXP (tem, 1), 1))
2758 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2760 rtx z = XEXP (XEXP (tem, 1), 0);
2762 if (GET_MODE (x) == GET_MODE (z)
2763 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2764 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2767 /* It can happen that the note may speak of things
2768 in a wider (or just different) mode than the
2769 code did. This is especially true of
2770 REG_RETVAL. */
2772 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2775 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2776 && (GET_MODE_SIZE (GET_MODE (x))
2777 > GET_MODE_SIZE (GET_MODE (z))))
2779 /* This can occur as a result of invalid
2780 pointer casts, e.g. float f; ...
2781 *(long long int *)&f.
2782 ??? We could emit a warning here, but
2783 without a line number that wouldn't be
2784 very useful. */
2785 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2788 z = gen_lowpart (GET_MODE (x), z);
2794 /* There should always be such a replacement. */
2798 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2799 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2801 /* Don't even consider working with paradoxical subregs,
2802 or the moral equivalent seen here. */
2803 if (size_x <= size_sub
2804 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2806 /* Do a bitfield insertion to mirror what would happen in memory. */
2813 rtx p = PREV_INSN (insn);
2816 val = gen_reg_rtx (GET_MODE (x));
2817 if (! validate_change (insn, loc, val, 0))
2819 /* Discard the current sequence and put the
2820 ADDRESSOF on the stack. */
2824 seq = gen_sequence ();
2826 emit_insn_before (seq, insn);
2827 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2831 store_bit_field (sub, size_x, 0, GET_MODE (x),
2832 val, GET_MODE_SIZE (GET_MODE (sub)),
2833 GET_MODE_SIZE (GET_MODE (sub)));
2835 /* Make sure to unshare any shared rtl that store_bit_field
2836 might have created. */
2837 for (p = get_insns(); p; p = NEXT_INSN (p))
2839 reset_used_flags (PATTERN (p));
2840 reset_used_flags (REG_NOTES (p));
2841 reset_used_flags (LOG_LINKS (p));
2843 unshare_all_rtl (get_insns ());
2845 seq = gen_sequence ();
2847 p = emit_insn_after (seq, insn);
2848 if (NEXT_INSN (insn))
2849 compute_insns_for_mem (NEXT_INSN (insn),
2850 p ? NEXT_INSN (p) : NULL_RTX,
2855 rtx p = PREV_INSN (insn);
2858 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2859 GET_MODE (x), GET_MODE (x),
2860 GET_MODE_SIZE (GET_MODE (sub)),
2861 GET_MODE_SIZE (GET_MODE (sub)));
2863 if (! validate_change (insn, loc, val, 0))
2865 /* Discard the current sequence and put the
2866 ADDRESSOF on the stack. */
2871 seq = gen_sequence ();
2873 emit_insn_before (seq, insn);
2874 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2878 /* Remember the replacement so that the same one can be done
2879 on the REG_NOTES. */
2880 purge_bitfield_addressof_replacements
2881 = gen_rtx_EXPR_LIST (VOIDmode, x,
2884 purge_bitfield_addressof_replacements));
2886 /* We replaced with a reg -- all done. */
2890 else if (validate_change (insn, loc, sub, 0))
2892 /* Remember the replacement so that the same one can be done
2893 on the REG_NOTES. */
2894 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2898 for (tem = purge_addressof_replacements;
2900 tem = XEXP (XEXP (tem, 1), 1))
2901 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2903 XEXP (XEXP (tem, 1), 0) = sub;
2906 purge_addressof_replacements
2907 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
2908 gen_rtx_EXPR_LIST (VOIDmode, sub,
2909 purge_addressof_replacements));
2915 /* else give up and put it into the stack */
2917 else if (code == ADDRESSOF)
2919 put_addressof_into_stack (x, ht);
2922 else if (code == SET)
2924 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2925 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2929 /* Scan all subexpressions. */
2930 fmt = GET_RTX_FORMAT (code);
2931 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2934 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
2935 else if (*fmt == 'E')
2936 for (j = 0; j < XVECLEN (x, i); j++)
2937 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
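/* Illustrative sketch of the `restart' pattern used at the top of this
   function (and in instantiate_virtual_regs_1 below): for chains of
   single-operand nodes the walker loops instead of recursing, keeping the
   stack depth constant.  Plain-C model with a made-up node type:  */
struct walk_node_sketch { int n_operands; struct walk_node_sketch *operand0; };

static void
walk_iteratively_sketch (struct walk_node_sketch *x)
{
 restart:
  if (x && x->n_operands == 1)
    {
      x = x->operand0;		/* tail position: loop, don't recurse */
      goto restart;
    }
  /* ... handle the remaining shapes recursively ... */
}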
2941 /* Return a new hash table entry in HT. */
2943 static struct hash_entry *
2944 insns_for_mem_newfunc (he, ht, k)
2945 struct hash_entry *he;
2946 struct hash_table *ht;
2947 hash_table_key k ATTRIBUTE_UNUSED;
2949 struct insns_for_mem_entry *ifmhe;
2953 ifmhe = ((struct insns_for_mem_entry *)
2954 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
2955 ifmhe->insns = NULL_RTX;
2960 /* Return a hash value for K, a REG. */
2962 static unsigned long
2963 insns_for_mem_hash (k)
2966 /* K is really an RTX. Just use the address as the hash value. */
2967 return (unsigned long) k;
2970 /* Return non-zero if K1 and K2 (two REGs) are the same. */
2973 insns_for_mem_comp (k1, k2)
2980 struct insns_for_mem_walk_info {
2981 /* The hash table that we are using to record which INSNs use which
2982 REGs. */
2983 struct hash_table *ht;
2985 /* The INSN we are currently processing. */
2988 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
2989 to find the insns that use the REGs in the ADDRESSOFs. */
2993 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
2994 that might be used in an ADDRESSOF expression, record this INSN in
2995 the hash table given by DATA (which is really a pointer to an
2996 insns_for_mem_walk_info structure). */
2999 insns_for_mem_walk (r, data)
3003 struct insns_for_mem_walk_info *ifmwi
3004 = (struct insns_for_mem_walk_info *) data;
3006 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3007 && GET_CODE (XEXP (*r, 0)) == REG)
3008 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3009 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3011 /* Look up this REG in the hash table, creating its entry if necessary. */
3012 struct insns_for_mem_entry *ifme
3013 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3018 /* If we have not already recorded this INSN, do so now. Since
3019 we process the INSNs in order, we know that if we have
3020 recorded it, it must be at the front of the list. */
3021 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3023 /* We do the allocation on the same obstack as is used for
3024 the hash table since this memory will not be used once
3025 the hash table is deallocated. */
3026 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3027 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3036 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3037 which REGs in HT. */
3040 compute_insns_for_mem (insns, last_insn, ht)
3043 struct hash_table *ht;
3046 struct insns_for_mem_walk_info ifmwi;
3049 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3050 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3051 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3054 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3058 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3059 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3060 stack. */
3063 purge_addressof (insns)
3067 struct hash_table ht;
3069 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3070 requires a fixup pass over the instruction stream to correct
3071 INSNs that depended on the REG being a REG, and not a MEM. But,
3072 these fixup passes are slow. Furthermore, most MEMs are not
3073 mentioned in very many instructions. So, we speed up the process
3074 by pre-calculating which REGs occur in which INSNs; that allows
3075 us to perform the fixup passes much more quickly. */
3076 hash_table_init (&ht,
3077 insns_for_mem_newfunc,
3079 insns_for_mem_comp);
3080 compute_insns_for_mem (insns, NULL_RTX, &ht);
3082 for (insn = insns; insn; insn = NEXT_INSN (insn))
3083 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3084 || GET_CODE (insn) == CALL_INSN)
3086 purge_addressof_1 (&PATTERN (insn), insn,
3087 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3088 purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht);
3092 hash_table_free (&ht);
3093 purge_bitfield_addressof_replacements = 0;
3094 purge_addressof_replacements = 0;
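/* Illustrative sketch of the speed-up described above, with made-up types:
   keyed on the REG's address, the table yields just the insns that mention
   that REG, so each purge touches only those insns instead of rescanning
   the whole stream.  */
struct reg_users_sketch { void *reg; void *first_user_insn; };

static void *
users_of_reg_sketch (struct reg_users_sketch *table, int n_entries, void *reg)
{
  int i;
  /* The real table hashes on the pointer (insns_for_mem_hash); a linear
     scan is enough to show the lookup.  */
  for (i = 0; i < n_entries; i++)
    if (table[i].reg == reg)
      return table[i].first_user_insn;
  return 0;		/* REG never appeared under an ADDRESSOF */
}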
3097 /* Pass through the INSNS of function FNDECL and convert virtual register
3098 references to hard register references. */
3101 instantiate_virtual_regs (fndecl, insns)
3108 /* Compute the offsets to use for this function. */
3109 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3110 var_offset = STARTING_FRAME_OFFSET;
3111 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3112 out_arg_offset = STACK_POINTER_OFFSET;
3113 cfa_offset = ARG_POINTER_CFA_OFFSET;
3115 /* Scan all variables and parameters of this function. For each that is
3116 in memory, instantiate all virtual registers if the result is a valid
3117 address. If not, we do it later. That will handle most uses of virtual
3118 regs on many machines. */
3119 instantiate_decls (fndecl, 1);
3121 /* Initialize recognition, indicating that volatile is OK. */
3124 /* Scan through all the insns, instantiating every virtual register still
3125 present. */
3126 for (insn = insns; insn; insn = NEXT_INSN (insn))
3127 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3128 || GET_CODE (insn) == CALL_INSN)
3130 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3131 instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0);
3134 /* Instantiate the stack slots for the parm registers, for later use in
3135 addressof elimination. */
3136 for (i = 0; i < max_parm_reg; ++i)
3137 if (parm_reg_stack_loc[i])
3138 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3140 /* Now instantiate the remaining register equivalences for debugging info.
3141 These will not be valid addresses. */
3142 instantiate_decls (fndecl, 0);
3144 /* Indicate that, from now on, assign_stack_local should use
3145 frame_pointer_rtx. */
3146 virtuals_instantiated = 1;
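/* Illustrative sketch: once the five offsets above are known, the whole
   pass is a lookup plus an addition applied at each virtual register use.
   The offsets below are invented for an imaginary frame layout; the real
   values come from FIRST_PARM_OFFSET, STARTING_FRAME_OFFSET, etc. above.  */
static long
virtual_to_hard_offset_sketch (int which)
{
  /* 0 = incoming args, 1 = stack vars, 2 = dynamic area, 3 = outgoing
     args, 4 = CFA, matching in_arg_offset .. cfa_offset above.  */
  static const long offsets[5] = { 8, -16, 32, 0, 0 };
  return offsets[which];
}
/* So (plus virtual_stack_vars_rtx (const_int 4)) would become
   (plus frame_pointer_rtx (const_int -12)) under this layout.  */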
3149 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3150 all virtual registers in their DECL_RTL's.
3152 If VALID_ONLY, do this only if the resulting address is still valid.
3153 Otherwise, always do it. */
3156 instantiate_decls (fndecl, valid_only)
3162 if (DECL_SAVED_INSNS (fndecl))
3163 /* When compiling an inline function, the obstack used for
3164 rtl allocation is the maybepermanent_obstack. Calling
3165 `resume_temporary_allocation' switches us back to that
3166 obstack while we process this function's parameters. */
3167 resume_temporary_allocation ();
3169 /* Process all parameters of the function. */
3170 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3172 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3174 instantiate_decl (DECL_RTL (decl), size, valid_only);
3176 /* If the parameter was promoted, then the incoming RTL mode may be
3177 larger than the declared type size. We must use the larger of
3178 the two sizes. */
3179 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3180 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3183 /* Now process all variables defined in the function or its subblocks. */
3184 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3186 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3188 /* Save all rtl allocated for this function by raising the
3189 high-water mark on the maybepermanent_obstack. */
3191 /* All further rtl allocation is now done in the current_obstack. */
3192 rtl_in_current_obstack ();
3196 /* Subroutine of instantiate_decls: Process all decls in the given
3197 BLOCK node and all its subblocks. */
3200 instantiate_decls_1 (let, valid_only)
3206 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3207 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3210 /* Process all subblocks. */
3211 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3212 instantiate_decls_1 (t, valid_only);
3215 /* Subroutine of the preceding procedures: Given RTL representing a
3216 decl and the size of the object, do any instantiation required.
3218 If VALID_ONLY is non-zero, it means that the RTL should only be
3219 changed if the new address is valid. */
3222 instantiate_decl (x, size, valid_only)
3227 enum machine_mode mode;
3230 /* If this is not a MEM, no need to do anything. Similarly if the
3231 address is a constant or a register that is not a virtual register. */
3233 if (x == 0 || GET_CODE (x) != MEM)
3237 if (CONSTANT_P (addr)
3238 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3239 || (GET_CODE (addr) == REG
3240 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3241 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3244 /* If we should only do this if the address is valid, copy the address.
3245 We need to do this so we can undo any changes that might make the
3246 address invalid. This copy is unfortunate, but probably can't be
3247 avoided. */
3250 addr = copy_rtx (addr);
3252 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3256 /* Now verify that the resulting address is valid for every integer or
3257 floating-point mode up to and including SIZE bytes long. We do this
3258 since the object might be accessed in any mode and frame addresses
3259 are shared. */
3261 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3262 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3263 mode = GET_MODE_WIDER_MODE (mode))
3264 if (! memory_address_p (mode, addr))
3267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3268 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3269 mode = GET_MODE_WIDER_MODE (mode))
3270 if (! memory_address_p (mode, addr))
3274 /* Put back the address now that we have updated it and we either know
3275 it is valid or we don't care whether it is valid. */
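/* Illustrative sketch of the validity scan above, with plain integers:
   the slot must be addressable in every mode up to SIZE bytes, because
   later code may access it in any of them.  The doubling stands in for
   GET_MODE_WIDER_MODE over the integer (and float) mode classes.  */
static int
address_ok_for_all_modes_sketch (long size,
				 int (*ok_in_mode) (int mode_size))
{
  int mode_size;
  for (mode_size = 1; mode_size <= size; mode_size *= 2)
    if (! ok_in_mode (mode_size))
      return 0;			/* one failing mode spoils the address */
  return 1;
}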
3280 /* Given a pointer to a piece of rtx and an optional pointer to the
3281 containing object, instantiate any virtual registers present in it.
3283 If EXTRA_INSNS, we always do the replacement and generate
3284 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3285 is not valid.
3287 Return 1 if we either had nothing to do or if we were able to do the
3288 needed replacement. Return 0 otherwise; we only return zero if
3289 EXTRA_INSNS is zero.
3291 We first try some simple transformations to avoid the creation of extra
3292 pseudos. */
3295 instantiate_virtual_regs_1 (loc, object, extra_insns)
3303 HOST_WIDE_INT offset = 0;
3309 /* Re-start here to avoid recursion in common cases. */
3316 code = GET_CODE (x);
3318 /* Check for some special cases. */
3335 /* We are allowed to set the virtual registers. This means that
3336 the actual register should receive the source minus the
3337 appropriate offset. This is used, for example, in the handling
3338 of non-local gotos. */
3339 if (SET_DEST (x) == virtual_incoming_args_rtx)
3340 new = arg_pointer_rtx, offset = - in_arg_offset;
3341 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3342 new = frame_pointer_rtx, offset = - var_offset;
3343 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3344 new = stack_pointer_rtx, offset = - dynamic_offset;
3345 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3346 new = stack_pointer_rtx, offset = - out_arg_offset;
3347 else if (SET_DEST (x) == virtual_cfa_rtx)
3348 new = arg_pointer_rtx, offset = - cfa_offset;
3352 /* The only valid sources here are PLUS or REG. Just do
3353 the simplest possible thing to handle them. */
3354 if (GET_CODE (SET_SRC (x)) != REG
3355 && GET_CODE (SET_SRC (x)) != PLUS)
3359 if (GET_CODE (SET_SRC (x)) != REG)
3360 temp = force_operand (SET_SRC (x), NULL_RTX);
3363 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3367 emit_insns_before (seq, object);
3370 if (! validate_change (object, &SET_SRC (x), temp, 0)
3377 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3382 /* Handle special case of virtual register plus constant. */
3383 if (CONSTANT_P (XEXP (x, 1)))
3385 rtx old, new_offset;
3387 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3388 if (GET_CODE (XEXP (x, 0)) == PLUS)
3390 rtx inner = XEXP (XEXP (x, 0), 0);
3392 if (inner == virtual_incoming_args_rtx)
3393 new = arg_pointer_rtx, offset = in_arg_offset;
3394 else if (inner == virtual_stack_vars_rtx)
3395 new = frame_pointer_rtx, offset = var_offset;
3396 else if (inner == virtual_stack_dynamic_rtx)
3397 new = stack_pointer_rtx, offset = dynamic_offset;
3398 else if (inner == virtual_outgoing_args_rtx)
3399 new = stack_pointer_rtx, offset = out_arg_offset;
3400 else if (inner == virtual_cfa_rtx)
3401 new = arg_pointer_rtx, offset = cfa_offset;
3408 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3410 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3413 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3414 new = arg_pointer_rtx, offset = in_arg_offset;
3415 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3416 new = frame_pointer_rtx, offset = var_offset;
3417 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3418 new = stack_pointer_rtx, offset = dynamic_offset;
3419 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3420 new = stack_pointer_rtx, offset = out_arg_offset;
3421 else if (XEXP (x, 0) == virtual_cfa_rtx)
3422 new = arg_pointer_rtx, offset = cfa_offset;
3425 /* We know the second operand is a constant. Unless the
3426 first operand is a REG (which has already been checked),
3427 it needs to be checked. */
3428 if (GET_CODE (XEXP (x, 0)) != REG)
3436 new_offset = plus_constant (XEXP (x, 1), offset);
3438 /* If the new constant is zero, try to replace the sum with just
3439 the register. */
3440 if (new_offset == const0_rtx
3441 && validate_change (object, loc, new, 0))
3444 /* Next try to replace the register and new offset.
3445 There are two changes to validate here and we can't assume that
3446 in the case where the old offset equals the new one, just changing
3447 the register will yield a valid insn. In the interests of a little
3448 efficiency, however, we only call validate_change once (we don't queue
3449 up the changes and then call apply_change_group). */
3453 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3454 : (XEXP (x, 0) = new,
3455 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3463 /* Otherwise copy the new constant into a register and replace
3464 the constant with that register. */
3465 temp = gen_reg_rtx (Pmode);
3467 if (validate_change (object, &XEXP (x, 1), temp, 0))
3468 emit_insn_before (gen_move_insn (temp, new_offset), object);
3471 /* If that didn't work, replace this expression with a
3472 register containing the sum. */
3475 new = gen_rtx_PLUS (Pmode, new, new_offset);
3478 temp = force_operand (new, NULL_RTX);
3482 emit_insns_before (seq, object);
3483 if (! validate_change (object, loc, temp, 0)
3484 && ! validate_replace_rtx (x, temp, object))
3492 /* Fall through to generic two-operand expression case. */
3498 case DIV: case UDIV:
3499 case MOD: case UMOD:
3500 case AND: case IOR: case XOR:
3501 case ROTATERT: case ROTATE:
3502 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3504 case GE: case GT: case GEU: case GTU:
3505 case LE: case LT: case LEU: case LTU:
3506 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3507 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3512 /* Most cases of MEM that convert to valid addresses have already been
3513 handled by our scan of decls. The only special handling we
3514 need here is to make a copy of the rtx to ensure it isn't being
3515 shared if we have to change it to a pseudo.
3517 If the rtx is a simple reference to an address via a virtual register,
3518 it can potentially be shared. In such cases, first try to make it
3519 a valid address, which can also be shared. Otherwise, copy it and
3520 process the copy instead.
3522 First check for common cases that need no processing. These are
3523 usually due to instantiation already being done on a previous instance
3524 of a shared rtx. */
3527 if (CONSTANT_ADDRESS_P (temp)
3528 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3529 || temp == arg_pointer_rtx
3531 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3532 || temp == hard_frame_pointer_rtx
3534 || temp == frame_pointer_rtx)
3537 if (GET_CODE (temp) == PLUS
3538 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3539 && (XEXP (temp, 0) == frame_pointer_rtx
3540 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3541 || XEXP (temp, 0) == hard_frame_pointer_rtx
3543 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3544 || XEXP (temp, 0) == arg_pointer_rtx
3549 if (temp == virtual_stack_vars_rtx
3550 || temp == virtual_incoming_args_rtx
3551 || (GET_CODE (temp) == PLUS
3552 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3553 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3554 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3556 /* This MEM may be shared. If the substitution can be done without
3557 the need to generate new pseudos, we want to do it in place
3558 so all copies of the shared rtx benefit. The call below will
3559 only make substitutions if the resulting address is still
3560 valid.
3562 Note that we cannot pass X as the object in the recursive call
3563 since the insn being processed may not allow all valid
3564 addresses. However, if we were not passed an object, we can
3565 only modify X without copying it if X will have a valid
3566 address.
3568 ??? Also note that this can still lose if OBJECT is an insn that
3569 has fewer restrictions on an address than some other insn.
3570 In that case, we will modify the shared address. This case
3571 doesn't seem very likely, though. One case where this could
3572 happen is in the case of a USE or CLOBBER reference, but we
3573 take care of that below. */
3575 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3576 object ? object : x, 0))
3579 /* Otherwise make a copy and process that copy. We copy the entire
3580 RTL expression since it might be a PLUS which could also be
3581 shared. */
3582 *loc = x = copy_rtx (x);
3585 /* Fall through to generic unary operation case. */
3587 case STRICT_LOW_PART:
3589 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3590 case SIGN_EXTEND: case ZERO_EXTEND:
3591 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3592 case FLOAT: case FIX:
3593 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3597 /* These cases either have just one operand or we know that we need not
3598 check the rest of the operands. */
3604 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3605 go ahead and make the invalid one, but do it to a copy. For a REG,
3606 just make the recursive call, since there's no chance of a problem. */
3608 if ((GET_CODE (XEXP (x, 0)) == MEM
3609 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3611 || (GET_CODE (XEXP (x, 0)) == REG
3612 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3615 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3620 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3621 in front of this insn and substitute the temporary. */
3622 if (x == virtual_incoming_args_rtx)
3623 new = arg_pointer_rtx, offset = in_arg_offset;
3624 else if (x == virtual_stack_vars_rtx)
3625 new = frame_pointer_rtx, offset = var_offset;
3626 else if (x == virtual_stack_dynamic_rtx)
3627 new = stack_pointer_rtx, offset = dynamic_offset;
3628 else if (x == virtual_outgoing_args_rtx)
3629 new = stack_pointer_rtx, offset = out_arg_offset;
3630 else if (x == virtual_cfa_rtx)
3631 new = arg_pointer_rtx, offset = cfa_offset;
3635 temp = plus_constant (new, offset);
3636 if (!validate_change (object, loc, temp, 0))
3642 temp = force_operand (temp, NULL_RTX);
3646 emit_insns_before (seq, object);
3647 if (! validate_change (object, loc, temp, 0)
3648 && ! validate_replace_rtx (x, temp, object))
3656 if (GET_CODE (XEXP (x, 0)) == REG)
3659 else if (GET_CODE (XEXP (x, 0)) == MEM)
3661 /* If we have a (addressof (mem ..)), do any instantiation inside
3662 since we know we'll be making the inside valid when we finally
3663 remove the ADDRESSOF. */
3664 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3673 /* Scan all subexpressions. */
3674 fmt = GET_RTX_FORMAT (code);
3675 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3678 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3681 else if (*fmt == 'E')
3682 for (j = 0; j < XVECLEN (x, i); j++)
3683 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3690 /* Optimization: assuming this function does not receive nonlocal gotos,
3691 delete the handlers for such, as well as the insns to establish
3692 and disestablish them. */
3698 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3700 /* Delete the handler by turning off the flag that would
3701 prevent jump_optimize from deleting it.
3702 Also permit deletion of the nonlocal labels themselves
3703 if nothing local refers to them. */
3704 if (GET_CODE (insn) == CODE_LABEL)
3708 LABEL_PRESERVE_P (insn) = 0;
3710 /* Remove it from the nonlocal_label list, to avoid confusing
3711 flow. */
3712 for (t = nonlocal_labels, last_t = 0; t;
3713 last_t = t, t = TREE_CHAIN (t))
3714 if (DECL_RTL (TREE_VALUE (t)) == insn)
3719 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3721 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3724 if (GET_CODE (insn) == INSN)
3728 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3729 if (reg_mentioned_p (t, PATTERN (insn)))
3735 || (nonlocal_goto_stack_level != 0
3736 && reg_mentioned_p (nonlocal_goto_stack_level,
3743 /* Output a USE for any register use in RTL.
3744 This is used with -noreg to mark the extent of lifespan
3745 of any registers used in a user-visible variable's DECL_RTL. */
3751 if (GET_CODE (rtl) == REG)
3752 /* This is a register variable. */
3753 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3754 else if (GET_CODE (rtl) == MEM
3755 && GET_CODE (XEXP (rtl, 0)) == REG
3756 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3757 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3758 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3759 /* This is a variable-sized structure. */
3760 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3763 /* Like use_variable except that it outputs the USEs after INSN
3764 instead of at the end of the insn-chain. */
3767 use_variable_after (rtl, insn)
3770 if (GET_CODE (rtl) == REG)
3771 /* This is a register variable. */
3772 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3773 else if (GET_CODE (rtl) == MEM
3774 && GET_CODE (XEXP (rtl, 0)) == REG
3775 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3776 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3777 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3778 /* This is a variable-sized structure. */
3779 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3785 return max_parm_reg;
3788 /* Return the first insn following those generated by `assign_parms'. */
3791 get_first_nonparm_insn ()
3794 return NEXT_INSN (last_parm_insn);
3795 return get_insns ();
3798 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3799 Crash if there is none. */
3802 get_first_block_beg ()
3804 register rtx searcher;
3805 register rtx insn = get_first_nonparm_insn ();
3807 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3808 if (GET_CODE (searcher) == NOTE
3809 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3812 abort (); /* Invalid call to this function. (See comments above.) */
3816 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3817 This means a type for which function calls must pass an address to the
3818 function or get an address back from the function.
3819 EXP may be a type node or an expression (whose type is tested). */
3822 aggregate_value_p (exp)
3825 int i, regno, nregs;
3828 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3831 type = TREE_TYPE (exp);
3833 if (RETURN_IN_MEMORY (type))
3835 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3836 and thus can't be returned in registers. */
3837 if (TREE_ADDRESSABLE (type))
3839 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3841 /* Make sure we have suitable call-clobbered regs to return
3842 the value in; if not, we must return it in memory. */
3843 reg = hard_function_value (type, 0);
3845 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3846 it is OK. */
3847 if (GET_CODE (reg) != REG)
3850 regno = REGNO (reg);
3851 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3852 for (i = 0; i < nregs; i++)
3853 if (! call_used_regs[regno + i])
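/* Illustrative summary of the tests above, with invented predicate
   arguments standing in for RETURN_IN_MEMORY, TREE_ADDRESSABLE, the
   -fpcc-struct-return test and the call_used_regs scan:  */
static int
returns_in_memory_sketch (int target_says_memory, int addressable,
			  int pcc_aggregate, int value_regs_call_clobbered)
{
  if (target_says_memory || addressable || pcc_aggregate)
    return 1;
  /* Even a register-sized value goes to memory if the registers chosen
     by hard_function_value are not call-clobbered.  */
  return ! value_regs_call_clobbered;
}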
3858 /* Assign RTL expressions to the function's parameters.
3859 This may involve copying them into registers and using
3860 those registers as the RTL for them.
3862 If SECOND_TIME is non-zero it means that this function is being
3863 called a second time. This is done by integrate.c when a function's
3864 compilation is deferred. We need to come back here in case the
3865 FUNCTION_ARG macro computes items needed for the rest of the compilation
3866 (such as changing which registers are fixed or caller-saved). But suppress
3867 writing any insns or setting DECL_RTL of anything in this case. */
3870 assign_parms (fndecl, second_time)
3875 register rtx entry_parm = 0;
3876 register rtx stack_parm = 0;
3877 CUMULATIVE_ARGS args_so_far;
3878 enum machine_mode promoted_mode, passed_mode;
3879 enum machine_mode nominal_mode, promoted_nominal_mode;
3881 /* Total space needed so far for args on the stack,
3882 given as a constant and a tree-expression. */
3883 struct args_size stack_args_size;
3884 tree fntype = TREE_TYPE (fndecl);
3885 tree fnargs = DECL_ARGUMENTS (fndecl);
3886 /* This is used for the arg pointer when referring to stack args. */
3887 rtx internal_arg_pointer;
3888 /* This is a dummy PARM_DECL that we used for the function result if
3889 the function returns a structure. */
3890 tree function_result_decl = 0;
3891 #ifdef SETUP_INCOMING_VARARGS
3892 int varargs_setup = 0;
3894 rtx conversion_insns = 0;
3896 /* Nonzero if the last arg is named `__builtin_va_alist',
3897 which is used on some machines for old-fashioned non-ANSI varargs.h;
3898 this should be stuck onto the stack as if it had arrived there. */
3900 = (current_function_varargs
3902 && (parm = tree_last (fnargs)) != 0
3904 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3905 "__builtin_va_alist")));
3907 /* Nonzero if function takes extra anonymous args.
3908 This means the last named arg must be on the stack
3909 right before the anonymous ones. */
3911 = (TYPE_ARG_TYPES (fntype) != 0
3912 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3913 != void_type_node));
3915 current_function_stdarg = stdarg;
3917 /* If the reg that the virtual arg pointer will be translated into is
3918 not a fixed reg or is the stack pointer, make a copy of the virtual
3919 arg pointer, and address parms via the copy. The frame pointer is
3920 considered fixed even though it is not marked as such.
3922 The second time through, simply use ap to avoid generating rtx. */
3924 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3925 || ! (fixed_regs[ARG_POINTER_REGNUM]
3926 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3928 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3930 internal_arg_pointer = virtual_incoming_args_rtx;
3931 current_function_internal_arg_pointer = internal_arg_pointer;
3933 stack_args_size.constant = 0;
3934 stack_args_size.var = 0;
3936 /* If struct value address is treated as the first argument, make it so. */
3937 if (aggregate_value_p (DECL_RESULT (fndecl))
3938 && ! current_function_returns_pcc_struct
3939 && struct_value_incoming_rtx == 0)
3941 tree type = build_pointer_type (TREE_TYPE (fntype));
3943 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3945 DECL_ARG_TYPE (function_result_decl) = type;
3946 TREE_CHAIN (function_result_decl) = fnargs;
3947 fnargs = function_result_decl;
3950 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3951 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
3953 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3954 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3956 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3959 /* We haven't yet found an argument that we must push and pretend the
3960 caller did. */
3961 current_function_pretend_args_size = 0;
3963 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3965 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3966 struct args_size stack_offset;
3967 struct args_size arg_size;
3968 int passed_pointer = 0;
3969 int did_conversion = 0;
3970 tree passed_type = DECL_ARG_TYPE (parm);
3971 tree nominal_type = TREE_TYPE (parm);
3974 /* Set LAST_NAMED if this is the last named arg before some
3975 anonymous args. */
3976 int last_named = ((TREE_CHAIN (parm) == 0
3977 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3978 && (stdarg || current_function_varargs));
3979 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3980 most machines, if this is a varargs/stdarg function, then we treat
3981 the last named arg as if it were anonymous too. */
3982 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3984 if (TREE_TYPE (parm) == error_mark_node
3985 /* This can happen after weird syntax errors
3986 or if an enum type is defined among the parms. */
3987 || TREE_CODE (parm) != PARM_DECL
3988 || passed_type == NULL)
3990 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3991 = gen_rtx_MEM (BLKmode, const0_rtx);
3992 TREE_USED (parm) = 1;
3996 /* For a varargs.h function, save info about regs and stack space
3997 used by the individual args, not including the va_alist arg. */
3998 if (hide_last_arg && last_named)
3999 current_function_args_info = args_so_far;
4001 /* Find mode of arg as it is passed, and mode of arg
4002 as it should be during execution of this function. */
4003 passed_mode = TYPE_MODE (passed_type);
4004 nominal_mode = TYPE_MODE (nominal_type);
4006 /* If the parm's mode is VOID, its value doesn't matter; avoid the
4007 usual things like emit_move_insn that could crash. */
4008 if (nominal_mode == VOIDmode)
4010 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4014 /* If the parm is to be passed as a transparent union, use the
4015 type of the first field for the tests below. We have already
4016 verified that the modes are the same. */
4017 if (DECL_TRANSPARENT_UNION (parm)
4018 || TYPE_TRANSPARENT_UNION (passed_type))
4019 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4021 /* See if this arg was passed by invisible reference. It is if
4022 it is an object whose size depends on the contents of the
4023 object itself or if the machine requires these objects be passed
4024 that way. */
4026 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4027 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4028 || TREE_ADDRESSABLE (passed_type)
4029 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4030 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4031 passed_type, named_arg)
4035 passed_type = nominal_type = build_pointer_type (passed_type);
4037 passed_mode = nominal_mode = Pmode;
4040 promoted_mode = passed_mode;
4042 #ifdef PROMOTE_FUNCTION_ARGS
4043 /* Compute the mode to which the arg is actually extended. */
4044 unsignedp = TREE_UNSIGNED (passed_type);
4045 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4048 /* Let machine desc say which reg (if any) the parm arrives in.
4049 0 means it arrives on the stack. */
4050 #ifdef FUNCTION_INCOMING_ARG
4051 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4052 passed_type, named_arg);
4054 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4055 passed_type, named_arg);
4058 if (entry_parm == 0)
4059 promoted_mode = passed_mode;
4061 #ifdef SETUP_INCOMING_VARARGS
4062 /* If this is the last named parameter, do any required setup for
4063 varargs or stdargs. We need to know about the case of this being an
4064 addressable type, in which case we skip the registers it
4065 would have arrived in.
4067 For stdargs, LAST_NAMED will be set for two parameters, the one that
4068 is actually the last named, and the dummy parameter. We only
4069 want to do this action once.
4071 Also, indicate when RTL generation is to be suppressed. */
4072 if (last_named && !varargs_setup)
4074 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4075 current_function_pretend_args_size,
4081 /* Determine parm's home in the stack,
4082 in case it arrives in the stack or we should pretend it did.
4084 Compute the stack position and rtx where the argument arrives
4087 There is one complexity here: If this was a parameter that would
4088 have been passed in registers, but wasn't only because it is
4089 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4090 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4091 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4092 0 as it was the previous time. */
4094 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4095 locate_and_pad_parm (promoted_mode, passed_type,
4096 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4099 #ifdef FUNCTION_INCOMING_ARG
4100 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4102 pretend_named) != 0,
4104 FUNCTION_ARG (args_so_far, promoted_mode,
4106 pretend_named) != 0,
4109 fndecl, &stack_args_size, &stack_offset, &arg_size);
4113 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4115 if (offset_rtx == const0_rtx)
4116 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4118 stack_parm = gen_rtx_MEM (promoted_mode,
4119 gen_rtx_PLUS (Pmode,
4120 internal_arg_pointer,
	/* If this is a memory ref that contains aggregate components,
	   mark it as such for cse and loop optimize.  Likewise if it
	   is readonly.  */
	MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
	RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
	MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
      }
4131 /* If this parameter was passed both in registers and in the stack,
4132 use the copy on the stack. */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
	entry_parm = 0;
4136 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4137 /* If this parm was passed part in regs and part in memory,
4138 pretend it arrived entirely in memory
4139 by pushing the register-part onto the stack.
4141 In the special case of a DImode or DFmode that is split,
4142 we could put it together in a pseudoreg directly,
4143 but for now that's not worth bothering with. */
      if (entry_parm)
	{
	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
						  passed_type, named_arg);

	  if (nregs > 0)
	    {
	      current_function_pretend_args_size
		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
		   / (PARM_BOUNDARY / BITS_PER_UNIT)
		   * (PARM_BOUNDARY / BITS_PER_UNIT));
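	      /* Illustrative example (values hypothetical): with
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits (8 bytes),
		 nregs == 3 yields 12 bytes of register-part, which the
		 round-up expression above widens to 16.  */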
	      /* Handle calls that pass values in multiple non-contiguous
		 locations.  The Irix 6 ABI has examples of this.  */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (validize_mem (stack_parm), entry_parm,
				  int_size_in_bytes (TREE_TYPE (parm)),
				  (TYPE_ALIGN (TREE_TYPE (parm))
				   / BITS_PER_UNIT));
	      else
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm), nregs,
				     int_size_in_bytes (TREE_TYPE (parm)));

	      entry_parm = stack_parm;
	    }
	}
#endif
4176 /* If we didn't decide this parm came in a register,
4177 by default it came on the stack. */
4178 if (entry_parm == 0)
4179 entry_parm = stack_parm;
4181 /* Record permanently how this parm was passed. */
4183 DECL_INCOMING_RTL (parm) = entry_parm;
4185 /* If there is actually space on the stack for this parm,
4186 count it in stack_args_size; otherwise set stack_parm to 0
4187 to indicate there is no preallocated stack slot for the parm. */
4189 if (entry_parm == stack_parm
4190 || (GET_CODE (entry_parm) == PARALLEL
4191 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4192 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4193 /* On some machines, even if a parm value arrives in a register
4194 there is still an (uninitialized) stack slot allocated for it.
4196 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4197 whether this parameter already has a stack slot allocated,
4198 because an arg block exists only if current_function_args_size
4199 is larger than some threshold, and we haven't calculated that
	     yet.  So, for now, we just assume that stack slots never exist
	     in this case.  */
	  || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
	  )
	{
	  stack_args_size.constant += arg_size.constant;
	  if (arg_size.var)
	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
	}
      else
	/* No stack slot was pushed for this parm.  */
	stack_parm = 0;
4214 /* Update info on where next arg arrives in registers. */
4216 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4217 passed_type, named_arg);
      /* If this is our second time through, we are done with this parm.  */
      if (second_time)
	continue;
4223 /* If we can't trust the parm stack slot to be aligned enough
4224 for its ultimate type, don't use that slot after entry.
4225 We'll make another stack slot, if we need one. */
      {
	int thisparm_boundary
	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
	  stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
	 don't store it back in that same slot.  */
      if (entry_parm != 0
	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
	stack_parm = 0;
4241 /* Now adjust STACK_PARM to the mode and precise location
4242 where this parameter should live during execution,
4243 if we discover that it must live in the stack during execution.
4244 To make debuggers happier on big-endian machines, we store
4245 the value in the last bytes of the space available. */
      if (nominal_mode != BLKmode && nominal_mode != passed_mode
	  && stack_parm != 0)
	{
	  rtx offset_rtx;
4252 if (BYTES_BIG_ENDIAN
4253 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4254 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4255 - GET_MODE_SIZE (nominal_mode));
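	  /* Illustrative example: on a big-endian target with
	     UNITS_PER_WORD == 8, an SImode value promoted to DImode is
	     kept in the last 4 bytes of its 8-byte slot, so the offset
	     is advanced by 8 - 4 = 4 here.  */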
4257 offset_rtx = ARGS_SIZE_RTX (stack_offset);
	  if (offset_rtx == const0_rtx)
	    stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
	  else
	    stack_parm = gen_rtx_MEM (nominal_mode,
				      gen_rtx_PLUS (Pmode,
						    internal_arg_pointer,
						    offset_rtx));

	  /* If this is a memory ref that contains aggregate components,
	     mark it as such for cse and loop optimize.  */
	  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
	}
4273 /* We need this "use" info, because the gcc-register->stack-register
4274 converter in reg-stack.c needs to know which registers are active
     at the start of the function call.  The actual parameter loading
     instructions are not always available by then, since they might
     have been optimized away.  */
4279 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4280 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4283 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4284 in the mode in which it arrives.
4285 STACK_PARM is an RTX for a stack slot where the parameter can live
4286 during the function (in case we want to put it there).
4287 STACK_PARM is 0 if no stack slot was pushed for it.
4289 Now output code if necessary to convert ENTRY_PARM to
4290 the type in which this function declares it,
4291 and store that result in an appropriate place,
4292 which may be a pseudo reg, may be STACK_PARM,
4293 or may be a local stack slot if STACK_PARM is 0.
4295 Set DECL_RTL to that place. */
      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
	{
	  /* If a BLKmode arrives in registers, copy it to a stack slot.
	     Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (entry_parm) == REG
	      || GET_CODE (entry_parm) == PARALLEL)
	    {
	      int size_stored
		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
			      UNITS_PER_WORD);
4309 /* Note that we will be storing an integral number of words.
4310 So we have to be careful to ensure that we allocate an
4311 integral number of words. We do this below in the
4312 assign_stack_local if space was not allocated in the argument
4313 list. If it was, this will not work if PARM_BOUNDARY is not
4314 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4315 if it becomes a problem. */
	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  size_stored, 0);

		  /* If this is a memory ref that contains aggregate
		     components, mark it as such for cse and loop optimize.  */
		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
		}
	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
		abort ();
4331 if (TREE_READONLY (parm))
4332 RTX_UNCHANGING_P (stack_parm) = 1;
4334 /* Handle calls that pass values in multiple non-contiguous
4335 locations. The Irix 6 ABI has examples of this. */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (validize_mem (stack_parm), entry_parm,
				  int_size_in_bytes (TREE_TYPE (parm)),
				  (TYPE_ALIGN (TREE_TYPE (parm))
				   / BITS_PER_UNIT));
	      else
		move_block_from_reg (REGNO (entry_parm),
				     validize_mem (stack_parm),
				     size_stored / UNITS_PER_WORD,
				     int_size_in_bytes (TREE_TYPE (parm)));
	    }
	  DECL_RTL (parm) = stack_parm;
	}
4349 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4350 && ! DECL_INLINE (fndecl))
4351 /* layout_decl may set this. */
4352 || TREE_ADDRESSABLE (parm)
4353 || TREE_SIDE_EFFECTS (parm)
4354 /* If -ffloat-store specified, don't put explicit
4355 float variables into registers. */
4356 || (flag_float_store
4357 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4358 /* Always assign pseudo to structure return or item passed
4359 by invisible reference. */
	       || passed_pointer || parm == function_result_decl)
	{
4362 /* Store the parm in a pseudoregister during the function, but we
4363 may need to do it in a wider mode. */
4365 register rtx parmreg;
4366 int regno, regnoi = 0, regnor = 0;
4368 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4370 promoted_nominal_mode
4371 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4373 parmreg = gen_reg_rtx (promoted_nominal_mode);
4374 mark_user_reg (parmreg);
	  /* If this was an item that we received a pointer to, set DECL_RTL
	     appropriately.  */
	  if (passed_pointer)
	    {
	      DECL_RTL (parm)
		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
	      MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
	    }
	  else
	    DECL_RTL (parm) = parmreg;
4387 /* Copy the value into the register. */
4388 if (nominal_mode != passed_mode
	      || promoted_nominal_mode != promoted_mode)
	    {
4392 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4393 mode, by the caller. We now have to convert it to
4394 NOMINAL_MODE, if different. However, PARMREG may be in
4395 a different mode than NOMINAL_MODE if it is being stored
4398 If ENTRY_PARM is a hard register, it might be in a register
4399 not valid for operating in its mode (e.g., an odd-numbered
4400 register for a DFmode). In that case, moves are the only
4401 thing valid, so we can't do a convert from there. This
		 occurs when the calling sequence allows such misaligned
		 usages.
4405 In addition, the conversion may involve a call, which could
4406 clobber parameters which haven't been copied to pseudo
4407 registers yet. Therefore, we must first copy the parm to
4408 a pseudo reg here, and save the conversion until after all
4409 parameters have been moved. */
4411 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4413 emit_move_insn (tempreg, validize_mem (entry_parm));
4415 push_to_sequence (conversion_insns);
4416 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4418 /* TREE_USED gets set erroneously during expand_assignment. */
4419 save_tree_used = TREE_USED (parm);
4420 expand_assignment (parm,
4421 make_tree (nominal_type, tempreg), 0, 0);
4422 TREE_USED (parm) = save_tree_used;
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
	  else
	    emit_move_insn (parmreg, validize_mem (entry_parm));
4430 /* If we were passed a pointer but the actual value
4431 can safely live in a register, put it in one. */
4432 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4433 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4434 && ! DECL_INLINE (fndecl))
4435 /* layout_decl may set this. */
4436 || TREE_ADDRESSABLE (parm)
4437 || TREE_SIDE_EFFECTS (parm)
4438 /* If -ffloat-store specified, don't put explicit
4439 float variables into registers. */
4440 || (flag_float_store
		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
	    {
4443 /* We can't use nominal_mode, because it will have been set to
4444 Pmode above. We must use the actual mode of the parm. */
4445 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4446 mark_user_reg (parmreg);
4447 emit_move_insn (parmreg, DECL_RTL (parm));
4448 DECL_RTL (parm) = parmreg;
	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
		 now the parm.  */
	      stack_parm = 0;
	    }
4453 #ifdef FUNCTION_ARG_CALLEE_COPIES
4454 /* If we are passed an arg by reference and it is our responsibility
4455 to make a copy, do it now.
4456 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4457 original argument, so we must recreate them in the call to
4458 FUNCTION_ARG_CALLEE_COPIES. */
4459 /* ??? Later add code to handle the case that if the argument isn't
4460 modified, don't do the copy. */
4462 else if (passed_pointer
4463 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4464 TYPE_MODE (DECL_ARG_TYPE (parm)),
						  DECL_ARG_TYPE (parm),
						  named_arg)
		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
	    {
	      rtx copy = 0;
	      tree type = DECL_ARG_TYPE (parm);
4472 /* This sequence may involve a library call perhaps clobbering
4473 registers that haven't been copied to pseudos yet. */
4475 push_to_sequence (conversion_insns);
4477 if (TYPE_SIZE (type) == 0
4478 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4479 /* This is a variable sized object. */
4480 copy = gen_rtx_MEM (BLKmode,
4481 allocate_dynamic_stack_space
4482 (expr_size (parm), NULL_RTX,
4483 TYPE_ALIGN (type)));
	      else
		copy = assign_stack_temp (TYPE_MODE (type),
4486 int_size_in_bytes (type), 1);
4487 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4488 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4490 store_expr (parm, copy, 0);
4491 emit_move_insn (parmreg, XEXP (copy, 0));
4492 if (current_function_check_memory_usage)
4493 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4494 XEXP (copy, 0), Pmode,
4495 GEN_INT (int_size_in_bytes (type)),
4496 TYPE_MODE (sizetype),
4497 GEN_INT (MEMORY_USE_RW),
4498 TYPE_MODE (integer_type_node));
	      conversion_insns = get_insns ();
	      end_sequence ();
	    }
#endif /* FUNCTION_ARG_CALLEE_COPIES */
4505 /* In any case, record the parm's desired stack location
4506 in case we later discover it must live in the stack.
	     If it is a COMPLEX value, store the stack location for both
	     halves.  */
4511 if (GET_CODE (parmreg) == CONCAT)
4512 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4514 regno = REGNO (parmreg);
	  if (regno >= max_parm_reg)
	    {
	      rtx *new;
	      int old_max_parm_reg = max_parm_reg;
4521 /* It's slow to expand this one register at a time,
4522 but it's also rare and we need max_parm_reg to be
4523 precisely correct. */
4524 max_parm_reg = regno + 1;
4525 new = (rtx *) xrealloc (parm_reg_stack_loc,
4526 max_parm_reg * sizeof (rtx));
4527 bzero ((char *) (new + old_max_parm_reg),
4528 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
	      parm_reg_stack_loc = new;
	    }
	  if (GET_CODE (parmreg) == CONCAT)
	    {
	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

	      regnor = REGNO (gen_realpart (submode, parmreg));
	      regnoi = REGNO (gen_imagpart (submode, parmreg));

	      if (stack_parm != 0)
		{
		  parm_reg_stack_loc[regnor]
		    = gen_realpart (submode, stack_parm);
		  parm_reg_stack_loc[regnoi]
		    = gen_imagpart (submode, stack_parm);
		}
	      else
		{
		  parm_reg_stack_loc[regnor] = 0;
		  parm_reg_stack_loc[regnoi] = 0;
		}
	    }
	  else
	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4555 /* Mark the register as eliminable if we did no conversion
4556 and it was copied from memory at a fixed offset,
4557 and the arg pointer was not copied to a pseudo-reg.
4558 If the arg pointer is a pseudo reg or the offset formed
4559 an invalid address, such memory-equivalences
4560 as we make here would screw up life analysis for it. */
	  if (nominal_mode == passed_mode
	      && ! did_conversion
	      && stack_parm != 0
4564 && GET_CODE (stack_parm) == MEM
4565 && stack_offset.var == 0
4566 && reg_mentioned_p (virtual_incoming_args_rtx,
4567 XEXP (stack_parm, 0)))
	    {
	      rtx linsn = get_last_insn ();
	      rtx sinsn, set;
	      /* Mark complex types separately.  */
	      if (GET_CODE (parmreg) == CONCAT)
		/* Scan backwards for the set of the real and
		   imaginary parts.  */
		for (sinsn = linsn; sinsn != 0;
		     sinsn = prev_nonnote_insn (sinsn))
		  {
		    set = single_set (sinsn);
		    if (set != 0
			&& SET_DEST (set) == regno_reg_rtx [regnoi])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnoi],
					     REG_NOTES (sinsn));
		    else if (set != 0
			     && SET_DEST (set) == regno_reg_rtx [regnor])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnor],
					     REG_NOTES (sinsn));
		  }
	      else if ((set = single_set (linsn)) != 0
		       && SET_DEST (set) == parmreg)
		REG_NOTES (linsn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV,
				       stack_parm, REG_NOTES (linsn));
	    }
4600 /* For pointer data type, suggest pointer register. */
4601 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4602 mark_reg_pointer (parmreg,
			      (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
			       / BITS_PER_UNIT));
	}
      else
	{
4608 /* Value must be stored in the stack slot STACK_PARM
4609 during function execution. */
	  if (promoted_mode != nominal_mode)
	    {
4613 /* Conversion is required. */
4614 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4616 emit_move_insn (tempreg, validize_mem (entry_parm));
4618 push_to_sequence (conversion_insns);
4619 entry_parm = convert_to_mode (nominal_mode, tempreg,
4620 TREE_UNSIGNED (TREE_TYPE (parm)));
	      if (stack_parm)
		{
		  /* ??? This may need a big-endian conversion on sparc64.  */
		  stack_parm = change_address (stack_parm, nominal_mode,
					       NULL_RTX);
		}

	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
	  if (entry_parm != stack_parm)
	    {
	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  GET_MODE_SIZE (GET_MODE (entry_parm)),
					  0);
		  /* If this is a memory ref that contains aggregate
		     components, mark it as such for cse and loop optimize.  */
		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
		}
	      if (promoted_mode != nominal_mode)
		{
		  push_to_sequence (conversion_insns);
		  emit_move_insn (validize_mem (stack_parm),
				  validize_mem (entry_parm));
		  conversion_insns = get_insns ();
		  end_sequence ();
		}
	      else
		emit_move_insn (validize_mem (stack_parm),
				validize_mem (entry_parm));
	    }
	  if (current_function_check_memory_usage)
	    {
	      push_to_sequence (conversion_insns);
	      emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				 XEXP (stack_parm, 0), Pmode,
				 GEN_INT (GET_MODE_SIZE (GET_MODE
							 (stack_parm))),
				 TYPE_MODE (sizetype),
				 GEN_INT (MEMORY_USE_RW),
				 TYPE_MODE (integer_type_node));

	      conversion_insns = get_insns ();
	      end_sequence ();
	    }
	  DECL_RTL (parm) = stack_parm;
	}

      /* If this "parameter" was the place where we are receiving the
	 function's incoming structure pointer, set up the result.  */
      if (parm == function_result_decl)
	{
	  tree result = DECL_RESULT (fndecl);
	  tree restype = TREE_TYPE (result);

	  DECL_RTL (result)
	    = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));

	  MEM_SET_IN_STRUCT_P (DECL_RTL (result),
			       AGGREGATE_TYPE_P (restype));
	}

      if (TREE_THIS_VOLATILE (parm))
	MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
      if (TREE_READONLY (parm))
	RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
    }
4693 /* Output all parameter conversion instructions (possibly including calls)
4694 now that all parameters have been copied out of hard registers. */
4695 emit_insns (conversion_insns);
4697 last_parm_insn = get_last_insn ();
4699 current_function_args_size = stack_args_size.constant;
  /* Adjust function incoming argument size for alignment and
     minimum size.  */
4704 #ifdef REG_PARM_STACK_SPACE
4705 #ifndef MAYBE_REG_PARM_STACK_SPACE
4706 current_function_args_size = MAX (current_function_args_size,
				    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif
4711 #ifdef STACK_BOUNDARY
4712 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4714 current_function_args_size
4715 = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif
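  /* Illustrative example: with STACK_BOUNDARY == 64, STACK_BYTES is 8,
     so an args size of 20 bytes is rounded up to 24 by the expression
     above.  */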
4719 #ifdef ARGS_GROW_DOWNWARD
4720 current_function_arg_offset_rtx
4721 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4722 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4723 size_int (-stack_args_size.constant)),
		     NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif
  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */
4732 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4733 current_function_args_size);
4735 /* For stdarg.h function, save info about
4736 regs and stack space used by the named args. */
4739 current_function_args_info = args_so_far;
4741 /* Set the rtx used for the function return value. Put this in its
4742 own variable so any optimizers that need this information don't have
4743 to include tree.h. Do this here so it gets done when an inlined
4744 function gets output. */
  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
4749 /* Indicate whether REGNO is an incoming argument to the current function
4750 that was promoted to a wider mode. If so, return the RTX for the
4751 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */
4755 #ifdef PROMOTE_FUNCTION_ARGS
rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;
4765 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4766 arg = TREE_CHAIN (arg))
4767 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4768 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4769 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4772 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4774 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }

  return 0;
}
#endif
4789 /* Compute the size and offset from the start of the stacked arguments for a
4790 parm passed in mode PASSED_MODE and with type TYPE.
   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.
4795 The starting offset and size for this parm are returned in *OFFSET_PTR
4796 and *ARG_SIZE_PTR, respectively.
4798 IN_REGS is non-zero if the argument will be passed in registers. It will
4799 never be set if REG_PARM_STACK_SPACE is not defined.
4801 FNDECL is the function in which the argument was defined.
4803 There are two types of rounding that are done. The first, controlled by
4804 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4805 list to be aligned to the specific boundary (in bits). This rounding
4806 affects the initial and starting offsets, but not the argument size.
4808 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4809 optionally rounds the size of the parm to PARM_BOUNDARY. The
4810 initial offset is not affected by this rounding, while the size always
4811 is and the starting offset may be. */
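/* Illustrative example (boundaries hypothetical): if FUNCTION_ARG_BOUNDARY
   returns 64 bits, a running offset of 4 bytes is first moved up to 8
   before the parm is placed; if PARM_BOUNDARY is 32 bits, a 5-byte parm
   then has its size rounded up to 8, while this second rounding leaves
   the initial offset untouched.  */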
4813 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4814 initial_offset_ptr is positive because locate_and_pad_parm's
4815 callers pass in the total size of args so far as
4816 initial_offset_ptr. arg_size_ptr is always positive.*/
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
{
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4831 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4832 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4834 #ifdef REG_PARM_STACK_SPACE
4835 /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;
4841 #ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      size_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */
4861 arg_size_ptr->var = 0;
4862 arg_size_ptr->constant = 0;
4864 #ifdef ARGS_GROW_DOWNWARD
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
4876 if (where_pad != none
4877 && (TREE_CODE (sizetree) != INTEGER_CST
4878 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4879 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4880 SUB_PARM_SIZE (*offset_ptr, sizetree);
4881 if (where_pad != downward)
4882 pad_to_arg_alignment (offset_ptr, boundary);
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
				      size_binop (MINUS_EXPR,
						  integer_zero_node,
						  initial_offset_ptr->var),
				      offset_ptr->var);
    }
  else
    arg_size_ptr->constant = (- initial_offset_ptr->constant
			      - offset_ptr->constant);
4896 #else /* !ARGS_GROW_DOWNWARD */
4897 pad_to_arg_alignment (initial_offset_ptr, boundary);
4898 *offset_ptr = *initial_offset_ptr;
4900 #ifdef PUSH_ROUNDING
4901 if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif
4905 /* Pad_below needs the pre-rounded size to know how much to pad below
4906 so this must be done before rounding up. */
4907 if (where_pad == downward
4908 /* However, BLKmode args passed in regs have their padding done elsewhere.
4909 The stack slot must be able to hold the entire register. */
4910 && !(in_regs && passed_mode == BLKmode))
4911 pad_below (offset_ptr, passed_mode, sizetree);
4913 if (where_pad != none
4914 && (TREE_CODE (sizetree) != INTEGER_CST
4915 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4916 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4918 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
4922 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4923 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
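/* Illustrative example: with BOUNDARY == 64 bits (8 bytes), a constant
   offset of 20 becomes CEIL_ROUND (20, 8) == 24; when arguments grow
   downward the rounding is FLOOR_ROUND instead, e.g.
   FLOOR_ROUND (-20, 8) == -24.  */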
static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0;  /*?*/
	}
      else
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
4956 #ifndef ARGS_GROW_DOWNWARD
static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
4965 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4966 offset_ptr->constant
4967 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4968 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	     - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
#endif
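/* Illustrative example: with PARM_BOUNDARY == 32 and HImode (2 bytes),
   the slot is rounded to 4 bytes, so pad_below advances the offset by
   4 - 2 = 2 and the value occupies the upper half of the slot.  */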
4986 #ifdef ARGS_GROW_DOWNWARD
static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  return size_binop (MULT_EXPR,
		     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
		     size_int (divisor));
}
#endif
4998 /* Walk the tree of blocks describing the binding levels within a function
4999 and warn about uninitialized variables.
5000 This is done after calling flow_analysis and before global_alloc
5001 clobbers the pseudo-regs to hard regs. */
void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
5010 if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
5012 because assigning the fields one by one can fail to convince
5013 flow.c that the entire aggregate was initialized.
5014 Unions are troublesome because members may be shorter. */
5015 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5016 && DECL_RTL (decl) != 0
5017 && GET_CODE (DECL_RTL (decl)) == REG
5018 /* Global optimizations can make it difficult to determine if a
5019 particular variable has been initialized. However, a VAR_DECL
5020 with a nonzero DECL_INITIAL had an initializer, so do not
5021 claim it is potentially uninitialized.
5023 We do not care about the actual value in DECL_INITIAL, so we do
5024 not worry that it may be a dangling pointer. */
5025 && DECL_INITIAL (decl) == NULL_TREE
5026 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5027 warning_with_decl (decl,
5028 "`%s' might be used uninitialized in this function");
5029 if (TREE_CODE (decl) == VAR_DECL
5030 && DECL_RTL (decl) != 0
5031 && GET_CODE (DECL_RTL (decl)) == REG
5032 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5033 warning_with_decl (decl,
5034 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5036 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
5040 /* Do the appropriate part of uninitialized_vars_warning
5041 but for arguments instead of local variables. */
void
setjmp_args_warning ()
{
  register tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
5048 decl; decl = TREE_CHAIN (decl))
5049 if (DECL_RTL (decl) != 0
5050 && GET_CODE (DECL_RTL (decl)) == REG
5051 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function calls setjmp, put all vars into the stack
5056 unless they were declared `register'. */
void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5064 if ((TREE_CODE (decl) == VAR_DECL
5065 || TREE_CODE (decl) == PARM_DECL)
5066 && DECL_RTL (decl) != 0
5067 && (GET_CODE (DECL_RTL (decl)) == REG
5068 || (GET_CODE (DECL_RTL (decl)) == MEM
5069 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5070 /* If this variable came from an inline function, it must be
5071 that its life doesn't overlap the setjmp. If there was a
5072 setjmp in the function, it would already be in memory. We
	   must exclude such variables because their DECL_RTL might be
5074 set to strange things such as virtual_stack_vars_rtx. */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
5084 put_var_into_stack (decl);
5085 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
5089 /* Like the previous function, but for args instead of local variables. */
void
setjmp_protect_args ()
{
  register tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
5096 decl; decl = TREE_CHAIN (decl))
5097 if ((TREE_CODE (decl) == VAR_DECL
5098 || TREE_CODE (decl) == PARM_DECL)
5099 && DECL_RTL (decl) != 0
5100 && (GET_CODE (DECL_RTL (decl)) == REG
5101 || (GET_CODE (DECL_RTL (decl)) == MEM
	      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
5114 /* Return the context-pointer register corresponding to DECL,
5115 or 0 if it does not need one. */
rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0
      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
    return 0;
5128 /* We treat inline_function_decl as an alias for the current function
5129 because that is the inline function whose vars, types, etc.
5130 are being merged into the current function.
5131 See expand_inline_function. */
5132 if (context == current_function_decl || context == inline_function_decl)
5133 return virtual_stack_vars_rtx;
5135 for (link = context_display; link; link = TREE_CHAIN (link))
5136 if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
5142 /* Convert a stack slot address ADDR for variable VAR
5143 (from a containing function)
5144 into an address valid in this function (using a static chain). */
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;
5157 /* If this is the present function, we need not do anything. */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();
5168 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5169 addr = XEXP (XEXP (addr, 0), 0);
5171 /* Decode given address as base reg plus displacement. */
5172 if (GET_CODE (addr) == REG)
5173 basereg = addr, displacement = 0;
5174 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();
5179 /* We accept vars reached via the containing function's
5180 incoming arg pointer and via its stack variables pointer. */
  if (basereg == fp->internal_arg_pointer)
    {
5183 /* If reached via arg pointer, get the arg pointer value
5184 out of that function's stack frame.
5186 There are two cases: If a separate ap is needed, allocate a
5187 slot in the outer function for it and dereference it that way.
5188 This is correct even if the real ap is actually a pseudo.
5189 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;
5195 if (fp->x_arg_pointer_save_area == 0)
5196 fp->x_arg_pointer_save_area
5197 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5199 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5200 addr = memory_address (Pmode, addr);
      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }
  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
5231 /* Return the address of the trampoline for entering nested fn FUNCTION.
5232 If necessary, allocate a trampoline (in the stack frame)
5233 and emit rtl to initialize its contents (at entry to this function). */
rtx
trampoline_address (function)
     tree function;
{
  rtx tramp;
  tree link;
  tree rtlexp;
  tree fn_context;
  struct function *fp;
5245 /* Find an existing trampoline and return it. */
5246 for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5251 for (fp = outer_function_chain; fp; fp = fp->next)
5252 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}
5260 /* None exists; we must make one. */
5262 /* Find the `struct function' for the function containing FUNCTION. */
  fp = 0;
  fn_context = decl_function_context (function);
5265 if (fn_context != current_function_decl
5266 && fn_context != inline_function_decl)
5267 for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;
5271 /* Allocate run-time space for this trampoline
5272 (usually in the defining function's stack frame). */
5273 #ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
				fp ? fp : current_function);
#endif
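  /* Illustrative example (sizes hypothetical): if TRAMPOLINE_SIZE is 26
     and TRAMPOLINE_ALIGNMENT is 64 bits, 26 + 8 - 1 = 33 bytes are
     reserved, so a full 26 aligned bytes remain after the address is
     rounded up.  */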
5288 /* Record the trampoline for reuse and note it for later initialization
5289 by expand_function_end. */
  if (fp != 0)
    {
      push_obstacks (fp->function_maybepermanent_obstack,
5293 fp->function_maybepermanent_obstack);
5294 rtlexp = make_node (RTL_EXPR);
5295 RTL_EXPR_RTL (rtlexp) = tramp;
5296 fp->x_trampoline_list = tree_cons (function, rtlexp,
					 fp->x_trampoline_list);
      pop_obstacks ();
    }
  else
    {
5302 /* Make the RTL_EXPR node temporary, not momentary, so that the
5303 trampoline_list doesn't become garbage. */
5304 int momentary = suspend_momentary ();
5305 rtlexp = make_node (RTL_EXPR);
5306 resume_momentary (momentary);
5308 RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }
5312 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
5316 /* Given a trampoline address,
5317 round it to multiple of TRAMPOLINE_ALIGNMENT. */
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
5323 #ifdef TRAMPOLINE_ALIGNMENT
5324 /* Round address up to desired boundary. */
5325 rtx temp = gen_reg_rtx (Pmode);
5326 temp = expand_binop (Pmode, add_optab, tramp,
5327 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5328 temp, 0, OPTAB_LIB_WIDEN);
5329 tramp = expand_binop (Pmode, and_optab, temp,
5330 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
		      temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
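/* Illustrative example: the two expand_binop calls above compute
   (tramp + align - 1) & -align; with a 16-byte alignment, address
   0x1003 becomes (0x1003 + 15) & ~15 == 0x1010.  */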
5336 /* The functions identify_blocks and reorder_blocks provide a way to
5337 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5338 duplicate portions of the RTL code. Call identify_blocks before
5339 changing the RTL, and call reorder_blocks after. */
5341 /* Put all this function's BLOCK nodes including those that are chained
5342 onto the first block into a vector, and return it.
5343 Also store in each NOTE for the beginning or end of a block
5344 the index of that block in the vector.
5345 The arguments are BLOCK, the chain of top-level blocks of the function,
5346 and INSNS, the insn chain of the function. */
tree *
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 1;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return 0;
5364 n_blocks = all_blocks (block, 0);
5365 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5366 block_stack = (int *) alloca (n_blocks * sizeof (int));
5368 all_blocks (block, block_vector);
5370 for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	    current_block_number = block_stack[--depth];
	  }
      }
  if (n_blocks != next_block_number)
    abort ();

  return block_vector;
}
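/* Note: the BLOCK_BEG/BLOCK_END notes nest like parentheses, which is
   why the simple block_stack above suffices to recover the enclosing
   block number at each BLOCK_END.  */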
5392 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5393 and a revised instruction chain, rebuild the tree structure
5394 of BLOCK nodes to correspond to the new order of RTL.
5395 The new block tree is inserted below TOP_BLOCK.
5396 Returns the current top-level block. */
tree
reorder_blocks (block_vector, block, insns)
     tree *block_vector;
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  if (block_vector == 0)
    return block;

  /* Prune the old trees away, so that they don't get in the way.  */
5411 BLOCK_SUBBLOCKS (current_block) = 0;
5412 BLOCK_CHAIN (current_block) = 0;
5414 for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }
5440 BLOCK_SUBBLOCKS (current_block)
5441 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
5445 /* Reverse the order of elements in the chain T of blocks,
5446 and return the new head of the chain (old last element). */
static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;

  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }

  return prev;
}
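/* Note: this is the usual in-place singly-linked-list reversal; e.g. a
   chain A -> B -> C comes back as C -> B -> A, with each BLOCK_CHAIN
   pointer redirected to its former predecessor.  */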
5462 /* Count the subblocks of the list starting with BLOCK, and list them
   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
5475 TREE_ASM_WRITTEN (block) = 0;
5477 /* Record this block. */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;
5483 /* Record the subblocks, and their subblocks... */
5484 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5485 vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
5492 /* Allocate a function structure and reset its contents to the defaults. */
static void
prepare_function_start ()
{
5496 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5497 current_function->can_garbage_collect = 0;
5499 init_stmt_for_function ();
5501 cse_not_expected = ! optimize;
5503 /* Caller save not needed yet. */
5504 caller_save_needed = 0;
5506 /* No stack slots have been made yet. */
5507 stack_slot_list = 0;
5509 current_function_has_nonlocal_label = 0;
5510 current_function_has_nonlocal_goto = 0;
5512 /* There is no stack slot for handling nonlocal gotos. */
5513 nonlocal_goto_handler_slots = 0;
5514 nonlocal_goto_stack_level = 0;
5516 /* No labels have been declared for nonlocal use. */
5517 nonlocal_labels = 0;
5518 nonlocal_goto_handler_labels = 0;
5520 /* No function calls so far in this function. */
5521 function_call_count = 0;
5523 /* No parm regs have been allocated.
5524 (This is important for output_inline_function.) */
5525 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  /* Initialize the RTL mechanism.  */
  init_emit ();
5530 /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;
5537 init_varasm_status (current_function);
5539 /* Clear out data used for inlining. */
5540 current_function->inlinable = 0;
5541 current_function->original_decl_initial = 0;
5542 current_function->original_arg_vector = 0;
5544 /* Set if a call to setjmp is seen. */
5545 current_function_calls_setjmp = 0;
5547 /* Set if a call to longjmp is seen. */
5548 current_function_calls_longjmp = 0;
5550 current_function_calls_alloca = 0;
5551 current_function_contains_functions = 0;
5552 current_function_is_leaf = 0;
5553 current_function_sp_is_unchanging = 0;
5554 current_function_uses_only_leaf_regs = 0;
5555 current_function_has_computed_jump = 0;
5556 current_function_is_thunk = 0;
5558 current_function_returns_pcc_struct = 0;
5559 current_function_returns_struct = 0;
5560 current_function_epilogue_delay_list = 0;
5561 current_function_uses_const_pool = 0;
5562 current_function_uses_pic_offset_table = 0;
5563 current_function_cannot_inline = 0;
5565 /* We have not yet needed to make a label to jump to for tail-recursion. */
5566 tail_recursion_label = 0;
5568 /* We haven't had a need to make a save area for ap yet. */
5569 arg_pointer_save_area = 0;
  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();
5583 /* Indicate that we need to distinguish between the return value of the
5584 present function and the return value of a function being called. */
5585 rtx_equal_function_value_matters = 1;
5587 /* Indicate that we have not instantiated virtual registers yet. */
5588 virtuals_instantiated = 0;
5590 /* Indicate we have no need of a frame pointer yet. */
5591 frame_pointer_needed = 0;
5593 /* By default assume not varargs or stdarg. */
5594 current_function_varargs = 0;
5595 current_function_stdarg = 0;
5597 /* We haven't made any trampolines for this function yet. */
5598 trampoline_list = 0;
5600 init_pending_stack_adjust ();
5601 inhibit_defer_pop = 0;
5603 current_function_outgoing_args_size = 0;
5605 if (init_machine_status)
    (*init_machine_status) (current_function);
}
5609 /* Initialize the rtl expansion mechanism so that we can do simple things
5610 like generate sequences. This is used to provide a context during global
5611 initialization of some passes. */
void
init_dummy_function_start ()
{
  prepare_function_start ();
}
5618 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  prepare_function_start ();
5630 /* Remember this function for later. */
5631 current_function->next_global = all_functions;
5632 all_functions = current_function;
5634 current_function_name = (*decl_printable_name) (subr, 2);
5636 /* Nonzero if this is a nested function that uses a static chain. */
5638 current_function_needs_context
5639 = (decl_function_context (current_function_decl) != 0
5640 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
  /* Within function body, compute a type's size as soon as it is laid out.  */
5643 immediate_size_expand++;
5645 /* Prevent ever trying to delete the first instruction of a function.
5646 Also tell final how to output a linenum before the function prologue.
5647 Note linenums could be missing, e.g. when compiling a Java .class file. */
5649 emit_line_note (filename, line);
5651 /* Make sure first insn is a note even if we don't want linenums.
5652 This makes sure the first insn will never be deleted.
5653 Also, final expects a note to appear there. */
5654 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5656 /* Set flags used by final.c. */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }
5665 /* Warn if this value is an aggregate type,
5666 regardless of which calling convention we are using for it. */
5667 if (warn_aggregate_return
5668 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5669 warning ("function returns an aggregate");
5671 current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation ()
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  prologue = epilogue = 0;
}
5685 /* Indicate that the current function uses extra args
5686 not explicitly mentioned in the argument list in any fashion. */
void
mark_varargs ()
{
  current_function_varargs = 1;
}
5694 /* Expand a call to __main at the beginning of a possible main function. */
5696 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5697 #undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
5710 extern struct obstack permanent_obstack;
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
5714 SUBR is the FUNCTION_DECL node.
5715 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5716 the function's parameters, which must be run at any return statement. */
void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;
5727 /* Make sure volatile mem refs aren't considered
5728 valid operands of arithmetic insns. */
5729 init_recog_no_volatile ();
5731 /* Set this before generating any memory accesses. */
5732 current_function_check_memory_usage
5733 = (flag_check_memory_usage
5734 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5736 current_function_instrument_entry_exit
5737 = (flag_instrument_function_entry_exit
5738 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5740 /* If function gets a static chain arg, store it in the stack frame.
5741 Do this first, so it gets the first stack slot offset. */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5746 /* Delay copying static chain if it is not a register to avoid
5747 conflicts with regs used for parameters. */
5748 if (! SMALL_REGISTER_CLASSES
5749 || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }
5753 /* If the parameters of this function need cleaning up, get a label
5754 for the beginning of the code which executes those cleanups. This must
5755 be done before doing anything with return_label. */
5756 if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;
5761 /* Make the label for return statements to jump to, if this machine
5762 does not have a one-instruction return and uses an epilogue,
5763 or if it returns a structure, or if it has parm cleanups. */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
5766 && ! current_function_instrument_entry_exit
5767 && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
5776 /* Initialize rtx used to return the value. */
5777 /* Do this before assign_parms so that we copy the struct value address
5778 before any library calls that assign parms might generate. */
5780 /* Decide whether to return the value in memory or in a register. */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT
						  (subr))));
	}
    }
5814 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5815 /* If return mode is void, this decl rtl should not be used. */
5816 DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
5820 compute the return values into a pseudo reg,
5821 which we will copy into the true return register
5822 after the cleanups are done. */
5824 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5826 #ifdef PROMOTE_FUNCTION_RETURN
5827 tree type = TREE_TYPE (DECL_RESULT (subr));
5828 int unsignedp = TREE_UNSIGNED (type);
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif
5846 /* Mark this reg as the function's return value. */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5850 /* Needed because we may need to move this to memory
5851 in case it's a named return value whose address is taken. */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }
5856 /* Initialize rtx for parameters and local variables.
5857 In some cases this requires emitting insns. */
5859 assign_parms (subr, 0);
5861 /* Copy the static chain now if it wasn't a register. The delay is to
5862 avoid conflicts with the parameter passing registers. */
5864 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5865 if (GET_CODE (static_chain_incoming_rtx) != REG)
5866 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5868 /* The following was moved from init_function_start.
5869 The move is supposed to make sdb output more accurate. */
5870 /* Indicate the beginning of the function body,
5871 as opposed to parm setup. */
5872 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5874 /* If doing stupid allocation, mark parms as born here. */
5876 if (GET_CODE (get_last_insn ()) != NOTE)
5877 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5878 parm_birth_insn = get_last_insn ();
  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5883 use_variable (regno_reg_rtx[i]);
5885 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }
5889 context_display = 0;
  if (current_function_needs_context)
    {
5892 /* Fetch static chain values for containing functions. */
5893 tem = decl_function_context (current_function_decl);
5894 /* If not doing stupid register allocation copy the static chain
5895 pointer into a pseudo. If we have small register classes, copy
5896 the value from memory if static_chain_incoming_rtx is a REG. If
5897 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
5901 /* If the static chain originally came in a register, put it back
5902 there, then move it out in the next insn. The reason for
5903 this peculiar code is to satisfy function integration. */
5904 if (SMALL_REGISTER_CLASSES
5905 && GET_CODE (static_chain_incoming_rtx) == REG)
5906 emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);
5914 RTL_EXPR_RTL (rtlexp) = last_ptr;
5915 context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
5919 /* Chain thru stack frames, assuming pointer to next lexical frame
5920 is found at the place we always store it. */
5921 #ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
5924 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5925 memory_address (Pmode, last_ptr)));
5927 /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
5950 /* After the display initializations is where the tail-recursion label
5951 should go, if we end up needing one. Ensure we have a NOTE here
5952 since some things (like trampolines) get placed before this. */
5953 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5955 /* Evaluate now the sizes of any types declared among the arguments. */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }
5965 /* Make sure there is a line number after the function entry setup code. */
  force_next_line_note ();
}
5969 /* Undo the effects of init_dummy_function_start. */
void
expand_dummy_function_end ()
{
5973 /* End any sequences that failed to be closed due to syntax errors. */
  while (in_sequence_p ())
    end_sequence ();
5977 /* Outside function body, can't compute type's actual size
5978 until next function's body starts. */
  current_function = 0;
}
5982 /* Generate RTL for the end of the current function.
5983 FILENAME and LINE are the current position in the source file.
5985 It is up to language-specific callers to do cleanups for parameters--
5986 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif
6001 finish_expr_for_function ();
6003 #ifdef NON_SAVING_SETJMP
6004 /* Don't put any variables in registers if we call setjmp
6005 on a machine that fails to restore the registers. */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
6015 /* Save the argument pointer if a save area was made for it. */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
6029 /* Initialize any trampolines required by this function. */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
6032 tree function = TREE_PURPOSE (link);
6033 rtx context = lookup_static_chain (function);
6034 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;
6040 #ifdef TRAMPOLINE_TEMPLATE
6041 /* First make sure this compilation has a template for
6042 initializing trampolines. */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif
      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
6055 #ifdef TRAMPOLINE_TEMPLATE
6056 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6057 emit_block_move (blktramp, initial_trampoline,
6058 GEN_INT (TRAMPOLINE_SIZE),
6059 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6061 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6065 /* Put those insns at entry to the containing function (this one). */
6066 emit_insns_before (seq, tail_recursion_reentry);
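  /* Illustration (a hypothetical layout, not any particular target): a
     trampoline is a small block of code built at run time; the template
     bytes are copied in by emit_block_move above, and
     INITIALIZE_TRAMPOLINE then patches in the operands, conceptually

         tramp+0:  machine code copied from TRAMPOLINE_TEMPLATE
         tramp+N:  static chain value for FUNCTION
         tramp+M:  address of FUNCTION

     so that calling TRAMP loads the static chain register and jumps to
     FUNCTION.  The offsets N and M are made up here; each target's
     INITIALIZE_TRAMPOLINE defines the real layout.  */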
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = gen_sequence ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
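  /* Sketch of what the probe achieves (the real insns come from
     probe_stack_range; the loop below is only a model, and `interval'
     and `touch' are made-up names):

         for (off = 0; off < STACK_CHECK_MAX_FRAME_SIZE; off += interval)
           touch (stack_pointer + STACK_CHECK_PROTECT + off);

     i.e. we touch the guard area far enough beyond the stack pointer
     that a callee's frame of any reasonable size will fault here, at
     function entry, rather than silently overrun the stack.  */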
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }
  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);
        emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a temporary
         pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
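  /* Illustration on a hypothetical target whose FUNCTION_VALUE for int
     is hard reg 0: for

         int f () { int t = g (); return t; }

     the body computes the value into a pseudo, and the code above then
     emits something like

         (set (reg:SI 0) (reg:SI 105))   ; pseudo 105 held the result
         (use (reg:SI 0))                ; keep the hard reg live to exit

     The USE keeps flow analysis from deleting the copy as dead.  */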
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
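/* Example: for a prologue emitted as a three-insn SEQUENCE whose insns
   have (made-up) uids 40, 41 and 42, record_insns returns the
   zero-terminated vector

       { 40, 41, 42, 0 }

   contains (), below, depends on that trailing zero to terminate its
   scans.  */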
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
          inserted = 1;
        }
      else
        emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
         but via mark_regs_live_at_end, we could use insert_insn_on_edge
         and all of this ugliness would go away.  */

      if (optimize)
        {
          /* If the exit block has no non-fake predecessors, we don't
             need an epilogue.  Furthermore, only pay attention to the
             fallthru predecessors; if (conditional) return insns were
             generated, by definition we do not need to emit epilogue
             insns.  */

          for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
            if ((e->flags & EDGE_FAKE) == 0
                && (e->flags & EDGE_FALLTHRU) != 0)
              break;
          if (e == NULL)
            goto epilogue_done;

          /* We can't handle multiple epilogues -- if one is needed,
             we won't be able to place it multiple times.

             ??? Fix epilogue expanders to not assume they are the
             last thing done compiling the function.  Either that
             or copy_rtx each insn.

             ??? Blah, it's not a simple expression to assert that
             we've exactly one fallthru exit edge.  */

          bb = e->src;
          tail = bb->end;

          /* ??? If the last insn of the basic block is a jump, then we
             are creating a new basic block.  Wimp out and leave these
             insns outside any block.  */
          if (GET_CODE (tail) == JUMP_INSN)
            bb = 0;
        }
      {
        rtx prev, seq, first_use;

        /* Move the USE insns at the end of a function onto a list.  */
        prev = tail;
        if (GET_CODE (prev) == BARRIER
            || GET_CODE (prev) == NOTE)
          prev = prev_nonnote_insn (prev);

        first_use = 0;
        if (prev
            && GET_CODE (prev) == INSN
            && GET_CODE (PATTERN (prev)) == USE)
          {
            /* If the end of the block is the use, grab hold of something
               else so that we emit barriers etc in the right place.  */
            if (prev == tail)
              {
                do
                  tail = PREV_INSN (tail);
                while (GET_CODE (tail) == INSN
                       && GET_CODE (PATTERN (tail)) == USE);
              }

            do
              {
                rtx use = prev;
                prev = prev_nonnote_insn (prev);

                remove_insn (use);
                if (first_use)
                  {
                    NEXT_INSN (use) = first_use;
                    PREV_INSN (first_use) = use;
                  }
                else
                  NEXT_INSN (use) = NULL_RTX;
                first_use = use;
              }
            while (prev
                   && GET_CODE (prev) == INSN
                   && GET_CODE (PATTERN (prev)) == USE);
          }
        /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
           epilogue insns, the USE insns at the end of a function,
           the jump insn that returns, and then a BARRIER.  */

        if (GET_CODE (tail) != BARRIER)
          {
            prev = next_nonnote_insn (tail);
            if (!prev || GET_CODE (prev) != BARRIER)
              emit_barrier_after (tail);
          }

        seq = gen_epilogue ();
        prev = tail;
        tail = emit_jump_insn_after (seq, tail);

        /* Insert the USE insns immediately before the return insn, which
           must be the last instruction emitted in the sequence.  */
        if (first_use)
          emit_insns_before (first_use, tail);
        emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

        /* Update the tail of the basic block.  */
        if (bb)
          bb->end = tail;

        /* Retain a map of the epilogue insns.  */
        epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }
      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks-1) == insn)
                    BLOCK_HEAD (n_basic_blocks-1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
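/* Worked example of the repositioning above (insn uids are made up):
   scheduling can interleave body insns with the prologue, leaving the
   note too early in the stream:

       before:  insn 40 [prologue]  NOTE_INSN_PROLOGUE_END  insn 50  insn 41 [prologue]
       after:   insn 40 [prologue]  insn 50  insn 41 [prologue]  NOTE_INSN_PROLOGUE_END

   The scan counts off the recorded prologue uids with contains () to
   find the last prologue insn (41 here) and splices the note back in
   just after it; the epilogue-begin note is handled symmetrically,
   scanning backward from the end of the function.  */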