/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
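
/* An illustrative sketch (not taken from the code below): for

	int i = 0;
	int *p = &i;

   the variable `i' may first be given a pseudo-register; when the `&i'
   is seen, put_var_into_stack moves it to a stack slot and rewrites the
   insns already emitted for it so that they reference the slot.  */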
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
/* Some systems use __main in a way incompatible with its use in gcc; in those
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */

#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
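
/* For example, with ALIGN == 8:

	FLOOR_ROUND (13, 8)  == (13 & ~7)        == 8
	FLOOR_ROUND (-13, 8) == (-13 & ~7)       == -16
	CEIL_ROUND (13, 8)   == ((13 + 7) & ~7)  == 16

   For negative values the bitwise form still rounds toward minus
   infinity, which is what the frame-layout code below relies on.  */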
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
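
/* A sketch of the ({...}) case described above (illustrative only):

	x = ({ struct S s = f (); s.member; });

   The call to f may return its aggregate result in a stack temporary.
   Because the statement expression's value may live in that temporary,
   the slot is preserved by treating it as if it had been allocated at
   the previous (outer) nesting level.  */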
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
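
/* Why matching matters (a hypothetical pattern, for illustration): if
   VAR occurs twice in one insn because the machine description uses a
   MATCH_DUP, e.g. a read-modify-write such as

	(set (mem:SI X) (plus:SI (mem:SI X) (const_int 1)))

   then both occurrences must be rewritten with the same replacement rtx,
   or the insn will no longer match its pattern.  find_fixup_replacement
   keeps the mapping so each VAR gets exactly one replacement.  */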
struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
					    int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int, int, int,
				      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
				  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
				    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
				     struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->decl = current_function_decl;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p, context);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  current_function = p;
  outer_function_chain = p->next;

  current_function_contains_functions
    |= p->inline_obstacks || context == current_function_decl;
  current_function_decl = p->decl;

  restore_tree_status (p, context);
  restore_emit_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
		    queue->unsignedp, 0);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */
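
/* Usage sketch: a word-sized slot aligned according to its mode,

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   yields a MEM whose address is based on virtual_stack_vars_rtx until
   virtual register instantiation rewrites it to be frame-pointer
   relative.  */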
rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  addr = plus_constant (virtual_stack_vars_rtx,
			function->x_frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
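
/* Illustrative sketch (not from the original sources): a short-lived
   temporary holding a value for a single statement would be allocated
   with KEEP == 0,

	rtx t = assign_stack_temp_for_type (BLKmode, size, 0, type);

   while a compiler-generated variable for a block would pass KEEP == 1
   so that free_temp_slots leaves it alone until its level is popped.  */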
static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
	align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align) : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
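
/* The adjacency test used below, in sketch form: two free BLKmode slots
   P and Q are merged when one ends exactly where the other begins,

	p->base_offset + p->full_size == q->base_offset

   (Q follows P), or the symmetric condition with P and Q exchanged;
   the surviving slot simply grows by the other slot's full_size.  */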
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in use.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;

	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }

	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }

      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   will never be upgraded.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
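
/* Illustration: in

	r = ({ ... some statement yielding a value in memory ... });

   the value may be sitting in a temporary that would normally be freed
   at the end of the statement; preserve_temp_slots moves such slots up
   one nesting level so the assignment to `r' can still read them.  */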
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worth it.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
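
/* Typical save/restore pattern, as a sketch:

	int saved = get_target_temp_slot_level ();
	... emit code that pushes and pops temp levels ...
	set_target_temp_slot_level (saved);
*/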
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->x_max_parm_reg)
	new = function->x_parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */
static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */
static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register const char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = gen_sequence ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */
		  start_sequence ();
		  force_operand (sub, y);
		  seq = gen_sequence ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;
    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);
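		  /* Worked example (illustrative): IS_MODE SImode (4 bytes),
		     WANTED_MODE QImode (1 byte), POS == 25.  Then OFFSET is
		     25/8 == 3; when BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN it
		     becomes 4 - 1 - 3 == 0; and POS %= 8 leaves POS == 1,
		     the bit position within the selected byte.  */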
		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_COPY_ATTRIBUTES (newmem, tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;
    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;
    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}
1995 rtx dest = SET_DEST (x);
1996 rtx src = SET_SRC (x);
1998 rtx outerdest = dest;
2001 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2002 || GET_CODE (dest) == SIGN_EXTRACT
2003 || GET_CODE (dest) == ZERO_EXTRACT)
2004 dest = XEXP (dest, 0);
2006 if (GET_CODE (src) == SUBREG)
2007 src = XEXP (src, 0);
2009 /* If VAR does not appear at the top level of the SET
2010 just scan the lower levels of the tree. */
2012 if (src != var && dest != var)
2015 /* We will need to rerecognize this insn. */
2016 INSN_CODE (insn) = -1;
2019 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2021 /* Since this case will return, ensure we fixup all the
2023 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2024 insn, replacements);
2025 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2026 insn, replacements);
2027 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2028 insn, replacements);
2030 tem = XEXP (outerdest, 0);
2032 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2033 that may appear inside a ZERO_EXTRACT.
2034 This was legitimate when the MEM was a REG. */
2035 if (GET_CODE (tem) == SUBREG
2036 && SUBREG_REG (tem) == var)
2037 tem = fixup_memory_subreg (tem, insn, 0);
2039 tem = fixup_stack_1 (tem, insn);
2041 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2042 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2043 && ! mode_dependent_address_p (XEXP (tem, 0))
2044 && ! MEM_VOLATILE_P (tem))
2046 enum machine_mode wanted_mode;
2047 enum machine_mode is_mode = GET_MODE (tem);
2048 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2050 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2051 if (wanted_mode == VOIDmode)
2052 wanted_mode = word_mode;
2054 /* If we have a narrower mode, we can do something. */
2055 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2057 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2058 rtx old_pos = XEXP (outerdest, 2);
2061 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2062 offset = (GET_MODE_SIZE (is_mode)
2063 - GET_MODE_SIZE (wanted_mode) - offset);
2065 pos %= GET_MODE_BITSIZE (wanted_mode);
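/* For example, on a little-endian target (BYTES_BIG_ENDIAN ==
   BITS_BIG_ENDIAN) with is_mode SImode, wanted_mode QImode and bit
   position 25: OFFSET becomes 3 bytes and POS is reduced to 1, so the
   insertion now targets bit 1 of the byte at address + 3. */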
2067 newmem = gen_rtx_MEM (wanted_mode,
2068 plus_constant (XEXP (tem, 0), offset));
2069 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2070 MEM_COPY_ATTRIBUTES (newmem, tem);
2072 /* Make the change and see if the insn remains valid. */
2073 INSN_CODE (insn) = -1;
2074 XEXP (outerdest, 0) = newmem;
2075 XEXP (outerdest, 2) = GEN_INT (pos);
2077 if (recog_memoized (insn) >= 0)
2080 /* Otherwise, restore old position. XEXP (x, 0) will be restored later. */
2082 XEXP (outerdest, 2) = old_pos;
2086 /* If we get here, the bit-field store doesn't allow memory
2087 or isn't located at a constant position. Load the value into
2088 a register, do the store, and put it back into memory. */
2090 tem1 = gen_reg_rtx (GET_MODE (tem));
2091 emit_insn_before (gen_move_insn (tem1, tem), insn);
2092 emit_insn_after (gen_move_insn (tem, tem1), insn);
2093 XEXP (outerdest, 0) = tem1;
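/* That is, the bit-field store now operates on the pseudo TEM1,
   bracketed by a load of TEM1 from the memory word before INSN and a
   store of TEM1 back to memory after it. */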
2098 /* STRICT_LOW_PART is a no-op on memory references
2099 and it can cause combinations to be unrecognizable, so eliminate it. */
2102 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2103 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2105 /* A valid insn to copy VAR into or out of a register
2106 must be left alone, to avoid an infinite loop here.
2107 If the reference to VAR is by a subreg, fix that up,
2108 since SUBREG is not valid for a memref.
2109 Also fix up the address of the stack slot.
2111 Note that we must not try to recognize the insn until
2112 after we know that we have valid addresses and no
2113 (subreg (mem ...) ...) constructs, since these interfere
2114 with determining the validity of the insn. */
2116 if ((SET_SRC (x) == var
2117 || (GET_CODE (SET_SRC (x)) == SUBREG
2118 && SUBREG_REG (SET_SRC (x)) == var))
2119 && (GET_CODE (SET_DEST (x)) == REG
2120 || (GET_CODE (SET_DEST (x)) == SUBREG
2121 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2122 && GET_MODE (var) == promoted_mode
2123 && x == single_set (insn))
2127 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2128 if (replacement->new)
2129 SET_SRC (x) = replacement->new;
2130 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2131 SET_SRC (x) = replacement->new
2132 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2134 SET_SRC (x) = replacement->new
2135 = fixup_stack_1 (SET_SRC (x), insn);
2137 if (recog_memoized (insn) >= 0)
2140 /* INSN is not valid, but we know that we want to
2141 copy SET_SRC (x) to SET_DEST (x) in some way. So
2142 we generate the move and see whether it requires more
2143 than one insn. If it does, we emit those insns and
2144 delete INSN. Otherwise, we can just replace the pattern
2145 of INSN; we have already verified above that INSN has
2146 no other function than to do X. */
2148 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2149 if (GET_CODE (pat) == SEQUENCE)
2151 emit_insn_after (pat, insn);
2152 PUT_CODE (insn, NOTE);
2153 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2154 NOTE_SOURCE_FILE (insn) = 0;
2157 PATTERN (insn) = pat;
2162 if ((SET_DEST (x) == var
2163 || (GET_CODE (SET_DEST (x)) == SUBREG
2164 && SUBREG_REG (SET_DEST (x)) == var))
2165 && (GET_CODE (SET_SRC (x)) == REG
2166 || (GET_CODE (SET_SRC (x)) == SUBREG
2167 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2168 && GET_MODE (var) == promoted_mode
2169 && x == single_set (insn))
2173 if (GET_CODE (SET_DEST (x)) == SUBREG)
2174 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2176 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2178 if (recog_memoized (insn) >= 0)
2181 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2182 if (GET_CODE (pat) == SEQUENCE)
2184 emit_insn_after (pat, insn);
2185 PUT_CODE (insn, NOTE);
2186 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2187 NOTE_SOURCE_FILE (insn) = 0;
2190 PATTERN (insn) = pat;
2195 /* Otherwise, storing into VAR must be handled specially
2196 by storing into a temporary and copying that into VAR
2197 with a new insn after this one. Note that this case
2198 will be used when storing into a promoted scalar since
2199 the insn will now have different modes on the input
2200 and output and hence will be invalid (except for the case
2201 of setting it to a constant, which does not need any
2202 change if it is valid). We generate extra code in that case,
2203 but combine.c will eliminate it. */
2208 rtx fixeddest = SET_DEST (x);
2210 /* STRICT_LOW_PART can be discarded around a MEM. */
2211 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2212 fixeddest = XEXP (fixeddest, 0);
2213 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2214 if (GET_CODE (fixeddest) == SUBREG)
2216 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2217 promoted_mode = GET_MODE (fixeddest);
2220 fixeddest = fixup_stack_1 (fixeddest, insn);
2222 temp = gen_reg_rtx (promoted_mode);
2224 emit_insn_after (gen_move_insn (fixeddest,
2225 gen_lowpart (GET_MODE (fixeddest),
2229 SET_DEST (x) = temp;
2237 /* Nothing special about this RTX; fix its operands. */
2239 fmt = GET_RTX_FORMAT (code);
2240 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2243 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2247 for (j = 0; j < XVECLEN (x, i); j++)
2248 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2249 insn, replacements);
2254 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2255 return an rtx (MEM:m1 newaddr) which is equivalent.
2256 If any insns must be emitted to compute NEWADDR, put them before INSN.
2258 UNCRITICAL nonzero means accept paradoxical subregs.
2259 This is used for subregs found inside REG_NOTES. */
2262 fixup_memory_subreg (x, insn, uncritical)
2267 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2268 rtx addr = XEXP (SUBREG_REG (x), 0);
2269 enum machine_mode mode = GET_MODE (x);
2272 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2273 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2277 if (BYTES_BIG_ENDIAN)
2278 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2279 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
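/* For instance, on a 32-bit big-endian target,
   (subreg:QI (mem:SI ADDR) 0) names the most significant byte, so the
   byte offset computed here is 4 - 1 = 3 rather than 0. */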
2280 addr = plus_constant (addr, offset);
2281 if (!flag_force_addr && memory_address_p (mode, addr))
2282 /* Shortcut if no insns need be emitted. */
2283 return change_address (SUBREG_REG (x), mode, addr);
2285 result = change_address (SUBREG_REG (x), mode, addr);
2286 emit_insn_before (gen_sequence (), insn);
2291 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2292 Replace subexpressions of X in place.
2293 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2294 Otherwise return X, with its contents possibly altered.
2296 If any insns must be emitted to compute NEWADDR, put them before INSN.
2298 UNCRITICAL is as in fixup_memory_subreg. */
2301 walk_fixup_memory_subreg (x, insn, uncritical)
2306 register enum rtx_code code;
2307 register const char *fmt;
2313 code = GET_CODE (x);
2315 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2316 return fixup_memory_subreg (x, insn, uncritical);
2318 /* Nothing special about this RTX; fix its operands. */
2320 fmt = GET_RTX_FORMAT (code);
2321 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2324 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2328 for (j = 0; j < XVECLEN (x, i); j++)
2330 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2336 /* For each memory ref within X, if it refers to a stack slot
2337 with an out of range displacement, put the address in a temp register
2338 (emitting new insns before INSN to load these registers)
2339 and alter the memory ref to use that register.
2340 Replace each such MEM rtx with a copy, to avoid clobberage. */
2343 fixup_stack_1 (x, insn)
2348 register RTX_CODE code = GET_CODE (x);
2349 register const char *fmt;
2353 register rtx ad = XEXP (x, 0);
2354 /* If we have address of a stack slot but it's not valid
2355 (displacement is too large), compute the sum in a register. */
2356 if (GET_CODE (ad) == PLUS
2357 && GET_CODE (XEXP (ad, 0)) == REG
2358 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2359 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2360 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2361 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2362 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2364 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2365 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2366 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2367 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2370 if (memory_address_p (GET_MODE (x), ad))
2374 temp = copy_to_reg (ad);
2375 seq = gen_sequence ();
2377 emit_insn_before (seq, insn);
2378 return change_address (x, VOIDmode, temp);
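/* E.g. for (mem:SI (plus (reg fp) (const_int 40000))) on a machine
   whose insns accept only small displacements, the code above loads
   fp + 40000 into a pseudo before INSN and re-addresses the MEM
   through that pseudo. */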
2383 fmt = GET_RTX_FORMAT (code);
2384 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2387 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2391 for (j = 0; j < XVECLEN (x, i); j++)
2392 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2398 /* Optimization: a bit-field instruction whose field
2399 happens to be a byte or halfword in memory
2400 can be changed to a move instruction.
2402 We call here when INSN is an insn to examine or store into a bit-field.
2403 BODY is the SET-rtx to be altered.
2405 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2406 (Currently this is called only from function.c, and EQUIV_MEM is always 0.) */
2410 optimize_bit_field (body, insn, equiv_mem)
2415 register rtx bitfield;
2418 enum machine_mode mode;
2420 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2421 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2422 bitfield = SET_DEST (body), destflag = 1;
2424 bitfield = SET_SRC (body), destflag = 0;
2426 /* First check that the field being stored has constant size and position
2427 and is in fact a byte or halfword suitably aligned. */
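/* E.g. on a little-endian target,
   (set (zero_extract:SI (mem:SI A) (const_int 8) (const_int 8))
        (reg:SI R))
   can become a plain QImode move into the byte at A + 1; the code
   below attempts rewrites of this kind. */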
2429 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2430 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2431 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2433 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2435 register rtx memref = 0;
2437 /* Now check that the containing word is memory, not a register,
2438 and that it is safe to change the machine mode. */
2440 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2441 memref = XEXP (bitfield, 0);
2442 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2444 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2445 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2446 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2447 memref = SUBREG_REG (XEXP (bitfield, 0));
2448 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2450 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2451 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2454 && ! mode_dependent_address_p (XEXP (memref, 0))
2455 && ! MEM_VOLATILE_P (memref))
2457 /* Now adjust the address, first for any subreg'ing
2458 that we are now getting rid of,
2459 and then for which byte of the word is wanted. */
2461 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2464 /* Adjust OFFSET to count bits from low-address byte. */
2465 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2466 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2467 - offset - INTVAL (XEXP (bitfield, 1)));
2469 /* Adjust OFFSET to count bytes from low-address byte. */
2470 offset /= BITS_PER_UNIT;
2471 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2473 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2474 if (BYTES_BIG_ENDIAN)
2475 offset -= (MIN (UNITS_PER_WORD,
2476 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2477 - MIN (UNITS_PER_WORD,
2478 GET_MODE_SIZE (GET_MODE (memref))));
2482 memref = change_address (memref, mode,
2483 plus_constant (XEXP (memref, 0), offset));
2484 insns = get_insns ();
2486 emit_insns_before (insns, insn);
2488 /* Store this memory reference where
2489 we found the bit field reference. */
2493 validate_change (insn, &SET_DEST (body), memref, 1);
2494 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2496 rtx src = SET_SRC (body);
2497 while (GET_CODE (src) == SUBREG
2498 && SUBREG_WORD (src) == 0)
2499 src = SUBREG_REG (src);
2500 if (GET_MODE (src) != GET_MODE (memref))
2501 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2502 validate_change (insn, &SET_SRC (body), src, 1);
2504 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2505 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2506 /* This shouldn't happen because anything that didn't have
2507 one of these modes should have been converted explicitly
2508 and then referenced through a subreg.
2509 This is so because the original bit-field was
2510 handled by agg_mode and so its tree structure had
2511 the same mode that memref now has. */
2516 rtx dest = SET_DEST (body);
2518 while (GET_CODE (dest) == SUBREG
2519 && SUBREG_WORD (dest) == 0
2520 && (GET_MODE_CLASS (GET_MODE (dest))
2521 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2522 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2524 dest = SUBREG_REG (dest);
2526 validate_change (insn, &SET_DEST (body), dest, 1);
2528 if (GET_MODE (dest) == GET_MODE (memref))
2529 validate_change (insn, &SET_SRC (body), memref, 1);
2532 /* Convert the mem ref to the destination mode. */
2533 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2536 convert_move (newreg, memref,
2537 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2541 validate_change (insn, &SET_SRC (body), newreg, 1);
2545 /* See if we can convert this extraction or insertion into
2546 a simple move insn. We might not be able to do so if this
2547 was, for example, part of a PARALLEL.
2549 If we succeed, write out any needed conversions. If we fail,
2550 it is hard to guess why we failed, so don't do anything
2551 special; just let the optimization be suppressed. */
2553 if (apply_change_group () && seq)
2554 emit_insns_before (seq, insn);
2559 /* These routines are responsible for converting virtual register references
2560 to the actual hard register references once RTL generation is complete.
2562 The following five variables are used for communication between the
2563 routines. They contain the offsets of the virtual registers from their
2564 respective hard registers. */
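/* For example, once these offsets are known, a reference such as
   (plus:SI virtual_stack_vars_rtx (const_int 8)) is rewritten by
   instantiate_virtual_regs_1 below as
   (plus:SI frame_pointer_rtx (const_int 8 + var_offset)). */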
2566 static int in_arg_offset;
2567 static int var_offset;
2568 static int dynamic_offset;
2569 static int out_arg_offset;
2570 static int cfa_offset;
2572 /* On most machines, the stack pointer register is equivalent to the bottom of the stack. */
2575 #ifndef STACK_POINTER_OFFSET
2576 #define STACK_POINTER_OFFSET 0
2579 /* If not defined, pick an appropriate default for the offset of dynamically
2580 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2581 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2583 #ifndef STACK_DYNAMIC_OFFSET
2585 #ifdef ACCUMULATE_OUTGOING_ARGS
2586 /* The bottom of the stack points to the actual arguments. If
2587 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2588 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2589 stack space for register parameters is not pushed by the caller, but
2590 rather part of the fixed stack areas and hence not included in
2591 `current_function_outgoing_args_size'. Nevertheless, we must allow
2592 for it when allocating stack dynamic objects. */
2594 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2595 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2596 (current_function_outgoing_args_size \
2597 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2600 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2601 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2605 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2609 /* On a few machines, the CFA coincides with the arg pointer. */
2611 #ifndef ARG_POINTER_CFA_OFFSET
2612 #define ARG_POINTER_CFA_OFFSET 0
2616 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2617 its address taken. DECL is the decl for the object stored in the
2618 register, for later use if we do need to force REG into the stack.
2619 REG is overwritten by the MEM like in put_reg_into_stack. */
2622 gen_mem_addressof (reg, decl)
2626 tree type = TREE_TYPE (decl);
2627 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2629 /* If the original REG was a user-variable, then so is the REG whose
2630 address is being taken. */
2631 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2634 PUT_CODE (reg, MEM);
2635 PUT_MODE (reg, DECL_MODE (decl));
2636 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2637 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2638 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2640 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2641 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
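/* Roughly: a pseudo (reg:m N) holding DECL is rewritten in place into
   (mem:m' (addressof:Pmode (reg:m N') N DECL)), where N' is a fresh
   pseudo in the old register's mode and m' is DECL's mode; the
   ADDRESSOF is eliminated again later by purge_addressof. */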
2646 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2649 flush_addressof (decl)
2652 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2653 && DECL_RTL (decl) != 0
2654 && GET_CODE (DECL_RTL (decl)) == MEM
2655 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2656 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2657 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2660 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2663 put_addressof_into_stack (r, ht)
2665 struct hash_table *ht;
2667 tree decl = ADDRESSOF_DECL (r);
2668 rtx reg = XEXP (r, 0);
2670 if (GET_CODE (reg) != REG)
2673 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2674 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2675 ADDRESSOF_REGNO (r),
2676 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2679 /* List of replacements made below in purge_addressof_1 when creating
2680 bitfield insertions. */
2681 static rtx purge_bitfield_addressof_replacements;
2683 /* List of replacements made below in purge_addressof_1 for patterns
2684 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2685 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2686 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2687 enough in complex cases, e.g. when some field values can be
2688 extracted by using a MEM with a narrower mode. */
2689 static rtx purge_addressof_replacements;
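/* Both lists above are chains of EXPR_LIST nodes of the form
   (expr_list KEY (expr_list VALUE rest)); see the code in
   purge_addressof_1 below that extends and searches them. */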
2691 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2692 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into the stack. */
2696 purge_addressof_1 (loc, insn, force, store, ht)
2700 struct hash_table *ht;
2707 /* Re-start here to avoid recursion in common cases. */
2714 code = GET_CODE (x);
2716 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2719 /* We must create a copy of the rtx because it was created by
2720 overwriting a REG rtx which is always shared. */
2721 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2723 if (validate_change (insn, loc, sub, 0)
2724 || validate_replace_rtx (x, sub, insn))
2728 sub = force_operand (sub, NULL_RTX);
2729 if (! validate_change (insn, loc, sub, 0)
2730 && ! validate_replace_rtx (x, sub, insn))
2733 insns = gen_sequence ();
2735 emit_insn_before (insns, insn);
2738 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2740 rtx sub = XEXP (XEXP (x, 0), 0);
2743 if (GET_CODE (sub) == MEM)
2745 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2746 MEM_COPY_ATTRIBUTES (sub2, sub);
2747 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2751 if (GET_CODE (sub) == REG
2752 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2754 put_addressof_into_stack (XEXP (x, 0), ht);
2757 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2759 int size_x, size_sub;
2763 /* When processing REG_NOTES look at the list of
2764 replacements done on the insn to find the register that X
2768 for (tem = purge_bitfield_addressof_replacements;
2770 tem = XEXP (XEXP (tem, 1), 1))
2771 if (rtx_equal_p (x, XEXP (tem, 0)))
2773 *loc = XEXP (XEXP (tem, 1), 0);
2777 /* See comment for purge_addressof_replacements. */
2778 for (tem = purge_addressof_replacements;
2780 tem = XEXP (XEXP (tem, 1), 1))
2781 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2783 rtx z = XEXP (XEXP (tem, 1), 0);
2785 if (GET_MODE (x) == GET_MODE (z)
2786 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2787 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2790 /* It can happen that the note may speak of things
2791 in a wider (or just different) mode than the
2792 code did. This is especially true of
2795 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2798 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2799 && (GET_MODE_SIZE (GET_MODE (x))
2800 > GET_MODE_SIZE (GET_MODE (z))))
2802 /* This can occur as a result of invalid
2803 pointer casts, e.g. float f; ...
2804 *(long long int *)&f.
2805 ??? We could emit a warning here, but
2806 without a line number that wouldn't be
2808 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2811 z = gen_lowpart (GET_MODE (x), z);
2817 /* There should always be such a replacement. */
2821 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2822 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2824 /* Don't even consider working with paradoxical subregs,
2825 or the moral equivalent seen here. */
2826 if (size_x <= size_sub
2827 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2829 /* Do a bitfield insertion to mirror what would happen
2836 rtx p = PREV_INSN (insn);
2839 val = gen_reg_rtx (GET_MODE (x));
2840 if (! validate_change (insn, loc, val, 0))
2842 /* Discard the current sequence and put the
2843 ADDRESSOF on stack. */
2847 seq = gen_sequence ();
2849 emit_insn_before (seq, insn);
2850 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2854 store_bit_field (sub, size_x, 0, GET_MODE (x),
2855 val, GET_MODE_SIZE (GET_MODE (sub)),
2856 GET_MODE_SIZE (GET_MODE (sub)));
2858 /* Make sure to unshare any shared rtl that store_bit_field
2859 might have created. */
2860 for (p = get_insns(); p; p = NEXT_INSN (p))
2862 reset_used_flags (PATTERN (p));
2863 reset_used_flags (REG_NOTES (p));
2864 reset_used_flags (LOG_LINKS (p));
2866 unshare_all_rtl (get_insns ());
2868 seq = gen_sequence ();
2870 p = emit_insn_after (seq, insn);
2871 if (NEXT_INSN (insn))
2872 compute_insns_for_mem (NEXT_INSN (insn),
2873 p ? NEXT_INSN (p) : NULL_RTX,
2878 rtx p = PREV_INSN (insn);
2881 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2882 GET_MODE (x), GET_MODE (x),
2883 GET_MODE_SIZE (GET_MODE (sub)),
2884 GET_MODE_SIZE (GET_MODE (sub)));
2886 if (! validate_change (insn, loc, val, 0))
2888 /* Discard the current sequence and put the
2889 ADDRESSOF on stack. */
2894 seq = gen_sequence ();
2896 emit_insn_before (seq, insn);
2897 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2901 /* Remember the replacement so that the same one can be done
2902 on the REG_NOTES. */
2903 purge_bitfield_addressof_replacements
2904 = gen_rtx_EXPR_LIST (VOIDmode, x,
2907 purge_bitfield_addressof_replacements));
2909 /* We replaced with a reg -- all done. */
2913 else if (validate_change (insn, loc, sub, 0))
2915 /* Remember the replacement so that the same one can be done
2916 on the REG_NOTES. */
2917 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2921 for (tem = purge_addressof_replacements;
2923 tem = XEXP (XEXP (tem, 1), 1))
2924 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2926 XEXP (XEXP (tem, 1), 0) = sub;
2929 purge_addressof_replacements
2930 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
2931 gen_rtx_EXPR_LIST (VOIDmode, sub,
2932 purge_addressof_replacements));
2938 /* else give up and put it into the stack */
2940 else if (code == ADDRESSOF)
2942 put_addressof_into_stack (x, ht);
2945 else if (code == SET)
2947 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2948 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2952 /* Scan all subexpressions. */
2953 fmt = GET_RTX_FORMAT (code);
2954 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2957 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
2958 else if (*fmt == 'E')
2959 for (j = 0; j < XVECLEN (x, i); j++)
2960 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
2964 /* Return a new hash table entry in HT. */
2966 static struct hash_entry *
2967 insns_for_mem_newfunc (he, ht, k)
2968 struct hash_entry *he;
2969 struct hash_table *ht;
2970 hash_table_key k ATTRIBUTE_UNUSED;
2972 struct insns_for_mem_entry *ifmhe;
2976 ifmhe = ((struct insns_for_mem_entry *)
2977 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
2978 ifmhe->insns = NULL_RTX;
2983 /* Return a hash value for K, a REG. */
2985 static unsigned long
2986 insns_for_mem_hash (k)
2989 /* K is really an RTX. Just use its address as the hash value. */
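/* Pointer identity is enough here because each pseudo REG rtx is a
   unique shared object, so equal addresses mean the same register. */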
2990 return (unsigned long) k;
2993 /* Return non-zero if K1 and K2 (two REGs) are the same. */
2996 insns_for_mem_comp (k1, k2)
3003 struct insns_for_mem_walk_info {
3004 /* The hash table that we are using to record which INSNs use which
3006 struct hash_table *ht;
3008 /* The INSN we are currently processing. */
3011 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3012 to find the insns that use the REGs in the ADDRESSOFs. */
3016 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3017 that might be used in an ADDRESSOF expression, record this INSN in
3018 the hash table given by DATA (which is really a pointer to an
3019 insns_for_mem_walk_info structure). */
3022 insns_for_mem_walk (r, data)
3026 struct insns_for_mem_walk_info *ifmwi
3027 = (struct insns_for_mem_walk_info *) data;
3029 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3030 && GET_CODE (XEXP (*r, 0)) == REG)
3031 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3032 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3034 /* Look up this REG in the hash table, creating an entry if necessary. */
3035 struct insns_for_mem_entry *ifme
3036 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3041 /* If we have not already recorded this INSN, do so now. Since
3042 we process the INSNs in order, we know that if we have
3043 recorded it, it must be at the front of the list. */
3044 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3046 /* We do the allocation on the same obstack as is used for
3047 the hash table since this memory will not be used once
3048 the hash table is deallocated. */
3049 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3050 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3059 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3060 which REGs in HT. */
3063 compute_insns_for_mem (insns, last_insn, ht)
3066 struct hash_table *ht;
3069 struct insns_for_mem_walk_info ifmwi;
3072 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3073 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3074 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3077 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3081 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3082 (MEM (ADDRESSOF)) patterns, and force any needed registers into the stack. */
3086 purge_addressof (insns)
3090 struct hash_table ht;
3092 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3093 requires a fixup pass over the instruction stream to correct
3094 INSNs that depended on the REG being a REG, and not a MEM. But,
3095 these fixup passes are slow. Furthermore, most MEMs are not
3096 mentioned in very many instructions. So, we speed up the process
3097 by pre-calculating which REGs occur in which INSNs; that allows
3098 us to perform the fixup passes much more quickly. */
3099 hash_table_init (&ht,
3100 insns_for_mem_newfunc,
3101 insns_for_mem_hash,
3102 insns_for_mem_comp);
3103 compute_insns_for_mem (insns, NULL_RTX, &ht);
3105 for (insn = insns; insn; insn = NEXT_INSN (insn))
3106 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3107 || GET_CODE (insn) == CALL_INSN)
3109 purge_addressof_1 (&PATTERN (insn), insn,
3110 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3111 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3115 hash_table_free (&ht);
3116 purge_bitfield_addressof_replacements = 0;
3117 purge_addressof_replacements = 0;
3120 /* Pass through the INSNS of function FNDECL and convert virtual register
3121 references to hard register references. */
3124 instantiate_virtual_regs (fndecl, insns)
3131 /* Compute the offsets to use for this function. */
3132 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3133 var_offset = STARTING_FRAME_OFFSET;
3134 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3135 out_arg_offset = STACK_POINTER_OFFSET;
3136 cfa_offset = ARG_POINTER_CFA_OFFSET;
3138 /* Scan all variables and parameters of this function. For each that is
3139 in memory, instantiate all virtual registers if the result is a valid
3140 address. If not, we do it later. That will handle most uses of virtual
3141 regs on many machines. */
3142 instantiate_decls (fndecl, 1);
3144 /* Initialize recognition, indicating that volatile is OK. */
3147 /* Scan through all the insns, instantiating every virtual register still present. */
3149 for (insn = insns; insn; insn = NEXT_INSN (insn))
3150 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3151 || GET_CODE (insn) == CALL_INSN)
3153 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3154 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3157 /* Instantiate the stack slots for the parm registers, for later use in
3158 addressof elimination. */
3159 for (i = 0; i < max_parm_reg; ++i)
3160 if (parm_reg_stack_loc[i])
3161 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3163 /* Now instantiate the remaining register equivalences for debugging info.
3164 These will not be valid addresses. */
3165 instantiate_decls (fndecl, 0);
3167 /* Indicate that, from now on, assign_stack_local should use
3168 frame_pointer_rtx. */
3169 virtuals_instantiated = 1;
3172 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3173 all virtual registers in their DECL_RTL's.
3175 If VALID_ONLY, do this only if the resulting address is still valid.
3176 Otherwise, always do it. */
3179 instantiate_decls (fndecl, valid_only)
3185 if (DECL_SAVED_INSNS (fndecl))
3186 /* When compiling an inline function, the obstack used for
3187 rtl allocation is the maybepermanent_obstack. Calling
3188 `resume_temporary_allocation' switches us back to that
3189 obstack while we process this function's parameters. */
3190 resume_temporary_allocation ();
3192 /* Process all parameters of the function. */
3193 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3195 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3197 instantiate_decl (DECL_RTL (decl), size, valid_only);
3199 /* If the parameter was promoted, then the incoming RTL mode may be
3200 larger than the declared type size. We must use the larger of the two sizes. */
3202 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3203 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3206 /* Now process all variables defined in the function or its subblocks. */
3207 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3209 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3211 /* Save all rtl allocated for this function by raising the
3212 high-water mark on the maybepermanent_obstack. */
3214 /* All further rtl allocation is now done in the current_obstack. */
3215 rtl_in_current_obstack ();
3219 /* Subroutine of instantiate_decls: Process all decls in the given
3220 BLOCK node and all its subblocks. */
3223 instantiate_decls_1 (let, valid_only)
3229 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3230 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3233 /* Process all subblocks. */
3234 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3235 instantiate_decls_1 (t, valid_only);
3238 /* Subroutine of the preceding procedures: Given RTL representing a
3239 decl and the size of the object, do any instantiation required.
3241 If VALID_ONLY is non-zero, it means that the RTL should only be
3242 changed if the new address is valid. */
3245 instantiate_decl (x, size, valid_only)
3250 enum machine_mode mode;
3253 /* If this is not a MEM, no need to do anything. Similarly if the
3254 address is a constant or a register that is not a virtual register. */
3256 if (x == 0 || GET_CODE (x) != MEM)
3260 if (CONSTANT_P (addr)
3261 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3262 || (GET_CODE (addr) == REG
3263 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3264 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3267 /* If we should only do this if the address is valid, copy the address.
3268 We need to do this so we can undo any changes that might make the
3269 address invalid. This copy is unfortunate, but probably can't be avoided. */
3273 addr = copy_rtx (addr);
3275 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3279 /* Now verify that the resulting address is valid for every integer or
3280 floating-point mode up to and including SIZE bytes long. We do this
3281 since the object might be accessed in any mode and frame addresses are shared. */
3284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3285 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3286 mode = GET_MODE_WIDER_MODE (mode))
3287 if (! memory_address_p (mode, addr))
3290 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3291 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3292 mode = GET_MODE_WIDER_MODE (mode))
3293 if (! memory_address_p (mode, addr))
3297 /* Put back the address now that we have updated it and we either know
3298 it is valid or we don't care whether it is valid. */
3303 /* Given a pointer to a piece of rtx and an optional pointer to the
3304 containing object, instantiate any virtual registers present in it.
3306 If EXTRA_INSNS, we always do the replacement and generate
3307 any extra insns before OBJECT. If it is zero, we do nothing if the replacement is not valid.
3310 Return 1 if we either had nothing to do or if we were able to do the
3311 needed replacement. Return 0 otherwise; we only return zero if
3312 EXTRA_INSNS is zero.
3314 We first try some simple transformations to avoid the creation of extra insns. */
3318 instantiate_virtual_regs_1 (loc, object, extra_insns)
3326 HOST_WIDE_INT offset = 0;
3332 /* Re-start here to avoid recursion in common cases. */
3339 code = GET_CODE (x);
3341 /* Check for some special cases. */
3358 /* We are allowed to set the virtual registers. This means that
3359 the actual register should receive the source minus the
3360 appropriate offset. This is used, for example, in the handling
3361 of non-local gotos. */
3362 if (SET_DEST (x) == virtual_incoming_args_rtx)
3363 new = arg_pointer_rtx, offset = - in_arg_offset;
3364 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3365 new = frame_pointer_rtx, offset = - var_offset;
3366 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3367 new = stack_pointer_rtx, offset = - dynamic_offset;
3368 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3369 new = stack_pointer_rtx, offset = - out_arg_offset;
3370 else if (SET_DEST (x) == virtual_cfa_rtx)
3371 new = arg_pointer_rtx, offset = - cfa_offset;
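/* For instance, (set virtual_stack_vars_rtx X) must become
   (set frame_pointer_rtx (X - var_offset)), computed via the
   force_operand calls below, since the virtual register is defined
   as the hard register plus the offset. */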
3375 /* The only valid sources here are PLUS or REG. Just do
3376 the simplest possible thing to handle them. */
3377 if (GET_CODE (SET_SRC (x)) != REG
3378 && GET_CODE (SET_SRC (x)) != PLUS)
3382 if (GET_CODE (SET_SRC (x)) != REG)
3383 temp = force_operand (SET_SRC (x), NULL_RTX);
3386 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3390 emit_insns_before (seq, object);
3393 if (! validate_change (object, &SET_SRC (x), temp, 0)
3400 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3405 /* Handle special case of virtual register plus constant. */
3406 if (CONSTANT_P (XEXP (x, 1)))
3408 rtx old, new_offset;
3410 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3411 if (GET_CODE (XEXP (x, 0)) == PLUS)
3413 rtx inner = XEXP (XEXP (x, 0), 0);
3415 if (inner == virtual_incoming_args_rtx)
3416 new = arg_pointer_rtx, offset = in_arg_offset;
3417 else if (inner == virtual_stack_vars_rtx)
3418 new = frame_pointer_rtx, offset = var_offset;
3419 else if (inner == virtual_stack_dynamic_rtx)
3420 new = stack_pointer_rtx, offset = dynamic_offset;
3421 else if (inner == virtual_outgoing_args_rtx)
3422 new = stack_pointer_rtx, offset = out_arg_offset;
3423 else if (inner == virtual_cfa_rtx)
3424 new = arg_pointer_rtx, offset = cfa_offset;
3431 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3433 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3436 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3437 new = arg_pointer_rtx, offset = in_arg_offset;
3438 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3439 new = frame_pointer_rtx, offset = var_offset;
3440 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3441 new = stack_pointer_rtx, offset = dynamic_offset;
3442 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3443 new = stack_pointer_rtx, offset = out_arg_offset;
3444 else if (XEXP (x, 0) == virtual_cfa_rtx)
3445 new = arg_pointer_rtx, offset = cfa_offset;
3448 /* We know the second operand is a constant. Unless the
3449 first operand is a REG (which has already been checked),
3450 it needs to be checked. */
3451 if (GET_CODE (XEXP (x, 0)) != REG)
3459 new_offset = plus_constant (XEXP (x, 1), offset);
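/* E.g. (plus:SI virtual_incoming_args_rtx (const_int 4)) becomes
   (plus:SI arg_pointer_rtx (const_int 4 + in_arg_offset)); the code
   below then checks that the combined constant still gives a valid
   insn. */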
3461 /* If the new constant is zero, try to replace the sum with just
3463 if (new_offset == const0_rtx
3464 && validate_change (object, loc, new, 0))
3467 /* Next try to replace the register and new offset.
3468 There are two changes to validate here and we can't assume that
3469 in the case where the old offset equals the new one, just changing the register
3470 will yield a valid insn. In the interests of a little efficiency,
3471 however, we only call validate change once (we don't queue up the
3472 changes and then call apply_change_group). */
3476 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3477 : (XEXP (x, 0) = new,
3478 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3486 /* Otherwise copy the new constant into a register and replace
3487 the constant with that register. */
3488 temp = gen_reg_rtx (Pmode);
3490 if (validate_change (object, &XEXP (x, 1), temp, 0))
3491 emit_insn_before (gen_move_insn (temp, new_offset), object);
3494 /* If that didn't work, replace this expression with a
3495 register containing the sum. */
3498 new = gen_rtx_PLUS (Pmode, new, new_offset);
3501 temp = force_operand (new, NULL_RTX);
3505 emit_insns_before (seq, object);
3506 if (! validate_change (object, loc, temp, 0)
3507 && ! validate_replace_rtx (x, temp, object))
3515 /* Fall through to generic two-operand expression case. */
3521 case DIV: case UDIV:
3522 case MOD: case UMOD:
3523 case AND: case IOR: case XOR:
3524 case ROTATERT: case ROTATE:
3525 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3527 case GE: case GT: case GEU: case GTU:
3528 case LE: case LT: case LEU: case LTU:
3529 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3530 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3535 /* Most cases of MEM that convert to valid addresses have already been
3536 handled by our scan of decls. The only special handling we
3537 need here is to make a copy of the rtx to ensure it isn't being
3538 shared if we have to change it to a pseudo.
3540 If the rtx is a simple reference to an address via a virtual register,
3541 it can potentially be shared. In such cases, first try to make it
3542 a valid address, which can also be shared. Otherwise, copy it and
3545 First check for common cases that need no processing. These are
3546 usually due to instantiation already being done on a previous instance of a shared rtx. */
3550 if (CONSTANT_ADDRESS_P (temp)
3551 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3552 || temp == arg_pointer_rtx
3554 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3555 || temp == hard_frame_pointer_rtx
3557 || temp == frame_pointer_rtx)
3560 if (GET_CODE (temp) == PLUS
3561 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3562 && (XEXP (temp, 0) == frame_pointer_rtx
3563 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3564 || XEXP (temp, 0) == hard_frame_pointer_rtx
3566 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3567 || XEXP (temp, 0) == arg_pointer_rtx
3572 if (temp == virtual_stack_vars_rtx
3573 || temp == virtual_incoming_args_rtx
3574 || (GET_CODE (temp) == PLUS
3575 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3576 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3577 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3579 /* This MEM may be shared. If the substitution can be done without
3580 the need to generate new pseudos, we want to do it in place
3581 so all copies of the shared rtx benefit. The call below will
3582 only make substitutions if the resulting address is still valid.
3585 Note that we cannot pass X as the object in the recursive call
3586 since the insn being processed may not allow all valid
3587 addresses. However, if we were not passed an object, we can
3588 only modify X without copying it if X will have a valid
3591 ??? Also note that this can still lose if OBJECT is an insn that
3592 has fewer restrictions on an address than some other insn.
3593 In that case, we will modify the shared address. This case
3594 doesn't seem very likely, though. One case where this could
3595 happen is in the case of a USE or CLOBBER reference, but we
3596 take care of that below. */
3598 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3599 object ? object : x, 0))
3602 /* Otherwise make a copy and process that copy. We copy the entire
3603 RTL expression since it might be a PLUS which could also be shared. */
3605 *loc = x = copy_rtx (x);
3608 /* Fall through to generic unary operation case. */
3610 case STRICT_LOW_PART:
3612 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3613 case SIGN_EXTEND: case ZERO_EXTEND:
3614 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3615 case FLOAT: case FIX:
3616 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3620 /* These cases either have just one operand or we know that we need not
3621 check the rest of the operands. */
3627 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3628 go ahead and make the invalid one, but do it to a copy. For a REG,
3629 just make the recursive call, since there's no chance of a problem. */
3631 if ((GET_CODE (XEXP (x, 0)) == MEM
3632 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3634 || (GET_CODE (XEXP (x, 0)) == REG
3635 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3638 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3643 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3644 in front of this insn and substitute the temporary. */
3645 if (x == virtual_incoming_args_rtx)
3646 new = arg_pointer_rtx, offset = in_arg_offset;
3647 else if (x == virtual_stack_vars_rtx)
3648 new = frame_pointer_rtx, offset = var_offset;
3649 else if (x == virtual_stack_dynamic_rtx)
3650 new = stack_pointer_rtx, offset = dynamic_offset;
3651 else if (x == virtual_outgoing_args_rtx)
3652 new = stack_pointer_rtx, offset = out_arg_offset;
3653 else if (x == virtual_cfa_rtx)
3654 new = arg_pointer_rtx, offset = cfa_offset;
3658 temp = plus_constant (new, offset);
3659 if (!validate_change (object, loc, temp, 0))
3665 temp = force_operand (temp, NULL_RTX);
3669 emit_insns_before (seq, object);
3670 if (! validate_change (object, loc, temp, 0)
3671 && ! validate_replace_rtx (x, temp, object))
3679 if (GET_CODE (XEXP (x, 0)) == REG)
3682 else if (GET_CODE (XEXP (x, 0)) == MEM)
3684 /* If we have a (addressof (mem ..)), do any instantiation inside
3685 since we know we'll be making the inside valid when we finally
3686 remove the ADDRESSOF. */
3687 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3696 /* Scan all subexpressions. */
3697 fmt = GET_RTX_FORMAT (code);
3698 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3701 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3704 else if (*fmt == 'E')
3705 for (j = 0; j < XVECLEN (x, i); j++)
3706 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3713 /* Optimization: assuming this function does not receive nonlocal gotos,
3714 delete the handlers for such, as well as the insns to establish
3715 and disestablish them. */
3721 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3723 /* Delete the handler by turning off the flag that would
3724 prevent jump_optimize from deleting it.
3725 Also permit deletion of the nonlocal labels themselves
3726 if nothing local refers to them. */
3727 if (GET_CODE (insn) == CODE_LABEL)
3731 LABEL_PRESERVE_P (insn) = 0;
3733 /* Remove it from the nonlocal_label list, to avoid confusing flow. */
3735 for (t = nonlocal_labels, last_t = 0; t;
3736 last_t = t, t = TREE_CHAIN (t))
3737 if (DECL_RTL (TREE_VALUE (t)) == insn)
3742 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3744 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3747 if (GET_CODE (insn) == INSN)
3751 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3752 if (reg_mentioned_p (t, PATTERN (insn)))
3758 || (nonlocal_goto_stack_level != 0
3759 && reg_mentioned_p (nonlocal_goto_stack_level,
3766 /* Output a USE for any register use in RTL.
3767 This is used with -noreg to mark the extent of lifespan
3768 of any registers used in a user-visible variable's DECL_RTL. */
3774 if (GET_CODE (rtl) == REG)
3775 /* This is a register variable. */
3776 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3777 else if (GET_CODE (rtl) == MEM
3778 && GET_CODE (XEXP (rtl, 0)) == REG
3779 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3780 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3781 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3782 /* This is a variable-sized structure. */
3783 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3786 /* Like use_variable except that it outputs the USEs after INSN
3787 instead of at the end of the insn-chain. */
3790 use_variable_after (rtl, insn)
3793 if (GET_CODE (rtl) == REG)
3794 /* This is a register variable. */
3795 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3796 else if (GET_CODE (rtl) == MEM
3797 && GET_CODE (XEXP (rtl, 0)) == REG
3798 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3799 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3800 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3801 /* This is a variable-sized structure. */
3802 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3808 return max_parm_reg;
3811 /* Return the first insn following those generated by `assign_parms'. */
3814 get_first_nonparm_insn ()
3817 return NEXT_INSN (last_parm_insn);
3818 return get_insns ();
3821 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3822 Crash if there is none. */
3825 get_first_block_beg ()
3827 register rtx searcher;
3828 register rtx insn = get_first_nonparm_insn ();
3830 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3831 if (GET_CODE (searcher) == NOTE
3832 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3835 abort (); /* Invalid call to this function. (See comments above.) */
3839 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3840 This means a type for which function calls must pass an address to the
3841 function or get an address back from the function.
3842 EXP may be a type node or an expression (whose type is tested). */
3845 aggregate_value_p (exp)
3848 int i, regno, nregs;
3851 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3854 type = TREE_TYPE (exp);
3856 if (RETURN_IN_MEMORY (type))
3858 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3859 and thus can't be returned in registers. */
3860 if (TREE_ADDRESSABLE (type))
3862 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3864 /* Make sure we have suitable call-clobbered regs to return
3865 the value in; if not, we must return it in memory. */
3866 reg = hard_function_value (type, 0);
3868 /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK. */
3870 if (GET_CODE (reg) != REG)
3873 regno = REGNO (reg);
3874 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3875 for (i = 0; i < nregs; i++)
3876 if (! call_used_regs[regno + i])
3881 /* Assign RTL expressions to the function's parameters.
3882 This may involve copying them into registers and using
3883 those registers as the RTL for them.
3885 If SECOND_TIME is non-zero it means that this function is being
3886 called a second time. This is done by integrate.c when a function's
3887 compilation is deferred. We need to come back here in case the
3888 FUNCTION_ARG macro computes items needed for the rest of the compilation
3889 (such as changing which registers are fixed or caller-saved). But suppress
3890 writing any insns or setting DECL_RTL of anything in this case. */
3893 assign_parms (fndecl, second_time)
3898 register rtx entry_parm = 0;
3899 register rtx stack_parm = 0;
3900 CUMULATIVE_ARGS args_so_far;
3901 enum machine_mode promoted_mode, passed_mode;
3902 enum machine_mode nominal_mode, promoted_nominal_mode;
3904 /* Total space needed so far for args on the stack,
3905 given as a constant and a tree-expression. */
3906 struct args_size stack_args_size;
3907 tree fntype = TREE_TYPE (fndecl);
3908 tree fnargs = DECL_ARGUMENTS (fndecl);
3909 /* This is used for the arg pointer when referring to stack args. */
3910 rtx internal_arg_pointer;
3911 /* This is a dummy PARM_DECL that we used for the function result if
3912 the function returns a structure. */
3913 tree function_result_decl = 0;
3914 #ifdef SETUP_INCOMING_VARARGS
3915 int varargs_setup = 0;
3917 rtx conversion_insns = 0;
3919 /* Nonzero if the last arg is named `__builtin_va_alist',
3920 which is used on some machines for old-fashioned non-ANSI varargs.h;
3921 this should be stuck onto the stack as if it had arrived there. */
3923 = (current_function_varargs
3925 && (parm = tree_last (fnargs)) != 0
3927 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3928 "__builtin_va_alist")));
3930 /* Nonzero if function takes extra anonymous args.
3931 This means the last named arg must be on the stack
3932 right before the anonymous ones. */
3934 = (TYPE_ARG_TYPES (fntype) != 0
3935 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3936 != void_type_node));
3938 current_function_stdarg = stdarg;
3940 /* If the reg that the virtual arg pointer will be translated into is
3941 not a fixed reg or is the stack pointer, make a copy of the virtual
3942 arg pointer, and address parms via the copy. The frame pointer is
3943 considered fixed even though it is not marked as such.
3945 The second time through, simply use ap to avoid generating rtx. */
3947 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3948 || ! (fixed_regs[ARG_POINTER_REGNUM]
3949 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3951 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3953 internal_arg_pointer = virtual_incoming_args_rtx;
3954 current_function_internal_arg_pointer = internal_arg_pointer;
3956 stack_args_size.constant = 0;
3957 stack_args_size.var = 0;
3959 /* If struct value address is treated as the first argument, make it so. */
3960 if (aggregate_value_p (DECL_RESULT (fndecl))
3961 && ! current_function_returns_pcc_struct
3962 && struct_value_incoming_rtx == 0)
3964 tree type = build_pointer_type (TREE_TYPE (fntype));
3966 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3968 DECL_ARG_TYPE (function_result_decl) = type;
3969 TREE_CHAIN (function_result_decl) = fnargs;
3970 fnargs = function_result_decl;
3973 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3974 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3975 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3977 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3978 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3980 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3983 /* We haven't yet found an argument that we must push and pretend the
3985 current_function_pretend_args_size = 0;
3987 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3989 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3990 struct args_size stack_offset;
3991 struct args_size arg_size;
3992 int passed_pointer = 0;
3993 int did_conversion = 0;
3994 tree passed_type = DECL_ARG_TYPE (parm);
3995 tree nominal_type = TREE_TYPE (parm);
3998 /* Set LAST_NAMED if this is the last named arg before some anonymous args. */
4000 int last_named = ((TREE_CHAIN (parm) == 0
4001 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4002 && (stdarg || current_function_varargs));
4003 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4004 most machines, if this is a varargs/stdarg function, then we treat
4005 the last named arg as if it were anonymous too. */
4006 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4008 if (TREE_TYPE (parm) == error_mark_node
4009 /* This can happen after weird syntax errors
4010 or if an enum type is defined among the parms. */
4011 || TREE_CODE (parm) != PARM_DECL
4012 || passed_type == NULL)
4014 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4015 = gen_rtx_MEM (BLKmode, const0_rtx);
4016 TREE_USED (parm) = 1;
4020 /* For varargs.h function, save info about regs and stack space
4021 used by the individual args, not including the va_alist arg. */
4022 if (hide_last_arg && last_named)
4023 current_function_args_info = args_so_far;
4025 /* Find mode of arg as it is passed, and mode of arg
4026 as it should be during execution of this function. */
4027 passed_mode = TYPE_MODE (passed_type);
4028 nominal_mode = TYPE_MODE (nominal_type);
4030 /* If the parm's mode is VOID, its value doesn't matter,
4031 so we avoid the usual things like emit_move_insn that could crash. */
4032 if (nominal_mode == VOIDmode)
4034 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4038 /* If the parm is to be passed as a transparent union, use the
4039 type of the first field for the tests below. We have already
4040 verified that the modes are the same. */
4041 if (DECL_TRANSPARENT_UNION (parm)
4042 || TYPE_TRANSPARENT_UNION (passed_type))
4043 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4045 /* See if this arg was passed by invisible reference. It is if
4046 it is an object whose size depends on the contents of the
4047 object itself or if the machine requires these objects be passed that way. */
4050 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4051 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4052 || TREE_ADDRESSABLE (passed_type)
4053 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4054 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4055 passed_type, named_arg)
4059 passed_type = nominal_type = build_pointer_type (passed_type);
4061 passed_mode = nominal_mode = Pmode;
4064 promoted_mode = passed_mode;
4066 #ifdef PROMOTE_FUNCTION_ARGS
4067 /* Compute the mode to which the arg is actually extended. */
4068 unsignedp = TREE_UNSIGNED (passed_type);
4069 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4072 /* Let machine desc say which reg (if any) the parm arrives in.
4073 0 means it arrives on the stack. */
4074 #ifdef FUNCTION_INCOMING_ARG
4075 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4076 passed_type, named_arg);
4078 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4079 passed_type, named_arg);
4082 if (entry_parm == 0)
4083 promoted_mode = passed_mode;
4085 #ifdef SETUP_INCOMING_VARARGS
4086 /* If this is the last named parameter, do any required setup for
4087 varargs or stdargs. We need to know about the case of this being an
4088 addressable type, in which case we skip the registers it
4089 would have arrived in.
4091 For stdargs, LAST_NAMED will be set for two parameters, the one that
4092 is actually the last named, and the dummy parameter. We only
4093 want to do this action once.
4095 Also, indicate when RTL generation is to be suppressed. */
4096 if (last_named && !varargs_setup)
4098 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4099 current_function_pretend_args_size,
4105 /* Determine parm's home in the stack,
4106 in case it arrives in the stack or we should pretend it did.
4108 Compute the stack position and rtx where the argument arrives and its size.
4111 There is one complexity here: If this was a parameter that would
4112 have been passed in registers, but wasn't only because it is
4113 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4114 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4115 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4116 0 as it was the previous time. */
4118 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4119 locate_and_pad_parm (promoted_mode, passed_type,
4120 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4121 1,
4122 #else
4123 #ifdef FUNCTION_INCOMING_ARG
4124 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4125 passed_type,
4126 pretend_named) != 0,
4127 #else
4128 FUNCTION_ARG (args_so_far, promoted_mode,
4129 passed_type,
4130 pretend_named) != 0,
4131 #endif
4132 #endif
4133 fndecl, &stack_args_size, &stack_offset, &arg_size);
4137 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4139 if (offset_rtx == const0_rtx)
4140 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4141 else
4142 stack_parm = gen_rtx_MEM (promoted_mode,
4143 gen_rtx_PLUS (Pmode,
4144 internal_arg_pointer,
4145 offset_rtx));
4147 /* If this is a memory ref that contains aggregate components,
4148 mark it as such for cse and loop optimize. Likewise if it
4149 is readonly. */
4150 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4151 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4152 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4155 /* If this parameter was passed both in registers and in the stack,
4156 use the copy on the stack. */
4157 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4158 entry_parm = 0;
4160 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4161 /* If this parm was passed part in regs and part in memory,
4162 pretend it arrived entirely in memory
4163 by pushing the register-part onto the stack.
4165 In the special case of a DImode or DFmode that is split,
4166 we could put it together in a pseudoreg directly,
4167 but for now that's not worth bothering with. */
4171 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4172 passed_type, named_arg);
4173
4174 if (nregs > 0)
4175 {
4176 current_function_pretend_args_size
4177 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4178 / (PARM_BOUNDARY / BITS_PER_UNIT)
4179 * (PARM_BOUNDARY / BITS_PER_UNIT));
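/* Worked example of the rounding above (assumed values, editor's
   sketch): with nregs == 3, UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64
   bits (8 bytes),

       ((3*4) + 8 - 1) / 8 * 8  ==  16

   so 12 bytes of register-part are accounted for as 16 bytes of pretend
   argument space.  */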
4183 /* Handle calls that pass values in multiple non-contiguous
4184 locations. The Irix 6 ABI has examples of this. */
4185 if (GET_CODE (entry_parm) == PARALLEL)
4186 emit_group_store (validize_mem (stack_parm), entry_parm,
4187 int_size_in_bytes (TREE_TYPE (parm)),
4188 (TYPE_ALIGN (TREE_TYPE (parm))
4189 / BITS_PER_UNIT));
4190 else
4191 move_block_from_reg (REGNO (entry_parm),
4192 validize_mem (stack_parm), nregs,
4193 int_size_in_bytes (TREE_TYPE (parm)));
4195 entry_parm = stack_parm;
4196 }
4197
4198 #endif
4200 /* If we didn't decide this parm came in a register,
4201 by default it came on the stack. */
4202 if (entry_parm == 0)
4203 entry_parm = stack_parm;
4205 /* Record permanently how this parm was passed. */
4207 DECL_INCOMING_RTL (parm) = entry_parm;
4209 /* If there is actually space on the stack for this parm,
4210 count it in stack_args_size; otherwise set stack_parm to 0
4211 to indicate there is no preallocated stack slot for the parm. */
4213 if (entry_parm == stack_parm
4214 || (GET_CODE (entry_parm) == PARALLEL
4215 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4216 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4217 /* On some machines, even if a parm value arrives in a register
4218 there is still an (uninitialized) stack slot allocated for it.
4220 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4221 whether this parameter already has a stack slot allocated,
4222 because an arg block exists only if current_function_args_size
4223 is larger than some threshold, and we haven't calculated that
4224 yet. So, for now, we just assume that stack slots never exist
4225 in this case. */
4226 || REG_PARM_STACK_SPACE (fndecl) > 0
4227 #endif
4228 )
4229 {
4230 stack_args_size.constant += arg_size.constant;
4231 if (arg_size.var != 0)
4232 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4233 }
4234 else
4235 /* No stack slot was pushed for this parm. */
4236 stack_parm = 0;
4238 /* Update info on where next arg arrives in registers. */
4240 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4241 passed_type, named_arg);
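/* Sketch of a typical FUNCTION_ARG_ADVANCE (hypothetical target):

       (CUM).words += (GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1)
                      / UNITS_PER_WORD;

   i.e. the cumulative-args cursor is bumped past this arg so the next
   FUNCTION_ARG call sees how much space earlier args consumed.  */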
4243 /* If this is our second time through, we are done with this parm. */
4244 if (second_time)
4245 continue;
4247 /* If we can't trust the parm stack slot to be aligned enough
4248 for its ultimate type, don't use that slot after entry.
4249 We'll make another stack slot, if we need one. */
4251 int thisparm_boundary
4252 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4254 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4255 stack_parm = 0;
4258 /* If parm was passed in memory, and we need to convert it on entry,
4259 don't store it back in that same slot. */
4260 if (entry_parm == stack_parm
4261 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4262 stack_parm = 0;
4265 /* Now adjust STACK_PARM to the mode and precise location
4266 where this parameter should live during execution,
4267 if we discover that it must live in the stack during execution.
4268 To make debuggers happier on big-endian machines, we store
4269 the value in the last bytes of the space available. */
4271 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4272 && stack_parm != 0)
4273 {
4274 rtx offset_rtx;
4276 if (BYTES_BIG_ENDIAN
4277 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4278 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4279 - GET_MODE_SIZE (nominal_mode));
4281 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4282 if (offset_rtx == const0_rtx)
4283 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4284 else
4285 stack_parm = gen_rtx_MEM (nominal_mode,
4286 gen_rtx_PLUS (Pmode,
4287 internal_arg_pointer,
4288 offset_rtx));
4290 /* If this is a memory ref that contains aggregate components,
4291 mark it as such for cse and loop optimize. */
4292 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4293 }
4297 /* We need this "use" info, because the gcc-register->stack-register
4298 converter in reg-stack.c needs to know which registers are active
4299 at the start of the function call. The actual parameter loading
4300 instructions are not always available then anymore, since they might
4301 have been optimised away. */
4303 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4304 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4307 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4308 in the mode in which it arrives.
4309 STACK_PARM is an RTX for a stack slot where the parameter can live
4310 during the function (in case we want to put it there).
4311 STACK_PARM is 0 if no stack slot was pushed for it.
4313 Now output code if necessary to convert ENTRY_PARM to
4314 the type in which this function declares it,
4315 and store that result in an appropriate place,
4316 which may be a pseudo reg, may be STACK_PARM,
4317 or may be a local stack slot if STACK_PARM is 0.
4319 Set DECL_RTL to that place. */
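/* In outline, the cases handled below are (editor's summary):
     BLKmode or PARALLEL entry value   -> copy into a stack slot;
     value safe to keep in a register  -> a new pseudo register;
     everything else                   -> the stack slot STACK_PARM.  */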
4321 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4323 /* If a BLKmode arrives in registers, copy it to a stack slot.
4324 Handle calls that pass values in multiple non-contiguous
4325 locations. The Irix 6 ABI has examples of this. */
4326 if (GET_CODE (entry_parm) == REG
4327 || GET_CODE (entry_parm) == PARALLEL)
4330 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4333 /* Note that we will be storing an integral number of words.
4334 So we have to be careful to ensure that we allocate an
4335 integral number of words. We do this below in the
4336 assign_stack_local if space was not allocated in the argument
4337 list. If it was, this will not work if PARM_BOUNDARY is not
4338 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4339 if it becomes a problem. */
4341 if (stack_parm == 0)
4344 = assign_stack_local (GET_MODE (entry_parm),
4347 /* If this is a memory ref that contains aggregate
4348 components, mark it as such for cse and loop optimize. */
4349 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4352 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4355 if (TREE_READONLY (parm))
4356 RTX_UNCHANGING_P (stack_parm) = 1;
4358 /* Handle calls that pass values in multiple non-contiguous
4359 locations. The Irix 6 ABI has examples of this. */
4360 if (GET_CODE (entry_parm) == PARALLEL)
4361 emit_group_store (validize_mem (stack_parm), entry_parm,
4362 int_size_in_bytes (TREE_TYPE (parm)),
4363 (TYPE_ALIGN (TREE_TYPE (parm))
4364 / BITS_PER_UNIT));
4365 else
4366 move_block_from_reg (REGNO (entry_parm),
4367 validize_mem (stack_parm),
4368 size_stored / UNITS_PER_WORD,
4369 int_size_in_bytes (TREE_TYPE (parm)));
4371 DECL_RTL (parm) = stack_parm;
4373 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4374 && ! DECL_INLINE (fndecl))
4375 /* layout_decl may set this. */
4376 || TREE_ADDRESSABLE (parm)
4377 || TREE_SIDE_EFFECTS (parm)
4378 /* If -ffloat-store specified, don't put explicit
4379 float variables into registers. */
4380 || (flag_float_store
4381 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4382 /* Always assign pseudo to structure return or item passed
4383 by invisible reference. */
4384 || passed_pointer || parm == function_result_decl)
4386 /* Store the parm in a pseudoregister during the function, but we
4387 may need to do it in a wider mode. */
4389 register rtx parmreg;
4390 int regno, regnoi = 0, regnor = 0;
4392 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4394 promoted_nominal_mode
4395 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4397 parmreg = gen_reg_rtx (promoted_nominal_mode);
4398 mark_user_reg (parmreg);
4400 /* If this was an item that we received a pointer to, set DECL_RTL
4401 appropriately. */
4402 if (passed_pointer)
4403 {
4404 DECL_RTL (parm)
4405 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4406 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4407 }
4408 else
4409 DECL_RTL (parm) = parmreg;
4411 /* Copy the value into the register. */
4412 if (nominal_mode != passed_mode
4413 || promoted_nominal_mode != promoted_mode)
4416 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4417 mode, by the caller. We now have to convert it to
4418 NOMINAL_MODE, if different. However, PARMREG may be in
4419 a different mode than NOMINAL_MODE if it is being stored
4420 promoted.
4422 If ENTRY_PARM is a hard register, it might be in a register
4423 not valid for operating in its mode (e.g., an odd-numbered
4424 register for a DFmode). In that case, moves are the only
4425 thing valid, so we can't do a convert from there. This
4426 occurs when the calling sequence allows such misaligned
4427 usages.
4429 In addition, the conversion may involve a call, which could
4430 clobber parameters which haven't been copied to pseudo
4431 registers yet. Therefore, we must first copy the parm to
4432 a pseudo reg here, and save the conversion until after all
4433 parameters have been moved. */
4435 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4437 emit_move_insn (tempreg, validize_mem (entry_parm));
4439 push_to_sequence (conversion_insns);
4440 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4442 /* TREE_USED gets set erroneously during expand_assignment. */
4443 save_tree_used = TREE_USED (parm);
4444 expand_assignment (parm,
4445 make_tree (nominal_type, tempreg), 0, 0);
4446 TREE_USED (parm) = save_tree_used;
4447 conversion_insns = get_insns ();
4448 did_conversion = 1;
4449 end_sequence ();
4450 }
4451 else
4452 emit_move_insn (parmreg, validize_mem (entry_parm));
4454 /* If we were passed a pointer but the actual value
4455 can safely live in a register, put it in one. */
4456 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4457 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4458 && ! DECL_INLINE (fndecl))
4459 /* layout_decl may set this. */
4460 || TREE_ADDRESSABLE (parm)
4461 || TREE_SIDE_EFFECTS (parm)
4462 /* If -ffloat-store specified, don't put explicit
4463 float variables into registers. */
4464 || (flag_float_store
4465 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4467 /* We can't use nominal_mode, because it will have been set to
4468 Pmode above. We must use the actual mode of the parm. */
4469 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4470 mark_user_reg (parmreg);
4471 emit_move_insn (parmreg, DECL_RTL (parm));
4472 DECL_RTL (parm) = parmreg;
4473 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4477 #ifdef FUNCTION_ARG_CALLEE_COPIES
4478 /* If we are passed an arg by reference and it is our responsibility
4479 to make a copy, do it now.
4480 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4481 original argument, so we must recreate them in the call to
4482 FUNCTION_ARG_CALLEE_COPIES. */
4483 /* ??? Later add code to handle the case that if the argument isn't
4484 modified, don't do the copy. */
4486 else if (passed_pointer
4487 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4488 TYPE_MODE (DECL_ARG_TYPE (parm)),
4489 DECL_ARG_TYPE (parm),
4490 named_arg)
4491 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4492 {
4493 rtx copy;
4494 tree type = DECL_ARG_TYPE (parm);
4496 /* This sequence may involve a library call perhaps clobbering
4497 registers that haven't been copied to pseudos yet. */
4499 push_to_sequence (conversion_insns);
4501 if (TYPE_SIZE (type) == 0
4502 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4503 /* This is a variable sized object. */
4504 copy = gen_rtx_MEM (BLKmode,
4505 allocate_dynamic_stack_space
4506 (expr_size (parm), NULL_RTX,
4507 TYPE_ALIGN (type)));
4508 else
4509 copy = assign_stack_temp (TYPE_MODE (type),
4510 int_size_in_bytes (type), 1);
4511 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4512 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4514 store_expr (parm, copy, 0);
4515 emit_move_insn (parmreg, XEXP (copy, 0));
4516 if (current_function_check_memory_usage)
4517 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4518 XEXP (copy, 0), Pmode,
4519 GEN_INT (int_size_in_bytes (type)),
4520 TYPE_MODE (sizetype),
4521 GEN_INT (MEMORY_USE_RW),
4522 TYPE_MODE (integer_type_node));
4523 conversion_insns = get_insns ();
4524 did_conversion = 1;
4525 end_sequence ();
4526 }
4527 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4529 /* In any case, record the parm's desired stack location
4530 in case we later discover it must live in the stack.
4532 If it is a COMPLEX value, store the stack location for both
4533 halves. */
4535 if (GET_CODE (parmreg) == CONCAT)
4536 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4538 regno = REGNO (parmreg);
4540 if (regno >= max_parm_reg)
4541 {
4542 rtx *new;
4543 int old_max_parm_reg = max_parm_reg;
4545 /* It's slow to expand this one register at a time,
4546 but it's also rare and we need max_parm_reg to be
4547 precisely correct. */
4548 max_parm_reg = regno + 1;
4549 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4550 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4551 old_max_parm_reg * sizeof (rtx));
4552 bzero ((char *) (new + old_max_parm_reg),
4553 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4554 parm_reg_stack_loc = new;
4555 }
4557 if (GET_CODE (parmreg) == CONCAT)
4558 {
4559 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4561 regnor = REGNO (gen_realpart (submode, parmreg));
4562 regnoi = REGNO (gen_imagpart (submode, parmreg));
4564 if (stack_parm != 0)
4565 {
4566 parm_reg_stack_loc[regnor]
4567 = gen_realpart (submode, stack_parm);
4568 parm_reg_stack_loc[regnoi]
4569 = gen_imagpart (submode, stack_parm);
4570 }
4571 else
4572 {
4573 parm_reg_stack_loc[regnor] = 0;
4574 parm_reg_stack_loc[regnoi] = 0;
4575 }
4576 }
4577 else
4578 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4580 /* Mark the register as eliminable if we did no conversion
4581 and it was copied from memory at a fixed offset,
4582 and the arg pointer was not copied to a pseudo-reg.
4583 If the arg pointer is a pseudo reg or the offset formed
4584 an invalid address, such memory-equivalences
4585 as we make here would screw up life analysis for it. */
4586 if (nominal_mode == passed_mode
4587 && ! did_conversion
4588 && stack_parm != 0
4589 && GET_CODE (stack_parm) == MEM
4590 && stack_offset.var == 0
4591 && reg_mentioned_p (virtual_incoming_args_rtx,
4592 XEXP (stack_parm, 0)))
4593 {
4594 rtx linsn = get_last_insn ();
4595 rtx sinsn, set;
4597 /* Mark complex types separately. */
4598 if (GET_CODE (parmreg) == CONCAT)
4599 /* Scan backwards for the set of the real and
4600 imaginary parts. */
4601 for (sinsn = linsn; sinsn != 0;
4602 sinsn = prev_nonnote_insn (sinsn))
4604 set = single_set (sinsn);
4605 if (set != 0
4606 && SET_DEST (set) == regno_reg_rtx [regnoi])
4607 REG_NOTES (sinsn)
4608 = gen_rtx_EXPR_LIST (REG_EQUIV,
4609 parm_reg_stack_loc[regnoi],
4610 REG_NOTES (sinsn));
4611 else if (set != 0
4612 && SET_DEST (set) == regno_reg_rtx [regnor])
4613 REG_NOTES (sinsn)
4614 = gen_rtx_EXPR_LIST (REG_EQUIV,
4615 parm_reg_stack_loc[regnor],
4616 REG_NOTES (sinsn));
4617 }
4618 else if ((set = single_set (linsn)) != 0
4619 && SET_DEST (set) == parmreg)
4620 REG_NOTES (linsn)
4621 = gen_rtx_EXPR_LIST (REG_EQUIV,
4622 stack_parm, REG_NOTES (linsn));
4623 }
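/* Sketch of the effect (illustrative RTL): a note such as

       (expr_list:REG_EQUIV (mem (plus (reg ap) (const_int 8))) ...)

   attached to the insn that loads the pseudo records that the pseudo is
   equivalent to its incoming stack slot, letting reload use the slot
   directly if the pseudo gets no hard register.  */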
4625 /* For pointer data type, suggest pointer register. */
4626 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4627 mark_reg_pointer (parmreg,
4628 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4629 / BITS_PER_UNIT));
4630 }
4631 else
4632 {
4633 /* Value must be stored in the stack slot STACK_PARM
4634 during function execution. */
4636 if (promoted_mode != nominal_mode)
4637 {
4638 /* Conversion is required. */
4639 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4641 emit_move_insn (tempreg, validize_mem (entry_parm));
4643 push_to_sequence (conversion_insns);
4644 entry_parm = convert_to_mode (nominal_mode, tempreg,
4645 TREE_UNSIGNED (TREE_TYPE (parm)));
4646 if (stack_parm)
4647 {
4648 /* ??? This may need a big-endian conversion on sparc64. */
4649 stack_parm = change_address (stack_parm, nominal_mode,
4650 NULL_RTX);
4651 }
4652 conversion_insns = get_insns ();
4653 did_conversion = 1;
4654 end_sequence ();
4655 }
4657 if (entry_parm != stack_parm)
4658 {
4659 if (stack_parm == 0)
4660 {
4661 stack_parm
4662 = assign_stack_local (GET_MODE (entry_parm),
4663 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4664 /* If this is a memory ref that contains aggregate components,
4665 mark it as such for cse and loop optimize. */
4666 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4667 }
4669 if (promoted_mode != nominal_mode)
4670 {
4671 push_to_sequence (conversion_insns);
4672 emit_move_insn (validize_mem (stack_parm),
4673 validize_mem (entry_parm));
4674 conversion_insns = get_insns ();
4675 end_sequence ();
4676 }
4677 else
4678 emit_move_insn (validize_mem (stack_parm),
4679 validize_mem (entry_parm));
4681 if (current_function_check_memory_usage)
4682 {
4683 push_to_sequence (conversion_insns);
4684 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4685 XEXP (stack_parm, 0), Pmode,
4686 GEN_INT (GET_MODE_SIZE (GET_MODE
4687 (stack_parm))),
4688 TYPE_MODE (sizetype),
4689 GEN_INT (MEMORY_USE_RW),
4690 TYPE_MODE (integer_type_node));
4692 conversion_insns = get_insns ();
4693 end_sequence ();
4694 }
4695 DECL_RTL (parm) = stack_parm;
4696 }
4698 /* If this "parameter" was the place where we are receiving the
4699 function's incoming structure pointer, set up the result. */
4700 if (parm == function_result_decl)
4701 {
4702 tree result = DECL_RESULT (fndecl);
4703 tree restype = TREE_TYPE (result);
4705 DECL_RTL (result)
4706 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4708 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4709 AGGREGATE_TYPE_P (restype));
4710 }
4712 if (TREE_THIS_VOLATILE (parm))
4713 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4714 if (TREE_READONLY (parm))
4715 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4716 }
4718 /* Output all parameter conversion instructions (possibly including calls)
4719 now that all parameters have been copied out of hard registers. */
4720 emit_insns (conversion_insns);
4722 last_parm_insn = get_last_insn ();
4724 current_function_args_size = stack_args_size.constant;
4726 /* Adjust function incoming argument size for alignment and
4727 minimum size. */
4729 #ifdef REG_PARM_STACK_SPACE
4730 #ifndef MAYBE_REG_PARM_STACK_SPACE
4731 current_function_args_size = MAX (current_function_args_size,
4732 REG_PARM_STACK_SPACE (fndecl));
4733 #endif
4734 #endif
4736 #ifdef STACK_BOUNDARY
4737 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4739 current_function_args_size
4740 = ((current_function_args_size + STACK_BYTES - 1)
4741 / STACK_BYTES) * STACK_BYTES;
4742 #endif
4744 #ifdef ARGS_GROW_DOWNWARD
4745 current_function_arg_offset_rtx
4746 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4747 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4748 size_int (-stack_args_size.constant)),
4749 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4750 #else
4751 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4752 #endif
4754 /* See how many bytes, if any, of its args a function should try to pop
4755 on return. */
4757 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4758 current_function_args_size);
4760 /* For a stdarg.h function, save info about
4761 regs and stack space used by the named args. */
4764 current_function_args_info = args_so_far;
4766 /* Set the rtx used for the function return value. Put this in its
4767 own variable so any optimizers that need this information don't have
4768 to include tree.h. Do this here so it gets done when an inlined
4769 function gets output. */
4771 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4774 /* Indicate whether REGNO is an incoming argument to the current function
4775 that was promoted to a wider mode. If so, return the RTX for the
4776 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4777 that REGNO is promoted from and whether the promotion was signed or
4778 unsigned. */
4780 #ifdef PROMOTE_FUNCTION_ARGS
4782 rtx
4783 promoted_input_arg (regno, pmode, punsignedp)
4784 int regno;
4785 enum machine_mode *pmode;
4786 int *punsignedp;
4787 {
4788 tree arg;
4790 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4791 arg = TREE_CHAIN (arg))
4792 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4793 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4794 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4796 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4797 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4799 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4800 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4801 && mode != DECL_MODE (arg))
4803 *pmode = DECL_MODE (arg);
4804 *punsignedp = unsignedp;
4805 return DECL_INCOMING_RTL (arg);
4806 }
4807
4808 return 0;
4809 }
4810 #endif
4814 /* Compute the size and offset from the start of the stacked arguments for a
4815 parm passed in mode PASSED_MODE and with type TYPE.
4817 INITIAL_OFFSET_PTR points to the current offset into the stacked
4818 argument list.
4820 The starting offset and size for this parm are returned in *OFFSET_PTR
4821 and *ARG_SIZE_PTR, respectively.
4823 IN_REGS is non-zero if the argument will be passed in registers. It will
4824 never be set if REG_PARM_STACK_SPACE is not defined.
4826 FNDECL is the function in which the argument was defined.
4828 There are two types of rounding that are done. The first, controlled by
4829 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4830 list to be aligned to the specific boundary (in bits). This rounding
4831 affects the initial and starting offsets, but not the argument size.
4833 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4834 optionally rounds the size of the parm to PARM_BOUNDARY. The
4835 initial offset is not affected by this rounding, while the size always
4836 is and the starting offset may be. */
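/* Worked example (assumed values, editor's sketch): a 5-byte BLKmode
   arg with FUNCTION_ARG_BOUNDARY == 64 and PARM_BOUNDARY == 32, starting
   from a 12-byte initial offset: the first rounding moves the offset to
   CEIL_ROUND (12, 8) == 16; the second rounds the size up to
   round_up (5, 4) == 8 bytes, so the following arg starts at 24.  */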
4838 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4839 initial_offset_ptr is positive because locate_and_pad_parm's
4840 callers pass in the total size of args so far as
4841 initial_offset_ptr. arg_size_ptr is always positive.*/
4843 void
4844 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4845 initial_offset_ptr, offset_ptr, arg_size_ptr)
4846 enum machine_mode passed_mode;
4847 tree type;
4848 int in_regs;
4849 tree fndecl ATTRIBUTE_UNUSED;
4850 struct args_size *initial_offset_ptr;
4851 struct args_size *offset_ptr;
4852 struct args_size *arg_size_ptr;
4853 {
4854 tree sizetree
4855 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4856 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4857 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4859 #ifdef REG_PARM_STACK_SPACE
4860 /* If we have found a stack parm before we reach the end of the
4861 area reserved for registers, skip that area. */
4862 if (! in_regs)
4863 {
4864 int reg_parm_stack_space = 0;
4866 #ifdef MAYBE_REG_PARM_STACK_SPACE
4867 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4868 #else
4869 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4870 #endif
4871 if (reg_parm_stack_space > 0)
4872 {
4873 if (initial_offset_ptr->var)
4874 {
4875 initial_offset_ptr->var
4876 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4877 size_int (reg_parm_stack_space));
4878 initial_offset_ptr->constant = 0;
4879 }
4880 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4881 initial_offset_ptr->constant = reg_parm_stack_space;
4882 }
4884 #endif /* REG_PARM_STACK_SPACE */
4886 arg_size_ptr->var = 0;
4887 arg_size_ptr->constant = 0;
4889 #ifdef ARGS_GROW_DOWNWARD
4890 if (initial_offset_ptr->var)
4892 offset_ptr->constant = 0;
4893 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4894 initial_offset_ptr->var);
4895 }
4896 else
4897 {
4898 offset_ptr->constant = - initial_offset_ptr->constant;
4899 offset_ptr->var = 0;
4900 }
4901 if (where_pad != none
4902 && (TREE_CODE (sizetree) != INTEGER_CST
4903 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4904 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4905 SUB_PARM_SIZE (*offset_ptr, sizetree);
4906 if (where_pad != downward)
4907 pad_to_arg_alignment (offset_ptr, boundary);
4908 if (initial_offset_ptr->var)
4910 arg_size_ptr->var = size_binop (MINUS_EXPR,
4911 size_binop (MINUS_EXPR,
4912 integer_zero_node,
4913 initial_offset_ptr->var),
4914 offset_ptr->var);
4915 }
4916 else
4917 {
4918 arg_size_ptr->constant = (- initial_offset_ptr->constant
4919 - offset_ptr->constant);
4920 }
4921 #else /* !ARGS_GROW_DOWNWARD */
4922 pad_to_arg_alignment (initial_offset_ptr, boundary);
4923 *offset_ptr = *initial_offset_ptr;
4925 #ifdef PUSH_ROUNDING
4926 if (passed_mode != BLKmode)
4927 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4928 #endif
4930 /* Pad_below needs the pre-rounded size to know how much to pad below
4931 so this must be done before rounding up. */
4932 if (where_pad == downward
4933 /* However, BLKmode args passed in regs have their padding done elsewhere.
4934 The stack slot must be able to hold the entire register. */
4935 && !(in_regs && passed_mode == BLKmode))
4936 pad_below (offset_ptr, passed_mode, sizetree);
4938 if (where_pad != none
4939 && (TREE_CODE (sizetree) != INTEGER_CST
4940 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4941 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4943 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4944 #endif /* ARGS_GROW_DOWNWARD */
4947 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4948 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
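/* E.g. (illustrative): with BOUNDARY == 64, a 12-byte offset becomes
   CEIL_ROUND (12, 8) == 16; in the ARGS_GROW_DOWNWARD case an offset of
   -12 becomes FLOOR_ROUND (-12, 8) == -16 instead.  */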
4950 static void
4951 pad_to_arg_alignment (offset_ptr, boundary)
4952 struct args_size *offset_ptr;
4953 int boundary;
4954 {
4955 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4957 if (boundary > BITS_PER_UNIT)
4958 {
4959 if (offset_ptr->var)
4960 {
4961 offset_ptr->var =
4962 #ifdef ARGS_GROW_DOWNWARD
4963 round_down
4964 #else
4965 round_up
4966 #endif
4967 (ARGS_SIZE_TREE (*offset_ptr),
4968 boundary / BITS_PER_UNIT);
4969 offset_ptr->constant = 0; /*?*/
4970 }
4971 else
4972 offset_ptr->constant =
4973 #ifdef ARGS_GROW_DOWNWARD
4974 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4975 #else
4976 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4977 #endif
4978 }
4979 }
4981 #ifndef ARGS_GROW_DOWNWARD
4982 static void
4983 pad_below (offset_ptr, passed_mode, sizetree)
4984 struct args_size *offset_ptr;
4985 enum machine_mode passed_mode;
4986 tree sizetree;
4987 {
4988 if (passed_mode != BLKmode)
4989 {
4990 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4991 offset_ptr->constant
4992 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4993 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4994 - GET_MODE_SIZE (passed_mode));
4995 }
4996 else
4997 {
4998 if (TREE_CODE (sizetree) != INTEGER_CST
4999 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5000 {
5001 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5002 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5004 ADD_PARM_SIZE (*offset_ptr, s2);
5005 SUB_PARM_SIZE (*offset_ptr, sizetree);
5006 }
5007 }
5008 }
5009 #endif
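/* For instance (editor's example of pad_below above): an HImode arg
   (16 bits) with PARM_BOUNDARY == 32 receives

       (16 + 31) / 32 * 32 / 8  -  2  ==  2

   bytes of padding below, so the value occupies the upper half of its
   4-byte slot.  */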
5011 #ifdef ARGS_GROW_DOWNWARD
5012 static tree
5013 round_down (value, divisor)
5014 tree value;
5015 int divisor;
5016 {
5017 return size_binop (MULT_EXPR,
5018 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5019 size_int (divisor));
5020 }
5021 #endif
5023 /* Walk the tree of blocks describing the binding levels within a function
5024 and warn about uninitialized variables.
5025 This is done after calling flow_analysis and before global_alloc
5026 clobbers the pseudo-regs to hard regs. */
5028 void
5029 uninitialized_vars_warning (block)
5030 tree block;
5031 {
5032 register tree decl, sub;
5033 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5034 {
5035 if (TREE_CODE (decl) == VAR_DECL
5036 /* These warnings are unreliable for aggregates
5037 because assigning the fields one by one can fail to convince
5038 flow.c that the entire aggregate was initialized.
5039 Unions are troublesome because members may be shorter. */
5040 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5041 && DECL_RTL (decl) != 0
5042 && GET_CODE (DECL_RTL (decl)) == REG
5043 /* Global optimizations can make it difficult to determine if a
5044 particular variable has been initialized. However, a VAR_DECL
5045 with a nonzero DECL_INITIAL had an initializer, so do not
5046 claim it is potentially uninitialized.
5048 We do not care about the actual value in DECL_INITIAL, so we do
5049 not worry that it may be a dangling pointer. */
5050 && DECL_INITIAL (decl) == NULL_TREE
5051 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5052 warning_with_decl (decl,
5053 "`%s' might be used uninitialized in this function");
5054 if (TREE_CODE (decl) == VAR_DECL
5055 && DECL_RTL (decl) != 0
5056 && GET_CODE (DECL_RTL (decl)) == REG
5057 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5058 warning_with_decl (decl,
5059 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5061 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5062 uninitialized_vars_warning (sub);
5065 /* Do the appropriate part of uninitialized_vars_warning
5066 but for arguments instead of local variables. */
5068 void
5069 setjmp_args_warning ()
5070 {
5071 register tree decl;
5072 for (decl = DECL_ARGUMENTS (current_function_decl);
5073 decl; decl = TREE_CHAIN (decl))
5074 if (DECL_RTL (decl) != 0
5075 && GET_CODE (DECL_RTL (decl)) == REG
5076 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5077 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5080 /* If this function calls setjmp, put all vars into the stack
5081 unless they were declared `register'. */
5083 void
5084 setjmp_protect (block)
5085 tree block;
5086 {
5087 register tree decl, sub;
5088 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5089 if ((TREE_CODE (decl) == VAR_DECL
5090 || TREE_CODE (decl) == PARM_DECL)
5091 && DECL_RTL (decl) != 0
5092 && (GET_CODE (DECL_RTL (decl)) == REG
5093 || (GET_CODE (DECL_RTL (decl)) == MEM
5094 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5095 /* If this variable came from an inline function, it must be
5096 that its life doesn't overlap the setjmp. If there was a
5097 setjmp in the function, it would already be in memory. We
5098 must exclude such variables because their DECL_RTL might be
5099 set to strange things such as virtual_stack_vars_rtx. */
5100 && ! DECL_FROM_INLINE (decl)
5102 #ifdef NON_SAVING_SETJMP
5103 /* If longjmp doesn't restore the registers,
5104 don't put anything in them. */
5105 NON_SAVING_SETJMP
5106 ||
5107 #endif
5108 ! DECL_REGISTER (decl)))
5109 put_var_into_stack (decl);
5110 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5111 setjmp_protect (sub);
5114 /* Like the previous function, but for args instead of local variables. */
5116 void
5117 setjmp_protect_args ()
5118 {
5119 register tree decl;
5120 for (decl = DECL_ARGUMENTS (current_function_decl);
5121 decl; decl = TREE_CHAIN (decl))
5122 if ((TREE_CODE (decl) == VAR_DECL
5123 || TREE_CODE (decl) == PARM_DECL)
5124 && DECL_RTL (decl) != 0
5125 && (GET_CODE (DECL_RTL (decl)) == REG
5126 || (GET_CODE (DECL_RTL (decl)) == MEM
5127 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5129 /* If longjmp doesn't restore the registers,
5130 don't put anything in them. */
5131 #ifdef NON_SAVING_SETJMP
5132 NON_SAVING_SETJMP
5133 ||
5134 #endif
5135 ! DECL_REGISTER (decl)))
5136 put_var_into_stack (decl);
5139 /* Return the context-pointer register corresponding to DECL,
5140 or 0 if it does not need one. */
5142 rtx
5143 lookup_static_chain (decl)
5144 tree decl;
5145 {
5146 tree context = decl_function_context (decl);
5147 tree link;
5149 if (context == 0
5150 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5151 return 0;
5153 /* We treat inline_function_decl as an alias for the current function
5154 because that is the inline function whose vars, types, etc.
5155 are being merged into the current function.
5156 See expand_inline_function. */
5157 if (context == current_function_decl || context == inline_function_decl)
5158 return virtual_stack_vars_rtx;
5160 for (link = context_display; link; link = TREE_CHAIN (link))
5161 if (TREE_PURPOSE (link) == context)
5162 return RTL_EXPR_RTL (TREE_VALUE (link));
5163
5164 abort ();
5165 }
5167 /* Convert a stack slot address ADDR for variable VAR
5168 (from a containing function)
5169 into an address valid in this function (using a static chain). */
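/* Sketch (illustrative): if VAR lives at (plus virtual_stack_vars_rtx 8)
   in its defining function, the nested function recomputes the address
   as (plus BASE 8), where BASE is the containing function's frame
   pointer recovered through the static chain.  */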
5171 rtx
5172 fix_lexical_addr (addr, var)
5173 rtx addr;
5174 tree var;
5175 {
5176 rtx basereg;
5177 HOST_WIDE_INT displacement;
5178 tree context = decl_function_context (var);
5179 struct function *fp;
5180 rtx base = 0;
5182 /* If this is the present function, we need not do anything. */
5183 if (context == current_function_decl || context == inline_function_decl)
5184 return addr;
5186 for (fp = outer_function_chain; fp; fp = fp->next)
5187 if (fp->decl == context)
5188 break;
5189
5190 if (fp == 0)
5191 abort ();
5193 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5194 addr = XEXP (XEXP (addr, 0), 0);
5196 /* Decode given address as base reg plus displacement. */
5197 if (GET_CODE (addr) == REG)
5198 basereg = addr, displacement = 0;
5199 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5200 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5201 else
5202 abort ();
5204 /* We accept vars reached via the containing function's
5205 incoming arg pointer and via its stack variables pointer. */
5206 if (basereg == fp->internal_arg_pointer)
5208 /* If reached via arg pointer, get the arg pointer value
5209 out of that function's stack frame.
5211 There are two cases: If a separate ap is needed, allocate a
5212 slot in the outer function for it and dereference it that way.
5213 This is correct even if the real ap is actually a pseudo.
5214 Otherwise, just adjust the offset from the frame pointer to
5217 #ifdef NEED_SEPARATE_AP
5220 if (fp->x_arg_pointer_save_area == 0)
5221 fp->x_arg_pointer_save_area
5222 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5224 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5225 addr = memory_address (Pmode, addr);
5227 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5228 #else
5229 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5230 base = lookup_static_chain (var);
5231 #endif
5232 }
5234 else if (basereg == virtual_stack_vars_rtx)
5236 /* This is the same code as lookup_static_chain, duplicated here to
5237 avoid an extra call to decl_function_context. */
5240 for (link = context_display; link; link = TREE_CHAIN (link))
5241 if (TREE_PURPOSE (link) == context)
5243 base = RTL_EXPR_RTL (TREE_VALUE (link));
5251 /* Use same offset, relative to appropriate static chain or argument
5253 return plus_constant (base, displacement);
5254 }
5256 /* Return the address of the trampoline for entering nested fn FUNCTION.
5257 If necessary, allocate a trampoline (in the stack frame)
5258 and emit rtl to initialize its contents (at entry to this function). */
5260 rtx
5261 trampoline_address (function)
5262 tree function;
5263 {
5264 rtx tramp;
5265 tree link;
5266 tree rtlexp;
5267 struct function *fp;
5268 tree fn_context;
5270 /* Find an existing trampoline and return it. */
5271 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5272 if (TREE_PURPOSE (link) == function)
5273 return
5274 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5276 for (fp = outer_function_chain; fp; fp = fp->next)
5277 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5278 if (TREE_PURPOSE (link) == function)
5279 {
5280 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5281 function);
5282 return round_trampoline_addr (tramp);
5283 }
5285 /* None exists; we must make one. */
5287 /* Find the `struct function' for the function containing FUNCTION. */
5289 fn_context = decl_function_context (function);
5290 if (fn_context != current_function_decl
5291 && fn_context != inline_function_decl)
5292 for (fp = outer_function_chain; fp; fp = fp->next)
5293 if (fp->decl == fn_context)
5294 break;
5296 /* Allocate run-time space for this trampoline
5297 (usually in the defining function's stack frame). */
5298 #ifdef ALLOCATE_TRAMPOLINE
5299 tramp = ALLOCATE_TRAMPOLINE (fp);
5300 #else
5301 /* If rounding needed, allocate extra space
5302 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5303 #ifdef TRAMPOLINE_ALIGNMENT
5304 #define TRAMPOLINE_REAL_SIZE \
5305 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5306 #else
5307 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5308 #endif
5309 if (fp != 0)
5310 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5311 else
5312 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5313 #endif
5315 /* Record the trampoline for reuse and note it for later initialization
5316 by expand_function_end. */
5317 if (fp != 0)
5318 {
5319 push_obstacks (fp->function_maybepermanent_obstack,
5320 fp->function_maybepermanent_obstack);
5321 rtlexp = make_node (RTL_EXPR);
5322 RTL_EXPR_RTL (rtlexp) = tramp;
5323 fp->x_trampoline_list = tree_cons (function, rtlexp,
5324 fp->x_trampoline_list);
5325 pop_obstacks ();
5326 }
5327 else
5328 {
5329 /* Make the RTL_EXPR node temporary, not momentary, so that the
5330 trampoline_list doesn't become garbage. */
5331 int momentary = suspend_momentary ();
5332 rtlexp = make_node (RTL_EXPR);
5333 resume_momentary (momentary);
5335 RTL_EXPR_RTL (rtlexp) = tramp;
5336 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5337 }
5339 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5340 return round_trampoline_addr (tramp);
5341 }
5343 /* Given a trampoline address,
5344 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
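/* E.g. (illustrative): with TRAMPOLINE_ALIGNMENT == 128 bits (16 bytes),
   an address of 0x1003 rounds to (0x1003 + 15) & -16 == 0x1010.  */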
5346 static rtx
5347 round_trampoline_addr (tramp)
5348 rtx tramp;
5349 {
5350 #ifdef TRAMPOLINE_ALIGNMENT
5351 /* Round address up to desired boundary. */
5352 rtx temp = gen_reg_rtx (Pmode);
5353 temp = expand_binop (Pmode, add_optab, tramp,
5354 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5355 temp, 0, OPTAB_LIB_WIDEN);
5356 tramp = expand_binop (Pmode, and_optab, temp,
5357 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5358 temp, 0, OPTAB_LIB_WIDEN);
5359 #endif
5360
5361 return tramp;
5362 }
5363 /* The functions identify_blocks and reorder_blocks provide a way to
5364 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5365 duplicate portions of the RTL code. Call identify_blocks before
5366 changing the RTL, and call reorder_blocks after. */
5368 /* Put all this function's BLOCK nodes including those that are chained
5369 onto the first block into a vector, and return it.
5370 Also store in each NOTE for the beginning or end of a block
5371 the index of that block in the vector.
5372 The arguments are BLOCK, the chain of top-level blocks of the function,
5373 and INSNS, the insn chain of the function. */
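/* For example (editor's sketch): for nested blocks  { a { b } { c } }
   the BLOCK_BEG notes are numbered 1, 2, 3 in insn order, and each
   BLOCK_END note receives the number of the block it closes, so
   reorder_blocks can rebuild the block tree from the notes alone.  */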
5375 tree *
5376 identify_blocks (block, insns)
5377 tree block;
5378 rtx insns;
5379 {
5380 int n_blocks;
5381 tree *block_vector;
5382 int *block_stack;
5383 int depth = 0;
5384 int next_block_number = 1;
5385 int current_block_number = 1;
5386 rtx insn;
5387
5388 if (block == 0)
5389 return 0;
5390
5391 n_blocks = all_blocks (block, 0);
5392 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5393 block_stack = (int *) alloca (n_blocks * sizeof (int));
5395 all_blocks (block, block_vector);
5397 for (insn = insns; insn; insn = NEXT_INSN (insn))
5398 if (GET_CODE (insn) == NOTE)
5400 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5402 block_stack[depth++] = current_block_number;
5403 current_block_number = next_block_number;
5404 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5406 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5408 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5409 current_block_number = block_stack[--depth];
5413 if (n_blocks != next_block_number)
5414 abort ();
5416 return block_vector;
5419 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5420 and a revised instruction chain, rebuild the tree structure
5421 of BLOCK nodes to correspond to the new order of RTL.
5422 The new block tree is inserted below TOP_BLOCK.
5423 Returns the current top-level block. */
5425 tree
5426 reorder_blocks (block_vector, block, insns)
5427 tree *block_vector;
5428 tree block;
5429 rtx insns;
5430 {
5431 tree current_block = block;
5432 rtx insn;
5434 if (block_vector == 0)
5435 return block;
5437 /* Prune the old trees away, so that it doesn't get in the way. */
5438 BLOCK_SUBBLOCKS (current_block) = 0;
5439 BLOCK_CHAIN (current_block) = 0;
5441 for (insn = insns; insn; insn = NEXT_INSN (insn))
5442 if (GET_CODE (insn) == NOTE)
5444 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5446 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5447 /* If we have seen this block before, copy it. */
5448 if (TREE_ASM_WRITTEN (block))
5449 block = copy_node (block);
5450 BLOCK_SUBBLOCKS (block) = 0;
5451 TREE_ASM_WRITTEN (block) = 1;
5452 BLOCK_SUPERCONTEXT (block) = current_block;
5453 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5454 BLOCK_SUBBLOCKS (current_block) = block;
5455 current_block = block;
5456 NOTE_SOURCE_FILE (insn) = 0;
5458 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5460 BLOCK_SUBBLOCKS (current_block)
5461 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5462 current_block = BLOCK_SUPERCONTEXT (current_block);
5463 NOTE_SOURCE_FILE (insn) = 0;
5467 BLOCK_SUBBLOCKS (current_block)
5468 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5469 return current_block;
5472 /* Reverse the order of elements in the chain T of blocks,
5473 and return the new head of the chain (old last element). */
5475 static tree
5476 blocks_nreverse (t)
5477 tree t;
5478 {
5479 register tree prev = 0, decl, next;
5480 for (decl = t; decl; decl = next)
5482 next = BLOCK_CHAIN (decl);
5483 BLOCK_CHAIN (decl) = prev;
5484 prev = decl;
5485 }
5486 return prev;
5487 }
5489 /* Count the subblocks of the list starting with BLOCK, and list them
5490 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5491 blocks. */
5492
5493 static int
5494 all_blocks (block, vector)
5495 tree block;
5496 tree *vector;
5497 {
5498 int n_blocks = 0;
5499
5500 while (block)
5501 {
5502 TREE_ASM_WRITTEN (block) = 0;
5504 /* Record this block. */
5505 if (vector)
5506 vector[n_blocks] = block;
5507
5508 ++n_blocks;
5510 /* Record the subblocks, and their subblocks... */
5511 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5512 vector ? vector + n_blocks : 0);
5513 block = BLOCK_CHAIN (block);
5514 }
5515
5516 return n_blocks;
5517 }
5519 /* Allocate a function structure and reset its contents to the defaults. */
5520 static void
5521 prepare_function_start ()
5522 {
5523 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5525 init_stmt_for_function ();
5527 cse_not_expected = ! optimize;
5529 /* Caller save not needed yet. */
5530 caller_save_needed = 0;
5532 /* No stack slots have been made yet. */
5533 stack_slot_list = 0;
5535 current_function_has_nonlocal_label = 0;
5536 current_function_has_nonlocal_goto = 0;
5538 /* There is no stack slot for handling nonlocal gotos. */
5539 nonlocal_goto_handler_slots = 0;
5540 nonlocal_goto_stack_level = 0;
5542 /* No labels have been declared for nonlocal use. */
5543 nonlocal_labels = 0;
5544 nonlocal_goto_handler_labels = 0;
5546 /* No function calls so far in this function. */
5547 function_call_count = 0;
5549 /* No parm regs have been allocated.
5550 (This is important for output_inline_function.) */
5551 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5553 /* Initialize the RTL mechanism. */
5554 init_emit ();
5556 /* Initialize the queue of pending postincrement and postdecrements,
5557 and some other info in expr.c. */
5558 init_expr ();
5560 /* We haven't done register allocation yet. */
5561 reg_renumber = 0;
5563 init_const_rtx_hash_table ();
5565 /* Set if a call to setjmp is seen. */
5566 current_function_calls_setjmp = 0;
5568 /* Set if a call to longjmp is seen. */
5569 current_function_calls_longjmp = 0;
5571 current_function_calls_alloca = 0;
5572 current_function_contains_functions = 0;
5573 current_function_is_leaf = 0;
5574 current_function_sp_is_unchanging = 0;
5575 current_function_uses_only_leaf_regs = 0;
5576 current_function_has_computed_jump = 0;
5577 current_function_is_thunk = 0;
5579 current_function_returns_pcc_struct = 0;
5580 current_function_returns_struct = 0;
5581 current_function_epilogue_delay_list = 0;
5582 current_function_uses_const_pool = 0;
5583 current_function_uses_pic_offset_table = 0;
5584 current_function_cannot_inline = 0;
5586 current_function->inlinable = 0;
5588 /* We have not yet needed to make a label to jump to for tail-recursion. */
5589 tail_recursion_label = 0;
5591 /* We haven't had a need to make a save area for ap yet. */
5592 arg_pointer_save_area = 0;
5594 /* No stack slots allocated yet. */
5595 frame_offset = 0;
5597 /* No SAVE_EXPRs in this function yet. */
5598 save_expr_regs = 0;
5600 /* No RTL_EXPRs in this function yet. */
5601 rtl_expr_chain = 0;
5603 /* Set up to allocate temporaries. */
5604 init_temp_slots ();
5606 /* Indicate that we need to distinguish between the return value of the
5607 present function and the return value of a function being called. */
5608 rtx_equal_function_value_matters = 1;
5610 /* Indicate that we have not instantiated virtual registers yet. */
5611 virtuals_instantiated = 0;
5613 /* Indicate we have no need of a frame pointer yet. */
5614 frame_pointer_needed = 0;
5616 /* By default assume not varargs or stdarg. */
5617 current_function_varargs = 0;
5618 current_function_stdarg = 0;
5620 /* We haven't made any trampolines for this function yet. */
5621 trampoline_list = 0;
5623 init_pending_stack_adjust ();
5624 inhibit_defer_pop = 0;
5626 current_function_outgoing_args_size = 0;
5629 /* Initialize the rtl expansion mechanism so that we can do simple things
5630 like generate sequences. This is used to provide a context during global
5631 initialization of some passes. */
5632 void
5633 init_dummy_function_start ()
5634 {
5635 prepare_function_start ();
5636 }
5638 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5639 and initialize static variables for generating RTL for the statements
5640 of the function. */
5642 void
5643 init_function_start (subr, filename, line)
5644 tree subr;
5645 char *filename;
5646 int line;
5647 {
5648 prepare_function_start ();
5650 /* Remember this function for later. */
5651 current_function->next_global = all_functions;
5652 all_functions = current_function;
5654 current_function_name = (*decl_printable_name) (subr, 2);
5656 /* Nonzero if this is a nested function that uses a static chain. */
5658 current_function_needs_context
5659 = (decl_function_context (current_function_decl) != 0
5660 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5662 /* Within function body, compute a type's size as soon as it is laid out. */
5663 immediate_size_expand++;
5665 /* Prevent ever trying to delete the first instruction of a function.
5666 Also tell final how to output a linenum before the function prologue.
5667 Note linenums could be missing, e.g. when compiling a Java .class file. */
5669 emit_line_note (filename, line);
5671 /* Make sure first insn is a note even if we don't want linenums.
5672 This makes sure the first insn will never be deleted.
5673 Also, final expects a note to appear there. */
5674 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5676 /* Set flags used by final.c. */
5677 if (aggregate_value_p (DECL_RESULT (subr)))
5678 {
5679 #ifdef PCC_STATIC_STRUCT_RETURN
5680 current_function_returns_pcc_struct = 1;
5681 #else
5682 current_function_returns_struct = 1;
5683 #endif
5684 }
5685 /* Warn if this value is an aggregate type,
5686 regardless of which calling convention we are using for it. */
5687 if (warn_aggregate_return
5688 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5689 warning ("function returns an aggregate");
5691 current_function_returns_pointer
5692 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5695 /* Make sure all values used by the optimization passes have sane
5696 defaults. */
5697 void
5698 init_function_for_compilation ()
5699 {
5701 /* No prologue/epilogue insns yet. */
5702 prologue = epilogue = 0;
5703 }
5705 /* Indicate that the current function uses extra args
5706 not explicitly mentioned in the argument list in any fashion. */
5708 void
5709 mark_varargs ()
5710 {
5711 current_function_varargs = 1;
5712 }
5714 /* Expand a call to __main at the beginning of a possible main function. */
5716 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5717 #undef HAS_INIT_SECTION
5718 #define HAS_INIT_SECTION
5719 #endif
5720
5721 void
5722 expand_main_function ()
5723 {
5724 #if !defined (HAS_INIT_SECTION)
5725 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5726 VOIDmode, 0);
5727 #endif /* not HAS_INIT_SECTION */
5728 }
5730 extern struct obstack permanent_obstack;
5732 /* Start the RTL for a new function, and set variables used for
5733 emitting RTL.
5734 SUBR is the FUNCTION_DECL node.
5735 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5736 the function's parameters, which must be run at any return statement. */
5738 void
5739 expand_function_start (subr, parms_have_cleanups)
5740 tree subr;
5741 int parms_have_cleanups;
5742 {
5743 register int i;
5744 tree tem;
5745 rtx last_ptr = NULL_RTX;
5747 /* Make sure volatile mem refs aren't considered
5748 valid operands of arithmetic insns. */
5749 init_recog_no_volatile ();
5751 /* Set this before generating any memory accesses. */
5752 current_function_check_memory_usage
5753 = (flag_check_memory_usage
5754 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5756 current_function_instrument_entry_exit
5757 = (flag_instrument_function_entry_exit
5758 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5760 /* If function gets a static chain arg, store it in the stack frame.
5761 Do this first, so it gets the first stack slot offset. */
5762 if (current_function_needs_context)
5764 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5766 /* Delay copying static chain if it is not a register to avoid
5767 conflicts with regs used for parameters. */
5768 if (! SMALL_REGISTER_CLASSES
5769 || GET_CODE (static_chain_incoming_rtx) == REG)
5770 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5771 }
5773 /* If the parameters of this function need cleaning up, get a label
5774 for the beginning of the code which executes those cleanups. This must
5775 be done before doing anything with return_label. */
5776 if (parms_have_cleanups)
5777 cleanup_label = gen_label_rtx ();
5778 else
5779 cleanup_label = 0;
5781 /* Make the label for return statements to jump to, if this machine
5782 does not have a one-instruction return and uses an epilogue,
5783 or if it returns a structure, or if it has parm cleanups. */
5784 #ifdef HAVE_return
5785 if (cleanup_label == 0 && HAVE_return
5786 && ! current_function_instrument_entry_exit
5787 && ! current_function_returns_pcc_struct
5788 && ! (current_function_returns_struct && ! optimize))
5789 return_label = 0;
5790 else
5791 return_label = gen_label_rtx ();
5792 #else
5793 return_label = gen_label_rtx ();
5794 #endif
5796 /* Initialize rtx used to return the value. */
5797 /* Do this before assign_parms so that we copy the struct value address
5798 before any library calls that assign parms might generate. */
5800 /* Decide whether to return the value in memory or in a register. */
5801 if (aggregate_value_p (DECL_RESULT (subr)))
5803 /* Returning something that won't go in a register. */
5804 register rtx value_address = 0;
5806 #ifdef PCC_STATIC_STRUCT_RETURN
5807 if (current_function_returns_pcc_struct)
5808 {
5809 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5810 value_address = assemble_static_space (size);
5811 }
5812 else
5813 #endif
5814 {
5815 /* Expect to be passed the address of a place to store the value.
5816 If it is passed as an argument, assign_parms will take care of
5818 if (struct_value_incoming_rtx)
5819 {
5820 value_address = gen_reg_rtx (Pmode);
5821 emit_move_insn (value_address, struct_value_incoming_rtx);
5822 }
5823 }
5824 if (value_address)
5825 {
5826 DECL_RTL (DECL_RESULT (subr))
5827 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5828 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5829 AGGREGATE_TYPE_P (TREE_TYPE
5830 (DECL_RESULT
5831 (subr))));
5832 }
5833 }
5834 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5835 /* If return mode is void, this decl rtl should not be used. */
5836 DECL_RTL (DECL_RESULT (subr)) = 0;
5837 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5838 {
5839 /* If function will end with cleanup code for parms,
5840 compute the return values into a pseudo reg,
5841 which we will copy into the true return register
5842 after the cleanups are done. */
5844 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5846 #ifdef PROMOTE_FUNCTION_RETURN
5847 tree type = TREE_TYPE (DECL_RESULT (subr));
5848 int unsignedp = TREE_UNSIGNED (type);
5850 mode = promote_mode (type, mode, &unsignedp, 1);
5851 #endif
5852
5853 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5854 }
5855 else
5856 /* Scalar, returned in a register. */
5858 #ifdef FUNCTION_OUTGOING_VALUE
5859 DECL_RTL (DECL_RESULT (subr))
5860 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5861 #else
5862 DECL_RTL (DECL_RESULT (subr))
5863 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5864 #endif
5866 /* Mark this reg as the function's return value. */
5867 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5868 {
5869 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5870 /* Needed because we may need to move this to memory
5871 in case it's a named return value whose address is taken. */
5872 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5873 }
5874 }
5876 /* Initialize rtx for parameters and local variables.
5877 In some cases this requires emitting insns. */
5879 assign_parms (subr, 0);
5881 /* Copy the static chain now if it wasn't a register. The delay is to
5882 avoid conflicts with the parameter passing registers. */
5884 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5885 if (GET_CODE (static_chain_incoming_rtx) != REG)
5886 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5888 /* The following was moved from init_function_start.
5889 The move is supposed to make sdb output more accurate. */
5890 /* Indicate the beginning of the function body,
5891 as opposed to parm setup. */
5892 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5894 /* If doing stupid allocation, mark parms as born here. */
5896 if (GET_CODE (get_last_insn ()) != NOTE)
5897 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5898 parm_birth_insn = get_last_insn ();
5902 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5903 use_variable (regno_reg_rtx[i]);
5905 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5906 use_variable (current_function_internal_arg_pointer);
5909 context_display = 0;
5910 if (current_function_needs_context)
5912 /* Fetch static chain values for containing functions. */
5913 tem = decl_function_context (current_function_decl);
5914 /* If not doing stupid register allocation copy the static chain
5915 pointer into a pseudo. If we have small register classes, copy
5916 the value from memory if static_chain_incoming_rtx is a REG. If
5917 we do stupid register allocation, we use the stack address
5919 if (tem && ! obey_regdecls)
5921 /* If the static chain originally came in a register, put it back
5922 there, then move it out in the next insn. The reason for
5923 this peculiar code is to satisfy function integration. */
5924 if (SMALL_REGISTER_CLASSES
5925 && GET_CODE (static_chain_incoming_rtx) == REG)
5926 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5927 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5932 tree rtlexp = make_node (RTL_EXPR);
5934 RTL_EXPR_RTL (rtlexp) = last_ptr;
5935 context_display = tree_cons (tem, rtlexp, context_display);
5936 tem = decl_function_context (tem);
5937 if (tem == 0)
5938 break;
5939 /* Chain thru stack frames, assuming pointer to next lexical frame
5940 is found at the place we always store it. */
5941 #ifdef FRAME_GROWS_DOWNWARD
5942 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5943 #endif
5944 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5945 memory_address (Pmode, last_ptr)));
5947 /* If we are not optimizing, ensure that we know that this
5948 piece of context is live over the entire function. */
5950 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5955 if (current_function_instrument_entry_exit)
5957 rtx fun = DECL_RTL (current_function_decl);
5958 if (GET_CODE (fun) == MEM)
5959 fun = XEXP (fun, 0);
5960 else
5961 abort ();
5962 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5963 fun, Pmode,
5964 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5965 0,
5966 hard_frame_pointer_rtx),
5967 Pmode);
5968 }
5970 /* After the display initializations is where the tail-recursion label
5971 should go, if we end up needing one. Ensure we have a NOTE here
5972 since some things (like trampolines) get placed before this. */
5973 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5975 /* Evaluate now the sizes of any types declared among the arguments. */
5976 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5978 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5979 EXPAND_MEMORY_USE_BAD);
5980 /* Flush the queue in case this parameter declaration has
5981 side-effects. */
5982 emit_queue ();
5983 }
5985 /* Make sure there is a line number after the function entry setup code. */
5986 force_next_line_note ();
5987 }
5989 /* Undo the effects of init_dummy_function_start. */
5990 void
5991 expand_dummy_function_end ()
5992 {
5993 /* End any sequences that failed to be closed due to syntax errors. */
5994 while (in_sequence_p ())
5995 end_sequence ();
5997 /* Outside function body, can't compute type's actual size
5998 until next function's body starts. */
5999 current_function = 0;
6000 }
6002 /* Generate RTL for the end of the current function.
6003 FILENAME and LINE are the current position in the source file.
6005 It is up to language-specific callers to do cleanups for parameters--
6006 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6008 void
6009 expand_function_end (filename, line, end_bindings)
6010 char *filename;
6011 int line;
6012 int end_bindings;
6013 {
6014 tree link;
6017 #ifdef TRAMPOLINE_TEMPLATE
6018 static rtx initial_trampoline;
6021 finish_expr_for_function ();
6023 #ifdef NON_SAVING_SETJMP
6024 /* Don't put any variables in registers if we call setjmp
6025 on a machine that fails to restore the registers. */
6026 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6028 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6029 setjmp_protect (DECL_INITIAL (current_function_decl));
6031 setjmp_protect_args ();
6032 }
6033 #endif
6035 /* Save the argument pointer if a save area was made for it. */
6036 if (arg_pointer_save_area)
6037 {
6038 /* arg_pointer_save_area may not be a valid memory address, so we
6039 have to check it and fix it if necessary. */
6040 rtx seq;
6041 start_sequence ();
6042 emit_move_insn (validize_mem (arg_pointer_save_area),
6043 virtual_incoming_args_rtx);
6044 seq = gen_sequence ();
6045 end_sequence ();
6046 emit_insn_before (seq, tail_recursion_reentry);
6047 }
6049 /* Initialize any trampolines required by this function. */
6050 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6051 {
6052 tree function = TREE_PURPOSE (link);
6053 rtx context = lookup_static_chain (function);
6054 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6055 #ifdef TRAMPOLINE_TEMPLATE
6056 rtx blktramp;
6057 #endif
6058 rtx seq;
6060 #ifdef TRAMPOLINE_TEMPLATE
6061 /* First make sure this compilation has a template for
6062 initializing trampolines. */
6063 if (initial_trampoline == 0)
6065 end_temporary_allocation ();
6067 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6068 resume_temporary_allocation ();
6072 /* Generate insns to initialize the trampoline. */
6074 tramp = round_trampoline_addr (XEXP (tramp, 0));
6075 #ifdef TRAMPOLINE_TEMPLATE
6076 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6077 emit_block_move (blktramp, initial_trampoline,
6078 GEN_INT (TRAMPOLINE_SIZE),
6079 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6081 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6085 /* Put those insns at entry to the containing function (this one). */
6086 emit_insns_before (seq, tail_recursion_reentry);
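  /* Editor's illustration (hypothetical, not part of this file): taking
     the address of a nested function that uses its parent's frame is what
     creates an entry on trampoline_list and triggers the initialization
     emitted above.  */
#if 0
static int
apply (int (*fn) (int), int arg)
{
  return fn (arg);
}

static int
outer (int x)
{
  int add_x (int y) { return x + y; }	/* needs the static chain */
  return apply (add_x, 3);		/* address taken: needs a trampoline */
}
#endif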
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = gen_sequence ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
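  /* Editor's sketch (hypothetical, not part of this file) of what the
     emitted probe amounts to: touch an address beyond the space we might
     need so a guard page faults early.  Assumes a downward-growing stack;
     the constant stands in for the target-defined STACK_CHECK_PROTECT plus
     STACK_CHECK_MAX_FRAME_SIZE.  */
#if 0
static void
probe_stack_sketch (char *stack_pointer)
{
  volatile char *probe = stack_pointer - 16384;	/* hypothetical distance */
  (void) *probe;		/* faults here if the stack cannot grow */
}
#endif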
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }
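  /* Editor's illustration (hypothetical, not part of this file): with both
     -Wunused and -W given, the loop above reports `b' below -- named,
     user-written, and never used -- while staying quiet about artificial
     or unnamed parameters.  */
#if 0
static int
pick_first (int a, int b)	/* warning: unused parameter `b' */
{
  return a;
}
#endif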
  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */
  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label = 0;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
      }
    if (label)
      emit_label (label);
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
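  /* Editor's illustration (hypothetical, not part of this file): a
     function like this leaves the stack pointer displaced at its return
     point, which is why the entry-time stack pointer is saved and
     restored above when EXIT_IGNORE_STACK is zero.  */
#if 0
#include <alloca.h>
#include <string.h>

static char
first_char_of_copy (const char *s)
{
  char *buf = (char *) alloca (strlen (s) + 1);
  strcpy (buf, s);
  return buf[0];
}
#endif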
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
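  /* Editor's illustration (hypothetical, not part of this file): for a
     function such as this, `r' is computed in a pseudo; the move emitted
     above copies it into the FUNCTION_VALUE hard register, and the USE
     keeps that register live through to the return.  */
#if 0
static int
square (int x)
{
  int r = x * x;		/* lives in a pseudo register */
  return r;			/* copied to the hard return register */
}
#endif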
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
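  /* Editor's illustration (hypothetical, not part of this file): an
     aggregate-returning function.  The caller passes a hidden address for
     the result, and the code above additionally leaves that address in the
     value register where debuggers and PCC-style callers expect it.  */
#if 0
struct big { int v[8]; };

static struct big
make_big (int n)
{
  struct big b;
  b.v[0] = n;
  return b;			/* returned via the hidden result pointer */
}
#endif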
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */
#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif
  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
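/* Editor's illustration (hypothetical, not part of this file): a goto that
   leaves a binding contour holding a variable-sized object.  The jump is
   recorded as a fixup when first seen and resolved by expand_fixups above,
   which lets the stack level of the target contour be restored.  */
#if 0
static int
scan (int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      char buf[n];		/* variable-sized: its own binding contour */
      buf[0] = (char) i;
      if (buf[0] == 3)
	goto done;		/* exits the contour: needs a fixup */
    }
  return 0;
done:
  return 1;
}
#endif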
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
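/* Editor's sketch (hypothetical, not part of this file): the shape of the
   structure record_insns builds -- a zero-terminated array of insn UIDs --
   using a made-up make_uid_vec with malloc in place of the compiler's
   obstack allocation; error handling is omitted for brevity.  */
#if 0
#include <stdlib.h>

static int *
make_uid_vec (const int *uids, int len)
{
  int *vec = (int *) malloc ((len + 1) * sizeof (int));
  int i;

  vec[len] = 0;			/* terminator, as contains () expects */
  for (i = 0; i < len; i++)
    vec[i] = uids[i];
  return vec;
}
#endif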
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      if (optimize)
	{
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */
	  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    goto epilogue_done;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we've exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;
	}
      {
	rtx prev, seq, first_use;

	/* Move the USE insns at the end of a function onto a list.  */
	prev = tail;
	if (GET_CODE (prev) == BARRIER
	    || GET_CODE (prev) == NOTE)
	  prev = prev_nonnote_insn (prev);

	first_use = 0;
	if (prev
	    && GET_CODE (prev) == INSN
	    && GET_CODE (PATTERN (prev)) == USE)
	  {
	    /* If the end of the block is the use, grab hold of something
	       else so that we emit barriers etc in the right place.  */
	    if (prev == tail)
	      {
		do
		  tail = PREV_INSN (tail);
		while (GET_CODE (tail) == INSN
		       && GET_CODE (PATTERN (tail)) == USE);
	      }

	    do
	      {
		rtx use = prev;
		prev = prev_nonnote_insn (prev);

		remove_insn (use);
		if (first_use)
		  {
		    NEXT_INSN (use) = first_use;
		    PREV_INSN (first_use) = use;
		  }
		else
		  NEXT_INSN (use) = NULL_RTX;
		first_use = use;
	      }
	    while (prev
		   && GET_CODE (prev) == INSN
		   && GET_CODE (PATTERN (prev)) == USE);
	  }
	/* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	   epilogue insns, the USE insns at the end of a function,
	   the jump insn that returns, and then a BARRIER.  */

	if (GET_CODE (tail) != BARRIER)
	  {
	    prev = next_nonnote_insn (tail);
	    if (!prev || GET_CODE (prev) != BARRIER)
	      emit_barrier_after (tail);
	  }

	seq = gen_epilogue ();
	prev = tail;
	tail = emit_jump_insn_after (seq, tail);

	/* Insert the USE insns immediately before the return insn, which
	   must be the last instruction emitted in the sequence.  */
	if (first_use)
	  emit_insns_before (first_use, tail);
	emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	/* Update the tail of the basic block.  */
	if (bb)
	  bb->end = tail;

	/* Retain a map of the epilogue insns.  */
	epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
epilogue_done:
#endif

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  int len;

  if (prologue)
    {
      register rtx insn, note = 0;
      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (len = 0; prologue[len]; len++)
	;
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if ((len -= contains (insn, prologue)) == 0)
	    {
	      rtx next;

	      /* Find the prologue-end note if we haven't already, and
		 move it to just after the last prologue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = NEXT_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		      break;
		}
	      next = NEXT_INSN (note);

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (BLOCK_HEAD (0) == note)
		BLOCK_HEAD (0) = next;

	      remove_insn (note);
	      add_insn_after (note, insn);
	      break;
	    }
	}
    }
  if (epilogue)
    {
      register rtx insn, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (len = 0; epilogue[len]; len++)
	;
      for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if ((len -= contains (insn, epilogue)) == 0)
	    {
	      /* Find the epilogue-begin note if we haven't already, and
		 move it to just before the first epilogue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = PREV_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		      break;
		}

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (n_basic_blocks
		  && BLOCK_HEAD (n_basic_blocks-1) == insn)
		BLOCK_HEAD (n_basic_blocks-1) = note;

	      remove_insn (note);
	      add_insn_before (note, insn);
	      break;
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}