1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
39 #include "coretypes.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
54 #include "basic-block.h"
59 #include "integrate.h"
60 #include "langhooks.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
/* NOTE(review): the matching #endif lines for the #ifndef blocks below
   appear to have been dropped from this copy of the file -- restore them
   from the original function.c before building.  */
68 #ifndef LOCAL_ALIGNMENT
/* Default: the target requests no extra alignment for locals beyond the
   mode/type alignment already computed by the caller.  */
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
72 #ifndef STACK_ALIGNMENT_NEEDED
/* Default: honor stack-slot alignment requests in assign_stack_local_1;
   a port that aligns its own frame can define this to 0.  */
73 #define STACK_ALIGNMENT_NEEDED 1
/* Stack boundary expressed in bytes rather than bits.  */
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Some systems use __main in a way incompatible with its use in gcc, in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
83 #define NAME__MAIN "__main"
86 /* Round a value to the lowest integer less than it that is a multiple of
87 the required alignment. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91 /* Similar, but round to the next highest integer that meets the
   alignment request.  Same caveats: no division, ALIGN must be a
   power of two.  */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
   compilation of the current function.  */
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 life_analysis has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
133 static VEC(int,heap) *sibcall_epilogue;
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
149 Automatic variables are also assigned temporary slots, at the nesting
150 level where they are defined. They are marked a "kept" so that
151 free_temp_slots will not free them. */
153 struct temp_slot GTY(())
/* NOTE(review): several member declarations (the slot and address rtx,
   align, size, type, in_use, addr_taken, level, keep fields) and the
   closing brace of this struct appear to have been dropped from this
   copy -- only their describing comments survive below.  Restore the
   declarations from the original function.c.  */
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
160 /* The rtx used to reference the slot. */
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
165 /* The alignment (in bits) of the slot. */
167 /* The size, in units, of the slot. */
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
174 /* Nonzero if this temporary is currently in use. */
176 /* Nonzero if this temporary has its address taken. */
178 /* Nesting level at which this slot is being used. */
180 /* Nonzero if this should survive a call to free_temp_slots. */
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
190 /* Forward declarations. */
/* NOTE(review): the continuation line of the assign_stack_local_1
   prototype and the #endif matching the HAVE_epilogue conditional below
   appear to be missing from this copy.  */
192 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
194 static struct temp_slot *find_temp_slot_from_address (rtx);
195 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
196 static void pad_below (struct args_size *, enum machine_mode, tree);
197 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
198 static void reorder_fix_fragments (tree);
199 static int all_blocks (tree, tree *);
200 static tree *get_block_vector (tree, int *);
201 extern tree debug_find_var_in_block_tree (tree, tree);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
205 static int contains (rtx, VEC(int,heap) **);
207 static void emit_return_into_block (basic_block, rtx);
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx keep_stack_depressed (rtx);
212 static void prepare_function_start (tree);
213 static void do_clobber_return_reg (rtx, void *);
214 static void do_use_return_reg (rtx, void *);
215 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 /* Pointer to chain of `struct function' for containing functions. */
218 struct function *outer_function_chain;
220 /* Given a function decl for a containing function,
221 return the `struct function' for it. */
/* NOTE(review): the return type, the declaration of P, the loop body and
   the closing of this function are missing from this copy.  */
224 find_function_data (tree decl)
228 for (p = outer_function_chain; p; p = p->outer)
235 /* Save the current context for compilation of a nested function.
236 This is called from language-specific code. The caller should use
237 the enter_nested langhook to save any language-specific state,
238 since this function knows only about language-independent
   variables.  */
/* NOTE(review): the opening of this function and the lines capturing the
   current cfun into P are missing from this copy.  */
242 push_function_context_to (tree context ATTRIBUTE_UNUSED)
247 init_dummy_function_start ();
/* Link the saved context onto the chain of outer functions.  */
250 p->outer = outer_function_chain;
251 outer_function_chain = p;
/* Let the front end save any language-specific per-function state.  */
253 lang_hooks.function.enter_nested (p);
/* Convenience wrapper: push the context of the function being compiled.  */
259 push_function_context (void)
261 push_function_context_to (current_function_decl);
290 /* Clear out all parts of the state in F that can safely be discarded
291 after the function has been parsed, but not compiled, to let
292 garbage collection reclaim the memory. */
/* NOTE(review): the return type and the statements clearing f->stmt and
   f->cfg are missing from this copy.  */
295 free_after_parsing (struct function *f)
297 /* f->expr->forced_labels is used by code generation. */
298 /* f->emit->regno_reg_rtx is used by code generation. */
299 /* f->varasm is used by code generation. */
300 /* f->eh->eh_return_stub_label is used by code generation. */
302 lang_hooks.function.final (f);
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been compiled, to let garbage collection
307 reclaim the memory. */
/* NOTE(review): the return type and several field-clearing statements
   (f->eh, f->expr, f->emit, f->varasm, f->machine, ...) are missing from
   this copy of the function.  */
310 free_after_compilation (struct function *f)
/* Release the prologue/epilogue INSN_UID vectors; they are per-function.  */
312 VEC_free (int, heap, prologue);
313 VEC_free (int, heap, epilogue);
314 VEC_free (int, heap, sibcall_epilogue);
323 f->x_avail_temp_slots = NULL;
324 f->x_used_temp_slots = NULL;
325 f->arg_offset_rtx = NULL;
326 f->return_rtx = NULL;
327 f->internal_arg_pointer = NULL;
328 f->x_nonlocal_goto_handler_labels = NULL;
329 f->x_return_label = NULL;
330 f->x_naked_return_label = NULL;
331 f->x_stack_slot_list = NULL;
332 f->x_stack_check_probe_note = NULL;
333 f->x_arg_pointer_save_area = NULL;
334 f->x_parm_birth_insn = NULL;
335 f->original_arg_vector = NULL;
336 f->original_decl_initial = NULL;
337 f->epilogue_delay_list = NULL;
340 /* Allocate fixed slots in the stack frame of the current function. */
342 /* Return size needed for stack frame based on slots so far allocated in
   function F.
344 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
345 the caller may have to do that. */
/* NOTE(review): return type and braces are missing from this copy.
   When the frame grows downward, x_frame_offset is negative, hence the
   negation to yield a nonnegative size.  */
348 get_func_frame_size (struct function *f)
350 if (FRAME_GROWS_DOWNWARD)
351 return -f->x_frame_offset;
353 return f->x_frame_offset;
356 /* Return size needed for stack frame based on slots so far allocated.
357 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
358 the caller may have to do that. */
361 get_frame_size (void)
363 return get_func_frame_size (cfun);
366 /* Issue an error message and return TRUE if frame OFFSET overflows in
367 the signed target pointer arithmetics for function FUNC. Otherwise
   return FALSE.  */
/* NOTE(review): the return type and the TRUE/FALSE return statements are
   missing from this copy.  */
371 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
/* Normalize to a nonnegative magnitude regardless of frame direction.  */
373 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
375 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
376 /* Leave room for the fixed part of the frame. */
377 - 64 * UNITS_PER_WORD)
379 error ("%Jtotal size of local objects too large", func);
386 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
387 with machine mode MODE.
389 ALIGN controls the amount of alignment for the address of the slot:
390 0 means according to MODE,
391 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
392 -2 means use BITS_PER_UNIT,
393 positive specifies alignment boundary in bits.
395 We do not round to stack_boundary here.
397 FUNCTION specifies the function to allocate in. */
/* NOTE(review): this copy is missing the return type, the declarations of
   X and ADDR, several `else' lines, the gcc_assert on BLKmode alignment,
   the GEN_INT wrappers in the plus_constant calls, and the final
   `return x;'.  Restore them from the original function.c.  */
400 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
401 struct function *function)
404 int bigend_correction = 0;
405 unsigned int alignment;
406 int frame_off, frame_alignment, frame_phase;
/* ALIGN == 0: derive the alignment from MODE (BLKmode presumably maps to
   BIGGEST_ALIGNMENT -- the selecting `if' lines are missing here).  */
413 alignment = BIGGEST_ALIGNMENT;
415 alignment = GET_MODE_ALIGNMENT (mode);
417 /* Allow the target to (possibly) increase the alignment of this
   stack slot.  */
419 type = lang_hooks.types.type_for_mode (mode, 0);
421 alignment = LOCAL_ALIGNMENT (type, alignment);
/* Convert the request from bits to bytes.  */
423 alignment /= BITS_PER_UNIT;
425 else if (align == -1)
427 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
428 size = CEIL_ROUND (size, alignment);
430 else if (align == -2)
431 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
433 alignment = align / BITS_PER_UNIT;
/* Downward-growing frames reserve the space before computing the offset.  */
435 if (FRAME_GROWS_DOWNWARD)
436 function->x_frame_offset -= size;
438 /* Ignore alignment we can't do with expected alignment of the boundary. */
439 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
440 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
/* Record the largest alignment any slot has required so the prologue can
   align the frame accordingly.  */
442 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
443 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
445 /* Calculate how many bytes the start of local variables is off from
   stack alignment.  */
447 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
448 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
449 frame_phase = frame_off ? frame_alignment - frame_off : 0;
451 /* Round the frame offset to the specified alignment. The default is
452 to always honor requests to align the stack but a port may choose to
453 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
454 if (STACK_ALIGNMENT_NEEDED
458 /* We must be careful here, since FRAME_OFFSET might be negative and
459 division with a negative dividend isn't as well defined as we might
460 like. So we instead assume that ALIGNMENT is a power of two and
461 use logical operations which are unambiguous. */
462 if (FRAME_GROWS_DOWNWARD)
463 function->x_frame_offset
464 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
465 (unsigned HOST_WIDE_INT) alignment)
468 function->x_frame_offset
469 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
470 (unsigned HOST_WIDE_INT) alignment)
474 /* On a big-endian machine, if we are allocating more space than we will use,
475 use the least significant bytes of those that are allocated. */
476 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
477 bigend_correction = size - GET_MODE_SIZE (mode);
479 /* If we have already instantiated virtual registers, return the actual
480 address relative to the frame pointer. */
481 if (function == cfun && virtuals_instantiated)
482 addr = plus_constant (frame_pointer_rtx,
484 (frame_offset + bigend_correction
485 + STARTING_FRAME_OFFSET, Pmode));
487 addr = plus_constant (virtual_stack_vars_rtx,
489 (function->x_frame_offset + bigend_correction,
/* Upward-growing frames reserve the space after computing the address.  */
492 if (!FRAME_GROWS_DOWNWARD)
493 function->x_frame_offset += size;
495 x = gen_rtx_MEM (mode, addr);
496 MEM_NOTRAP_P (x) = 1;
/* Chain the new slot onto the per-function list of stack slots.  */
498 function->x_stack_slot_list
499 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
/* On overflow an error was already issued; reset the offset so that
   compilation can continue without cascading errors.  */
501 if (frame_offset_overflow (function->x_frame_offset, function->decl))
502 function->x_frame_offset = 0;
507 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function (cfun).  */
511 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
513 return assign_stack_local_1 (mode, size, align, cfun);
517 /* Removes temporary slot TEMP from LIST. */
/* NOTE(review): the guarding `if' lines (and the branch updating *LIST
   when TEMP is the list head) are missing from this copy.  */
520 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
523 temp->next->prev = temp->prev;
525 temp->prev->next = temp->next;
/* Detach TEMP completely so it can be re-inserted elsewhere.  */
529 temp->prev = temp->next = NULL;
532 /* Inserts temporary slot TEMP to LIST. */
/* NOTE(review): the lines setting temp->next/temp->prev and the final
   `*list = temp;' are missing from this copy.  */
535 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
539 (*list)->prev = temp;
544 /* Returns the list of used temp slots at LEVEL. */
546 static struct temp_slot **
547 temp_slots_at_level (int level)
/* Lazily create the varray on first use, then grow it until LEVEL is a
   valid index.  */
550 if (!used_temp_slots)
551 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
553 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
554 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
556 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
559 /* Returns the maximal temporary slot level. */
/* NOTE(review): the return type and the `return -1;' for the empty case
   are missing from this copy.  */
562 max_slot_level (void)
564 if (!used_temp_slots)
567 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
570 /* Moves temporary slot TEMP to LEVEL. */
573 move_slot_to_level (struct temp_slot *temp, int level)
575 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
576 insert_slot_to_list (temp, temp_slots_at_level (level));
580 /* Make temporary slot TEMP available. */
583 make_slot_available (struct temp_slot *temp)
585 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
586 insert_slot_to_list (temp, &avail_temp_slots);
591 /* Allocate a temporary stack slot and record it for possible later
   reuse.
594 MODE is the machine mode to be given to the returned rtx.
596 SIZE is the size in units of the space required. We do no rounding here
597 since assign_stack_local will do any required rounding.
599 KEEP is 1 if this slot is to be retained after a call to
600 free_temp_slots. Automatic variables for a block are allocated
601 with this flag. KEEP values of 2 or 3 were needed respectively
602 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
603 or for SAVE_EXPRs, but they are now unused.
605 TYPE is the type that will be used for the stack slot. */
/* NOTE(review): this copy is missing the return type, the TYPE/KEEP
   parameter line, the declarations of SLOT and ALIGN, and various braces
   and `else' lines throughout the body.  Restore from the original.  */
608 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
612 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
615 /* If SIZE is -1 it means that somebody tried to allocate a temporary
616 of a variable size. */
617 gcc_assert (size != -1);
619 /* These are now unused. */
620 gcc_assert (keep <= 1);
/* Choose the base alignment from MODE, then let the target raise it.  */
623 align = BIGGEST_ALIGNMENT;
625 align = GET_MODE_ALIGNMENT (mode);
628 type = lang_hooks.types.type_for_mode (mode, 0);
631 align = LOCAL_ALIGNMENT (type, align);
633 /* Try to find an available, already-allocated temporary of the proper
634 mode which meets the size and alignment requirements. Choose the
635 smallest one with the closest alignment.
637 If assign_stack_temp is called outside of the tree->rtl expansion,
638 we cannot reuse the stack slots (that may still refer to
639 VIRTUAL_STACK_VARS_REGNUM). */
640 if (!virtuals_instantiated)
642 for (p = avail_temp_slots; p; p = p->next)
644 if (p->align >= align && p->size >= size
645 && GET_MODE (p->slot) == mode
646 && objects_must_conflict_p (p->type, type)
647 && (best_p == 0 || best_p->size > p->size
648 || (best_p->size == p->size && best_p->align > p->align)))
/* An exact match can be used immediately; stop searching.  */
650 if (p->align == align && p->size == size)
653 cut_slot_from_list (selected, &avail_temp_slots);
662 /* Make our best, if any, the one to use. */
666 cut_slot_from_list (selected, &avail_temp_slots);
668 /* If there are enough aligned bytes left over, make them into a new
669 temp_slot so that the extra bytes don't get wasted. Do this only
670 for BLKmode slots, so that we can be sure of the alignment. */
671 if (GET_MODE (best_p->slot) == BLKmode)
673 int alignment = best_p->align / BITS_PER_UNIT;
674 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
676 if (best_p->size - rounded_size >= alignment)
/* Split the tail of BEST_P off into a fresh available slot.  */
678 p = ggc_alloc (sizeof (struct temp_slot));
679 p->in_use = p->addr_taken = 0;
680 p->size = best_p->size - rounded_size;
681 p->base_offset = best_p->base_offset + rounded_size;
682 p->full_size = best_p->full_size - rounded_size;
683 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
684 p->align = best_p->align;
686 p->type = best_p->type;
687 insert_slot_to_list (p, &avail_temp_slots);
689 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
692 best_p->size = rounded_size;
693 best_p->full_size = rounded_size;
698 /* If we still didn't find one, make a new temporary. */
701 HOST_WIDE_INT frame_offset_old = frame_offset;
703 p = ggc_alloc (sizeof (struct temp_slot));
705 /* We are passing an explicit alignment request to assign_stack_local.
706 One side effect of that is assign_stack_local will not round SIZE
707 to ensure the frame offset remains suitably aligned.
709 So for requests which depended on the rounding of SIZE, we go ahead
710 and round it now. We also make sure ALIGNMENT is at least
711 BIGGEST_ALIGNMENT. */
712 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
713 p->slot = assign_stack_local (mode,
715 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
721 /* The following slot size computation is necessary because we don't
722 know the actual size of the temporary slot until assign_stack_local
723 has performed all the frame alignment and size rounding for the
724 requested temporary. Note that extra space added for alignment
725 can be either above or below this stack slot depending on which
726 way the frame grows. We include the extra space if and only if it
727 is above this slot. */
728 if (FRAME_GROWS_DOWNWARD)
729 p->size = frame_offset_old - frame_offset;
733 /* Now define the fields used by combine_temp_slots. */
734 if (FRAME_GROWS_DOWNWARD)
736 p->base_offset = frame_offset;
737 p->full_size = frame_offset_old - frame_offset;
741 p->base_offset = frame_offset_old;
742 p->full_size = frame_offset - frame_offset_old;
/* Register the chosen slot at the current nesting level.  */
753 p->level = temp_slot_level;
756 pp = temp_slots_at_level (p->level);
757 insert_slot_to_list (p, pp);
759 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
760 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
761 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
763 /* If we know the alias set for the memory that will be used, use
764 it. If there's no TYPE, then we don't know anything about the
765 alias set for the memory. */
766 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
767 set_mem_align (slot, align);
769 /* If a type is specified, set the relevant flags. */
772 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
773 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
775 MEM_NOTRAP_P (slot) = 1;
780 /* Allocate a temporary stack slot and record it for possible later
781 reuse. First three arguments are same as in preceding function. */
784 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
786 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
789 /* Assign a temporary.
790 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
791 and so that should be used in error messages. In either case, we
792 allocate of the given type.
793 KEEP is as for assign_stack_temp.
794 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
795 it is 0 if a register is OK.
796 DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */
/* NOTE(review): the return type, the declarations of DECL/TYPE/TMP/
   SIZE_TREE/UNSIGNEDP, `else' lines, and the returns after the stack-slot
   branch are missing from this copy.  */
800 assign_temp (tree type_or_decl, int keep, int memory_required,
801 int dont_promote ATTRIBUTE_UNUSED)
804 enum machine_mode mode;
/* Split TYPE_OR_DECL into the decl (for diagnostics) and its type.  */
809 if (DECL_P (type_or_decl))
810 decl = type_or_decl, type = TREE_TYPE (decl);
812 decl = NULL, type = type_or_decl;
814 mode = TYPE_MODE (type);
816 unsignedp = TYPE_UNSIGNED (type);
/* BLKmode values and explicitly memory-required values get a stack slot;
   everything else gets a pseudo register (see gen_reg_rtx below).  */
819 if (mode == BLKmode || memory_required)
821 HOST_WIDE_INT size = int_size_in_bytes (type);
825 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
826 problems with allocating the stack space. */
830 /* Unfortunately, we don't yet know how to allocate variable-sized
831 temporaries. However, sometimes we have a fixed upper limit on
832 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
833 instead. This is the case for Chill variable-sized strings. */
834 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
835 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
836 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
837 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1)
839 /* If we still haven't been able to get a size, see if the language
840 can compute a maximum size. */
842 && (size_tree = lang_hooks.types.max_size (type)) != 0
843 && host_integerp (size_tree, 1))
844 size = tree_low_cst (size_tree, 1);
846 /* The size of the temporary may be too large to fit into an integer. */
847 /* ??? Not sure this should happen except for user silliness, so limit
848 this to things that aren't compiler-generated temporaries. The
849 rest of the time we'll die in assign_stack_temp_for_type. */
850 if (decl && size == -1
851 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
853 error ("size of variable %q+D is too large", decl);
857 tmp = assign_stack_temp_for_type (mode, size, keep, type);
/* Register case: possibly widen MODE per target promotion rules first.  */
863 mode = promote_mode (type, mode, &unsignedp, 0);
866 return gen_reg_rtx (mode);
869 /* Combine temporary stack slots which are adjacent on the stack.
871 This allows for better use of already allocated stack space. This is only
872 done for BLKmode slots because we can be sure that we won't have alignment
873 problems in this case. */
/* NOTE(review): the declarations of NUM_SLOTS, the `next = p->next' /
   `next_q = q->next' updates, the early `return's, the delete_q flag
   handling and several braces are missing from this copy.  */
876 combine_temp_slots (void)
878 struct temp_slot *p, *q, *next, *next_q;
881 /* We can't combine slots, because the information about which slot
882 is in which alias set will be lost. */
883 if (flag_strict_aliasing)
886 /* If there are a lot of temp slots, don't do anything unless
887 high levels of optimization. */
888 if (! flag_expensive_optimizations)
889 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
890 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
/* Scan every pair of available BLKmode slots for adjacency.  */
893 for (p = avail_temp_slots; p; p = next)
899 if (GET_MODE (p->slot) != BLKmode)
902 for (q = p->next; q; q = next_q)
908 if (GET_MODE (q->slot) != BLKmode)
911 if (p->base_offset + p->full_size == q->base_offset)
913 /* Q comes after P; combine Q into P. */
915 p->full_size += q->full_size;
918 else if (q->base_offset + q->full_size == p->base_offset)
920 /* P comes after Q; combine P into Q. */
922 q->full_size += p->full_size;
927 cut_slot_from_list (q, &avail_temp_slots);
930 /* Either delete P or advance past it. */
932 cut_slot_from_list (p, &avail_temp_slots);
936 /* Find the temp slot corresponding to the object at address X.
   Returns the slot, or (in the dropped tail of this function) 0 if no
   match is found.  */
/* NOTE(review): the declarations of P, NEXT and I, the `return p;' lines
   and the final `return 0;' are missing from this copy.  */
938 static struct temp_slot *
939 find_temp_slot_from_address (rtx x)
/* Walk every nesting level, newest first.  */
945 for (i = max_slot_level (); i >= 0; i--)
946 for (p = *temp_slots_at_level (i); p; p = p->next)
/* Match either the slot's own address or a constant offset into the
   slot expressed relative to the virtual stack-vars register.  */
948 if (XEXP (p->slot, 0) == x
950 || (GET_CODE (x) == PLUS
951 && XEXP (x, 0) == virtual_stack_vars_rtx
952 && GET_CODE (XEXP (x, 1)) == CONST_INT
953 && INTVAL (XEXP (x, 1)) >= p->base_offset
954 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
/* Also match any recorded alias addresses for the slot.  */
957 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
958 for (next = p->address; next; next = XEXP (next, 1))
959 if (XEXP (next, 0) == x)
963 /* If we have a sum involving a register, see if it points to a temp
   slot.  */
965 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
966 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
968 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
969 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
975 /* Indicate that NEW is an alternate way of referring to the temp slot
976 that previously was known by OLD. */
/* NOTE(review): the declaration of P, several `return's, braces and the
   `p->address = new;' branch for the first-alias case are missing from
   this copy.  */
979 update_temp_slot_address (rtx old, rtx new)
/* Nothing to record if the two addresses are already identical.  */
983 if (rtx_equal_p (old, new))
986 p = find_temp_slot_from_address (old);
988 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
989 is a register, see if one operand of the PLUS is a temporary
990 location. If so, NEW points into it. Otherwise, if both OLD and
991 NEW are a PLUS and if there is a register in common between them.
992 If so, try a recursive call on those values. */
995 if (GET_CODE (old) != PLUS)
1000 update_temp_slot_address (XEXP (old, 0), new);
1001 update_temp_slot_address (XEXP (old, 1), new);
1004 else if (GET_CODE (new) != PLUS)
/* Both are PLUS: recurse on the operand pairs that share a term.  */
1007 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1008 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1009 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1010 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1011 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1012 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1013 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1014 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1019 /* Otherwise add an alias for the temp's address. */
1020 else if (p->address == 0)
/* Promote a single recorded address to an EXPR_LIST before chaining.  */
1024 if (GET_CODE (p->address) != EXPR_LIST)
1025 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1027 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1031 /* If X could be a reference to a temporary slot, mark the fact that its
1032 address was taken. */
/* NOTE(review): the early `return' for X == 0, braces, and the
   `p->addr_taken = 1;' statement are missing from this copy.  */
1035 mark_temp_addr_taken (rtx x)
1037 struct temp_slot *p;
1042 /* If X is not in memory or is at a constant address, it cannot be in
1043 a temporary slot. */
1044 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1047 p = find_temp_slot_from_address (XEXP (x, 0));
1052 /* If X could be a reference to a temporary slot, mark that slot as
1053 belonging to the to one level higher than the current level. If X
1054 matched one of our slots, just mark that one. Otherwise, we can't
1055 easily predict which it is, so upgrade all of them. Kept slots
1056 need not be touched.
1058 This is called when an ({...}) construct occurs and a statement
1059 returns a value in memory. */
/* NOTE(review): the `next = p->next' loop updates, the `!p->keep' tests,
   several `return's and braces are missing throughout this copy.  */
1062 preserve_temp_slots (rtx x)
1064 struct temp_slot *p = 0, *next;
1066 /* If there is no result, we still might have some objects whose address
1067 were taken, so we need to make sure they stay around. */
1070 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1075 move_slot_to_level (p, temp_slot_level - 1);
1081 /* If X is a register that is being used as a pointer, see if we have
1082 a temporary slot we know it points to. To be consistent with
1083 the code below, we really should preserve all non-kept slots
1084 if we can't find a match, but that seems to be much too costly. */
1085 if (REG_P (x) && REG_POINTER (x))
1086 p = find_temp_slot_from_address (x);
1088 /* If X is not in memory or is at a constant address, it cannot be in
1089 a temporary slot, but it can contain something whose address was
   taken.  */
1091 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1093 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1098 move_slot_to_level (p, temp_slot_level - 1);
1104 /* First see if we can find a match. */
1106 p = find_temp_slot_from_address (XEXP (x, 0));
1110 /* Move everything at our level whose address was taken to our new
1111 level in case we used its address. */
1112 struct temp_slot *q;
1114 if (p->level == temp_slot_level)
1116 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1120 if (p != q && q->addr_taken)
1121 move_slot_to_level (q, temp_slot_level - 1);
1124 move_slot_to_level (p, temp_slot_level - 1);
1130 /* Otherwise, preserve all non-kept slots at this level. */
1131 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1136 move_slot_to_level (p, temp_slot_level - 1);
1140 /* Free all temporaries used so far. This is normally called at the
1141 end of generating code for a statement. */
/* NOTE(review): the `next = p->next' updates, the `if (!p->keep)' test
   and braces are missing from this copy.  */
1144 free_temp_slots (void)
1146 struct temp_slot *p, *next;
1148 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1153 make_slot_available (p);
/* Coalesce adjacent freed slots so the space can be reused.  */
1156 combine_temp_slots ();
1159 /* Push deeper into the nesting level for stack temporaries. */
/* NOTE(review): the `temp_slot_level++;' body of this function is missing
   from this copy.  */
1162 push_temp_slots (void)
1167 /* Pop a temporary nesting level. All slots in use in the current level
   are freed (the dropped comment tail and the level decrement are
   missing from this copy).  */
1171 pop_temp_slots (void)
1173 struct temp_slot *p, *next;
1175 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1178 make_slot_available (p);
1181 combine_temp_slots ();
1186 /* Initialize temporary slots. */
1189 init_temp_slots (void)
1191 /* We have not allocated any temporaries yet. */
1192 avail_temp_slots = 0;
1193 used_temp_slots = 0;
1194 temp_slot_level = 0;
1197 /* These routines are responsible for converting virtual register references
1198 to the actual hard register references once RTL generation is complete.
1200 The following four variables are used for communication between the
1201 routines. They contain the offsets of the virtual registers from their
1202 respective hard registers. */
/* Offset of virtual_incoming_args_rtx from arg_pointer_rtx
   (see instantiate_new_reg).  */
1204 static int in_arg_offset;
/* Offset of virtual_stack_vars_rtx from frame_pointer_rtx.  */
1205 static int var_offset;
/* Offset of virtual_stack_dynamic_rtx from stack_pointer_rtx.  */
1206 static int dynamic_offset;
/* Offset of virtual_outgoing_args_rtx from stack_pointer_rtx.  */
1207 static int out_arg_offset;
/* Offset of virtual_cfa_rtx from the frame pointer or arg pointer,
   depending on FRAME_POINTER_CFA_OFFSET (see instantiate_new_reg).  */
1208 static int cfa_offset;
1210 /* In most machines, the stack pointer register is equivalent to the bottom
1213 #ifndef STACK_POINTER_OFFSET
1214 #define STACK_POINTER_OFFSET 0
/* Default used when the target does not override STACK_DYNAMIC_OFFSET.  */
1217 /* If not defined, pick an appropriate default for the offset of dynamically
1218 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1219 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1221 #ifndef STACK_DYNAMIC_OFFSET
1223 /* The bottom of the stack points to the actual arguments. If
1224 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1225 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1226 stack space for register parameters is not pushed by the caller, but
1227 rather part of the fixed stack areas and hence not included in
1228 `current_function_outgoing_args_size'. Nevertheless, we must allow
1229 for it when allocating stack dynamic objects. */
1231 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1232 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1233 ((ACCUMULATE_OUTGOING_ARGS \
1234 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1235 + (STACK_POINTER_OFFSET)) \
1238 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1239 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1240 + (STACK_POINTER_OFFSET))
1245 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1246 is a virtual register, return the equivalent hard register and set the
1247 offset indirectly through the pointer. Otherwise, return 0. */
/* The offsets come from the in_arg_offset/var_offset/dynamic_offset/
   out_arg_offset/cfa_offset statics, which instantiate_virtual_regs
   computes once per function.  */
1250 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1253 HOST_WIDE_INT offset;
1255 if (x == virtual_incoming_args_rtx)
1256 new = arg_pointer_rtx, offset = in_arg_offset;
1257 else if (x == virtual_stack_vars_rtx)
1258 new = frame_pointer_rtx, offset = var_offset;
1259 else if (x == virtual_stack_dynamic_rtx)
1260 new = stack_pointer_rtx, offset = dynamic_offset;
1261 else if (x == virtual_outgoing_args_rtx)
1262 new = stack_pointer_rtx, offset = out_arg_offset;
1263 else if (x == virtual_cfa_rtx)
/* The CFA is expressed relative to the frame pointer on targets that
   define FRAME_POINTER_CFA_OFFSET, else relative to the arg pointer.  */
1265 #ifdef FRAME_POINTER_CFA_OFFSET
1266 new = frame_pointer_rtx;
1268 new = arg_pointer_rtx;
1270 offset = cfa_offset;
1279 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1280 Instantiate any virtual registers present inside of *LOC. The expression
1281 is simplified, as much as possible, but is not to be considered "valid"
1282 in any sense implied by the target. If any change is made, set CHANGED
/* DATA is the CHANGED flag (a bool *), or NULL when the caller does not
   care about change tracking.  */
1286 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1288 HOST_WIDE_INT offset;
1289 bool *changed = (bool *) data;
1296 switch (GET_CODE (x))
/* A bare virtual register becomes (plus hard_reg offset).  */
1299 new = instantiate_new_reg (x, &offset);
1302 *loc = plus_constant (new, offset);
/* (plus (virtual-reg) X): fold the instantiation offset into the sum.  */
1309 new = instantiate_new_reg (XEXP (x, 0), &offset);
1312 new = plus_constant (new, offset);
1313 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1319 /* FIXME -- from old code */
1320 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1321 we can commute the PLUS and SUBREG because pointers into the
1322 frame are well-behaved. */
1332 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1333 matches the predicate for insn CODE operand OPERAND. */
1336 safe_insn_predicate (int code, int operand, rtx x)
1338 const struct insn_operand_data *op_data;
1343 op_data = &insn_data[code].operand[operand];
/* An operand with no predicate accepts anything.  */
1344 if (op_data->predicate == NULL)
1347 return op_data->predicate (x, op_data->mode);
1350 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1351 registers present inside of insn. The result will be a valid insn. */
1354 instantiate_virtual_regs_in_insn (rtx insn)
1356 HOST_WIDE_INT offset;
1358 bool any_change = false;
1359 rtx set, new, x, seq;
1361 /* There are some special cases to be handled first. */
1362 set = single_set (insn);
1365 /* We're allowed to assign to a virtual register. This is interpreted
1366 to mean that the underlying register gets assigned the inverse
1367 transformation. This is used, for example, in the handling of
/* Special case 1: SET_DEST is itself a virtual register.  Emit the
   compensating arithmetic before INSN so the hard register ends up
   holding the intended value.  */
1369 new = instantiate_new_reg (SET_DEST (set), &offset);
1374 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1375 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1377 x = force_operand (x, new);
1379 emit_move_insn (new, x);
1384 emit_insn_before (seq, insn);
1389 /* Handle a straight copy from a virtual register by generating a
1390 new add insn. The difference between this and falling through
1391 to the generic case is avoiding a new pseudo and eliminating a
1392 move insn in the initial rtl stream. */
1393 new = instantiate_new_reg (SET_SRC (set), &offset);
1394 if (new && offset != 0
1395 && REG_P (SET_DEST (set))
1396 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
/* Compute dest = hard_reg + offset directly into the destination.  */
1400 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1401 new, GEN_INT (offset), SET_DEST (set),
1402 1, OPTAB_LIB_WIDEN);
1403 if (x != SET_DEST (set))
1404 emit_move_insn (SET_DEST (set), x);
1409 emit_insn_before (seq, insn);
/* Fall back to the general case: extract the operands and fix each
   one up in place.  */
1414 extract_insn (insn);
1415 insn_code = INSN_CODE (insn);
1417 /* Handle a plus involving a virtual register by determining if the
1418 operands remain valid if they're modified in place. */
1419 if (GET_CODE (SET_SRC (set)) == PLUS
1420 && recog_data.n_operands >= 3
1421 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1422 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1423 && GET_CODE (recog_data.operand[2]) == CONST_INT
1424 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
/* Fold the instantiation offset into the existing constant addend.  */
1426 offset += INTVAL (recog_data.operand[2]);
1428 /* If the sum is zero, then replace with a plain move. */
1430 && REG_P (SET_DEST (set))
1431 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1434 emit_move_insn (SET_DEST (set), new);
1438 emit_insn_before (seq, insn);
1443 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1445 /* Using validate_change and apply_change_group here leaves
1446 recog_data in an invalid state. Since we know exactly what
1447 we want to check, do those two by hand. */
1448 if (safe_insn_predicate (insn_code, 1, new)
1449 && safe_insn_predicate (insn_code, 2, x))
1451 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1452 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1455 /* Fall through into the regular operand fixup loop in
1456 order to take care of operands other than 1 and 2. */
1462 extract_insn (insn);
1463 insn_code = INSN_CODE (insn);
1466 /* In the general case, we expect virtual registers to appear only in
1467 operands, and then only as either bare registers or inside memories. */
1468 for (i = 0; i < recog_data.n_operands; ++i)
1470 x = recog_data.operand[i];
1471 switch (GET_CODE (x))
/* MEM operand: instantiate inside the address expression.  */
1475 rtx addr = XEXP (x, 0);
1476 bool changed = false;
1478 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1483 x = replace_equiv_address (x, addr);
1487 emit_insn_before (seq, insn);
/* REG operand: a bare virtual register.  */
1492 new = instantiate_new_reg (x, &offset);
1501 /* Careful, special mode predicates may have stuff in
1502 insn_data[insn_code].operand[i].mode that isn't useful
1503 to us for computing a new value. */
1504 /* ??? Recognize address_operand and/or "p" constraints
1505 to see if (plus new offset) is valid before we put
1506 this through expand_simple_binop. */
1507 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1508 GEN_INT (offset), NULL_RTX,
1509 1, OPTAB_LIB_WIDEN);
1512 emit_insn_before (seq, insn);
/* SUBREG of a virtual register: instantiate the inner reg, then
   rebuild the subreg in the operand's mode.  */
1517 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1523 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1524 GEN_INT (offset), NULL_RTX,
1525 1, OPTAB_LIB_WIDEN);
1528 emit_insn_before (seq, insn);
1530 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1531 GET_MODE (new), SUBREG_BYTE (x));
1538 /* At this point, X contains the new value for the operand.
1539 Validate the new value vs the insn predicate. Note that
1540 asm insns will have insn_code -1 here. */
1541 if (!safe_insn_predicate (insn_code, i, x))
1542 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1544 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1550 /* Propagate operand changes into the duplicates. */
1551 for (i = 0; i < recog_data.n_dups; ++i)
1552 *recog_data.dup_loc[i]
1553 = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1555 /* Force re-recognition of the instruction for validation. */
1556 INSN_CODE (insn) = -1;
/* For asms we can only diagnose an impossible constraint; for normal
   insns a failure to re-recognize is a fatal internal error.  */
1559 if (asm_noperands (PATTERN (insn)) >= 0)
1561 if (!check_asm_operands (PATTERN (insn)))
1563 error_for_asm (insn, "impossible constraint in %<asm%>");
1569 if (recog_memoized (insn) < 0)
1570 fatal_insn_not_found (insn);
1574 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1575 do any instantiation required. */
1578 instantiate_decl (rtx x)
1585 /* If this is a CONCAT, recurse for the pieces. */
1586 if (GET_CODE (x) == CONCAT)
1588 instantiate_decl (XEXP (x, 0));
1589 instantiate_decl (XEXP (x, 1));
1593 /* If this is not a MEM, no need to do anything. Similarly if the
1594 address is a constant or a register that is not a virtual register. */
1599 if (CONSTANT_P (addr)
1601 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1602 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
/* Rewrite any virtual registers inside the MEM's address.  */
1605 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1608 /* Helper for instantiate_decls called via walk_tree: Process all decls
1609 in the given DECL_VALUE_EXPR. */
1612 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
/* Only decls that already have RTL assigned need instantiation.  */
1618 if (DECL_P (t) && DECL_RTL_SET_P (t))
1619 instantiate_decl (DECL_RTL (t));
1624 /* Subroutine of instantiate_decls: Process all decls in the given
1625 BLOCK node and all its subblocks. */
1628 instantiate_decls_1 (tree let)
1632 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1634 if (DECL_RTL_SET_P (t))
1635 instantiate_decl (DECL_RTL (t));
/* A variable with a DECL_VALUE_EXPR may reference other decls; walk
   the expression and instantiate those too.  */
1636 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1638 tree v = DECL_VALUE_EXPR (t);
1639 walk_tree (&v, instantiate_expr, NULL, NULL);
1643 /* Process all subblocks. */
1644 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1645 instantiate_decls_1 (t);
1648 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1649 all virtual registers in their DECL_RTL's. */
1652 instantiate_decls (tree fndecl)
1656 /* Process all parameters of the function. */
1657 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
/* Each parameter has both a DECL_RTL (its home inside the function)
   and a DECL_INCOMING_RTL (where the caller left it); fix up both.  */
1659 instantiate_decl (DECL_RTL (decl));
1660 instantiate_decl (DECL_INCOMING_RTL (decl));
1661 if (DECL_HAS_VALUE_EXPR_P (decl))
1663 tree v = DECL_VALUE_EXPR (decl);
1664 walk_tree (&v, instantiate_expr, NULL, NULL);
1668 /* Now process all variables defined in the function or its subblocks. */
1669 instantiate_decls_1 (DECL_INITIAL (fndecl));
1672 /* Pass through the INSNS of function FNDECL and convert virtual register
1673 references to hard register references. */
1676 instantiate_virtual_regs (void)
1680 /* Compute the offsets to use for this function. */
1681 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1682 var_offset = STARTING_FRAME_OFFSET;
1683 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1684 out_arg_offset = STACK_POINTER_OFFSET;
1685 #ifdef FRAME_POINTER_CFA_OFFSET
1686 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1688 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1691 /* Initialize recognition, indicating that volatile is OK. */
1694 /* Scan through all the insns, instantiating every virtual register still
1696 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1699 /* These patterns in the instruction stream can never be recognized.
1700 Fortunately, they shouldn't contain virtual registers either. */
1701 if (GET_CODE (PATTERN (insn)) == USE
1702 || GET_CODE (PATTERN (insn)) == CLOBBER
1703 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1704 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1705 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1708 instantiate_virtual_regs_in_insn (insn);
1710 if (INSN_DELETED_P (insn))
/* NOTE(review): "®_NOTES" below is mojibake -- the "&RE" of
   "&REG_NOTES" was mis-decoded into U+00AE.  The line must read
   "for_each_rtx (&REG_NOTES (insn), ...)"; fix the encoding.  */
1713 for_each_rtx (®_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1715 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1716 if (GET_CODE (insn) == CALL_INSN)
1717 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1718 instantiate_virtual_regs_in_rtx, NULL);
1721 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1722 instantiate_decls (current_function_decl);
1724 /* Indicate that, from now on, assign_stack_local should use
1725 frame_pointer_rtx. */
1726 virtuals_instantiated = 1;
/* Pass descriptor for the virtual-register instantiation pass; the
   execute hook is instantiate_virtual_regs and the function is dumped
   afterwards (TODO_dump_func).  */
1730 struct tree_opt_pass pass_instantiate_virtual_regs =
1734 instantiate_virtual_regs, /* execute */
1737 0, /* static_pass_number */
1739 0, /* properties_required */
1740 0, /* properties_provided */
1741 0, /* properties_destroyed */
1742 0, /* todo_flags_start */
1743 TODO_dump_func, /* todo_flags_finish */
1748 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1749 This means a type for which function calls must pass an address to the
1750 function or get an address back from the function.
1751 EXP may be a type node or an expression (whose type is tested). */
/* FNTYPE may be a function type, a call expression, or a function decl;
   the switch below normalizes it to the function's type.  */
1754 aggregate_value_p (tree exp, tree fntype)
1756 int i, regno, nregs;
1759 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1762 switch (TREE_CODE (fntype))
1765 fntype = get_callee_fndecl (fntype);
1766 fntype = fntype ? TREE_TYPE (fntype) : 0;
1769 fntype = TREE_TYPE (fntype);
1774 case IDENTIFIER_NODE:
1778 /* We don't expect other rtl types here. */
1782 if (TREE_CODE (type) == VOID_TYPE)
1784 /* If the front end has decided that this needs to be passed by
1785 reference, do so. */
1786 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1787 && DECL_BY_REFERENCE (exp))
/* Defer to the target's return_in_memory hook.  */
1789 if (targetm.calls.return_in_memory (type, fntype))
1791 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1792 and thus can't be returned in registers. */
1793 if (TREE_ADDRESSABLE (type))
1795 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1797 /* Make sure we have suitable call-clobbered regs to return
1798 the value in; if not, we must return it in memory. */
1799 reg = hard_function_value (type, 0, fntype, 0);
1801 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1806 regno = REGNO (reg);
1807 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
/* Every register in the return span must be call-clobbered.  */
1808 for (i = 0; i < nregs; i++)
1809 if (! call_used_regs[regno + i])
1814 /* Return true if we should assign DECL a pseudo register; false if it
1815 should live on the local stack. */
1818 use_register_for_decl (tree decl)
1820 /* Honor volatile. */
1821 if (TREE_SIDE_EFFECTS (decl))
1824 /* Honor addressability. */
1825 if (TREE_ADDRESSABLE (decl))
1828 /* Only register-like things go in registers. */
1829 if (DECL_MODE (decl) == BLKmode)
1832 /* If -ffloat-store specified, don't put explicit float variables
1834 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1835 propagates values across these stores, and it probably shouldn't. */
1836 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1839 /* If we're not interested in tracking debugging information for
1840 this decl, then we can certainly put it in a register. */
1841 if (DECL_IGNORED_P (decl))
/* Otherwise prefer a register when optimizing, or when the user
   explicitly asked for one with the `register' keyword.  */
1844 return (optimize || DECL_REGISTER (decl));
1847 /* Return true if TYPE should be passed by invisible reference. */
1850 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1851 tree type, bool named_arg)
1855 /* If this type contains non-trivial constructors, then it is
1856 forbidden for the middle-end to create any new copies. */
1857 if (TREE_ADDRESSABLE (type))
1860 /* GCC post 3.4 passes *all* variable sized types by reference. */
1861 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
/* Otherwise the target ABI decides.  */
1865 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1868 /* Return true if TYPE, which is passed by reference, should be callee
1869 copied instead of caller copied. */
1872 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1873 tree type, bool named_arg)
/* Addressable types must never be copied by the middle-end at all.  */
1875 if (type && TREE_ADDRESSABLE (type))
1877 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1880 /* Structures to communicate between the subroutines of assign_parms.
1881 The first holds data persistent across all parameters, the second
1882 is cleared out for each parameter. */
1884 struct assign_parm_data_all
/* Cumulative argument-scan state, threaded through the target's
   FUNCTION_ARG machinery.  */
1886 CUMULATIVE_ARGS args_so_far;
/* Running total of stack space consumed by parameters.  */
1887 struct args_size stack_args_size;
1888 tree function_result_decl;
1890 rtx conversion_insns;
1891 HOST_WIDE_INT pretend_args_size;
1892 HOST_WIDE_INT extra_pretend_bytes;
1893 int reg_parm_stack_space;
/* Per-parameter state, reset by assign_parm_find_data_types for each
   parameter in turn.  */
1896 struct assign_parm_data_one
/* Mode of the parm in the function body (nominal), as passed by the
   caller (passed), and after ABI promotion (promoted).  */
1902 enum machine_mode nominal_mode;
1903 enum machine_mode passed_mode;
1904 enum machine_mode promoted_mode;
1905 struct locate_and_pad_arg_data locate;
1907 BOOL_BITFIELD named_arg : 1;
1908 BOOL_BITFIELD passed_pointer : 1;
1909 BOOL_BITFIELD on_stack : 1;
1910 BOOL_BITFIELD loaded_in_reg : 1;
1913 /* A subroutine of assign_parms. Initialize ALL. */
1916 assign_parms_initialize_all (struct assign_parm_data_all *all)
1920 memset (all, 0, sizeof (*all));
1922 fntype = TREE_TYPE (current_function_decl);
/* Use the incoming-args variant when the target provides one;
   otherwise fall back to the ordinary INIT_CUMULATIVE_ARGS.  */
1924 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1925 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1927 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1928 current_function_decl, -1);
1931 #ifdef REG_PARM_STACK_SPACE
1932 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1936 /* If ARGS contains entries with complex types, split the entry into two
1937 entries of the component type. Return a new list if substitutions are
1938 needed, else the old list. */
1941 split_complex_args (tree args)
1945 /* Before allocating memory, check for the common case of no complex. */
1946 for (p = args; p; p = TREE_CHAIN (p))
1948 tree type = TREE_TYPE (p);
1949 if (TREE_CODE (type) == COMPLEX_TYPE
1950 && targetm.calls.split_complex_arg (type))
/* At least one split is needed; work on a copy so the original
   DECL_ARGUMENTS chain stays intact.  */
1956 args = copy_list (args);
1958 for (p = args; p; p = TREE_CHAIN (p))
1960 tree type = TREE_TYPE (p);
1961 if (TREE_CODE (type) == COMPLEX_TYPE
1962 && targetm.calls.split_complex_arg (type))
1965 tree subtype = TREE_TYPE (type);
1966 bool addressable = TREE_ADDRESSABLE (p);
1968 /* Rewrite the PARM_DECL's type with its component. */
1969 TREE_TYPE (p) = subtype;
1970 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
/* Clear the layout fields so the decl is re-laid-out for the
   component type.  */
1971 DECL_MODE (p) = VOIDmode;
1972 DECL_SIZE (p) = NULL;
1973 DECL_SIZE_UNIT (p) = NULL;
1974 /* If this arg must go in memory, put it in a pseudo here.
1975 We can't allow it to go in memory as per normal parms,
1976 because the usual place might not have the imag part
1977 adjacent to the real part. */
1978 DECL_ARTIFICIAL (p) = addressable;
1979 DECL_IGNORED_P (p) = addressable;
1980 TREE_ADDRESSABLE (p) = 0;
1983 /* Build a second synthetic decl. */
1984 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1985 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1986 DECL_ARTIFICIAL (decl) = addressable;
1987 DECL_IGNORED_P (decl) = addressable;
1988 layout_decl (decl, 0);
1990 /* Splice it in; skip the new decl. */
1991 TREE_CHAIN (decl) = TREE_CHAIN (p);
1992 TREE_CHAIN (p) = decl;
2000 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2001 the hidden struct return argument, and (abi willing) complex args.
2002 Return the new parameter list. */
2005 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2007 tree fndecl = current_function_decl;
2008 tree fntype = TREE_TYPE (fndecl);
2009 tree fnargs = DECL_ARGUMENTS (fndecl);
2011 /* If struct value address is treated as the first argument, make it so. */
2012 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2013 && ! current_function_returns_pcc_struct
2014 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
/* Synthesize an artificial pointer-to-result PARM_DECL and prepend
   it to the argument list.  */
2016 tree type = build_pointer_type (TREE_TYPE (fntype));
2019 decl = build_decl (PARM_DECL, NULL_TREE, type);
2020 DECL_ARG_TYPE (decl) = type;
2021 DECL_ARTIFICIAL (decl) = 1;
2022 DECL_IGNORED_P (decl) = 1;
2024 TREE_CHAIN (decl) = fnargs;
2026 all->function_result_decl = decl;
/* Remember the pre-split list so callers can map back to it.  */
2029 all->orig_fnargs = fnargs;
2031 /* If the target wants to split complex arguments into scalars, do so. */
2032 if (targetm.calls.split_complex_arg)
2033 fnargs = split_complex_args (fnargs);
2038 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2039 data for the parameter. Incorporate ABI specifics such as pass-by-
2040 reference and type promotion. */
2043 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2044 struct assign_parm_data_one *data)
2046 tree nominal_type, passed_type;
2047 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2049 memset (data, 0, sizeof (*data));
2051 /* NAMED_ARG is a mis-nomer. We really mean 'non-varadic'. */
2052 if (!current_function_stdarg)
2053 data->named_arg = 1; /* No varadic parms. */
2054 else if (TREE_CHAIN (parm))
2055 data->named_arg = 1; /* Not the last non-varadic parm. */
2056 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2057 data->named_arg = 1; /* Only varadic ones are unnamed. */
2059 data->named_arg = 0; /* Treat as varadic. */
2061 nominal_type = TREE_TYPE (parm);
2062 passed_type = DECL_ARG_TYPE (parm);
2064 /* Look out for errors propagating this far. Also, if the parameter's
2065 type is void then its value doesn't matter. */
2066 if (TREE_TYPE (parm) == error_mark_node
2067 /* This can happen after weird syntax errors
2068 or if an enum type is defined among the parms. */
2069 || TREE_CODE (parm) != PARM_DECL
2070 || passed_type == NULL
2071 || VOID_TYPE_P (nominal_type))
/* Degenerate case: record void types/modes and let the caller skip.  */
2073 nominal_type = passed_type = void_type_node;
2074 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2078 /* Find mode of arg as it is passed, and mode of arg as it should be
2079 during execution of this function. */
2080 passed_mode = TYPE_MODE (passed_type);
2081 nominal_mode = TYPE_MODE (nominal_type);
2083 /* If the parm is to be passed as a transparent union, use the type of
2084 the first field for the tests below. We have already verified that
2085 the modes are the same. */
2086 if (TREE_CODE (passed_type) == UNION_TYPE
2087 && TYPE_TRANSPARENT_UNION (passed_type))
2088 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2090 /* See if this arg was passed by invisible reference. */
2091 if (pass_by_reference (&all->args_so_far, passed_mode,
2092 passed_type, data->named_arg))
/* From here on, the parm is really a pointer to the value.  */
2094 passed_type = nominal_type = build_pointer_type (passed_type);
2095 data->passed_pointer = true;
2096 passed_mode = nominal_mode = Pmode;
2099 /* Find mode as it is passed by the ABI. */
2100 promoted_mode = passed_mode;
2101 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2103 int unsignedp = TYPE_UNSIGNED (passed_type);
2104 promoted_mode = promote_mode (passed_type, promoted_mode,
/* Publish the computed types and modes in DATA.  */
2109 data->nominal_type = nominal_type;
2110 data->passed_type = passed_type;
2111 data->nominal_mode = nominal_mode;
2112 data->passed_mode = passed_mode;
2113 data->promoted_mode = promoted_mode;
2116 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
/* NO_RTL suppresses RTL generation; the hook is still consulted for
   the pretend-args size.  */
2119 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2120 struct assign_parm_data_one *data, bool no_rtl)
2122 int varargs_pretend_bytes = 0;
2124 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2125 data->promoted_mode,
2127 &varargs_pretend_bytes, no_rtl);
2129 /* If the back-end has requested extra stack space, record how much is
2130 needed. Do not change pretend_args_size otherwise since it may be
2131 nonzero from an earlier partial argument. */
2132 if (varargs_pretend_bytes > 0)
2133 all->pretend_args_size = varargs_pretend_bytes;
2136 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2137 the incoming location of the current parameter. */
2140 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2141 struct assign_parm_data_one *data)
2143 HOST_WIDE_INT pretend_bytes = 0;
/* VOIDmode means the degenerate case from assign_parm_find_data_types;
   use const0_rtx placeholders and return early.  */
2147 if (data->promoted_mode == VOIDmode)
2149 data->entry_parm = data->stack_parm = const0_rtx;
/* Ask the target where the argument arrives (a hard reg, a PARALLEL,
   or 0 meaning it arrives on the stack).  */
2153 #ifdef FUNCTION_INCOMING_ARG
2154 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2155 data->passed_type, data->named_arg);
2157 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2158 data->passed_type, data->named_arg);
2161 if (entry_parm == 0)
2162 data->promoted_mode = data->passed_mode;
2164 /* Determine parm's home in the stack, in case it arrives in the stack
2165 or we should pretend it did. Compute the stack position and rtx where
2166 the argument arrives and its size.
2168 There is one complexity here: If this was a parameter that would
2169 have been passed in registers, but wasn't only because it is
2170 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2171 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2172 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2173 as it was the previous time. */
2174 in_regs = entry_parm != 0;
2175 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2178 if (!in_regs && !data->named_arg)
2180 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2183 #ifdef FUNCTION_INCOMING_ARG
2184 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2185 data->passed_type, true)
2187 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2188 data->passed_type, true);
2190 in_regs = tem != NULL;
2194 /* If this parameter was passed both in registers and in the stack, use
2195 the copy on the stack. */
2196 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
/* PARTIAL is the number of bytes of the argument passed in registers
   when the rest arrives on the stack.  */
2204 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2205 data->promoted_mode,
2208 data->partial = partial;
2210 /* The caller might already have allocated stack space for the
2211 register parameters. */
2212 if (partial != 0 && all->reg_parm_stack_space == 0)
2214 /* Part of this argument is passed in registers and part
2215 is passed on the stack. Ask the prologue code to extend
2216 the stack part so that we can recreate the full value.
2218 PRETEND_BYTES is the size of the registers we need to store.
2219 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2220 stack space that the prologue should allocate.
2222 Internally, gcc assumes that the argument pointer is aligned
2223 to STACK_BOUNDARY bits. This is used both for alignment
2224 optimizations (see init_emit) and to locate arguments that are
2225 aligned to more than PARM_BOUNDARY bits. We must preserve this
2226 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2227 a stack boundary. */
2229 /* We assume at most one partial arg, and it must be the first
2230 argument on the stack. */
2231 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2233 pretend_bytes = partial;
2234 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2236 /* We want to align relative to the actual stack pointer, so
2237 don't include this in the stack size until later. */
2238 all->extra_pretend_bytes = all->pretend_args_size;
2242 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2243 entry_parm ? data->partial : 0, current_function_decl,
2244 &all->stack_args_size, &data->locate);
2246 /* Adjust offsets to include the pretend args. */
2247 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2248 data->locate.slot_offset.constant += pretend_bytes;
2249 data->locate.offset.constant += pretend_bytes;
2251 data->entry_parm = entry_parm;
2254 /* A subroutine of assign_parms. If there is actually space on the stack
2255 for this parm, count it in stack_args_size and return true. */
2258 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2259 struct assign_parm_data_one *data)
2261 /* Trivially true if we've no incoming register. */
2262 if (data->entry_parm == NULL)
2264 /* Also true if we're partially in registers and partially not,
2265 since we've arranged to drop the entire argument on the stack. */
2266 else if (data->partial != 0)
2268 /* Also true if the target says that it's passed in both registers
2269 and on the stack. */
2270 else if (GET_CODE (data->entry_parm) == PARALLEL
2271 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2273 /* Also true if the target says that there's stack allocated for
2274 all register parameters. */
2275 else if (all->reg_parm_stack_space > 0)
2277 /* Otherwise, no, this parameter has no ABI defined stack slot. */
/* Account for the parm's stack footprint, both the constant part and
   any variable-sized part.  */
2281 all->stack_args_size.constant += data->locate.size.constant;
2282 if (data->locate.size.var)
2283 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2288 /* A subroutine of assign_parms. Given that this parameter is allocated
2289 stack space by the ABI, find it. */
2292 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2294 rtx offset_rtx, stack_parm;
2295 unsigned int align, boundary;
2297 /* If we're passing this arg using a reg, make its stack home the
2298 aligned stack slot. */
2299 if (data->entry_parm)
2300 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2302 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
/* Build a MEM for the slot: internal arg pointer plus the offset.  */
2304 stack_parm = current_function_internal_arg_pointer;
2305 if (offset_rtx != const0_rtx)
2306 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2307 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2309 set_mem_attributes (stack_parm, parm, 1);
2311 boundary = data->locate.boundary;
2312 align = BITS_PER_UNIT;
2314 /* If we're padding upward, we know that the alignment of the slot
2315 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2316 intentionally forcing upward padding. Otherwise we have to come
2317 up with a guess at the alignment based on OFFSET_RTX. */
2318 if (data->locate.where_pad != downward || data->entry_parm)
2320 else if (GET_CODE (offset_rtx) == CONST_INT)
/* align & -align isolates the lowest set bit, i.e. the largest
   power-of-two alignment the constant offset guarantees.  */
2322 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2323 align = align & -align;
2325 set_mem_align (stack_parm, align);
2327 if (data->entry_parm)
2328 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2330 data->stack_parm = stack_parm;
2333 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2334 always valid and contiguous. */
2337 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2339 rtx entry_parm = data->entry_parm;
2340 rtx stack_parm = data->stack_parm;
2342 /* If this parm was passed part in regs and part in memory, pretend it
2343 arrived entirely in memory by pushing the register-part onto the stack.
2344 In the special case of a DImode or DFmode that is split, we could put
2345 it together in a pseudoreg directly, but for now that's not worth
2347 if (data->partial != 0)
2349 /* Handle calls that pass values in multiple non-contiguous
2350 locations. The Irix 6 ABI has examples of this. */
2351 if (GET_CODE (entry_parm) == PARALLEL
2352 emit_group_store (validize_mem (stack_parm), entry_parm,
2354 int_size_in_bytes (data->passed_type));
/* Simple case: a contiguous run of registers, spilled whole words
   at a time.  */
2357 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2358 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2359 data->partial / UNITS_PER_WORD);
2362 entry_parm = stack_parm;
2365 /* If we didn't decide this parm came in a register, by default it came
2367 else if (entry_parm == NULL)
2368 entry_parm = stack_parm;
2370 /* When an argument is passed in multiple locations, we can't make use
2371 of this information, but we can save some copying if the whole argument
2372 is passed in a single register. */
2373 else if (GET_CODE (entry_parm) == PARALLEL
2374 && data->nominal_mode != BLKmode
2375 && data->passed_mode != BLKmode)
2377 size_t i, len = XVECLEN (entry_parm, 0);
/* Look for a single PARALLEL element that covers the entire value:
   a register of the passed mode at offset 0.  */
2379 for (i = 0; i < len; i++)
2380 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2381 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2382 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2383 == data->passed_mode)
2384 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2386 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2391 data->entry_parm = entry_parm;
2394 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2395 always valid and properly aligned. */
/* NOTE(review): partial extraction -- the condition head at original line
   2405 (presumably `if (stack_parm ...)`) and the branch bodies that null
   out stack_parm are missing from this view.  Comments only added.  */
2398 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2400 rtx stack_parm = data->stack_parm;
2402 /* If we can't trust the parm stack slot to be aligned enough for its
2403 ultimate type, don't use that slot after entry. We'll make another
2404 stack slot, if we need one. */
2406 && ((STRICT_ALIGNMENT
2407 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2408 || (data->nominal_type
2409 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2410 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2413 /* If parm was passed in memory, and we need to convert it on entry,
2414 don't store it back in that same slot. */
2415 else if (data->entry_parm == stack_parm
2416 && data->nominal_mode != BLKmode
2417 && data->nominal_mode != data->passed_mode)
2420 /* If stack protection is in effect for this function, don't leave any
2421 pointers in their passed stack slots. */
2422 else if (cfun->stack_protect_guard
2423 && (flag_stack_protect == 2
2424 || data->passed_pointer
2425 || POINTER_TYPE_P (data->nominal_type)))
2428 data->stack_parm = stack_parm;
2431 /* A subroutine of assign_parms. Return true if the current parameter
2432 should be stored as a BLKmode in the current frame. */
/* NOTE(review): partial extraction -- the `return true/false` statements
   between the numbered lines are missing from this view.  */
2435 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2437 if (data->nominal_mode == BLKmode)
2439 if (GET_CODE (data->entry_parm) == PARALLEL)
2442 #ifdef BLOCK_REG_PADDING
2443 /* Only assign_parm_setup_block knows how to deal with register arguments
2444 that are padded at the least significant end. */
2445 if (REG_P (data->entry_parm)
2446 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2447 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2448 == (BYTES_BIG_ENDIAN ? upward : downward)))
2455 /* A subroutine of assign_parms. Arrange for the parameter to be
2456 present and valid in DATA->STACK_RTL. */
/* NOTE(review): partial extraction -- braces, early returns and some
   declarations (e.g. `size`, `mem`) are missing between the numbered
   lines.  Code left byte-identical; comments only added.

   Overall flow, as far as visible: if the parm arrives as a PARALLEL but
   has a scalar nominal mode and a register is preferred, reconstitute it
   in a pseudo via emit_group_store on the conversion-insns sequence;
   otherwise spill a BLKmode register/PARALLEL arrival into a stack slot
   (allocating one if none was assigned in the argument list).  */
2459 assign_parm_setup_block (struct assign_parm_data_all *all,
2460 tree parm, struct assign_parm_data_one *data)
2462 rtx entry_parm = data->entry_parm;
2463 rtx stack_parm = data->stack_parm;
2465 HOST_WIDE_INT size_stored;
2466 rtx orig_entry_parm = entry_parm;
2468 if (GET_CODE (entry_parm) == PARALLEL)
2469 entry_parm = emit_group_move_into_temps (entry_parm);
2471 /* If we've a non-block object that's nevertheless passed in parts,
2472 reconstitute it in register operations rather than on the stack. */
2473 if (GET_CODE (entry_parm) == PARALLEL
2474 && data->nominal_mode != BLKmode)
2476 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2478 if ((XVECLEN (entry_parm, 0) > 1
2479 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2480 && use_register_for_decl (parm))
2482 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2484 push_to_sequence (all->conversion_insns);
2486 /* For values returned in multiple registers, handle possible
2487 incompatible calls to emit_group_store.
2489 For example, the following would be invalid, and would have to
2490 be fixed by the conditional below:
2492 emit_group_store ((reg:SF), (parallel:DF))
2493 emit_group_store ((reg:SI), (parallel:DI))
2495 An example of this are doubles in e500 v2:
2496 (parallel:DF (expr_list (reg:SI) (const_int 0))
2497 (expr_list (reg:SI) (const_int 4))). */
2498 if (data->nominal_mode != data->passed_mode)
2500 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2501 emit_group_store (t, entry_parm, NULL_TREE,
2502 GET_MODE_SIZE (GET_MODE (entry_parm)));
2503 convert_move (parmreg, t, 0);
/* else branch (modes agree): store the PARALLEL straight into PARMREG.  */
2506 emit_group_store (parmreg, entry_parm, data->nominal_type,
2507 int_size_in_bytes (data->nominal_type));
2509 all->conversion_insns = get_insns ();
2512 SET_DECL_RTL (parm, parmreg);
/* Fall-through path: the parm will live in a stack slot.  Round the
   stored size up to whole words so register block moves are safe.  */
2517 size = int_size_in_bytes (data->passed_type);
2518 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2519 if (stack_parm == 0)
2521 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2522 stack_parm = assign_stack_local (BLKmode, size_stored,
2524 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2525 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2526 set_mem_attributes (stack_parm, parm, 1);
2529 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2530 calls that pass values in multiple non-contiguous locations. */
2531 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2535 /* Note that we will be storing an integral number of words.
2536 So we have to be careful to ensure that we allocate an
2537 integral number of words. We do this above when we call
2538 assign_stack_local if space was not allocated in the argument
2539 list. If it was, this will not work if PARM_BOUNDARY is not
2540 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2541 if it becomes a problem. Exception is when BLKmode arrives
2542 with arguments not conforming to word_mode. */
2544 if (data->stack_parm == 0)
2546 else if (GET_CODE (entry_parm) == PARALLEL)
2549 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2551 mem = validize_mem (stack_parm);
2553 /* Handle values in multiple non-contiguous locations. */
2554 if (GET_CODE (entry_parm) == PARALLEL)
2556 push_to_sequence (all->conversion_insns);
2557 emit_group_store (mem, entry_parm, data->passed_type, size);
2558 all->conversion_insns = get_insns ();
2565 /* If SIZE is that of a mode no bigger than a word, just use
2566 that mode's store operation. */
2567 else if (size <= UNITS_PER_WORD)
2569 enum machine_mode mode
2570 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2573 #ifdef BLOCK_REG_PADDING
2574 && (size == UNITS_PER_WORD
2575 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2576 != (BYTES_BIG_ENDIAN ? upward : downward)))
2580 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2581 emit_move_insn (change_address (mem, mode, 0), reg);
2584 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2585 machine must be aligned to the left before storing
2586 to memory. Note that the previous test doesn't
2587 handle all cases (e.g. SIZE == 3). */
2588 else if (size != UNITS_PER_WORD
2589 #ifdef BLOCK_REG_PADDING
2590 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
/* Left-justify the sub-word value by shifting it up before the store.  */
2598 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2599 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2601 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2602 build_int_cst (NULL_TREE, by),
2604 tem = change_address (mem, word_mode, 0);
2605 emit_move_insn (tem, x);
2608 move_block_from_reg (REGNO (entry_parm), mem,
2609 size_stored / UNITS_PER_WORD);
/* General fallback: copy the whole register block word by word.  */
2612 move_block_from_reg (REGNO (entry_parm), mem,
2613 size_stored / UNITS_PER_WORD);
2615 else if (data->stack_parm == 0)
2617 push_to_sequence (all->conversion_insns);
2618 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2620 all->conversion_insns = get_insns ();
2624 data->stack_parm = stack_parm;
2625 SET_DECL_RTL (parm, stack_parm);
2628 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2629 parameter. Get it there. Perform all ABI specified conversions. */
/* NOTE(review): partial extraction -- braces, some declarations (e.g.
   `parmreg`, `save_tree_used`, `sinsn`, `set`) and else-arms are missing
   between the numbered lines.  Code left byte-identical.  */
2632 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2633 struct assign_parm_data_one *data)
2636 enum machine_mode promoted_nominal_mode;
2637 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2638 bool did_conversion = false;
2640 /* Store the parm in a pseudoregister during the function, but we may
2641 need to do it in a wider mode. */
2643 /* This is not really promoting for a call. However we need to be
2644 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2645 promoted_nominal_mode
2646 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2648 parmreg = gen_reg_rtx (promoted_nominal_mode);
2650 if (!DECL_ARTIFICIAL (parm))
2651 mark_user_reg (parmreg);
2653 /* If this was an item that we received a pointer to,
2654 set DECL_RTL appropriately. */
2655 if (data->passed_pointer)
2657 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2658 set_mem_attributes (x, parm, 1);
2659 SET_DECL_RTL (parm, x);
2662 SET_DECL_RTL (parm, parmreg);
2664 /* Copy the value into the register. */
2665 if (data->nominal_mode != data->passed_mode
2666 || promoted_nominal_mode != data->promoted_mode)
2670 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2671 mode, by the caller. We now have to convert it to
2672 NOMINAL_MODE, if different. However, PARMREG may be in
2673 a different mode than NOMINAL_MODE if it is being stored
2676 If ENTRY_PARM is a hard register, it might be in a register
2677 not valid for operating in its mode (e.g., an odd-numbered
2678 register for a DFmode). In that case, moves are the only
2679 thing valid, so we can't do a convert from there. This
2680 occurs when the calling sequence allow such misaligned
2683 In addition, the conversion may involve a call, which could
2684 clobber parameters which haven't been copied to pseudo
2685 registers yet. Therefore, we must first copy the parm to
2686 a pseudo reg here, and save the conversion until after all
2687 parameters have been moved. */
2689 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2691 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2693 push_to_sequence (all->conversion_insns);
2694 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2696 if (GET_CODE (tempreg) == SUBREG
2697 && GET_MODE (tempreg) == data->nominal_mode
2698 && REG_P (SUBREG_REG (tempreg))
2699 && data->nominal_mode == data->passed_mode
2700 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2701 && GET_MODE_SIZE (GET_MODE (tempreg))
2702 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2704 /* The argument is already sign/zero extended, so note it
2706 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2707 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2710 /* TREE_USED gets set erroneously during expand_assignment. */
2711 save_tree_used = TREE_USED (parm);
2712 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2713 TREE_USED (parm) = save_tree_used;
2714 all->conversion_insns = get_insns ();
2717 did_conversion = true;
/* else branch (no mode conversion needed): plain copy into PARMREG.  */
2720 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2722 /* If we were passed a pointer but the actual value can safely live
2723 in a register, put it in one. */
2724 if (data->passed_pointer
2725 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2726 /* If by-reference argument was promoted, demote it. */
2727 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2728 || use_register_for_decl (parm)))
2730 /* We can't use nominal_mode, because it will have been set to
2731 Pmode above. We must use the actual mode of the parm. */
2732 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2733 mark_user_reg (parmreg);
2735 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2737 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2738 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2740 push_to_sequence (all->conversion_insns);
2741 emit_move_insn (tempreg, DECL_RTL (parm));
2742 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2743 emit_move_insn (parmreg, tempreg);
2744 all->conversion_insns = get_insns ();
2747 did_conversion = true;
2750 emit_move_insn (parmreg, DECL_RTL (parm));
2752 SET_DECL_RTL (parm, parmreg);
2754 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2756 data->stack_parm = NULL;
2759 /* Mark the register as eliminable if we did no conversion and it was
2760 copied from memory at a fixed offset, and the arg pointer was not
2761 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2762 offset formed an invalid address, such memory-equivalences as we
2763 make here would screw up life analysis for it. */
2764 if (data->nominal_mode == data->passed_mode
2766 && data->stack_parm != 0
2767 && MEM_P (data->stack_parm)
2768 && data->locate.offset.var == 0
2769 && reg_mentioned_p (virtual_incoming_args_rtx,
2770 XEXP (data->stack_parm, 0)))
2772 rtx linsn = get_last_insn ();
2775 /* Mark complex types separately. */
2776 if (GET_CODE (parmreg) == CONCAT)
2778 enum machine_mode submode
2779 = GET_MODE_INNER (GET_MODE (parmreg));
2780 int regnor = REGNO (XEXP (parmreg, 0));
2781 int regnoi = REGNO (XEXP (parmreg, 1));
2782 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2783 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2784 GET_MODE_SIZE (submode));
2786 /* Scan backwards for the set of the real and
2788 for (sinsn = linsn; sinsn != 0;
2789 sinsn = prev_nonnote_insn (sinsn))
2791 set = single_set (sinsn);
/* Attach REG_EQUIV notes to the insns that set the imaginary and
   real halves, recording their stack-slot equivalents.  */
2795 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2797 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2799 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2801 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2805 else if ((set = single_set (linsn)) != 0
2806 && SET_DEST (set) == parmreg)
2808 = gen_rtx_EXPR_LIST (REG_EQUIV,
2809 data->stack_parm, REG_NOTES (linsn));
2812 /* For pointer data type, suggest pointer register. */
2813 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2814 mark_reg_pointer (parmreg,
2815 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2818 /* A subroutine of assign_parms. Allocate stack space to hold the current
2819 parameter. Get it there. Perform all ABI specified conversions. */
/* NOTE(review): partial extraction -- braces, `dest`/`src` declarations,
   and the misalignment test guarding the block-move path are missing
   between the numbered lines.  Code left byte-identical.  */
2822 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2823 struct assign_parm_data_one *data)
2825 /* Value must be stored in the stack slot STACK_PARM during function
2827 bool to_conversion = false;
2829 if (data->promoted_mode != data->nominal_mode)
2831 /* Conversion is required. */
2832 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2834 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2836 push_to_sequence (all->conversion_insns);
2837 to_conversion = true;
2839 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2840 TYPE_UNSIGNED (TREE_TYPE (parm)));
2842 if (data->stack_parm)
2843 /* ??? This may need a big-endian conversion on sparc64. */
2845 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2848 if (data->entry_parm != data->stack_parm)
2852 if (data->stack_parm == 0)
2855 = assign_stack_local (GET_MODE (data->entry_parm),
2856 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2857 TYPE_ALIGN (data->passed_type));
2858 set_mem_attributes (data->stack_parm, parm, 1);
2861 dest = validize_mem (data->stack_parm);
2862 src = validize_mem (data->entry_parm);
2866 /* Use a block move to handle potentially misaligned entry_parm. */
2868 push_to_sequence (all->conversion_insns);
2869 to_conversion = true;
2871 emit_block_move (dest, src,
2872 GEN_INT (int_size_in_bytes (data->passed_type)),
/* Aligned case: a single move suffices.  */
2876 emit_move_insn (dest, src);
2881 all->conversion_insns = get_insns ();
2885 SET_DECL_RTL (parm, data->stack_parm);
2888 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2889 undo the frobbing that we did in assign_parms_augmented_arg_list. */
/* NOTE(review): partial extraction -- braces, some declarations
   (`parm`, `rmem`, `imem`) and the plain-copy else arm's opening are
   missing between the numbered lines.  Code left byte-identical.
   FNARGS walks the split (augmented) list in lockstep with the original
   parameter chain; complex parms consume two FNARGS entries.  */
2892 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2895 tree orig_fnargs = all->orig_fnargs;
2897 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2899 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2900 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2902 rtx tmp, real, imag;
2903 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2905 real = DECL_RTL (fnargs);
2906 imag = DECL_RTL (TREE_CHAIN (fnargs));
2907 if (inner != GET_MODE (real))
2909 real = gen_lowpart_SUBREG (inner, real);
2910 imag = gen_lowpart_SUBREG (inner, imag);
2913 if (TREE_ADDRESSABLE (parm))
2916 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2918 /* split_complex_arg put the real and imag parts in
2919 pseudos. Move them to memory. */
2920 tmp = assign_stack_local (DECL_MODE (parm), size,
2921 TYPE_ALIGN (TREE_TYPE (parm)));
2922 set_mem_attributes (tmp, parm, 1);
2923 rmem = adjust_address_nv (tmp, inner, 0);
2924 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2925 push_to_sequence (all->conversion_insns);
2926 emit_move_insn (rmem, real);
2927 emit_move_insn (imem, imag);
2928 all->conversion_insns = get_insns ();
/* Non-addressable case: rebuild the complex value as a CONCAT rtx.  */
2932 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2933 SET_DECL_RTL (parm, tmp);
2935 real = DECL_INCOMING_RTL (fnargs);
2936 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2937 if (inner != GET_MODE (real))
2939 real = gen_lowpart_SUBREG (inner, real);
2940 imag = gen_lowpart_SUBREG (inner, imag);
2942 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2943 set_decl_incoming_rtl (parm, tmp);
/* Complex parms use two entries of FNARGS; skip the extra one here
   (the loop tail advances past the first).  */
2944 fnargs = TREE_CHAIN (fnargs);
/* Non-split parm: just forward the RTL from the augmented copy.  */
2948 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2949 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2951 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2952 instead of the copy of decl, i.e. FNARGS. */
2953 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2954 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2957 fnargs = TREE_CHAIN (fnargs);
2961 /* Assign RTL expressions to the function's parameters. This may involve
2962 copying them into registers and using those registers as the DECL_RTL. */
/* NOTE(review): partial extraction -- braces, `parm`/`fnargs` declarations,
   `continue` statements and some conditionals are missing between the
   numbered lines.  Code left byte-identical; comments only added.
   This is the top-level driver: it walks the (possibly augmented) arg
   list, dispatching each parm to the setup_block / setup_reg /
   setup_stack subroutines above, then finalizes arg-size bookkeeping
   and the function's return-value RTL.  */
2965 assign_parms (tree fndecl)
2967 struct assign_parm_data_all all;
2970 current_function_internal_arg_pointer
2971 = targetm.calls.internal_arg_pointer ();
2973 assign_parms_initialize_all (&all);
2974 fnargs = assign_parms_augmented_arg_list (&all);
2976 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2978 struct assign_parm_data_one data;
2980 /* Extract the type of PARM; adjust it according to ABI. */
2981 assign_parm_find_data_types (&all, parm, &data);
2983 /* Early out for errors and void parameters. */
2984 if (data.passed_mode == VOIDmode)
2986 SET_DECL_RTL (parm, const0_rtx);
2987 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
/* Last named arg of a stdarg function: set up the varargs machinery.  */
2991 if (current_function_stdarg && !TREE_CHAIN (parm))
2992 assign_parms_setup_varargs (&all, &data, false);
2994 /* Find out where the parameter arrives in this function. */
2995 assign_parm_find_entry_rtl (&all, &data);
2997 /* Find out where stack space for this parameter might be. */
2998 if (assign_parm_is_stack_parm (&all, &data))
3000 assign_parm_find_stack_rtl (parm, &data);
3001 assign_parm_adjust_entry_rtl (&data);
3004 /* Record permanently how this parm was passed. */
3005 set_decl_incoming_rtl (parm, data.entry_parm);
3007 /* Update info on where next arg arrives in registers. */
3008 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3009 data.passed_type, data.named_arg);
3011 assign_parm_adjust_stack_rtl (&data);
3013 if (assign_parm_setup_block_p (&data))
3014 assign_parm_setup_block (&all, parm, &data);
3015 else if (data.passed_pointer || use_register_for_decl (parm))
3016 assign_parm_setup_reg (&all, parm, &data);
/* Default: the parm lives in its stack slot.  */
3018 assign_parm_setup_stack (&all, parm, &data);
3021 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3022 assign_parms_unsplit_complex (&all, fnargs);
3024 /* Output all parameter conversion instructions (possibly including calls)
3025 now that all parameters have been copied out of hard registers. */
3026 emit_insn (all.conversion_insns);
3028 /* If we are receiving a struct value address as the first argument, set up
3029 the RTL for the function result. As this might require code to convert
3030 the transmitted address to Pmode, we do this here to ensure that possible
3031 preliminary conversions of the address have been emitted already. */
3032 if (all.function_result_decl)
3034 tree result = DECL_RESULT (current_function_decl);
3035 rtx addr = DECL_RTL (all.function_result_decl);
3038 if (DECL_BY_REFERENCE (result))
3042 addr = convert_memory_address (Pmode, addr);
3043 x = gen_rtx_MEM (DECL_MODE (result), addr);
3044 set_mem_attributes (x, result, 1);
3046 SET_DECL_RTL (result, x);
3049 /* We have aligned all the args, so add space for the pretend args. */
3050 current_function_pretend_args_size = all.pretend_args_size;
3051 all.stack_args_size.constant += all.extra_pretend_bytes;
3052 current_function_args_size = all.stack_args_size.constant;
3054 /* Adjust function incoming argument size for alignment and
3057 #ifdef REG_PARM_STACK_SPACE
3058 current_function_args_size = MAX (current_function_args_size,
3059 REG_PARM_STACK_SPACE (fndecl));
3062 current_function_args_size = CEIL_ROUND (current_function_args_size,
3063 PARM_BOUNDARY / BITS_PER_UNIT);
3065 #ifdef ARGS_GROW_DOWNWARD
3066 current_function_arg_offset_rtx
3067 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3068 : expand_expr (size_diffop (all.stack_args_size.var,
3069 size_int (-all.stack_args_size.constant)),
3070 NULL_RTX, VOIDmode, 0));
3072 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3075 /* See how many bytes, if any, of its args a function should try to pop
3078 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3079 current_function_args_size);
3081 /* For stdarg.h function, save info about
3082 regs and stack space used by the named args. */
3084 current_function_args_info = all.args_so_far;
3086 /* Set the rtx used for the function return value. Put this in its
3087 own variable so any optimizers that need this information don't have
3088 to include tree.h. Do this here so it gets done when an inlined
3089 function gets output. */
3091 current_function_return_rtx
3092 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3093 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3095 /* If scalar return value was computed in a pseudo-reg, or was a named
3096 return value that got dumped to the stack, copy that to the hard
3098 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3100 tree decl_result = DECL_RESULT (fndecl);
3101 rtx decl_rtl = DECL_RTL (decl_result);
3103 if (REG_P (decl_rtl)
3104 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3105 : DECL_REGISTER (decl_result))
3109 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3111 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3112 /* The delay slot scheduler assumes that current_function_return_rtx
3113 holds the hard register containing the return value, not a
3114 temporary pseudo. */
3115 current_function_return_rtx = real_decl_rtl;
3120 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3121 For all seen types, gimplify their sizes. */
/* NOTE(review): partial extraction -- the `tree t = *tp;` setup and the
   return statements are missing between the numbered lines.  DATA is the
   statement list to which size computations are appended.  */
3124 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3131 if (POINTER_TYPE_P (t))
3133 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3134 && !TYPE_SIZES_GIMPLIFIED (t))
3136 gimplify_type_sizes (t, (tree *) data);
3144 /* Gimplify the parameter list for current_function_decl. This involves
3145 evaluating SAVE_EXPRs of variable sized parameters and generating code
3146 to implement callee-copies reference parameters. Returns a list of
3147 statements to add to the beginning of the function, or NULL if nothing
/* NOTE(review): partial extraction -- braces, `continue`s, the `local`/`t`
   declarations and the trailing `return stmts;` are missing between the
   numbered lines.  Code left byte-identical; comments only added.  */
3151 gimplify_parameters (void)
3153 struct assign_parm_data_all all;
3154 tree fnargs, parm, stmts = NULL;
3156 assign_parms_initialize_all (&all);
3157 fnargs = assign_parms_augmented_arg_list (&all);
3159 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3161 struct assign_parm_data_one data;
3163 /* Extract the type of PARM; adjust it according to ABI. */
3164 assign_parm_find_data_types (&all, parm, &data);
3166 /* Early out for errors and void parameters. */
3167 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3170 /* Update info on where next arg arrives in registers. */
3171 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3172 data.passed_type, data.named_arg)
3174 /* ??? Once upon a time variable_size stuffed parameter list
3175 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3176 turned out to be less than manageable in the gimple world.
3177 Now we have to hunt them down ourselves. */
3178 walk_tree_without_duplicates (&data.passed_type,
3179 gimplify_parm_type, &stmts);
3181 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3183 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3184 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3187 if (data.passed_pointer)
3189 tree type = TREE_TYPE (data.passed_type);
3190 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3191 type, data.named_arg))
3195 /* For constant sized objects, this is trivial; for
3196 variable-sized objects, we have to play games. */
3197 if (TREE_CONSTANT (DECL_SIZE (parm)))
3199 local = create_tmp_var (type, get_name (parm));
3200 DECL_IGNORED_P (local) = 0;
/* Variable-sized callee copy: allocate with __builtin_alloca and
   access the copy through a pointer temporary.  */
3204 tree ptr_type, addr, args;
3206 ptr_type = build_pointer_type (type);
3207 addr = create_tmp_var (ptr_type, get_name (parm));
3208 DECL_IGNORED_P (addr) = 0;
3209 local = build_fold_indirect_ref (addr);
3211 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3212 t = built_in_decls[BUILT_IN_ALLOCA];
3213 t = build_function_call_expr (t, args);
3214 t = fold_convert (ptr_type, t);
3215 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3216 gimplify_and_add (t, &stmts);
/* Copy the caller's object into the local callee copy.  */
3219 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3220 gimplify_and_add (t, &stmts);
3222 SET_DECL_VALUE_EXPR (parm, local);
3223 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3231 /* Indicate whether REGNO is an incoming argument to the current function
3232 that was promoted to a wider mode. If so, return the RTX for the
3233 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3234 that REGNO is promoted from and whether the promotion was signed or
/* NOTE(review): partial extraction -- the return type, `tree arg;`
   declaration and the final `return 0;` are missing from this view.  */
3238 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3242 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3243 arg = TREE_CHAIN (arg))
3244 if (REG_P (DECL_INCOMING_RTL (arg))
3245 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3246 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3248 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3249 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3251 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3252 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3253 && mode != DECL_MODE (arg))
3255 *pmode = DECL_MODE (arg);
3256 *punsignedp = unsignedp;
3257 return DECL_INCOMING_RTL (arg);
3265 /* Compute the size and offset from the start of the stacked arguments for a
3266 parm passed in mode PASSED_MODE and with type TYPE.
3268 INITIAL_OFFSET_PTR points to the current offset into the stacked
3271 The starting offset and size for this parm are returned in
3272 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3273 nonzero, the offset is that of stack slot, which is returned in
3274 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3275 padding required from the initial offset ptr to the stack slot.
3277 IN_REGS is nonzero if the argument will be passed in registers. It will
3278 never be set if REG_PARM_STACK_SPACE is not defined.
3280 FNDECL is the function in which the argument was defined.
3282 There are two types of rounding that are done. The first, controlled by
3283 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3284 list to be aligned to the specific boundary (in bits). This rounding
3285 affects the initial and starting offsets, but not the argument size.
3287 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3288 optionally rounds the size of the parm to PARM_BOUNDARY. The
3289 initial offset is not affected by this rounding, while the size always
3290 is and the starting offset may be. */
3292 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3293 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3294 callers pass in the total size of args so far as
3295 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
/* NOTE(review): partial extraction -- the `sizetree` declaration, the
   IN_REGS guard around the REG_PARM_STACK_SPACE skip, and several braces
   are missing between the numbered lines.  Code left byte-identical.  */
3298 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3299 int partial, tree fndecl ATTRIBUTE_UNUSED,
3300 struct args_size *initial_offset_ptr,
3301 struct locate_and_pad_arg_data *locate)
3304 enum direction where_pad;
3305 unsigned int boundary;
3306 int reg_parm_stack_space = 0;
3307 int part_size_in_regs;
3309 #ifdef REG_PARM_STACK_SPACE
3310 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3312 /* If we have found a stack parm before we reach the end of the
3313 area reserved for registers, skip that area. */
3316 if (reg_parm_stack_space > 0)
3318 if (initial_offset_ptr->var)
3320 initial_offset_ptr->var
3321 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3322 ssize_int (reg_parm_stack_space));
3323 initial_offset_ptr->constant = 0;
3325 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3326 initial_offset_ptr->constant = reg_parm_stack_space;
3329 #endif /* REG_PARM_STACK_SPACE */
3331 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3334 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3335 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3336 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3337 locate->where_pad = where_pad;
3338 locate->boundary = boundary;
3340 /* Remember if the outgoing parameter requires extra alignment on the
3341 calling function side. */
3342 if (boundary > PREFERRED_STACK_BOUNDARY)
3343 boundary = PREFERRED_STACK_BOUNDARY;
3344 if (cfun->stack_alignment_needed < boundary)
3345 cfun->stack_alignment_needed = boundary;
3347 #ifdef ARGS_GROW_DOWNWARD
3348 locate->slot_offset.constant = -initial_offset_ptr->constant;
3349 if (initial_offset_ptr->var)
3350 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3351 initial_offset_ptr->var)
/* Downward-growing args: round the (copied) size up to PARM_BOUNDARY
   when padding applies, then subtract it from the slot offset.  */
3355 if (where_pad != none
3356 && (!host_integerp (sizetree, 1)
3357 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3358 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3359 SUB_PARM_SIZE (locate->slot_offset, s2);
3362 locate->slot_offset.constant += part_size_in_regs;
3365 #ifdef REG_PARM_STACK_SPACE
3366 || REG_PARM_STACK_SPACE (fndecl) > 0
3369 pad_to_arg_alignment (&locate->slot_offset, boundary,
3370 &locate->alignment_pad);
3372 locate->size.constant = (-initial_offset_ptr->constant
3373 - locate->slot_offset.constant);
3374 if (initial_offset_ptr->var)
3375 locate->size.var = size_binop (MINUS_EXPR,
3376 size_binop (MINUS_EXPR,
3378 initial_offset_ptr->var),
3379 locate->slot_offset.var);
3381 /* Pad_below needs the pre-rounded size to know how much to pad
3383 locate->offset = locate->slot_offset;
3384 if (where_pad == downward)
3385 pad_below (&locate->offset, passed_mode, sizetree);
3387 #else /* !ARGS_GROW_DOWNWARD */
3389 #ifdef REG_PARM_STACK_SPACE
3390 || REG_PARM_STACK_SPACE (fndecl) > 0
3393 pad_to_arg_alignment (initial_offset_ptr, boundary,
3394 &locate->alignment_pad);
3395 locate->slot_offset = *initial_offset_ptr;
3397 #ifdef PUSH_ROUNDING
3398 if (passed_mode != BLKmode)
3399 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3402 /* Pad_below needs the pre-rounded size to know how much to pad below
3403 so this must be done before rounding up. */
3404 locate->offset = locate->slot_offset;
3405 if (where_pad == downward)
3406 pad_below (&locate->offset, passed_mode, sizetree);
3408 if (where_pad != none
3409 && (!host_integerp (sizetree, 1)
3410 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3411 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3413 ADD_PARM_SIZE (locate->size, sizetree);
3415 locate->size.constant -= part_size_in_regs;
3416 #endif /* ARGS_GROW_DOWNWARD */
3419 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3420 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
/* NOTE(review): partial extraction -- the static return type, some braces
   and the SPARC-hack assignment body are missing between the numbered
   lines.  Code left byte-identical; comments only added.  */
3423 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3424 struct args_size *alignment_pad)
3426 tree save_var = NULL_TREE;
3427 HOST_WIDE_INT save_constant = 0;
3428 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3429 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3431 #ifdef SPARC_STACK_BOUNDARY_HACK
3432 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3433 the real alignment of %sp. However, when it does this, the
3434 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3435 if (SPARC_STACK_BOUNDARY_HACK)
/* Save the pre-rounding offset so ALIGNMENT_PAD can be computed as the
   difference after rounding.  Only needed for over-aligned parms.  */
3439 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3441 save_var = offset_ptr->var;
3442 save_constant = offset_ptr->constant;
3445 alignment_pad->var = NULL_TREE;
3446 alignment_pad->constant = 0;
3448 if (boundary > BITS_PER_UNIT)
3450 if (offset_ptr->var)
3452 tree sp_offset_tree = ssize_int (sp_offset);
3453 tree offset = size_binop (PLUS_EXPR,
3454 ARGS_SIZE_TREE (*offset_ptr),
/* Rounding direction follows the direction of stack-arg growth.  */
3456 #ifdef ARGS_GROW_DOWNWARD
3457 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3459 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3462 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3463 /* ARGS_SIZE_TREE includes constant term. */
3464 offset_ptr->constant = 0;
3465 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3466 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
/* Constant-offset case: pure integer rounding relative to sp_offset.  */
3471 offset_ptr->constant = -sp_offset +
3472 #ifdef ARGS_GROW_DOWNWARD
3473 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3475 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3477 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3478 alignment_pad->constant = offset_ptr->constant - save_constant;
/* Adjust *OFFSET_PTR by the amount of padding placed "below" an argument
   of mode PASSED_MODE and size SIZETREE, i.e. the difference between the
   PARM_BOUNDARY-rounded size and the actual size.  (Extract is missing
   lines per the skipping embedded line numbers — e.g. the return-type
   line and braces.)  */
3484 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
/* Fixed-mode argument: pad by rounded-mode-size minus actual mode size.  */
3486 if (passed_mode != BLKmode)
3488 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3489 offset_ptr->constant
3490 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3491 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3492 - GET_MODE_SIZE (passed_mode));
/* BLKmode (or variable size): pad by rounded(SIZETREE) - SIZETREE using
   the args_size add/sub macros so variable sizes are handled too.  */
3496 if (TREE_CODE (sizetree) != INTEGER_CST
3497 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3499 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3500 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3502 ADD_PARM_SIZE (*offset_ptr, s2);
3503 SUB_PARM_SIZE (*offset_ptr, sizetree);
3508 /* Walk the tree of blocks describing the binding levels within a function
3509 and warn about variables the might be killed by setjmp or vfork.
3510 This is done after calling flow_analysis and before global_alloc
3511 clobbers the pseudo-regs to hard regs. */
/* Recurses over BLOCK and all of its subblocks; warns for each VAR_DECL
   that lives in a register clobbered at a setjmp.  (The warning call's
   trailing arguments are on lines missing from this extract.)  */
3514 setjmp_vars_warning (tree block)
3518 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3520 if (TREE_CODE (decl) == VAR_DECL
3521 && DECL_RTL_SET_P (decl)
3522 && REG_P (DECL_RTL (decl))
3523 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3524 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
/* Recurse into nested binding levels.  */
3529 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3530 setjmp_vars_warning (sub);
3533 /* Do the appropriate part of setjmp_vars_warning
3534 but for arguments instead of local variables. */
/* Walks DECL_ARGUMENTS of the current function and warns for each
   parameter living in a register clobbered at a setjmp.  */
3537 setjmp_args_warning (void)
3540 for (decl = DECL_ARGUMENTS (current_function_decl);
3541 decl; decl = TREE_CHAIN (decl))
3542 if (DECL_RTL (decl) != 0
3543 && REG_P (DECL_RTL (decl))
3544 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3545 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3550 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3551 and create duplicate blocks. */
3552 /* ??? Need an option to either create block fragments or to create
3553 abstract origin duplicates of a source block. It really depends
3554 on what optimization has been performed. */
/* Rebuilds the BLOCK tree of the current function from the block notes
   in the insn stream: clears marks, prunes the old subtree, walks the
   insns via reorder_blocks_1, then fixes fragment chains.  */
3557 reorder_blocks (void)
3559 tree block = DECL_INITIAL (current_function_decl);
3560 VEC(tree,heap) *block_stack;
/* Nothing to do for functions with no outermost block.  */
3562 if (block == NULL_TREE)
3565 block_stack = VEC_alloc (tree, heap, 10);
3567 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3568 clear_block_marks (block);
3570 /* Prune the old trees away, so that they don't get in the way. */
3571 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3572 BLOCK_CHAIN (block) = NULL_TREE;
3574 /* Recreate the block tree from the note nesting. */
3575 reorder_blocks_1 (get_insns (), block, &block_stack);
3576 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3578 /* Remove deleted blocks from the block fragment chains. */
3579 reorder_fix_fragments (block);
3581 VEC_free (tree, heap, block_stack);
3584 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
/* Clears TREE_ASM_WRITTEN on BLOCK, all its subblocks (recursively),
   and all blocks chained after it.  (The enclosing loop construct is
   on lines missing from this extract.)  */
3587 clear_block_marks (tree block)
3591 TREE_ASM_WRITTEN (block) = 0;
3592 clear_block_marks (BLOCK_SUBBLOCKS (block));
3593 block = BLOCK_CHAIN (block);
/* Helper for reorder_blocks: scan INSNS for block begin/end notes and
   rebuild the subtree rooted at CURRENT_BLOCK, using *P_BLOCK_STACK to
   match BLOCK_END notes to their BLOCK_BEG notes.  Blocks seen twice
   get a fragment copy so each fragment covers one address region.  */
3598 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3602 for (insn = insns; insn; insn = NEXT_INSN (insn))
3606 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3608 tree block = NOTE_BLOCK (insn);
3610 /* If we have seen this block before, that means it now
3611 spans multiple address regions. Create a new fragment. */
3612 if (TREE_ASM_WRITTEN (block))
3614 tree new_block = copy_node (block);
/* Chain the new fragment onto the (possibly original) origin block.  */
3617 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3618 ? BLOCK_FRAGMENT_ORIGIN (block)
3620 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3621 BLOCK_FRAGMENT_CHAIN (new_block)
3622 = BLOCK_FRAGMENT_CHAIN (origin);
3623 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3625 NOTE_BLOCK (insn) = new_block;
/* Mark the block used and splice it under CURRENT_BLOCK.  */
3629 BLOCK_SUBBLOCKS (block) = 0;
3630 TREE_ASM_WRITTEN (block) = 1;
3631 /* When there's only one block for the entire function,
3632 current_block == block and we mustn't do this, it
3633 will cause infinite recursion. */
3634 if (block != current_block)
3636 BLOCK_SUPERCONTEXT (block) = current_block;
3637 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3638 BLOCK_SUBBLOCKS (current_block) = block;
3639 current_block = block;
3641 VEC_safe_push (tree, heap, *p_block_stack, block);
/* BLOCK_END: pop the matching block and restore its parent as current.
   Subblocks were pushed in reverse, so reverse them here.  */
3643 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3645 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3646 BLOCK_SUBBLOCKS (current_block)
3647 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3648 current_block = BLOCK_SUPERCONTEXT (current_block);
3654 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3655 appears in the block tree, select one of the fragments to become
3656 the new origin block. */
/* Walks BLOCK and its chain/subblocks.  (Missing lines per the skipping
   embedded numbering: the surrounding loop, some assignments — e.g. the
   new_origin = block case — and closing braces.)  */
3659 reorder_fix_fragments (tree block)
3663 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3664 tree new_origin = NULL_TREE;
/* The origin block was dropped from the tree: promote the first
   still-live fragment to be the new origin.  */
3668 if (! TREE_ASM_WRITTEN (dup_origin))
3670 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3672 /* Find the first of the remaining fragments. There must
3673 be at least one -- the current block. */
3674 while (! TREE_ASM_WRITTEN (new_origin))
3675 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3676 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3679 else if (! dup_origin)
3682 /* Re-root the rest of the fragments to the new origin. In the
3683 case that DUP_ORIGIN was null, that means BLOCK was the origin
3684 of a chain of fragments and we want to remove those fragments
3685 that didn't make it to the output. */
3688 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
/* Keep live fragments, re-pointing them at NEW_ORIGIN; unlink dead ones.  */
3693 if (TREE_ASM_WRITTEN (chain))
3695 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3697 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3699 chain = BLOCK_FRAGMENT_CHAIN (chain);
/* Recurse into subblocks, then advance along the sibling chain.  */
3704 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3705 block = BLOCK_CHAIN (block);
3709 /* Reverse the order of elements in the chain T of blocks,
3710 and return the new head of the chain (old last element). */
/* Classic in-place singly-linked-list reversal over BLOCK_CHAIN.
   (The `prev = decl;` update and `return prev;` are on lines missing
   from this extract.)  */
3713 blocks_nreverse (tree t)
3715 tree prev = 0, decl, next;
3716 for (decl = t; decl; decl = next)
3718 next = BLOCK_CHAIN (decl);
3719 BLOCK_CHAIN (decl) = prev;
3725 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3726 non-NULL, list them all into VECTOR, in a depth-first preorder
3727 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
/* ... blocks (comment continuation missing from this extract).
   Returns the number of blocks recorded; the n_blocks declaration,
   increment, and return are on missing lines.  */
3731 all_blocks (tree block, tree *vector)
3737 TREE_ASM_WRITTEN (block) = 0;
3739 /* Record this block. */
3741 vector[n_blocks] = block;
3745 /* Record the subblocks, and their subblocks... */
3746 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3747 vector ? vector + n_blocks : 0);
3748 block = BLOCK_CHAIN (block);
3754 /* Return a vector containing all the blocks rooted at BLOCK. The
3755 number of elements in the vector is stored in N_BLOCKS_P. The
3756 vector is dynamically allocated; it is the caller's responsibility
3757 to call `free' on the pointer returned. */
/* Two passes over all_blocks: first to count, then to fill.  */
3760 get_block_vector (tree block, int *n_blocks_p)
3764 *n_blocks_p = all_blocks (block, NULL);
3765 block_vector = XNEWVEC (tree, *n_blocks_p);
3766 all_blocks (block, block_vector);
3768 return block_vector;
/* Running block-number counter, preserved across functions (GC-rooted).  */
3771 static GTY(()) int next_block_index = 2;
3773 /* Set BLOCK_NUMBER for all the blocks in FN. */
3776 number_blocks (tree fn)
3782 /* For SDB and XCOFF debugging output, we start numbering the blocks
3783 from 1 within each function, rather than keeping a running
/* ... count (comment continuation missing from this extract).  */
3785 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3786 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3787 next_block_index = 1;
3790 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3792 /* The top-level BLOCK isn't numbered at all. */
3793 for (i = 1; i < n_blocks; ++i)
3794 /* We number the blocks from two. */
3795 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3797 free (block_vector);
3802 /* If VAR is present in a subblock of BLOCK, return the subblock. */
/* Debug helper: checks BLOCK's own vars first (match/return lines are
   missing from this extract), then recurses into each subblock.  */
3805 debug_find_var_in_block_tree (tree var, tree block)
3809 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3813 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3815 tree ret = debug_find_var_in_block_tree (var, t);
3823 /* Allocate a function structure for FNDECL and set its contents
/* ... to the defaults (comment continuation missing).  Sets the global
   `cfun`; with a NULL FNDECL only the generic initialization happens
   (the early-return guard is on a missing line).  */
3827 allocate_struct_function (tree fndecl)
3830 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
/* Zero-initialized function struct; set stack-alignment defaults.  */
3832 cfun = ggc_alloc_cleared (sizeof (struct function));
3834 cfun->stack_alignment_needed = STACK_BOUNDARY;
3835 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3837 current_function_funcdef_no = funcdef_no++;
3839 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3841 init_eh_for_function ();
/* Language- and target-specific per-function initialization hooks.  */
3843 lang_hooks.function.init (cfun);
3844 if (init_machine_status)
3845 cfun->machine = (*init_machine_status) ();
3850 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3851 cfun->decl = fndecl;
3853 result = DECL_RESULT (fndecl);
/* Record how the value is returned: in memory (PCC-style or not),
   or whether it is a pointer.  */
3854 if (aggregate_value_p (result, fndecl))
3856 #ifdef PCC_STATIC_STRUCT_RETURN
3857 current_function_returns_pcc_struct = 1;
3859 current_function_returns_struct = 1;
3862 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
/* stdarg iff the arg list exists and does not end in void.  */
3864 current_function_stdarg
3866 && TYPE_ARG_TYPES (fntype) != 0
3867 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3868 != void_type_node));
3870 /* Assume all registers in stdarg functions need to be saved. */
3871 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3872 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3875 /* Reset cfun, and other non-struct-function variables to defaults as
3876 appropriate for emitting rtl at the start of a function. */
/* Reuses an existing struct function for FNDECL if one exists,
   otherwise allocates one; then resets the global RTL-generation state.  */
3879 prepare_function_start (tree fndecl)
3881 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3882 cfun = DECL_STRUCT_FUNCTION (fndecl);
3884 allocate_struct_function (fndecl);
3886 init_varasm_status (cfun);
3889 cse_not_expected = ! optimize;
3891 /* Caller save not needed yet. */
3892 caller_save_needed = 0;
3894 /* We haven't done register allocation yet. */
3897 /* Indicate that we have not instantiated virtual registers yet. */
3898 virtuals_instantiated = 0;
3900 /* Indicate that we want CONCATs now. */
3901 generating_concat_p = 1;
3903 /* Indicate we have no need of a frame pointer yet. */
3904 frame_pointer_needed = 0;
3907 /* Initialize the rtl expansion mechanism so that we can do simple things
3908 like generate sequences. This is used to provide a context during global
3909 initialization of some passes. */
/* Just prepares a function context with no associated FUNCTION_DECL;
   undone later by expand_dummy_function_end.  */
3911 init_dummy_function_start (void)
3913 prepare_function_start (NULL);
3916 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3917 and initialize static variables for generating RTL for the statements
/* ... of the function (comment continuation missing from extract).  */
3921 init_function_start (tree subr)
3923 prepare_function_start (subr);
3925 /* Prevent ever trying to delete the first instruction of a
3926 function. Also tell final how to output a linenum before the
3927 function prologue. Note linenums could be missing, e.g. when
3928 compiling a Java .class file. */
3929 if (! DECL_IS_BUILTIN (subr))
3930 emit_line_note (DECL_SOURCE_LOCATION (subr));
3932 /* Make sure first insn is a note even if we don't want linenums.
3933 This makes sure the first insn will never be deleted.
3934 Also, final expects a note to appear there. */
3935 emit_note (NOTE_INSN_DELETED);
3937 /* Warn if this value is an aggregate type,
3938 regardless of which calling convention we are using for it. */
3939 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3940 warning (OPT_Waggregate_return, "function returns an aggregate");
3943 /* Make sure all values used by the optimization passes have sane
/* ... defaults (comment continuation missing from this extract).  */
3946 init_function_for_compilation (void)
3950 /* No prologue/epilogue insns yet. Make sure that these vectors are
/* ... empty (continuation missing).  These VECs track the UIDs of
   prologue/epilogue/sibcall-epilogue insns; see record_insns below.  */
3952 gcc_assert (VEC_length (int, prologue) == 0);
3953 gcc_assert (VEC_length (int, epilogue) == 0);
3954 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
/* Pass descriptor wiring init_function_for_compilation into the pass
   manager; several fields (name, gate, tv_id) are on missing lines.  */
3958 struct tree_opt_pass pass_init_function =
3962 init_function_for_compilation, /* execute */
3965 0, /* static_pass_number */
3967 0, /* properties_required */
3968 0, /* properties_provided */
3969 0, /* properties_destroyed */
3970 0, /* todo_flags_start */
3971 0, /* todo_flags_finish */
/* Emit the call to __main at the start of `main`, on targets that use
   __main to run global constructors (see the INVOKE__main discussion in
   the file header) rather than an init section.  */
3977 expand_main_function (void)
3979 #if (defined(INVOKE__main) \
3980 || (!defined(HAS_INIT_SECTION) \
3981 && !defined(INIT_SECTION_ASM_OP) \
3982 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3983 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3987 /* Expand code to initialize the stack_protect_guard. This is invoked at
3988 the beginning of a function to be protected. */
/* Fallback stubs when the target has no stack_protect_set pattern;
   gen_stack_protect_set must then never be reached.  */
3990 #ifndef HAVE_stack_protect_set
3991 # define HAVE_stack_protect_set 0
3992 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3996 stack_protect_prologue (void)
3998 tree guard_decl = targetm.stack_protect_guard ();
4001 /* Avoid expand_expr here, because we don't want guard_decl pulled
4002 into registers unless absolutely necessary. And we know that
4003 cfun->stack_protect_guard is a local stack slot, so this skips
/* ... all the fluff (continuation missing).  X = local guard slot,
   Y = the global guard value.  */
4005 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4006 y = validize_mem (DECL_RTL (guard_decl));
4008 /* Allow the target to copy from Y to X without leaking Y into a
/* ... register (continuation missing).  Prefer the target insn; the
   emit/return on success is on missing lines.  */
4010 if (HAVE_stack_protect_set)
4012 rtx insn = gen_stack_protect_set (x, y);
4020 /* Otherwise do a straight move. */
4021 emit_move_insn (x, y);
4024 /* Expand code to verify the stack_protect_guard. This is invoked at
4025 the end of a function to be protected. */
4027 #ifndef HAVE_stack_protect_test
4028 # define HAVE_stack_protect_test 0
4029 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
/* Compares the local guard copy against the global guard; on mismatch
   falls through into targetm.stack_protect_fail, otherwise jumps to
   LABEL past the failure call.  (Several lines — the switch cases'
   emit/goto plumbing, the final emit_label — are missing here.)  */
4033 stack_protect_epilogue (void)
4035 tree guard_decl = targetm.stack_protect_guard ();
4036 rtx label = gen_label_rtx ();
4039 /* Avoid expand_expr here, because we don't want guard_decl pulled
4040 into registers unless absolutely necessary. And we know that
4041 cfun->stack_protect_guard is a local stack slot, so this skips
/* ... all the fluff (continuation missing).  */
4043 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4044 y = validize_mem (DECL_RTL (guard_decl));
4046 /* Allow the target to compare Y with X without leaking either into
/* ... a register (continuation missing).  Use the target's compare
   pattern when available, else an ordinary compare-and-jump.  */
4048 switch (HAVE_stack_protect_test != 0)
4051 tmp = gen_stack_protect_test (x, y, label);
4060 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4064 /* The noreturn predictor has been moved to the tree level. The rtl-level
4065 predictors estimate this branch about 20%, which isn't enough to get
4066 things moved out of line. Since this is the only extant case of adding
4067 a noreturn function at the rtl level, it doesn't seem worth doing ought
4068 except adding the prediction by hand. */
4069 tmp = get_last_insn ();
4071 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4073 expand_expr_stmt (targetm.stack_protect_fail ());
4077 /* Start the RTL for a new function, and set variables used for
4079 SUBR is the FUNCTION_DECL node.
4080 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4081 the function's parameters, which must be run at any return statement. */
/* NOTE(review): many lines are absent from this extract (embedded line
   numbers skip), including the return-type line, braces, and several
   `else` keywords — read structure from the surviving comments.  */
4084 expand_function_start (tree subr)
4086 /* Make sure volatile mem refs aren't considered
4087 valid operands of arithmetic insns. */
4088 init_recog_no_volatile ();
/* Decide whether this function gets profiling and stack-limit code.  */
4090 current_function_profile
4092 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4094 current_function_limit_stack
4095 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4097 /* Make the label for return statements to jump to. Do not special
4098 case machines with special return instructions -- they will be
4099 handled later during jump, ifcvt, or epilogue creation. */
4100 return_label = gen_label_rtx ();
4102 /* Initialize rtx used to return the value. */
4103 /* Do this before assign_parms so that we copy the struct value address
4104 before any library calls that assign parms might generate. */
4106 /* Decide whether to return the value in memory or in a register. */
4107 if (aggregate_value_p (DECL_RESULT (subr), subr))
4109 /* Returning something that won't go in a register. */
4110 rtx value_address = 0;
4112 #ifdef PCC_STATIC_STRUCT_RETURN
4113 if (current_function_returns_pcc_struct)
4115 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4116 value_address = assemble_static_space (size);
/* Non-PCC case: the caller passes the return-slot address, either via
   the struct-value rtx or as a hidden first argument.  */
4121 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4122 /* Expect to be passed the address of a place to store the value.
4123 If it is passed as an argument, assign_parms will take care of
/* ... it (continuation missing).  */
4127 value_address = gen_reg_rtx (Pmode);
4128 emit_move_insn (value_address, sv);
/* Wrap the address in a MEM of the result's mode (unless the result
   decl is itself by-reference) and record it as the result's RTL.  */
4133 rtx x = value_address;
4134 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4136 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4137 set_mem_attributes (x, DECL_RESULT (subr), 1);
4139 SET_DECL_RTL (DECL_RESULT (subr), x);
4142 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4143 /* If return mode is void, this decl rtl should not be used. */
4144 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4147 /* Compute the return values into a pseudo reg, which we will copy
4148 into the true return register after the cleanups are done. */
4149 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4150 if (TYPE_MODE (return_type) != BLKmode
4151 && targetm.calls.return_in_msb (return_type))
4152 /* expand_function_end will insert the appropriate padding in
4153 this case. Use the return value's natural (unpadded) mode
4154 within the function proper. */
4155 SET_DECL_RTL (DECL_RESULT (subr),
4156 gen_reg_rtx (TYPE_MODE (return_type)));
4159 /* In order to figure out what mode to use for the pseudo, we
4160 figure out what the mode of the eventual return register will
4161 actually be, and use that. */
4162 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4164 /* Structures that are returned in registers are not
4165 aggregate_value_p, so we may see a PARALLEL or a REG. */
4166 if (REG_P (hard_reg))
4167 SET_DECL_RTL (DECL_RESULT (subr),
4168 gen_reg_rtx (GET_MODE (hard_reg)));
4171 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4172 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4176 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4177 result to the real return register(s). */
4178 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4181 /* Initialize rtx for parameters and local variables.
4182 In some cases this requires emitting insns. */
4183 assign_parms (subr);
4185 /* If function gets a static chain arg, store it. */
4186 if (cfun->static_chain_decl)
4188 tree parm = cfun->static_chain_decl;
4189 rtx local = gen_reg_rtx (Pmode);
4191 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4192 SET_DECL_RTL (parm, local);
4193 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4195 emit_move_insn (local, static_chain_incoming_rtx);
4198 /* If the function receives a non-local goto, then store the
4199 bits we need to restore the frame pointer. */
4200 if (cfun->nonlocal_goto_save_area)
4205 /* ??? We need to do this save early. Unfortunately here is
4206 before the frame variable gets declared. Help out... */
4207 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
/* Save the virtual stack-vars pointer into slot 0 of the save area.  */
4209 t_save = build4 (ARRAY_REF, ptr_type_node,
4210 cfun->nonlocal_goto_save_area,
4211 integer_zero_node, NULL_TREE, NULL_TREE);
4212 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4213 r_save = convert_memory_address (Pmode, r_save);
4215 emit_move_insn (r_save, virtual_stack_vars_rtx);
4216 update_nonlocal_goto_save_area ();
4219 /* The following was moved from init_function_start.
4220 The move is supposed to make sdb output more accurate. */
4221 /* Indicate the beginning of the function body,
4222 as opposed to parm setup. */
4223 emit_note (NOTE_INSN_FUNCTION_BEG);
4225 gcc_assert (NOTE_P (get_last_insn ()));
4227 parm_birth_insn = get_last_insn ();
4229 if (current_function_profile)
4232 PROFILE_HOOK (current_function_funcdef_no);
4236 /* After the display initializations is where the stack checking
/* ... probe should go (continuation missing); expand_function_end emits
   the probe before this placeholder note.  */
4238 if(flag_stack_check)
4239 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4241 /* Make sure there is a line number after the function entry setup code. */
4242 force_next_line_note ();
4245 /* Undo the effects of init_dummy_function_start. */
4247 expand_dummy_function_end (void)
4249 /* End any sequences that failed to be closed due to syntax errors. */
4250 while (in_sequence_p ())
/* (end_sequence call is on a missing line.)  */
4253 /* Outside function body, can't compute type's actual size
4254 until next function's body starts. */
/* Release the dummy context entirely; cfun is reset afterwards.  */
4256 free_after_parsing (cfun);
4257 free_after_compilation (cfun);
4261 /* Call DOIT for each hard register used as a return value from
4262 the current function. */
/* Handles both a single REG and a PARALLEL of (expr_list REG offset)
   pairs; the DOIT call inside the PARALLEL loop is on a missing line.  */
4265 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4267 rtx outgoing = current_function_return_rtx;
4272 if (REG_P (outgoing))
4273 (*doit) (outgoing, arg);
4274 else if (GET_CODE (outgoing) == PARALLEL)
4278 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4280 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
/* Only hard registers are of interest here.  */
4282 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
/* Callback for diddle_return_value: emit a CLOBBER of REG.  */
4289 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4291 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
/* Clobber every return register, and the pseudo holding the return
   value (if any), so dead return values are not kept live.  */
4295 clobber_return_register (void)
4297 diddle_return_value (do_clobber_return_reg, NULL);
4299 /* In case we do use pseudo to return value, clobber it too. */
4300 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4302 tree decl_result = DECL_RESULT (current_function_decl);
4303 rtx decl_rtl = DECL_RTL (decl_result);
4304 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4306 do_clobber_return_reg (decl_rtl, NULL);
/* Callback for diddle_return_value: emit a USE of REG.  */
4312 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4314 emit_insn (gen_rtx_USE (VOIDmode, reg));
/* Mark every return register as used, keeping the value live to the
   function's end.  */
4318 use_return_register (void)
4320 diddle_return_value (do_use_return_reg, NULL);
4323 /* Possibly warn about unused parameters. */
/* Emits -Wunused-parameter for each named, non-artificial PARM_DECL of
   FN that was never used.  */
4325 do_warn_unused_parameter (tree fn)
4329 for (decl = DECL_ARGUMENTS (fn);
4330 decl; decl = TREE_CHAIN (decl))
4331 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4332 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4333 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
/* GC-rooted template trampoline; presumably used by trampoline setup
   code elsewhere in this file — not referenced in this extract.  */
4336 static GTY(()) rtx initial_trampoline;
4338 /* Generate RTL for the end of the current function. */
/* NOTE(review): as elsewhere in this extract, the embedded line numbers
   skip values; the return-type line, braces, several `else`s and some
   sequence start/end calls are missing.  */
4341 expand_function_end (void)
4345 /* If arg_pointer_save_area was referenced only from a nested
4346 function, we will not have initialized it yet. Do that now. */
4347 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4348 get_arg_pointer_save_area (cfun);
4350 /* If we are doing stack checking and this function makes calls,
4351 do a stack probe at the start of the function to ensure we have enough
4352 space for another stack frame. */
4353 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
/* Scan for a CALL insn; if found, emit the probe sequence at the
   placeholder note created by expand_function_start.  */
4357 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4361 probe_stack_range (STACK_CHECK_PROTECT,
4362 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4365 emit_insn_before (seq, stack_check_probe_note);
4370 /* Possibly warn about unused parameters.
4371 When frontend does unit-at-a-time, the warning is already
4372 issued at finalization time. */
4373 if (warn_unused_parameter
4374 && !lang_hooks.callgraph.expand_function)
4375 do_warn_unused_parameter (current_function_decl);
4377 /* End any sequences that failed to be closed due to syntax errors. */
4378 while (in_sequence_p ())
4381 clear_pending_stack_adjust ();
4382 do_pending_stack_adjust ();
4384 /* Mark the end of the function body.
4385 If control reaches this insn, the function can drop through
4386 without returning a value. */
4387 emit_note (NOTE_INSN_FUNCTION_END);
4389 /* Must mark the last line number note in the function, so that the test
4390 coverage code can avoid counting the last line twice. This just tells
4391 the code to ignore the immediately following line note, since there
4392 already exists a copy of this note somewhere above. This line number
4393 note is still needed for debugging though, so we can't delete it. */
4394 if (flag_test_coverage)
4395 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4397 /* Output a linenumber for the end of the function.
4398 SDB depends on this. */
4399 force_next_line_note ();
4400 emit_line_note (input_location);
4402 /* Before the return label (if any), clobber the return
4403 registers so that they are not propagated live to the rest of
4404 the function. This can only happen with functions that drop
4405 through; if there had been a return statement, there would
4406 have either been a return rtx, or a jump to the return label.
4408 We delay actual code generation after the current_function_value_rtx
/* ... is computed (continuation missing).  */
4410 clobber_after = get_last_insn ();
4412 /* Output the label for the actual return from the function. */
4413 emit_label (return_label);
4415 if (USING_SJLJ_EXCEPTIONS)
4417 /* Let except.c know where it should emit the call to unregister
4418 the function context for sjlj exceptions. */
4419 if (flag_exceptions)
4420 sjlj_emit_function_exit_after (get_last_insn ());
4424 /* @@@ This is a kludge. We want to ensure that instructions that
4425 may trap are not moved into the epilogue by scheduling, because
4426 we don't always emit unwind information for the epilogue.
4427 However, not all machine descriptions define a blockage insn, so
4428 emit an ASM_INPUT to act as one. */
4429 if (flag_non_call_exceptions)
4430 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4433 /* If this is an implementation of throw, do what's necessary to
4434 communicate between __builtin_eh_return and the epilogue. */
4435 expand_eh_return ();
4437 /* If scalar return value was computed in a pseudo-reg, or was a named
4438 return value that got dumped to the stack, copy that to the hard
/* ... return register (continuation missing).  */
4440 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4442 tree decl_result = DECL_RESULT (current_function_decl);
4443 rtx decl_rtl = DECL_RTL (decl_result);
4445 if (REG_P (decl_rtl)
4446 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4447 : DECL_REGISTER (decl_result))
4449 rtx real_decl_rtl = current_function_return_rtx;
4451 /* This should be set in assign_parms. */
4452 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4454 /* If this is a BLKmode structure being returned in registers,
4455 then use the mode computed in expand_return. Note that if
4456 decl_rtl is memory, then its mode may have been changed,
4457 but that current_function_return_rtx has not. */
4458 if (GET_MODE (real_decl_rtl) == BLKmode)
4459 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4461 /* If a non-BLKmode return value should be padded at the least
4462 significant end of the register, shift it left by the appropriate
4463 amount. BLKmode results are handled using the group load/store
/* ... machinery (continuation missing).  */
4465 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4466 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4468 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4469 REGNO (real_decl_rtl)),
4471 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4473 /* If a named return value dumped decl_return to memory, then
4474 we may need to re-do the PROMOTE_MODE signed/unsigned
/* ... coercion (continuation missing).  */
4476 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4478 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4480 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4481 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4484 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4486 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4488 /* If expand_function_start has created a PARALLEL for decl_rtl,
4489 move the result to the real return registers. Otherwise, do
4490 a group load from decl_rtl for a named return. */
4491 if (GET_CODE (decl_rtl) == PARALLEL)
4492 emit_group_move (real_decl_rtl, decl_rtl);
4494 emit_group_load (real_decl_rtl, decl_rtl,
4495 TREE_TYPE (decl_result),
4496 int_size_in_bytes (TREE_TYPE (decl_result)));
4498 /* In the case of complex integer modes smaller than a word, we'll
4499 need to generate some non-trivial bitfield insertions. Do that
4500 on a pseudo and not the hard register. */
4501 else if (GET_CODE (decl_rtl) == CONCAT
4502 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4503 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4505 int old_generating_concat_p;
/* Suppress CONCAT generation so the temporary is one pseudo.  */
4508 old_generating_concat_p = generating_concat_p;
4509 generating_concat_p = 0;
4510 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4511 generating_concat_p = old_generating_concat_p;
4513 emit_move_insn (tmp, decl_rtl);
4514 emit_move_insn (real_decl_rtl, tmp);
/* Default case: an ordinary move into the return register.  */
4517 emit_move_insn (real_decl_rtl, decl_rtl);
4521 /* If returning a structure, arrange to return the address of the value
4522 in a place where debuggers expect to find it.
4524 If returning a structure PCC style,
4525 the caller also depends on this value.
4526 And current_function_returns_pcc_struct is not necessarily set. */
4527 if (current_function_returns_struct
4528 || current_function_returns_pcc_struct)
4530 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4531 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4534 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4535 type = TREE_TYPE (type);
/* Strip the MEM wrapper to get the bare address.  */
4537 value_address = XEXP (value_address, 0);
4539 outgoing = targetm.calls.function_value (build_pointer_type (type),
4540 current_function_decl, true);
4542 /* Mark this as a function return value so integrate will delete the
4543 assignment and USE below when inlining this function. */
4544 REG_FUNCTION_VALUE_P (outgoing) = 1;
4546 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4547 value_address = convert_memory_address (GET_MODE (outgoing),
4550 emit_move_insn (outgoing, value_address);
4552 /* Show return register used to hold result (in this case the address
/* ... of the value) (continuation missing).  */
4554 current_function_return_rtx = outgoing;
4557 /* Emit the actual code to clobber return register. */
/* Emitted as a sequence placed after CLOBBER_AFTER, i.e. just before
   the return label (sequence start/end lines are missing here).  */
4562 clobber_return_register ();
4563 expand_naked_return ();
4567 emit_insn_after (seq, clobber_after);
4570 /* Output the label for the naked return from the function. */
4571 emit_label (naked_return_label);
4573 /* If stack protection is enabled for this function, check the guard. */
4574 if (cfun->stack_protect_guard)
4575 stack_protect_epilogue ();
4577 /* If we had calls to alloca, and this machine needs
4578 an accurate stack pointer to exit the function,
4579 insert some code to save and restore the stack pointer. */
4580 if (! EXIT_IGNORE_STACK
4581 && current_function_calls_alloca)
4585 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4586 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4589 /* ??? This should no longer be necessary since stupid is no longer with
4590 us, but there are some parts of the compiler (eg reload_combine, and
4591 sh mach_dep_reorg) that still try and compute their own lifetime info
4592 instead of using the general framework. */
4593 use_return_register ();
/* Return the stack slot (a MEM) in which function F saves its incoming
   argument pointer, allocating the slot lazily on first request.  When F
   is the function currently being compiled, also emit — once — the insn
   sequence that stores virtual_incoming_args_rtx into the slot at the
   entry of the function.
   NOTE(review): this listing appears to have lines elided (return type,
   braces, `seq` capture, the `return ret;`) — verify against the full
   source before editing.  */
4597 get_arg_pointer_save_area (struct function *f)
4599   rtx ret = f->x_arg_pointer_save_area;
/* Slot is created on demand and cached in F for subsequent calls.  */
4603       ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4604       f->x_arg_pointer_save_area = ret;
/* Only the function being compiled gets the one-time entry-point save;
   arg_pointer_save_area_init guards against emitting it twice.  */
4607   if (f == cfun && ! f->arg_pointer_save_area_init)
4611       /* Save the arg pointer at the beginning of the function. The
4612 generated stack slot may not be a valid memory address, so we
4613 have to check it and fix it if necessary. */
4615       emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
/* Insert the generated sequence right after the function entry, inside
   the topmost (outermost) insn sequence.  */
4619       push_topmost_sequence ();
4620       emit_insn_after (seq, entry_of_function ());
4621       pop_topmost_sequence ();
4627 /* Extend a vector that records the INSN_UIDs of INSNS
4628 (a list of one or more insns). */
/* Walks the insn chain starting at INSNS and appends each insn's UID to
   *VECP; used to remember which insns belong to the prologue/epilogue so
   `contains' can identify them later.  */
4631 record_insns (rtx insns, VEC(int,heap) **vecp)
4635   for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4636     VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4639 /* Set the locator of the insn chain starting at INSN to LOC. */
/* Assigns the same locator LOC to every insn from INSN to the end of the
   chain.  NOTE(review): an `if (INSN_P (insn))` guard appears to have
   been elided between the loop header and the assignment — confirm.  */
4641 set_insn_locators (rtx insn, int loc)
4643   while (insn != NULL_RTX)
4646 INSN_LOCATOR (insn) = loc;
4647 insn = NEXT_INSN (insn);
4651 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4652 be running after reorg, SEQUENCE rtl is possible. */
/* Returns a count: for a SEQUENCE insn, how many of its members have a
   UID recorded in *VEC; otherwise 1 or 0 depending on whether INSN's own
   UID is recorded.  Linear scan of *VEC per lookup — acceptable because
   prologue/epilogue vectors are tiny.  */
4655 contains (rtx insn, VEC(int,heap) **vec)
4659   if (NONJUMP_INSN_P (insn)
4660       && GET_CODE (PATTERN (insn)) == SEQUENCE)
4663       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4664 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4665   if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4666       == VEC_index (int, *vec, j))
/* Non-SEQUENCE case: test the single insn's UID against the vector.  */
4672       for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4673 if (INSN_UID (insn) == VEC_index (int, *vec, j))
/* Nonzero iff INSN was recorded as part of the prologue or the epilogue
   (see record_insns).  */
4680 prologue_epilogue_contains (rtx insn)
4682   if (contains (insn, &prologue))
4684   if (contains (insn, &epilogue))
/* Nonzero iff INSN belongs to a recorded sibling-call epilogue; returns
   false when no sibcall epilogues were generated (vector is null).  */
4690 sibcall_epilogue_contains (rtx insn)
4692   if (sibcall_epilogue)
4693     return contains (insn, &sibcall_epilogue);
4698 /* Insert gen_return at the end of block BB. This also means updating
4699 block_for_insn appropriately. */
/* Emits a RETURN jump insn after BB's last insn; LINE_NOTE, when
   present, is copied just before the new jump so the debugger associates
   the return with the closing brace's line.
   NOTE(review): the `if (line_note)` guard appears elided here.  */
4702 emit_return_into_block (basic_block bb, rtx line_note)
4704   emit_jump_insn_after (gen_return (), BB_END (bb));
4706     emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4708 #endif /* HAVE_return */
4710 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4712 /* These functions convert the epilogue into a variant that does not
4713 modify the stack pointer. This is used in cases where a function
4714 returns an object whose size is not known until it is computed.
4715 The called function leaves the object on the stack, leaves the
4716 stack depressed, and returns a pointer to the object.
4718 What we need to do is track all modifications and references to the
4719 stack pointer, deleting the modifications and changing the
4720 references to point to the location the stack pointer would have
4721 pointed to had the modifications taken place.
4723 These functions need to be portable so we need to make as few
4724 assumptions about the epilogue as we can. However, the epilogue
4725 basically contains three things: instructions to reset the stack
4726 pointer, instructions to reload registers, possibly including the
4727 frame pointer, and an instruction to return to the caller.
4729 We must be sure of what a relevant epilogue insn is doing. We also
4730 make no attempt to validate the insns we make since if they are
4731 invalid, we probably can't do anything valid. The intent is that
4732 these routines get "smarter" as more and more machines start to use
4733 them and they try operating on different epilogues.
4735 We use the following structure to track what the part of the
4736 epilogue that we've already processed has done. We keep two copies
4737 of the SP equivalence, one for use during the insn we are
4738 processing and one for use in the next insn. The difference is
4739 because one part of a PARALLEL may adjust SP and the other may use
/* NOTE(review): the `struct epi_info {` opener and the closing `};`
   appear to be elided from this listing; the fields below belong to
   that structure.  */
4744   rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4745   HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4746   rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4747   HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4748   rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4749 should be set to once we no longer need
4751   rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
/* Forward declarations for the helpers used by keep_stack_depressed.  */
4755 static void handle_epilogue_set (rtx, struct epi_info *);
4756 static void update_epilogue_consts (rtx, rtx, void *);
4757 static void emit_equiv_load (struct epi_info *);
4759 /* Modify INSN, a list of one or more insns that is part of the epilogue, so
4760 that it makes no modifications to the stack pointer. Return the new list of insns. */
/* Rewrite the epilogue insn list INSNS so that it never modifies the
   stack pointer, tracking SP's equivalence (register + constant offset)
   in a local `struct epi_info` as it scans each insn; returns the new
   insn list.  Used for functions that return with the stack depressed.
   NOTE(review): many lines (braces, declarations, `return insns;`, etc.)
   appear elided from this listing — verify against the full source.  */
4763 keep_stack_depressed (rtx insns)
4766   struct epi_info info;
4769   /* If the epilogue is just a single instruction, it must be OK as is. */
4770   if (NEXT_INSN (insns) == NULL_RTX)
4773   /* Otherwise, start a sequence, initialize the information we have, and
4774 process all the insns we were given. */
4777   info.sp_equiv_reg = stack_pointer_rtx;
4779   info.equiv_reg_src = 0;
4781   for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4782     info.const_equiv[j] = 0;
4786   while (insn != NULL_RTX)
4788       next = NEXT_INSN (insn);
4797       /* If this insn references the register that SP is equivalent to and
4798 we have a pending load to that register, we must force out the load
4799 first and then indicate we no longer know what SP's equivalent is. */
4800       if (info.equiv_reg_src != 0
4801   && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4803   emit_equiv_load (&info);
4804   info.sp_equiv_reg = 0;
4807       info.new_sp_equiv_reg = info.sp_equiv_reg;
4808       info.new_sp_offset = info.sp_offset;
4810       /* If this is a (RETURN) and the return address is on the stack,
4811 update the address and change to an indirect jump. */
4812       if (GET_CODE (PATTERN (insn)) == RETURN
4813   || (GET_CODE (PATTERN (insn)) == PARALLEL
4814       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4816   rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4818   HOST_WIDE_INT offset = 0;
4819   rtx jump_insn, jump_set;
4821   /* If the return address is in a register, we can emit the insn
4822 unchanged. Otherwise, it must be a MEM and we see what the
4823 base register and offset are. In any case, we have to emit any
4824 pending load to the equivalent reg of SP, if any. */
4825   if (REG_P (retaddr))
4827       emit_equiv_load (&info);
4835       gcc_assert (MEM_P (retaddr));
4837       ret_ptr = XEXP (retaddr, 0);
4839       if (REG_P (ret_ptr))
4841   base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
/* Otherwise the address must be (plus REG CONST_INT).  */
4846   gcc_assert (GET_CODE (ret_ptr) == PLUS
4847       && REG_P (XEXP (ret_ptr, 0))
4848       && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4849   base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4850   offset = INTVAL (XEXP (ret_ptr, 1));
4854       /* If the base of the location containing the return pointer
4855 is SP, we must update it with the replacement address. Otherwise,
4856 just build the necessary MEM. */
4857       retaddr = plus_constant (base, offset);
4858       if (base == stack_pointer_rtx)
4859 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4860 plus_constant (info.sp_equiv_reg,
4863       retaddr = gen_rtx_MEM (Pmode, retaddr);
4864       MEM_NOTRAP_P (retaddr) = 1;
4866       /* If there is a pending load to the equivalent register for SP
4867 and we reference that register, we must load our address into
4868 a scratch register and then do that load. */
4869       if (info.equiv_reg_src
4870   && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
/* Search the call-clobbered hard regs for a usable scratch: must be
   valid in Pmode, not fixed, dead at function exit, and not referenced
   by the pending equivalence source.  */
4875   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4876     if (HARD_REGNO_MODE_OK (regno, Pmode)
4877 && !fixed_regs[regno]
4878 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4880 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4881 && !refers_to_regno_p (regno,
4882        regno + hard_regno_nregs[regno]
4884        info.equiv_reg_src, NULL)
4885 && info.const_equiv[regno] == 0)
/* The loop must have found a scratch register.  */
4888   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4890   reg = gen_rtx_REG (Pmode, regno);
4891   emit_move_insn (reg, retaddr);
4895       emit_equiv_load (&info);
4896       jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4898       /* Show the SET in the above insn is a RETURN. */
4899       jump_set = single_set (jump_insn);
4900       gcc_assert (jump_set);
4901       SET_IS_RETURN_P (jump_set) = 1;
4904       /* If SP is not mentioned in the pattern and its equivalent register, if
4905 any, is not modified, just emit it. Otherwise, if neither is set,
4906 replace the reference to SP and emit the insn. If none of those are
4907 true, handle each SET individually. */
4908       else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4909        && (info.sp_equiv_reg == stack_pointer_rtx
4910    || !reg_set_p (info.sp_equiv_reg, insn)))
4912       else if (! reg_set_p (stack_pointer_rtx, insn)
4913        && (info.sp_equiv_reg == stack_pointer_rtx
4914    || !reg_set_p (info.sp_equiv_reg, insn)))
/* Substitute the tracked SP equivalence directly into the pattern; the
   replacement must validate since we only reach here when SP is used
   but never set by this insn.  */
4918   changed = validate_replace_rtx (stack_pointer_rtx,
4919   plus_constant (info.sp_equiv_reg,
4922   gcc_assert (changed);
4926       else if (GET_CODE (PATTERN (insn)) == SET)
4927 handle_epilogue_set (PATTERN (insn), &info);
4928       else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4930   for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4931     if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4932       handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
/* Commit the per-insn "new" SP equivalence for the next iteration.  */
4937       info.sp_equiv_reg = info.new_sp_equiv_reg;
4938       info.sp_offset = info.new_sp_offset;
4940       /* Now update any constants this insn sets. */
4941       note_stores (PATTERN (insn), update_epilogue_consts, &info);
4945   insns = get_insns ();
4950 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4951 structure that contains information about what we've seen so far. We
4952 process this SET by either updating that data or by emitting one or
/* NOTE(review): this header comment and the function body appear to
   have lines elided (e.g. the "more insns" continuation, braces,
   emit_insn calls) — verify against the full source.  */
4956 handle_epilogue_set (rtx set, struct epi_info *p)
4958   /* First handle the case where we are setting SP. Record what it is being
4959 set from, which we must be able to determine */
4960   if (reg_set_p (stack_pointer_rtx, set))
4962       gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4964       if (GET_CODE (SET_SRC (set)) == PLUS)
4966   p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4967   if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4968     p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
/* The addend is a register: it must be a hard reg whose constant value
   we have been tracking in const_equiv.  */
4971       gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4972   && (REGNO (XEXP (SET_SRC (set), 1))
4973       < FIRST_PSEUDO_REGISTER)
4974   && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4976 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4980 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4982       /* If we are adjusting SP, we adjust from the old data. */
4983       if (p->new_sp_equiv_reg == stack_pointer_rtx)
4985   p->new_sp_equiv_reg = p->sp_equiv_reg;
4986   p->new_sp_offset += p->sp_offset;
4989       gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4994   /* Next handle the case where we are setting SP's equivalent
4995 register. We must not already have a value to set it to. We
4996 could update, but there seems little point in handling that case.
4997 Note that we have to allow for the case where we are setting the
4998 register set in the previous part of a PARALLEL inside a single
4999 insn. But use the old offset for any updates within this insn.
5000 We must allow for the case where the register is being set in a
5001 different (usually wider) mode than Pmode). */
5002   else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5004       gcc_assert (!p->equiv_reg_src
5005   && REG_P (p->new_sp_equiv_reg)
5006   && REG_P (SET_DEST (set))
5007   && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5009   && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
/* Record the value to restore later, with SP references rewritten to
   the tracked equivalence.  */
5011 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5012 plus_constant (p->sp_equiv_reg,
5016   /* Otherwise, replace any references to SP in the insn to its new value
5017 and emit the insn. */
5020       SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5021     plus_constant (p->sp_equiv_reg,
5023       SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5024      plus_constant (p->sp_equiv_reg,
5030 /* Update the tracking information for registers set to constants. */
/* note_stores callback: DEST is the register being stored, X the SET or
   CLOBBER rtx, DATA the epi_info.  Maintains const_equiv[] — the known
   CONST_INT value (or 0 = unknown) of each hard register.
   NOTE(review): `new` is used as an identifier below (legal C, but
   would break under a C++ compiler); its declaration line is not
   visible in this listing.  */
5033 update_epilogue_consts (rtx dest, rtx x, void *data)
5035   struct epi_info *p = (struct epi_info *) data;
/* Only hard registers are tracked; ignore pseudos and non-registers.  */
5038   if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5041   /* If we are either clobbering a register or doing a partial set,
5042 show we don't know the value. */
5043   else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5044     p->const_equiv[REGNO (dest)] = 0;
5046   /* If we are setting it to a constant, record that constant. */
5047   else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5048     p->const_equiv[REGNO (dest)] = SET_SRC (x);
5050   /* If this is a binary operation between a register we have been tracking
5051 and a constant, see if we can compute a new constant value. */
5052   else if (ARITHMETIC_P (SET_SRC (x))
5053    && REG_P (XEXP (SET_SRC (x), 0))
5054    && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5055    && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5056    && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5057    && 0 != (new = simplify_binary_operation
5058     (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5059      p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5060      XEXP (SET_SRC (x), 1)))
5061    && GET_CODE (new) == CONST_INT)
5062     p->const_equiv[REGNO (dest)] = new;
5064   /* Otherwise, we can't do anything with this value. */
5066     p->const_equiv[REGNO (dest)] = 0;
5069 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
/* Flushes the pending restore of SP's equivalent register: emits the
   move and clears equiv_reg_src so the load is not emitted twice.  The
   destination is re-created in the source's mode when the modes differ
   (the register may have been set in a wider mode than Pmode).  */
5072 emit_equiv_load (struct epi_info *p)
5074   if (p->equiv_reg_src != 0)
5076       rtx dest = p->sp_equiv_reg;
5078       if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5079 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5080     REGNO (p->sp_equiv_reg));
5082       emit_move_insn (dest, p->equiv_reg_src);
5083       p->equiv_reg_src = 0;
5088 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5089 this into place with notes indicating where the prologue ends and where
5090 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): this long function appears heavily elided in this
   listing (braces, local declarations, start_sequence/end_sequence
   pairs, several statements) — verify every edit against the full
   source.  High-level flow as visible below:
     1. HAVE_prologue: generate the prologue, record its insns, emit
        NOTE_INSN_PROLOGUE_END, and insert it on the single entry edge.
     2. HAVE_return: try to replace jumps-to-exit with (conditional)
        return insns when the final block is empty.
     3. HAVE_epilogue: generate the epilogue (massaging it via
        keep_stack_depressed when the function returns with the stack
        depressed) and insert it on the exit fallthru edge.
     4. HAVE_sibcall_epilogue: emit sibling-call epilogues before each
        sibling call site.
     5. Post-pass fixups of debug line-number notes around the emitted
        prologue/epilogue.  */
5093 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5097 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5100 #ifdef HAVE_prologue
5101   rtx prologue_end = NULL_RTX;
5103 #if defined (HAVE_epilogue) || defined(HAVE_return)
5104   rtx epilogue_end = NULL_RTX;
5108 #ifdef HAVE_prologue
5112       seq = gen_prologue ();
5115       /* Retain a map of the prologue insns. */
5116       record_insns (seq, &prologue);
5117       prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5121       set_insn_locators (seq, prologue_locator);
5123       /* Can't deal with multiple successors of the entry block
5124 at the moment. Function should always have at least one
5126       gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5128       insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5133   /* If the exit block has no non-fake predecessors, we don't need
5135   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5136     if ((e->flags & EDGE_FAKE) == 0)
5142   if (optimize && HAVE_return)
5144       /* If we're allowed to generate a simple return instruction,
5145 then by definition we don't need a full epilogue. Examine
5146 the block that falls through to EXIT. If it does not
5147 contain any code, examine its predecessors and try to
5148 emit (conditional) return instructions. */
5153       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5154 if (e->flags & EDGE_FALLTHRU)
5160       /* Verify that there are no active instructions in the last block. */
5161       label = BB_END (last);
5162       while (label && !LABEL_P (label))
5164   if (active_insn_p (label))
5166   label = PREV_INSN (label);
5169       if (BB_HEAD (last) == label && LABEL_P (label))
5172   rtx epilogue_line_note = NULL_RTX;
5174   /* Locate the line number associated with the closing brace,
5175 if we can find one. */
5176   for (seq = get_last_insn ();
5177        seq && ! active_insn_p (seq);
5178        seq = PREV_INSN (seq))
5179     if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5181 epilogue_line_note = seq;
5185   for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5187       basic_block bb = e->src;
5190       if (bb == ENTRY_BLOCK_PTR)
/* Only predecessors that jump to LABEL are candidates for return
   conversion.  */
5197       if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5203       /* If we have an unconditional jump, we can replace that
5204 with a simple return instruction. */
5205       if (simplejump_p (jump))
5207   emit_return_into_block (bb, epilogue_line_note);
5211       /* If we have a conditional jump, we can try to replace
5212 that with a conditional return instruction. */
5213       else if (condjump_p (jump))
5215   if (! redirect_jump (jump, 0, 0))
5221       /* If this block has only one successor, it both jumps
5222 and falls through to the fallthru block, so we can't
5224       if (single_succ_p (bb))
5236       /* Fix up the CFG for the successful change we just made. */
5237       redirect_edge_succ (e, EXIT_BLOCK_PTR);
5240   /* Emit a return insn for the exit fallthru block. Whether
5241 this is still reachable will be determined later. */
5243   emit_barrier_after (BB_END (last));
5244   emit_return_into_block (last, epilogue_line_note);
5245   epilogue_end = BB_END (last);
5246   single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5251   /* Find the edge that falls through to EXIT. Other edges may exist
5252 due to RETURN instructions, but those don't need epilogues.
5253 There really shouldn't be a mixture -- either all should have
5254 been converted or none, however... */
5256   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5257     if (e->flags & EDGE_FALLTHRU)
5262 #ifdef HAVE_epilogue
5266       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5268       seq = gen_epilogue ();
5270 #ifdef INCOMING_RETURN_ADDR_RTX
5271       /* If this function returns with the stack depressed and we can support
5272 it, massage the epilogue to actually do that. */
5273       if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5274   && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5275 seq = keep_stack_depressed (seq);
5278       emit_jump_insn (seq);
5280       /* Retain a map of the epilogue insns. */
5281       record_insns (seq, &epilogue);
5282       set_insn_locators (seq, epilogue_locator);
5287       insert_insn_on_edge (seq, e);
5295       if (! next_active_insn (BB_END (e->src)))
5297   /* We have a fall-through edge to the exit block, the source is not
5298 at the end of the function, and there will be an assembler epilogue
5299 at the end of the function.
5300 We can't use force_nonfallthru here, because that would try to
5301 use return. Inserting a jump 'by hand' is extremely messy, so
5302 we take advantage of cfg_layout_finalize using
5303 fixup_fallthru_exit_predecessor. */
5304   cfg_layout_initialize (0);
5305   FOR_EACH_BB (cur_bb)
5306     if (cur_bb->index >= NUM_FIXED_BLOCKS
5307 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5308       cur_bb->aux = cur_bb->next_bb;
5309   cfg_layout_finalize ();
5314   commit_edge_insertions ();
5316 #ifdef HAVE_sibcall_epilogue
5317   /* Emit sibling epilogues before any sibling call sites. */
5318   for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5320       basic_block bb = e->src;
5321       rtx insn = BB_END (bb);
5324   || ! SIBLING_CALL_P (insn))
5331       emit_insn (gen_sibcall_epilogue ());
5335       /* Retain a map of the epilogue insns. Used in life analysis to
5336 avoid getting rid of sibcall epilogue insns. Do this before we
5337 actually emit the sequence. */
5338       record_insns (seq, &sibcall_epilogue);
5339       set_insn_locators (seq, epilogue_locator);
5341       emit_insn_before (seq, insn);
5346 #ifdef HAVE_prologue
5347   /* This is probably all useless now that we use locators. */
5352       /* GDB handles `break f' by setting a breakpoint on the first
5353 line note after the prologue. Which means (1) that if
5354 there are line number notes before where we inserted the
5355 prologue we should move them, and (2) we should generate a
5356 note before the end of the first basic block, if there isn't
5359 ??? This behavior is completely broken when dealing with
5360 multiple entry functions. We simply place the note always
5361 into first basic block and let alternate entry points
5365       for (insn = prologue_end; insn; insn = prev)
5367   prev = PREV_INSN (insn);
5368   if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5370       /* Note that we cannot reorder the first insn in the
5371 chain, since rest_of_compilation relies on that
5372 remaining constant. */
5375       reorder_insns (insn, insn, prologue_end);
5379       /* Find the last line number note in the first block. */
5380       for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5381    insn != prologue_end && insn;
5382    insn = PREV_INSN (insn))
5383 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5386       /* If we didn't find one, make a copy of the first line number
5390 for (insn = next_active_insn (prologue_end);
5392      insn = PREV_INSN (insn))
5393   if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5395       emit_note_copy_after (insn, prologue_end);
5401 #ifdef HAVE_epilogue
5406       /* Similarly, move any line notes that appear after the epilogue.
5407 There is no need, however, to be quite so anal about the existence
5408 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5409 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5411       for (insn = epilogue_end; insn; insn = next)
5413   next = NEXT_INSN (insn);
5415       && (NOTE_LINE_NUMBER (insn) > 0
5416   || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5417   || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5418     reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5424 /* Reposition the prologue-end and epilogue-begin notes after instruction
5425 scheduling and delayed branch scheduling. */
/* Scheduling may move prologue/epilogue insns past their bracketing
   notes; this pass scans the insn stream and moves
   NOTE_INSN_PROLOGUE_END just after the last prologue insn and
   NOTE_INSN_EPILOGUE_BEG just before the first epilogue insn.
   NOTE(review): lines appear elided in this listing (braces, the `len`
   countdown logic, note-found bookkeeping) — verify against the full
   source.  */
5428 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5430 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5431   rtx insn, last, note;
5434   if ((len = VEC_length (int, prologue)) > 0)
5438       /* Scan from the beginning until we reach the last prologue insn.
5439 We apparently can't depend on basic_block_{head,end} after
5441       for (insn = f; insn; insn = NEXT_INSN (insn))
5445       if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5448   else if (contains (insn, &prologue))
5458   /* Find the prologue-end note if we haven't already, and
5459 move it to just after the last prologue insn. */
5462     for (note = last; (note = NEXT_INSN (note));)
5464   && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5468   /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5470     last = NEXT_INSN (last);
5471   reorder_insns (note, note, last);
5475   if ((len = VEC_length (int, epilogue)) > 0)
5479       /* Scan from the end until we reach the first epilogue insn.
5480 We apparently can't depend on basic_block_{head,end} after
5482       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5486       if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5489   else if (contains (insn, &epilogue))
5499   /* Find the epilogue-begin note if we haven't already, and
5500 move it to just before the first epilogue insn. */
5503     for (note = insn; (note = PREV_INSN (note));)
5505   && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
/* Only move the note when it is not already immediately before the
   first epilogue insn.  */
5509       if (PREV_INSN (last) != note)
5510 reorder_insns (note, note, PREV_INSN (last));
5513 #endif /* HAVE_prologue or HAVE_epilogue */
5516 /* Resets insn_block_boundaries array. */
/* (Re)initializes cfun->ib_boundaries_block with capacity 100 and a
   NULL_TREE sentinel for insn UID 0, which is never used.  */
5519 reset_block_changes (void)
5521   VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5522   VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5525 /* Record the boundary for BLOCK. */
/* Extends ib_boundaries_block so every insn UID emitted since the last
   call maps to the previous top-of-stack block, then pushes BLOCK as
   the boundary for the next insn.  Early-outs when boundary tracking is
   not active.
   NOTE(review): declarations of `i`, `n`, `last_block` and the BLOCK
   null-check appear elided in this listing — verify.  */
5527 record_block_change (tree block)
5535   if(!cfun->ib_boundaries_block)
5538   last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5539   VARRAY_POP (cfun->ib_boundaries_block);
/* Back-fill UIDs created since the previous boundary with the old
   block.  */
5541   for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5542     VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5544   VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5547 /* Finishes record of boundaries. */
/* Closes the boundary record by mapping all remaining insns to the
   function's outermost block (DECL_INITIAL).  */
5548 void finalize_block_changes (void)
5550   record_block_change (DECL_INITIAL (current_function_decl));
5553 /* For INSN return the BLOCK it belongs to. */
/* Stores the recorded BLOCK for INSN's UID into *BLOCK; UIDs beyond the
   recorded range are left untouched (early return).  */
5555 check_block_change (rtx insn, tree *block)
5557   unsigned uid = INSN_UID (insn);
5559   if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5562   *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5565 /* Releases the ib_boundaries_block records. */
/* Drops the varray reference; GC reclaims the storage.  */
5567 free_block_changes (void)
5569   cfun->ib_boundaries_block = NULL;
5572 /* Returns the name of the current function. */
/* Delegates to the language hook; verbosity 2 requests the full
   printable name.  */
5574 current_function_name (void)
5576   return lang_hooks.decl_printable_name (cfun->decl, 2);
/* Pass body: on targets defining LEAF_REGISTERS, record whether the
   optimized function touches only leaf registers and is a leaf function,
   enabling leaf-function register renumbering later.  */
5581 rest_of_handle_check_leaf_regs (void)
5583 #ifdef LEAF_REGISTERS
5584   current_function_uses_only_leaf_regs
5585     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5590 /* Insert a type into the used types hash table. */
/* Records type T as used by FUNC (for debug output), creating the
   GC-allocated pointer-hash table lazily on first insertion.  No-op
   when T or FUNC is null.  */
5592 used_types_insert (tree t, struct function *func)
5594   if (t != NULL && func != NULL)
5598       if (func->used_types_hash == NULL)
5599 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5600  htab_eq_pointer, NULL);
5601       slot = htab_find_slot (func->used_types_hash, t, INSERT);
/* Pass descriptor for the leaf-register check; runs
   rest_of_handle_check_leaf_regs with no gate.
   NOTE(review): several initializer lines (name, gate, sub/next,
   tv_id, letter) appear elided in this listing — verify field order
   against struct tree_opt_pass.  */
5607 struct tree_opt_pass pass_leaf_regs =
5611   rest_of_handle_check_leaf_regs, /* execute */
5614   0, /* static_pass_number */
5616   0, /* properties_required */
5617   0, /* properties_provided */
5618   0, /* properties_destroyed */
5619   0, /* todo_flags_start */
5620   0, /* todo_flags_finish */
5625 #include "gt-function.h"