1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
39 #include "coretypes.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
54 #include "basic-block.h"
59 #include "integrate.h"
60 #include "langhooks.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
/* Provide overridable defaults for target macros this file uses.
   A port that does not define them gets the neutral behavior below.  */

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Width of the stack boundary expressed in bytes rather than bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 life_analysis has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
133 static VEC(int,heap) *sibcall_epilogue;
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
149 Automatic variables are also assigned temporary slots, at the nesting
150 level where they are defined. They are marked a "kept" so that
151 free_temp_slots will not free them. */
153 struct temp_slot GTY(())
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
160 /* The rtx to used to reference the slot. */
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
165 /* The alignment (in bits) of the slot. */
167 /* The size, in units, of the slot. */
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
174 /* Nonzero if this temporary is currently in use. */
176 /* Nonzero if this temporary has its address taken. */
178 /* Nesting level at which this slot is being used. */
180 /* Nonzero if this should survive a call to free_temp_slots. */
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
190 /* Forward declarations. */
192 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
194 static struct temp_slot *find_temp_slot_from_address (rtx);
195 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
196 static void pad_below (struct args_size *, enum machine_mode, tree);
197 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
198 static void reorder_fix_fragments (tree);
199 static int all_blocks (tree, tree *);
200 static tree *get_block_vector (tree, int *);
201 extern tree debug_find_var_in_block_tree (tree, tree);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
205 static int contains (rtx, VEC(int,heap) **);
207 static void emit_return_into_block (basic_block, rtx);
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx keep_stack_depressed (rtx);
212 static void prepare_function_start (tree);
213 static void do_clobber_return_reg (rtx, void *);
214 static void do_use_return_reg (rtx, void *);
215 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 /* Pointer to chain of `struct function' for containing functions. */
218 struct function *outer_function_chain;
220 /* Given a function decl for a containing function,
221 return the `struct function' for it. */
224 find_function_data (tree decl)
228 for (p = outer_function_chain; p; p = p->outer)
235 /* Save the current context for compilation of a nested function.
236 This is called from language-specific code. The caller should use
237 the enter_nested langhook to save any language-specific state,
238 since this function knows only about language-independent
242 push_function_context_to (tree context ATTRIBUTE_UNUSED)
247 init_dummy_function_start ();
250 p->outer = outer_function_chain;
251 outer_function_chain = p;
253 lang_hooks.function.enter_nested (p);
259 push_function_context (void)
261 push_function_context_to (current_function_decl);
264 /* Restore the last saved context, at the end of a nested function.
265 This function is called from language-specific code. */
268 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
270 struct function *p = outer_function_chain;
273 outer_function_chain = p->outer;
275 current_function_decl = p->decl;
277 lang_hooks.function.leave_nested (p);
279 /* Reset variables that have known state during rtx generation. */
280 virtuals_instantiated = 0;
281 generating_concat_p = 1;
285 pop_function_context (void)
287 pop_function_context_from (current_function_decl);
290 /* Clear out all parts of the state in F that can safely be discarded
291 after the function has been parsed, but not compiled, to let
292 garbage collection reclaim the memory. */
295 free_after_parsing (struct function *f)
297 /* f->expr->forced_labels is used by code generation. */
298 /* f->emit->regno_reg_rtx is used by code generation. */
299 /* f->varasm is used by code generation. */
300 /* f->eh->eh_return_stub_label is used by code generation. */
302 lang_hooks.function.final (f);
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been compiled, to let garbage collection
307 reclaim the memory. */
310 free_after_compilation (struct function *f)
312 VEC_free (int, heap, prologue);
313 VEC_free (int, heap, epilogue);
314 VEC_free (int, heap, sibcall_epilogue);
323 f->x_avail_temp_slots = NULL;
324 f->x_used_temp_slots = NULL;
325 f->arg_offset_rtx = NULL;
326 f->return_rtx = NULL;
327 f->internal_arg_pointer = NULL;
328 f->x_nonlocal_goto_handler_labels = NULL;
329 f->x_return_label = NULL;
330 f->x_naked_return_label = NULL;
331 f->x_stack_slot_list = NULL;
332 f->x_stack_check_probe_note = NULL;
333 f->x_arg_pointer_save_area = NULL;
334 f->x_parm_birth_insn = NULL;
335 f->epilogue_delay_list = NULL;
338 /* Allocate fixed slots in the stack frame of the current function. */
340 /* Return size needed for stack frame based on slots so far allocated in
342 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
343 the caller may have to do that. */
346 get_func_frame_size (struct function *f)
348 if (FRAME_GROWS_DOWNWARD)
349 return -f->x_frame_offset;
351 return f->x_frame_offset;
354 /* Return size needed for stack frame based on slots so far allocated.
355 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
356 the caller may have to do that. */
359 get_frame_size (void)
361 return get_func_frame_size (cfun);
364 /* Issue an error message and return TRUE if frame OFFSET overflows in
365 the signed target pointer arithmetics for function FUNC. Otherwise
369 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
371 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
373 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
374 /* Leave room for the fixed part of the frame. */
375 - 64 * UNITS_PER_WORD)
377 error ("%Jtotal size of local objects too large", func);
384 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
385 with machine mode MODE.
387 ALIGN controls the amount of alignment for the address of the slot:
388 0 means according to MODE,
389 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
390 -2 means use BITS_PER_UNIT,
391 positive specifies alignment boundary in bits.
393 We do not round to stack_boundary here.
395 FUNCTION specifies the function to allocate in. */
398 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
399 struct function *function)
402 int bigend_correction = 0;
403 unsigned int alignment;
404 int frame_off, frame_alignment, frame_phase;
411 alignment = BIGGEST_ALIGNMENT;
413 alignment = GET_MODE_ALIGNMENT (mode);
415 /* Allow the target to (possibly) increase the alignment of this
417 type = lang_hooks.types.type_for_mode (mode, 0);
419 alignment = LOCAL_ALIGNMENT (type, alignment);
421 alignment /= BITS_PER_UNIT;
423 else if (align == -1)
425 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
426 size = CEIL_ROUND (size, alignment);
428 else if (align == -2)
429 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
431 alignment = align / BITS_PER_UNIT;
433 if (FRAME_GROWS_DOWNWARD)
434 function->x_frame_offset -= size;
436 /* Ignore alignment we can't do with expected alignment of the boundary. */
437 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
438 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
440 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
441 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
443 /* Calculate how many bytes the start of local variables is off from
445 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
446 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
447 frame_phase = frame_off ? frame_alignment - frame_off : 0;
449 /* Round the frame offset to the specified alignment. The default is
450 to always honor requests to align the stack but a port may choose to
451 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
452 if (STACK_ALIGNMENT_NEEDED
456 /* We must be careful here, since FRAME_OFFSET might be negative and
457 division with a negative dividend isn't as well defined as we might
458 like. So we instead assume that ALIGNMENT is a power of two and
459 use logical operations which are unambiguous. */
460 if (FRAME_GROWS_DOWNWARD)
461 function->x_frame_offset
462 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
463 (unsigned HOST_WIDE_INT) alignment)
466 function->x_frame_offset
467 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
468 (unsigned HOST_WIDE_INT) alignment)
472 /* On a big-endian machine, if we are allocating more space than we will use,
473 use the least significant bytes of those that are allocated. */
474 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
475 bigend_correction = size - GET_MODE_SIZE (mode);
477 /* If we have already instantiated virtual registers, return the actual
478 address relative to the frame pointer. */
479 if (function == cfun && virtuals_instantiated)
480 addr = plus_constant (frame_pointer_rtx,
482 (frame_offset + bigend_correction
483 + STARTING_FRAME_OFFSET, Pmode));
485 addr = plus_constant (virtual_stack_vars_rtx,
487 (function->x_frame_offset + bigend_correction,
490 if (!FRAME_GROWS_DOWNWARD)
491 function->x_frame_offset += size;
493 x = gen_rtx_MEM (mode, addr);
494 MEM_NOTRAP_P (x) = 1;
496 function->x_stack_slot_list
497 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
499 if (frame_offset_overflow (function->x_frame_offset, function->decl))
500 function->x_frame_offset = 0;
505 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
509 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
511 return assign_stack_local_1 (mode, size, align, cfun);
515 /* Removes temporary slot TEMP from LIST. */
518 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
521 temp->next->prev = temp->prev;
523 temp->prev->next = temp->next;
527 temp->prev = temp->next = NULL;
530 /* Inserts temporary slot TEMP to LIST. */
533 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
537 (*list)->prev = temp;
542 /* Returns the list of used temp slots at LEVEL. */
544 static struct temp_slot **
545 temp_slots_at_level (int level)
547 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
549 size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
552 VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
553 p = VEC_address (temp_slot_p, used_temp_slots);
554 memset (&p[old_length], 0,
555 sizeof (temp_slot_p) * (level + 1 - old_length));
558 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
561 /* Returns the maximal temporary slot level. */
564 max_slot_level (void)
566 if (!used_temp_slots)
569 return VEC_length (temp_slot_p, used_temp_slots) - 1;
572 /* Moves temporary slot TEMP to LEVEL. */
575 move_slot_to_level (struct temp_slot *temp, int level)
577 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
578 insert_slot_to_list (temp, temp_slots_at_level (level));
582 /* Make temporary slot TEMP available. */
585 make_slot_available (struct temp_slot *temp)
587 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
588 insert_slot_to_list (temp, &avail_temp_slots);
593 /* Allocate a temporary stack slot and record it for possible later
596 MODE is the machine mode to be given to the returned rtx.
598 SIZE is the size in units of the space required. We do no rounding here
599 since assign_stack_local will do any required rounding.
601 KEEP is 1 if this slot is to be retained after a call to
602 free_temp_slots. Automatic variables for a block are allocated
603 with this flag. KEEP values of 2 or 3 were needed respectively
604 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
605 or for SAVE_EXPRs, but they are now unused.
607 TYPE is the type that will be used for the stack slot. */
610 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
614 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
617 /* If SIZE is -1 it means that somebody tried to allocate a temporary
618 of a variable size. */
619 gcc_assert (size != -1);
621 /* These are now unused. */
622 gcc_assert (keep <= 1);
625 align = BIGGEST_ALIGNMENT;
627 align = GET_MODE_ALIGNMENT (mode);
630 type = lang_hooks.types.type_for_mode (mode, 0);
633 align = LOCAL_ALIGNMENT (type, align);
635 /* Try to find an available, already-allocated temporary of the proper
636 mode which meets the size and alignment requirements. Choose the
637 smallest one with the closest alignment.
639 If assign_stack_temp is called outside of the tree->rtl expansion,
640 we cannot reuse the stack slots (that may still refer to
641 VIRTUAL_STACK_VARS_REGNUM). */
642 if (!virtuals_instantiated)
644 for (p = avail_temp_slots; p; p = p->next)
646 if (p->align >= align && p->size >= size
647 && GET_MODE (p->slot) == mode
648 && objects_must_conflict_p (p->type, type)
649 && (best_p == 0 || best_p->size > p->size
650 || (best_p->size == p->size && best_p->align > p->align)))
652 if (p->align == align && p->size == size)
655 cut_slot_from_list (selected, &avail_temp_slots);
664 /* Make our best, if any, the one to use. */
668 cut_slot_from_list (selected, &avail_temp_slots);
670 /* If there are enough aligned bytes left over, make them into a new
671 temp_slot so that the extra bytes don't get wasted. Do this only
672 for BLKmode slots, so that we can be sure of the alignment. */
673 if (GET_MODE (best_p->slot) == BLKmode)
675 int alignment = best_p->align / BITS_PER_UNIT;
676 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
678 if (best_p->size - rounded_size >= alignment)
680 p = ggc_alloc (sizeof (struct temp_slot));
681 p->in_use = p->addr_taken = 0;
682 p->size = best_p->size - rounded_size;
683 p->base_offset = best_p->base_offset + rounded_size;
684 p->full_size = best_p->full_size - rounded_size;
685 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
686 p->align = best_p->align;
688 p->type = best_p->type;
689 insert_slot_to_list (p, &avail_temp_slots);
691 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
694 best_p->size = rounded_size;
695 best_p->full_size = rounded_size;
700 /* If we still didn't find one, make a new temporary. */
703 HOST_WIDE_INT frame_offset_old = frame_offset;
705 p = ggc_alloc (sizeof (struct temp_slot));
707 /* We are passing an explicit alignment request to assign_stack_local.
708 One side effect of that is assign_stack_local will not round SIZE
709 to ensure the frame offset remains suitably aligned.
711 So for requests which depended on the rounding of SIZE, we go ahead
712 and round it now. We also make sure ALIGNMENT is at least
713 BIGGEST_ALIGNMENT. */
714 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
715 p->slot = assign_stack_local (mode,
717 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
723 /* The following slot size computation is necessary because we don't
724 know the actual size of the temporary slot until assign_stack_local
725 has performed all the frame alignment and size rounding for the
726 requested temporary. Note that extra space added for alignment
727 can be either above or below this stack slot depending on which
728 way the frame grows. We include the extra space if and only if it
729 is above this slot. */
730 if (FRAME_GROWS_DOWNWARD)
731 p->size = frame_offset_old - frame_offset;
735 /* Now define the fields used by combine_temp_slots. */
736 if (FRAME_GROWS_DOWNWARD)
738 p->base_offset = frame_offset;
739 p->full_size = frame_offset_old - frame_offset;
743 p->base_offset = frame_offset_old;
744 p->full_size = frame_offset - frame_offset_old;
755 p->level = temp_slot_level;
758 pp = temp_slots_at_level (p->level);
759 insert_slot_to_list (p, pp);
761 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
762 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
763 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
765 /* If we know the alias set for the memory that will be used, use
766 it. If there's no TYPE, then we don't know anything about the
767 alias set for the memory. */
768 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
769 set_mem_align (slot, align);
771 /* If a type is specified, set the relevant flags. */
774 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
775 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
777 MEM_NOTRAP_P (slot) = 1;
782 /* Allocate a temporary stack slot and record it for possible later
783 reuse. First three arguments are same as in preceding function. */
786 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
788 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
791 /* Assign a temporary.
792 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
793 and so that should be used in error messages. In either case, we
794 allocate of the given type.
795 KEEP is as for assign_stack_temp.
796 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
797 it is 0 if a register is OK.
798 DONT_PROMOTE is 1 if we should not promote values in register
802 assign_temp (tree type_or_decl, int keep, int memory_required,
803 int dont_promote ATTRIBUTE_UNUSED)
806 enum machine_mode mode;
811 if (DECL_P (type_or_decl))
812 decl = type_or_decl, type = TREE_TYPE (decl);
814 decl = NULL, type = type_or_decl;
816 mode = TYPE_MODE (type);
818 unsignedp = TYPE_UNSIGNED (type);
821 if (mode == BLKmode || memory_required)
823 HOST_WIDE_INT size = int_size_in_bytes (type);
826 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
827 problems with allocating the stack space. */
831 /* Unfortunately, we don't yet know how to allocate variable-sized
832 temporaries. However, sometimes we can find a fixed upper limit on
833 the size, so try that instead. */
835 size = max_int_size_in_bytes (type);
837 /* The size of the temporary may be too large to fit into an integer. */
838 /* ??? Not sure this should happen except for user silliness, so limit
839 this to things that aren't compiler-generated temporaries. The
840 rest of the time we'll die in assign_stack_temp_for_type. */
841 if (decl && size == -1
842 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
844 error ("size of variable %q+D is too large", decl);
848 tmp = assign_stack_temp_for_type (mode, size, keep, type);
854 mode = promote_mode (type, mode, &unsignedp, 0);
857 return gen_reg_rtx (mode);
860 /* Combine temporary stack slots which are adjacent on the stack.
862 This allows for better use of already allocated stack space. This is only
863 done for BLKmode slots because we can be sure that we won't have alignment
864 problems in this case. */
867 combine_temp_slots (void)
869 struct temp_slot *p, *q, *next, *next_q;
872 /* We can't combine slots, because the information about which slot
873 is in which alias set will be lost. */
874 if (flag_strict_aliasing)
877 /* If there are a lot of temp slots, don't do anything unless
878 high levels of optimization. */
879 if (! flag_expensive_optimizations)
880 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
881 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
884 for (p = avail_temp_slots; p; p = next)
890 if (GET_MODE (p->slot) != BLKmode)
893 for (q = p->next; q; q = next_q)
899 if (GET_MODE (q->slot) != BLKmode)
902 if (p->base_offset + p->full_size == q->base_offset)
904 /* Q comes after P; combine Q into P. */
906 p->full_size += q->full_size;
909 else if (q->base_offset + q->full_size == p->base_offset)
911 /* P comes after Q; combine P into Q. */
913 q->full_size += p->full_size;
918 cut_slot_from_list (q, &avail_temp_slots);
921 /* Either delete P or advance past it. */
923 cut_slot_from_list (p, &avail_temp_slots);
927 /* Find the temp slot corresponding to the object at address X. */
929 static struct temp_slot *
930 find_temp_slot_from_address (rtx x)
936 for (i = max_slot_level (); i >= 0; i--)
937 for (p = *temp_slots_at_level (i); p; p = p->next)
939 if (XEXP (p->slot, 0) == x
941 || (GET_CODE (x) == PLUS
942 && XEXP (x, 0) == virtual_stack_vars_rtx
943 && GET_CODE (XEXP (x, 1)) == CONST_INT
944 && INTVAL (XEXP (x, 1)) >= p->base_offset
945 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
948 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
949 for (next = p->address; next; next = XEXP (next, 1))
950 if (XEXP (next, 0) == x)
954 /* If we have a sum involving a register, see if it points to a temp
956 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
957 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
959 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
960 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
966 /* Indicate that NEW is an alternate way of referring to the temp slot
967 that previously was known by OLD. */
970 update_temp_slot_address (rtx old, rtx new)
974 if (rtx_equal_p (old, new))
977 p = find_temp_slot_from_address (old);
979 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
980 is a register, see if one operand of the PLUS is a temporary
981 location. If so, NEW points into it. Otherwise, if both OLD and
982 NEW are a PLUS and if there is a register in common between them.
983 If so, try a recursive call on those values. */
986 if (GET_CODE (old) != PLUS)
991 update_temp_slot_address (XEXP (old, 0), new);
992 update_temp_slot_address (XEXP (old, 1), new);
995 else if (GET_CODE (new) != PLUS)
998 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
999 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1000 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1001 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1002 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1003 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1004 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1005 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1010 /* Otherwise add an alias for the temp's address. */
1011 else if (p->address == 0)
1015 if (GET_CODE (p->address) != EXPR_LIST)
1016 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1018 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1022 /* If X could be a reference to a temporary slot, mark the fact that its
1023 address was taken. */
1026 mark_temp_addr_taken (rtx x)
1028 struct temp_slot *p;
1033 /* If X is not in memory or is at a constant address, it cannot be in
1034 a temporary slot. */
1035 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1038 p = find_temp_slot_from_address (XEXP (x, 0));
1043 /* If X could be a reference to a temporary slot, mark that slot as
1044 belonging to the to one level higher than the current level. If X
1045 matched one of our slots, just mark that one. Otherwise, we can't
1046 easily predict which it is, so upgrade all of them. Kept slots
1047 need not be touched.
1049 This is called when an ({...}) construct occurs and a statement
1050 returns a value in memory. */
1053 preserve_temp_slots (rtx x)
1055 struct temp_slot *p = 0, *next;
1057 /* If there is no result, we still might have some objects whose address
1058 were taken, so we need to make sure they stay around. */
1061 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1066 move_slot_to_level (p, temp_slot_level - 1);
1072 /* If X is a register that is being used as a pointer, see if we have
1073 a temporary slot we know it points to. To be consistent with
1074 the code below, we really should preserve all non-kept slots
1075 if we can't find a match, but that seems to be much too costly. */
1076 if (REG_P (x) && REG_POINTER (x))
1077 p = find_temp_slot_from_address (x);
1079 /* If X is not in memory or is at a constant address, it cannot be in
1080 a temporary slot, but it can contain something whose address was
1082 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1084 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1089 move_slot_to_level (p, temp_slot_level - 1);
1095 /* First see if we can find a match. */
1097 p = find_temp_slot_from_address (XEXP (x, 0));
1101 /* Move everything at our level whose address was taken to our new
1102 level in case we used its address. */
1103 struct temp_slot *q;
1105 if (p->level == temp_slot_level)
1107 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1111 if (p != q && q->addr_taken)
1112 move_slot_to_level (q, temp_slot_level - 1);
1115 move_slot_to_level (p, temp_slot_level - 1);
1121 /* Otherwise, preserve all non-kept slots at this level. */
1122 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1127 move_slot_to_level (p, temp_slot_level - 1);
1131 /* Free all temporaries used so far. This is normally called at the
1132 end of generating code for a statement. */
1135 free_temp_slots (void)
1137 struct temp_slot *p, *next;
1139 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1144 make_slot_available (p);
1147 combine_temp_slots ();
1150 /* Push deeper into the nesting level for stack temporaries. */
1153 push_temp_slots (void)
1158 /* Pop a temporary nesting level. All slots in use in the current level
1162 pop_temp_slots (void)
1164 struct temp_slot *p, *next;
1166 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1169 make_slot_available (p);
1172 combine_temp_slots ();
1177 /* Initialize temporary slots. */
1180 init_temp_slots (void)
1182 /* We have not allocated any temporaries yet. */
1183 avail_temp_slots = 0;
1184 used_temp_slots = 0;
1185 temp_slot_level = 0;
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif

#endif
1236 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1237 is a virtual register, return the equivalent hard register and set the
1238 offset indirectly through the pointer. Otherwise, return 0. */
1241 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1244   HOST_WIDE_INT offset;
     /* Map each virtual register to its (hard base register, offset) pair,
	using the offsets computed by instantiate_virtual_regs.  */
1246   if (x == virtual_incoming_args_rtx)
1247     new = arg_pointer_rtx, offset = in_arg_offset;
1248   else if (x == virtual_stack_vars_rtx)
1249     new = frame_pointer_rtx, offset = var_offset;
1250   else if (x == virtual_stack_dynamic_rtx)
1251     new = stack_pointer_rtx, offset = dynamic_offset;
1252   else if (x == virtual_outgoing_args_rtx)
1253     new = stack_pointer_rtx, offset = out_arg_offset;
1254   else if (x == virtual_cfa_rtx)
     /* The CFA is based on the frame pointer when FRAME_POINTER_CFA_OFFSET
	is defined, and on the arg pointer otherwise.  */
1256 #ifdef FRAME_POINTER_CFA_OFFSET
1257       new = frame_pointer_rtx;
1259       new = arg_pointer_rtx;
1261       offset = cfa_offset;
1270 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1271 Instantiate any virtual registers present inside of *LOC. The expression
1272 is simplified, as much as possible, but is not to be considered "valid"
1273 in any sense implied by the target. If any change is made, set CHANGED
1277 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1279   HOST_WIDE_INT offset;
     /* DATA, when non-null, points to the caller's "changed" flag; several
	callers in this file pass NULL when they do not need it.  */
1280   bool *changed = (bool *) data;
1287   switch (GET_CODE (x))
1290       new = instantiate_new_reg (x, &offset);
1293           *loc = plus_constant (new, offset);
     /* (plus (virtual-reg) (const)): fold the instantiation offset into the
	existing constant term.  */
1300       new = instantiate_new_reg (XEXP (x, 0), &offset);
1303           new = plus_constant (new, offset);
1304           *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1310       /* FIXME -- from old code */
1311       /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1312          we can commute the PLUS and SUBREG because pointers into the
1313          frame are well-behaved. */
1323 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1324 matches the predicate for insn CODE operand OPERAND. */
1327 safe_insn_predicate (int code, int operand, rtx x)
1329   const struct insn_operand_data *op_data;
     /* NOTE(review): the guard for CODE < 0 (asm insns, which have no
	insn_data entry) is elided from this view — confirm upstream.  */
1334   op_data = &insn_data[code].operand[operand];
     /* A null predicate places no constraint on the operand.  */
1335   if (op_data->predicate == NULL)
1338   return op_data->predicate (x, op_data->mode);
1341 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1342 registers present inside of insn. The result will be a valid insn. */
1345 instantiate_virtual_regs_in_insn (rtx insn)
1347   HOST_WIDE_INT offset;
1349   bool any_change = false;
1350   rtx set, new, x, seq;
1352   /* There are some special cases to be handled first. */
1353   set = single_set (insn);
1356       /* We're allowed to assign to a virtual register. This is interpreted
1357 	 to mean that the underlying register gets assigned the inverse
1358 	 transformation. This is used, for example, in the handling of
1360       new = instantiate_new_reg (SET_DEST (set), &offset);
1365 	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1366 	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1368 	  x = force_operand (x, new);
1370 	    emit_move_insn (new, x);
1375 	  emit_insn_before (seq, insn);
1380       /* Handle a straight copy from a virtual register by generating a
1381 	 new add insn. The difference between this and falling through
1382 	 to the generic case is avoiding a new pseudo and eliminating a
1383 	 move insn in the initial rtl stream. */
1384       new = instantiate_new_reg (SET_SRC (set), &offset);
1385       if (new && offset != 0
1386 	  && REG_P (SET_DEST (set))
1387 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1391 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1392 				   new, GEN_INT (offset), SET_DEST (set),
1393 				   1, OPTAB_LIB_WIDEN);
1394 	  if (x != SET_DEST (set))
1395 	    emit_move_insn (SET_DEST (set), x);
1400 	  emit_insn_before (seq, insn);
     /* From here on, fix up the insn operand-by-operand via recog_data.  */
1405   extract_insn (insn);
1406   insn_code = INSN_CODE (insn);
1408   /* Handle a plus involving a virtual register by determining if the
1409      operands remain valid if they're modified in place. */
1410   if (GET_CODE (SET_SRC (set)) == PLUS
1411       && recog_data.n_operands >= 3
1412       && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1413       && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1414       && GET_CODE (recog_data.operand[2]) == CONST_INT
1415       && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
     /* Fold the instantiation offset into the existing constant operand.  */
1417       offset += INTVAL (recog_data.operand[2]);
1419       /* If the sum is zero, then replace with a plain move. */
1421 	  && REG_P (SET_DEST (set))
1422 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1425 	  emit_move_insn (SET_DEST (set), new);
1429 	  emit_insn_before (seq, insn);
1434       x = gen_int_mode (offset, recog_data.operand_mode[2]);
1436       /* Using validate_change and apply_change_group here leaves
1437 	 recog_data in an invalid state. Since we know exactly what
1438 	 we want to check, do those two by hand. */
1439       if (safe_insn_predicate (insn_code, 1, new)
1440 	  && safe_insn_predicate (insn_code, 2, x))
1442 	  *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1443 	  *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1446 	  /* Fall through into the regular operand fixup loop in
1447 	     order to take care of operands other than 1 and 2. */
1453       extract_insn (insn);
1454       insn_code = INSN_CODE (insn);
1457   /* In the general case, we expect virtual registers to appear only in
1458      operands, and then only as either bare registers or inside memories. */
1459   for (i = 0; i < recog_data.n_operands; ++i)
1461       x = recog_data.operand[i];
1462       switch (GET_CODE (x))
     /* MEM operand: instantiate inside the address only.  */
1466 	    rtx addr = XEXP (x, 0);
1467 	    bool changed = false;
1469 	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1474 	    x = replace_equiv_address (x, addr);
1478 	    emit_insn_before (seq, insn);
     /* Bare REG operand.  */
1483 	  new = instantiate_new_reg (x, &offset);
1492 	      /* Careful, special mode predicates may have stuff in
1493 		 insn_data[insn_code].operand[i].mode that isn't useful
1494 		 to us for computing a new value. */
1495 	      /* ??? Recognize address_operand and/or "p" constraints
1496 		 to see if (plus new offset) is a valid before we put
1497 		 this through expand_simple_binop. */
1498 	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
1499 				       GEN_INT (offset), NULL_RTX,
1500 				       1, OPTAB_LIB_WIDEN);
1503 	      emit_insn_before (seq, insn);
     /* SUBREG of a virtual register: compute the new base then re-wrap.  */
1508 	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
1514 		  new = expand_simple_binop (GET_MODE (new), PLUS, new,
1515 					     GEN_INT (offset), NULL_RTX,
1516 					     1, OPTAB_LIB_WIDEN);
1519 		  emit_insn_before (seq, insn);
1521 	      x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1522 				       GET_MODE (new), SUBREG_BYTE (x));
1529       /* At this point, X contains the new value for the operand.
1530 	 Validate the new value vs the insn predicate. Note that
1531 	 asm insns will have insn_code -1 here. */
1532       if (!safe_insn_predicate (insn_code, i, x))
1533 	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
1535       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1541       /* Propagate operand changes into the duplicates. */
1542       for (i = 0; i < recog_data.n_dups; ++i)
1543 	*recog_data.dup_loc[i]
1544 	  = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1546       /* Force re-recognition of the instruction for validation. */
1547       INSN_CODE (insn) = -1;
     /* Asm statements are validated against their constraints directly;
	everything else must re-recognize successfully.  */
1550   if (asm_noperands (PATTERN (insn)) >= 0)
1552       if (!check_asm_operands (PATTERN (insn)))
1554 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1560       if (recog_memoized (insn) < 0)
1561 	fatal_insn_not_found (insn);
1565 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1566 do any instantiation required. */
1569 instantiate_decl (rtx x)
1576   /* If this is a CONCAT, recurse for the pieces. */
1577   if (GET_CODE (x) == CONCAT)
1579       instantiate_decl (XEXP (x, 0));
1580       instantiate_decl (XEXP (x, 1));
1584   /* If this is not a MEM, no need to do anything. Similarly if the
1585      address is a constant or a register that is not a virtual register. */
1590   if (CONSTANT_P (addr)
1592 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1593 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
     /* Rewrite any virtual registers inside the MEM's address in place.  */
1596   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1599 /* Helper for instantiate_decls called via walk_tree: Process all decls
1600 in the given DECL_VALUE_EXPR. */
1603 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     /* Only decls that already have RTL assigned need instantiation.  */
1609   if (DECL_P (t) && DECL_RTL_SET_P (t))
1610     instantiate_decl (DECL_RTL (t));
1615 /* Subroutine of instantiate_decls: Process all decls in the given
1616 BLOCK node and all its subblocks. */
1619 instantiate_decls_1 (tree let)
1623   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1625       if (DECL_RTL_SET_P (t))
1626 	instantiate_decl (DECL_RTL (t));
     /* A variable with a DECL_VALUE_EXPR refers to other decls; walk the
	expression so their RTL is instantiated too.  */
1627       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1629 	  tree v = DECL_VALUE_EXPR (t);
1630 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1634   /* Process all subblocks. */
1635   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1636     instantiate_decls_1 (t);
1639 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1640 all virtual registers in their DECL_RTL's. */
1643 instantiate_decls (tree fndecl)
1647   /* Process all parameters of the function. */
1648   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
     /* Both the parameter's home and its incoming location may mention
	virtual registers.  */
1650       instantiate_decl (DECL_RTL (decl));
1651       instantiate_decl (DECL_INCOMING_RTL (decl));
1652       if (DECL_HAS_VALUE_EXPR_P (decl))
1654 	  tree v = DECL_VALUE_EXPR (decl);
1655 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1659   /* Now process all variables defined in the function or its subblocks. */
1660   instantiate_decls_1 (DECL_INITIAL (fndecl));
1663 /* Pass through the INSNS of function FNDECL and convert virtual register
1664 references to hard register references. */
1667 instantiate_virtual_regs (void)
1671   /* Compute the offsets to use for this function. */
1672   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1673   var_offset = STARTING_FRAME_OFFSET;
1674   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1675   out_arg_offset = STACK_POINTER_OFFSET;
1676 #ifdef FRAME_POINTER_CFA_OFFSET
1677   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1679   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1682   /* Initialize recognition, indicating that volatile is OK. */
1685   /* Scan through all the insns, instantiating every virtual register still
1687   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1690 	/* These patterns in the instruction stream can never be recognized.
1691 	   Fortunately, they shouldn't contain virtual registers either. */
1692 	if (GET_CODE (PATTERN (insn)) == USE
1693 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1694 	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
1695 	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1696 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1699 	instantiate_virtual_regs_in_insn (insn);
1701 	if (INSN_DELETED_P (insn))
     /* Fix: "&REG_NOTES" had been mangled to the mojibake "(R)_NOTES"
	(an HTML "&reg;" entity decode); restore the address-of expression
	so the notes list is walked in place.  */
1704 	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1706 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1707 	if (GET_CODE (insn) == CALL_INSN)
1708 	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1709 			instantiate_virtual_regs_in_rtx, NULL);
1712   /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1713   instantiate_decls (current_function_decl);
1715   /* Indicate that, from now on, assign_stack_local should use
1716      frame_pointer_rtx. */
1717   virtuals_instantiated = 1;
     /* RTL pass descriptor running instantiate_virtual_regs over each
	function after expansion.  */
1721 struct tree_opt_pass pass_instantiate_virtual_regs =
1725   instantiate_virtual_regs,             /* execute */
1728   0,                                    /* static_pass_number */
1730   0,                                    /* properties_required */
1731   0,                                    /* properties_provided */
1732   0,                                    /* properties_destroyed */
1733   0,                                    /* todo_flags_start */
1734   TODO_dump_func,                       /* todo_flags_finish */
1739 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1740 This means a type for which function calls must pass an address to the
1741 function or get an address back from the function.
1742 EXP may be a type node or an expression (whose type is tested). */
1745 aggregate_value_p (tree exp, tree fntype)
1747   int i, regno, nregs;
1750   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
     /* Normalize FNTYPE: a call expression or decl is reduced to the
	function type it ultimately refers to.  */
1753   switch (TREE_CODE (fntype))
1756 	fntype = get_callee_fndecl (fntype);
1757 	fntype = fntype ? TREE_TYPE (fntype) : 0;
1760 	fntype = TREE_TYPE (fntype);
1765     case IDENTIFIER_NODE:
1769       /* We don't expect other rtl types here. */
1773   if (TREE_CODE (type) == VOID_TYPE)
1775   /* If the front end has decided that this needs to be passed by
1776      reference, do so. */
1777   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1778       && DECL_BY_REFERENCE (exp))
1780   if (targetm.calls.return_in_memory (type, fntype))
1782   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1783      and thus can't be returned in registers. */
1784   if (TREE_ADDRESSABLE (type))
1786   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1788   /* Make sure we have suitable call-clobbered regs to return
1789      the value in; if not, we must return it in memory. */
1790   reg = hard_function_value (type, 0, fntype, 0);
1792   /* If we have something other than a REG (e.g. a PARALLEL), then assume
1797   regno = REGNO (reg);
1798   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
     /* A return register that is preserved across calls cannot carry the
	value back to the caller.  */
1799   for (i = 0; i < nregs; i++)
1800     if (! call_used_regs[regno + i])
1805 /* Return true if we should assign DECL a pseudo register; false if it
1806 should live on the local stack. */
1809 use_register_for_decl (tree decl)
1811   /* Honor volatile. */
1812   if (TREE_SIDE_EFFECTS (decl))
1815   /* Honor addressability. */
1816   if (TREE_ADDRESSABLE (decl))
1819   /* Only register-like things go in registers. */
1820   if (DECL_MODE (decl) == BLKmode)
1823   /* If -ffloat-store specified, don't put explicit float variables
1825   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1826      propagates values across these stores, and it probably shouldn't. */
1827   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1830   /* If we're not interested in tracking debugging information for
1831      this decl, then we can certainly put it in a register. */
1832   if (DECL_IGNORED_P (decl))
     /* Otherwise register allocation is allowed when optimizing, or when
	the user asked for it explicitly.  */
1835   return (optimize || DECL_REGISTER (decl));
1838 /* Return true if TYPE should be passed by invisible reference. */
1841 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1842 		   tree type, bool named_arg)
1846       /* If this type contains non-trivial constructors, then it is
1847 	 forbidden for the middle-end to create any new copies. */
1848       if (TREE_ADDRESSABLE (type))
1851       /* GCC post 3.4 passes *all* variable sized types by reference. */
1852       if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
     /* Otherwise defer to the target's ABI decision.  */
1856   return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1859 /* Return true if TYPE, which is passed by reference, should be callee
1860 copied instead of caller copied. */
1863 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1864 			 tree type, bool named_arg)
     /* TREE_ADDRESSABLE types must not be copied at all; the target hook
	decides the rest.  */
1866   if (type && TREE_ADDRESSABLE (type))
1868   return targetm.calls.callee_copies (ca, mode, type, named_arg);
1871 /* Structures to communicate between the subroutines of assign_parms.
1872 The first holds data persistent across all parameters, the second
1873 is cleared out for each parameter. */
1875 struct assign_parm_data_all
1877   CUMULATIVE_ARGS args_so_far;		/* Cumulative arg-passing state.  */
1878   struct args_size stack_args_size;	/* Total stack space used by args.  */
1879   tree function_result_decl;		/* Hidden struct-return parm, if any.  */
1881   rtx conversion_insns;
1882   HOST_WIDE_INT pretend_args_size;
1883   HOST_WIDE_INT extra_pretend_bytes;
1884   int reg_parm_stack_space;
1887 struct assign_parm_data_one
1893   enum machine_mode nominal_mode;	/* Mode during function execution.  */
1894   enum machine_mode passed_mode;	/* Mode as declared for passing.  */
1895   enum machine_mode promoted_mode;	/* Mode actually used by the ABI.  */
1896   struct locate_and_pad_arg_data locate;
1898   BOOL_BITFIELD named_arg : 1;
1899   BOOL_BITFIELD passed_pointer : 1;
1900   BOOL_BITFIELD on_stack : 1;
1901   BOOL_BITFIELD loaded_in_reg : 1;
1904 /* A subroutine of assign_parms. Initialize ALL. */
1907 assign_parms_initialize_all (struct assign_parm_data_all *all)
1911   memset (all, 0, sizeof (*all));
1913   fntype = TREE_TYPE (current_function_decl);
     /* Prefer the incoming-args initializer when the target provides one;
	otherwise fall back to the generic one.  */
1915 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1916   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1918   INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1919 			current_function_decl, -1);
1922 #ifdef REG_PARM_STACK_SPACE
1923   all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1927 /* If ARGS contains entries with complex types, split the entry into two
1928 entries of the component type. Return a new list of substitutions are
1929 needed, else the old list. */
1932 split_complex_args (tree args)
1936   /* Before allocating memory, check for the common case of no complex. */
1937   for (p = args; p; p = TREE_CHAIN (p))
1939       tree type = TREE_TYPE (p);
1940       if (TREE_CODE (type) == COMPLEX_TYPE
1941 	  && targetm.calls.split_complex_arg (type))
     /* Work on a copy so the original PARM_DECL chain stays intact.  */
1947   args = copy_list (args);
1949   for (p = args; p; p = TREE_CHAIN (p))
1951       tree type = TREE_TYPE (p);
1952       if (TREE_CODE (type) == COMPLEX_TYPE
1953 	  && targetm.calls.split_complex_arg (type))
1956 	  tree subtype = TREE_TYPE (type);
1957 	  bool addressable = TREE_ADDRESSABLE (p);
1959 	  /* Rewrite the PARM_DECL's type with its component. */
1960 	  TREE_TYPE (p) = subtype;
1961 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1962 	  DECL_MODE (p) = VOIDmode;
1963 	  DECL_SIZE (p) = NULL;
1964 	  DECL_SIZE_UNIT (p) = NULL;
1965 	  /* If this arg must go in memory, put it in a pseudo here.
1966 	     We can't allow it to go in memory as per normal parms,
1967 	     because the usual place might not have the imag part
1968 	     adjacent to the real part. */
1969 	  DECL_ARTIFICIAL (p) = addressable;
1970 	  DECL_IGNORED_P (p) = addressable;
1971 	  TREE_ADDRESSABLE (p) = 0;
1974 	  /* Build a second synthetic decl. */
1975 	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1976 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1977 	  DECL_ARTIFICIAL (decl) = addressable;
1978 	  DECL_IGNORED_P (decl) = addressable;
1979 	  layout_decl (decl, 0);
1981 	  /* Splice it in; skip the new decl. */
1982 	  TREE_CHAIN (decl) = TREE_CHAIN (p);
1983 	  TREE_CHAIN (p) = decl;
1991 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1992 the hidden struct return argument, and (abi willing) complex args.
1993 Return the new parameter list. */
1996 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1998   tree fndecl = current_function_decl;
1999   tree fntype = TREE_TYPE (fndecl);
2000   tree fnargs = DECL_ARGUMENTS (fndecl);
2002   /* If struct value address is treated as the first argument, make it so. */
2003   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2004       && ! current_function_returns_pcc_struct
2005       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
     /* Synthesize an artificial pointer PARM_DECL for the return slot and
	prepend it to the argument list.  */
2007       tree type = build_pointer_type (TREE_TYPE (fntype));
2010       decl = build_decl (PARM_DECL, NULL_TREE, type);
2011       DECL_ARG_TYPE (decl) = type;
2012       DECL_ARTIFICIAL (decl) = 1;
2013       DECL_IGNORED_P (decl) = 1;
2015       TREE_CHAIN (decl) = fnargs;
2017       all->function_result_decl = decl;
2020   all->orig_fnargs = fnargs;
2022   /* If the target wants to split complex arguments into scalars, do so. */
2023   if (targetm.calls.split_complex_arg)
2024     fnargs = split_complex_args (fnargs);
2029 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2030 data for the parameter. Incorporate ABI specifics such as pass-by-
2031 reference and type promotion. */
2034 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2035 			     struct assign_parm_data_one *data)
2037   tree nominal_type, passed_type;
2038   enum machine_mode nominal_mode, passed_mode, promoted_mode;
2040   memset (data, 0, sizeof (*data));
2042   /* NAMED_ARG is a mis-nomer. We really mean 'non-varadic'. */
2043   if (!current_function_stdarg)
2044     data->named_arg = 1;  /* No varadic parms. */
2045   else if (TREE_CHAIN (parm))
2046     data->named_arg = 1;  /* Not the last non-varadic parm. */
2047   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2048     data->named_arg = 1;  /* Only varadic ones are unnamed. */
2050     data->named_arg = 0;  /* Treat as varadic. */
2052   nominal_type = TREE_TYPE (parm);
2053   passed_type = DECL_ARG_TYPE (parm);
2055   /* Look out for errors propagating this far. Also, if the parameter's
2056      type is void then its value doesn't matter. */
2057   if (TREE_TYPE (parm) == error_mark_node
2058       /* This can happen after weird syntax errors
2059 	 or if an enum type is defined among the parms. */
2060       || TREE_CODE (parm) != PARM_DECL
2061       || passed_type == NULL
2062       || VOID_TYPE_P (nominal_type))
2064       nominal_type = passed_type = void_type_node;
2065       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2069   /* Find mode of arg as it is passed, and mode of arg as it should be
2070      during execution of this function. */
2071   passed_mode = TYPE_MODE (passed_type);
2072   nominal_mode = TYPE_MODE (nominal_type);
2074   /* If the parm is to be passed as a transparent union, use the type of
2075      the first field for the tests below. We have already verified that
2076      the modes are the same. */
2077   if (TREE_CODE (passed_type) == UNION_TYPE
2078       && TYPE_TRANSPARENT_UNION (passed_type))
2079     passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2081   /* See if this arg was passed by invisible reference. */
2082   if (pass_by_reference (&all->args_so_far, passed_mode,
2083 			 passed_type, data->named_arg))
     /* By-reference parms are rewritten as pointers for the rest of the
	parameter-assignment machinery.  */
2085       passed_type = nominal_type = build_pointer_type (passed_type);
2086       data->passed_pointer = true;
2087       passed_mode = nominal_mode = Pmode;
2090   /* Find mode as it is passed by the ABI. */
2091   promoted_mode = passed_mode;
2092   if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2094       int unsignedp = TYPE_UNSIGNED (passed_type);
2095       promoted_mode = promote_mode (passed_type, promoted_mode,
2100   data->nominal_type = nominal_type;
2101   data->passed_type = passed_type;
2102   data->nominal_mode = nominal_mode;
2103   data->passed_mode = passed_mode;
2104   data->promoted_mode = promoted_mode;
2107 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2110 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2111 			    struct assign_parm_data_one *data, bool no_rtl)
2113   int varargs_pretend_bytes = 0;
2115   targetm.calls.setup_incoming_varargs (&all->args_so_far,
2116 					data->promoted_mode,
2118 					&varargs_pretend_bytes, no_rtl);
2120   /* If the back-end has requested extra stack space, record how much is
2121      needed. Do not change pretend_args_size otherwise since it may be
2122      nonzero from an earlier partial argument. */
2123   if (varargs_pretend_bytes > 0)
2124     all->pretend_args_size = varargs_pretend_bytes;
2127 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2128 the incoming location of the current parameter. */
2131 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2132 			    struct assign_parm_data_one *data)
2134   HOST_WIDE_INT pretend_bytes = 0;
     /* A VOIDmode parm (error or void type) trivially lives nowhere.  */
2138   if (data->promoted_mode == VOIDmode)
2140       data->entry_parm = data->stack_parm = const0_rtx;
2144 #ifdef FUNCTION_INCOMING_ARG
2145   entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2146 				      data->passed_type, data->named_arg)
2148   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2149 			     data->passed_type, data->named_arg)
2152   if (entry_parm == 0)
2153     data->promoted_mode = data->passed_mode;
2155   /* Determine parm's home in the stack, in case it arrives in the stack
2156      or we should pretend it did. Compute the stack position and rtx where
2157      the argument arrives and its size.
2159      There is one complexity here: If this was a parameter that would
2160      have been passed in registers, but wasn't only because it is
2161      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2162      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2163      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2164      as it was the previous time. */
2165   in_regs = entry_parm != 0;
2166 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2169   if (!in_regs && !data->named_arg)
2171       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2174 #ifdef FUNCTION_INCOMING_ARG
2175 	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2176 				       data->passed_type, true);
2178 	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2179 			      data->passed_type, true);
2181 	  in_regs = tem != NULL;
2185   /* If this parameter was passed both in registers and in the stack, use
2186      the copy on the stack. */
2187   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2195       partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2196 						 data->promoted_mode,
2199       data->partial = partial;
2201       /* The caller might already have allocated stack space for the
2202 	 register parameters. */
2203       if (partial != 0 && all->reg_parm_stack_space == 0)
2205 	  /* Part of this argument is passed in registers and part
2206 	     is passed on the stack. Ask the prologue code to extend
2207 	     the stack part so that we can recreate the full value.
2209 	     PRETEND_BYTES is the size of the registers we need to store.
2210 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2211 	     stack space that the prologue should allocate.
2213 	     Internally, gcc assumes that the argument pointer is aligned
2214 	     to STACK_BOUNDARY bits. This is used both for alignment
2215 	     optimizations (see init_emit) and to locate arguments that are
2216 	     aligned to more than PARM_BOUNDARY bits. We must preserve this
2217 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2218 	     a stack boundary. */
2220 	  /* We assume at most one partial arg, and it must be the first
2221 	     argument on the stack. */
2222 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2224 	  pretend_bytes = partial;
2225 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2227 	  /* We want to align relative to the actual stack pointer, so
2228 	     don't include this in the stack size until later. */
2229 	  all->extra_pretend_bytes = all->pretend_args_size;
2233   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2234 		       entry_parm ? data->partial : 0, current_function_decl,
2235 		       &all->stack_args_size, &data->locate);
2237   /* Adjust offsets to include the pretend args. */
2238   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2239   data->locate.slot_offset.constant += pretend_bytes;
2240   data->locate.offset.constant += pretend_bytes;
2242   data->entry_parm = entry_parm;
2245 /* A subroutine of assign_parms. If there is actually space on the stack
2246 for this parm, count it in stack_args_size and return true. */
2249 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2250 			   struct assign_parm_data_one *data)
2252   /* Trivially true if we've no incoming register. */
2253   if (data->entry_parm == NULL)
2255   /* Also true if we're partially in registers and partially not,
2256      since we've arranged to drop the entire argument on the stack. */
2257   else if (data->partial != 0)
2259   /* Also true if the target says that it's passed in both registers
2260      and on the stack. */
2261   else if (GET_CODE (data->entry_parm) == PARALLEL
2262 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2264   /* Also true if the target says that there's stack allocated for
2265      all register parameters. */
2266   else if (all->reg_parm_stack_space > 0)
2268   /* Otherwise, no, this parameter has no ABI defined stack slot. */
     /* On success, account for this parm's slot in the running total.  */
2272   all->stack_args_size.constant += data->locate.size.constant;
2273   if (data->locate.size.var)
2274     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2279 /* A subroutine of assign_parms. Given that this parameter is allocated
2280 stack space by the ABI, find it. */
2283 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2285   rtx offset_rtx, stack_parm;
2286   unsigned int align, boundary;
2288   /* If we're passing this arg using a reg, make its stack home the
2289      aligned stack slot. */
2290   if (data->entry_parm)
2291     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2293     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
     /* Build (mem:MODE (plus arg_pointer offset)) for the slot.  */
2295   stack_parm = current_function_internal_arg_pointer;
2296   if (offset_rtx != const0_rtx)
2297     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2298   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2300   set_mem_attributes (stack_parm, parm, 1);
2302   boundary = data->locate.boundary;
2303   align = BITS_PER_UNIT;
2305   /* If we're padding upward, we know that the alignment of the slot
2306      is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2307      intentionally forcing upward padding. Otherwise we have to come
2308      up with a guess at the alignment based on OFFSET_RTX. */
2309   if (data->locate.where_pad != downward || data->entry_parm)
2311   else if (GET_CODE (offset_rtx) == CONST_INT)
     /* Alignment implied by the offset: the lowest set bit bounds it.  */
2313       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2314       align = align & -align;
2316   set_mem_align (stack_parm, align);
2318   if (data->entry_parm)
2319     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2321   data->stack_parm = stack_parm;
2324 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2325 always valid and contiguous. */
2328 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2330   rtx entry_parm = data->entry_parm;
2331   rtx stack_parm = data->stack_parm;
2333   /* If this parm was passed part in regs and part in memory, pretend it
2334      arrived entirely in memory by pushing the register-part onto the stack.
2335      In the special case of a DImode or DFmode that is split, we could put
2336      it together in a pseudoreg directly, but for now that's not worth
2338   if (data->partial != 0)
2340       /* Handle calls that pass values in multiple non-contiguous
2341 	 locations. The Irix 6 ABI has examples of this. */
2342       if (GET_CODE (entry_parm) == PARALLEL)
2343 	emit_group_store (validize_mem (stack_parm), entry_parm,
2345 			  int_size_in_bytes (data->passed_type));
     /* Plain register case: the register part must be a whole number of
	words to be spilled with move_block_from_reg.  */
2348 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2349 	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2350 			       data->partial / UNITS_PER_WORD);
2353       entry_parm = stack_parm;
2356   /* If we didn't decide this parm came in a register, by default it came
2358   else if (entry_parm == NULL)
2359     entry_parm = stack_parm;
2361   /* When an argument is passed in multiple locations, we can't make use
2362      of this information, but we can save some copying if the whole argument
2363      is passed in a single register. */
2364   else if (GET_CODE (entry_parm) == PARALLEL
2365 	   && data->nominal_mode != BLKmode
2366 	   && data->passed_mode != BLKmode)
2368       size_t i, len = XVECLEN (entry_parm, 0);
2370       for (i = 0; i < len; i++)
2371 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2372 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2373 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2374 		== data->passed_mode)
2375 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
     /* Found a full-width piece at offset zero: use that register alone.  */
2377 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2382   data->entry_parm = entry_parm;
2385 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2386 always valid and properly aligned. */
2389 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2391   rtx stack_parm = data->stack_parm;
2393   /* If we can't trust the parm stack slot to be aligned enough for its
2394      ultimate type, don't use that slot after entry. We'll make another
2395      stack slot, if we need one. */
2397       && ((STRICT_ALIGNMENT
2398 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2399 	  || (data->nominal_type
2400 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2401 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2404   /* If parm was passed in memory, and we need to convert it on entry,
2405      don't store it back in that same slot. */
2406   else if (data->entry_parm == stack_parm
2407 	   && data->nominal_mode != BLKmode
2408 	   && data->nominal_mode != data->passed_mode)
2411   /* If stack protection is in effect for this function, don't leave any
2412      pointers in their passed stack slots. */
2413   else if (cfun->stack_protect_guard
2414 	   && (flag_stack_protect == 2
2415 	       || data->passed_pointer
2416 	       || POINTER_TYPE_P (data->nominal_type)))
2419   data->stack_parm = stack_parm;
2422 /* A subroutine of assign_parms. Return true if the current parameter
2423 should be stored as a BLKmode in the current frame. */
2426 assign_parm_setup_block_p (struct assign_parm_data_one *data)
/* (review) extraction dropped the `return ...;` bodies of these tests;
   only the conditions remain visible.  */
2428 if (data->nominal_mode == BLKmode)
2430 if (GET_CODE (data->entry_parm) == PARALLEL)
2433 #ifdef BLOCK_REG_PADDING
2434 /* Only assign_parm_setup_block knows how to deal with register arguments
2435 that are padded at the least significant end. */
2436 if (REG_P (data->entry_parm)
2437 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2438 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2439 == (BYTES_BIG_ENDIAN ? upward : downward)))
2446 /* A subroutine of assign_parms. Arrange for the parameter to be
2447 present and valid in DATA->STACK_RTL. */
/* NOTE(review): lossy extraction; braces, early returns, and some
   statements are missing.  Code lines below are byte-identical.  */
2450 assign_parm_setup_block (struct assign_parm_data_all *all,
2451 tree parm, struct assign_parm_data_one *data)
2453 rtx entry_parm = data->entry_parm;
2454 rtx stack_parm = data->stack_parm;
2456 HOST_WIDE_INT size_stored;
2457 rtx orig_entry_parm = entry_parm;
2459 if (GET_CODE (entry_parm) == PARALLEL)
2460 entry_parm = emit_group_move_into_temps (entry_parm);
2462 /* If we've a non-block object that's nevertheless passed in parts,
2463 reconstitute it in register operations rather than on the stack. */
2464 if (GET_CODE (entry_parm) == PARALLEL
2465 && data->nominal_mode != BLKmode)
2467 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2469 if ((XVECLEN (entry_parm, 0) > 1
2470 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2471 && use_register_for_decl (parm))
2473 rtx parmreg = gen_reg_rtx (data->nominal_mode);
/* (review) conversion insns are accumulated on a deferred sequence and
   emitted later by assign_parms, after all parms leave hard regs.  */
2475 push_to_sequence (all->conversion_insns);
2477 /* For values returned in multiple registers, handle possible
2478 incompatible calls to emit_group_store.
2480 For example, the following would be invalid, and would have to
2481 be fixed by the conditional below:
2483 emit_group_store ((reg:SF), (parallel:DF))
2484 emit_group_store ((reg:SI), (parallel:DI))
2486 An example of this are doubles in e500 v2:
2487 (parallel:DF (expr_list (reg:SI) (const_int 0))
2488 (expr_list (reg:SI) (const_int 4))). */
2489 if (data->nominal_mode != data->passed_mode)
2491 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2492 emit_group_store (t, entry_parm, NULL_TREE,
2493 GET_MODE_SIZE (GET_MODE (entry_parm)));
2494 convert_move (parmreg, t, 0);
2497 emit_group_store (parmreg, entry_parm, data->nominal_type,
2498 int_size_in_bytes (data->nominal_type));
2500 all->conversion_insns = get_insns ();
2503 SET_DECL_RTL (parm, parmreg);
2508 size = int_size_in_bytes (data->passed_type);
/* (review) round the stored size up so we always move/allocate an
   integral number of words; see comment at 2526 below.  */
2509 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2510 if (stack_parm == 0)
2512 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2513 stack_parm = assign_stack_local (BLKmode, size_stored,
2515 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2516 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2517 set_mem_attributes (stack_parm, parm, 1);
2520 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2521 calls that pass values in multiple non-contiguous locations. */
2522 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2526 /* Note that we will be storing an integral number of words.
2527 So we have to be careful to ensure that we allocate an
2528 integral number of words. We do this above when we call
2529 assign_stack_local if space was not allocated in the argument
2530 list. If it was, this will not work if PARM_BOUNDARY is not
2531 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2532 if it becomes a problem. Exception is when BLKmode arrives
2533 with arguments not conforming to word_mode. */
2535 if (data->stack_parm == 0)
2537 else if (GET_CODE (entry_parm) == PARALLEL)
2540 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2542 mem = validize_mem (stack_parm);
2544 /* Handle values in multiple non-contiguous locations. */
2545 if (GET_CODE (entry_parm) == PARALLEL)
2547 push_to_sequence (all->conversion_insns);
2548 emit_group_store (mem, entry_parm, data->passed_type, size);
2549 all->conversion_insns = get_insns ();
2556 /* If SIZE is that of a mode no bigger than a word, just use
2557 that mode's store operation. */
2558 else if (size <= UNITS_PER_WORD)
2560 enum machine_mode mode
2561 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2564 #ifdef BLOCK_REG_PADDING
2565 && (size == UNITS_PER_WORD
2566 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2567 != (BYTES_BIG_ENDIAN ? upward : downward)))
2571 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2572 emit_move_insn (change_address (mem, mode, 0), reg);
2575 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2576 machine must be aligned to the left before storing
2577 to memory. Note that the previous test doesn't
2578 handle all cases (e.g. SIZE == 3). */
2579 else if (size != UNITS_PER_WORD
2580 #ifdef BLOCK_REG_PADDING
2581 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
/* (review) shift the value left so its bytes land at the low addresses
   of the word when stored on a big-endian target.  */
2589 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2590 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2592 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2593 build_int_cst (NULL_TREE, by),
2595 tem = change_address (mem, word_mode, 0);
2596 emit_move_insn (tem, x);
2599 move_block_from_reg (REGNO (entry_parm), mem,
2600 size_stored / UNITS_PER_WORD);
2603 move_block_from_reg (REGNO (entry_parm), mem,
2604 size_stored / UNITS_PER_WORD);
2606 else if (data->stack_parm == 0)
2608 push_to_sequence (all->conversion_insns);
2609 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2611 all->conversion_insns = get_insns ();
2615 data->stack_parm = stack_parm;
2616 SET_DECL_RTL (parm, stack_parm);
2619 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2620 parameter. Get it there. Perform all ABI specified conversions. */
/* NOTE(review): lossy extraction; braces, `else` arms and some statements
   are missing.  Code lines below are byte-identical to the original.  */
2623 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2624 struct assign_parm_data_one *data)
2627 enum machine_mode promoted_nominal_mode;
2628 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2629 bool did_conversion = false;
2631 /* Store the parm in a pseudoregister during the function, but we may
2632 need to do it in a wider mode. */
2634 /* This is not really promoting for a call. However we need to be
2635 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2636 promoted_nominal_mode
2637 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2639 parmreg = gen_reg_rtx (promoted_nominal_mode);
2641 if (!DECL_ARTIFICIAL (parm))
2642 mark_user_reg (parmreg);
2644 /* If this was an item that we received a pointer to,
2645 set DECL_RTL appropriately. */
2646 if (data->passed_pointer)
2648 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2649 set_mem_attributes (x, parm, 1);
2650 SET_DECL_RTL (parm, x);
2653 SET_DECL_RTL (parm, parmreg);
2655 /* Copy the value into the register. */
2656 if (data->nominal_mode != data->passed_mode
2657 || promoted_nominal_mode != data->promoted_mode)
2661 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2662 mode, by the caller. We now have to convert it to
2663 NOMINAL_MODE, if different. However, PARMREG may be in
2664 a different mode than NOMINAL_MODE if it is being stored
2667 If ENTRY_PARM is a hard register, it might be in a register
2668 not valid for operating in its mode (e.g., an odd-numbered
2669 register for a DFmode). In that case, moves are the only
2670 thing valid, so we can't do a convert from there. This
2671 occurs when the calling sequence allow such misaligned
2674 In addition, the conversion may involve a call, which could
2675 clobber parameters which haven't been copied to pseudo
2676 registers yet. Therefore, we must first copy the parm to
2677 a pseudo reg here, and save the conversion until after all
2678 parameters have been moved. */
2680 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2682 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2684 push_to_sequence (all->conversion_insns);
2685 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
/* (review) if the truncation produced a paradoxical-free SUBREG of the
   entry register, record that the value is already sign/zero extended.  */
2687 if (GET_CODE (tempreg) == SUBREG
2688 && GET_MODE (tempreg) == data->nominal_mode
2689 && REG_P (SUBREG_REG (tempreg))
2690 && data->nominal_mode == data->passed_mode
2691 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2692 && GET_MODE_SIZE (GET_MODE (tempreg))
2693 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2695 /* The argument is already sign/zero extended, so note it
2697 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2698 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2701 /* TREE_USED gets set erroneously during expand_assignment. */
2702 save_tree_used = TREE_USED (parm);
2703 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2704 TREE_USED (parm) = save_tree_used;
2705 all->conversion_insns = get_insns ();
2708 did_conversion = true;
2711 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2713 /* If we were passed a pointer but the actual value can safely live
2714 in a register, put it in one. */
2715 if (data->passed_pointer
2716 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2717 /* If by-reference argument was promoted, demote it. */
2718 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2719 || use_register_for_decl (parm)))
2721 /* We can't use nominal_mode, because it will have been set to
2722 Pmode above. We must use the actual mode of the parm. */
2723 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2724 mark_user_reg (parmreg);
2726 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2728 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2729 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2731 push_to_sequence (all->conversion_insns);
2732 emit_move_insn (tempreg, DECL_RTL (parm));
2733 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2734 emit_move_insn (parmreg, tempreg);
2735 all->conversion_insns = get_insns ();
2738 did_conversion = true;
2741 emit_move_insn (parmreg, DECL_RTL (parm));
2743 SET_DECL_RTL (parm, parmreg);
2745 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2747 data->stack_parm = NULL;
2750 /* Mark the register as eliminable if we did no conversion and it was
2751 copied from memory at a fixed offset, and the arg pointer was not
2752 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2753 offset formed an invalid address, such memory-equivalences as we
2754 make here would screw up life analysis for it. */
2755 if (data->nominal_mode == data->passed_mode
2757 && data->stack_parm != 0
2758 && MEM_P (data->stack_parm)
2759 && data->locate.offset.var == 0
2760 && reg_mentioned_p (virtual_incoming_args_rtx,
2761 XEXP (data->stack_parm, 0)))
2763 rtx linsn = get_last_insn ();
2766 /* Mark complex types separately. */
2767 if (GET_CODE (parmreg) == CONCAT)
/* (review) for a CONCAT (complex value) attach REG_EQUIV notes to the
   separate sets of the real and imaginary halves, found by scanning
   backwards from the last insn.  */
2769 enum machine_mode submode
2770 = GET_MODE_INNER (GET_MODE (parmreg));
2771 int regnor = REGNO (XEXP (parmreg, 0));
2772 int regnoi = REGNO (XEXP (parmreg, 1));
2773 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2774 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2775 GET_MODE_SIZE (submode));
2777 /* Scan backwards for the set of the real and
2779 for (sinsn = linsn; sinsn != 0;
2780 sinsn = prev_nonnote_insn (sinsn))
2782 set = single_set (sinsn);
2786 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2788 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2790 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2792 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2796 else if ((set = single_set (linsn)) != 0
2797 && SET_DEST (set) == parmreg)
2799 = gen_rtx_EXPR_LIST (REG_EQUIV,
2800 data->stack_parm, REG_NOTES (linsn));
2803 /* For pointer data type, suggest pointer register. */
2804 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2805 mark_reg_pointer (parmreg,
2806 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2809 /* A subroutine of assign_parms. Allocate stack space to hold the current
2810 parameter. Get it there. Perform all ABI specified conversions. */
2813 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2814 struct assign_parm_data_one *data)
2816 /* Value must be stored in the stack slot STACK_PARM during function
2818 bool to_conversion = false;
2820 if (data->promoted_mode != data->nominal_mode)
2822 /* Conversion is required. */
2823 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2825 emit_move_insn (tempreg, validize_mem (data->entry_parm));
/* (review) conversions go on the deferred sequence emitted later by
   assign_parms, after all parms are out of hard registers.  */
2827 push_to_sequence (all->conversion_insns);
2828 to_conversion = true;
2830 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2831 TYPE_UNSIGNED (TREE_TYPE (parm)));
2833 if (data->stack_parm)
2834 /* ??? This may need a big-endian conversion on sparc64. */
2836 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2839 if (data->entry_parm != data->stack_parm)
2843 if (data->stack_parm == 0)
2846 = assign_stack_local (GET_MODE (data->entry_parm),
2847 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2848 TYPE_ALIGN (data->passed_type));
2849 set_mem_attributes (data->stack_parm, parm, 1);
2852 dest = validize_mem (data->stack_parm);
2853 src = validize_mem (data->entry_parm);
2857 /* Use a block move to handle potentially misaligned entry_parm. */
2859 push_to_sequence (all->conversion_insns);
2860 to_conversion = true;
2862 emit_block_move (dest, src,
2863 GEN_INT (int_size_in_bytes (data->passed_type)),
2867 emit_move_insn (dest, src);
2872 all->conversion_insns = get_insns ();
2876 SET_DECL_RTL (parm, data->stack_parm);
2879 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2880 undo the frobbing that we did in assign_parms_augmented_arg_list. */
/* NOTE(review): FNARGS is the augmented list (real/imag halves as separate
   decls); ORIG_FNARGS is the user-visible list.  FNARGS is advanced twice
   per split complex parm (2935 and 2948), once otherwise -- extraction has
   dropped the braces showing this, confirm against the original file.  */
2883 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2886 tree orig_fnargs = all->orig_fnargs;
2888 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2890 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2891 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2893 rtx tmp, real, imag;
2894 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2896 real = DECL_RTL (fnargs);
2897 imag = DECL_RTL (TREE_CHAIN (fnargs));
2898 if (inner != GET_MODE (real))
2900 real = gen_lowpart_SUBREG (inner, real);
2901 imag = gen_lowpart_SUBREG (inner, imag);
2904 if (TREE_ADDRESSABLE (parm))
2907 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2909 /* split_complex_arg put the real and imag parts in
2910 pseudos. Move them to memory. */
2911 tmp = assign_stack_local (DECL_MODE (parm), size,
2912 TYPE_ALIGN (TREE_TYPE (parm)));
2913 set_mem_attributes (tmp, parm, 1);
2914 rmem = adjust_address_nv (tmp, inner, 0);
2915 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2916 push_to_sequence (all->conversion_insns);
2917 emit_move_insn (rmem, real);
2918 emit_move_insn (imem, imag);
2919 all->conversion_insns = get_insns ();
2923 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2924 SET_DECL_RTL (parm, tmp);
2926 real = DECL_INCOMING_RTL (fnargs);
2927 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2928 if (inner != GET_MODE (real))
2930 real = gen_lowpart_SUBREG (inner, real);
2931 imag = gen_lowpart_SUBREG (inner, imag);
2933 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2934 set_decl_incoming_rtl (parm, tmp);
2935 fnargs = TREE_CHAIN (fnargs);
2939 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2940 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2942 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2943 instead of the copy of decl, i.e. FNARGS. */
2944 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2945 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2948 fnargs = TREE_CHAIN (fnargs);
2952 /* Assign RTL expressions to the function's parameters. This may involve
2953 copying them into registers and using those registers as the DECL_RTL. */
/* NOTE(review): main driver.  Per-parm flow: find data types -> maybe set
   up varargs -> find entry rtl -> find/adjust stack rtl -> record incoming
   rtl -> advance arg info -> dispatch to setup_block / setup_reg /
   setup_stack.  Extraction has dropped braces and some lines.  */
2956 assign_parms (tree fndecl)
2958 struct assign_parm_data_all all;
2961 current_function_internal_arg_pointer
2962 = targetm.calls.internal_arg_pointer ();
2964 assign_parms_initialize_all (&all);
2965 fnargs = assign_parms_augmented_arg_list (&all);
2967 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2969 struct assign_parm_data_one data;
2971 /* Extract the type of PARM; adjust it according to ABI. */
2972 assign_parm_find_data_types (&all, parm, &data);
2974 /* Early out for errors and void parameters. */
2975 if (data.passed_mode == VOIDmode)
2977 SET_DECL_RTL (parm, const0_rtx);
2978 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
/* (review) last named parm of a stdarg function: set up the varargs
   machinery before laying out this parm.  */
2982 if (current_function_stdarg && !TREE_CHAIN (parm))
2983 assign_parms_setup_varargs (&all, &data, false);
2985 /* Find out where the parameter arrives in this function. */
2986 assign_parm_find_entry_rtl (&all, &data);
2988 /* Find out where stack space for this parameter might be. */
2989 if (assign_parm_is_stack_parm (&all, &data))
2991 assign_parm_find_stack_rtl (parm, &data);
2992 assign_parm_adjust_entry_rtl (&data);
2995 /* Record permanently how this parm was passed. */
2996 set_decl_incoming_rtl (parm, data.entry_parm);
2998 /* Update info on where next arg arrives in registers. */
2999 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3000 data.passed_type, data.named_arg);
3002 assign_parm_adjust_stack_rtl (&data);
3004 if (assign_parm_setup_block_p (&data))
3005 assign_parm_setup_block (&all, parm, &data);
3006 else if (data.passed_pointer || use_register_for_decl (parm))
3007 assign_parm_setup_reg (&all, parm, &data);
3009 assign_parm_setup_stack (&all, parm, &data);
3012 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3013 assign_parms_unsplit_complex (&all, fnargs);
3015 /* Output all parameter conversion instructions (possibly including calls)
3016 now that all parameters have been copied out of hard registers. */
3017 emit_insn (all.conversion_insns);
3019 /* If we are receiving a struct value address as the first argument, set up
3020 the RTL for the function result. As this might require code to convert
3021 the transmitted address to Pmode, we do this here to ensure that possible
3022 preliminary conversions of the address have been emitted already. */
3023 if (all.function_result_decl)
3025 tree result = DECL_RESULT (current_function_decl);
3026 rtx addr = DECL_RTL (all.function_result_decl);
3029 if (DECL_BY_REFERENCE (result))
3033 addr = convert_memory_address (Pmode, addr);
3034 x = gen_rtx_MEM (DECL_MODE (result), addr);
3035 set_mem_attributes (x, result, 1);
3037 SET_DECL_RTL (result, x);
3040 /* We have aligned all the args, so add space for the pretend args. */
3041 current_function_pretend_args_size = all.pretend_args_size;
3042 all.stack_args_size.constant += all.extra_pretend_bytes;
3043 current_function_args_size = all.stack_args_size.constant;
3045 /* Adjust function incoming argument size for alignment and
3048 #ifdef REG_PARM_STACK_SPACE
3049 current_function_args_size = MAX (current_function_args_size,
3050 REG_PARM_STACK_SPACE (fndecl));
3053 current_function_args_size = CEIL_ROUND (current_function_args_size,
3054 PARM_BOUNDARY / BITS_PER_UNIT);
3056 #ifdef ARGS_GROW_DOWNWARD
3057 current_function_arg_offset_rtx
3058 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3059 : expand_expr (size_diffop (all.stack_args_size.var,
3060 size_int (-all.stack_args_size.constant)),
3061 NULL_RTX, VOIDmode, 0));
3063 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3066 /* See how many bytes, if any, of its args a function should try to pop
3069 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3070 current_function_args_size);
3072 /* For stdarg.h function, save info about
3073 regs and stack space used by the named args. */
3075 current_function_args_info = all.args_so_far;
3077 /* Set the rtx used for the function return value. Put this in its
3078 own variable so any optimizers that need this information don't have
3079 to include tree.h. Do this here so it gets done when an inlined
3080 function gets output. */
3082 current_function_return_rtx
3083 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3084 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3086 /* If scalar return value was computed in a pseudo-reg, or was a named
3087 return value that got dumped to the stack, copy that to the hard
3089 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3091 tree decl_result = DECL_RESULT (fndecl);
3092 rtx decl_rtl = DECL_RTL (decl_result);
3094 if (REG_P (decl_rtl)
3095 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3096 : DECL_REGISTER (decl_result))
3100 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3102 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3103 /* The delay slot scheduler assumes that current_function_return_rtx
3104 holds the hard register containing the return value, not a
3105 temporary pseudo. */
3106 current_function_return_rtx = real_decl_rtl;
3111 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3112 For all seen types, gimplify their sizes. */
/* NOTE(review): extraction dropped this callback's declarations, braces,
   the assignments to *walk_subtrees, and the return statement.  */
3115 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3122 if (POINTER_TYPE_P (t))
3124 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3125 && !TYPE_SIZES_GIMPLIFIED (t))
3127 gimplify_type_sizes (t, (tree *) data);
3135 /* Gimplify the parameter list for current_function_decl. This involves
3136 evaluating SAVE_EXPRs of variable sized parameters and generating code
3137 to implement callee-copies reference parameters. Returns a list of
3138 statements to add to the beginning of the function, or NULL if nothing
/* NOTE(review): lossy extraction; braces and some statements are missing
   (e.g. the final `return stmts;`).  Code lines are byte-identical.  */
3142 gimplify_parameters (void)
3144 struct assign_parm_data_all all;
3145 tree fnargs, parm, stmts = NULL;
3147 assign_parms_initialize_all (&all);
3148 fnargs = assign_parms_augmented_arg_list (&all);
3150 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3152 struct assign_parm_data_one data;
3154 /* Extract the type of PARM; adjust it according to ABI. */
3155 assign_parm_find_data_types (&all, parm, &data);
3157 /* Early out for errors and void parameters. */
3158 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3161 /* Update info on where next arg arrives in registers. */
3162 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3163 data.passed_type, data.named_arg);
3165 /* ??? Once upon a time variable_size stuffed parameter list
3166 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3167 turned out to be less than manageable in the gimple world.
3168 Now we have to hunt them down ourselves. */
3169 walk_tree_without_duplicates (&data.passed_type,
3170 gimplify_parm_type, &stmts);
3172 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3174 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3175 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3178 if (data.passed_pointer)
3180 tree type = TREE_TYPE (data.passed_type);
3181 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3182 type, data.named_arg))
3186 /* For constant sized objects, this is trivial; for
3187 variable-sized objects, we have to play games. */
3188 if (TREE_CONSTANT (DECL_SIZE (parm)))
3190 local = create_tmp_var (type, get_name (parm));
3191 DECL_IGNORED_P (local) = 0;
/* (review) variable-sized case: make the copy in alloca'd space and
   access it through a pointer temporary.  */
3195 tree ptr_type, addr, args;
3197 ptr_type = build_pointer_type (type);
3198 addr = create_tmp_var (ptr_type, get_name (parm));
3199 DECL_IGNORED_P (addr) = 0;
3200 local = build_fold_indirect_ref (addr);
3202 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3203 t = built_in_decls[BUILT_IN_ALLOCA];
3204 t = build_function_call_expr (t, args);
3205 t = fold_convert (ptr_type, t);
3206 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3207 gimplify_and_add (t, &stmts);
3210 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3211 gimplify_and_add (t, &stmts);
3213 SET_DECL_VALUE_EXPR (parm, local);
3214 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3222 /* Indicate whether REGNO is an incoming argument to the current function
3223 that was promoted to a wider mode. If so, return the RTX for the
3224 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3225 that REGNO is promoted from and whether the promotion was signed or
/* NOTE(review): extraction dropped the return type and the trailing
   `return 0;` for the not-found case.  */
3229 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3233 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3234 arg = TREE_CHAIN (arg))
3235 if (REG_P (DECL_INCOMING_RTL (arg))
3236 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3237 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3239 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3240 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
/* (review) report a promotion only when the incoming register really is
   in the promoted mode and that differs from the decl's own mode.  */
3242 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3243 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3244 && mode != DECL_MODE (arg))
3246 *pmode = DECL_MODE (arg);
3247 *punsignedp = unsignedp;
3248 return DECL_INCOMING_RTL (arg);
3256 /* Compute the size and offset from the start of the stacked arguments for a
3257 parm passed in mode PASSED_MODE and with type TYPE.
3259 INITIAL_OFFSET_PTR points to the current offset into the stacked
3262 The starting offset and size for this parm are returned in
3263 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3264 nonzero, the offset is that of stack slot, which is returned in
3265 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3266 padding required from the initial offset ptr to the stack slot.
3268 IN_REGS is nonzero if the argument will be passed in registers. It will
3269 never be set if REG_PARM_STACK_SPACE is not defined.
3271 FNDECL is the function in which the argument was defined.
3273 There are two types of rounding that are done. The first, controlled by
3274 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3275 list to be aligned to the specific boundary (in bits). This rounding
3276 affects the initial and starting offsets, but not the argument size.
3278 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3279 optionally rounds the size of the parm to PARM_BOUNDARY. The
3280 initial offset is not affected by this rounding, while the size always
3281 is and the starting offset may be. */
3283 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3284 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3285 callers pass in the total size of args so far as
3286 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
/* NOTE(review): lossy extraction; declarations (e.g. sizetree, s2),
   braces and parts of some conditions are missing below.  */
3289 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3290 int partial, tree fndecl ATTRIBUTE_UNUSED,
3291 struct args_size *initial_offset_ptr,
3292 struct locate_and_pad_arg_data *locate)
3295 enum direction where_pad;
3296 unsigned int boundary;
3297 int reg_parm_stack_space = 0;
3298 int part_size_in_regs;
3300 #ifdef REG_PARM_STACK_SPACE
3301 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3303 /* If we have found a stack parm before we reach the end of the
3304 area reserved for registers, skip that area. */
3307 if (reg_parm_stack_space > 0)
3309 if (initial_offset_ptr->var)
3311 initial_offset_ptr->var
3312 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3313 ssize_int (reg_parm_stack_space));
3314 initial_offset_ptr->constant = 0;
3316 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3317 initial_offset_ptr->constant = reg_parm_stack_space;
3320 #endif /* REG_PARM_STACK_SPACE */
3322 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3325 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3326 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3327 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3328 locate->where_pad = where_pad;
3329 locate->boundary = boundary;
3331 /* Remember if the outgoing parameter requires extra alignment on the
3332 calling function side. */
3333 if (boundary > PREFERRED_STACK_BOUNDARY)
3334 boundary = PREFERRED_STACK_BOUNDARY;
3335 if (cfun->stack_alignment_needed < boundary)
3336 cfun->stack_alignment_needed = boundary;
3338 #ifdef ARGS_GROW_DOWNWARD
3339 locate->slot_offset.constant = -initial_offset_ptr->constant;
3340 if (initial_offset_ptr->var)
3341 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3342 initial_offset_ptr->var);
3346 if (where_pad != none
3347 && (!host_integerp (sizetree, 1)
3348 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3349 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3350 SUB_PARM_SIZE (locate->slot_offset, s2);
3353 locate->slot_offset.constant += part_size_in_regs;
3356 #ifdef REG_PARM_STACK_SPACE
3357 || REG_PARM_STACK_SPACE (fndecl) > 0
3360 pad_to_arg_alignment (&locate->slot_offset, boundary,
3361 &locate->alignment_pad);
3363 locate->size.constant = (-initial_offset_ptr->constant
3364 - locate->slot_offset.constant);
3365 if (initial_offset_ptr->var)
3366 locate->size.var = size_binop (MINUS_EXPR,
3367 size_binop (MINUS_EXPR,
3369 initial_offset_ptr->var),
3370 locate->slot_offset.var);
3372 /* Pad_below needs the pre-rounded size to know how much to pad
3374 locate->offset = locate->slot_offset;
3375 if (where_pad == downward)
3376 pad_below (&locate->offset, passed_mode, sizetree);
3378 #else /* !ARGS_GROW_DOWNWARD */
3380 #ifdef REG_PARM_STACK_SPACE
3381 || REG_PARM_STACK_SPACE (fndecl) > 0
3384 pad_to_arg_alignment (initial_offset_ptr, boundary,
3385 &locate->alignment_pad);
3386 locate->slot_offset = *initial_offset_ptr;
3388 #ifdef PUSH_ROUNDING
3389 if (passed_mode != BLKmode)
3390 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3393 /* Pad_below needs the pre-rounded size to know how much to pad below
3394 so this must be done before rounding up. */
3395 locate->offset = locate->slot_offset;
3396 if (where_pad == downward)
3397 pad_below (&locate->offset, passed_mode, sizetree);
3399 if (where_pad != none
3400 && (!host_integerp (sizetree, 1)
3401 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3402 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3404 ADD_PARM_SIZE (locate->size, sizetree);
3406 locate->size.constant -= part_size_in_regs;
3407 #endif /* ARGS_GROW_DOWNWARD */
3410 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3411 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3414 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3415 struct args_size *alignment_pad)
3417 tree save_var = NULL_TREE;
3418 HOST_WIDE_INT save_constant = 0;
3419 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3420 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3422 #ifdef SPARC_STACK_BOUNDARY_HACK
3423 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3424 the real alignment of %sp. However, when it does this, the
3425 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3426 if (SPARC_STACK_BOUNDARY_HACK)
/* (review) save the pre-rounding offset so alignment_pad can be computed
   as the difference after rounding (lines 3456-3457, 3468-3469).  */
3430 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3432 save_var = offset_ptr->var;
3433 save_constant = offset_ptr->constant;
3436 alignment_pad->var = NULL_TREE;
3437 alignment_pad->constant = 0;
3439 if (boundary > BITS_PER_UNIT)
3441 if (offset_ptr->var)
3443 tree sp_offset_tree = ssize_int (sp_offset);
3444 tree offset = size_binop (PLUS_EXPR,
3445 ARGS_SIZE_TREE (*offset_ptr),
3447 #ifdef ARGS_GROW_DOWNWARD
3448 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3450 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3453 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3454 /* ARGS_SIZE_TREE includes constant term. */
3455 offset_ptr->constant = 0;
3456 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3457 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
/* (review) constant-only case: round (offset + sp_offset) to the boundary
   in the direction the args area grows, then subtract sp_offset back.  */
3462 offset_ptr->constant = -sp_offset +
3463 #ifdef ARGS_GROW_DOWNWARD
3464 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3466 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3468 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3469 alignment_pad->constant = offset_ptr->constant - save_constant;
/* Bump *OFFSET_PTR by the amount of padding that goes below an argument
   of mode PASSED_MODE and size SIZETREE, i.e. the difference between the
   size rounded up to PARM_BOUNDARY bits and the actual size.
   NOTE(review): elided listing; braces and the BLKmode else-branch
   structure are not fully visible here.  */
3475 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3477 if (passed_mode != BLKmode)
3479 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3480 offset_ptr->constant
3481 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3482 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3483 - GET_MODE_SIZE (passed_mode));
3487 if (TREE_CODE (sizetree) != INTEGER_CST
3488 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3490 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3491 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
/* The pad is the rounded size minus the original size.  */
3493 ADD_PARM_SIZE (*offset_ptr, s2);
3494 SUB_PARM_SIZE (*offset_ptr, sizetree);
3499 /* Walk the tree of blocks describing the binding levels within a function
3500 and warn about variables that might be killed by setjmp or vfork.
3501 This is done after calling flow_analysis and before global_alloc
3502 clobbers the pseudo-regs to hard regs. */
/* Recursively walk BLOCK and its subblocks, warning for each local
   VAR_DECL that lives in a register clobbered at a setjmp call site.
   NOTE(review): elided listing; the warning call's trailing argument
   line is not visible here.  */
3505 setjmp_vars_warning (tree block)
3509 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3511 if (TREE_CODE (decl) == VAR_DECL
3512 && DECL_RTL_SET_P (decl)
3513 && REG_P (DECL_RTL (decl))
3514 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3515 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
/* Recurse into nested binding levels.  */
3520 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3521 setjmp_vars_warning (sub);
3524 /* Do the appropriate part of setjmp_vars_warning
3525 but for arguments instead of local variables. */
/* Like setjmp_vars_warning, but for the current function's parameters:
   warn for each argument whose register is clobbered at a setjmp.  */
3528 setjmp_args_warning (void)
3531 for (decl = DECL_ARGUMENTS (current_function_decl);
3532 decl; decl = TREE_CHAIN (decl))
3533 if (DECL_RTL (decl) != 0
3534 && REG_P (DECL_RTL (decl))
3535 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3536 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3541 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3542 and create duplicate blocks. */
3543 /* ??? Need an option to either create block fragments or to create
3544 abstract origin duplicates of a source block. It really depends
3545 on what optimization has been performed. */
/* Rebuild the BLOCK tree of the current function from the nesting of
   NOTE_INSN_BLOCK_{BEG,END} notes in the insn stream, creating block
   fragments for blocks that now span multiple address regions.  */
3548 reorder_blocks (void)
3550 tree block = DECL_INITIAL (current_function_decl);
3551 VEC(tree,heap) *block_stack;
3553 if (block == NULL_TREE)
3556 block_stack = VEC_alloc (tree, heap, 10);
3558 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3559 clear_block_marks (block);
3561 /* Prune the old trees away, so that they don't get in the way. */
3562 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3563 BLOCK_CHAIN (block) = NULL_TREE;
3565 /* Recreate the block tree from the note nesting. */
3566 reorder_blocks_1 (get_insns (), block, &block_stack);
3567 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3569 /* Remove deleted blocks from the block fragment chains. */
3570 reorder_fix_fragments (block);
3572 VEC_free (tree, heap, block_stack);
3575 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
/* Clear TREE_ASM_WRITTEN on BLOCK, its subblocks, and its chain
   successors.  NOTE(review): elided listing; the enclosing loop
   construct is not visible here.  */
3578 clear_block_marks (tree block)
3582 TREE_ASM_WRITTEN (block) = 0;
3583 clear_block_marks (BLOCK_SUBBLOCKS (block));
3584 block = BLOCK_CHAIN (block);
/* Helper for reorder_blocks.  Scan INSNS for block begin/end notes and
   rebuild the tree rooted at CURRENT_BLOCK, using *P_BLOCK_STACK to
   track the open-block nesting.  A block already marked TREE_ASM_WRITTEN
   has been seen before and is duplicated as a fragment.  */
3589 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3593 for (insn = insns; insn; insn = NEXT_INSN (insn))
3597 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3599 tree block = NOTE_BLOCK (insn);
3601 /* If we have seen this block before, that means it now
3602 spans multiple address regions. Create a new fragment. */
3603 if (TREE_ASM_WRITTEN (block))
3605 tree new_block = copy_node (block);
/* Link the fragment into the origin's fragment chain and make the
   note refer to the fragment instead of the original block.  */
3608 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3609 ? BLOCK_FRAGMENT_ORIGIN (block)
3611 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3612 BLOCK_FRAGMENT_CHAIN (new_block)
3613 = BLOCK_FRAGMENT_CHAIN (origin);
3614 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3616 NOTE_BLOCK (insn) = new_block;
3620 BLOCK_SUBBLOCKS (block) = 0;
3621 TREE_ASM_WRITTEN (block) = 1;
3622 /* When there's only one block for the entire function,
3623 current_block == block and we mustn't do this, it
3624 will cause infinite recursion. */
3625 if (block != current_block)
3627 BLOCK_SUPERCONTEXT (block) = current_block;
3628 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3629 BLOCK_SUBBLOCKS (current_block) = block;
3630 current_block = block;
3632 VEC_safe_push (tree, heap, *p_block_stack, block);
/* A block-end note closes the innermost open block.  */
3634 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3636 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3637 BLOCK_SUBBLOCKS (current_block)
3638 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3639 current_block = BLOCK_SUPERCONTEXT (current_block);
3645 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3646 appears in the block tree, select one of the fragments to become
3647 the new origin block. */
/* Rationalize BLOCK_FRAGMENT_ORIGIN for BLOCK and its siblings/children:
   if a fragment origin no longer appears in the output block tree
   (TREE_ASM_WRITTEN unset), promote the first surviving fragment to be
   the new origin and re-root the remaining fragments onto it.
   NOTE(review): elided listing; the enclosing loop and some braces are
   not visible here.  */
3650 reorder_fix_fragments (tree block)
3654 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3655 tree new_origin = NULL_TREE;
3659 if (! TREE_ASM_WRITTEN (dup_origin))
3661 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3663 /* Find the first of the remaining fragments. There must
3664 be at least one -- the current block. */
3665 while (! TREE_ASM_WRITTEN (new_origin))
3666 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3667 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3670 else if (! dup_origin)
3673 /* Re-root the rest of the fragments to the new origin. In the
3674 case that DUP_ORIGIN was null, that means BLOCK was the origin
3675 of a chain of fragments and we want to remove those fragments
3676 that didn't make it to the output. */
3679 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
/* Keep only surviving fragments, splicing dead ones out of the chain.  */
3684 if (TREE_ASM_WRITTEN (chain))
3686 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3688 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3690 chain = BLOCK_FRAGMENT_CHAIN (chain);
/* Recurse into children, then advance along the sibling chain.  */
3695 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3696 block = BLOCK_CHAIN (block);
3700 /* Reverse the order of elements in the chain T of blocks,
3701 and return the new head of the chain (old last element). */
/* Destructively reverse the BLOCK_CHAIN list T and return the new head
   (the old last element).  NOTE(review): elided listing; the statement
   advancing PREV and the return are not visible here.  */
3704 blocks_nreverse (tree t)
3706 tree prev = 0, decl, next;
3707 for (decl = t; decl; decl = next)
3709 next = BLOCK_CHAIN (decl);
3710 BLOCK_CHAIN (decl) = prev;
3716 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3717 non-NULL, list them all into VECTOR, in a depth-first preorder
3718 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
/* Count BLOCK and all blocks reachable via subblock/chain links,
   clearing TREE_ASM_WRITTEN on each; if VECTOR is non-NULL, also store
   the blocks into it in depth-first preorder.  Returns the count
   (per the comment above; return statement elided from this listing).  */
3722 all_blocks (tree block, tree *vector)
3728 TREE_ASM_WRITTEN (block) = 0;
3730 /* Record this block. */
3732 vector[n_blocks] = block;
3736 /* Record the subblocks, and their subblocks... */
3737 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3738 vector ? vector + n_blocks : 0);
3739 block = BLOCK_CHAIN (block);
3745 /* Return a vector containing all the blocks rooted at BLOCK. The
3746 number of elements in the vector is stored in N_BLOCKS_P. The
3747 vector is dynamically allocated; it is the caller's responsibility
3748 to call `free' on the pointer returned. */
/* Return a freshly malloc'd vector of all blocks rooted at BLOCK, in
   depth-first preorder; store the element count in *N_BLOCKS_P.  The
   caller owns the returned vector and must free it.  */
3751 get_block_vector (tree block, int *n_blocks_p)
/* First pass counts; second pass fills the allocated vector.  */
3755 *n_blocks_p = all_blocks (block, NULL);
3756 block_vector = XNEWVEC (tree, *n_blocks_p);
3757 all_blocks (block, block_vector);
3759 return block_vector;
/* Running counter used by number_blocks to assign BLOCK_NUMBERs;
   GC-rooted so it persists across functions.  Starts at 2 because the
   top-level block is not numbered (see number_blocks below).  */
3762 static GTY(()) int next_block_index = 2;
3764 /* Set BLOCK_NUMBER for all the blocks in FN. */
/* Assign BLOCK_NUMBER to every block of function FN except the
   top-level one, using the global next_block_index counter (restarted
   at 1 per function for SDB/XCOFF debug output).  */
3767 number_blocks (tree fn)
3773 /* For SDB and XCOFF debugging output, we start numbering the blocks
3774 from 1 within each function, rather than keeping a running
3776 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3777 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3778 next_block_index = 1;
3781 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3783 /* The top-level BLOCK isn't numbered at all. */
3784 for (i = 1; i < n_blocks; ++i)
3785 /* We number the blocks from two. */
3786 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3788 free (block_vector);
3793 /* If VAR is present in a subblock of BLOCK, return the subblock. */
/* Debug helper: search BLOCK and its subblocks for VAR; return the
   subblock containing it, or (per the listing's elided return paths)
   fall through when not found.  */
3796 debug_find_var_in_block_tree (tree var, tree block)
3800 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3804 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3806 tree ret = debug_find_var_in_block_tree (var, t);
3814 /* Allocate a function structure for FNDECL and set its contents
/* Allocate and initialize a zeroed struct function for FNDECL, set it
   as CFUN, and derive per-function flags (struct/pointer return,
   stdarg) from FNDECL's type.  FNDECL may be NULL (dummy function).
   NOTE(review): elided listing; the early-return/guard for a NULL
   FNDECL is not visible here.  */
3818 allocate_struct_function (tree fndecl)
3821 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3823 cfun = ggc_alloc_cleared (sizeof (struct function));
3825 cfun->stack_alignment_needed = STACK_BOUNDARY;
3826 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3828 current_function_funcdef_no = funcdef_no++;
3830 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3832 init_eh_for_function ();
3834 lang_hooks.function.init (cfun);
3835 if (init_machine_status)
3836 cfun->machine = (*init_machine_status) ();
3841 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3842 cfun->decl = fndecl;
3844 result = DECL_RESULT (fndecl);
3845 if (aggregate_value_p (result, fndecl))
3847 #ifdef PCC_STATIC_STRUCT_RETURN
3848 current_function_returns_pcc_struct = 1;
3850 current_function_returns_struct = 1;
3853 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
/* A function is stdarg if its last declared argument type is not void.  */
3855 current_function_stdarg
3857 && TYPE_ARG_TYPES (fntype) != 0
3858 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3859 != void_type_node));
3861 /* Assume all registers in stdarg functions need to be saved. */
3862 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3863 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3866 /* Reset cfun, and other non-struct-function variables to defaults as
3867 appropriate for emitting rtl at the start of a function. */
/* Reset CFUN and related globals to the state expected at the start of
   RTL emission for FNDECL, reusing an existing struct function if
   FNDECL already has one.  */
3870 prepare_function_start (tree fndecl)
3872 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3873 cfun = DECL_STRUCT_FUNCTION (fndecl)
3875 allocate_struct_function (fndecl);
3877 init_varasm_status (cfun);
3880 cse_not_expected = ! optimize;
3882 /* Caller save not needed yet. */
3883 caller_save_needed = 0;
3885 /* We haven't done register allocation yet. */
3888 /* Indicate that we have not instantiated virtual registers yet. */
3889 virtuals_instantiated = 0;
3891 /* Indicate that we want CONCATs now. */
3892 generating_concat_p = 1;
3894 /* Indicate we have no need of a frame pointer yet. */
3895 frame_pointer_needed = 0;
3898 /* Initialize the rtl expansion mechanism so that we can do simple things
3899 like generate sequences. This is used to provide a context during global
3900 initialization of some passes. */
/* Set up a dummy (NULL-decl) function context so simple RTL sequences
   can be generated during global pass initialization; undone by
   expand_dummy_function_end.  */
3902 init_dummy_function_start (void)
3904 prepare_function_start (NULL);
3907 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3908 and initialize static variables for generating RTL for the statements
/* Begin RTL generation for function SUBR: prepare function state, emit
   the initial line note and an undeletable NOTE_INSN_DELETED, and warn
   if the function returns an aggregate.  */
3912 init_function_start (tree subr)
3914 prepare_function_start (subr);
3916 /* Prevent ever trying to delete the first instruction of a
3917 function. Also tell final how to output a linenum before the
3918 function prologue. Note linenums could be missing, e.g. when
3919 compiling a Java .class file. */
3920 if (! DECL_IS_BUILTIN (subr))
3921 emit_line_note (DECL_SOURCE_LOCATION (subr));
3923 /* Make sure first insn is a note even if we don't want linenums.
3924 This makes sure the first insn will never be deleted.
3925 Also, final expects a note to appear there. */
3926 emit_note (NOTE_INSN_DELETED);
3928 /* Warn if this value is an aggregate type,
3929 regardless of which calling convention we are using for it. */
3930 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3931 warning (OPT_Waggregate_return, "function returns an aggregate");
3934 /* Make sure all values used by the optimization passes have sane
/* Sanity-check per-function state before optimization passes run:
   the prologue/epilogue insn-UID vectors must still be empty.  */
3937 init_function_for_compilation (void)
3941 /* No prologue/epilogue insns yet. Make sure that these vectors are
3943 gcc_assert (VEC_length (int, prologue) == 0);
3944 gcc_assert (VEC_length (int, epilogue) == 0);
3945 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
/* Pass descriptor wiring init_function_for_compilation into the pass
   manager; all optional fields are zero.  NOTE(review): elided listing;
   several initializer lines (name, gate, tv_id) are not visible here.  */
3949 struct tree_opt_pass pass_init_function =
3953 init_function_for_compilation, /* execute */
3956 0, /* static_pass_number */
3958 0, /* properties_required */
3959 0, /* properties_provided */
3960 0, /* properties_destroyed */
3961 0, /* todo_flags_start */
3962 0, /* todo_flags_finish */
/* Emit the call to __main at the start of `main', on targets that
   perform static-constructor initialization that way (no init
   section/array support, or INVOKE__main defined).  */
3968 expand_main_function (void)
3970 #if (defined(INVOKE__main) \
3971 || (!defined(HAS_INIT_SECTION) \
3972 && !defined(INIT_SECTION_ASM_OP) \
3973 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3974 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3978 /* Expand code to initialize the stack_protect_guard. This is invoked at
3979 the beginning of a function to be protected. */
3981 #ifndef HAVE_stack_protect_set
3982 # define HAVE_stack_protect_set 0
3983 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
/* Emit code at function entry to copy the stack-protector guard value
   into the function's guard slot, preferring the target's dedicated
   stack_protect_set pattern so the guard does not leak into a register.  */
3987 stack_protect_prologue (void)
3989 tree guard_decl = targetm.stack_protect_guard ();
3992 /* Avoid expand_expr here, because we don't want guard_decl pulled
3993 into registers unless absolutely necessary. And we know that
3994 cfun->stack_protect_guard is a local stack slot, so this skips
3996 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3997 y = validize_mem (DECL_RTL (guard_decl));
3999 /* Allow the target to copy from Y to X without leaking Y into a
4001 if (HAVE_stack_protect_set)
4003 rtx insn = gen_stack_protect_set (x, y);
4011 /* Otherwise do a straight move. */
4012 emit_move_insn (x, y);
4015 /* Expand code to verify the stack_protect_guard. This is invoked at
4016 the end of a function to be protected. */
4018 #ifndef HAVE_stack_protect_test
4019 # define HAVE_stack_protect_test 0
4020 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
/* Emit code at function exit to compare the guard slot against the
   live guard value and call the target's stack_protect_fail hook on
   mismatch; the comparison jumps to LABEL on success.
   NOTE(review): elided listing; parts of the switch and fallthrough
   structure are not visible here.  */
4024 stack_protect_epilogue (void)
4026 tree guard_decl = targetm.stack_protect_guard ();
4027 rtx label = gen_label_rtx ();
4030 /* Avoid expand_expr here, because we don't want guard_decl pulled
4031 into registers unless absolutely necessary. And we know that
4032 cfun->stack_protect_guard is a local stack slot, so this skips
4034 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4035 y = validize_mem (DECL_RTL (guard_decl));
4037 /* Allow the target to compare Y with X without leaking either into
4039 switch (HAVE_stack_protect_test != 0)
4042 tmp = gen_stack_protect_test (x, y, label);
/* Fallback: generic compare-and-jump when no target pattern exists.  */
4051 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4055 /* The noreturn predictor has been moved to the tree level. The rtl-level
4056 predictors estimate this branch about 20%, which isn't enough to get
4057 things moved out of line. Since this is the only extant case of adding
4058 a noreturn function at the rtl level, it doesn't seem worth doing aught
4059 except adding the prediction by hand. */
4060 tmp = get_last_insn ();
4062 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4064 expand_expr_stmt (targetm.stack_protect_fail ());
4068 /* Start the RTL for a new function, and set variables used for
4070 SUBR is the FUNCTION_DECL node.
4071 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4072 the function's parameters, which must be run at any return statement. */
/* Start RTL generation for function SUBR: set per-function flags, create
   the return label, decide how the return value is represented (memory
   address, pseudo register, or PARALLEL), assign parameter RTL, store
   the static chain and nonlocal-goto save area if present, and emit the
   NOTE_INSN_FUNCTION_BEG marking the end of parameter setup.
   NOTE(review): elided listing -- the embedded line numbers jump, so
   braces, else-branches and some statements are not visible here.  */
4075 expand_function_start (tree subr)
4077 /* Make sure volatile mem refs aren't considered
4078 valid operands of arithmetic insns. */
4079 init_recog_no_volatile ();
4081 current_function_profile
4083 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4085 current_function_limit_stack
4086 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4088 /* Make the label for return statements to jump to. Do not special
4089 case machines with special return instructions -- they will be
4090 handled later during jump, ifcvt, or epilogue creation. */
4091 return_label = gen_label_rtx ();
4093 /* Initialize rtx used to return the value. */
4094 /* Do this before assign_parms so that we copy the struct value address
4095 before any library calls that assign parms might generate. */
4097 /* Decide whether to return the value in memory or in a register. */
4098 if (aggregate_value_p (DECL_RESULT (subr), subr))
4100 /* Returning something that won't go in a register. */
4101 rtx value_address = 0;
4103 #ifdef PCC_STATIC_STRUCT_RETURN
4104 if (current_function_returns_pcc_struct)
4106 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4107 value_address = assemble_static_space (size);
4112 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4113 /* Expect to be passed the address of a place to store the value.
4114 If it is passed as an argument, assign_parms will take care of
4118 value_address = gen_reg_rtx (Pmode);
4119 emit_move_insn (value_address, sv);
4124 rtx x = value_address;
4125 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4127 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4128 set_mem_attributes (x, DECL_RESULT (subr), 1);
4130 SET_DECL_RTL (DECL_RESULT (subr), x);
4133 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4134 /* If return mode is void, this decl rtl should not be used. */
4135 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4138 /* Compute the return values into a pseudo reg, which we will copy
4139 into the true return register after the cleanups are done. */
4140 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4141 if (TYPE_MODE (return_type) != BLKmode
4142 && targetm.calls.return_in_msb (return_type))
4143 /* expand_function_end will insert the appropriate padding in
4144 this case. Use the return value's natural (unpadded) mode
4145 within the function proper. */
4146 SET_DECL_RTL (DECL_RESULT (subr),
4147 gen_reg_rtx (TYPE_MODE (return_type)));
4150 /* In order to figure out what mode to use for the pseudo, we
4151 figure out what the mode of the eventual return register will
4152 actually be, and use that. */
4153 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4155 /* Structures that are returned in registers are not
4156 aggregate_value_p, so we may see a PARALLEL or a REG. */
4157 if (REG_P (hard_reg))
4158 SET_DECL_RTL (DECL_RESULT (subr),
4159 gen_reg_rtx (GET_MODE (hard_reg)));
4162 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4163 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4167 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4168 result to the real return register(s). */
4169 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4172 /* Initialize rtx for parameters and local variables.
4173 In some cases this requires emitting insns. */
4174 assign_parms (subr);
4176 /* If function gets a static chain arg, store it. */
4177 if (cfun->static_chain_decl)
4179 tree parm = cfun->static_chain_decl;
4180 rtx local = gen_reg_rtx (Pmode);
4182 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4183 SET_DECL_RTL (parm, local);
4184 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4186 emit_move_insn (local, static_chain_incoming_rtx);
4189 /* If the function receives a non-local goto, then store the
4190 bits we need to restore the frame pointer. */
4191 if (cfun->nonlocal_goto_save_area)
4196 /* ??? We need to do this save early. Unfortunately here is
4197 before the frame variable gets declared. Help out... */
4198 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4200 t_save = build4 (ARRAY_REF, ptr_type_node,
4201 cfun->nonlocal_goto_save_area,
4202 integer_zero_node, NULL_TREE, NULL_TREE);
4203 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4204 r_save = convert_memory_address (Pmode, r_save);
4206 emit_move_insn (r_save, virtual_stack_vars_rtx);
4207 update_nonlocal_goto_save_area ();
4210 /* The following was moved from init_function_start.
4211 The move is supposed to make sdb output more accurate. */
4212 /* Indicate the beginning of the function body,
4213 as opposed to parm setup. */
4214 emit_note (NOTE_INSN_FUNCTION_BEG);
4216 gcc_assert (NOTE_P (get_last_insn ()));
4218 parm_birth_insn = get_last_insn ();
4220 if (current_function_profile)
4223 PROFILE_HOOK (current_function_funcdef_no);
4227 /* After the display initializations is where the stack checking
4229 if(flag_stack_check)
4230 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4232 /* Make sure there is a line number after the function entry setup code. */
4233 force_next_line_note ();
4236 /* Undo the effects of init_dummy_function_start. */
/* Undo init_dummy_function_start: close any dangling sequences and
   free the dummy function's parsing- and compilation-time state.  */
4238 expand_dummy_function_end (void)
4240 /* End any sequences that failed to be closed due to syntax errors. */
4241 while (in_sequence_p ())
4244 /* Outside function body, can't compute type's actual size
4245 until next function's body starts. */
4247 free_after_parsing (cfun);
4248 free_after_compilation (cfun);
4252 /* Call DOIT for each hard register used as a return value from
4253 the current function. */
/* Invoke DOIT (with ARG) on each hard register that carries the current
   function's return value: the return rtx itself if it is a REG, or
   each hard-register element of a PARALLEL.  */
4256 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4258 rtx outgoing = current_function_return_rtx;
4263 if (REG_P (outgoing))
4264 (*doit) (outgoing, arg);
4265 else if (GET_CODE (outgoing) == PARALLEL)
4269 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4271 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4273 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
/* Callback for diddle_return_value: emit a CLOBBER of REG.  */
4280 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4282 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
/* Emit CLOBBERs for every return-value hard register, and for the
   pseudo holding the return value, if any, so the registers are not
   treated as live past this point.  */
4286 clobber_return_register (void)
4288 diddle_return_value (do_clobber_return_reg, NULL);
4290 /* In case we do use pseudo to return value, clobber it too. */
4291 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4293 tree decl_result = DECL_RESULT (current_function_decl);
4294 rtx decl_rtl = DECL_RTL (decl_result);
4295 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4297 do_clobber_return_reg (decl_rtl, NULL);
/* Callback for diddle_return_value: emit a USE of REG.  */
4303 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4305 emit_insn (gen_rtx_USE (VOIDmode, reg));
/* Emit USEs for every return-value hard register, keeping them live
   for passes that compute their own lifetime information.  */
4309 use_return_register (void)
4311 diddle_return_value (do_use_return_reg, NULL);
4314 /* Possibly warn about unused parameters. */
/* Emit -Wunused-parameter warnings for each named, non-artificial
   PARM_DECL of FN that was never used.  */
4316 do_warn_unused_parameter (tree fn)
4320 for (decl = DECL_ARGUMENTS (fn);
4321 decl; decl = TREE_CHAIN (decl))
4322 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4323 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4324 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
/* GC-rooted cached trampoline template rtx; not referenced in the
   visible portion of this listing.  */
4327 static GTY(()) rtx initial_trampoline;
4329 /* Generate RTL for the end of the current function. */
/* Finish RTL generation for the current function: emit the optional
   entry stack probe, the function-end note, the return label, the copy
   of the return value from its pseudo/stack home into the real return
   register(s), the struct-return address move, the return-register
   clobbers, the naked-return label, the stack-protector check, and the
   alloca stack save/restore.
   NOTE(review): elided listing -- the embedded line numbers jump, so
   braces, else-branches and some statements are not visible here.  */
4332 expand_function_end (void)
4336 /* If arg_pointer_save_area was referenced only from a nested
4337 function, we will not have initialized it yet. Do that now. */
4338 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4339 get_arg_pointer_save_area (cfun);
4341 /* If we are doing stack checking and this function makes calls,
4342 do a stack probe at the start of the function to ensure we have enough
4343 space for another stack frame. */
4344 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4348 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4352 probe_stack_range (STACK_CHECK_PROTECT,
4353 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4356 emit_insn_before (seq, stack_check_probe_note);
4361 /* Possibly warn about unused parameters.
4362 When frontend does unit-at-a-time, the warning is already
4363 issued at finalization time. */
4364 if (warn_unused_parameter
4365 && !lang_hooks.callgraph.expand_function)
4366 do_warn_unused_parameter (current_function_decl);
4368 /* End any sequences that failed to be closed due to syntax errors. */
4369 while (in_sequence_p ())
4372 clear_pending_stack_adjust ();
4373 do_pending_stack_adjust ();
4375 /* Mark the end of the function body.
4376 If control reaches this insn, the function can drop through
4377 without returning a value. */
4378 emit_note (NOTE_INSN_FUNCTION_END);
4380 /* Must mark the last line number note in the function, so that the test
4381 coverage code can avoid counting the last line twice. This just tells
4382 the code to ignore the immediately following line note, since there
4383 already exists a copy of this note somewhere above. This line number
4384 note is still needed for debugging though, so we can't delete it. */
4385 if (flag_test_coverage)
4386 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4388 /* Output a linenumber for the end of the function.
4389 SDB depends on this. */
4390 force_next_line_note ();
4391 emit_line_note (input_location);
4393 /* Before the return label (if any), clobber the return
4394 registers so that they are not propagated live to the rest of
4395 the function. This can only happen with functions that drop
4396 through; if there had been a return statement, there would
4397 have either been a return rtx, or a jump to the return label.
4399 We delay actual code generation after the current_function_value_rtx
4401 clobber_after = get_last_insn ();
4403 /* Output the label for the actual return from the function. */
4404 emit_label (return_label);
4406 if (USING_SJLJ_EXCEPTIONS)
4408 /* Let except.c know where it should emit the call to unregister
4409 the function context for sjlj exceptions. */
4410 if (flag_exceptions)
4411 sjlj_emit_function_exit_after (get_last_insn ());
4415 /* @@@ This is a kludge. We want to ensure that instructions that
4416 may trap are not moved into the epilogue by scheduling, because
4417 we don't always emit unwind information for the epilogue.
4418 However, not all machine descriptions define a blockage insn, so
4419 emit an ASM_INPUT to act as one. */
4420 if (flag_non_call_exceptions)
4421 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4424 /* If this is an implementation of throw, do what's necessary to
4425 communicate between __builtin_eh_return and the epilogue. */
4426 expand_eh_return ();
4428 /* If scalar return value was computed in a pseudo-reg, or was a named
4429 return value that got dumped to the stack, copy that to the hard
4431 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4433 tree decl_result = DECL_RESULT (current_function_decl);
4434 rtx decl_rtl = DECL_RTL (decl_result);
4436 if (REG_P (decl_rtl)
4437 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4438 : DECL_REGISTER (decl_result))
4440 rtx real_decl_rtl = current_function_return_rtx;
4442 /* This should be set in assign_parms. */
4443 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4445 /* If this is a BLKmode structure being returned in registers,
4446 then use the mode computed in expand_return. Note that if
4447 decl_rtl is memory, then its mode may have been changed,
4448 but that current_function_return_rtx has not. */
4449 if (GET_MODE (real_decl_rtl) == BLKmode)
4450 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4452 /* If a non-BLKmode return value should be padded at the least
4453 significant end of the register, shift it left by the appropriate
4454 amount. BLKmode results are handled using the group load/store
4456 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4457 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4459 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4460 REGNO (real_decl_rtl)),
4462 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4464 /* If a named return value dumped decl_return to memory, then
4465 we may need to re-do the PROMOTE_MODE signed/unsigned
4467 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4469 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4471 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4472 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4475 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4477 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4479 /* If expand_function_start has created a PARALLEL for decl_rtl,
4480 move the result to the real return registers. Otherwise, do
4481 a group load from decl_rtl for a named return. */
4482 if (GET_CODE (decl_rtl) == PARALLEL)
4483 emit_group_move (real_decl_rtl, decl_rtl);
4485 emit_group_load (real_decl_rtl, decl_rtl,
4486 TREE_TYPE (decl_result),
4487 int_size_in_bytes (TREE_TYPE (decl_result)));
4489 /* In the case of complex integer modes smaller than a word, we'll
4490 need to generate some non-trivial bitfield insertions. Do that
4491 on a pseudo and not the hard register. */
4492 else if (GET_CODE (decl_rtl) == CONCAT
4493 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4494 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4496 int old_generating_concat_p;
4499 old_generating_concat_p = generating_concat_p;
4500 generating_concat_p = 0;
4501 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4502 generating_concat_p = old_generating_concat_p;
4504 emit_move_insn (tmp, decl_rtl);
4505 emit_move_insn (real_decl_rtl, tmp);
4508 emit_move_insn (real_decl_rtl, decl_rtl);
4512 /* If returning a structure, arrange to return the address of the value
4513 in a place where debuggers expect to find it.
4515 If returning a structure PCC style,
4516 the caller also depends on this value.
4517 And current_function_returns_pcc_struct is not necessarily set. */
4518 if (current_function_returns_struct
4519 || current_function_returns_pcc_struct)
4521 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4522 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4525 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4526 type = TREE_TYPE (type);
4528 value_address = XEXP (value_address, 0);
4530 outgoing = targetm.calls.function_value (build_pointer_type (type),
4531 current_function_decl, true);
4533 /* Mark this as a function return value so integrate will delete the
4534 assignment and USE below when inlining this function. */
4535 REG_FUNCTION_VALUE_P (outgoing) = 1;
4537 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4538 value_address = convert_memory_address (GET_MODE (outgoing),
4541 emit_move_insn (outgoing, value_address);
4543 /* Show return register used to hold result (in this case the address
4545 current_function_return_rtx = outgoing;
4548 /* Emit the actual code to clobber return register. */
4553 clobber_return_register ();
4554 expand_naked_return ();
4558 emit_insn_after (seq, clobber_after);
4561 /* Output the label for the naked return from the function. */
4562 emit_label (naked_return_label);
4564 /* If stack protection is enabled for this function, check the guard. */
4565 if (cfun->stack_protect_guard)
4566 stack_protect_epilogue ();
4568 /* If we had calls to alloca, and this machine needs
4569 an accurate stack pointer to exit the function,
4570 insert some code to save and restore the stack pointer. */
4571 if (! EXIT_IGNORE_STACK
4572 && current_function_calls_alloca)
4576 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4577 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4580 /* ??? This should no longer be necessary since stupid is no longer with
4581 us, but there are some parts of the compiler (eg reload_combine, and
4582 sh mach_dep_reorg) that still try and compute their own lifetime info
4583 instead of using the general framework. */
4584 use_return_register ();
/* Return (allocating on first use) the stack slot of function F that
   holds the saved incoming argument pointer; for the current function,
   also emit the save of the arg pointer at function entry once.  */
4588 get_arg_pointer_save_area (struct function *f)
4590 rtx ret = f->x_arg_pointer_save_area;
4594 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4595 f->x_arg_pointer_save_area = ret;
4598 if (f == cfun && ! f->arg_pointer_save_area_init)
4602 /* Save the arg pointer at the beginning of the function. The
4603 generated stack slot may not be a valid memory address, so we
4604 have to check it and fix it if necessary. */
4606 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
/* Splice the save insns in at the function entry point.  */
4610 push_topmost_sequence ();
4611 emit_insn_after (seq, entry_of_function ());
4612 pop_topmost_sequence ();
4618 /* Extend a vector that records the INSN_UIDs of INSNS
4619 (a list of one or more insns). */
/* Append the INSN_UID of every insn in the chain INSNS onto the
   heap-allocated int vector *VECP.  Used to remember which insns belong
   to the prologue/epilogue so `contains' can query membership later.  */
4622 record_insns (rtx insns, VEC(int,heap) **vecp)
4626 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4627 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4630 /* Set the locator of the insn chain starting at INSN to LOC. */
/* Walk the insn chain starting at INSN to its end and set each insn's
   INSN_LOCATOR to LOC.  NOTE(review): the elided interior presumably
   guards non-insn rtx before writing the locator — confirm in the
   original source.  */
4632 set_insn_locators (rtx insn, int loc)
4634 while (insn != NULL_RTX)
4637 INSN_LOCATOR (insn) = loc;
4638 insn = NEXT_INSN (insn);
4642 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4643 be running after reorg, SEQUENCE rtl is possible. */
/* Return how many INSN_UIDs recorded in *VEC are part of INSN.  Because
   this can run after reorg, INSN may be a SEQUENCE wrapping several
   insns; in that case each element of the SEQUENCE is matched against
   the vector.  */
4646 contains (rtx insn, VEC(int,heap) **vec)
4650 if (NONJUMP_INSN_P (insn)
4651 && GET_CODE (PATTERN (insn)) == SEQUENCE)
/* SEQUENCE case: test every element's UID against every recorded UID.  */
4654 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4655 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4656 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4657 == VEC_index (int, *vec, j))
/* Plain insn case: a single UID lookup.  */
4663 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4664 if (INSN_UID (insn) == VEC_index (int, *vec, j))
/* Return nonzero if INSN was recorded as part of the function's
   prologue or epilogue (see record_insns/contains).  */
4671 prologue_epilogue_contains (rtx insn)
4673 if (contains (insn, &prologue))
4675 if (contains (insn, &epilogue))
/* Return nonzero if INSN belongs to a recorded sibcall epilogue;
   zero when no sibcall epilogues have been recorded at all.  */
4681 sibcall_epilogue_contains (rtx insn)
4683 if (sibcall_epilogue)
4684 return contains (insn, &sibcall_epilogue);
4689 /* Insert gen_return at the end of block BB. This also means updating
4690 block_for_insn appropriately. */
/* Append a (return) jump insn at the end of basic block BB, and — when
   LINE_NOTE is given — copy that line-number note just before the new
   return so debuggers associate it with the closing brace.
   NOTE(review): an elided guard presumably checks LINE_NOTE for null.  */
4693 emit_return_into_block (basic_block bb, rtx line_note)
4695 emit_jump_insn_after (gen_return (), BB_END (bb));
4697 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4699 #endif /* HAVE_return */
4701 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4703 /* These functions convert the epilogue into a variant that does not
4704 modify the stack pointer. This is used in cases where a function
4705 returns an object whose size is not known until it is computed.
4706 The called function leaves the object on the stack, leaves the
4707 stack depressed, and returns a pointer to the object.
4709 What we need to do is track all modifications and references to the
4710 stack pointer, deleting the modifications and changing the
4711 references to point to the location the stack pointer would have
4712 pointed to had the modifications taken place.
4714 These functions need to be portable so we need to make as few
4715 assumptions about the epilogue as we can. However, the epilogue
4716 basically contains three things: instructions to reset the stack
4717 pointer, instructions to reload registers, possibly including the
4718 frame pointer, and an instruction to return to the caller.
4720 We must be sure of what a relevant epilogue insn is doing. We also
4721 make no attempt to validate the insns we make since if they are
4722 invalid, we probably can't do anything valid. The intent is that
4723 these routines get "smarter" as more and more machines start to use
4724 them and they try operating on different epilogues.
4726 We use the following structure to track what the part of the
4727 epilogue that we've already processed has done. We keep two copies
4728 of the SP equivalence, one for use during the insn we are
4729 processing and one for use in the next insn. The difference is
4730 because one part of a PARALLEL may adjust SP and the other may use
/* Fields of struct epi_info (opening of the struct is elided from this
   listing).  Two copies of the SP equivalence are kept: the sp_* pair is
   valid while processing the current insn, the new_sp_* pair becomes
   current once the insn is done — necessary because one arm of a
   PARALLEL may adjust SP while another arm still uses the old value.  */
4735 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4736 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4737 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4738 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4739 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4740 should be set to once we no longer need
/* Per-hard-register table of known CONST_INT equivalences, maintained
   by update_epilogue_consts.  */
4742 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4746 static void handle_epilogue_set (rtx, struct epi_info *);
4747 static void update_epilogue_consts (rtx, rtx, void *);
4748 static void emit_equiv_load (struct epi_info *);
4750 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4751 no modifications to the stack pointer. Return the new list of insns. */
/* Rewrite the epilogue insn list INSNS so that it performs no
   modifications of the stack pointer, for functions that return with
   the stack depressed.  Returns the (possibly new) insn list.
   NOTE(review): listing is elided — start_sequence/end_sequence, the
   single-insn early return and several braces are missing from view.  */
4754 keep_stack_depressed (rtx insns)
4757 struct epi_info info;
4760 /* If the epilogue is just a single instruction, it must be OK as is. */
4761 if (NEXT_INSN (insns) == NULL_RTX)
4764 /* Otherwise, start a sequence, initialize the information we have, and
4765 process all the insns we were given. */
4768 info.sp_equiv_reg = stack_pointer_rtx;
4770 info.equiv_reg_src = 0;
4772 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4773 info.const_equiv[j] = 0;
4777 while (insn != NULL_RTX)
4779 next = NEXT_INSN (insn);
4788 /* If this insn references the register that SP is equivalent to and
4789 we have a pending load to that register, we must force out the load
4790 first and then indicate we no longer know what SP's equivalent is. */
4791 if (info.equiv_reg_src != 0
4792 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4794 emit_equiv_load (&info);
4795 info.sp_equiv_reg = 0;
4798 info.new_sp_equiv_reg = info.sp_equiv_reg;
4799 info.new_sp_offset = info.sp_offset;
4801 /* If this is a (RETURN) and the return address is on the stack,
4802 update the address and change to an indirect jump. */
4803 if (GET_CODE (PATTERN (insn)) == RETURN
4804 || (GET_CODE (PATTERN (insn)) == PARALLEL
4805 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4807 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4809 HOST_WIDE_INT offset = 0;
4810 rtx jump_insn, jump_set;
4812 /* If the return address is in a register, we can emit the insn
4813 unchanged. Otherwise, it must be a MEM and we see what the
4814 base register and offset are. In any case, we have to emit any
4815 pending load to the equivalent reg of SP, if any. */
4816 if (REG_P (retaddr))
4818 emit_equiv_load (&info);
4826 gcc_assert (MEM_P (retaddr));
4828 ret_ptr = XEXP (retaddr, 0);
4830 if (REG_P (ret_ptr))
4832 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
/* Only (reg) and (plus (reg) (const_int)) address forms are handled.  */
4837 gcc_assert (GET_CODE (ret_ptr) == PLUS
4838 && REG_P (XEXP (ret_ptr, 0))
4839 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4840 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4841 offset = INTVAL (XEXP (ret_ptr, 1));
4845 /* If the base of the location containing the return pointer
4846 is SP, we must update it with the replacement address. Otherwise,
4847 just build the necessary MEM. */
4848 retaddr = plus_constant (base, offset);
4849 if (base == stack_pointer_rtx)
4850 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4851 plus_constant (info.sp_equiv_reg,
4854 retaddr = gen_rtx_MEM (Pmode, retaddr);
4855 MEM_NOTRAP_P (retaddr) = 1;
4857 /* If there is a pending load to the equivalent register for SP
4858 and we reference that register, we must load our address into
4859 a scratch register and then do that load. */
4860 if (info.equiv_reg_src
4861 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
/* Search for a free call-clobbered, non-fixed hard reg that is dead at
   function exit and not referenced by the pending load's source.  */
4866 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4867 if (HARD_REGNO_MODE_OK (regno, Pmode)
4868 && !fixed_regs[regno]
4869 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4871 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4872 && !refers_to_regno_p (regno,
4873 regno + hard_regno_nregs[regno]
4875 info.equiv_reg_src, NULL)
4876 && info.const_equiv[regno] == 0)
4879 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4881 reg = gen_rtx_REG (Pmode, regno);
4882 emit_move_insn (reg, retaddr);
/* Flush the pending equivalence load, then replace the RETURN with an
   indirect jump through the (possibly relocated) return address.  */
4886 emit_equiv_load (&info);
4887 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4889 /* Show the SET in the above insn is a RETURN. */
4890 jump_set = single_set (jump_insn);
4891 gcc_assert (jump_set);
4892 SET_IS_RETURN_P (jump_set) = 1;
4895 /* If SP is not mentioned in the pattern and its equivalent register, if
4896 any, is not modified, just emit it. Otherwise, if neither is set,
4897 replace the reference to SP and emit the insn. If none of those are
4898 true, handle each SET individually. */
4899 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4900 && (info.sp_equiv_reg == stack_pointer_rtx
4901 || !reg_set_p (info.sp_equiv_reg, insn)))
4903 else if (! reg_set_p (stack_pointer_rtx, insn)
4904 && (info.sp_equiv_reg == stack_pointer_rtx
4905 || !reg_set_p (info.sp_equiv_reg, insn)))
4909 changed = validate_replace_rtx (stack_pointer_rtx,
4910 plus_constant (info.sp_equiv_reg,
4913 gcc_assert (changed);
4917 else if (GET_CODE (PATTERN (insn)) == SET)
4918 handle_epilogue_set (PATTERN (insn), &info);
4919 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4921 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4922 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4923 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
/* Commit the per-insn SP equivalence computed above.  */
4928 info.sp_equiv_reg = info.new_sp_equiv_reg;
4929 info.sp_offset = info.new_sp_offset;
4931 /* Now update any constants this insn sets. */
4932 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4936 insns = get_insns ();
4941 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4942 structure that contains information about what we've seen so far. We
4943 process this SET by either updating that data or by emitting one or
/* Process one SET from an epilogue insn.  P tracks what has been seen so
   far (see struct epi_info).  Either records a new SP equivalence, records
   a pending load of SP's equivalent register, or rewrites SP references in
   the SET and emits it.  NOTE(review): several braces and the emit calls
   at the tail are elided from this listing.  */
4947 handle_epilogue_set (rtx set, struct epi_info *p)
4949 /* First handle the case where we are setting SP. Record what it is being
4950 set from, which we must be able to determine */
4951 if (reg_set_p (stack_pointer_rtx, set))
4953 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4955 if (GET_CODE (SET_SRC (set)) == PLUS)
4957 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4958 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4959 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
/* (plus reg reg): the second register must have a known constant
   equivalence recorded in const_equiv.  */
4962 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4963 && (REGNO (XEXP (SET_SRC (set), 1))
4964 < FIRST_PSEUDO_REGISTER)
4965 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4967 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4971 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4973 /* If we are adjusting SP, we adjust from the old data. */
4974 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4976 p->new_sp_equiv_reg = p->sp_equiv_reg;
4977 p->new_sp_offset += p->sp_offset;
4980 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4985 /* Next handle the case where we are setting SP's equivalent
4986 register. We must not already have a value to set it to. We
4987 could update, but there seems little point in handling that case.
4988 Note that we have to allow for the case where we are setting the
4989 register set in the previous part of a PARALLEL inside a single
4990 insn. But use the old offset for any updates within this insn.
4991 We must allow for the case where the register is being set in a
4992 different (usually wider) mode than Pmode). */
4993 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4995 gcc_assert (!p->equiv_reg_src
4996 && REG_P (p->new_sp_equiv_reg)
4997 && REG_P (SET_DEST (set))
4998 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5000 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
/* Defer the load: record the source with SP rewritten to its current
   equivalence; emit_equiv_load will materialize it later.  */
5002 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5003 plus_constant (p->sp_equiv_reg,
5007 /* Otherwise, replace any references to SP in the insn to its new value
5008 and emit the insn. */
5011 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5012 plus_constant (p->sp_equiv_reg,
5014 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5015 plus_constant (p->sp_equiv_reg,
5021 /* Update the tracking information for registers set to constants. */
/* note_stores callback: maintain p->const_equiv, the table of hard
   registers currently known to hold CONST_INT values.  DEST is the
   register stored to, X the SET or CLOBBER performing the store, DATA
   the struct epi_info.  */
5024 update_epilogue_consts (rtx dest, rtx x, void *data)
5026 struct epi_info *p = (struct epi_info *) data;
/* Only hard registers are tracked; ignore anything else.  */
5029 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5032 /* If we are either clobbering a register or doing a partial set,
5033 show we don't know the value. */
5034 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5035 p->const_equiv[REGNO (dest)] = 0;
5037 /* If we are setting it to a constant, record that constant. */
5038 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5039 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5041 /* If this is a binary operation between a register we have been tracking
5042 and a constant, see if we can compute a new constant value. */
5043 else if (ARITHMETIC_P (SET_SRC (x))
5044 && REG_P (XEXP (SET_SRC (x), 0))
5045 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5046 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5047 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5048 && 0 != (new = simplify_binary_operation
5049 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5050 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5051 XEXP (SET_SRC (x), 1)))
5052 && GET_CODE (new) == CONST_INT)
5053 p->const_equiv[REGNO (dest)] = new;
5055 /* Otherwise, we can't do anything with this value. */
5057 p->const_equiv[REGNO (dest)] = 0;
5060 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
/* If a load into SP's equivalent register is pending (p->equiv_reg_src),
   emit it now and clear the pending state.  The destination register is
   re-created in the source's mode when the modes differ, so the move is
   mode-consistent.  */
5063 emit_equiv_load (struct epi_info *p)
5065 if (p->equiv_reg_src != 0)
5067 rtx dest = p->sp_equiv_reg;
5069 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5070 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5071 REGNO (p->sp_equiv_reg));
5073 emit_move_insn (dest, p->equiv_reg_src);
5074 p->equiv_reg_src = 0;
5079 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5080 this into place with notes indicating where the prologue ends and where
5081 the epilogue begins. Update the basic block information when possible. */
/* Generate prologue and epilogue RTL when the machine description
   supports it, thread them into the CFG via edge insertions, and emit
   NOTE_INSN_PROLOGUE_END / NOTE_INSN_EPILOGUE_BEG markers.  Also tries
   to convert blocks that merely jump to the exit fallthru block into
   (conditional) return insns, and emits sibcall epilogues before
   sibling call sites.  NOTE(review): this listing is elided; many
   braces, declarations and goto labels are missing from view.  */
5084 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5088 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5091 #ifdef HAVE_prologue
5092 rtx prologue_end = NULL_RTX;
5094 #if defined (HAVE_epilogue) || defined(HAVE_return)
5095 rtx epilogue_end = NULL_RTX;
/* --- Prologue generation ------------------------------------------ */
5099 #ifdef HAVE_prologue
5103 seq = gen_prologue ();
5106 /* Retain a map of the prologue insns. */
5107 record_insns (seq, &prologue);
5108 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5112 set_insn_locators (seq, prologue_locator);
5114 /* Can't deal with multiple successors of the entry block
5115 at the moment. Function should always have at least one
5117 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5119 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5124 /* If the exit block has no non-fake predecessors, we don't need
5126 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5127 if ((e->flags & EDGE_FAKE) == 0)
/* --- Simple-return conversion (HAVE_return) ----------------------- */
5133 if (optimize && HAVE_return)
5135 /* If we're allowed to generate a simple return instruction,
5136 then by definition we don't need a full epilogue. Examine
5137 the block that falls through to EXIT. If it does not
5138 contain any code, examine its predecessors and try to
5139 emit (conditional) return instructions. */
5144 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5145 if (e->flags & EDGE_FALLTHRU)
5151 /* Verify that there are no active instructions in the last block. */
5152 label = BB_END (last);
5153 while (label && !LABEL_P (label))
5155 if (active_insn_p (label))
5157 label = PREV_INSN (label);
5160 if (BB_HEAD (last) == label && LABEL_P (label))
5163 rtx epilogue_line_note = NULL_RTX;
5165 /* Locate the line number associated with the closing brace,
5166 if we can find one. */
5167 for (seq = get_last_insn ();
5168 seq && ! active_insn_p (seq);
5169 seq = PREV_INSN (seq))
5170 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5172 epilogue_line_note = seq;
/* Rewrite every predecessor that jumps to LABEL into a return.  */
5176 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5178 basic_block bb = e->src;
5181 if (bb == ENTRY_BLOCK_PTR)
5188 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5194 /* If we have an unconditional jump, we can replace that
5195 with a simple return instruction. */
5196 if (simplejump_p (jump))
5198 emit_return_into_block (bb, epilogue_line_note)
5202 /* If we have a conditional jump, we can try to replace
5203 that with a conditional return instruction. */
5204 else if (condjump_p (jump))
5206 if (! redirect_jump (jump, 0, 0))
5212 /* If this block has only one successor, it both jumps
5213 and falls through to the fallthru block, so we can't
5215 if (single_succ_p (bb))
5227 /* Fix up the CFG for the successful change we just made. */
5228 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5231 /* Emit a return insn for the exit fallthru block. Whether
5232 this is still reachable will be determined later. */
5234 emit_barrier_after (BB_END (last));
5235 emit_return_into_block (last, epilogue_line_note);
5236 epilogue_end = BB_END (last);
5237 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
/* --- Full epilogue generation ------------------------------------- */
5242 /* Find the edge that falls through to EXIT. Other edges may exist
5243 due to RETURN instructions, but those don't need epilogues.
5244 There really shouldn't be a mixture -- either all should have
5245 been converted or none, however... */
5247 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5248 if (e->flags & EDGE_FALLTHRU)
5253 #ifdef HAVE_epilogue
5257 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5259 seq = gen_epilogue ();
5261 #ifdef INCOMING_RETURN_ADDR_RTX
5262 /* If this function returns with the stack depressed and we can support
5263 it, massage the epilogue to actually do that. */
5264 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5265 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5266 seq = keep_stack_depressed (seq);
5269 emit_jump_insn (seq);
5271 /* Retain a map of the epilogue insns. */
5272 record_insns (seq, &epilogue);
5273 set_insn_locators (seq, epilogue_locator);
5278 insert_insn_on_edge (seq, e);
/* No epilogue pattern but a fall-through edge to EXIT exists: let
   cfglayout's fixup reorder the blocks instead of forcing a jump.  */
5286 if (! next_active_insn (BB_END (e->src)))
5288 /* We have a fall-through edge to the exit block, the source is not
5289 at the end of the function, and there will be an assembler epilogue
5290 at the end of the function.
5291 We can't use force_nonfallthru here, because that would try to
5292 use return. Inserting a jump 'by hand' is extremely messy, so
5293 we take advantage of cfg_layout_finalize using
5294 fixup_fallthru_exit_predecessor. */
5295 cfg_layout_initialize (0);
5296 FOR_EACH_BB (cur_bb)
5297 if (cur_bb->index >= NUM_FIXED_BLOCKS
5298 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5299 cur_bb->aux = cur_bb->next_bb;
5300 cfg_layout_finalize ();
5305 commit_edge_insertions ();
/* --- Sibcall epilogues -------------------------------------------- */
5307 #ifdef HAVE_sibcall_epilogue
5308 /* Emit sibling epilogues before any sibling call sites. */
5309 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5311 basic_block bb = e->src;
5312 rtx insn = BB_END (bb);
5315 || ! SIBLING_CALL_P (insn))
5322 emit_insn (gen_sibcall_epilogue ());
5326 /* Retain a map of the epilogue insns. Used in life analysis to
5327 avoid getting rid of sibcall epilogue insns. Do this before we
5328 actually emit the sequence. */
5329 record_insns (seq, &sibcall_epilogue);
5330 set_insn_locators (seq, epilogue_locator);
5332 emit_insn_before (seq, insn);
/* --- Line-note fixups around the inserted prologue/epilogue ------- */
5337 #ifdef HAVE_prologue
5338 /* This is probably all useless now that we use locators. */
5343 /* GDB handles `break f' by setting a breakpoint on the first
5344 line note after the prologue. Which means (1) that if
5345 there are line number notes before where we inserted the
5346 prologue we should move them, and (2) we should generate a
5347 note before the end of the first basic block, if there isn't
5350 ??? This behavior is completely broken when dealing with
5351 multiple entry functions. We simply place the note always
5352 into first basic block and let alternate entry points
5356 for (insn = prologue_end; insn; insn = prev)
5358 prev = PREV_INSN (insn);
5359 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5361 /* Note that we cannot reorder the first insn in the
5362 chain, since rest_of_compilation relies on that
5363 remaining constant. */
5366 reorder_insns (insn, insn, prologue_end);
5370 /* Find the last line number note in the first block. */
5371 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5372 insn != prologue_end && insn;
5373 insn = PREV_INSN (insn))
5374 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5377 /* If we didn't find one, make a copy of the first line number
5381 for (insn = next_active_insn (prologue_end);
5383 insn = PREV_INSN (insn))
5384 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5386 emit_note_copy_after (insn, prologue_end);
5392 #ifdef HAVE_epilogue
5397 /* Similarly, move any line notes that appear after the epilogue.
5398 There is no need, however, to be quite so anal about the existence
5399 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5400 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5402 for (insn = epilogue_end; insn; insn = next)
5404 next = NEXT_INSN (insn);
5406 && (NOTE_LINE_NUMBER (insn) > 0
5407 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5408 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5409 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5415 /* Reposition the prologue-end and epilogue-begin notes after instruction
5416 scheduling and delayed branch scheduling. */
/* After instruction scheduling and delayed-branch scheduling, move the
   NOTE_INSN_PROLOGUE_END note to just after the last prologue insn, and
   the NOTE_INSN_EPILOGUE_BEG note to just before the first epilogue
   insn, scanning the insn chain starting at F.  NOTE(review): the
   counting logic that decrements LEN per matched insn is elided here.  */
5419 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5421 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5422 rtx insn, last, note;
5425 if ((len = VEC_length (int, prologue)) > 0)
5429 /* Scan from the beginning until we reach the last prologue insn.
5430 We apparently can't depend on basic_block_{head,end} after
5432 for (insn = f; insn; insn = NEXT_INSN (insn))
5436 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5439 else if (contains (insn, &prologue))
5449 /* Find the prologue-end note if we haven't already, and
5450 move it to just after the last prologue insn. */
5453 for (note = last; (note = NEXT_INSN (note));)
5455 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5459 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5461 last = NEXT_INSN (last);
5462 reorder_insns (note, note, last);
5466 if ((len = VEC_length (int, epilogue)) > 0)
5470 /* Scan from the end until we reach the first epilogue insn.
5471 We apparently can't depend on basic_block_{head,end} after
5473 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5477 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5480 else if (contains (insn, &epilogue))
5490 /* Find the epilogue-begin note if we haven't already, and
5491 move it to just before the first epilogue insn. */
5494 for (note = insn; (note = PREV_INSN (note));)
5496 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5500 if (PREV_INSN (last) != note)
5501 reorder_insns (note, note, PREV_INSN (last));
5507 /* Resets insn_block_boundaries array. */
/* (Re)initialize cfun->ib_boundaries_block, the per-insn-UID vector of
   BLOCK boundaries, and push a NULL_TREE sentinel for UID 0.  */
5510 reset_block_changes (void)
5512 cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5513 VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5516 /* Record the boundary for BLOCK. */
/* Record BLOCK as the lexical block in effect from the previous boundary
   up to the current insn UID: pad the vector with the last recorded
   block up to the current max UID, then push BLOCK.  Does nothing when
   the boundaries vector was never initialized.
   NOTE(review): lines computing N (presumably get_max_uid ()) and an
   early-return guard are elided from this listing.  */
5518 record_block_change (tree block)
5526 if(!cfun->ib_boundaries_block)
5529 last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5531 for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5532 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5534 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5537 /* Finishes record of boundaries. */
/* Close out block-boundary recording by attributing any remaining insns
   to the function's outermost block (DECL_INITIAL).  */
5538 void finalize_block_changes (void)
5540 record_block_change (DECL_INITIAL (current_function_decl));
5543 /* For INSN return the BLOCK it belongs to. */
/* For INSN, store into *BLOCK the lexical BLOCK it belongs to, looked up
   by INSN_UID in cfun->ib_boundaries_block; leaves *BLOCK untouched for
   UIDs beyond the recorded range.  */
5545 check_block_change (rtx insn, tree *block)
5547 unsigned uid = INSN_UID (insn);
5549 if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5552 *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5555 /* Releases the ib_boundaries_block records. */
/* Release the ib_boundaries_block vector once it is no longer needed.  */
5557 free_block_changes (void)
5559 VEC_free (tree, gc, cfun->ib_boundaries_block);
5562 /* Returns the name of the current function. */
/* Return the printable name of the function currently being compiled,
   as produced by the language front end's decl_printable_name hook
   (verbosity level 2).  */
5564 current_function_name (void)
5566 return lang_hooks.decl_printable_name (cfun->decl, 2);
/* Pass body: on targets defining LEAF_REGISTERS, decide whether the
   current function may use only leaf registers (optimizing, all used
   regs are leaf regs, and the function is a leaf function).  */
5571 rest_of_handle_check_leaf_regs (void)
5573 #ifdef LEAF_REGISTERS
5574 current_function_uses_only_leaf_regs
5575 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5580 /* Insert a TYPE into the used types hash table of CFUN. */
/* Insert TYPE into FUNC's used-types hash table, creating the GC'd
   table on first use.  No-op when either argument is NULL.  */
5582 used_types_insert_helper (tree type, struct function *func)
5584 if (type != NULL && func != NULL)
5588 if (func->used_types_hash == NULL)
5589 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5590 htab_eq_pointer, NULL);
5591 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5597 /* Given a type, insert it into the used hash table in cfun. */
/* Record type T as used by the current function: strip pointer and
   array layers to the underlying type, canonicalize to the main
   variant, and insert — but only when debug info is being emitted.
   NOTE(review): the loop body stripping TREE_TYPE (t) is elided.  */
5599 used_types_insert (tree t)
5601 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5603 t = TYPE_MAIN_VARIANT (t);
5604 if (debug_info_level > DINFO_LEVEL_NONE)
5605 used_types_insert_helper (t, cfun);
/* Pass descriptor wiring rest_of_handle_check_leaf_regs into the pass
   manager; all optional fields (gate, timers, properties, todo flags)
   are zero.  NOTE(review): the name/gate entries of the initializer are
   elided from this listing.  */
5608 struct tree_opt_pass pass_leaf_regs =
5612 rest_of_handle_check_leaf_regs, /* execute */
5615 0, /* static_pass_number */
5617 0, /* properties_required */
5618 0, /* properties_provided */
5619 0, /* properties_destroyed */
5620 0, /* todo_flags_start */
5621 0, /* todo_flags_finish */
5626 #include "gt-function.h"