1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
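/* Illustrative outline of the sequence described above (a hedged sketch;
   argument lists are omitted because the exact signatures are declared in
   the headers, not restated here):

     expand_function_start (...)   -- called before the body is expanded
     ... expand the body; assign_stack_local is called as slots are needed ...
     expand_function_end (...)     -- called after the body
     ... reload may call assign_stack_local again for pseudo-registers
         that do not get a hard register ...  */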
38 #include "coretypes.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
53 #include "basic-block.h"
58 #include "integrate.h"
59 #include "langhooks.h"
61 #include "cfglayout.h"
63 #ifndef LOCAL_ALIGNMENT
64 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
67 #ifndef STACK_ALIGNMENT_NEEDED
68 #define STACK_ALIGNMENT_NEEDED 1
71 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73 /* Some systems use __main in a way incompatible with its use in gcc; in these
74 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
75 give the same symbol without quotes for an alternative entry point. You
76 must define both, or neither. */
78 #define NAME__MAIN "__main"
81 /* Round a value down to the largest multiple of the required alignment
82 that does not exceed it. Avoid using division in case the value is
83 negative. Assume the alignment is a power of two. */
84 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
86 /* Similar, but round up to the next highest integer that meets the alignment. */
88 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
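/* Worked example (illustrative only): with ALIGN == 8,
   FLOOR_ROUND (-13, 8) == -16, FLOOR_ROUND (16, 8) == 16, and
   CEIL_ROUND (13, 8) == 16.  The masking gives the intended result for
   negative values, where C division would round toward zero.  */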
90 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
91 during rtl generation. If they are different register numbers, this is
92 always true. It may also be true if
93 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
94 generation. See fix_lexical_addr for details. */
96 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
97 #define NEED_SEPARATE_AP
100 /* Nonzero if function being compiled doesn't contain any calls
101 (ignoring the prologue and epilogue). This is set prior to
102 local register allocation and is valid for the remaining compiler passes. */
104 int current_function_is_leaf;
106 /* Nonzero if function being compiled doesn't contain any instructions
107 that can throw an exception. This is set prior to final. */
109 int current_function_nothrow;
111 /* Nonzero if function being compiled doesn't modify the stack pointer
112 (ignoring the prologue and epilogue). This is only valid after
113 life_analysis has run. */
114 int current_function_sp_is_unchanging;
116 /* Nonzero if the function being compiled is a leaf function which only
117 uses leaf registers. This is valid after reload (specifically after
118 sched2) and is useful only if the port defines LEAF_REGISTERS. */
119 int current_function_uses_only_leaf_regs;
121 /* Nonzero once virtual register instantiation has been done.
122 assign_stack_local uses frame_pointer_rtx when this is nonzero.
123 calls.c:emit_library_call_value_1 uses it to set up
124 post-instantiation libcalls. */
125 int virtuals_instantiated;
127 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
128 static GTY(()) int funcdef_no;
130 /* These variables hold pointers to functions to create and destroy
131 target specific, per-function data structures. */
132 struct machine_function * (*init_machine_status) (void);
134 /* The currently compiled function. */
135 struct function *cfun = 0;
137 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
138 static GTY(()) varray_type prologue;
139 static GTY(()) varray_type epilogue;
141 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue in this function. */
143 static GTY(()) varray_type sibcall_epilogue;
145 /* In order to evaluate some expressions, such as function calls returning
146 structures in memory, we need to temporarily allocate stack locations.
147 We record each allocated temporary in the following structure.
149 Associated with each temporary slot is a nesting level. When we pop up
150 one level, all temporaries associated with the previous level are freed.
151 Normally, all temporaries are freed after the execution of the statement
152 in which they were created. However, if we are inside a ({...}) grouping,
153 the result may be in a temporary and hence must be preserved. If the
154 result could be in a temporary, we preserve it if we can determine which
155 one it is in. If we cannot determine which temporary may contain the
156 result, all temporaries are preserved. A temporary is preserved by
157 pretending it was allocated at the previous nesting level.
159 Automatic variables are also assigned temporary slots, at the nesting
160 level where they are defined. They are marked as "kept" so that
161 free_temp_slots will not free them. */
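/* Illustrative use of the nesting machinery described above (hypothetical
   caller, not part of this file):

     push_temp_slots ();
     ... expand a statement; assign_stack_temp hands out slots ...
     preserve_temp_slots (result);   keep the slot RESULT may occupy
     free_temp_slots ();             release the remaining temporaries
     pop_temp_slots ();
*/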
163 struct temp_slot GTY(())
165 /* Points to next temporary slot. */
166 struct temp_slot *next;
167 /* Points to previous temporary slot. */
168 struct temp_slot *prev;
170 /* The rtx used to reference the slot. */
172 /* The rtx used to represent the address if not the address of the
173 slot above. May be an EXPR_LIST if multiple addresses exist. */
175 /* The alignment (in bits) of the slot. */
177 /* The size, in units, of the slot. */
179 /* The type of the object in the slot, or zero if it doesn't correspond
180 to a type. We use this to determine whether a slot can be reused.
181 It can be reused if objects of the type of the new slot will always
182 conflict with objects of the type of the old slot. */
184 /* Nonzero if this temporary is currently in use. */
186 /* Nonzero if this temporary has its address taken. */
188 /* Nesting level at which this slot is being used. */
190 /* Nonzero if this should survive a call to free_temp_slots. */
192 /* The offset of the slot from the frame_pointer, including extra space
193 for alignment. This info is for combine_temp_slots. */
194 HOST_WIDE_INT base_offset;
195 /* The size of the slot, including extra space for alignment. This
196 info is for combine_temp_slots. */
197 HOST_WIDE_INT full_size;
200 /* Forward declarations. */
202 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
204 static struct temp_slot *find_temp_slot_from_address (rtx);
205 static void instantiate_decls (tree, int);
206 static void instantiate_decls_1 (tree, int);
207 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
208 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
209 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
210 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
211 static void pad_below (struct args_size *, enum machine_mode, tree);
212 static void reorder_blocks_1 (rtx, tree, varray_type *);
213 static void reorder_fix_fragments (tree);
214 static int all_blocks (tree, tree *);
215 static tree *get_block_vector (tree, int *);
216 extern tree debug_find_var_in_block_tree (tree, tree);
217 /* We always define `record_insns' even if it's not used so that we
218 can always export `prologue_epilogue_contains'. */
219 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
220 static int contains (rtx, varray_type);
222 static void emit_return_into_block (basic_block, rtx);
224 static void purge_single_hard_subreg_set (rtx);
225 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
226 static rtx keep_stack_depressed (rtx);
228 static void prepare_function_start (tree);
229 static void do_clobber_return_reg (rtx, void *);
230 static void do_use_return_reg (rtx, void *);
231 static void instantiate_virtual_regs_lossage (rtx);
232 static tree split_complex_args (tree);
233 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
235 /* Pointer to chain of `struct function' for containing functions. */
236 struct function *outer_function_chain;
238 /* Given a function decl for a containing function,
239 return the `struct function' for it. */
242 find_function_data (tree decl)
246 for (p = outer_function_chain; p; p = p->outer)
253 /* Save the current context for compilation of a nested function.
254 This is called from language-specific code. The caller should use
255 the enter_nested langhook to save any language-specific state,
256 since this function knows only about language-independent variables. */
260 push_function_context_to (tree context)
266 if (context == current_function_decl)
267 cfun->contains_functions = 1;
270 struct function *containing = find_function_data (context);
271 containing->contains_functions = 1;
276 init_dummy_function_start ();
279 p->outer = outer_function_chain;
280 outer_function_chain = p;
282 lang_hooks.function.enter_nested (p);
288 push_function_context (void)
290 push_function_context_to (current_function_decl);
293 /* Restore the last saved context, at the end of a nested function.
294 This function is called from language-specific code. */
297 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
299 struct function *p = outer_function_chain;
302 outer_function_chain = p->outer;
304 current_function_decl = p->decl;
307 restore_emit_status (p);
309 lang_hooks.function.leave_nested (p);
311 /* Reset variables that have known state during rtx generation. */
312 rtx_equal_function_value_matters = 1;
313 virtuals_instantiated = 0;
314 generating_concat_p = 1;
318 pop_function_context (void)
320 pop_function_context_from (current_function_decl);
323 /* Clear out all parts of the state in F that can safely be discarded
324 after the function has been parsed, but not compiled, to let
325 garbage collection reclaim the memory. */
328 free_after_parsing (struct function *f)
330 /* f->expr->forced_labels is used by code generation. */
331 /* f->emit->regno_reg_rtx is used by code generation. */
332 /* f->varasm is used by code generation. */
333 /* f->eh->eh_return_stub_label is used by code generation. */
335 lang_hooks.function.final (f);
339 /* Clear out all parts of the state in F that can safely be discarded
340 after the function has been compiled, to let garbage collection
341 reclaim the memory. */
344 free_after_compilation (struct function *f)
352 f->x_avail_temp_slots = NULL;
353 f->x_used_temp_slots = NULL;
354 f->arg_offset_rtx = NULL;
355 f->return_rtx = NULL;
356 f->internal_arg_pointer = NULL;
357 f->x_nonlocal_goto_handler_labels = NULL;
358 f->x_return_label = NULL;
359 f->x_naked_return_label = NULL;
360 f->x_stack_slot_list = NULL;
361 f->x_tail_recursion_reentry = NULL;
362 f->x_arg_pointer_save_area = NULL;
363 f->x_parm_birth_insn = NULL;
364 f->original_arg_vector = NULL;
365 f->original_decl_initial = NULL;
366 f->epilogue_delay_list = NULL;
369 /* Allocate fixed slots in the stack frame of the current function. */
371 /* Return size needed for stack frame based on slots so far allocated in function F.
373 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
374 the caller may have to do that. */
377 get_func_frame_size (struct function *f)
379 #ifdef FRAME_GROWS_DOWNWARD
380 return -f->x_frame_offset;
382 return f->x_frame_offset;
386 /* Return size needed for stack frame based on slots so far allocated.
387 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
388 the caller may have to do that. */
390 get_frame_size (void)
392 return get_func_frame_size (cfun);
395 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
396 with machine mode MODE.
398 ALIGN controls the amount of alignment for the address of the slot:
399 0 means according to MODE,
400 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
401 -2 means use BITS_PER_UNIT,
402 positive specifies alignment boundary in bits.
404 We do not round to stack_boundary here.
406 FUNCTION specifies the function to allocate in. */
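/* For example (illustrative): assign_stack_local (SImode, 4, 0) returns a
   MEM for a 4-byte slot aligned as SImode requires, while ALIGN == -1 both
   aligns the slot to BIGGEST_ALIGNMENT and rounds SIZE up to a multiple of
   it, as described above.  */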
409 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
410 struct function *function)
413 int bigend_correction = 0;
415 int frame_off, frame_alignment, frame_phase;
422 alignment = BIGGEST_ALIGNMENT;
424 alignment = GET_MODE_ALIGNMENT (mode);
426 /* Allow the target to (possibly) increase the alignment of this stack slot. */
428 type = lang_hooks.types.type_for_mode (mode, 0);
430 alignment = LOCAL_ALIGNMENT (type, alignment);
432 alignment /= BITS_PER_UNIT;
434 else if (align == -1)
436 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
437 size = CEIL_ROUND (size, alignment);
439 else if (align == -2)
440 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
442 alignment = align / BITS_PER_UNIT;
444 #ifdef FRAME_GROWS_DOWNWARD
445 function->x_frame_offset -= size;
448 /* Ignore (clamp) any alignment request that exceeds the preferred stack boundary. */
449 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
450 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
452 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
453 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
455 /* Calculate how many bytes the start of local variables is off from stack alignment. */
457 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
458 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
459 frame_phase = frame_off ? frame_alignment - frame_off : 0;
461 /* Round the frame offset to the specified alignment. The default is
462 to always honor requests to align the stack but a port may choose to
463 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
464 if (STACK_ALIGNMENT_NEEDED
468 /* We must be careful here, since FRAME_OFFSET might be negative and
469 division with a negative dividend isn't as well defined as we might
470 like. So we instead assume that ALIGNMENT is a power of two and
471 use logical operations which are unambiguous. */
472 #ifdef FRAME_GROWS_DOWNWARD
473 function->x_frame_offset
474 = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
477 function->x_frame_offset
478 = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
483 /* On a big-endian machine, if we are allocating more space than we will use,
484 use the least significant bytes of those that are allocated. */
485 if (BYTES_BIG_ENDIAN && mode != BLKmode)
486 bigend_correction = size - GET_MODE_SIZE (mode);
488 /* If we have already instantiated virtual registers, return the actual
489 address relative to the frame pointer. */
490 if (function == cfun && virtuals_instantiated)
491 addr = plus_constant (frame_pointer_rtx,
493 (frame_offset + bigend_correction
494 + STARTING_FRAME_OFFSET, Pmode));
496 addr = plus_constant (virtual_stack_vars_rtx,
498 (function->x_frame_offset + bigend_correction,
501 #ifndef FRAME_GROWS_DOWNWARD
502 function->x_frame_offset += size;
505 x = gen_rtx_MEM (mode, addr);
507 function->x_stack_slot_list
508 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
513 /* Wrapper around assign_stack_local_1; assign a local stack slot for the current function. */
517 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
519 return assign_stack_local_1 (mode, size, align, cfun);
523 /* Removes temporary slot TEMP from LIST. */
526 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
529 temp->next->prev = temp->prev;
531 temp->prev->next = temp->next;
535 temp->prev = temp->next = NULL;
538 /* Inserts temporary slot TEMP to LIST. */
541 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
545 (*list)->prev = temp;
550 /* Returns the list of used temp slots at LEVEL. */
552 static struct temp_slot **
553 temp_slots_at_level (int level)
557 if (!used_temp_slots)
558 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
560 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
561 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
563 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
566 /* Returns the maximal temporary slot level. */
569 max_slot_level (void)
571 if (!used_temp_slots)
574 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
577 /* Moves temporary slot TEMP to LEVEL. */
580 move_slot_to_level (struct temp_slot *temp, int level)
582 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
583 insert_slot_to_list (temp, temp_slots_at_level (level));
587 /* Make temporary slot TEMP available. */
590 make_slot_available (struct temp_slot *temp)
592 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
593 insert_slot_to_list (temp, &avail_temp_slots);
598 /* Allocate a temporary stack slot and record it for possible later
601 MODE is the machine mode to be given to the returned rtx.
603 SIZE is the size in units of the space required. We do no rounding here
604 since assign_stack_local will do any required rounding.
606 KEEP is 1 if this slot is to be retained after a call to
607 free_temp_slots. Automatic variables for a block are allocated
608 with this flag. KEEP is 2 if we allocate a longer term temporary,
609 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
610 if we are to allocate something at an inner level to be treated as
611 a variable in the block (e.g., a SAVE_EXPR).
613 TYPE is the type that will be used for the stack slot. */
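/* Summary of the KEEP values, restating the level assignments made below:
   KEEP == 2 files the slot at target_temp_slot_level, KEEP == 3 at
   var_temp_slot_level, and anything else at the current temp_slot_level,
   which determines when free_temp_slots or pop_temp_slots releases it.  */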
616 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
620 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
623 /* If SIZE is -1 it means that somebody tried to allocate a temporary
624 of a variable size. */
629 align = BIGGEST_ALIGNMENT;
631 align = GET_MODE_ALIGNMENT (mode);
634 type = lang_hooks.types.type_for_mode (mode, 0);
637 align = LOCAL_ALIGNMENT (type, align);
639 /* Try to find an available, already-allocated temporary of the proper
640 mode which meets the size and alignment requirements. Choose the
641 smallest one with the closest alignment. */
642 for (p = avail_temp_slots; p; p = p->next)
644 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
645 && objects_must_conflict_p (p->type, type)
646 && (best_p == 0 || best_p->size > p->size
647 || (best_p->size == p->size && best_p->align > p->align)))
649 if (p->align == align && p->size == size)
652 cut_slot_from_list (selected, &avail_temp_slots);
660 /* Make our best, if any, the one to use. */
664 cut_slot_from_list (selected, &avail_temp_slots);
666 /* If there are enough aligned bytes left over, make them into a new
667 temp_slot so that the extra bytes don't get wasted. Do this only
668 for BLKmode slots, so that we can be sure of the alignment. */
669 if (GET_MODE (best_p->slot) == BLKmode)
671 int alignment = best_p->align / BITS_PER_UNIT;
672 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
674 if (best_p->size - rounded_size >= alignment)
676 p = ggc_alloc (sizeof (struct temp_slot));
677 p->in_use = p->addr_taken = 0;
678 p->size = best_p->size - rounded_size;
679 p->base_offset = best_p->base_offset + rounded_size;
680 p->full_size = best_p->full_size - rounded_size;
681 p->slot = gen_rtx_MEM (BLKmode,
682 plus_constant (XEXP (best_p->slot, 0),
684 p->align = best_p->align;
686 p->type = best_p->type;
687 insert_slot_to_list (p, &avail_temp_slots);
689 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
692 best_p->size = rounded_size;
693 best_p->full_size = rounded_size;
698 /* If we still didn't find one, make a new temporary. */
701 HOST_WIDE_INT frame_offset_old = frame_offset;
703 p = ggc_alloc (sizeof (struct temp_slot));
705 /* We are passing an explicit alignment request to assign_stack_local.
706 One side effect of that is assign_stack_local will not round SIZE
707 to ensure the frame offset remains suitably aligned.
709 So for requests which depended on the rounding of SIZE, we go ahead
710 and round it now. We also make sure ALIGNMENT is at least
711 BIGGEST_ALIGNMENT. */
712 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
714 p->slot = assign_stack_local (mode,
716 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
722 /* The following slot size computation is necessary because we don't
723 know the actual size of the temporary slot until assign_stack_local
724 has performed all the frame alignment and size rounding for the
725 requested temporary. Note that extra space added for alignment
726 can be either above or below this stack slot depending on which
727 way the frame grows. We include the extra space if and only if it
728 is above this slot. */
729 #ifdef FRAME_GROWS_DOWNWARD
730 p->size = frame_offset_old - frame_offset;
735 /* Now define the fields used by combine_temp_slots. */
736 #ifdef FRAME_GROWS_DOWNWARD
737 p->base_offset = frame_offset;
738 p->full_size = frame_offset_old - frame_offset;
740 p->base_offset = frame_offset_old;
741 p->full_size = frame_offset - frame_offset_old;
755 p->level = target_temp_slot_level;
760 p->level = var_temp_slot_level;
765 p->level = temp_slot_level;
769 pp = temp_slots_at_level (p->level);
770 insert_slot_to_list (p, pp);
772 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
773 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
774 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
776 /* If we know the alias set for the memory that will be used, use
777 it. If there's no TYPE, then we don't know anything about the
778 alias set for the memory. */
779 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
780 set_mem_align (slot, align);
782 /* If a type is specified, set the relevant flags. */
785 RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
786 && TYPE_READONLY (type));
787 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
788 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
794 /* Allocate a temporary stack slot and record it for possible later
795 reuse. First three arguments are same as in preceding function. */
798 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
800 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
803 /* Assign a temporary.
804 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
805 and so that should be used in error messages. In either case, we
806 allocate a temporary of the given type.
807 KEEP is as for assign_stack_temp.
808 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
809 it is 0 if a register is OK.
810 DONT_PROMOTE is 1 if we should not promote values in register variables. */
814 assign_temp (tree type_or_decl, int keep, int memory_required,
815 int dont_promote ATTRIBUTE_UNUSED)
818 enum machine_mode mode;
823 if (DECL_P (type_or_decl))
824 decl = type_or_decl, type = TREE_TYPE (decl);
826 decl = NULL, type = type_or_decl;
828 mode = TYPE_MODE (type);
830 unsignedp = TYPE_UNSIGNED (type);
833 if (mode == BLKmode || memory_required)
835 HOST_WIDE_INT size = int_size_in_bytes (type);
839 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
840 problems with allocating the stack space. */
844 /* Unfortunately, we don't yet know how to allocate variable-sized
845 temporaries. However, sometimes we have a fixed upper limit on
846 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
847 instead. This is the case for Chill variable-sized strings. */
848 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
849 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
850 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
851 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
853 /* If we still haven't been able to get a size, see if the language
854 can compute a maximum size. */
856 && (size_tree = lang_hooks.type_max_size (type)) != 0
857 && host_integerp (size_tree, 1))
858 size = tree_low_cst (size_tree, 1);
860 /* The size of the temporary may be too large to fit into an integer. */
861 /* ??? Not sure this should happen except for user silliness, so limit
862 this to things that aren't compiler-generated temporaries. The
863 rest of the time we'll abort in assign_stack_temp_for_type. */
864 if (decl && size == -1
865 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
867 error ("%Jsize of variable '%D' is too large", decl, decl);
871 tmp = assign_stack_temp_for_type (mode, size, keep, type);
877 mode = promote_mode (type, mode, &unsignedp, 0);
880 return gen_reg_rtx (mode);
883 /* Combine temporary stack slots which are adjacent on the stack.
885 This allows for better use of already allocated stack space. This is only
886 done for BLKmode slots because we can be sure that we won't have alignment
887 problems in this case. */
890 combine_temp_slots (void)
892 struct temp_slot *p, *q, *next, *next_q;
895 /* We can't combine slots, because the information about which slot
896 is in which alias set will be lost. */
897 if (flag_strict_aliasing)
900 /* If there are a lot of temp slots, don't do anything unless
901 expensive optimizations are enabled.  */
902 if (! flag_expensive_optimizations)
903 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
904 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
907 for (p = avail_temp_slots; p; p = next)
913 if (GET_MODE (p->slot) != BLKmode)
916 for (q = p->next; q; q = next_q)
922 if (GET_MODE (q->slot) != BLKmode)
925 if (p->base_offset + p->full_size == q->base_offset)
927 /* Q comes after P; combine Q into P. */
929 p->full_size += q->full_size;
932 else if (q->base_offset + q->full_size == p->base_offset)
934 /* P comes after Q; combine P into Q. */
936 q->full_size += p->full_size;
941 cut_slot_from_list (q, &avail_temp_slots);
944 /* Either delete P or advance past it. */
946 cut_slot_from_list (p, &avail_temp_slots);
950 /* Find the temp slot corresponding to the object at address X. */
952 static struct temp_slot *
953 find_temp_slot_from_address (rtx x)
959 for (i = max_slot_level (); i >= 0; i--)
960 for (p = *temp_slots_at_level (i); p; p = p->next)
962 if (XEXP (p->slot, 0) == x
964 || (GET_CODE (x) == PLUS
965 && XEXP (x, 0) == virtual_stack_vars_rtx
966 && GET_CODE (XEXP (x, 1)) == CONST_INT
967 && INTVAL (XEXP (x, 1)) >= p->base_offset
968 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
971 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
972 for (next = p->address; next; next = XEXP (next, 1))
973 if (XEXP (next, 0) == x)
977 /* If we have a sum involving a register, see if it points to a temp
979 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
980 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
982 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
983 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
989 /* Indicate that NEW is an alternate way of referring to the temp slot
990 that previously was known by OLD. */
993 update_temp_slot_address (rtx old, rtx new)
997 if (rtx_equal_p (old, new))
1000 p = find_temp_slot_from_address (old);
1002 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1003 is a register, see if one operand of the PLUS is a temporary
1004 location; if so, NEW points into it. Otherwise, if both OLD and
1005 NEW are PLUS expressions with a register in common between them,
1006 try a recursive call on the remaining operands. */
1009 if (GET_CODE (old) != PLUS)
1014 update_temp_slot_address (XEXP (old, 0), new);
1015 update_temp_slot_address (XEXP (old, 1), new);
1018 else if (GET_CODE (new) != PLUS)
1021 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1022 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1023 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1024 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1025 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1026 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1027 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1028 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1033 /* Otherwise add an alias for the temp's address. */
1034 else if (p->address == 0)
1038 if (GET_CODE (p->address) != EXPR_LIST)
1039 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1041 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1045 /* If X could be a reference to a temporary slot, mark the fact that its
1046 address was taken. */
1049 mark_temp_addr_taken (rtx x)
1051 struct temp_slot *p;
1056 /* If X is not in memory or is at a constant address, it cannot be in
1057 a temporary slot. */
1058 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1061 p = find_temp_slot_from_address (XEXP (x, 0));
1066 /* If X could be a reference to a temporary slot, mark that slot as
1067 belonging to the level one higher than the current level. If X
1068 matched one of our slots, just mark that one. Otherwise, we can't
1069 easily predict which it is, so upgrade all of them. Kept slots
1070 need not be touched.
1072 This is called when an ({...}) construct occurs and a statement
1073 returns a value in memory. */
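/* For instance (illustrative GNU C), in

     x = ({ struct S tmp = foo (); tmp; });

   the statement expression's value may live in a temporary slot, so this
   function is called on the result to keep that slot alive at the enclosing
   nesting level instead of freeing it with the inner statement.  */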
1076 preserve_temp_slots (rtx x)
1078 struct temp_slot *p = 0, *next;
1080 /* If there is no result, we still might have some objects whose address
1081 were taken, so we need to make sure they stay around. */
1084 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1089 move_slot_to_level (p, temp_slot_level - 1);
1095 /* If X is a register that is being used as a pointer, see if we have
1096 a temporary slot we know it points to. To be consistent with
1097 the code below, we really should preserve all non-kept slots
1098 if we can't find a match, but that seems to be much too costly. */
1099 if (REG_P (x) && REG_POINTER (x))
1100 p = find_temp_slot_from_address (x);
1102 /* If X is not in memory or is at a constant address, it cannot be in
1103 a temporary slot, but it can contain something whose address was taken. */
1105 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1107 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1112 move_slot_to_level (p, temp_slot_level - 1);
1118 /* First see if we can find a match. */
1120 p = find_temp_slot_from_address (XEXP (x, 0));
1124 /* Move everything at our level whose address was taken to our new
1125 level in case we used its address. */
1126 struct temp_slot *q;
1128 if (p->level == temp_slot_level)
1130 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1134 if (p != q && q->addr_taken)
1135 move_slot_to_level (q, temp_slot_level - 1);
1138 move_slot_to_level (p, temp_slot_level - 1);
1144 /* Otherwise, preserve all non-kept slots at this level. */
1145 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1150 move_slot_to_level (p, temp_slot_level - 1);
1154 /* Free all temporaries used so far. This is normally called at the
1155 end of generating code for a statement. */
1158 free_temp_slots (void)
1160 struct temp_slot *p, *next;
1162 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1167 make_slot_available (p);
1170 combine_temp_slots ();
1173 /* Push deeper into the nesting level for stack temporaries. */
1176 push_temp_slots (void)
1181 /* Pop a temporary nesting level. All slots in use in the current level are freed. */
1185 pop_temp_slots (void)
1187 struct temp_slot *p, *next;
1189 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1192 make_slot_available (p);
1195 combine_temp_slots ();
1200 /* Initialize temporary slots. */
1203 init_temp_slots (void)
1205 /* We have not allocated any temporaries yet. */
1206 avail_temp_slots = 0;
1207 used_temp_slots = 0;
1208 temp_slot_level = 0;
1209 var_temp_slot_level = 0;
1210 target_temp_slot_level = 0;
1213 /* These routines are responsible for converting virtual register references
1214 to the actual hard register references once RTL generation is complete.
1216 The following variables are used for communication between the
1217 routines. They contain the offsets of the virtual registers from their
1218 respective hard registers. */
1220 static int in_arg_offset;
1221 static int var_offset;
1222 static int dynamic_offset;
1223 static int out_arg_offset;
1224 static int cfa_offset;
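/* Summary of the replacements made by instantiate_new_reg below; each
   virtual register becomes a hard register plus one of the offsets above:

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> arg_pointer_rtx   + cfa_offset  */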
1226 /* In most machines, the stack pointer register is equivalent to the bottom of the stack. */
1229 #ifndef STACK_POINTER_OFFSET
1230 #define STACK_POINTER_OFFSET 0
1233 /* If not defined, pick an appropriate default for the offset of dynamically
1234 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1235 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1237 #ifndef STACK_DYNAMIC_OFFSET
1239 /* The bottom of the stack points to the actual arguments. If
1240 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1241 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1242 stack space for register parameters is not pushed by the caller, but
1243 rather part of the fixed stack areas and hence not included in
1244 `current_function_outgoing_args_size'. Nevertheless, we must allow
1245 for it when allocating stack dynamic objects. */
1247 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1248 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1249 ((ACCUMULATE_OUTGOING_ARGS \
1250 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1251 + (STACK_POINTER_OFFSET)) \
1254 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1255 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1256 + (STACK_POINTER_OFFSET))
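/* For example (illustrative): on a target where ACCUMULATE_OUTGOING_ARGS is
   nonzero, dynamic allocations start just above the outgoing argument block
   (current_function_outgoing_args_size bytes, plus REG_PARM_STACK_SPACE when
   that applies), offset by STACK_POINTER_OFFSET.  */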
1260 /* On most machines, the CFA coincides with the first incoming parm. */
1262 #ifndef ARG_POINTER_CFA_OFFSET
1263 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
1267 /* Convert a SET of a hard subreg to a set of the appropriate hard
1268 register. A subroutine of purge_hard_subreg_sets. */
1271 purge_single_hard_subreg_set (rtx pattern)
1273 rtx reg = SET_DEST (pattern);
1274 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
1277 if (GET_CODE (reg) == SUBREG && REG_P (SUBREG_REG (reg))
1278 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
1280 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
1281 GET_MODE (SUBREG_REG (reg)),
1284 reg = SUBREG_REG (reg);
1288 if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
1290 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
1291 SET_DEST (pattern) = reg;
1295 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
1296 only such SETs that we expect to see are those left in because
1297 integrate can't handle sets of parts of a return value register.
1299 We don't use alter_subreg because we only want to eliminate subregs
1300 of hard registers. */
1303 purge_hard_subreg_sets (rtx insn)
1305 for (; insn; insn = NEXT_INSN (insn))
1309 rtx pattern = PATTERN (insn);
1310 switch (GET_CODE (pattern))
1313 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
1314 purge_single_hard_subreg_set (pattern);
1319 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
1321 rtx inner_pattern = XVECEXP (pattern, 0, j);
1322 if (GET_CODE (inner_pattern) == SET
1323 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
1324 purge_single_hard_subreg_set (inner_pattern);
1335 /* Pass through the INSNS of function FNDECL and convert virtual register
1336 references to hard register references. */
1339 instantiate_virtual_regs (void)
1343 /* Compute the offsets to use for this function. */
1344 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1345 var_offset = STARTING_FRAME_OFFSET;
1346 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1347 out_arg_offset = STACK_POINTER_OFFSET;
1348 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1350 /* Scan all variables and parameters of this function. For each that is
1351 in memory, instantiate all virtual registers if the result is a valid
1352 address. If not, we do it later. That will handle most uses of virtual
1353 regs on many machines. */
1354 instantiate_decls (current_function_decl, 1);
1356 /* Initialize recognition, indicating that volatile is OK. */
1359 /* Scan through all the insns, instantiating every virtual register still
1361 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1362 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1363 || GET_CODE (insn) == CALL_INSN)
1365 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1366 if (INSN_DELETED_P (insn))
1368 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1369 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1370 if (GET_CODE (insn) == CALL_INSN)
1371 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
1374 /* Past this point all ASM statements should match. Verify that
1375 to avoid failures later in the compilation process. */
1376 if (asm_noperands (PATTERN (insn)) >= 0
1377 && ! check_asm_operands (PATTERN (insn)))
1378 instantiate_virtual_regs_lossage (insn);
1381 /* Now instantiate the remaining register equivalences for debugging info.
1382 These will not be valid addresses. */
1383 instantiate_decls (current_function_decl, 0);
1385 /* Indicate that, from now on, assign_stack_local should use
1386 frame_pointer_rtx. */
1387 virtuals_instantiated = 1;
1390 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1391 all virtual registers in their DECL_RTL's.
1393 If VALID_ONLY, do this only if the resulting address is still valid.
1394 Otherwise, always do it. */
1397 instantiate_decls (tree fndecl, int valid_only)
1401 /* Process all parameters of the function. */
1402 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1404 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
1405 HOST_WIDE_INT size_rtl;
1407 instantiate_decl (DECL_RTL (decl), size, valid_only);
1409 /* If the parameter was promoted, then the incoming RTL mode may be
1410 larger than the declared type size. We must use the larger of the two sizes. */
1412 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
1413 size = MAX (size_rtl, size);
1414 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
1417 /* Now process all variables defined in the function or its subblocks. */
1418 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1421 /* Subroutine of instantiate_decls: Process all decls in the given
1422 BLOCK node and all its subblocks. */
1425 instantiate_decls_1 (tree let, int valid_only)
1429 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1430 if (DECL_RTL_SET_P (t))
1431 instantiate_decl (DECL_RTL (t),
1432 int_size_in_bytes (TREE_TYPE (t)),
1435 /* Process all subblocks. */
1436 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1437 instantiate_decls_1 (t, valid_only);
1440 /* Subroutine of the preceding procedures: Given RTL representing a
1441 decl and the size of the object, do any instantiation required.
1443 If VALID_ONLY is nonzero, it means that the RTL should only be
1444 changed if the new address is valid. */
1447 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
1449 enum machine_mode mode;
1452 /* If this is not a MEM, no need to do anything. Similarly if the
1453 address is a constant or a register that is not a virtual register. */
1455 if (x == 0 || !MEM_P (x))
1459 if (CONSTANT_P (addr)
1461 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1462 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1465 /* If we should only do this if the address is valid, copy the address.
1466 We need to do this so we can undo any changes that might make the
1467 address invalid. This copy is unfortunate, but probably can't be avoided. */
1471 addr = copy_rtx (addr);
1473 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
1475 if (valid_only && size >= 0)
1477 unsigned HOST_WIDE_INT decl_size = size;
1479 /* Now verify that the resulting address is valid for every integer or
1480 floating-point mode up to and including SIZE bytes long. We do this
1481 since the object might be accessed in any mode and frame addresses are shared. */
1484 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1486 mode = GET_MODE_WIDER_MODE (mode))
1487 if (! memory_address_p (mode, addr))
1490 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1491 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1492 mode = GET_MODE_WIDER_MODE (mode))
1493 if (! memory_address_p (mode, addr))
1497 /* Put back the address now that we have updated it and we either know
1498 it is valid or we don't care whether it is valid. */
1503 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1504 is a virtual register, return the equivalent hard register and set the
1505 offset indirectly through the pointer. Otherwise, return 0. */
1508 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1511 HOST_WIDE_INT offset;
1513 if (x == virtual_incoming_args_rtx)
1514 new = arg_pointer_rtx, offset = in_arg_offset;
1515 else if (x == virtual_stack_vars_rtx)
1516 new = frame_pointer_rtx, offset = var_offset;
1517 else if (x == virtual_stack_dynamic_rtx)
1518 new = stack_pointer_rtx, offset = dynamic_offset;
1519 else if (x == virtual_outgoing_args_rtx)
1520 new = stack_pointer_rtx, offset = out_arg_offset;
1521 else if (x == virtual_cfa_rtx)
1522 new = arg_pointer_rtx, offset = cfa_offset;
1531 /* Called when instantiate_virtual_regs has failed to update the instruction.
1532 Usually this means that a non-matching instruction has been emitted; for
1533 asm statements the problem may instead be in the constraints.
1535 instantiate_virtual_regs_lossage (rtx insn)
1537 if (asm_noperands (PATTERN (insn)) >= 0)
1539 error_for_asm (insn, "impossible constraint in `asm'");
1545 /* Given a pointer to a piece of rtx and an optional pointer to the
1546 containing object, instantiate any virtual registers present in it.
1548 If EXTRA_INSNS, we always do the replacement and generate
1549 any extra insns before OBJECT. If it is zero, we do nothing if replacement is not valid.
1552 Return 1 if we either had nothing to do or if we were able to do the
1553 needed replacement. Return 0 otherwise; we only return zero if
1554 EXTRA_INSNS is zero.
1556 We first try some simple transformations to avoid the creation of extra pseudos. */
1560 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
1565 HOST_WIDE_INT offset = 0;
1571 /* Re-start here to avoid recursion in common cases. */
1578 /* We may have detected and deleted invalid asm statements. */
1579 if (object && INSN_P (object) && INSN_DELETED_P (object))
1582 code = GET_CODE (x);
1584 /* Check for some special cases. */
1602 /* We are allowed to set the virtual registers. This means that
1603 the actual register should receive the source minus the
1604 appropriate offset. This is used, for example, in the handling
1605 of non-local gotos. */
1606 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
1608 rtx src = SET_SRC (x);
1610 /* We are setting the register, not using it, so the relevant
1611 offset is the negative of the offset to use were we using the register. */
1614 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1616 /* The only valid sources here are PLUS or REG. Just do
1617 the simplest possible thing to handle them. */
1618 if (!REG_P (src) && GET_CODE (src) != PLUS)
1620 instantiate_virtual_regs_lossage (object);
1626 temp = force_operand (src, NULL_RTX);
1629 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
1633 emit_insn_before (seq, object);
1636 if (! validate_change (object, &SET_SRC (x), temp, 0)
1638 instantiate_virtual_regs_lossage (object);
1643 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1648 /* Handle special case of virtual register plus constant. */
1649 if (CONSTANT_P (XEXP (x, 1)))
1651 rtx old, new_offset;
1653 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1654 if (GET_CODE (XEXP (x, 0)) == PLUS)
1656 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1658 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1660 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1669 #ifdef POINTERS_EXTEND_UNSIGNED
1670 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1671 we can commute the PLUS and SUBREG because pointers into the
1672 frame are well-behaved. */
1673 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1674 && GET_CODE (XEXP (x, 1)) == CONST_INT
1676 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1678 && validate_change (object, loc,
1679 plus_constant (gen_lowpart (ptr_mode,
1682 + INTVAL (XEXP (x, 1))),
1686 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
1688 /* We know the second operand is a constant. Unless the
1689 first operand is a REG (which has already been checked),
1690 it needs to be checked. */
1691 if (!REG_P (XEXP (x, 0)))
1699 new_offset = plus_constant (XEXP (x, 1), offset);
1701 /* If the new constant is zero, try to replace the sum with just the register. */
1703 if (new_offset == const0_rtx
1704 && validate_change (object, loc, new, 0))
1707 /* Next try to replace the register and new offset.
1708 There are two changes to validate here and we can't assume that
1709 when the old offset equals the new one, just changing the register
1710 will yield a valid insn. In the interests of a little efficiency,
1711 however, we only call validate change once (we don't queue up the
1712 changes and then call apply_change_group). */
1716 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1717 : (XEXP (x, 0) = new,
1718 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
1726 /* Otherwise copy the new constant into a register and replace
1727 constant with that register. */
1728 temp = gen_reg_rtx (Pmode);
1730 if (validate_change (object, &XEXP (x, 1), temp, 0))
1731 emit_insn_before (gen_move_insn (temp, new_offset), object);
1734 /* If that didn't work, replace this expression with a
1735 register containing the sum. */
1738 new = gen_rtx_PLUS (Pmode, new, new_offset);
1741 temp = force_operand (new, NULL_RTX);
1745 emit_insn_before (seq, object);
1746 if (! validate_change (object, loc, temp, 0)
1747 && ! validate_replace_rtx (x, temp, object))
1749 instantiate_virtual_regs_lossage (object);
1758 /* Fall through to generic two-operand expression case. */
1764 case DIV: case UDIV:
1765 case MOD: case UMOD:
1766 case AND: case IOR: case XOR:
1767 case ROTATERT: case ROTATE:
1768 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
1770 case GE: case GT: case GEU: case GTU:
1771 case LE: case LT: case LEU: case LTU:
1772 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1773 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1778 /* Most cases of MEM that convert to valid addresses have already been
1779 handled by our scan of decls. The only special handling we
1780 need here is to make a copy of the rtx to ensure it isn't being
1781 shared if we have to change it to a pseudo.
1783 If the rtx is a simple reference to an address via a virtual register,
1784 it can potentially be shared. In such cases, first try to make it
1785 a valid address, which can also be shared. Otherwise, copy it and
1788 First check for common cases that need no processing. These are
1789 usually due to instantiation already being done on a previous instance
1793 if (CONSTANT_ADDRESS_P (temp)
1794 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1795 || temp == arg_pointer_rtx
1797 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1798 || temp == hard_frame_pointer_rtx
1800 || temp == frame_pointer_rtx)
1803 if (GET_CODE (temp) == PLUS
1804 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1805 && (XEXP (temp, 0) == frame_pointer_rtx
1806 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1807 || XEXP (temp, 0) == hard_frame_pointer_rtx
1809 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1810 || XEXP (temp, 0) == arg_pointer_rtx
1815 if (temp == virtual_stack_vars_rtx
1816 || temp == virtual_incoming_args_rtx
1817 || (GET_CODE (temp) == PLUS
1818 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1819 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1820 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1822 /* This MEM may be shared. If the substitution can be done without
1823 the need to generate new pseudos, we want to do it in place
1824 so all copies of the shared rtx benefit. The call below will
1825 only make substitutions if the resulting address is still
1828 Note that we cannot pass X as the object in the recursive call
1829 since the insn being processed may not allow all valid
1830 addresses. However, if we were not passed an object, we can
1831 only modify X without copying it if X will have a valid address.
1834 ??? Also note that this can still lose if OBJECT is an insn that
1835 has fewer restrictions on an address than some other insn.
1836 In that case, we will modify the shared address. This case
1837 doesn't seem very likely, though. One case where this could
1838 happen is in the case of a USE or CLOBBER reference, but we
1839 take care of that below. */
1841 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1842 object ? object : x, 0))
1845 /* Otherwise make a copy and process that copy. We copy the entire
1846 RTL expression since it might be a PLUS which could also be shared. */
1848 *loc = x = copy_rtx (x);
1851 /* Fall through to generic unary operation case. */
1854 case STRICT_LOW_PART:
1856 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1857 case SIGN_EXTEND: case ZERO_EXTEND:
1858 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1859 case FLOAT: case FIX:
1860 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1865 case POPCOUNT: case PARITY:
1866 /* These cases either have just one operand or we know that we need not
1867 check the rest of the operands. */
1873 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1874 go ahead and make the invalid one, but do it to a copy. For a REG,
1875 just make the recursive call, since there's no chance of a problem. */
1877 if ((MEM_P (XEXP (x, 0))
1878 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1880 || (REG_P (XEXP (x, 0))
1881 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
1884 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1889 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1890 in front of this insn and substitute the temporary. */
1891 if ((new = instantiate_new_reg (x, &offset)) != 0)
1893 temp = plus_constant (new, offset);
1894 if (!validate_change (object, loc, temp, 0))
1900 temp = force_operand (temp, NULL_RTX);
1904 emit_insn_before (seq, object);
1905 if (! validate_change (object, loc, temp, 0)
1906 && ! validate_replace_rtx (x, temp, object))
1907 instantiate_virtual_regs_lossage (object);
1917 /* Scan all subexpressions. */
1918 fmt = GET_RTX_FORMAT (code);
1919 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1922 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1925 else if (*fmt == 'E')
1926 for (j = 0; j < XVECLEN (x, i); j++)
1927 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1934 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1935 This means a type for which function calls must pass an address to the
1936 function or get an address back from the function.
1937 EXP may be a type node or an expression (whose type is tested). */
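/* For example (illustrative): a type that is TREE_ADDRESSABLE, or one for
   which the target's return_in_memory hook answers yes, makes this return
   nonzero; callers then pass a hidden address for the return value rather
   than using the call-clobbered return registers checked below.  */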
1940 aggregate_value_p (tree exp, tree fntype)
1942 int i, regno, nregs;
1945 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1948 switch (TREE_CODE (fntype))
1951 fntype = get_callee_fndecl (fntype);
1952 fntype = fntype ? TREE_TYPE (fntype) : 0;
1955 fntype = TREE_TYPE (fntype);
1960 case IDENTIFIER_NODE:
1964 /* We don't expect other rtl types here. */
1968 if (TREE_CODE (type) == VOID_TYPE)
1970 if (targetm.calls.return_in_memory (type, fntype))
1972 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1973 and thus can't be returned in registers. */
1974 if (TREE_ADDRESSABLE (type))
1976 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1978 /* Make sure we have suitable call-clobbered regs to return
1979 the value in; if not, we must return it in memory. */
1980 reg = hard_function_value (type, 0, 0);
1982 /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK. */
1987 regno = REGNO (reg);
1988 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1989 for (i = 0; i < nregs; i++)
1990 if (! call_used_regs[regno + i])
1995 /* Return true if we should assign DECL a pseudo register; false if it
1996 should live on the local stack. */
1999 use_register_for_decl (tree decl)
2001 /* Honor volatile. */
2002 if (TREE_SIDE_EFFECTS (decl))
2005 /* Honor addressability. */
2006 if (TREE_ADDRESSABLE (decl))
2009 /* Only register-like things go in registers. */
2010 if (DECL_MODE (decl) == BLKmode)
2013 /* If -ffloat-store is specified, don't put explicit float variables into registers. */
2015 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2016 propagates values across these stores, and it probably shouldn't. */
2017 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2020 /* Compiler-generated temporaries can always go in registers. */
2021 if (DECL_ARTIFICIAL (decl))
2024 #ifdef NON_SAVING_SETJMP
2025 /* Protect variables not declared "register" from setjmp. */
2026 if (NON_SAVING_SETJMP
2027 && current_function_calls_setjmp
2028 && !DECL_REGISTER (decl))
2032 return (optimize || DECL_REGISTER (decl));
2035 /* Assign RTL expressions to the function's parameters.
2036 This may involve copying them into registers and using
2037 those registers as the RTL for them. */
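/* Rough summary of the loop below (a sketch, not a complete description):
   for each PARM_DECL this sets DECL_RTL (the RTL used for the parm inside
   the function) and DECL_INCOMING_RTL (where the argument actually arrives,
   a register from FUNCTION_INCOMING_ARG / FUNCTION_ARG or a stack slot),
   emitting copies between the two when they differ.  */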
2040 assign_parms (tree fndecl)
2043 CUMULATIVE_ARGS args_so_far;
2044 /* Total space needed so far for args on the stack,
2045 given as a constant and a tree-expression. */
2046 struct args_size stack_args_size;
2047 HOST_WIDE_INT extra_pretend_bytes = 0;
2048 tree fntype = TREE_TYPE (fndecl);
2049 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
2050 /* This is used for the arg pointer when referring to stack args. */
2051 rtx internal_arg_pointer;
2052 /* This is a dummy PARM_DECL that we used for the function result if
2053 the function returns a structure. */
2054 tree function_result_decl = 0;
2055 int varargs_setup = 0;
2056 int reg_parm_stack_space ATTRIBUTE_UNUSED = 0;
2057 rtx conversion_insns = 0;
2059 /* Nonzero if function takes extra anonymous args.
2060 This means the last named arg must be on the stack
2061 right before the anonymous ones. */
2062 int stdarg = current_function_stdarg;
2064 /* If the reg that the virtual arg pointer will be translated into is
2065 not a fixed reg or is the stack pointer, make a copy of the virtual
2066 arg pointer, and address parms via the copy. The frame pointer is
2067 considered fixed even though it is not marked as such.
2069 The second time through, simply use ap to avoid generating rtx. */
2071 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2072 || ! (fixed_regs[ARG_POINTER_REGNUM]
2073 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
2074 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2076 internal_arg_pointer = virtual_incoming_args_rtx;
2077 current_function_internal_arg_pointer = internal_arg_pointer;
2079 stack_args_size.constant = 0;
2080 stack_args_size.var = 0;
2082 /* If struct value address is treated as the first argument, make it so. */
2083 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2084 && ! current_function_returns_pcc_struct
2085 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2087 tree type = build_pointer_type (TREE_TYPE (fntype));
2089 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2091 DECL_ARG_TYPE (function_result_decl) = type;
2092 TREE_CHAIN (function_result_decl) = fnargs;
2093 fnargs = function_result_decl;
2096 orig_fnargs = fnargs;
2098 /* If the target wants to split complex arguments into scalars, do so. */
2099 if (targetm.calls.split_complex_arg)
2100 fnargs = split_complex_args (fnargs);
2102 #ifdef REG_PARM_STACK_SPACE
2103 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2106 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2107 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
2109 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl, -1);
2112 /* We haven't yet found an argument that we must push and pretend the caller did. */
2114 current_function_pretend_args_size = 0;
2116 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2120 enum machine_mode promoted_mode, passed_mode;
2121 enum machine_mode nominal_mode, promoted_nominal_mode;
2123 struct locate_and_pad_arg_data locate;
2124 int passed_pointer = 0;
2125 int did_conversion = 0;
2126 tree passed_type = DECL_ARG_TYPE (parm);
2127 tree nominal_type = TREE_TYPE (parm);
2128 int last_named = 0, named_arg;
2131 int pretend_bytes = 0;
2132 int loaded_in_reg = 0;
2134 /* Set LAST_NAMED if this is last named arg before last
2140 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2141 if (DECL_NAME (tem))
2147 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2148 most machines, if this is a varargs/stdarg function, then we treat
2149 the last named arg as if it were anonymous too. */
2150 named_arg = (targetm.calls.strict_argument_naming (&args_so_far)
2153 if (TREE_TYPE (parm) == error_mark_node
2154 /* This can happen after weird syntax errors
2155 or if an enum type is defined among the parms. */
2156 || TREE_CODE (parm) != PARM_DECL
2157 || passed_type == NULL)
2159 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
2160 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2161 TREE_USED (parm) = 1;
2165 /* Find mode of arg as it is passed, and mode of arg
2166 as it should be during execution of this function. */
2167 passed_mode = TYPE_MODE (passed_type);
2168 nominal_mode = TYPE_MODE (nominal_type);
2170 /* If the parm's mode is VOID, its value doesn't matter,
2171 so avoid the usual things like emit_move_insn that could crash. */
2172 if (nominal_mode == VOIDmode)
2174 SET_DECL_RTL (parm, const0_rtx);
2175 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2179 /* If the parm is to be passed as a transparent union, use the
2180 type of the first field for the tests below. We have already
2181 verified that the modes are the same. */
2182 if (DECL_TRANSPARENT_UNION (parm)
2183 || (TREE_CODE (passed_type) == UNION_TYPE
2184 && TYPE_TRANSPARENT_UNION (passed_type)))
2185 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2187 /* See if this arg was passed by invisible reference. It is if
2188 it is an object whose size depends on the contents of the
2189 object itself or if the machine requires these objects be passed
2192 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
2193 || TREE_ADDRESSABLE (passed_type)
2194 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2195 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2196 passed_type, named_arg)
2200 passed_type = nominal_type = build_pointer_type (passed_type);
2202 passed_mode = nominal_mode = Pmode;
2204 /* See if the frontend wants to pass this by invisible reference. */
2205 else if (passed_type != nominal_type
2206 && POINTER_TYPE_P (passed_type)
2207 && TREE_TYPE (passed_type) == nominal_type)
2209 nominal_type = passed_type;
2211 passed_mode = nominal_mode = Pmode;
2214 promoted_mode = passed_mode;
2216 if (targetm.calls.promote_function_args (TREE_TYPE (fndecl)))
2218 /* Compute the mode in which the arg is actually extended to. */
2219 unsignedp = TYPE_UNSIGNED (passed_type);
2220 promoted_mode = promote_mode (passed_type, promoted_mode,
2224 /* Let machine desc say which reg (if any) the parm arrives in.
2225 0 means it arrives on the stack. */
2226 #ifdef FUNCTION_INCOMING_ARG
2227 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2228 passed_type, named_arg);
2230 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
2231 passed_type, named_arg);
2234 if (entry_parm == 0)
2235 promoted_mode = passed_mode;
2237 /* If this is the last named parameter, do any required setup for
2238 varargs or stdargs. We need to know about the case of this being an
2239 addressable type, in which case we skip the registers it
2240 would have arrived in.
2242 For stdargs, LAST_NAMED will be set for two parameters, the one that
2243 is actually the last named, and the dummy parameter. We only
2244 want to do this action once.
2246 Also, indicate when RTL generation is to be suppressed. */
2247 if (last_named && !varargs_setup)
2249 int varargs_pretend_bytes = 0;
2250 targetm.calls.setup_incoming_varargs (&args_so_far, promoted_mode,
2252 &varargs_pretend_bytes, 0);
2255 /* If the back-end has requested extra stack space, record how
2256 much is needed. Do not change pretend_args_size otherwise
2257 since it may be nonzero from an earlier partial argument. */
2258 if (varargs_pretend_bytes > 0)
2259 current_function_pretend_args_size = varargs_pretend_bytes;
2262 /* Determine parm's home in the stack,
2263 in case it arrives in the stack or we should pretend it did.
2265 Compute the stack position and rtx where the argument arrives
2268 There is one complexity here: If this was a parameter that would
2269 have been passed in registers, but wasn't only because it is
2270 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2271 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2272 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2273 0 as it was the previous time. */
2274 in_regs = entry_parm != 0;
2275 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2278 if (!in_regs && !named_arg)
2281 targetm.calls.pretend_outgoing_varargs_named (&args_so_far);
2284 #ifdef FUNCTION_INCOMING_ARG
2285 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2287 pretend_named) != 0;
2289 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
2291 pretend_named) != 0;
2296 /* If this parameter was passed both in registers and in the stack,
2297 use the copy on the stack. */
2298 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
2301 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2304 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
2305 passed_type, named_arg);
2307 /* The caller might already have allocated stack space
2308 for the register parameters. */
2309 && reg_parm_stack_space == 0)
2311 /* Part of this argument is passed in registers and part
2312 is passed on the stack. Ask the prologue code to extend
2313 the stack part so that we can recreate the full value.
2315 PRETEND_BYTES is the size of the registers we need to store.
2316 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2317 stack space that the prologue should allocate.
2319 Internally, gcc assumes that the argument pointer is
2320 aligned to STACK_BOUNDARY bits. This is used both for
2321 alignment optimizations (see init_emit) and to locate
2322 arguments that are aligned to more than PARM_BOUNDARY
2323 bits. We must preserve this invariant by rounding
2324 CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to a stack
2327 /* We assume at most one partial arg, and it must be the first
2328 argument on the stack. */
2329 if (extra_pretend_bytes || current_function_pretend_args_size)
2332 pretend_bytes = partial * UNITS_PER_WORD;
2333 current_function_pretend_args_size
2334 = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2336 /* We want to align relative to the actual stack pointer, so
2337 don't include this in the stack size until later. */
2338 extra_pretend_bytes = current_function_pretend_args_size;
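/* Illustrative sketch (not part of GCC): how the pretend-args rounding
   above behaves.  The concrete UNITS_PER_WORD and STACK_BYTES values below
   are assumptions chosen only for the example.  */
#if 0
#include <assert.h>
#define EX_CEIL_ROUND(VALUE, ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
static void
example_pretend_args_rounding (void)
{
  int units_per_word = 4;     /* assumed register size in bytes */
  int stack_bytes = 16;       /* assumed STACK_BOUNDARY / BITS_PER_UNIT */
  int partial = 3;            /* words of the argument passed in registers */
  int pretend_bytes = partial * units_per_word;        /* 12 bytes */
  /* Rounded up so the argument pointer stays STACK_BOUNDARY-aligned.  */
  assert (EX_CEIL_ROUND (pretend_bytes, stack_bytes) == 16);
}
#endif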
2343 memset (&locate, 0, sizeof (locate));
2344 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
2345 entry_parm ? partial : 0, fndecl,
2346 &stack_args_size, &locate);
2347 /* Adjust offsets to include the pretend args. */
2348 locate.slot_offset.constant += extra_pretend_bytes - pretend_bytes;
2349 locate.offset.constant += extra_pretend_bytes - pretend_bytes;
2353 unsigned int align, boundary;
2355 /* If we're passing this arg using a reg, make its stack home
2356 the aligned stack slot. */
2358 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
2360 offset_rtx = ARGS_SIZE_RTX (locate.offset);
2362 if (offset_rtx == const0_rtx)
2363 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
2365 stack_parm = gen_rtx_MEM (promoted_mode,
2366 gen_rtx_PLUS (Pmode,
2367 internal_arg_pointer,
2370 set_mem_attributes (stack_parm, parm, 1);
2372 boundary = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
2375 /* If we're padding upward, we know that the alignment of the slot
2376 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2377 intentionally forcing upward padding. Otherwise we have to come
2378 up with a guess at the alignment based on OFFSET_RTX. */
2379 if (locate.where_pad == upward || entry_parm)
2381 else if (GET_CODE (offset_rtx) == CONST_INT)
2383 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2384 align = align & -align;
2387 set_mem_align (stack_parm, align);
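/* Illustrative sketch (not part of GCC): the "align & -align" idiom above
   extracts the largest power-of-two factor of the slot offset (in bits),
   which is the best alignment we can guarantee from OFFSET_RTX alone.
   The offset and boundary values below are assumptions for the example.  */
#if 0
#include <assert.h>
static void
example_alignment_guess (void)
{
  int bits_per_unit = 8;
  int boundary = 32;                      /* assumed FUNCTION_ARG_BOUNDARY */
  int offset = 12;                        /* assumed byte offset of the slot */
  int align = offset * bits_per_unit | boundary;    /* 96 | 32 == 96 */
  align = align & -align;                 /* lowest set bit: 32 */
  assert (align == 32);
}
#endif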
2390 set_reg_attrs_for_parm (entry_parm, stack_parm);
2393 /* If this parm was passed part in regs and part in memory,
2394 pretend it arrived entirely in memory
2395 by pushing the register-part onto the stack.
2397 In the special case of a DImode or DFmode that is split,
2398 we could put it together in a pseudoreg directly,
2399 but for now that's not worth bothering with. */
2403 /* Handle calls that pass values in multiple non-contiguous
2404 locations. The Irix 6 ABI has examples of this. */
2405 if (GET_CODE (entry_parm) == PARALLEL)
2406 emit_group_store (validize_mem (stack_parm), entry_parm,
2408 int_size_in_bytes (TREE_TYPE (parm)));
2411 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2414 entry_parm = stack_parm;
2417 /* If we didn't decide this parm came in a register,
2418 by default it came on the stack. */
2419 if (entry_parm == 0)
2420 entry_parm = stack_parm;
2422 /* Record permanently how this parm was passed. */
2423 set_decl_incoming_rtl (parm, entry_parm);
2425 /* If there is actually space on the stack for this parm,
2426 count it in stack_args_size; otherwise set stack_parm to 0
2427 to indicate there is no preallocated stack slot for the parm. */
2429 if (entry_parm == stack_parm
2430 || (GET_CODE (entry_parm) == PARALLEL
2431 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
2432 #if defined (REG_PARM_STACK_SPACE)
2433 /* On some machines, even if a parm value arrives in a register
2434 there is still an (uninitialized) stack slot allocated
2436 || REG_PARM_STACK_SPACE (fndecl) > 0
2440 stack_args_size.constant += locate.size.constant;
2441 if (locate.size.var)
2442 ADD_PARM_SIZE (stack_args_size, locate.size.var);
2445 /* No stack slot was pushed for this parm. */
2448 /* Update info on where next arg arrives in registers. */
2450 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
2451 passed_type, named_arg);
2453 /* If we can't trust the parm stack slot to be aligned enough
2454 for its ultimate type, don't use that slot after entry.
2455 We'll make another stack slot, if we need one. */
2456 if (STRICT_ALIGNMENT && stack_parm
2457 && GET_MODE_ALIGNMENT (nominal_mode) > MEM_ALIGN (stack_parm))
2460 /* If parm was passed in memory, and we need to convert it on entry,
2461 don't store it back in that same slot. */
2462 if (entry_parm == stack_parm
2463 && nominal_mode != BLKmode && nominal_mode != passed_mode)
2466 /* When an argument is passed in multiple locations, we can't
2467 make use of this information, but we can save some copying if
2468 the whole argument is passed in a single register. */
2469 if (GET_CODE (entry_parm) == PARALLEL
2470 && nominal_mode != BLKmode && passed_mode != BLKmode)
2472 int i, len = XVECLEN (entry_parm, 0);
2474 for (i = 0; i < len; i++)
2475 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2476 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2477 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2479 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2481 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2482 set_decl_incoming_rtl (parm, entry_parm);
2487 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2488 in the mode in which it arrives.
2489 STACK_PARM is an RTX for a stack slot where the parameter can live
2490 during the function (in case we want to put it there).
2491 STACK_PARM is 0 if no stack slot was pushed for it.
2493 Now output code if necessary to convert ENTRY_PARM to
2494 the type in which this function declares it,
2495 and store that result in an appropriate place,
2496 which may be a pseudo reg, may be STACK_PARM,
2497 or may be a local stack slot if STACK_PARM is 0.
2499 Set DECL_RTL to that place. */
2501 if (GET_CODE (entry_parm) == PARALLEL
2502 && nominal_mode != BLKmode
2503 && XVECLEN (entry_parm, 0) > 1)
2505 /* Reconstitute objects the size of a register or larger using
2506 register operations instead of the stack. */
2507 rtx parmreg = gen_reg_rtx (nominal_mode);
2509 if (REG_P (parmreg))
2511 emit_group_store (parmreg, entry_parm, TREE_TYPE (parm),
2512 int_size_in_bytes (TREE_TYPE (parm)));
2513 SET_DECL_RTL (parm, parmreg);
2518 if (nominal_mode == BLKmode
2519 #ifdef BLOCK_REG_PADDING
2520 || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2521 && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
2523 || GET_CODE (entry_parm) == PARALLEL)
2525 /* If a BLKmode arrives in registers, copy it to a stack slot.
2526 Handle calls that pass values in multiple non-contiguous
2527 locations. The Irix 6 ABI has examples of this. */
2528 if (REG_P (entry_parm)
2529 || (GET_CODE (entry_parm) == PARALLEL
2530 && (!loaded_in_reg || !optimize)))
2532 int size = int_size_in_bytes (TREE_TYPE (parm));
2533 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2536 /* Note that we will be storing an integral number of words.
2537 So we have to be careful to ensure that we allocate an
2538 integral number of words. We do this below in the
2539 assign_stack_local if space was not allocated in the argument
2540 list. If it was, this will not work if PARM_BOUNDARY is not
2541 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2542 if it becomes a problem. Exception is when BLKmode arrives
2543 with arguments not conforming to word_mode. */
2545 if (stack_parm == 0)
2547 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2548 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2549 set_mem_attributes (stack_parm, parm, 1);
2551 else if (GET_CODE (entry_parm) == PARALLEL)
2553 else if (size != 0 && PARM_BOUNDARY % BITS_PER_WORD != 0)
2556 mem = validize_mem (stack_parm);
2558 /* Handle calls that pass values in multiple non-contiguous
2559 locations. The Irix 6 ABI has examples of this. */
2560 if (GET_CODE (entry_parm) == PARALLEL)
2561 emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);
2566 /* If SIZE is that of a mode no bigger than a word, just use
2567 that mode's store operation. */
2568 else if (size <= UNITS_PER_WORD)
2570 enum machine_mode mode
2571 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2574 #ifdef BLOCK_REG_PADDING
2575 && (size == UNITS_PER_WORD
2576 || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
2577 != (BYTES_BIG_ENDIAN ? upward : downward)))
2581 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2582 emit_move_insn (change_address (mem, mode, 0), reg);
2585 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2586 machine must be aligned to the left before storing
2587 to memory. Note that the previous test doesn't
2588 handle all cases (e.g. SIZE == 3). */
2589 else if (size != UNITS_PER_WORD
2590 #ifdef BLOCK_REG_PADDING
2591 && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
2599 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2600 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2602 x = expand_binop (word_mode, ashl_optab, reg,
2603 GEN_INT (by), 0, 1, OPTAB_WIDEN);
2604 tem = change_address (mem, word_mode, 0);
2605 emit_move_insn (tem, x);
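/* Illustrative sketch (not part of GCC): the shift amount computed above
   left-justifies a small block within its word on a big-endian target.
   The sizes used below are assumptions for the example only.  */
#if 0
#include <assert.h>
static void
example_big_endian_left_justify (void)
{
  int units_per_word = 4;                 /* assumed word size in bytes */
  int bits_per_unit = 8;
  int size = 3;                           /* argument occupies 3 bytes */
  int by = (units_per_word - size) * bits_per_unit;
  /* Shifting left by 8 bits moves the 3 significant bytes to the
     most-significant end of the word, as a big-endian store expects.  */
  assert (by == 8);
}
#endif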
2608 move_block_from_reg (REGNO (entry_parm), mem,
2609 size_stored / UNITS_PER_WORD);
2612 move_block_from_reg (REGNO (entry_parm), mem,
2613 size_stored / UNITS_PER_WORD);
2615 /* If parm is already bound to register pair, don't change
2617 if (! DECL_RTL_SET_P (parm))
2618 SET_DECL_RTL (parm, stack_parm);
2620 else if (use_register_for_decl (parm)
2621 /* Always assign pseudo to structure return or item passed
2622 by invisible reference. */
2623 || passed_pointer || parm == function_result_decl)
2625 /* Store the parm in a pseudoregister during the function, but we
2626 may need to do it in a wider mode. */
2630 unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2632 promoted_nominal_mode
2633 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
2635 parmreg = gen_reg_rtx (promoted_nominal_mode);
2636 mark_user_reg (parmreg);
2638 /* If this was an item that we received a pointer to, set DECL_RTL
2642 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
2644 set_mem_attributes (x, parm, 1);
2645 SET_DECL_RTL (parm, x);
2649 SET_DECL_RTL (parm, parmreg);
2650 maybe_set_unchanging (DECL_RTL (parm), parm);
2653 /* Copy the value into the register. */
2654 if (nominal_mode != passed_mode
2655 || promoted_nominal_mode != promoted_mode)
2658 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2659 mode, by the caller. We now have to convert it to
2660 NOMINAL_MODE, if different. However, PARMREG may be in
2661 a different mode than NOMINAL_MODE if it is being stored
2664 If ENTRY_PARM is a hard register, it might be in a register
2665 not valid for operating in its mode (e.g., an odd-numbered
2666 register for a DFmode). In that case, moves are the only
2667 thing valid, so we can't do a convert from there. This
2668 occurs when the calling sequence allows such misaligned
2671 In addition, the conversion may involve a call, which could
2672 clobber parameters which haven't been copied to pseudo
2673 registers yet. Therefore, we must first copy the parm to
2674 a pseudo reg here, and save the conversion until after all
2675 parameters have been moved. */
2677 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
2679 emit_move_insn (tempreg, validize_mem (entry_parm));
2681 push_to_sequence (conversion_insns);
2682 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
2684 if (GET_CODE (tempreg) == SUBREG
2685 && GET_MODE (tempreg) == nominal_mode
2686 && REG_P (SUBREG_REG (tempreg))
2687 && nominal_mode == passed_mode
2688 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
2689 && GET_MODE_SIZE (GET_MODE (tempreg))
2690 < GET_MODE_SIZE (GET_MODE (entry_parm)))
2692 /* The argument is already sign/zero extended, so note it
2694 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2695 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2698 /* TREE_USED gets set erroneously during expand_assignment. */
2699 save_tree_used = TREE_USED (parm);
2700 expand_assignment (parm,
2701 make_tree (nominal_type, tempreg), 0);
2702 TREE_USED (parm) = save_tree_used;
2703 conversion_insns = get_insns ();
2708 emit_move_insn (parmreg, validize_mem (entry_parm));
2710 /* If we were passed a pointer but the actual value
2711 can safely live in a register, put it in one. */
2713 && use_register_for_decl (parm)
2714 /* If by-reference argument was promoted, demote it. */
2715 && TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm)))
2717 /* We can't use nominal_mode, because it will have been set to
2718 Pmode above. We must use the actual mode of the parm. */
2719 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2720 mark_user_reg (parmreg);
2721 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2723 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2724 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2725 push_to_sequence (conversion_insns);
2726 emit_move_insn (tempreg, DECL_RTL (parm));
2728 convert_to_mode (GET_MODE (parmreg),
2731 emit_move_insn (parmreg, DECL_RTL (parm));
2732 conversion_insns = get_insns ();
2737 emit_move_insn (parmreg, DECL_RTL (parm));
2738 SET_DECL_RTL (parm, parmreg);
2739 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2743 #ifdef FUNCTION_ARG_CALLEE_COPIES
2744 /* If we are passed an arg by reference and it is our responsibility
2745 to make a copy, do it now.
2746 PASSED_TYPE and PASSED mode now refer to the pointer, not the
2747 original argument, so we must recreate them in the call to
2748 FUNCTION_ARG_CALLEE_COPIES. */
2749 /* ??? Later add code to skip the copy when the argument isn't
2750 modified. */
2752 else if (passed_pointer
2753 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
2754 TYPE_MODE (TREE_TYPE (passed_type)),
2755 TREE_TYPE (passed_type),
2757 && ! TREE_ADDRESSABLE (TREE_TYPE (passed_type)))
2760 tree type = TREE_TYPE (passed_type);
2762 /* This sequence may involve a library call perhaps clobbering
2763 registers that haven't been copied to pseudos yet. */
2765 push_to_sequence (conversion_insns);
2767 if (!COMPLETE_TYPE_P (type)
2768 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2769 /* This is a variable sized object. */
2770 copy = gen_rtx_MEM (BLKmode,
2771 allocate_dynamic_stack_space
2772 (expr_size (parm), NULL_RTX,
2773 TYPE_ALIGN (type)));
2775 copy = assign_stack_temp (TYPE_MODE (type),
2776 int_size_in_bytes (type), 1);
2777 set_mem_attributes (copy, parm, 1);
2779 store_expr (parm, copy, 0);
2780 emit_move_insn (parmreg, XEXP (copy, 0));
2781 conversion_insns = get_insns ();
2785 #endif /* FUNCTION_ARG_CALLEE_COPIES */
2787 /* Mark the register as eliminable if we did no conversion
2788 and it was copied from memory at a fixed offset,
2789 and the arg pointer was not copied to a pseudo-reg.
2790 If the arg pointer is a pseudo reg or the offset formed
2791 an invalid address, such memory-equivalences
2792 as we make here would screw up life analysis for it. */
2793 if (nominal_mode == passed_mode
2796 && MEM_P (stack_parm)
2797 && locate.offset.var == 0
2798 && reg_mentioned_p (virtual_incoming_args_rtx,
2799 XEXP (stack_parm, 0)))
2801 rtx linsn = get_last_insn ();
2804 /* Mark complex types separately. */
2805 if (GET_CODE (parmreg) == CONCAT)
2807 enum machine_mode submode
2808 = GET_MODE_INNER (GET_MODE (parmreg));
2809 int regnor = REGNO (gen_realpart (submode, parmreg));
2810 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2811 rtx stackr = gen_realpart (submode, stack_parm);
2812 rtx stacki = gen_imagpart (submode, stack_parm);
2814 /* Scan backwards for the set of the real and
2816 for (sinsn = linsn; sinsn != 0;
2817 sinsn = prev_nonnote_insn (sinsn))
2819 set = single_set (sinsn);
2823 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2825 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2827 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2829 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2833 else if ((set = single_set (linsn)) != 0
2834 && SET_DEST (set) == parmreg)
2836 = gen_rtx_EXPR_LIST (REG_EQUIV,
2837 stack_parm, REG_NOTES (linsn));
2840 /* For pointer data type, suggest pointer register. */
2841 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2842 mark_reg_pointer (parmreg,
2843 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2847 /* Value must be stored in the stack slot STACK_PARM
2848 during function execution. */
2850 if (promoted_mode != nominal_mode)
2852 /* Conversion is required. */
2853 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
2855 emit_move_insn (tempreg, validize_mem (entry_parm));
2857 push_to_sequence (conversion_insns);
2858 entry_parm = convert_to_mode (nominal_mode, tempreg,
2859 TYPE_UNSIGNED (TREE_TYPE (parm)));
2861 /* ??? This may need a big-endian conversion on sparc64. */
2862 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
2864 conversion_insns = get_insns ();
2869 if (entry_parm != stack_parm)
2871 if (stack_parm == 0)
2874 = assign_stack_local (GET_MODE (entry_parm),
2875 GET_MODE_SIZE (GET_MODE (entry_parm)),
2877 set_mem_attributes (stack_parm, parm, 1);
2880 if (promoted_mode != nominal_mode)
2882 push_to_sequence (conversion_insns);
2883 emit_move_insn (validize_mem (stack_parm),
2884 validize_mem (entry_parm));
2885 conversion_insns = get_insns ();
2889 emit_move_insn (validize_mem (stack_parm),
2890 validize_mem (entry_parm));
2893 SET_DECL_RTL (parm, stack_parm);
2897 if (targetm.calls.split_complex_arg && fnargs != orig_fnargs)
2899 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2901 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2902 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2904 rtx tmp, real, imag;
2905 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2907 real = DECL_RTL (fnargs);
2908 imag = DECL_RTL (TREE_CHAIN (fnargs));
2909 if (inner != GET_MODE (real))
2911 real = gen_lowpart_SUBREG (inner, real);
2912 imag = gen_lowpart_SUBREG (inner, imag);
2914 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2915 SET_DECL_RTL (parm, tmp);
2917 real = DECL_INCOMING_RTL (fnargs);
2918 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2919 if (inner != GET_MODE (real))
2921 real = gen_lowpart_SUBREG (inner, real);
2922 imag = gen_lowpart_SUBREG (inner, imag);
2924 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2925 set_decl_incoming_rtl (parm, tmp);
2926 fnargs = TREE_CHAIN (fnargs);
2930 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2931 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2933 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2934 instead of the copy of decl, i.e. FNARGS. */
2935 if (DECL_INCOMING_RTL (parm)
2936 && MEM_P (DECL_INCOMING_RTL (parm)))
2937 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2939 fnargs = TREE_CHAIN (fnargs);
2943 /* Output all parameter conversion instructions (possibly including calls)
2944 now that all parameters have been copied out of hard registers. */
2945 emit_insn (conversion_insns);
2947 /* If we are receiving a struct value address as the first argument, set up
2948 the RTL for the function result. As this might require code to convert
2949 the transmitted address to Pmode, we do this here to ensure that possible
2950 preliminary conversions of the address have been emitted already. */
2951 if (function_result_decl)
2953 tree result = DECL_RESULT (fndecl);
2954 rtx addr = DECL_RTL (function_result_decl);
2957 addr = convert_memory_address (Pmode, addr);
2958 x = gen_rtx_MEM (DECL_MODE (result), addr);
2959 set_mem_attributes (x, result, 1);
2960 SET_DECL_RTL (result, x);
2963 /* We have aligned all the args, so add space for the pretend args. */
2964 stack_args_size.constant += extra_pretend_bytes;
2965 current_function_args_size = stack_args_size.constant;
2967 /* Adjust function incoming argument size for alignment and
2970 #ifdef REG_PARM_STACK_SPACE
2971 current_function_args_size = MAX (current_function_args_size,
2972 REG_PARM_STACK_SPACE (fndecl));
2975 current_function_args_size
2976 = ((current_function_args_size + STACK_BYTES - 1)
2977 / STACK_BYTES) * STACK_BYTES;
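/* Illustrative sketch (not part of GCC): the division-based round-up above
   agrees with the mask-based CEIL_ROUND form whenever STACK_BYTES is a
   power of two.  The values below are assumptions for the example.  */
#if 0
#include <assert.h>
static void
example_args_size_rounding (void)
{
  int stack_bytes = 16;                        /* assumed STACK_BYTES */
  int args_size = 37;                          /* assumed incoming args size */
  int by_division = (args_size + stack_bytes - 1) / stack_bytes * stack_bytes;
  int by_mask = (args_size + stack_bytes - 1) & ~(stack_bytes - 1);
  assert (by_division == 48 && by_mask == 48);
}
#endif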
2979 #ifdef ARGS_GROW_DOWNWARD
2980 current_function_arg_offset_rtx
2981 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
2982 : expand_expr (size_diffop (stack_args_size.var,
2983 size_int (-stack_args_size.constant)),
2984 NULL_RTX, VOIDmode, 0));
2986 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2989 /* See how many bytes, if any, of its args a function should try to pop
2992 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
2993 current_function_args_size);
2995 /* For stdarg.h function, save info about
2996 regs and stack space used by the named args. */
2998 current_function_args_info = args_so_far;
3000 /* Set the rtx used for the function return value. Put this in its
3001 own variable so any optimizers that need this information don't have
3002 to include tree.h. Do this here so it gets done when an inlined
3003 function gets output. */
3005 current_function_return_rtx
3006 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3007 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3009 /* If scalar return value was computed in a pseudo-reg, or was a named
3010 return value that got dumped to the stack, copy that to the hard
3012 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3014 tree decl_result = DECL_RESULT (fndecl);
3015 rtx decl_rtl = DECL_RTL (decl_result);
3017 if (REG_P (decl_rtl)
3018 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3019 : DECL_REGISTER (decl_result))
3023 #ifdef FUNCTION_OUTGOING_VALUE
3024 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3027 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3030 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3031 /* The delay slot scheduler assumes that current_function_return_rtx
3032 holds the hard register containing the return value, not a
3033 temporary pseudo. */
3034 current_function_return_rtx = real_decl_rtl;
3039 /* If ARGS contains entries with complex types, split the entry into two
3040 entries of the component type. Return a new list if substitutions are
3041 needed, else the old list. */
3044 split_complex_args (tree args)
3048 /* Before allocating memory, check for the common case of no complex. */
3049 for (p = args; p; p = TREE_CHAIN (p))
3051 tree type = TREE_TYPE (p);
3052 if (TREE_CODE (type) == COMPLEX_TYPE
3053 && targetm.calls.split_complex_arg (type))
3059 args = copy_list (args);
3061 for (p = args; p; p = TREE_CHAIN (p))
3063 tree type = TREE_TYPE (p);
3064 if (TREE_CODE (type) == COMPLEX_TYPE
3065 && targetm.calls.split_complex_arg (type))
3068 tree subtype = TREE_TYPE (type);
3070 /* Rewrite the PARM_DECL's type with its component. */
3071 TREE_TYPE (p) = subtype;
3072 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
3073 DECL_MODE (p) = VOIDmode;
3074 DECL_SIZE (p) = NULL;
3075 DECL_SIZE_UNIT (p) = NULL;
3078 /* Build a second synthetic decl. */
3079 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
3080 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
3081 layout_decl (decl, 0);
3083 /* Splice it in; skip the new decl. */
3084 TREE_CHAIN (decl) = TREE_CHAIN (p);
3085 TREE_CHAIN (p) = decl;
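/* Illustrative sketch (not part of GCC): the list splice performed above,
   shown on a plain singly linked list.  The node type and values are
   assumptions for the example only.  */
#if 0
#include <assert.h>
#include <stddef.h>
struct ex_node { struct ex_node *chain; int id; };
static void
example_splice_after (struct ex_node *p, struct ex_node *decl)
{
  /* Insert DECL right after P, exactly as the TREE_CHAIN updates above.  */
  decl->chain = p->chain;
  p->chain = decl;
}
static void
example_split (void)
{
  struct ex_node a = { NULL, 1 }, b = { NULL, 2 }, imag = { NULL, 3 };
  a.chain = &b;
  example_splice_after (&a, &imag);      /* a -> imag -> b */
  assert (a.chain == &imag && imag.chain == &b);
}
#endif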
3093 /* Indicate whether REGNO is an incoming argument to the current function
3094 that was promoted to a wider mode. If so, return the RTX for the
3095 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3096 that REGNO is promoted from and whether the promotion was signed or
3100 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3104 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3105 arg = TREE_CHAIN (arg))
3106 if (REG_P (DECL_INCOMING_RTL (arg))
3107 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3108 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3110 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3111 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3113 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3114 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3115 && mode != DECL_MODE (arg))
3117 *pmode = DECL_MODE (arg);
3118 *punsignedp = unsignedp;
3119 return DECL_INCOMING_RTL (arg);
3127 /* Compute the size and offset from the start of the stacked arguments for a
3128 parm passed in mode PASSED_MODE and with type TYPE.
3130 INITIAL_OFFSET_PTR points to the current offset into the stacked
3133 The starting offset and size for this parm are returned in
3134 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3135 nonzero, the offset is that of stack slot, which is returned in
3136 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3137 padding required from the initial offset ptr to the stack slot.
3139 IN_REGS is nonzero if the argument will be passed in registers. It will
3140 never be set if REG_PARM_STACK_SPACE is not defined.
3142 FNDECL is the function in which the argument was defined.
3144 There are two types of rounding that are done. The first, controlled by
3145 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3146 list to be aligned to the specific boundary (in bits). This rounding
3147 affects the initial and starting offsets, but not the argument size.
3149 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3150 optionally rounds the size of the parm to PARM_BOUNDARY. The
3151 initial offset is not affected by this rounding, while the size always
3152 is and the starting offset may be. */
3154 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3155 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3156 callers pass in the total size of args so far as
3157 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3160 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3161 int partial, tree fndecl ATTRIBUTE_UNUSED,
3162 struct args_size *initial_offset_ptr,
3163 struct locate_and_pad_arg_data *locate)
3166 enum direction where_pad;
3168 int reg_parm_stack_space = 0;
3169 int part_size_in_regs;
3171 #ifdef REG_PARM_STACK_SPACE
3172 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3174 /* If we have found a stack parm before we reach the end of the
3175 area reserved for registers, skip that area. */
3178 if (reg_parm_stack_space > 0)
3180 if (initial_offset_ptr->var)
3182 initial_offset_ptr->var
3183 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3184 ssize_int (reg_parm_stack_space));
3185 initial_offset_ptr->constant = 0;
3187 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3188 initial_offset_ptr->constant = reg_parm_stack_space;
3191 #endif /* REG_PARM_STACK_SPACE */
3193 part_size_in_regs = 0;
3194 if (reg_parm_stack_space == 0)
3195 part_size_in_regs = ((partial * UNITS_PER_WORD)
3196 / (PARM_BOUNDARY / BITS_PER_UNIT)
3197 * (PARM_BOUNDARY / BITS_PER_UNIT));
3200 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3201 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3202 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3203 locate->where_pad = where_pad;
3205 #ifdef ARGS_GROW_DOWNWARD
3206 locate->slot_offset.constant = -initial_offset_ptr->constant;
3207 if (initial_offset_ptr->var)
3208 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3209 initial_offset_ptr->var);
3213 if (where_pad != none
3214 && (!host_integerp (sizetree, 1)
3215 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3216 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3217 SUB_PARM_SIZE (locate->slot_offset, s2);
3220 locate->slot_offset.constant += part_size_in_regs;
3223 #ifdef REG_PARM_STACK_SPACE
3224 || REG_PARM_STACK_SPACE (fndecl) > 0
3227 pad_to_arg_alignment (&locate->slot_offset, boundary,
3228 &locate->alignment_pad);
3230 locate->size.constant = (-initial_offset_ptr->constant
3231 - locate->slot_offset.constant);
3232 if (initial_offset_ptr->var)
3233 locate->size.var = size_binop (MINUS_EXPR,
3234 size_binop (MINUS_EXPR,
3236 initial_offset_ptr->var),
3237 locate->slot_offset.var);
3239 /* Pad_below needs the pre-rounded size to know how much to pad
3241 locate->offset = locate->slot_offset;
3242 if (where_pad == downward)
3243 pad_below (&locate->offset, passed_mode, sizetree);
3245 #else /* !ARGS_GROW_DOWNWARD */
3247 #ifdef REG_PARM_STACK_SPACE
3248 || REG_PARM_STACK_SPACE (fndecl) > 0
3251 pad_to_arg_alignment (initial_offset_ptr, boundary,
3252 &locate->alignment_pad);
3253 locate->slot_offset = *initial_offset_ptr;
3255 #ifdef PUSH_ROUNDING
3256 if (passed_mode != BLKmode)
3257 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3260 /* Pad_below needs the pre-rounded size to know how much to pad below
3261 so this must be done before rounding up. */
3262 locate->offset = locate->slot_offset;
3263 if (where_pad == downward)
3264 pad_below (&locate->offset, passed_mode, sizetree);
3266 if (where_pad != none
3267 && (!host_integerp (sizetree, 1)
3268 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3269 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3271 ADD_PARM_SIZE (locate->size, sizetree);
3273 locate->size.constant -= part_size_in_regs;
3274 #endif /* ARGS_GROW_DOWNWARD */
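/* Illustrative sketch (not part of GCC): the two roundings performed by
   locate_and_pad_parm, for the (assumed) upward-growing, non-register case.
   The boundary and size values below are assumptions for the example.  */
#if 0
#include <assert.h>
static void
example_locate_and_pad (void)
{
  int bits_per_unit = 8;
  int parm_boundary = 32;              /* assumed PARM_BOUNDARY in bits */
  int arg_boundary = 64;               /* assumed FUNCTION_ARG_BOUNDARY */
  int offset = 4;                      /* bytes of args stacked so far */
  int size = 6;                        /* raw size of this parm in bytes */
  int boundary_bytes = arg_boundary / bits_per_unit;            /* 8 */
  int parm_bytes = parm_boundary / bits_per_unit;               /* 4 */
  int slot_offset, padded_size;

  /* First rounding: align the slot offset to the argument boundary.  */
  slot_offset = (offset + boundary_bytes - 1) & ~(boundary_bytes - 1);
  assert (slot_offset == 8);

  /* Second rounding: pad the size up to a multiple of PARM_BOUNDARY.  */
  padded_size = (size + parm_bytes - 1) & ~(parm_bytes - 1);
  assert (padded_size == 8);
}
#endif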
3277 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3278 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3281 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3282 struct args_size *alignment_pad)
3284 tree save_var = NULL_TREE;
3285 HOST_WIDE_INT save_constant = 0;
3286 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3287 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3289 #ifdef SPARC_STACK_BOUNDARY_HACK
3290 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3291 higher than the real alignment of %sp. However, when it does this,
3292 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3293 This is a temporary hack while the sparc port is fixed. */
3294 if (SPARC_STACK_BOUNDARY_HACK)
3298 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3300 save_var = offset_ptr->var;
3301 save_constant = offset_ptr->constant;
3304 alignment_pad->var = NULL_TREE;
3305 alignment_pad->constant = 0;
3307 if (boundary > BITS_PER_UNIT)
3309 if (offset_ptr->var)
3311 tree sp_offset_tree = ssize_int (sp_offset);
3312 tree offset = size_binop (PLUS_EXPR,
3313 ARGS_SIZE_TREE (*offset_ptr),
3315 #ifdef ARGS_GROW_DOWNWARD
3316 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3318 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3321 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3322 /* ARGS_SIZE_TREE includes constant term. */
3323 offset_ptr->constant = 0;
3324 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3325 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3330 offset_ptr->constant = -sp_offset +
3331 #ifdef ARGS_GROW_DOWNWARD
3332 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3334 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3336 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3337 alignment_pad->constant = offset_ptr->constant - save_constant;
3343 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3345 if (passed_mode != BLKmode)
3347 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3348 offset_ptr->constant
3349 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3350 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3351 - GET_MODE_SIZE (passed_mode));
3355 if (TREE_CODE (sizetree) != INTEGER_CST
3356 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3358 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3359 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3361 ADD_PARM_SIZE (*offset_ptr, s2);
3362 SUB_PARM_SIZE (*offset_ptr, sizetree);
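/* Illustrative sketch (not part of GCC): the offset adjustment pad_below
   makes for a small scalar that is padded downward.  The mode size and
   PARM_BOUNDARY values below are assumptions for the example.  */
#if 0
#include <assert.h>
static void
example_pad_below (void)
{
  int bits_per_unit = 8;
  int parm_boundary = 32;          /* assumed PARM_BOUNDARY in bits */
  int mode_bits = 16;              /* assumed 2-byte scalar mode */
  int mode_size = mode_bits / bits_per_unit;
  int offset = 0;

  if (mode_bits % parm_boundary)
    offset += ((mode_bits + parm_boundary - 1)
               / parm_boundary * parm_boundary / bits_per_unit)
              - mode_size;
  /* The 2-byte value sits above 2 bytes of padding in its 4-byte slot.  */
  assert (offset == 2);
}
#endif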
3367 /* Walk the tree of blocks describing the binding levels within a function
3368 and warn about variables that might be killed by setjmp or vfork.
3369 This is done after calling flow_analysis and before global_alloc
3370 clobbers the pseudo-regs to hard regs. */
3373 setjmp_vars_warning (tree block)
3377 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3379 if (TREE_CODE (decl) == VAR_DECL
3380 && DECL_RTL_SET_P (decl)
3381 && REG_P (DECL_RTL (decl))
3382 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3383 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
3387 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3388 setjmp_vars_warning (sub);
3391 /* Do the appropriate part of setjmp_vars_warning
3392 but for arguments instead of local variables. */
3395 setjmp_args_warning (void)
3398 for (decl = DECL_ARGUMENTS (current_function_decl);
3399 decl; decl = TREE_CHAIN (decl))
3400 if (DECL_RTL (decl) != 0
3401 && REG_P (DECL_RTL (decl))
3402 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3403 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
3408 /* Convert a stack slot address ADDR for variable VAR
3409 (from a containing function)
3410 into an address valid in this function (using a static chain). */
3413 fix_lexical_addr (rtx addr, tree var)
3416 HOST_WIDE_INT displacement;
3417 tree context = decl_function_context (var);
3418 struct function *fp;
3421 /* If this is the present function, we need not do anything. */
3422 if (context == current_function_decl)
3425 fp = find_function_data (context);
3427 /* Decode given address as base reg plus displacement. */
3429 basereg = addr, displacement = 0;
3430 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3431 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3438 /* Use same offset, relative to appropriate static chain or argument
3440 return plus_constant (base, displacement);
3443 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3444 and create duplicate blocks. */
3445 /* ??? Need an option to either create block fragments or to create
3446 abstract origin duplicates of a source block. It really depends
3447 on what optimization has been performed. */
3450 reorder_blocks (void)
3452 tree block = DECL_INITIAL (current_function_decl);
3453 varray_type block_stack;
3455 if (block == NULL_TREE)
3458 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3460 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3461 clear_block_marks (block);
3463 /* Prune the old trees away, so that they don't get in the way. */
3464 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3465 BLOCK_CHAIN (block) = NULL_TREE;
3467 /* Recreate the block tree from the note nesting. */
3468 reorder_blocks_1 (get_insns (), block, &block_stack);
3469 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3471 /* Remove deleted blocks from the block fragment chains. */
3472 reorder_fix_fragments (block);
3475 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3478 clear_block_marks (tree block)
3482 TREE_ASM_WRITTEN (block) = 0;
3483 clear_block_marks (BLOCK_SUBBLOCKS (block));
3484 block = BLOCK_CHAIN (block);
3489 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3493 for (insn = insns; insn; insn = NEXT_INSN (insn))
3495 if (GET_CODE (insn) == NOTE)
3497 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3499 tree block = NOTE_BLOCK (insn);
3501 /* If we have seen this block before, that means it now
3502 spans multiple address regions. Create a new fragment. */
3503 if (TREE_ASM_WRITTEN (block))
3505 tree new_block = copy_node (block);
3508 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3509 ? BLOCK_FRAGMENT_ORIGIN (block)
3511 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3512 BLOCK_FRAGMENT_CHAIN (new_block)
3513 = BLOCK_FRAGMENT_CHAIN (origin);
3514 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3516 NOTE_BLOCK (insn) = new_block;
3520 BLOCK_SUBBLOCKS (block) = 0;
3521 TREE_ASM_WRITTEN (block) = 1;
3522 /* When there's only one block for the entire function,
3523 current_block == block and we mustn't do this, it
3524 will cause infinite recursion. */
3525 if (block != current_block)
3527 BLOCK_SUPERCONTEXT (block) = current_block;
3528 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3529 BLOCK_SUBBLOCKS (current_block) = block;
3530 current_block = block;
3532 VARRAY_PUSH_TREE (*p_block_stack, block);
3534 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3536 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3537 VARRAY_POP (*p_block_stack);
3538 BLOCK_SUBBLOCKS (current_block)
3539 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3540 current_block = BLOCK_SUPERCONTEXT (current_block);
3546 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3547 appears in the block tree, select one of the fragments to become
3548 the new origin block. */
3551 reorder_fix_fragments (tree block)
3555 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3556 tree new_origin = NULL_TREE;
3560 if (! TREE_ASM_WRITTEN (dup_origin))
3562 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3564 /* Find the first of the remaining fragments. There must
3565 be at least one -- the current block. */
3566 while (! TREE_ASM_WRITTEN (new_origin))
3567 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3568 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3571 else if (! dup_origin)
3574 /* Re-root the rest of the fragments to the new origin. In the
3575 case that DUP_ORIGIN was null, that means BLOCK was the origin
3576 of a chain of fragments and we want to remove those fragments
3577 that didn't make it to the output. */
3580 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3585 if (TREE_ASM_WRITTEN (chain))
3587 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3589 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3591 chain = BLOCK_FRAGMENT_CHAIN (chain);
3596 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3597 block = BLOCK_CHAIN (block);
3601 /* Reverse the order of elements in the chain T of blocks,
3602 and return the new head of the chain (old last element). */
3605 blocks_nreverse (tree t)
3607 tree prev = 0, decl, next;
3608 for (decl = t; decl; decl = next)
3610 next = BLOCK_CHAIN (decl);
3611 BLOCK_CHAIN (decl) = prev;
3617 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3618 non-NULL, list them all into VECTOR, in a depth-first preorder
3619 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3623 all_blocks (tree block, tree *vector)
3629 TREE_ASM_WRITTEN (block) = 0;
3631 /* Record this block. */
3633 vector[n_blocks] = block;
3637 /* Record the subblocks, and their subblocks... */
3638 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3639 vector ? vector + n_blocks : 0);
3640 block = BLOCK_CHAIN (block);
3646 /* Return a vector containing all the blocks rooted at BLOCK. The
3647 number of elements in the vector is stored in N_BLOCKS_P. The
3648 vector is dynamically allocated; it is the caller's responsibility
3649 to call `free' on the pointer returned. */
3652 get_block_vector (tree block, int *n_blocks_p)
3656 *n_blocks_p = all_blocks (block, NULL);
3657 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3658 all_blocks (block, block_vector);
3660 return block_vector;
3663 static GTY(()) int next_block_index = 2;
3665 /* Set BLOCK_NUMBER for all the blocks in FN. */
3668 number_blocks (tree fn)
3674 /* For SDB and XCOFF debugging output, we start numbering the blocks
3675 from 1 within each function, rather than keeping a running
3677 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3678 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3679 next_block_index = 1;
3682 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3684 /* The top-level BLOCK isn't numbered at all. */
3685 for (i = 1; i < n_blocks; ++i)
3686 /* We number the blocks from two. */
3687 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3689 free (block_vector);
3694 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3697 debug_find_var_in_block_tree (tree var, tree block)
3701 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3705 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3707 tree ret = debug_find_var_in_block_tree (var, t);
3715 /* Allocate a function structure for FNDECL and set its contents
3719 allocate_struct_function (tree fndecl)
3722 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3724 cfun = ggc_alloc_cleared (sizeof (struct function));
3726 cfun->stack_alignment_needed = STACK_BOUNDARY;
3727 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3729 current_function_funcdef_no = funcdef_no++;
3731 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3733 init_stmt_for_function ();
3734 init_eh_for_function ();
3736 lang_hooks.function.init (cfun);
3737 if (init_machine_status)
3738 cfun->machine = (*init_machine_status) ();
3743 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3744 cfun->decl = fndecl;
3746 result = DECL_RESULT (fndecl);
3747 if (aggregate_value_p (result, fndecl))
3749 #ifdef PCC_STATIC_STRUCT_RETURN
3750 current_function_returns_pcc_struct = 1;
3752 current_function_returns_struct = 1;
3755 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3757 current_function_stdarg
3759 && TYPE_ARG_TYPES (fntype) != 0
3760 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3761 != void_type_node));
3764 /* Reset cfun, and other non-struct-function variables to defaults as
3765 appropriate for emitting rtl at the start of a function. */
3768 prepare_function_start (tree fndecl)
3770 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3771 cfun = DECL_STRUCT_FUNCTION (fndecl);
3773 allocate_struct_function (fndecl);
3775 init_varasm_status (cfun);
3778 cse_not_expected = ! optimize;
3780 /* Caller save not needed yet. */
3781 caller_save_needed = 0;
3783 /* We haven't done register allocation yet. */
3786 /* Indicate that we need to distinguish between the return value of the
3787 present function and the return value of a function being called. */
3788 rtx_equal_function_value_matters = 1;
3790 /* Indicate that we have not instantiated virtual registers yet. */
3791 virtuals_instantiated = 0;
3793 /* Indicate that we want CONCATs now. */
3794 generating_concat_p = 1;
3796 /* Indicate we have no need of a frame pointer yet. */
3797 frame_pointer_needed = 0;
3800 /* Initialize the rtl expansion mechanism so that we can do simple things
3801 like generate sequences. This is used to provide a context during global
3802 initialization of some passes. */
3804 init_dummy_function_start (void)
3806 prepare_function_start (NULL);
3809 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3810 and initialize static variables for generating RTL for the statements
3814 init_function_start (tree subr)
3816 prepare_function_start (subr);
3818 /* Prevent ever trying to delete the first instruction of a
3819 function. Also tell final how to output a linenum before the
3820 function prologue. Note linenums could be missing, e.g. when
3821 compiling a Java .class file. */
3822 if (! DECL_IS_BUILTIN (subr))
3823 emit_line_note (DECL_SOURCE_LOCATION (subr));
3825 /* Make sure first insn is a note even if we don't want linenums.
3826 This makes sure the first insn will never be deleted.
3827 Also, final expects a note to appear there. */
3828 emit_note (NOTE_INSN_DELETED);
3830 /* Warn if this value is an aggregate type,
3831 regardless of which calling convention we are using for it. */
3832 if (warn_aggregate_return
3833 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3834 warning ("function returns an aggregate");
3837 /* Make sure all values used by the optimization passes have sane
3840 init_function_for_compilation (void)
3844 /* No prologue/epilogue insns yet. */
3845 VARRAY_GROW (prologue, 0);
3846 VARRAY_GROW (epilogue, 0);
3847 VARRAY_GROW (sibcall_epilogue, 0);
3850 /* Expand a call to __main at the beginning of a possible main function. */
3852 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3853 #undef HAS_INIT_SECTION
3854 #define HAS_INIT_SECTION
3858 expand_main_function (void)
3860 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3861 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3863 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
3867 /* Forcibly align the stack. */
3868 #ifdef STACK_GROWS_DOWNWARD
3869 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
3870 stack_pointer_rtx, 1, OPTAB_WIDEN);
3872 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3873 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3874 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3875 stack_pointer_rtx, 1, OPTAB_WIDEN);
3877 if (tmp != stack_pointer_rtx)
3878 emit_move_insn (stack_pointer_rtx, tmp);
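/* Illustrative sketch (not part of GCC): the pointer arithmetic used above
   to force stack alignment, for both stack growth directions.  The
   alignment and sample address below are assumptions for the example.  */
#if 0
#include <assert.h>
#include <stdint.h>
static void
example_force_stack_alignment (void)
{
  uintptr_t align = 16;            /* assumed PREFERRED_STACK_BOUNDARY / 8 */
  uintptr_t sp = 0x1004;           /* assumed misaligned stack pointer */
  uintptr_t down, up;

  /* Downward-growing stack: round down by masking with -align.  */
  down = sp & -align;              /* -align == ~(align - 1) */
  assert (down == 0x1000);

  /* Upward-growing stack: add align - 1, then mask with -align.  */
  up = (sp + align - 1) & -align;
  assert (up == 0x1010);
}
#endif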
3880 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3881 tmp = force_reg (Pmode, const0_rtx);
3882 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
3886 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3887 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3890 emit_insn_before (seq, tmp);
3896 #ifndef HAS_INIT_SECTION
3897 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3901 /* The PENDING_SIZES represent the sizes of variable-sized types.
3902 Create RTL for the various sizes now (using temporary variables),
3903 so that we can refer to the sizes from the RTL we are generating
3904 for the current function. The PENDING_SIZES are a TREE_LIST. The
3905 TREE_VALUE of each node is a SAVE_EXPR. */
3908 expand_pending_sizes (tree pending_sizes)
3912 /* Evaluate now the sizes of any types declared among the arguments. */
3913 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
3915 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
3916 /* Flush the queue in case this parameter declaration has
3922 /* Start the RTL for a new function, and set variables used for
3924 SUBR is the FUNCTION_DECL node of the function being compiled. */
3929 expand_function_start (tree subr)
3931 /* Make sure volatile mem refs aren't considered
3932 valid operands of arithmetic insns. */
3933 init_recog_no_volatile ();
3935 current_function_profile
3937 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3939 current_function_limit_stack
3940 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3942 /* Make the label for return statements to jump to. Do not special
3943 case machines with special return instructions -- they will be
3944 handled later during jump, ifcvt, or epilogue creation. */
3945 return_label = gen_label_rtx ();
3947 /* Initialize rtx used to return the value. */
3948 /* Do this before assign_parms so that we copy the struct value address
3949 before any library calls that assign parms might generate. */
3951 /* Decide whether to return the value in memory or in a register. */
3952 if (aggregate_value_p (DECL_RESULT (subr), subr))
3954 /* Returning something that won't go in a register. */
3955 rtx value_address = 0;
3957 #ifdef PCC_STATIC_STRUCT_RETURN
3958 if (current_function_returns_pcc_struct)
3960 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3961 value_address = assemble_static_space (size);
3966 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
3967 /* Expect to be passed the address of a place to store the value.
3968 If it is passed as an argument, assign_parms will take care of
3972 value_address = gen_reg_rtx (Pmode);
3973 emit_move_insn (value_address, sv);
3978 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
3979 set_mem_attributes (x, DECL_RESULT (subr), 1);
3980 SET_DECL_RTL (DECL_RESULT (subr), x);
3983 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3984 /* If return mode is void, this decl rtl should not be used. */
3985 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
3988 /* Compute the return values into a pseudo reg, which we will copy
3989 into the true return register after the cleanups are done. */
3991 /* In order to figure out what mode to use for the pseudo, we
3992 figure out what the mode of the eventual return register will
3993 actually be, and use that. */
3995 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
3998 /* Structures that are returned in registers are not aggregate_value_p,
3999 so we may see a PARALLEL or a REG. */
4000 if (REG_P (hard_reg))
4001 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
4002 else if (GET_CODE (hard_reg) == PARALLEL)
4003 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4007 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4008 result to the real return register(s). */
4009 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4012 /* Initialize rtx for parameters and local variables.
4013 In some cases this requires emitting insns. */
4014 assign_parms (subr);
4016 /* If function gets a static chain arg, store it. */
4017 if (cfun->static_chain_decl)
4019 tree parm = cfun->static_chain_decl;
4020 rtx local = gen_reg_rtx (Pmode);
4022 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4023 SET_DECL_RTL (parm, local);
4024 maybe_set_unchanging (local, parm);
4025 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4027 emit_move_insn (local, static_chain_incoming_rtx);
4030 /* If the function receives a non-local goto, then store the
4031 bits we need to restore the frame pointer. */
4032 if (cfun->nonlocal_goto_save_area)
4037 /* ??? We need to do this save early. Unfortunately, this point is
4038 before the frame variable gets declared. Help out... */
4039 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4041 t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
4042 integer_zero_node, NULL_TREE, NULL_TREE);
4043 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4045 emit_move_insn (r_save, virtual_stack_vars_rtx);
4046 update_nonlocal_goto_save_area ();
4049 /* The following was moved from init_function_start.
4050 The move is supposed to make sdb output more accurate. */
4051 /* Indicate the beginning of the function body,
4052 as opposed to parm setup. */
4053 emit_note (NOTE_INSN_FUNCTION_BEG);
4055 if (GET_CODE (get_last_insn ()) != NOTE)
4056 emit_note (NOTE_INSN_DELETED);
4057 parm_birth_insn = get_last_insn ();
4059 if (current_function_profile)
4062 PROFILE_HOOK (current_function_funcdef_no);
4066 /* After the display initializations is where the tail-recursion label
4067 should go, if we end up needing one. Ensure we have a NOTE here
4068 since some things (like trampolines) get placed before this. */
4069 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4071 /* Evaluate now the sizes of any types declared among the arguments. */
4072 expand_pending_sizes (nreverse (get_pending_sizes ()));
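/* For example, given the C99 declaration
	void f (int n, double a[n][n]);
   the expression for the size of A's element type involves N and is
   evaluated here, once the parameters themselves have been set up.  */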
4074 /* Make sure there is a line number after the function entry setup code. */
4075 force_next_line_note ();
4078 /* Undo the effects of init_dummy_function_start. */
4080 expand_dummy_function_end (void)
4082 /* End any sequences that failed to be closed due to syntax errors. */
4083 while (in_sequence_p ())
4084 end_sequence ();
4086 /* Outside function body, can't compute type's actual size
4087 until next function's body starts. */
4089 free_after_parsing (cfun);
4090 free_after_compilation (cfun);
4094 /* Call DOIT for each hard register used as a return value from
4095 the current function. */
4098 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4100 rtx outgoing = current_function_return_rtx;
4105 if (REG_P (outgoing))
4106 (*doit) (outgoing, arg);
4107 else if (GET_CODE (outgoing) == PARALLEL)
4111 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4113 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4115 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4122 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4124 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4128 clobber_return_register (void)
4130 diddle_return_value (do_clobber_return_reg, NULL);
4132 /* If we use a pseudo to hold the return value, clobber it too.  */
4133 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4135 tree decl_result = DECL_RESULT (current_function_decl);
4136 rtx decl_rtl = DECL_RTL (decl_result);
4137 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4139 do_clobber_return_reg (decl_rtl, NULL);
4145 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4147 emit_insn (gen_rtx_USE (VOIDmode, reg));
4151 use_return_register (void)
4153 diddle_return_value (do_use_return_reg, NULL);
4156 /* Possibly warn about unused parameters. */
4158 do_warn_unused_parameter (tree fn)
4162 for (decl = DECL_ARGUMENTS (fn);
4163 decl; decl = TREE_CHAIN (decl))
4164 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4165 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4166 warning ("%Junused parameter '%D'", decl, decl);
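/* For example, for
	int f (int used, int unused) { return used; }
   only `unused' is warned about; unnamed parameters and artificial ones
   (such as the C++ `this' pointer) are skipped.  */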
4169 static GTY(()) rtx initial_trampoline;
4171 /* Generate RTL for the end of the current function. */
4174 expand_function_end (void)
4178 finish_expr_for_function ();
4180 /* If arg_pointer_save_area was referenced only from a nested
4181 function, we will not have initialized it yet. Do that now. */
4182 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4183 get_arg_pointer_save_area (cfun);
4185 /* If we are doing stack checking and this function makes calls,
4186 do a stack probe at the start of the function to ensure we have enough
4187 space for another stack frame. */
4188 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4192 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4193 if (GET_CODE (insn) == CALL_INSN)
4194 break;
4196 probe_stack_range (STACK_CHECK_PROTECT,
4197 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4200 emit_insn_before (seq, tail_recursion_reentry);
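/* The probe emitted above touches the region between STACK_CHECK_PROTECT
   and STACK_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE bytes beyond the
   stack pointer, so a guard page is hit before any callee can overrun the
   stack; leaf functions, for which the scan found no CALL_INSN, get no
   probe at all.  */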
4205 /* Possibly warn about unused parameters.
4206 When the front end does unit-at-a-time, the warning has already been
4207 issued at finalization time. */
4208 if (warn_unused_parameter
4209 && !lang_hooks.callgraph.expand_function)
4210 do_warn_unused_parameter (current_function_decl);
4212 /* End any sequences that failed to be closed due to syntax errors. */
4213 while (in_sequence_p ())
4214 end_sequence ();
4216 clear_pending_stack_adjust ();
4217 do_pending_stack_adjust ();
4219 /* @@@ This is a kludge. We want to ensure that instructions that
4220 may trap are not moved into the epilogue by scheduling, because
4221 we don't always emit unwind information for the epilogue.
4222 However, not all machine descriptions define a blockage insn, so
4223 emit an ASM_INPUT to act as one. */
4224 if (flag_non_call_exceptions)
4225 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4227 /* Mark the end of the function body.
4228 If control reaches this insn, the function can drop through
4229 without returning a value. */
4230 emit_note (NOTE_INSN_FUNCTION_END);
4232 /* Must mark the last line number note in the function, so that the test
4233 coverage code can avoid counting the last line twice. This just tells
4234 the code to ignore the immediately following line note, since there
4235 already exists a copy of this note somewhere above. This line number
4236 note is still needed for debugging though, so we can't delete it. */
4237 if (flag_test_coverage)
4238 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4240 /* Output a linenumber for the end of the function.
4241 SDB depends on this. */
4242 force_next_line_note ();
4243 emit_line_note (input_location);
4245 /* Before the return label (if any), clobber the return
4246 registers so that they are not propagated live to the rest of
4247 the function. This can only happen with functions that drop
4248 through; if there had been a return statement, there would
4249 have either been a return rtx, or a jump to the return label.
4251 We delay actual code generation until after the `current_function_value_rtx' is computed.  */
4253 clobber_after = get_last_insn ();
4255 /* Output the label for the actual return from the function,
4256 if one is expected. This happens either because a function epilogue
4257 is used instead of a return instruction, or because a return was done
4258 with a goto in order to run local cleanups, or because of pcc-style
4259 structure returning. */
4260 if (return_label)
4261 emit_label (return_label);
4263 /* Let except.c know where it should emit the call to unregister
4264 the function context for sjlj exceptions. */
4265 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4266 sjlj_emit_function_exit_after (get_last_insn ());
4268 /* If we had calls to alloca, and this machine needs
4269 an accurate stack pointer to exit the function,
4270 insert some code to save and restore the stack pointer. */
4271 if (! EXIT_IGNORE_STACK
4272 && current_function_calls_alloca)
4276 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4277 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
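/* TEM is filled in by the save insns emitted just after parm_birth_insn
   (a register or a memory slot, whichever the target's save_stack_function
   pattern prefers), and the restore emitted here puts the stack pointer
   back, releasing any space obtained by alloca.  */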
4280 /* If scalar return value was computed in a pseudo-reg, or was a named
4281 return value that got dumped to the stack, copy that to the hard return register.  */
4283 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4285 tree decl_result = DECL_RESULT (current_function_decl);
4286 rtx decl_rtl = DECL_RTL (decl_result);
4288 if (REG_P (decl_rtl)
4289 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4290 : DECL_REGISTER (decl_result))
4292 rtx real_decl_rtl = current_function_return_rtx;
4294 /* This should be set in assign_parms. */
4295 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
4296 abort ();
4298 /* If this is a BLKmode structure being returned in registers,
4299 then use the mode computed in expand_return. Note that if
4300 decl_rtl is memory, then its mode may have been changed,
4301 but that current_function_return_rtx has not. */
4302 if (GET_MODE (real_decl_rtl) == BLKmode)
4303 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4305 /* If a named return value dumped decl_return to memory, then
4306 we may need to re-do the PROMOTE_MODE signed/unsigned extension.  */
4308 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4310 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4312 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4313 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl), &unsignedp, 1);
4316 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4318 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4320 /* If expand_function_start has created a PARALLEL for decl_rtl,
4321 move the result to the real return registers. Otherwise, do
4322 a group load from decl_rtl for a named return. */
4323 if (GET_CODE (decl_rtl) == PARALLEL)
4324 emit_group_move (real_decl_rtl, decl_rtl);
4325 else
4326 emit_group_load (real_decl_rtl, decl_rtl,
4327 TREE_TYPE (decl_result),
4328 int_size_in_bytes (TREE_TYPE (decl_result)));
4331 emit_move_insn (real_decl_rtl, decl_rtl);
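/* To recap the three cases above: a mode mismatch (typically caused by
   PROMOTE_MODE) is handled with convert_move, PARALLEL return values are
   copied piecewise with emit_group_move or emit_group_load, and the
   common case is a plain emit_move_insn into the hard return register.  */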
4335 /* If returning a structure, arrange to return the address of the value
4336 in a place where debuggers expect to find it.
4338 If returning a structure PCC style,
4339 the caller also depends on this value.
4340 And current_function_returns_pcc_struct is not necessarily set. */
4341 if (current_function_returns_struct
4342 || current_function_returns_pcc_struct)
4345 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4346 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4347 #ifdef FUNCTION_OUTGOING_VALUE
4348 rtx outgoing
4349 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4350 current_function_decl);
4351 #else
4352 rtx outgoing
4353 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
4354 #endif
4356 /* Mark this as a function return value so integrate will delete the
4357 assignment and USE below when inlining this function. */
4358 REG_FUNCTION_VALUE_P (outgoing) = 1;
4360 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4361 value_address = convert_memory_address (GET_MODE (outgoing), value_address);
4364 emit_move_insn (outgoing, value_address);
4366 /* Show return register used to hold result (in this case the address of the structure).  */
4368 current_function_return_rtx = outgoing;
4371 /* If this is an implementation of throw, do what's necessary to
4372 communicate between __builtin_eh_return and the epilogue. */
4373 expand_eh_return ();
4375 /* Emit the actual code to clobber return register. */
4380 clobber_return_register ();
4384 after = emit_insn_after (seq, clobber_after);
4387 /* Output the label for the naked return from the function, if one is
4388 expected. This is currently used only by __builtin_return. */
4389 if (naked_return_label)
4390 emit_label (naked_return_label);
4392 /* ??? This should no longer be necessary since stupid is no longer with
4393 us, but there are some parts of the compiler (e.g. reload_combine, and
4394 sh mach_dep_reorg) that still try to compute their own lifetime info
4395 instead of using the general framework. */
4396 use_return_register ();
4398 /* Fix up any gotos that jumped out to the outermost
4399 binding level of the function.
4400 Must follow emitting RETURN_LABEL. */
4402 /* If you have any cleanups to do at this point,
4403 and they need to create temporary variables,
4404 then you will lose. */
4405 expand_fixups (get_insns ());
4409 get_arg_pointer_save_area (struct function *f)
4411 rtx ret = f->x_arg_pointer_save_area;
4415 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4416 f->x_arg_pointer_save_area = ret;
4419 if (f == cfun && ! f->arg_pointer_save_area_init)
4423 /* Save the arg pointer at the beginning of the function. The
4424 generated stack slot may not be a valid memory address, so we
4425 have to check it and fix it if necessary. */
4427 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4431 push_topmost_sequence ();
4432 emit_insn_after (seq, get_insns ());
4433 pop_topmost_sequence ();
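/* push_topmost_sequence / pop_topmost_sequence are used so that the save
   of the incoming argument pointer lands near the very start of the
   function's insn chain, even if we are currently emitting into a nested
   sequence.  */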
4439 /* Extend a vector that records the INSN_UIDs of INSNS
4440 (a list of one or more insns). */
4443 record_insns (rtx insns, varray_type *vecp)
4450 while (tmp != NULL_RTX)
4453 tmp = NEXT_INSN (tmp);
4456 i = VARRAY_SIZE (*vecp);
4457 VARRAY_GROW (*vecp, i + len);
4459 while (tmp != NULL_RTX)
4461 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4463 tmp = NEXT_INSN (tmp);
4467 /* Set the locator of the insn chain starting at INSN to LOC. */
4469 set_insn_locators (rtx insn, int loc)
4471 while (insn != NULL_RTX)
4474 INSN_LOCATOR (insn) = loc;
4475 insn = NEXT_INSN (insn);
4479 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4480 be running after reorg, SEQUENCE rtl is possible. */
4483 contains (rtx insn, varray_type vec)
4487 if (GET_CODE (insn) == INSN
4488 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4491 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4492 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4493 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4499 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4500 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4507 prologue_epilogue_contains (rtx insn)
4509 if (contains (insn, prologue))
4511 if (contains (insn, epilogue))
4517 sibcall_epilogue_contains (rtx insn)
4519 if (sibcall_epilogue)
4520 return contains (insn, sibcall_epilogue);
4525 /* Insert gen_return at the end of block BB. This also means updating
4526 block_for_insn appropriately. */
4529 emit_return_into_block (basic_block bb, rtx line_note)
4531 emit_jump_insn_after (gen_return (), BB_END (bb));
4532 if (line_note)
4533 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4535 #endif /* HAVE_return */
4537 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4539 /* These functions convert the epilogue into a variant that does not modify the
4540 stack pointer. This is used in cases where a function returns an object
4541 whose size is not known until it is computed. The called function leaves the
4542 object on the stack, leaves the stack depressed, and returns a pointer to this object.
4545 What we need to do is track all modifications and references to the stack
4546 pointer, deleting the modifications and changing the references to point to
4547 the location the stack pointer would have pointed to had the modifications taken place.
4550 These functions need to be portable so we need to make as few assumptions
4551 about the epilogue as we can. However, the epilogue basically contains
4552 three things: instructions to reset the stack pointer, instructions to
4553 reload registers, possibly including the frame pointer, and an
4554 instruction to return to the caller.
4556 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4557 We also make no attempt to validate the insns we make since if they are
4558 invalid, we probably can't do anything valid. The intent is that these
4559 routines get "smarter" as more and more machines start to use them and
4560 they try operating on different epilogues.
4562 We use the following structure to track what the part of the epilogue that
4563 we've already processed has done. We keep two copies of the SP equivalence,
4564 one for use during the insn we are processing and one for use in the next
4565 insn. The difference is because one part of a PARALLEL may adjust SP
4566 and the other may use it. */
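/* As a purely illustrative example (registers and offsets are target
   dependent), an epilogue of the form

	(set (reg fp) (mem (plus (reg sp) (const_int 12))))
	(set (reg sp) (plus (reg sp) (const_int 16)))
	(return)

   is rewritten so that the frame pointer restore still reads the same
   stack slot, the adjustment of the stack pointer is dropped (only
   tracked in the structure below), and the RETURN becomes an indirect
   jump through the saved return address, leaving the stack depressed.  */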
4570 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4571 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4572 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4573 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4574 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4575 should be set to once we no longer need its value.  */
4577 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences for registers.  */
4581 static void handle_epilogue_set (rtx, struct epi_info *);
4582 static void update_epilogue_consts (rtx, rtx, void *);
4583 static void emit_equiv_load (struct epi_info *);
4585 /* Modify INSN, a list of one or more insns that is part of the epilogue, so
4586 that it makes no modifications to the stack pointer.  Return the new list of insns.  */
4589 keep_stack_depressed (rtx insns)
4592 struct epi_info info;
4595 /* If the epilogue is just a single instruction, it must be OK as is. */
4596 if (NEXT_INSN (insns) == NULL_RTX)
4599 /* Otherwise, start a sequence, initialize the information we have, and
4600 process all the insns we were given. */
4603 info.sp_equiv_reg = stack_pointer_rtx;
4605 info.equiv_reg_src = 0;
4607 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4608 info.const_equiv[j] = 0;
4612 while (insn != NULL_RTX)
4614 next = NEXT_INSN (insn);
4623 /* If this insn references the register that SP is equivalent to and
4624 we have a pending load to that register, we must force out the load
4625 first and then indicate we no longer know what SP's equivalent is. */
4626 if (info.equiv_reg_src != 0
4627 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4629 emit_equiv_load (&info);
4630 info.sp_equiv_reg = 0;
4633 info.new_sp_equiv_reg = info.sp_equiv_reg;
4634 info.new_sp_offset = info.sp_offset;
4636 /* If this is a (RETURN) and the return address is on the stack,
4637 update the address and change to an indirect jump. */
4638 if (GET_CODE (PATTERN (insn)) == RETURN
4639 || (GET_CODE (PATTERN (insn)) == PARALLEL
4640 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4642 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4644 HOST_WIDE_INT offset = 0;
4645 rtx jump_insn, jump_set;
4647 /* If the return address is in a register, we can emit the insn
4648 unchanged. Otherwise, it must be a MEM and we see what the
4649 base register and offset are. In any case, we have to emit any
4650 pending load to the equivalent reg of SP, if any. */
4651 if (REG_P (retaddr))
4653 emit_equiv_load (&info);
4658 else if (MEM_P (retaddr)
4659 && REG_P (XEXP (retaddr, 0)))
4660 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
4661 else if (MEM_P (retaddr)
4662 && GET_CODE (XEXP (retaddr, 0)) == PLUS
4663 && REG_P (XEXP (XEXP (retaddr, 0), 0))
4664 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
4666 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
4667 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
4672 /* If the base of the location containing the return pointer
4673 is SP, we must update it with the replacement address. Otherwise,
4674 just build the necessary MEM. */
4675 retaddr = plus_constant (base, offset);
4676 if (base == stack_pointer_rtx)
4677 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4678 plus_constant (info.sp_equiv_reg,
4681 retaddr = gen_rtx_MEM (Pmode, retaddr);
4683 /* If there is a pending load to the equivalent register for SP
4684 and we reference that register, we must load our address into
4685 a scratch register and then do that load. */
4686 if (info.equiv_reg_src
4687 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4692 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4693 if (HARD_REGNO_MODE_OK (regno, Pmode)
4694 && !fixed_regs[regno]
4695 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4696 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4698 && !refers_to_regno_p (regno,
4699 regno + hard_regno_nregs[regno]
4701 info.equiv_reg_src, NULL)
4702 && info.const_equiv[regno] == 0)
4705 if (regno == FIRST_PSEUDO_REGISTER)
4708 reg = gen_rtx_REG (Pmode, regno);
4709 emit_move_insn (reg, retaddr);
4713 emit_equiv_load (&info);
4714 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4716 /* Show the SET in the above insn is a RETURN. */
4717 jump_set = single_set (jump_insn);
4721 SET_IS_RETURN_P (jump_set) = 1;
4724 /* If SP is not mentioned in the pattern and its equivalent register, if
4725 any, is not modified, just emit it. Otherwise, if neither is set,
4726 replace the reference to SP and emit the insn. If none of those are
4727 true, handle each SET individually. */
4728 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4729 && (info.sp_equiv_reg == stack_pointer_rtx
4730 || !reg_set_p (info.sp_equiv_reg, insn)))
4732 else if (! reg_set_p (stack_pointer_rtx, insn)
4733 && (info.sp_equiv_reg == stack_pointer_rtx
4734 || !reg_set_p (info.sp_equiv_reg, insn)))
4736 if (! validate_replace_rtx (stack_pointer_rtx,
4737 plus_constant (info.sp_equiv_reg,
4744 else if (GET_CODE (PATTERN (insn)) == SET)
4745 handle_epilogue_set (PATTERN (insn), &info);
4746 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4748 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4749 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4750 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4755 info.sp_equiv_reg = info.new_sp_equiv_reg;
4756 info.sp_offset = info.new_sp_offset;
4758 /* Now update any constants this insn sets. */
4759 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4763 insns = get_insns ();
4768 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4769 structure that contains information about what we've seen so far. We
4770 process this SET by either updating that data or by emitting one or more insns.  */
4774 handle_epilogue_set (rtx set, struct epi_info *p)
4776 /* First handle the case where we are setting SP. Record what it is being
4777 set from. If unknown, abort. */
4778 if (reg_set_p (stack_pointer_rtx, set))
4780 if (SET_DEST (set) != stack_pointer_rtx)
4781 abort ();
4783 if (GET_CODE (SET_SRC (set)) == PLUS)
4785 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4786 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4787 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4788 else if (REG_P (XEXP (SET_SRC (set), 1))
4789 && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
4790 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
4792 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4797 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4799 /* If we are adjusting SP, we adjust from the old data. */
4800 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4802 p->new_sp_equiv_reg = p->sp_equiv_reg;
4803 p->new_sp_offset += p->sp_offset;
4806 if (p->new_sp_equiv_reg == 0 || !REG_P (p->new_sp_equiv_reg))
4812 /* Next handle the case where we are setting SP's equivalent register.
4813 If we already have a value to set it to, abort. We could update, but
4814 there seems little point in handling that case. Note that we have
4815 to allow for the case where we are setting the register set in
4816 the previous part of a PARALLEL inside a single insn. But use the
4817 old offset for any updates within this insn. We must allow for the case
4818 where the register is being set in a different (usually wider) mode than Pmode.  */
4820 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4822 if (p->equiv_reg_src != 0
4823 || !REG_P (p->new_sp_equiv_reg)
4824 || !REG_P (SET_DEST (set))
4825 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
4826 || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
4830 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4831 plus_constant (p->sp_equiv_reg,
4835 /* Otherwise, replace any references to SP in the insn with its new value
4836 and emit the insn.  */
4839 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4840 plus_constant (p->sp_equiv_reg,
4842 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4843 plus_constant (p->sp_equiv_reg,
4849 /* Update the tracking information for registers set to constants. */
4852 update_epilogue_consts (rtx dest, rtx x, void *data)
4854 struct epi_info *p = (struct epi_info *) data;
4857 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4860 /* If we are either clobbering a register or doing a partial set,
4861 show we don't know the value. */
4862 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4863 p->const_equiv[REGNO (dest)] = 0;
4865 /* If we are setting it to a constant, record that constant. */
4866 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4867 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4869 /* If this is a binary operation between a register we have been tracking
4870 and a constant, see if we can compute a new constant value. */
4871 else if (ARITHMETIC_P (SET_SRC (x))
4872 && REG_P (XEXP (SET_SRC (x), 0))
4873 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4874 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
4875 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4876 && 0 != (new = simplify_binary_operation
4877 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
4878 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
4879 XEXP (SET_SRC (x), 1)))
4880 && GET_CODE (new) == CONST_INT)
4881 p->const_equiv[REGNO (dest)] = new;
4883 /* Otherwise, we can't do anything with this value. */
4885 p->const_equiv[REGNO (dest)] = 0;
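/* For instance (illustrative register numbers), after

	(set (reg:SI 3) (const_int 16))
	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 8)))

   const_equiv[3] holds (const_int 16) and then (const_int 24), while a
   subsequent CLOBBER or partial store of register 3 resets it to zero,
   meaning "unknown".  */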
4888 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4891 emit_equiv_load (struct epi_info *p)
4893 if (p->equiv_reg_src != 0)
4895 rtx dest = p->sp_equiv_reg;
4897 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
4898 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
4899 REGNO (p->sp_equiv_reg));
4901 emit_move_insn (dest, p->equiv_reg_src);
4902 p->equiv_reg_src = 0;
4907 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4908 this into place with notes indicating where the prologue ends and where
4909 the epilogue begins. Update the basic block information when possible. */
4912 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
4916 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4919 #ifdef HAVE_prologue
4920 rtx prologue_end = NULL_RTX;
4922 #if defined (HAVE_epilogue) || defined(HAVE_return)
4923 rtx epilogue_end = NULL_RTX;
4926 #ifdef HAVE_prologue
4930 seq = gen_prologue ();
4933 /* Retain a map of the prologue insns. */
4934 record_insns (seq, &prologue);
4935 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
4939 set_insn_locators (seq, prologue_locator);
4941 /* Can't deal with multiple successors of the entry block
4942 at the moment.  Function should always have at least one entry point.  */
4944 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
4947 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
4952 /* If the exit block has no non-fake predecessors, we don't need an epilogue.  */
4954 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
4955 if ((e->flags & EDGE_FAKE) == 0)
4961 if (optimize && HAVE_return)
4963 /* If we're allowed to generate a simple return instruction,
4964 then by definition we don't need a full epilogue. Examine
4965 the block that falls through to EXIT. If it does not
4966 contain any code, examine its predecessors and try to
4967 emit (conditional) return instructions. */
4973 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
4974 if (e->flags & EDGE_FALLTHRU)
4980 /* Verify that there are no active instructions in the last block. */
4981 label = BB_END (last);
4982 while (label && GET_CODE (label) != CODE_LABEL)
4984 if (active_insn_p (label))
4986 label = PREV_INSN (label);
4989 if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
4991 rtx epilogue_line_note = NULL_RTX;
4993 /* Locate the line number associated with the closing brace,
4994 if we can find one. */
4995 for (seq = get_last_insn ();
4996 seq && ! active_insn_p (seq);
4997 seq = PREV_INSN (seq))
4998 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
5000 epilogue_line_note = seq;
5004 for (e = last->pred; e; e = e_next)
5006 basic_block bb = e->src;
5009 e_next = e->pred_next;
5010 if (bb == ENTRY_BLOCK_PTR)
5014 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
5017 /* If we have an unconditional jump, we can replace that
5018 with a simple return instruction. */
5019 if (simplejump_p (jump))
5021 emit_return_into_block (bb, epilogue_line_note);
5025 /* If we have a conditional jump, we can try to replace
5026 that with a conditional return instruction. */
5027 else if (condjump_p (jump))
5029 if (! redirect_jump (jump, 0, 0))
5032 /* If this block has only one successor, it both jumps
5033 and falls through to the fallthru block, so we can't delete the edge.  */
5035 if (bb->succ->succ_next == NULL)
5041 /* Fix up the CFG for the successful change we just made. */
5042 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5045 /* Emit a return insn for the exit fallthru block. Whether
5046 this is still reachable will be determined later. */
5048 emit_barrier_after (BB_END (last));
5049 emit_return_into_block (last, epilogue_line_note);
5050 epilogue_end = BB_END (last);
5051 last->succ->flags &= ~EDGE_FALLTHRU;
5056 /* Find the edge that falls through to EXIT. Other edges may exist
5057 due to RETURN instructions, but those don't need epilogues.
5058 There really shouldn't be a mixture -- either all should have
5059 been converted or none, however... */
5061 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5062 if (e->flags & EDGE_FALLTHRU)
5067 #ifdef HAVE_epilogue
5071 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5073 seq = gen_epilogue ();
5075 #ifdef INCOMING_RETURN_ADDR_RTX
5076 /* If this function returns with the stack depressed and we can support
5077 it, massage the epilogue to actually do that. */
5078 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5079 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5080 seq = keep_stack_depressed (seq);
5083 emit_jump_insn (seq);
5085 /* Retain a map of the epilogue insns. */
5086 record_insns (seq, &epilogue);
5087 set_insn_locators (seq, epilogue_locator);
5092 insert_insn_on_edge (seq, e);
5100 if (! next_active_insn (BB_END (e->src)))
5102 /* We have a fall-through edge to the exit block, the source is not
5103 at the end of the function, and there will be an assembler epilogue
5104 at the end of the function.
5105 We can't use force_nonfallthru here, because that would try to
5106 use return. Inserting a jump 'by hand' is extremely messy, so
5107 we take advantage of cfg_layout_finalize using
5108 fixup_fallthru_exit_predecessor. */
5109 cfg_layout_initialize ();
5110 FOR_EACH_BB (cur_bb)
5111 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5112 cur_bb->rbi->next = cur_bb->next_bb;
5113 cfg_layout_finalize ();
5118 commit_edge_insertions ();
5120 #ifdef HAVE_sibcall_epilogue
5121 /* Emit sibling epilogues before any sibling call sites. */
5122 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5124 basic_block bb = e->src;
5125 rtx insn = BB_END (bb);
5129 if (GET_CODE (insn) != CALL_INSN
5130 || ! SIBLING_CALL_P (insn))
5134 emit_insn (gen_sibcall_epilogue ());
5138 /* Retain a map of the epilogue insns. Used in life analysis to
5139 avoid getting rid of sibcall epilogue insns. Do this before we
5140 actually emit the sequence. */
5141 record_insns (seq, &sibcall_epilogue);
5142 set_insn_locators (seq, epilogue_locator);
5144 i = PREV_INSN (insn);
5145 newinsn = emit_insn_before (seq, insn);
5149 #ifdef HAVE_prologue
5150 /* This is probably all useless now that we use locators. */
5155 /* GDB handles `break f' by setting a breakpoint on the first
5156 line note after the prologue. Which means (1) that if
5157 there are line number notes before where we inserted the
5158 prologue we should move them, and (2) we should generate a
5159 note before the end of the first basic block, if there isn't one already there.
5162 ??? This behavior is completely broken when dealing with
5163 multiple entry functions. We simply place the note always
5164 into the first basic block and let alternate entry points be missed.  */
5168 for (insn = prologue_end; insn; insn = prev)
5170 prev = PREV_INSN (insn);
5171 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5173 /* Note that we cannot reorder the first insn in the
5174 chain, since rest_of_compilation relies on that
5175 remaining constant. */
5178 reorder_insns (insn, insn, prologue_end);
5182 /* Find the last line number note in the first block. */
5183 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5184 insn != prologue_end && insn;
5185 insn = PREV_INSN (insn))
5186 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5189 /* If we didn't find one, make a copy of the first line number we run across.  */
5193 for (insn = next_active_insn (prologue_end);
5195 insn = PREV_INSN (insn))
5196 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5198 emit_note_copy_after (insn, prologue_end);
5204 #ifdef HAVE_epilogue
5209 /* Similarly, move any line notes that appear after the epilogue.
5210 There is no need, however, to be quite so strict about the existence
5211 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
5212 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5213 info generation.  */
5214 for (insn = epilogue_end; insn; insn = next)
5216 next = NEXT_INSN (insn);
5217 if (GET_CODE (insn) == NOTE
5218 && (NOTE_LINE_NUMBER (insn) > 0
5219 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5220 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5221 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5227 /* Reposition the prologue-end and epilogue-begin notes after instruction
5228 scheduling and delayed branch scheduling. */
5231 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5233 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5234 rtx insn, last, note;
5237 if ((len = VARRAY_SIZE (prologue)) > 0)
5241 /* Scan from the beginning until we reach the last prologue insn.
5242 We apparently can't depend on basic_block_{head,end} after reorg has run.  */
5244 for (insn = f; insn; insn = NEXT_INSN (insn))
5246 if (GET_CODE (insn) == NOTE)
5248 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5251 else if (contains (insn, prologue))
5261 /* Find the prologue-end note if we haven't already, and
5262 move it to just after the last prologue insn. */
5265 for (note = last; (note = NEXT_INSN (note));)
5266 if (GET_CODE (note) == NOTE
5267 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5271 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5272 if (GET_CODE (last) == CODE_LABEL)
5273 last = NEXT_INSN (last);
5274 reorder_insns (note, note, last);
5278 if ((len = VARRAY_SIZE (epilogue)) > 0)
5282 /* Scan from the end until we reach the first epilogue insn.
5283 We apparently can't depend on basic_block_{head,end} after reorg has run.  */
5285 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5287 if (GET_CODE (insn) == NOTE)
5289 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5292 else if (contains (insn, epilogue))
5302 /* Find the epilogue-begin note if we haven't already, and
5303 move it to just before the first epilogue insn. */
5306 for (note = insn; (note = PREV_INSN (note));)
5307 if (GET_CODE (note) == NOTE
5308 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5312 if (PREV_INSN (last) != note)
5313 reorder_insns (note, note, PREV_INSN (last));
5316 #endif /* HAVE_prologue or HAVE_epilogue */
5319 /* Called once, at initialization, to initialize function.c. */
5322 init_function_once (void)
5324 VARRAY_INT_INIT (prologue, 0, "prologue");
5325 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5326 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5329 /* Reset the ib_boundaries_block array.  */
5332 reset_block_changes (void)
5334 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5335 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5338 /* Record the boundary for BLOCK. */
5340 record_block_change (tree block)
5348 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5349 VARRAY_POP (cfun->ib_boundaries_block);
5351 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5352 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5354 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5357 /* Finishes record of boundaries. */
5358 void finalize_block_changes (void)
5360 record_block_change (DECL_INITIAL (current_function_decl));
5363 /* For INSN return the BLOCK it belongs to. */
5365 check_block_change (rtx insn, tree *block)
5367 unsigned uid = INSN_UID (insn);
5369 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5372 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5375 /* Releases the ib_boundaries_block records. */
5377 free_block_changes (void)
5379 cfun->ib_boundaries_block = NULL;
5382 /* Returns the name of the current function. */
5384 current_function_name (void)
5386 return lang_hooks.decl_printable_name (cfun->decl, 2);
5389 #include "gt-function.h"