/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-pass.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
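
/* As a worked example of the two macros, with ALIGN == 8:
   FLOOR_ROUND (13, 8) == (13 & ~7) == 8 and
   CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16,
   while a VALUE that is already a multiple of 8 is returned unchanged
   by both.  */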
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
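
/* As a concrete illustration (make_s is a hypothetical function used
   only for this example), a GNU C statement expression such as

       x = ({ struct S s = make_s (); s.field; })

   may leave its result in a temporary slot, which must therefore be
   preserved past the statement that computed it.  */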
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
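
/* A sketch of how the routines below use the nesting levels:

       push_temp_slots ();        -- enter a new temporary nesting level
       ... assign_stack_temp ...  -- temporaries are created at that level
       free_temp_slots ();        -- statement boundary: non-kept slots freed
       pop_temp_slots ();         -- leave the level, freeing its slots  */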
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
static void emit_return_into_block (basic_block);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  cfun->outer = outer_function_chain;
  outer_function_chain = cfun;
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = outer_function_chain;

  set_cfun (p);
  outer_function_chain = p->outer;
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
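
/* As a worked instance of the check above: with a 32-bit Pmode and
   UNITS_PER_WORD == 4 (an illustrative configuration), frames of
   0x80000000 - 256 bytes or larger are rejected, reserving 64 words
   for the fixed part of the frame.  */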
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.

   We do not round to stack_boundary here.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, bool reduce_alignment_ok ATTRIBUTE_UNUSED)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment, alignment_in_bits;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;
  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert (reduce_alignment_ok
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);
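
  /* For example, an HImode value placed in a 4-byte slot on a
     big-endian target gets bigend_correction == 2, so the MEM built
     below addresses the last two bytes of the slot, which hold the
     least significant bytes of the value.  */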
  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, false);
}
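
/* An illustrative call: a word-sized slot with the natural alignment
   of its mode can be obtained with

       assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   this is the kind of request made when a pseudo-register must be
   spilled to the stack (see the file comment above about reload).  */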
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = GGC_NEW (struct temp_slot);
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->address = 0;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }
  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = GGC_NEW (struct temp_slot);

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
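
/* An illustrative call: a DImode scratch living only until the end of
   the current statement could be obtained with

       assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   the slot returns to the free list at the next free_temp_slots ().  */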
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */
rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
	if (XEXP (p->slot, 0) == x
	    || p->address == x
	    || (GET_CODE (x) == PLUS
		&& XEXP (x, 0) == virtual_stack_vars_rtx
		&& GET_CODE (XEXP (x, 1)) == CONST_INT
		&& INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	  return p;

	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	  for (next = p->address; next; next = XEXP (next, 1))
	    if (XEXP (next, 0) == x)
	      return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location; if so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (REG_P (new))
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
((ACCUMULATE_OUTGOING_ARGS						\
  ? (crtl->outgoing_args_size						\
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL)))				\
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		\
 + (STACK_POINTER_OFFSET))
#endif

#endif
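
/* For example, on a target that pushes outgoing arguments
   (ACCUMULATE_OUTGOING_ARGS == 0), the default above reduces to just
   STACK_POINTER_OFFSET; with accumulated outgoing arguments, dynamic
   allocations also sit past the `crtl->outgoing_args_size' block.  */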
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   in *DATA.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
	{
	  *loc = plus_constant (new, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
	{
	  new = plus_constant (new, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new);
	  if (x != new)
	    emit_move_insn (new, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}
      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && GET_CODE (recog_data.operand[2]) == CONST_INT
	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }
  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new = instantiate_new_reg (x, &offset);
	  if (new == NULL)
	    continue;
	  if (offset == 0)
	    x = new;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
					 GEN_INT (offset), NULL_RTX,
					 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
				   GET_MODE (new), SUBREG_BYTE (x));
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }
  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;

	instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
			current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);

	  /* Splice it in; skip the new decl.  */
	  TREE_CHAIN (decl) = TREE_CHAIN (p);
	  TREE_CHAIN (p) = decl;
	  p = decl;
	}
    }

  return args;
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (TREE_CODE (passed_type) == UNION_TYPE
      && TYPE_TRANSPARENT_UNION (passed_type))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
				    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
2154 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2155 the incoming location of the current parameter. */
2158 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2159 struct assign_parm_data_one *data)
2161 HOST_WIDE_INT pretend_bytes = 0;
2165 if (data->promoted_mode == VOIDmode)
2167 data->entry_parm = data->stack_parm = const0_rtx;
2171 #ifdef FUNCTION_INCOMING_ARG
2172 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2173 data->passed_type, data->named_arg);
2174 #else
2175 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2176 data->passed_type, data->named_arg);
2177 #endif
2179 if (entry_parm == 0)
2180 data->promoted_mode = data->passed_mode;
2182 /* Determine parm's home in the stack, in case it arrives in the stack
2183 or we should pretend it did. Compute the stack position and rtx where
2184 the argument arrives and its size.
2186 There is one complexity here: If this was a parameter that would
2187 have been passed in registers, but wasn't only because it is
2188 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2189 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2190 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2191 as it was the previous time. */
2192 in_regs = entry_parm != 0;
2193 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2194 in_regs = true;
2195 #endif
2196 if (!in_regs && !data->named_arg)
2198 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2200 rtx tem;
2201 #ifdef FUNCTION_INCOMING_ARG
2202 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2203 data->passed_type, true);
2204 #else
2205 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2206 data->passed_type, true);
2207 #endif
2208 in_regs = tem != NULL;
2212 /* If this parameter was passed both in registers and in the stack, use
2213 the copy on the stack. */
2214 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2215 data->passed_type))
2216 entry_parm = 0;
2218 if (entry_parm)
2220 int partial;
2222 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2223 data->promoted_mode,
2224 data->passed_type,
2225 data->named_arg);
2226 data->partial = partial;
2228 /* The caller might already have allocated stack space for the
2229 register parameters. */
2230 if (partial != 0 && all->reg_parm_stack_space == 0)
2232 /* Part of this argument is passed in registers and part
2233 is passed on the stack. Ask the prologue code to extend
2234 the stack part so that we can recreate the full value.
2236 PRETEND_BYTES is the size of the registers we need to store.
2237 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2238 stack space that the prologue should allocate.
2240 Internally, gcc assumes that the argument pointer is aligned
2241 to STACK_BOUNDARY bits. This is used both for alignment
2242 optimizations (see init_emit) and to locate arguments that are
2243 aligned to more than PARM_BOUNDARY bits. We must preserve this
2244 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2245 a stack boundary. */
2247 /* We assume at most one partial arg, and it must be the first
2248 argument on the stack. */
2249 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2251 pretend_bytes = partial;
2252 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2254 /* We want to align relative to the actual stack pointer, so
2255 don't include this in the stack size until later. */
2256 all->extra_pretend_bytes = all->pretend_args_size;
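/* Worked example (illustrative; the numbers are made up): if 6 bytes
   of a partial argument were passed in registers and STACK_BYTES is 8,
   then PRETEND_BYTES == 6 and
     pretend_args_size = CEIL_ROUND (6, 8) == 8,
   so the prologue allocates a full 8-byte slot and the stack-boundary
   invariant on the argument pointer is preserved.  */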
2260 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2261 entry_parm ? data->partial : 0, current_function_decl,
2262 &all->stack_args_size, &data->locate);
2264 /* Update parm_stack_boundary if this parameter is passed in the
2265 stack.  */
2266 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2267 crtl->parm_stack_boundary = data->locate.boundary;
2269 /* Adjust offsets to include the pretend args. */
2270 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2271 data->locate.slot_offset.constant += pretend_bytes;
2272 data->locate.offset.constant += pretend_bytes;
2274 data->entry_parm = entry_parm;
2277 /* A subroutine of assign_parms. If there is actually space on the stack
2278 for this parm, count it in stack_args_size and return true. */
2281 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2282 struct assign_parm_data_one *data)
2284 /* Trivially true if we've no incoming register.  */
2285 if (data->entry_parm == NULL)
2286 ;
2287 /* Also true if we're partially in registers and partially not,
2288 since we've arranged to drop the entire argument on the stack.  */
2289 else if (data->partial != 0)
2290 ;
2291 /* Also true if the target says that it's passed in both registers
2292 and on the stack.  */
2293 else if (GET_CODE (data->entry_parm) == PARALLEL
2294 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2295 ;
2296 /* Also true if the target says that there's stack allocated for
2297 all register parameters.  */
2298 else if (all->reg_parm_stack_space > 0)
2299 ;
2300 /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2301 else
2302 return false;
2304 all->stack_args_size.constant += data->locate.size.constant;
2305 if (data->locate.size.var)
2306 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2308 return true;
2311 /* A subroutine of assign_parms. Given that this parameter is allocated
2312 stack space by the ABI, find it. */
2315 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2317 rtx offset_rtx, stack_parm;
2318 unsigned int align, boundary;
2320 /* If we're passing this arg using a reg, make its stack home the
2321 aligned stack slot. */
2322 if (data->entry_parm)
2323 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2325 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2327 stack_parm = crtl->args.internal_arg_pointer;
2328 if (offset_rtx != const0_rtx)
2329 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2330 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2332 set_mem_attributes (stack_parm, parm, 1);
2334 boundary = data->locate.boundary;
2335 align = BITS_PER_UNIT;
2337 /* If we're padding upward, we know that the alignment of the slot
2338 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2339 intentionally forcing upward padding. Otherwise we have to come
2340 up with a guess at the alignment based on OFFSET_RTX. */
2341 if (data->locate.where_pad != downward || data->entry_parm)
2342 align = boundary;
2343 else if (GET_CODE (offset_rtx) == CONST_INT)
2345 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2346 align = align & -align;
2348 set_mem_align (stack_parm, align);
2350 if (data->entry_parm)
2351 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2353 data->stack_parm = stack_parm;
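#if 0
/* Stand-alone sketch (not part of GCC) of the alignment guess made
   above: OR the byte offset with the slot boundary, then isolate the
   lowest set bit with ALIGN & -ALIGN.  Assumes two's complement.  */
#include <assert.h>

static unsigned int
guess_align (unsigned int offset_bytes, unsigned int boundary_bits,
             unsigned int bits_per_unit)
{
  unsigned int align = offset_bytes * bits_per_unit | boundary_bits;
  return align & -align;	/* lowest set bit */
}

int
main (void)
{
  /* An offset of 12 bytes with a 64-bit boundary can only promise
     32-bit alignment: 96|64 == 96, whose lowest set bit is 32.  */
  assert (guess_align (12, 64, 8) == 32);
  return 0;
}
#endif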
2356 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2357 always valid and contiguous. */
2360 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2362 rtx entry_parm = data->entry_parm;
2363 rtx stack_parm = data->stack_parm;
2365 /* If this parm was passed part in regs and part in memory, pretend it
2366 arrived entirely in memory by pushing the register-part onto the stack.
2367 In the special case of a DImode or DFmode that is split, we could put
2368 it together in a pseudoreg directly, but for now that's not worth
2369 bothering with.  */
2370 if (data->partial != 0)
2372 /* Handle calls that pass values in multiple non-contiguous
2373 locations. The Irix 6 ABI has examples of this. */
2374 if (GET_CODE (entry_parm) == PARALLEL)
2375 emit_group_store (validize_mem (stack_parm), entry_parm,
2376 data->passed_type,
2377 int_size_in_bytes (data->passed_type));
2378 else
2380 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2381 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2382 data->partial / UNITS_PER_WORD);
2385 entry_parm = stack_parm;
2388 /* If we didn't decide this parm came in a register, by default it came
2389 on the stack.  */
2390 else if (entry_parm == NULL)
2391 entry_parm = stack_parm;
2393 /* When an argument is passed in multiple locations, we can't make use
2394 of this information, but we can save some copying if the whole argument
2395 is passed in a single register. */
2396 else if (GET_CODE (entry_parm) == PARALLEL
2397 && data->nominal_mode != BLKmode
2398 && data->passed_mode != BLKmode)
2400 size_t i, len = XVECLEN (entry_parm, 0);
2402 for (i = 0; i < len; i++)
2403 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2404 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2405 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2406 == data->passed_mode)
2407 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2409 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2410 break;
2414 data->entry_parm = entry_parm;
2417 /* A subroutine of assign_parms. Reconstitute any values which were
2418 passed in multiple registers and would fit in a single register. */
2421 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2423 rtx entry_parm = data->entry_parm;
2425 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2426 This can be done with register operations rather than on the
2427 stack, even if we will store the reconstituted parameter on the
2428 stack.  */
2429 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2431 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2432 emit_group_store (parmreg, entry_parm, NULL_TREE,
2433 GET_MODE_SIZE (GET_MODE (entry_parm)));
2434 entry_parm = parmreg;
2437 data->entry_parm = entry_parm;
2440 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2441 always valid and properly aligned. */
2444 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2446 rtx stack_parm = data->stack_parm;
2448 /* If we can't trust the parm stack slot to be aligned enough for its
2449 ultimate type, don't use that slot after entry. We'll make another
2450 stack slot, if we need one.  */
2451 if (stack_parm
2452 && ((STRICT_ALIGNMENT
2453 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2454 || (data->nominal_type
2455 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2456 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2457 stack_parm = NULL;
2459 /* If parm was passed in memory, and we need to convert it on entry,
2460 don't store it back in that same slot. */
2461 else if (data->entry_parm == stack_parm
2462 && data->nominal_mode != BLKmode
2463 && data->nominal_mode != data->passed_mode)
2464 stack_parm = NULL;
2466 /* If stack protection is in effect for this function, don't leave any
2467 pointers in their passed stack slots. */
2468 else if (crtl->stack_protect_guard
2469 && (flag_stack_protect == 2
2470 || data->passed_pointer
2471 || POINTER_TYPE_P (data->nominal_type)))
2472 stack_parm = NULL;
2474 data->stack_parm = stack_parm;
2477 /* A subroutine of assign_parms. Return true if the current parameter
2478 should be stored as a BLKmode in the current frame. */
2481 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2483 if (data->nominal_mode == BLKmode)
2484 return true;
2485 if (GET_MODE (data->entry_parm) == BLKmode)
2486 return true;
2488 #ifdef BLOCK_REG_PADDING
2489 /* Only assign_parm_setup_block knows how to deal with register arguments
2490 that are padded at the least significant end. */
2491 if (REG_P (data->entry_parm)
2492 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2493 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2494 == (BYTES_BIG_ENDIAN ? upward : downward)))
2495 return true;
2496 #endif
2498 return false;
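/* Example (illustrative): a 3-byte argument arriving in an 8-byte
   register may be justified toward either end of that register.  When
   BLOCK_REG_PADDING reports that the padding sits at the least
   significant end, a plain word-sized store would put the bytes at the
   wrong offset within the stack slot, so such parms are routed through
   assign_parm_setup_block above.  */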
2501 /* A subroutine of assign_parms. Arrange for the parameter to be
2502 present and valid in DATA->STACK_RTL. */
2505 assign_parm_setup_block (struct assign_parm_data_all *all,
2506 tree parm, struct assign_parm_data_one *data)
2508 rtx entry_parm = data->entry_parm;
2509 rtx stack_parm = data->stack_parm;
2510 HOST_WIDE_INT size;
2511 HOST_WIDE_INT size_stored;
2513 if (GET_CODE (entry_parm) == PARALLEL)
2514 entry_parm = emit_group_move_into_temps (entry_parm);
2516 size = int_size_in_bytes (data->passed_type);
2517 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2518 if (stack_parm == 0)
2520 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2521 stack_parm = assign_stack_local (BLKmode, size_stored,
2522 DECL_ALIGN (parm));
2523 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2524 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2525 set_mem_attributes (stack_parm, parm, 1);
2528 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2529 calls that pass values in multiple non-contiguous locations. */
2530 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2532 rtx mem;
2534 /* Note that we will be storing an integral number of words.
2535 So we have to be careful to ensure that we allocate an
2536 integral number of words. We do this above when we call
2537 assign_stack_local if space was not allocated in the argument
2538 list. If it was, this will not work if PARM_BOUNDARY is not
2539 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2540 if it becomes a problem. Exception is when BLKmode arrives
2541 with arguments not conforming to word_mode. */
2543 if (data->stack_parm == 0)
2544 ;
2545 else if (GET_CODE (entry_parm) == PARALLEL)
2546 ;
2547 else
2548 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2550 mem = validize_mem (stack_parm);
2552 /* Handle values in multiple non-contiguous locations. */
2553 if (GET_CODE (entry_parm) == PARALLEL)
2555 push_to_sequence2 (all->first_conversion_insn,
2556 all->last_conversion_insn);
2557 emit_group_store (mem, entry_parm, data->passed_type, size);
2558 all->first_conversion_insn = get_insns ();
2559 all->last_conversion_insn = get_last_insn ();
2560 end_sequence ();
2563 else if (size == 0)
2564 ;
2566 /* If SIZE is that of a mode no bigger than a word, just use
2567 that mode's store operation. */
2568 else if (size <= UNITS_PER_WORD)
2570 enum machine_mode mode
2571 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2573 if (mode != BLKmode
2574 #ifdef BLOCK_REG_PADDING
2575 && (size == UNITS_PER_WORD
2576 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2577 != (BYTES_BIG_ENDIAN ? upward : downward)))
2578 #endif
2579 )
2581 rtx reg;
2583 /* We are really truncating a word_mode value containing
2584 SIZE bytes into a value of mode MODE. If such an
2585 operation requires no actual instructions, we can refer
2586 to the value directly in mode MODE, otherwise we must
2587 start with the register in word_mode and explicitly
2588 convert it.  */
2589 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2590 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2591 else
2593 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2594 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2596 emit_move_insn (change_address (mem, mode, 0), reg);
2599 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2600 machine must be aligned to the left before storing
2601 to memory. Note that the previous test doesn't
2602 handle all cases (e.g. SIZE == 3). */
2603 else if (size != UNITS_PER_WORD
2604 #ifdef BLOCK_REG_PADDING
2605 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2606 == downward)
2607 #else
2608 && BYTES_BIG_ENDIAN
2609 #endif
2610 )
2612 rtx tem, x;
2613 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2614 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2616 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2617 build_int_cst (NULL_TREE, by),
2618 NULL_RTX, 1);
2619 tem = change_address (mem, word_mode, 0);
2620 emit_move_insn (tem, x);
2622 else
2623 move_block_from_reg (REGNO (entry_parm), mem,
2624 size_stored / UNITS_PER_WORD);
2626 else
2627 move_block_from_reg (REGNO (entry_parm), mem,
2628 size_stored / UNITS_PER_WORD);
2630 else if (data->stack_parm == 0)
2632 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2633 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2634 BLOCK_OP_NORMAL);
2635 all->first_conversion_insn = get_insns ();
2636 all->last_conversion_insn = get_last_insn ();
2637 end_sequence ();
2640 data->stack_parm = stack_parm;
2641 SET_DECL_RTL (parm, stack_parm);
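#if 0
/* Stand-alone sketch (not part of GCC) of the left-justification
   performed above for small BLKmode values on big-endian targets: a
   SIZE-byte value right-justified in a word is shifted left so that
   its bytes land at the low addresses of the stack slot.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const int units_per_word = 4, size = 3, bits_per_unit = 8;
  int by = (units_per_word - size) * bits_per_unit;	/* 8 bits */
  uint32_t reg = 0x00112233;	/* three significant low-order bytes */
  assert ((uint32_t) (reg << by) == 0x11223300);
  return 0;
}
#endif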
2644 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2645 parameter. Get it there. Perform all ABI specified conversions. */
2648 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2649 struct assign_parm_data_one *data)
2651 rtx parmreg;
2652 enum machine_mode promoted_nominal_mode;
2653 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2654 bool did_conversion = false;
2656 /* Store the parm in a pseudoregister during the function, but we may
2657 need to do it in a wider mode. */
2659 /* This is not really promoting for a call. However we need to be
2660 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2661 promoted_nominal_mode
2662 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2664 parmreg = gen_reg_rtx (promoted_nominal_mode);
2666 if (!DECL_ARTIFICIAL (parm))
2667 mark_user_reg (parmreg);
2669 /* If this was an item that we received a pointer to,
2670 set DECL_RTL appropriately. */
2671 if (data->passed_pointer)
2673 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2674 set_mem_attributes (x, parm, 1);
2675 SET_DECL_RTL (parm, x);
2677 else
2678 SET_DECL_RTL (parm, parmreg);
2680 assign_parm_remove_parallels (data);
2682 /* Copy the value into the register. */
2683 if (data->nominal_mode != data->passed_mode
2684 || promoted_nominal_mode != data->promoted_mode)
2686 int save_tree_used;
2688 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2689 mode, by the caller. We now have to convert it to
2690 NOMINAL_MODE, if different. However, PARMREG may be in
2691 a different mode than NOMINAL_MODE if it is being stored
2692 promoted.
2694 If ENTRY_PARM is a hard register, it might be in a register
2695 not valid for operating in its mode (e.g., an odd-numbered
2696 register for a DFmode). In that case, moves are the only
2697 thing valid, so we can't do a convert from there. This
2698 occurs when the calling sequence allows such misaligned
2699 usages.
2701 In addition, the conversion may involve a call, which could
2702 clobber parameters which haven't been copied to pseudo
2703 registers yet. Therefore, we must first copy the parm to
2704 a pseudo reg here, and save the conversion until after all
2705 parameters have been moved. */
2707 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2709 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2711 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2712 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2714 if (GET_CODE (tempreg) == SUBREG
2715 && GET_MODE (tempreg) == data->nominal_mode
2716 && REG_P (SUBREG_REG (tempreg))
2717 && data->nominal_mode == data->passed_mode
2718 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2719 && GET_MODE_SIZE (GET_MODE (tempreg))
2720 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2722 /* The argument is already sign/zero extended, so note it
2723 into the subreg.  */
2724 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2725 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2728 /* TREE_USED gets set erroneously during expand_assignment. */
2729 save_tree_used = TREE_USED (parm);
2730 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2731 TREE_USED (parm) = save_tree_used;
2732 all->first_conversion_insn = get_insns ();
2733 all->last_conversion_insn = get_last_insn ();
2734 end_sequence ();
2736 did_conversion = true;
2738 else
2739 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2741 /* If we were passed a pointer but the actual value can safely live
2742 in a register, put it in one. */
2743 if (data->passed_pointer
2744 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2745 /* If by-reference argument was promoted, demote it. */
2746 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2747 || use_register_for_decl (parm)))
2749 /* We can't use nominal_mode, because it will have been set to
2750 Pmode above. We must use the actual mode of the parm. */
2751 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2752 mark_user_reg (parmreg);
2754 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2756 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2757 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2759 push_to_sequence2 (all->first_conversion_insn,
2760 all->last_conversion_insn);
2761 emit_move_insn (tempreg, DECL_RTL (parm));
2762 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2763 emit_move_insn (parmreg, tempreg);
2764 all->first_conversion_insn = get_insns ();
2765 all->last_conversion_insn = get_last_insn ();
2766 end_sequence ();
2768 did_conversion = true;
2770 else
2771 emit_move_insn (parmreg, DECL_RTL (parm));
2773 SET_DECL_RTL (parm, parmreg);
2775 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2776 now the parm.  */
2777 data->stack_parm = NULL;
2780 /* Mark the register as eliminable if we did no conversion and it was
2781 copied from memory at a fixed offset, and the arg pointer was not
2782 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2783 offset formed an invalid address, such memory-equivalences as we
2784 make here would screw up life analysis for it. */
2785 if (data->nominal_mode == data->passed_mode
2786 && !did_conversion
2787 && data->stack_parm != 0
2788 && MEM_P (data->stack_parm)
2789 && data->locate.offset.var == 0
2790 && reg_mentioned_p (virtual_incoming_args_rtx,
2791 XEXP (data->stack_parm, 0)))
2793 rtx linsn = get_last_insn ();
2794 rtx sinsn, set;
2796 /* Mark complex types separately. */
2797 if (GET_CODE (parmreg) == CONCAT)
2799 enum machine_mode submode
2800 = GET_MODE_INNER (GET_MODE (parmreg));
2801 int regnor = REGNO (XEXP (parmreg, 0));
2802 int regnoi = REGNO (XEXP (parmreg, 1));
2803 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2804 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2805 GET_MODE_SIZE (submode));
2807 /* Scan backwards for the set of the real and
2808 imaginary parts.  */
2809 for (sinsn = linsn; sinsn != 0;
2810 sinsn = prev_nonnote_insn (sinsn))
2812 set = single_set (sinsn);
2813 if (set == 0)
2814 continue;
2816 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2817 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2818 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2819 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2822 else if ((set = single_set (linsn)) != 0
2823 && SET_DEST (set) == parmreg)
2824 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2827 /* For pointer data type, suggest pointer register. */
2828 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2829 mark_reg_pointer (parmreg,
2830 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2833 /* A subroutine of assign_parms. Allocate stack space to hold the current
2834 parameter. Get it there. Perform all ABI specified conversions. */
2837 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2838 struct assign_parm_data_one *data)
2840 /* Value must be stored in the stack slot STACK_PARM during function
2841 execution.  */
2842 bool to_conversion = false;
2844 assign_parm_remove_parallels (data);
2846 if (data->promoted_mode != data->nominal_mode)
2848 /* Conversion is required. */
2849 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2851 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2853 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2854 to_conversion = true;
2856 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2857 TYPE_UNSIGNED (TREE_TYPE (parm)));
2859 if (data->stack_parm)
2860 /* ??? This may need a big-endian conversion on sparc64. */
2861 data->stack_parm
2862 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2865 if (data->entry_parm != data->stack_parm)
2867 rtx src, dest;
2869 if (data->stack_parm == 0)
2871 data->stack_parm
2872 = assign_stack_local (GET_MODE (data->entry_parm),
2873 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2874 TYPE_ALIGN (data->passed_type));
2875 set_mem_attributes (data->stack_parm, parm, 1);
2878 dest = validize_mem (data->stack_parm);
2879 src = validize_mem (data->entry_parm);
2881 if (MEM_P (src))
2883 /* Use a block move to handle potentially misaligned entry_parm.  */
2884 if (!to_conversion)
2885 push_to_sequence2 (all->first_conversion_insn,
2886 all->last_conversion_insn);
2887 to_conversion = true;
2889 emit_block_move (dest, src,
2890 GEN_INT (int_size_in_bytes (data->passed_type)),
2891 BLOCK_OP_NORMAL);
2893 else
2894 emit_move_insn (dest, src);
2896 if (to_conversion)
2899 all->first_conversion_insn = get_insns ();
2900 all->last_conversion_insn = get_last_insn ();
2901 end_sequence ();
2904 SET_DECL_RTL (parm, data->stack_parm);
2907 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2908 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2911 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2913 tree parm;
2914 tree orig_fnargs = all->orig_fnargs;
2916 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2918 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2919 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2921 rtx tmp, real, imag;
2922 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2924 real = DECL_RTL (fnargs);
2925 imag = DECL_RTL (TREE_CHAIN (fnargs));
2926 if (inner != GET_MODE (real))
2928 real = gen_lowpart_SUBREG (inner, real);
2929 imag = gen_lowpart_SUBREG (inner, imag);
2932 if (TREE_ADDRESSABLE (parm))
2934 rtx rmem, imem;
2935 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2937 /* split_complex_arg put the real and imag parts in
2938 pseudos. Move them to memory. */
2939 tmp = assign_stack_local (DECL_MODE (parm), size,
2940 TYPE_ALIGN (TREE_TYPE (parm)));
2941 set_mem_attributes (tmp, parm, 1);
2942 rmem = adjust_address_nv (tmp, inner, 0);
2943 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2944 push_to_sequence2 (all->first_conversion_insn,
2945 all->last_conversion_insn);
2946 emit_move_insn (rmem, real);
2947 emit_move_insn (imem, imag);
2948 all->first_conversion_insn = get_insns ();
2949 all->last_conversion_insn = get_last_insn ();
2950 end_sequence ();
2953 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2954 SET_DECL_RTL (parm, tmp);
2956 real = DECL_INCOMING_RTL (fnargs);
2957 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2958 if (inner != GET_MODE (real))
2960 real = gen_lowpart_SUBREG (inner, real);
2961 imag = gen_lowpart_SUBREG (inner, imag);
2963 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2964 set_decl_incoming_rtl (parm, tmp, false);
2965 fnargs = TREE_CHAIN (fnargs);
2967 else
2969 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2970 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2972 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2973 instead of the copy of decl, i.e. FNARGS. */
2974 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2975 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2978 fnargs = TREE_CHAIN (fnargs);
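/* Worked example (illustrative): for a parm of type _Complex float,
   split_complex_arg handed us two SFmode halves; the code above glues
   them back together as (concat:SC real imag), so DECL_RTL once again
   describes the complete complex value, while DECL_INCOMING_RTL gets
   the same treatment for debug output.  */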
2982 /* Assign RTL expressions to the function's parameters. This may involve
2983 copying them into registers and using those registers as the DECL_RTL. */
2986 assign_parms (tree fndecl)
2988 struct assign_parm_data_all all;
2989 tree fnargs, parm;
2991 crtl->args.internal_arg_pointer
2992 = targetm.calls.internal_arg_pointer ();
2994 assign_parms_initialize_all (&all);
2995 fnargs = assign_parms_augmented_arg_list (&all);
2997 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2999 struct assign_parm_data_one data;
3001 /* Extract the type of PARM; adjust it according to ABI. */
3002 assign_parm_find_data_types (&all, parm, &data);
3004 /* Early out for errors and void parameters. */
3005 if (data.passed_mode == VOIDmode)
3007 SET_DECL_RTL (parm, const0_rtx);
3008 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3009 continue;
3012 /* Estimate stack alignment from parameter alignment. */
3013 if (SUPPORTS_STACK_ALIGNMENT)
3015 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3016 data.passed_type);
3017 if (TYPE_ALIGN (data.nominal_type) > align)
3018 align = TYPE_ALIGN (data.passed_type);
3019 if (crtl->stack_alignment_estimated < align)
3021 gcc_assert (!crtl->stack_realign_processed);
3022 crtl->stack_alignment_estimated = align;
3026 if (cfun->stdarg && !TREE_CHAIN (parm))
3027 assign_parms_setup_varargs (&all, &data, false);
3029 /* Find out where the parameter arrives in this function. */
3030 assign_parm_find_entry_rtl (&all, &data);
3032 /* Find out where stack space for this parameter might be. */
3033 if (assign_parm_is_stack_parm (&all, &data))
3035 assign_parm_find_stack_rtl (parm, &data);
3036 assign_parm_adjust_entry_rtl (&data);
3039 /* Record permanently how this parm was passed. */
3040 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3042 /* Update info on where next arg arrives in registers. */
3043 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3044 data.passed_type, data.named_arg);
3046 assign_parm_adjust_stack_rtl (&data);
3048 if (assign_parm_setup_block_p (&data))
3049 assign_parm_setup_block (&all, parm, &data);
3050 else if (data.passed_pointer || use_register_for_decl (parm))
3051 assign_parm_setup_reg (&all, parm, &data);
3053 assign_parm_setup_stack (&all, parm, &data);
3056 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3057 assign_parms_unsplit_complex (&all, fnargs);
3059 /* Output all parameter conversion instructions (possibly including calls)
3060 now that all parameters have been copied out of hard registers. */
3061 emit_insn (all.first_conversion_insn);
3063 /* Estimate reload stack alignment from scalar return mode. */
3064 if (SUPPORTS_STACK_ALIGNMENT)
3066 if (DECL_RESULT (fndecl))
3068 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3069 enum machine_mode mode = TYPE_MODE (type);
3071 if (mode != BLKmode
3072 && mode != VOIDmode
3073 && !AGGREGATE_TYPE_P (type))
3075 unsigned int align = GET_MODE_ALIGNMENT (mode);
3076 if (crtl->stack_alignment_estimated < align)
3078 gcc_assert (!crtl->stack_realign_processed);
3079 crtl->stack_alignment_estimated = align;
3085 /* If we are receiving a struct value address as the first argument, set up
3086 the RTL for the function result. As this might require code to convert
3087 the transmitted address to Pmode, we do this here to ensure that possible
3088 preliminary conversions of the address have been emitted already. */
3089 if (all.function_result_decl)
3091 tree result = DECL_RESULT (current_function_decl);
3092 rtx addr = DECL_RTL (all.function_result_decl);
3093 rtx x;
3095 if (DECL_BY_REFERENCE (result))
3096 x = addr;
3097 else
3099 addr = convert_memory_address (Pmode, addr);
3100 x = gen_rtx_MEM (DECL_MODE (result), addr);
3101 set_mem_attributes (x, result, 1);
3103 SET_DECL_RTL (result, x);
3106 /* We have aligned all the args, so add space for the pretend args. */
3107 crtl->args.pretend_args_size = all.pretend_args_size;
3108 all.stack_args_size.constant += all.extra_pretend_bytes;
3109 crtl->args.size = all.stack_args_size.constant;
3111 /* Adjust function incoming argument size for alignment and
3112 minimum size.  */
3114 #ifdef REG_PARM_STACK_SPACE
3115 crtl->args.size = MAX (crtl->args.size,
3116 REG_PARM_STACK_SPACE (fndecl));
3117 #endif
3119 crtl->args.size = CEIL_ROUND (crtl->args.size,
3120 PARM_BOUNDARY / BITS_PER_UNIT);
3122 #ifdef ARGS_GROW_DOWNWARD
3123 crtl->args.arg_offset_rtx
3124 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3125 : expand_expr (size_diffop (all.stack_args_size.var,
3126 size_int (-all.stack_args_size.constant)),
3127 NULL_RTX, VOIDmode, 0));
3128 #else
3129 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3130 #endif
3132 /* See how many bytes, if any, of its args a function should try to pop
3133 on return.  */
3135 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3136 crtl->args.size);
3138 /* For stdarg.h function, save info about
3139 regs and stack space used by the named args. */
3141 crtl->args.info = all.args_so_far;
3143 /* Set the rtx used for the function return value. Put this in its
3144 own variable so any optimizers that need this information don't have
3145 to include tree.h. Do this here so it gets done when an inlined
3146 function gets output. */
3148 crtl->return_rtx
3149 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3150 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3152 /* If scalar return value was computed in a pseudo-reg, or was a named
3153 return value that got dumped to the stack, copy that to the hard
3154 return register.  */
3155 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3157 tree decl_result = DECL_RESULT (fndecl);
3158 rtx decl_rtl = DECL_RTL (decl_result);
3160 if (REG_P (decl_rtl)
3161 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3162 : DECL_REGISTER (decl_result))
3164 rtx real_decl_rtl;
3166 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3167 fndecl, true);
3168 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3169 /* The delay slot scheduler assumes that crtl->return_rtx
3170 holds the hard register containing the return value, not a
3171 temporary pseudo. */
3172 crtl->return_rtx = real_decl_rtl;
3177 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3178 For all seen types, gimplify their sizes. */
3181 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3183 tree t = *tp;
3185 *walk_subtrees = 0;
3186 if (TYPE_P (t))
3188 if (POINTER_TYPE_P (t))
3189 *walk_subtrees = 1;
3190 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3191 && !TYPE_SIZES_GIMPLIFIED (t))
3193 gimplify_type_sizes (t, (gimple_seq *) data);
3194 *walk_subtrees = 1;
3198 return NULL;
3201 /* Gimplify the parameter list for current_function_decl. This involves
3202 evaluating SAVE_EXPRs of variable sized parameters and generating code
3203 to implement callee-copies reference parameters. Returns a sequence of
3204 statements to add to the beginning of the function. */
3207 gimplify_parameters (void)
3209 struct assign_parm_data_all all;
3210 tree fnargs, parm;
3211 gimple_seq stmts = NULL;
3213 assign_parms_initialize_all (&all);
3214 fnargs = assign_parms_augmented_arg_list (&all);
3216 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3218 struct assign_parm_data_one data;
3220 /* Extract the type of PARM; adjust it according to ABI. */
3221 assign_parm_find_data_types (&all, parm, &data);
3223 /* Early out for errors and void parameters. */
3224 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3225 continue;
3227 /* Update info on where next arg arrives in registers. */
3228 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3229 data.passed_type, data.named_arg);
3231 /* ??? Once upon a time variable_size stuffed parameter list
3232 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3233 turned out to be less than manageable in the gimple world.
3234 Now we have to hunt them down ourselves. */
3235 walk_tree_without_duplicates (&data.passed_type,
3236 gimplify_parm_type, &stmts);
3238 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3240 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3241 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3244 if (data.passed_pointer)
3246 tree type = TREE_TYPE (data.passed_type);
3247 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3248 type, data.named_arg))
3250 tree local, t;
3252 /* For constant sized objects, this is trivial; for
3253 variable-sized objects, we have to play games. */
3254 if (TREE_CONSTANT (DECL_SIZE (parm)))
3256 local = create_tmp_var (type, get_name (parm));
3257 DECL_IGNORED_P (local) = 0;
3259 else
3261 tree ptr_type, addr;
3263 ptr_type = build_pointer_type (type);
3264 addr = create_tmp_var (ptr_type, get_name (parm));
3265 DECL_IGNORED_P (addr) = 0;
3266 local = build_fold_indirect_ref (addr);
3268 t = built_in_decls[BUILT_IN_ALLOCA];
3269 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3270 t = fold_convert (ptr_type, t);
3271 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3272 gimplify_and_add (t, &stmts);
3275 gimplify_assign (local, parm, &stmts);
3277 SET_DECL_VALUE_EXPR (parm, local);
3278 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3283 return stmts;
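/* The statements queued above for a variable-sized callee-copied parm
   P amount to (illustrative pseudo-gimple, not literal output):
     addr = __builtin_alloca (DECL_SIZE_UNIT (P));
     *addr = *P;
   after which P's DECL_VALUE_EXPR is *addr, so every later use of P
   reads the local copy rather than the caller's object.  */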
3286 /* Compute the size and offset from the start of the stacked arguments for a
3287 parm passed in mode PASSED_MODE and with type TYPE.
3289 INITIAL_OFFSET_PTR points to the current offset into the stacked
3290 arguments.
3292 The starting offset and size for this parm are returned in
3293 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3294 nonzero, the offset is that of stack slot, which is returned in
3295 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3296 padding required from the initial offset ptr to the stack slot.
3298 IN_REGS is nonzero if the argument will be passed in registers. It will
3299 never be set if REG_PARM_STACK_SPACE is not defined.
3301 FNDECL is the function in which the argument was defined.
3303 There are two types of rounding that are done. The first, controlled by
3304 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3305 list to be aligned to the specific boundary (in bits). This rounding
3306 affects the initial and starting offsets, but not the argument size.
3308 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3309 optionally rounds the size of the parm to PARM_BOUNDARY. The
3310 initial offset is not affected by this rounding, while the size always
3311 is and the starting offset may be. */
3313 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3314 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3315 callers pass in the total size of args so far as
3316 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3319 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3320 int partial, tree fndecl ATTRIBUTE_UNUSED,
3321 struct args_size *initial_offset_ptr,
3322 struct locate_and_pad_arg_data *locate)
3324 tree sizetree;
3325 enum direction where_pad;
3326 unsigned int boundary;
3327 int reg_parm_stack_space = 0;
3328 int part_size_in_regs;
3330 #ifdef REG_PARM_STACK_SPACE
3331 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3333 /* If we have found a stack parm before we reach the end of the
3334 area reserved for registers, skip that area.  */
3335 if (! in_regs)
3337 if (reg_parm_stack_space > 0)
3339 if (initial_offset_ptr->var)
3341 initial_offset_ptr->var
3342 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3343 ssize_int (reg_parm_stack_space));
3344 initial_offset_ptr->constant = 0;
3346 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3347 initial_offset_ptr->constant = reg_parm_stack_space;
3350 #endif /* REG_PARM_STACK_SPACE */
3352 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3354 sizetree
3355 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3356 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3357 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3358 locate->where_pad = where_pad;
3360 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3361 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3362 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3364 locate->boundary = boundary;
3366 if (SUPPORTS_STACK_ALIGNMENT)
3368 /* stack_alignment_estimated can't change after stack has been
3369 realigned.  */
3370 if (crtl->stack_alignment_estimated < boundary)
3372 if (!crtl->stack_realign_processed)
3373 crtl->stack_alignment_estimated = boundary;
3374 else
3376 /* If stack is realigned and stack alignment value
3377 hasn't been finalized, it is OK not to increase
3378 stack_alignment_estimated. The bigger alignment
3379 requirement is recorded in stack_alignment_needed
3380 below.  */
3381 gcc_assert (!crtl->stack_realign_finalized
3382 && crtl->stack_realign_needed);
3387 /* Remember if the outgoing parameter requires extra alignment on the
3388 calling function side. */
3389 if (crtl->stack_alignment_needed < boundary)
3390 crtl->stack_alignment_needed = boundary;
3391 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3392 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3393 if (crtl->preferred_stack_boundary < boundary)
3394 crtl->preferred_stack_boundary = boundary;
3396 #ifdef ARGS_GROW_DOWNWARD
3397 locate->slot_offset.constant = -initial_offset_ptr->constant;
3398 if (initial_offset_ptr->var)
3399 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3400 initial_offset_ptr->var);
3403 tree s2 = sizetree;
3404 if (where_pad != none
3405 && (!host_integerp (sizetree, 1)
3406 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3407 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3408 SUB_PARM_SIZE (locate->slot_offset, s2);
3411 locate->slot_offset.constant += part_size_in_regs;
3413 if (!in_regs
3414 #ifdef REG_PARM_STACK_SPACE
3415 || REG_PARM_STACK_SPACE (fndecl) > 0
3416 #endif
3417 )
3418 pad_to_arg_alignment (&locate->slot_offset, boundary,
3419 &locate->alignment_pad);
3421 locate->size.constant = (-initial_offset_ptr->constant
3422 - locate->slot_offset.constant);
3423 if (initial_offset_ptr->var)
3424 locate->size.var = size_binop (MINUS_EXPR,
3425 size_binop (MINUS_EXPR,
3426 ssize_int (0),
3427 initial_offset_ptr->var),
3428 locate->slot_offset.var);
3430 /* Pad_below needs the pre-rounded size to know how much to pad
3431 below.  */
3432 locate->offset = locate->slot_offset;
3433 if (where_pad == downward)
3434 pad_below (&locate->offset, passed_mode, sizetree);
3436 #else /* !ARGS_GROW_DOWNWARD */
3437 if (!in_regs
3438 #ifdef REG_PARM_STACK_SPACE
3439 || REG_PARM_STACK_SPACE (fndecl) > 0
3440 #endif
3441 )
3442 pad_to_arg_alignment (initial_offset_ptr, boundary,
3443 &locate->alignment_pad);
3444 locate->slot_offset = *initial_offset_ptr;
3446 #ifdef PUSH_ROUNDING
3447 if (passed_mode != BLKmode)
3448 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3449 #endif
3451 /* Pad_below needs the pre-rounded size to know how much to pad below
3452 so this must be done before rounding up. */
3453 locate->offset = locate->slot_offset;
3454 if (where_pad == downward)
3455 pad_below (&locate->offset, passed_mode, sizetree);
3457 if (where_pad != none
3458 && (!host_integerp (sizetree, 1)
3459 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3460 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3462 ADD_PARM_SIZE (locate->size, sizetree);
3464 locate->size.constant -= part_size_in_regs;
3465 #endif /* ARGS_GROW_DOWNWARD */
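/* Worked example (illustrative, args grow upward): with
   FUNCTION_ARG_BOUNDARY == 64 bits and PARM_BOUNDARY == 32 bits, a
   4-byte parm arriving when *INITIAL_OFFSET_PTR is 4 is first padded
   up to slot_offset == 8 by the first rounding; the second rounding
   leaves its 4-byte size alone (already a multiple of 32 bits), so
   the next argument starts at offset 12.  */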
3468 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3469 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3472 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3473 struct args_size *alignment_pad)
3475 tree save_var = NULL_TREE;
3476 HOST_WIDE_INT save_constant = 0;
3477 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3478 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3480 #ifdef SPARC_STACK_BOUNDARY_HACK
3481 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3482 the real alignment of %sp. However, when it does this, the
3483 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3484 if (SPARC_STACK_BOUNDARY_HACK)
3485 sp_offset = 0;
3486 #endif
3488 if (boundary > PARM_BOUNDARY)
3490 save_var = offset_ptr->var;
3491 save_constant = offset_ptr->constant;
3494 alignment_pad->var = NULL_TREE;
3495 alignment_pad->constant = 0;
3497 if (boundary > BITS_PER_UNIT)
3499 if (offset_ptr->var)
3501 tree sp_offset_tree = ssize_int (sp_offset);
3502 tree offset = size_binop (PLUS_EXPR,
3503 ARGS_SIZE_TREE (*offset_ptr),
3504 sp_offset_tree);
3505 #ifdef ARGS_GROW_DOWNWARD
3506 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3507 #else
3508 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3509 #endif
3511 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3512 /* ARGS_SIZE_TREE includes constant term. */
3513 offset_ptr->constant = 0;
3514 if (boundary > PARM_BOUNDARY)
3515 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3516 save_var);
3518 else
3520 offset_ptr->constant = -sp_offset +
3521 #ifdef ARGS_GROW_DOWNWARD
3522 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3523 #else
3524 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3525 #endif
3526 if (boundary > PARM_BOUNDARY)
3527 alignment_pad->constant = offset_ptr->constant - save_constant;
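#if 0
/* Stand-alone sketch (not part of GCC) of the rounding done above,
   using local copies of the FLOOR_ROUND/CEIL_ROUND macros defined at
   the top of this file.  ALIGN must be a power of two; the offset of
   13 and the 16-byte boundary are made up for the example, and
   STACK_POINTER_OFFSET is taken as 0 for simplicity.  */
#include <assert.h>

#define EX_FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
#define EX_CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  assert (EX_CEIL_ROUND (13, 16) == 16);	/* args grow upward */
  assert (EX_FLOOR_ROUND (13, 16) == 0);	/* args grow downward */
  return 0;
}
#endif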
3533 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3535 if (passed_mode != BLKmode)
3537 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3538 offset_ptr->constant
3539 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3540 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3541 - GET_MODE_SIZE (passed_mode));
3543 else
3545 if (TREE_CODE (sizetree) != INTEGER_CST
3546 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3548 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3549 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3551 ADD_PARM_SIZE (*offset_ptr, s2);
3552 SUB_PARM_SIZE (*offset_ptr, sizetree);
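/* Worked example (illustrative): with PARM_BOUNDARY == 32, an HImode
   parm (16 bits) padded downward gets
     offset += 32/8 - 2 == 2
   so the 2-byte value ends up at the high end of its 4-byte slot.  */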
3558 /* True if register REGNO was alive at a place where `setjmp' was
3559 called and was set more than once or is an argument. Such regs may
3560 be clobbered by `longjmp'. */
3563 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3565 /* There appear to be cases where some local vars never reach the
3566 backend but have bogus regnos. */
3567 if (regno >= max_reg_num ())
3568 return false;
3570 return ((REG_N_SETS (regno) > 1
3571 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3572 && REGNO_REG_SET_P (setjmp_crosses, regno));
3575 /* Walk the tree of blocks describing the binding levels within a
3576 function and warn about variables that might be killed by setjmp or
3577 vfork.  This is done after calling flow_analysis before register
3578 allocation since that will clobber the pseudo-regs to hard
3579 regs.  */
3582 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3584 tree decl, sub;
3586 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3588 if (TREE_CODE (decl) == VAR_DECL
3589 && DECL_RTL_SET_P (decl)
3590 && REG_P (DECL_RTL (decl))
3591 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3592 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3593 " %<longjmp%> or %<vfork%>", decl);
3596 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3597 setjmp_vars_warning (setjmp_crosses, sub);
3600 /* Do the appropriate part of setjmp_vars_warning
3601 but for arguments instead of local variables. */
3604 setjmp_args_warning (bitmap setjmp_crosses)
3606 tree decl;
3607 for (decl = DECL_ARGUMENTS (current_function_decl);
3608 decl; decl = TREE_CHAIN (decl))
3609 if (DECL_RTL (decl) != 0
3610 && REG_P (DECL_RTL (decl))
3611 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3612 warning (OPT_Wclobbered,
3613 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3614 decl);
3617 /* Generate warning messages for variables live across setjmp. */
3620 generate_setjmp_warnings (void)
3622 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3624 if (n_basic_blocks == NUM_FIXED_BLOCKS
3625 || bitmap_empty_p (setjmp_crosses))
3626 return;
3628 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3629 setjmp_args_warning (setjmp_crosses);
3633 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3634 and create duplicate blocks. */
3635 /* ??? Need an option to either create block fragments or to create
3636 abstract origin duplicates of a source block. It really depends
3637 on what optimization has been performed. */
3640 reorder_blocks (void)
3642 tree block = DECL_INITIAL (current_function_decl);
3643 VEC(tree,heap) *block_stack;
3645 if (block == NULL_TREE)
3646 return;
3648 block_stack = VEC_alloc (tree, heap, 10);
3650 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3651 clear_block_marks (block);
3653 /* Prune the old trees away, so that they don't get in the way. */
3654 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3655 BLOCK_CHAIN (block) = NULL_TREE;
3657 /* Recreate the block tree from the note nesting. */
3658 reorder_blocks_1 (get_insns (), block, &block_stack);
3659 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3661 VEC_free (tree, heap, block_stack);
3664 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3667 clear_block_marks (tree block)
3669 while (block)
3671 TREE_ASM_WRITTEN (block) = 0;
3672 clear_block_marks (BLOCK_SUBBLOCKS (block));
3673 block = BLOCK_CHAIN (block);
3678 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3680 rtx insn;
3682 for (insn = insns; insn; insn = NEXT_INSN (insn))
3684 if (NOTE_P (insn))
3686 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3688 tree block = NOTE_BLOCK (insn);
3689 tree origin;
3691 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3692 ? BLOCK_FRAGMENT_ORIGIN (block)
3693 : block);
3695 /* If we have seen this block before, that means it now
3696 spans multiple address regions. Create a new fragment. */
3697 if (TREE_ASM_WRITTEN (block))
3699 tree new_block = copy_node (block);
3701 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3702 BLOCK_FRAGMENT_CHAIN (new_block)
3703 = BLOCK_FRAGMENT_CHAIN (origin);
3704 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3706 NOTE_BLOCK (insn) = new_block;
3707 block = new_block;
3710 BLOCK_SUBBLOCKS (block) = 0;
3711 TREE_ASM_WRITTEN (block) = 1;
3712 /* When there's only one block for the entire function,
3713 current_block == block and we mustn't do this, it
3714 will cause infinite recursion. */
3715 if (block != current_block)
3717 if (block != origin)
3718 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3720 BLOCK_SUPERCONTEXT (block) = current_block;
3721 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3722 BLOCK_SUBBLOCKS (current_block) = block;
3723 current_block = origin;
3725 VEC_safe_push (tree, heap, *p_block_stack, block);
3727 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3729 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3730 BLOCK_SUBBLOCKS (current_block)
3731 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3732 current_block = BLOCK_SUPERCONTEXT (current_block);
3738 /* Reverse the order of elements in the chain T of blocks,
3739 and return the new head of the chain (old last element). */
3742 blocks_nreverse (tree t)
3744 tree prev = 0, decl, next;
3745 for (decl = t; decl; decl = next)
3747 next = BLOCK_CHAIN (decl);
3748 BLOCK_CHAIN (decl) = prev;
3749 prev = decl;
3751 return prev;
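/* Example (illustrative): reversing the chain B1 -> B2 -> B3 yields
   B3 -> B2 -> B1.  The reversal is done in place by flipping each
   BLOCK_CHAIN link as the loop walks the chain; no blocks are
   copied.  */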
3754 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3755 non-NULL, list them all into VECTOR, in a depth-first preorder
3756 traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3757 blocks.  */
3760 all_blocks (tree block, tree *vector)
3762 int n_blocks = 0;
3764 while (block)
3766 TREE_ASM_WRITTEN (block) = 0;
3768 /* Record this block.  */
3769 if (vector)
3770 vector[n_blocks] = block;
3772 ++n_blocks;
3774 /* Record the subblocks, and their subblocks... */
3775 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3776 vector ? vector + n_blocks : 0);
3777 block = BLOCK_CHAIN (block);
3780 return n_blocks;
3783 /* Return a vector containing all the blocks rooted at BLOCK. The
3784 number of elements in the vector is stored in N_BLOCKS_P. The
3785 vector is dynamically allocated; it is the caller's responsibility
3786 to call `free' on the pointer returned. */
3789 get_block_vector (tree block, int *n_blocks_p)
3791 tree *block_vector;
3793 *n_blocks_p = all_blocks (block, NULL);
3794 block_vector = XNEWVEC (tree, *n_blocks_p);
3795 all_blocks (block, block_vector);
3797 return block_vector;
3800 static GTY(()) int next_block_index = 2;
3802 /* Set BLOCK_NUMBER for all the blocks in FN. */
3805 number_blocks (tree fn)
3807 int i;
3808 int n_blocks;
3809 tree *block_vector;
3811 /* For SDB and XCOFF debugging output, we start numbering the blocks
3812 from 1 within each function, rather than keeping a running
3814 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3815 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3816 next_block_index = 1;
3817 #endif
3819 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3821 /* The top-level BLOCK isn't numbered at all. */
3822 for (i = 1; i < n_blocks; ++i)
3823 /* We number the blocks from two. */
3824 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3826 free (block_vector);
3831 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3834 debug_find_var_in_block_tree (tree var, tree block)
3836 tree t;
3838 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3839 if (t == var)
3840 return block;
3842 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3844 tree ret = debug_find_var_in_block_tree (var, t);
3845 if (ret)
3846 return ret;
3849 return NULL_TREE;
3852 /* Keep track of whether we're in a dummy function context. If we are,
3853 we don't want to invoke the set_current_function hook, because we'll
3854 get into trouble if the hook calls target_reinit () recursively or
3855 when the initial initialization is not yet complete. */
3857 static bool in_dummy_function;
3859 /* Invoke the target hook when setting cfun. Update the optimization options
3860 if the function uses different options than the default. */
3863 invoke_set_current_function_hook (tree fndecl)
3865 if (!in_dummy_function)
3867 tree opts = ((fndecl)
3868 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
3869 : optimization_default_node);
3871 if (!opts)
3872 opts = optimization_default_node;
3874 /* Change optimization options if needed. */
3875 if (optimization_current_node != opts)
3877 optimization_current_node = opts;
3878 cl_optimization_restore (TREE_OPTIMIZATION (opts));
3881 targetm.set_current_function (fndecl);
3885 /* cfun should never be set directly; use this function. */
3888 set_cfun (struct function *new_cfun)
3890 if (cfun != new_cfun)
3892 cfun = new_cfun;
3893 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3897 /* Keep track of the cfun stack. */
3899 typedef struct function *function_p;
3901 DEF_VEC_P(function_p);
3902 DEF_VEC_ALLOC_P(function_p,heap);
3904 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3906 static VEC(function_p,heap) *cfun_stack;
3908 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3911 push_cfun (struct function *new_cfun)
3913 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3914 set_cfun (new_cfun);
3917 /* Pop cfun from the stack. */
3920 pop_cfun (void)
3922 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3923 set_cfun (new_cfun);
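#if 0
/* Usage sketch (not part of GCC): temporarily switch to another
   function's context and then restore the previous one.  OTHER_FNDECL
   is a hypothetical FUNCTION_DECL whose struct function has already
   been allocated.  */
push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
/* ... emit insns or inspect state of OTHER_FNDECL here ... */
pop_cfun ();
#endif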
3926 /* Return the current value of funcdef_no and increment it.  */
3928 get_next_funcdef_no (void)
3930 return funcdef_no++;
3933 /* Allocate a function structure for FNDECL and set its contents
3934 to the defaults. Set cfun to the newly-allocated object.
3935 Some of the helper functions invoked during initialization assume
3936 that cfun has already been set. Therefore, assign the new object
3937 directly into cfun and invoke the back end hook explicitly at the
3938 very end, rather than initializing a temporary and calling set_cfun
3939 directly.
3941 ABSTRACT_P is true if this is a function that will never be seen by
3942 the middle-end. Such functions are front-end concepts (like C++
3943 function templates) that do not correspond directly to functions
3944 placed in object files. */
3947 allocate_struct_function (tree fndecl, bool abstract_p)
3949 tree result;
3950 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3952 cfun = GGC_CNEW (struct function);
3954 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3956 init_eh_for_function ();
3958 if (init_machine_status)
3959 cfun->machine = (*init_machine_status) ();
3961 #ifdef OVERRIDE_ABI_FORMAT
3962 OVERRIDE_ABI_FORMAT (fndecl);
3963 #endif
3965 if (fndecl != NULL_TREE)
3967 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3968 cfun->decl = fndecl;
3969 current_function_funcdef_no = get_next_funcdef_no ();
3971 result = DECL_RESULT (fndecl);
3972 if (!abstract_p && aggregate_value_p (result, fndecl))
3974 #ifdef PCC_STATIC_STRUCT_RETURN
3975 cfun->returns_pcc_struct = 1;
3976 #endif
3977 cfun->returns_struct = 1;
3980 cfun->stdarg
3981 = (fntype
3982 && TYPE_ARG_TYPES (fntype) != 0
3983 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3984 != void_type_node));
3986 /* Assume all registers in stdarg functions need to be saved. */
3987 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3988 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3991 invoke_set_current_function_hook (fndecl);
3994 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3995 instead of just setting it. */
3998 push_struct_function (tree fndecl)
4000 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4001 allocate_struct_function (fndecl, false);
4004 /* Reset cfun, and other non-struct-function variables to defaults as
4005 appropriate for emitting rtl at the start of a function. */
4008 prepare_function_start (void)
4010 gcc_assert (!crtl->emit.x_last_insn);
4011 init_emit ();
4012 init_varasm_status ();
4013 init_expr ();
4014 default_rtl_profile ();
4016 cse_not_expected = ! optimize;
4018 /* Caller save not needed yet. */
4019 caller_save_needed = 0;
4021 /* We haven't done register allocation yet.  */
4022 reg_renumber = 0;
4024 /* Indicate that we have not instantiated virtual registers yet. */
4025 virtuals_instantiated = 0;
4027 /* Indicate that we want CONCATs now. */
4028 generating_concat_p = 1;
4030 /* Indicate we have no need of a frame pointer yet. */
4031 frame_pointer_needed = 0;
4034 /* Initialize the rtl expansion mechanism so that we can do simple things
4035 like generate sequences. This is used to provide a context during global
4036 initialization of some passes. You must call expand_dummy_function_end
4037 to exit this context. */
4040 init_dummy_function_start (void)
4042 gcc_assert (!in_dummy_function);
4043 in_dummy_function = true;
4044 push_struct_function (NULL_TREE);
4045 prepare_function_start ();
4048 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4049 and initialize static variables for generating RTL for the statements
4053 init_function_start (tree subr)
4055 if (subr && DECL_STRUCT_FUNCTION (subr))
4056 set_cfun (DECL_STRUCT_FUNCTION (subr));
4057 else
4058 allocate_struct_function (subr, false);
4059 prepare_function_start ();
4061 /* Warn if this value is an aggregate type,
4062 regardless of which calling convention we are using for it. */
4063 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4064 warning (OPT_Waggregate_return, "function returns an aggregate");
4067 /* Make sure all values used by the optimization passes have sane
4068 defaults.  */
4070 init_function_for_compilation (void)
4072 reg_renumber = 0;
4074 /* No prologue/epilogue insns yet.  Make sure that these vectors are
4075 empty.  */
4076 gcc_assert (VEC_length (int, prologue) == 0);
4077 gcc_assert (VEC_length (int, epilogue) == 0);
4078 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4080 return 0;
4082 struct rtl_opt_pass pass_init_function =
4085 RTL_PASS,
4086 NULL, /* name */
4087 NULL, /* gate */
4088 init_function_for_compilation, /* execute */
4089 NULL, /* sub */
4090 NULL, /* next */
4091 0, /* static_pass_number */
4092 0, /* tv_id */
4093 0, /* properties_required */
4094 0, /* properties_provided */
4095 0, /* properties_destroyed */
4096 0, /* todo_flags_start */
4097 0 /* todo_flags_finish */
4099 };
4103 expand_main_function (void)
4105 #if (defined(INVOKE__main) \
4106 || (!defined(HAS_INIT_SECTION) \
4107 && !defined(INIT_SECTION_ASM_OP) \
4108 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4109 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4110 #endif
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set                0
# define gen_stack_protect_set(x,y)    (gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     crtl->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test               0
# define gen_stack_protect_test(x, y, z)       (gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     crtl->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing
     anything except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
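/* Taken together, the two routines above arrange code of roughly this
   shape (a sketch, not the exact RTL; the guard symbol and failure hook
   come from targetm.stack_protect_guard and targetm.stack_protect_fail,
   for which __stack_chk_guard and __stack_chk_fail are only the common
   defaults):

     void protected_fn (void)
     {
       guard_slot = __stack_chk_guard;          <- stack_protect_prologue
       ... function body ...
       if (guard_slot != __stack_chk_guard)     <- stack_protect_epilogue
         __stack_chk_fail ();
     }  */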
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */
  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);
  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }
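/* Illustrative case (example added, not from the original source):
   in GNU C,

     int outer (int x)
     {
       int inner (void) { return x; }
       return inner ();
     }

   when inner is compiled, the code above stores its incoming static
   chain pointer, through which it reaches outer's frame.  */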
  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
         before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
        expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }
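/* What was just emitted, in outline (added note): element 0 of the save
   area now holds targetm.builtin_setjmp_frame_value (typically the frame
   or stack pointer), and update_nonlocal_goto_save_area lets the target
   record whatever else a later non-local goto needs to rebuild this
   frame.  */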
  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
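/* For illustration (assumed shapes, not from the original source):
   crtl->return_rtx is usually a single hard register, e.g. (reg:SI 0),
   but a value split across registers yields a PARALLEL such as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   in which case DOIT is applied to each constituent hard register in
   turn.  */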
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);
  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);
  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (flag_non_call_exceptions)
        emit_insn (gen_blockage ());
    }
  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();
  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             coercion.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

              if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
                promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
                              &unsignedp, 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      crtl->return_rtx = outgoing;
    }
  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);
  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
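/* Usage note (added): the prologue/epilogue threading code below pairs
   this with contains (), e.g.

     record_insns (seq, &prologue);
     ...
     if (contains (insn, &prologue))
       ...

   so that later passes can recognize prologue and epilogue insns by
   their UIDs.  */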
/* Set the locator of the insn chain starting at INSN to LOC.  */

static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (const_rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
              == VEC_index (int, *vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VEC_index (int, *vec, j))
          return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (const_rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
         if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
        emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      emit_note (NOTE_INSN_PROLOGUE_END);

#ifndef PROFILE_BEFORE_PROLOGUE
      /* Ensure that instructions are not moved into the prologue when
         profiling is on.  The call to the profiling routine can be
         emitted within the live range of a call-clobbered register.  */
      if (crtl->profile)
        emit_insn (gen_blockage ());
#endif

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         fallthru edge.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (BB_HEAD (last) == label && LABEL_P (label))
        {
          edge_iterator ei2;

          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
            {
              basic_block bb = e->src;
              rtx jump;

              if (bb == ENTRY_BLOCK_PTR)
                {
                  ei_next (&ei2);
                  continue;
                }

              jump = BB_END (bb);
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
                {
                  ei_next (&ei2);
                  continue;
                }

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    {
                      ei_next (&ei2);
                      continue;
                    }

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (single_succ_p (bb))
                    {
                      ei_next (&ei2);
                      continue;
                    }
                }
              else
                {
                  ei_next (&ei2);
                  continue;
                }

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (BB_END (last));
          emit_return_into_block (last);
          epilogue_end = BB_END (last);
          single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;
      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:
  default_rtl_profile ();

  if (inserted)
    commit_edge_insertions ();

  /* The epilogue insns we inserted may cause the exit edge to no longer
     be fallthru.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      if (((e->flags & EDGE_FALLTHRU) != 0)
          && returnjump_p (BB_END (e->src)))
        e->flags &= ~EDGE_FALLTHRU;
    }
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
        {
          ei_next (&ei);
          continue;
        }

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, &prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, &epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

/* Returns the raw (mangled) name of the current function.  */
const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
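/* Background note (added): LEAF_REGISTERS is defined only by ports where
   leaf functions can run in a restricted register subset; SPARC, for
   instance, uses it to remap windowed registers in functions that make
   no calls and so need no register window of their own.  */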
/* Insert a TYPE into the used types hash table of CFUN.  */
static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */

  thread_prologue_and_epilogue_insns ();
  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (!INSN_P (insn))
          continue;

        pat = PATTERN (insn);
        if (GET_CODE (pat) == PARALLEL)
          p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
        else if (GET_CODE (pat) == SET)
          p_sets = &PATTERN (insn), noutputs = 1;
        else
          continue;

        if (GET_CODE (*p_sets) == SET
            && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
          match_asm_constraints_1 (insn, p_sets, noutputs);
      }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-function.h"