/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the nearest multiple of the required
   alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
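
/* Illustrative worked example (not in the original source): with
   ALIGN == 8, FLOOR_ROUND (-13, 8) is -13 & ~7 == -16 and
   CEIL_ROUND (-13, 8) is (-13 + 7) & ~7 == -8, i.e. the multiples of 8
   on either side of -13, computed without dividing a negative value.  */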
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compilation.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  cfun->outer = outer_function_chain;
  outer_function_chain = cfun;

  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = outer_function_chain;

  set_cfun (p);
  outer_function_chain = p->outer;
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->epilogue_delay_list = NULL;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
    cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        frame_offset
          = (FLOOR_ROUND (frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        frame_offset
          = (CEIL_ROUND (frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
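
/* Illustrative usage sketch (not in the original source): a caller
   wanting a naturally aligned word-sized slot would write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 requests the natural alignment of SImode.  Until virtual
   registers are instantiated, the returned MEM is based on
   virtual_stack_vars_rtx rather than the hard frame pointer.  */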

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
                                  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
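
/* Illustrative usage sketch (not in the original source): expanders
   typically grab an anonymous scratch slot with

     rtx tmp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);

   KEEP == 0 lets free_temp_slots reclaim the slot at the end of the
   statement that created it.  */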

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
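
/* Illustrative note (not in the original source): assign_temp falls
   back to a stack slot only for BLKmode values or when MEMORY_REQUIRED
   is 1; otherwise it hands back a fresh pseudo, e.g.
   assign_temp (integer_type_node, 0, 0, 0) is just a new register in
   the (possibly promoted) mode of 'int'.  */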

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

static void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   will never be upgraded.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
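
/* Illustrative usage sketch (not in the original source): callers
   bracket the expansion of a statement with a nesting level, e.g.

     push_temp_slots ();
     ... expand an expression into RESULT ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   where preserve_temp_slots keeps RESULT alive across the free by
   moving its slot to the enclosing level.  */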

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size				      \
     + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL)))   \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
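
/* Illustrative example (not in the original source): once the offsets
   above are known, an address such as
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten by the
   instantiation code below into
   (plus (reg frame-pointer) (const_int var_offset + 8)).  */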

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;

        instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        fndecl = get_callee_fndecl (fntype);
        fntype = fndecl ? TREE_TYPE (fndecl) : 0;
        break;
      case FUNCTION_DECL:
        fndecl = fntype;
        fntype = TREE_TYPE (fndecl);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = 0;
        break;
      default:
        /* We don't expect other rtl types here.  */
        gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
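
/* Illustrative example (assumes a typical 32-bit target, not from the
   original source): a return type such as struct { char c[64]; } cannot
   live in the call-clobbered return registers, so aggregate_value_p
   returns 1 and callers pass a hidden address for the result.  */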

/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
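
/* Illustrative example (not from the original source): a C99
   variable-length array parameter, e.g. void f (int n, int vla[n]),
   has a TYPE_SIZE that is not an INTEGER_CST, so pass_by_reference
   returns true here regardless of what the target hook would say.  */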

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}

/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}

/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);

          /* Build a second synthetic decl.  */
          decl = build_decl (PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);

          /* Splice it in; skip the new decl.  */
          TREE_CHAIN (decl) = TREE_CHAIN (p);
          TREE_CHAIN (p) = decl;
          p = decl;
        }
    }

  return args;
}

/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!current_function_stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (TREE_CODE (passed_type) == UNION_TYPE
      && TYPE_TRANSPARENT_UNION (passed_type))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
                                    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
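
/* Illustrative example (not from the original source): on targets whose
   PROMOTE_MODE widens small integers, a 'short' parameter typically has
   nominal_mode == HImode but promoted_mode == SImode, so the value
   arrives in a word-sized register and is narrowed back inside the
   function.  */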

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
2098 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2099 the incoming location of the current parameter. */
2102 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2103 struct assign_parm_data_one *data)
2105 HOST_WIDE_INT pretend_bytes = 0;
2109 if (data->promoted_mode == VOIDmode)
2111 data->entry_parm = data->stack_parm = const0_rtx;
2115 #ifdef FUNCTION_INCOMING_ARG
2116 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2117 data->passed_type, data->named_arg);
2119 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2120 data->passed_type, data->named_arg);
2123 if (entry_parm == 0)
2124 data->promoted_mode = data->passed_mode;
2126 /* Determine parm's home in the stack, in case it arrives in the stack
2127 or we should pretend it did. Compute the stack position and rtx where
2128 the argument arrives and its size.
2130 There is one complexity here: If this was a parameter that would
2131 have been passed in registers, but wasn't only because it is
2132 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2133 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2134 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2135 as it was the previous time. */
2136 in_regs = entry_parm != 0;
2137 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2140 if (!in_regs && !data->named_arg)
2142 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2145 #ifdef FUNCTION_INCOMING_ARG
2146 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2147 data->passed_type, true);
2149 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2150 data->passed_type, true);
2152 in_regs = tem != NULL;
2156 /* If this parameter was passed both in registers and in the stack, use
2157 the copy on the stack. */
2158 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2166 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2167 data->promoted_mode,
2168 data->passed_type,
2169 data->named_arg);
2170 data->partial = partial;
2172 /* The caller might already have allocated stack space for the
2173 register parameters. */
2174 if (partial != 0 && all->reg_parm_stack_space == 0)
2176 /* Part of this argument is passed in registers and part
2177 is passed on the stack. Ask the prologue code to extend
2178 the stack part so that we can recreate the full value.
2180 PRETEND_BYTES is the size of the registers we need to store.
2181 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2182 stack space that the prologue should allocate.
2184 Internally, gcc assumes that the argument pointer is aligned
2185 to STACK_BOUNDARY bits. This is used both for alignment
2186 optimizations (see init_emit) and to locate arguments that are
2187 aligned to more than PARM_BOUNDARY bits. We must preserve this
2188 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2189 a stack boundary. */
2191 /* We assume at most one partial arg, and it must be the first
2192 argument on the stack. */
2193 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2195 pretend_bytes = partial;
2196 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2198 /* We want to align relative to the actual stack pointer, so
2199 don't include this in the stack size until later. */
2200 all->extra_pretend_bytes = all->pretend_args_size;
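/* Worked example (editorial): assuming STACK_BYTES == 16 and a 20-byte
   register part, CEIL_ROUND (20, 16) == (20 + 15) & ~15 == 32, so the
   prologue is asked to pretend 32 bytes of incoming arguments.  */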
2204 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2205 entry_parm ? data->partial : 0, current_function_decl,
2206 &all->stack_args_size, &data->locate);
2208 /* Adjust offsets to include the pretend args. */
2209 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2210 data->locate.slot_offset.constant += pretend_bytes;
2211 data->locate.offset.constant += pretend_bytes;
2213 data->entry_parm = entry_parm;
2216 /* A subroutine of assign_parms. If there is actually space on the stack
2217 for this parm, count it in stack_args_size and return true. */
2220 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2221 struct assign_parm_data_one *data)
2223 /* Trivially true if we've no incoming register. */
2224 if (data->entry_parm == NULL)
2225 ;
2226 /* Also true if we're partially in registers and partially not,
2227 since we've arranged to drop the entire argument on the stack. */
2228 else if (data->partial != 0)
2229 ;
2230 /* Also true if the target says that it's passed in both registers
2231 and on the stack. */
2232 else if (GET_CODE (data->entry_parm) == PARALLEL
2233 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2234 ;
2235 /* Also true if the target says that there's stack allocated for
2236 all register parameters. */
2237 else if (all->reg_parm_stack_space > 0)
2238 ;
2239 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2240 else
2241 return false;
2243 all->stack_args_size.constant += data->locate.size.constant;
2244 if (data->locate.size.var)
2245 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2247 return true;
2250 /* A subroutine of assign_parms. Given that this parameter is allocated
2251 stack space by the ABI, find it. */
2254 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2256 rtx offset_rtx, stack_parm;
2257 unsigned int align, boundary;
2259 /* If we're passing this arg using a reg, make its stack home the
2260 aligned stack slot. */
2261 if (data->entry_parm)
2262 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2263 else
2264 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2266 stack_parm = current_function_internal_arg_pointer;
2267 if (offset_rtx != const0_rtx)
2268 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2269 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2271 set_mem_attributes (stack_parm, parm, 1);
2273 boundary = data->locate.boundary;
2274 align = BITS_PER_UNIT;
2276 /* If we're padding upward, we know that the alignment of the slot
2277 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2278 intentionally forcing upward padding. Otherwise we have to come
2279 up with a guess at the alignment based on OFFSET_RTX. */
2280 if (data->locate.where_pad != downward || data->entry_parm)
2281 align = boundary;
2282 else if (GET_CODE (offset_rtx) == CONST_INT)
2283 {
2284 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2285 align = align & -align;
2286 }
2287 set_mem_align (stack_parm, align);
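/* Worked example (editorial): with a 4-byte constant OFFSET_RTX and a
   64-bit BOUNDARY, align == (4 * 8) | 64 == 96, and 96 & -96 == 32;
   i.e. the guess is the largest power of two that divides both the
   offset (in bits) and the argument boundary.  */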
2289 if (data->entry_parm)
2290 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2292 data->stack_parm = stack_parm;
2295 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2296 always valid and contiguous. */
2299 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2301 rtx entry_parm = data->entry_parm;
2302 rtx stack_parm = data->stack_parm;
2304 /* If this parm was passed part in regs and part in memory, pretend it
2305 arrived entirely in memory by pushing the register-part onto the stack.
2306 In the special case of a DImode or DFmode that is split, we could put
2307 it together in a pseudoreg directly, but for now that's not worth bothering with. */
2309 if (data->partial != 0)
2311 /* Handle calls that pass values in multiple non-contiguous
2312 locations. The Irix 6 ABI has examples of this. */
2313 if (GET_CODE (entry_parm) == PARALLEL)
2314 emit_group_store (validize_mem (stack_parm), entry_parm,
2315 data->passed_type,
2316 int_size_in_bytes (data->passed_type));
2319 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2320 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2321 data->partial / UNITS_PER_WORD);
2324 entry_parm = stack_parm;
2327 /* If we didn't decide this parm came in a register, by default it came on the stack. */
2329 else if (entry_parm == NULL)
2330 entry_parm = stack_parm;
2332 /* When an argument is passed in multiple locations, we can't make use
2333 of this information, but we can save some copying if the whole argument
2334 is passed in a single register. */
2335 else if (GET_CODE (entry_parm) == PARALLEL
2336 && data->nominal_mode != BLKmode
2337 && data->passed_mode != BLKmode)
2339 size_t i, len = XVECLEN (entry_parm, 0);
2341 for (i = 0; i < len; i++)
2342 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2343 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2344 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2345 == data->passed_mode)
2346 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2348 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2353 data->entry_parm = entry_parm;
2356 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2357 always valid and properly aligned. */
2360 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2362 rtx stack_parm = data->stack_parm;
2364 /* If we can't trust the parm stack slot to be aligned enough for its
2365 ultimate type, don't use that slot after entry. We'll make another
2366 stack slot, if we need one. */
2367 if (stack_parm
2368 && ((STRICT_ALIGNMENT
2369 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2370 || (data->nominal_type
2371 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2372 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2373 stack_parm = NULL;
2375 /* If parm was passed in memory, and we need to convert it on entry,
2376 don't store it back in that same slot. */
2377 else if (data->entry_parm == stack_parm
2378 && data->nominal_mode != BLKmode
2379 && data->nominal_mode != data->passed_mode)
2380 stack_parm = NULL;
2382 /* If stack protection is in effect for this function, don't leave any
2383 pointers in their passed stack slots. */
2384 else if (cfun->stack_protect_guard
2385 && (flag_stack_protect == 2
2386 || data->passed_pointer
2387 || POINTER_TYPE_P (data->nominal_type)))
2388 stack_parm = NULL;
2390 data->stack_parm = stack_parm;
2393 /* A subroutine of assign_parms. Return true if the current parameter
2394 should be stored as a BLKmode in the current frame. */
2397 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2399 if (data->nominal_mode == BLKmode)
2400 return true;
2401 if (GET_CODE (data->entry_parm) == PARALLEL)
2402 return true;
2404 #ifdef BLOCK_REG_PADDING
2405 /* Only assign_parm_setup_block knows how to deal with register arguments
2406 that are padded at the least significant end. */
2407 if (REG_P (data->entry_parm)
2408 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2409 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2410 == (BYTES_BIG_ENDIAN ? upward : downward)))
2417 /* A subroutine of assign_parms. Arrange for the parameter to be
2418 present and valid in DATA->STACK_RTL. */
2421 assign_parm_setup_block (struct assign_parm_data_all *all,
2422 tree parm, struct assign_parm_data_one *data)
2424 rtx entry_parm = data->entry_parm;
2425 rtx stack_parm = data->stack_parm;
2427 HOST_WIDE_INT size_stored;
2428 rtx orig_entry_parm = entry_parm;
2430 if (GET_CODE (entry_parm) == PARALLEL)
2431 entry_parm = emit_group_move_into_temps (entry_parm);
2433 /* If we've a non-block object that's nevertheless passed in parts,
2434 reconstitute it in register operations rather than on the stack. */
2435 if (GET_CODE (entry_parm) == PARALLEL
2436 && data->nominal_mode != BLKmode)
2438 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2440 if ((XVECLEN (entry_parm, 0) > 1
2441 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2442 && use_register_for_decl (parm))
2444 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2446 push_to_sequence2 (all->first_conversion_insn,
2447 all->last_conversion_insn);
2449 /* For values returned in multiple registers, handle possible
2450 incompatible calls to emit_group_store.
2452 For example, the following would be invalid, and would have to
2453 be fixed by the conditional below:
2455 emit_group_store ((reg:SF), (parallel:DF))
2456 emit_group_store ((reg:SI), (parallel:DI))
2458 An example of this is the handling of doubles in e500 v2:
2459 (parallel:DF (expr_list (reg:SI) (const_int 0))
2460 (expr_list (reg:SI) (const_int 4))). */
2461 if (data->nominal_mode != data->passed_mode)
2463 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2464 emit_group_store (t, entry_parm, NULL_TREE,
2465 GET_MODE_SIZE (GET_MODE (entry_parm)));
2466 convert_move (parmreg, t, 0);
2469 emit_group_store (parmreg, entry_parm, data->nominal_type,
2470 int_size_in_bytes (data->nominal_type));
2472 all->first_conversion_insn = get_insns ();
2473 all->last_conversion_insn = get_last_insn ();
2476 SET_DECL_RTL (parm, parmreg);
2481 size = int_size_in_bytes (data->passed_type);
2482 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2483 if (stack_parm == 0)
2485 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2486 stack_parm = assign_stack_local (BLKmode, size_stored,
2487 DECL_ALIGN (parm));
2488 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2489 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2490 set_mem_attributes (stack_parm, parm, 1);
2493 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2494 calls that pass values in multiple non-contiguous locations. */
2495 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2499 /* Note that we will be storing an integral number of words.
2500 So we have to be careful to ensure that we allocate an
2501 integral number of words. We do this above when we call
2502 assign_stack_local if space was not allocated in the argument
2503 list. If it was, this will not work if PARM_BOUNDARY is not
2504 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2505 if it becomes a problem. Exception is when BLKmode arrives
2506 with arguments not conforming to word_mode. */
2508 if (data->stack_parm == 0)
2509 ;
2510 else if (GET_CODE (entry_parm) == PARALLEL)
2511 ;
2512 else
2513 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2515 mem = validize_mem (stack_parm);
2517 /* Handle values in multiple non-contiguous locations. */
2518 if (GET_CODE (entry_parm) == PARALLEL)
2520 push_to_sequence2 (all->first_conversion_insn,
2521 all->last_conversion_insn);
2522 emit_group_store (mem, entry_parm, data->passed_type, size);
2523 all->first_conversion_insn = get_insns ();
2524 all->last_conversion_insn = get_last_insn ();
2531 /* If SIZE is that of a mode no bigger than a word, just use
2532 that mode's store operation. */
2533 else if (size <= UNITS_PER_WORD)
2535 enum machine_mode mode
2536 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2538 if (mode != BLKmode
2539 #ifdef BLOCK_REG_PADDING
2540 && (size == UNITS_PER_WORD
2541 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2542 != (BYTES_BIG_ENDIAN ? upward : downward)))
2543 #endif
2544 )
2545 {
2546 rtx reg;
2548 /* We are really truncating a word_mode value containing
2549 SIZE bytes into a value of mode MODE. If such an
2550 operation requires no actual instructions, we can refer
2551 to the value directly in mode MODE, otherwise we must
2552 start with the register in word_mode and explicitly convert it. */
2554 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2555 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2556 else
2557 {
2558 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2559 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2561 emit_move_insn (change_address (mem, mode, 0), reg);
2564 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2565 machine must be aligned to the left before storing
2566 to memory. Note that the previous test doesn't
2567 handle all cases (e.g. SIZE == 3). */
2568 else if (size != UNITS_PER_WORD
2569 #ifdef BLOCK_REG_PADDING
2570 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2571 == downward)
2572 #endif
2573 )
2574 {
2575 rtx tem, x;
2578 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2579 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2581 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2582 build_int_cst (NULL_TREE, by),
2583 NULL_RTX, 1);
2584 tem = change_address (mem, word_mode, 0);
2585 emit_move_insn (tem, x);
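/* Worked example (editorial): on a 32-bit big-endian target a 3-byte
   block arriving in a register must be left-justified before being
   stored, so BY == (4 - 3) * 8 == 8 and the value is shifted left by
   one byte.  */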
2586 }
2587 else
2588 move_block_from_reg (REGNO (entry_parm), mem,
2589 size_stored / UNITS_PER_WORD);
2590 }
2591 else
2592 move_block_from_reg (REGNO (entry_parm), mem,
2593 size_stored / UNITS_PER_WORD);
2595 else if (data->stack_parm == 0)
2597 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2598 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2599 BLOCK_OP_NORMAL);
2600 all->first_conversion_insn = get_insns ();
2601 all->last_conversion_insn = get_last_insn ();
2605 data->stack_parm = stack_parm;
2606 SET_DECL_RTL (parm, stack_parm);
2609 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2610 parameter. Get it there. Perform all ABI specified conversions. */
2613 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2614 struct assign_parm_data_one *data)
2617 enum machine_mode promoted_nominal_mode;
2618 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2619 bool did_conversion = false;
2621 /* Store the parm in a pseudoregister during the function, but we may
2622 need to do it in a wider mode. */
2624 /* This is not really promoting for a call. However we need to be
2625 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2626 promoted_nominal_mode
2627 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2629 parmreg = gen_reg_rtx (promoted_nominal_mode);
2631 if (!DECL_ARTIFICIAL (parm))
2632 mark_user_reg (parmreg);
2634 /* If this was an item that we received a pointer to,
2635 set DECL_RTL appropriately. */
2636 if (data->passed_pointer)
2638 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2639 set_mem_attributes (x, parm, 1);
2640 SET_DECL_RTL (parm, x);
2643 SET_DECL_RTL (parm, parmreg);
2645 /* Copy the value into the register. */
2646 if (data->nominal_mode != data->passed_mode
2647 || promoted_nominal_mode != data->promoted_mode)
2648 {
2649 int save_tree_used;
2651 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2652 mode, by the caller. We now have to convert it to
2653 NOMINAL_MODE, if different. However, PARMREG may be in
2654 a different mode than NOMINAL_MODE if it is being stored promoted.
2657 If ENTRY_PARM is a hard register, it might be in a register
2658 not valid for operating in its mode (e.g., an odd-numbered
2659 register for a DFmode). In that case, moves are the only
2660 thing valid, so we can't do a convert from there. This
2661 occurs when the calling sequence allows such misaligned usages.
2664 In addition, the conversion may involve a call, which could
2665 clobber parameters which haven't been copied to pseudo
2666 registers yet. Therefore, we must first copy the parm to
2667 a pseudo reg here, and save the conversion until after all
2668 parameters have been moved. */
2670 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2672 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2674 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2675 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2677 if (GET_CODE (tempreg) == SUBREG
2678 && GET_MODE (tempreg) == data->nominal_mode
2679 && REG_P (SUBREG_REG (tempreg))
2680 && data->nominal_mode == data->passed_mode
2681 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2682 && GET_MODE_SIZE (GET_MODE (tempreg))
2683 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2685 /* The argument is already sign/zero extended, so note it into the subreg. */
2687 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2688 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2691 /* TREE_USED gets set erroneously during expand_assignment. */
2692 save_tree_used = TREE_USED (parm);
2693 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2694 TREE_USED (parm) = save_tree_used;
2695 all->first_conversion_insn = get_insns ();
2696 all->last_conversion_insn = get_last_insn ();
2699 did_conversion = true;
2701 else
2702 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2704 /* If we were passed a pointer but the actual value can safely live
2705 in a register, put it in one. */
2706 if (data->passed_pointer
2707 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2708 /* If by-reference argument was promoted, demote it. */
2709 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2710 || use_register_for_decl (parm)))
2712 /* We can't use nominal_mode, because it will have been set to
2713 Pmode above. We must use the actual mode of the parm. */
2714 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2715 mark_user_reg (parmreg);
2717 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2719 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2720 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2722 push_to_sequence2 (all->first_conversion_insn,
2723 all->last_conversion_insn);
2724 emit_move_insn (tempreg, DECL_RTL (parm));
2725 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2726 emit_move_insn (parmreg, tempreg);
2727 all->first_conversion_insn = get_insns ();
2728 all->last_conversion_insn = get_last_insn ();
2731 did_conversion = true;
2733 else
2734 emit_move_insn (parmreg, DECL_RTL (parm));
2736 SET_DECL_RTL (parm, parmreg);
2738 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm. */
2740 data->stack_parm = NULL;
2743 /* Mark the register as eliminable if we did no conversion and it was
2744 copied from memory at a fixed offset, and the arg pointer was not
2745 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2746 offset formed an invalid address, such memory-equivalences as we
2747 make here would screw up life analysis for it. */
2748 if (data->nominal_mode == data->passed_mode
2749 && !did_conversion
2750 && data->stack_parm != 0
2751 && MEM_P (data->stack_parm)
2752 && data->locate.offset.var == 0
2753 && reg_mentioned_p (virtual_incoming_args_rtx,
2754 XEXP (data->stack_parm, 0)))
2756 rtx linsn = get_last_insn ();
2757 rtx sinsn, set;
2759 /* Mark complex types separately. */
2760 if (GET_CODE (parmreg) == CONCAT)
2762 enum machine_mode submode
2763 = GET_MODE_INNER (GET_MODE (parmreg));
2764 int regnor = REGNO (XEXP (parmreg, 0));
2765 int regnoi = REGNO (XEXP (parmreg, 1));
2766 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2767 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2768 GET_MODE_SIZE (submode));
2770 /* Scan backwards for the set of the real and imaginary parts. */
2772 for (sinsn = linsn; sinsn != 0;
2773 sinsn = prev_nonnote_insn (sinsn))
2775 set = single_set (sinsn);
2776 if (set == 0)
2777 continue;
2779 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2780 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2781 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2782 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2785 else if ((set = single_set (linsn)) != 0
2786 && SET_DEST (set) == parmreg)
2787 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2790 /* For pointer data type, suggest pointer register. */
2791 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2792 mark_reg_pointer (parmreg,
2793 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
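/* Illustration only (editorial sketch): on a target whose PROMOTE_MODE
   widens sub-word scalars, the parameter of a function like the one
   below arrives already sign-extended to a full word; the
   SUBREG_PROMOTED_VAR_P markings made above let later passes omit the
   redundant re-extension.  */
static short
narrow_twice (short x)
{
  return (short) (x * 2);
}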
2796 /* A subroutine of assign_parms. Allocate stack space to hold the current
2797 parameter. Get it there. Perform all ABI specified conversions. */
2800 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2801 struct assign_parm_data_one *data)
2803 /* Value must be stored in the stack slot STACK_PARM during function execution. */
2805 bool to_conversion = false;
2807 if (data->promoted_mode != data->nominal_mode)
2809 /* Conversion is required. */
2810 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2812 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2814 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2815 to_conversion = true;
2817 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2818 TYPE_UNSIGNED (TREE_TYPE (parm)));
2820 if (data->stack_parm)
2821 /* ??? This may need a big-endian conversion on sparc64. */
2822 data->stack_parm
2823 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2826 if (data->entry_parm != data->stack_parm)
2827 {
2828 rtx dest, src;
2830 if (data->stack_parm == 0)
2831 {
2832 data->stack_parm
2833 = assign_stack_local (GET_MODE (data->entry_parm),
2834 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2835 TYPE_ALIGN (data->passed_type));
2836 set_mem_attributes (data->stack_parm, parm, 1);
2839 dest = validize_mem (data->stack_parm);
2840 src = validize_mem (data->entry_parm);
2844 /* Use a block move to handle potentially misaligned entry_parm. */
2845 if (!to_conversion)
2846 push_to_sequence2 (all->first_conversion_insn,
2847 all->last_conversion_insn);
2848 to_conversion = true;
2850 emit_block_move (dest, src,
2851 GEN_INT (int_size_in_bytes (data->passed_type)),
2852 BLOCK_OP_NORMAL);
2853 }
2854 else
2855 emit_move_insn (dest, src);
2860 all->first_conversion_insn = get_insns ();
2861 all->last_conversion_insn = get_last_insn ();
2865 SET_DECL_RTL (parm, data->stack_parm);
2868 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2869 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2872 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2875 tree orig_fnargs = all->orig_fnargs;
2877 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2879 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2880 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2882 rtx tmp, real, imag;
2883 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2885 real = DECL_RTL (fnargs);
2886 imag = DECL_RTL (TREE_CHAIN (fnargs));
2887 if (inner != GET_MODE (real))
2889 real = gen_lowpart_SUBREG (inner, real);
2890 imag = gen_lowpart_SUBREG (inner, imag);
2893 if (TREE_ADDRESSABLE (parm))
2894 {
2895 rtx rmem, imem;
2896 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2898 /* split_complex_arg put the real and imag parts in
2899 pseudos. Move them to memory. */
2900 tmp = assign_stack_local (DECL_MODE (parm), size,
2901 TYPE_ALIGN (TREE_TYPE (parm)));
2902 set_mem_attributes (tmp, parm, 1);
2903 rmem = adjust_address_nv (tmp, inner, 0);
2904 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2905 push_to_sequence2 (all->first_conversion_insn,
2906 all->last_conversion_insn);
2907 emit_move_insn (rmem, real);
2908 emit_move_insn (imem, imag);
2909 all->first_conversion_insn = get_insns ();
2910 all->last_conversion_insn = get_last_insn ();
2914 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2915 SET_DECL_RTL (parm, tmp);
2917 real = DECL_INCOMING_RTL (fnargs);
2918 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2919 if (inner != GET_MODE (real))
2921 real = gen_lowpart_SUBREG (inner, real);
2922 imag = gen_lowpart_SUBREG (inner, imag);
2924 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2925 set_decl_incoming_rtl (parm, tmp, false);
2926 fnargs = TREE_CHAIN (fnargs);
2928 else
2929 {
2930 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2931 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2933 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2934 instead of the copy of decl, i.e. FNARGS. */
2935 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2936 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2939 fnargs = TREE_CHAIN (fnargs);
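/* Illustration only (editorial sketch): on a target whose ABI splits
   complex arguments, the parameter of a function like the one below
   arrives as two scalar values; the loop above glues DECL_RTL back
   together into a CONCAT of the real and imaginary parts.  */
static double
norm2 (_Complex double z)
{
  return __real__ z * __real__ z + __imag__ z * __imag__ z;
}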
2943 /* Assign RTL expressions to the function's parameters. This may involve
2944 copying them into registers and using those registers as the DECL_RTL. */
2947 assign_parms (tree fndecl)
2949 struct assign_parm_data_all all;
2950 tree fnargs, parm;
2952 current_function_internal_arg_pointer
2953 = targetm.calls.internal_arg_pointer ();
2955 assign_parms_initialize_all (&all);
2956 fnargs = assign_parms_augmented_arg_list (&all);
2958 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2960 struct assign_parm_data_one data;
2962 /* Extract the type of PARM; adjust it according to ABI. */
2963 assign_parm_find_data_types (&all, parm, &data);
2965 /* Early out for errors and void parameters. */
2966 if (data.passed_mode == VOIDmode)
2968 SET_DECL_RTL (parm, const0_rtx);
2969 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2970 continue;
2973 if (current_function_stdarg && !TREE_CHAIN (parm))
2974 assign_parms_setup_varargs (&all, &data, false);
2976 /* Find out where the parameter arrives in this function. */
2977 assign_parm_find_entry_rtl (&all, &data);
2979 /* Find out where stack space for this parameter might be. */
2980 if (assign_parm_is_stack_parm (&all, &data))
2982 assign_parm_find_stack_rtl (parm, &data);
2983 assign_parm_adjust_entry_rtl (&data);
2986 /* Record permanently how this parm was passed. */
2987 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2989 /* Update info on where next arg arrives in registers. */
2990 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2991 data.passed_type, data.named_arg);
2993 assign_parm_adjust_stack_rtl (&data);
2995 if (assign_parm_setup_block_p (&data))
2996 assign_parm_setup_block (&all, parm, &data);
2997 else if (data.passed_pointer || use_register_for_decl (parm))
2998 assign_parm_setup_reg (&all, parm, &data);
3000 assign_parm_setup_stack (&all, parm, &data);
3003 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3004 assign_parms_unsplit_complex (&all, fnargs);
3006 /* Output all parameter conversion instructions (possibly including calls)
3007 now that all parameters have been copied out of hard registers. */
3008 emit_insn (all.first_conversion_insn);
3010 /* If we are receiving a struct value address as the first argument, set up
3011 the RTL for the function result. As this might require code to convert
3012 the transmitted address to Pmode, we do this here to ensure that possible
3013 preliminary conversions of the address have been emitted already. */
3014 if (all.function_result_decl)
3016 tree result = DECL_RESULT (current_function_decl);
3017 rtx addr = DECL_RTL (all.function_result_decl);
3018 rtx x;
3020 if (DECL_BY_REFERENCE (result))
3021 x = addr;
3022 else
3023 {
3024 addr = convert_memory_address (Pmode, addr);
3025 x = gen_rtx_MEM (DECL_MODE (result), addr);
3026 set_mem_attributes (x, result, 1);
3028 SET_DECL_RTL (result, x);
3031 /* We have aligned all the args, so add space for the pretend args. */
3032 current_function_pretend_args_size = all.pretend_args_size;
3033 all.stack_args_size.constant += all.extra_pretend_bytes;
3034 current_function_args_size = all.stack_args_size.constant;
3036 /* Adjust function incoming argument size for alignment and minimum size. */
3039 #ifdef REG_PARM_STACK_SPACE
3040 current_function_args_size = MAX (current_function_args_size,
3041 REG_PARM_STACK_SPACE (fndecl));
3044 current_function_args_size = CEIL_ROUND (current_function_args_size,
3045 PARM_BOUNDARY / BITS_PER_UNIT);
3047 #ifdef ARGS_GROW_DOWNWARD
3048 current_function_arg_offset_rtx
3049 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3050 : expand_expr (size_diffop (all.stack_args_size.var,
3051 size_int (-all.stack_args_size.constant)),
3052 NULL_RTX, VOIDmode, 0));
3054 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3057 /* See how many bytes, if any, of its args a function should try to pop on return. */
3060 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3061 current_function_args_size);
3063 /* For stdarg.h function, save info about
3064 regs and stack space used by the named args. */
3066 current_function_args_info = all.args_so_far;
3068 /* Set the rtx used for the function return value. Put this in its
3069 own variable so any optimizers that need this information don't have
3070 to include tree.h. Do this here so it gets done when an inlined
3071 function gets output. */
3073 current_function_return_rtx
3074 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3075 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3077 /* If scalar return value was computed in a pseudo-reg, or was a named
3078 return value that got dumped to the stack, copy that to the hard return register now. */
3080 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3082 tree decl_result = DECL_RESULT (fndecl);
3083 rtx decl_rtl = DECL_RTL (decl_result);
3085 if (REG_P (decl_rtl)
3086 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3087 : DECL_REGISTER (decl_result))
3089 rtx real_decl_rtl;
3091 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3092 fndecl, true);
3093 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3094 /* The delay slot scheduler assumes that current_function_return_rtx
3095 holds the hard register containing the return value, not a
3096 temporary pseudo. */
3097 current_function_return_rtx = real_decl_rtl;
3102 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3103 For all seen types, gimplify their sizes. */
3106 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3113 if (POINTER_TYPE_P (t))
3114 *walk_subtrees = 1;
3115 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3116 && !TYPE_SIZES_GIMPLIFIED (t))
3118 gimplify_type_sizes (t, (tree *) data);
3119 *walk_subtrees = 1;
3126 /* Gimplify the parameter list for current_function_decl. This involves
3127 evaluating SAVE_EXPRs of variable sized parameters and generating code
3128 to implement callee-copies reference parameters. Returns a list of
3129 statements to add to the beginning of the function, or NULL if nothing needs to be done. */
3133 gimplify_parameters (void)
3135 struct assign_parm_data_all all;
3136 tree fnargs, parm, stmts = NULL;
3138 assign_parms_initialize_all (&all);
3139 fnargs = assign_parms_augmented_arg_list (&all);
3141 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3143 struct assign_parm_data_one data;
3145 /* Extract the type of PARM; adjust it according to ABI. */
3146 assign_parm_find_data_types (&all, parm, &data);
3148 /* Early out for errors and void parameters. */
3149 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3150 continue;
3152 /* Update info on where next arg arrives in registers. */
3153 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3154 data.passed_type, data.named_arg);
3156 /* ??? Once upon a time variable_size stuffed parameter list
3157 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3158 turned out to be less than manageable in the gimple world.
3159 Now we have to hunt them down ourselves. */
3160 walk_tree_without_duplicates (&data.passed_type,
3161 gimplify_parm_type, &stmts);
3163 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3165 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3166 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3169 if (data.passed_pointer)
3171 tree type = TREE_TYPE (data.passed_type);
3172 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3173 type, data.named_arg))
3174 {
3175 tree local, t;
3177 /* For constant sized objects, this is trivial; for
3178 variable-sized objects, we have to play games. */
3179 if (TREE_CONSTANT (DECL_SIZE (parm)))
3181 local = create_tmp_var (type, get_name (parm));
3182 DECL_IGNORED_P (local) = 0;
3184 else
3185 {
3186 tree ptr_type, addr;
3188 ptr_type = build_pointer_type (type);
3189 addr = create_tmp_var (ptr_type, get_name (parm));
3190 DECL_IGNORED_P (addr) = 0;
3191 local = build_fold_indirect_ref (addr);
3193 t = built_in_decls[BUILT_IN_ALLOCA];
3194 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3195 t = fold_convert (ptr_type, t);
3196 t = build_gimple_modify_stmt (addr, t);
3197 gimplify_and_add (t, &stmts);
3200 t = build_gimple_modify_stmt (local, parm);
3201 gimplify_and_add (t, &stmts);
3203 SET_DECL_VALUE_EXPR (parm, local);
3204 DECL_HAS_VALUE_EXPR_P (parm) = 1;
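/* In effect (editorial paraphrase), the statements built above for a
   variable-sized callee-copied parameter amount to

       addr = (type *) __builtin_alloca (DECL_SIZE_UNIT (parm));
       *addr = parm;

   after which PARM's DECL_VALUE_EXPR points at the local copy.  */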
3212 /* Compute the size and offset from the start of the stacked arguments for a
3213 parm passed in mode PASSED_MODE and with type TYPE.
3215 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
3218 The starting offset and size for this parm are returned in
3219 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3220 nonzero, the offset is that of stack slot, which is returned in
3221 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3222 padding required from the initial offset ptr to the stack slot.
3224 IN_REGS is nonzero if the argument will be passed in registers. It will
3225 never be set if REG_PARM_STACK_SPACE is not defined.
3227 FNDECL is the function in which the argument was defined.
3229 There are two types of rounding that are done. The first, controlled by
3230 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3231 list to be aligned to the specific boundary (in bits). This rounding
3232 affects the initial and starting offsets, but not the argument size.
3234 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3235 optionally rounds the size of the parm to PARM_BOUNDARY. The
3236 initial offset is not affected by this rounding, while the size always
3237 is and the starting offset may be. */
3239 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3240 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3241 callers pass in the total size of args so far as
3242 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
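/* Worked example (editorial, for upward-growing args): assume
   PARM_BOUNDARY is 32 bits, FUNCTION_ARG_BOUNDARY returns 64 for this
   argument, and the args-so-far offset is 4 bytes.  The first rounding
   moves the slot offset up to 8; the second leaves a 4-byte argument's
   size at 4 but would round a 2-byte BLKmode argument up to 4.  */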
3245 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3246 int partial, tree fndecl ATTRIBUTE_UNUSED,
3247 struct args_size *initial_offset_ptr,
3248 struct locate_and_pad_arg_data *locate)
3250 tree sizetree;
3251 enum direction where_pad;
3252 unsigned int boundary;
3253 int reg_parm_stack_space = 0;
3254 int part_size_in_regs;
3256 #ifdef REG_PARM_STACK_SPACE
3257 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3259 /* If we have found a stack parm before we reach the end of the
3260 area reserved for registers, skip that area. */
3261 if (! in_regs)
3262 {
3263 if (reg_parm_stack_space > 0)
3265 if (initial_offset_ptr->var)
3267 initial_offset_ptr->var
3268 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3269 ssize_int (reg_parm_stack_space));
3270 initial_offset_ptr->constant = 0;
3272 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3273 initial_offset_ptr->constant = reg_parm_stack_space;
3276 #endif /* REG_PARM_STACK_SPACE */
3278 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3280 sizetree
3281 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3282 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3283 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3284 locate->where_pad = where_pad;
3285 locate->boundary = boundary;
3287 /* Remember if the outgoing parameter requires extra alignment on the
3288 calling function side. */
3289 if (boundary > PREFERRED_STACK_BOUNDARY)
3290 boundary = PREFERRED_STACK_BOUNDARY;
3291 if (cfun->stack_alignment_needed < boundary)
3292 cfun->stack_alignment_needed = boundary;
3294 #ifdef ARGS_GROW_DOWNWARD
3295 locate->slot_offset.constant = -initial_offset_ptr->constant;
3296 if (initial_offset_ptr->var)
3297 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3298 initial_offset_ptr->var);
3300 {
3301 tree s2 = sizetree;
3302 if (where_pad != none
3303 && (!host_integerp (sizetree, 1)
3304 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3305 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3306 SUB_PARM_SIZE (locate->slot_offset, s2);
3309 locate->slot_offset.constant += part_size_in_regs;
3311 if (!in_regs
3312 #ifdef REG_PARM_STACK_SPACE
3313 || REG_PARM_STACK_SPACE (fndecl) > 0
3314 #endif
3315 )
3316 pad_to_arg_alignment (&locate->slot_offset, boundary,
3317 &locate->alignment_pad);
3319 locate->size.constant = (-initial_offset_ptr->constant
3320 - locate->slot_offset.constant);
3321 if (initial_offset_ptr->var)
3322 locate->size.var = size_binop (MINUS_EXPR,
3323 size_binop (MINUS_EXPR,
3324 ssize_int (0),
3325 initial_offset_ptr->var),
3326 locate->slot_offset.var);
3328 /* Pad_below needs the pre-rounded size to know how much to pad below. */
3330 locate->offset = locate->slot_offset;
3331 if (where_pad == downward)
3332 pad_below (&locate->offset, passed_mode, sizetree);
3334 #else /* !ARGS_GROW_DOWNWARD */
3335 if (!in_regs
3336 #ifdef REG_PARM_STACK_SPACE
3337 || REG_PARM_STACK_SPACE (fndecl) > 0
3338 #endif
3339 )
3340 pad_to_arg_alignment (initial_offset_ptr, boundary,
3341 &locate->alignment_pad);
3342 locate->slot_offset = *initial_offset_ptr;
3344 #ifdef PUSH_ROUNDING
3345 if (passed_mode != BLKmode)
3346 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3347 #endif
3349 /* Pad_below needs the pre-rounded size to know how much to pad below
3350 so this must be done before rounding up. */
3351 locate->offset = locate->slot_offset;
3352 if (where_pad == downward)
3353 pad_below (&locate->offset, passed_mode, sizetree);
3355 if (where_pad != none
3356 && (!host_integerp (sizetree, 1)
3357 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3358 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3360 ADD_PARM_SIZE (locate->size, sizetree);
3362 locate->size.constant -= part_size_in_regs;
3363 #endif /* ARGS_GROW_DOWNWARD */
3366 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3367 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3370 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3371 struct args_size *alignment_pad)
3373 tree save_var = NULL_TREE;
3374 HOST_WIDE_INT save_constant = 0;
3375 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3376 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3378 #ifdef SPARC_STACK_BOUNDARY_HACK
3379 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3380 the real alignment of %sp. However, when it does this, the
3381 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3382 if (SPARC_STACK_BOUNDARY_HACK)
3383 sp_offset = 0;
3384 #endif
3386 if (boundary > PARM_BOUNDARY)
3387 {
3388 save_var = offset_ptr->var;
3389 save_constant = offset_ptr->constant;
3390 }
3392 alignment_pad->var = NULL_TREE;
3393 alignment_pad->constant = 0;
3395 if (boundary > BITS_PER_UNIT)
3397 if (offset_ptr->var)
3399 tree sp_offset_tree = ssize_int (sp_offset);
3400 tree offset = size_binop (PLUS_EXPR,
3401 ARGS_SIZE_TREE (*offset_ptr),
3402 sp_offset_tree);
3403 #ifdef ARGS_GROW_DOWNWARD
3404 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3405 #else
3406 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3407 #endif
3409 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3410 /* ARGS_SIZE_TREE includes constant term. */
3411 offset_ptr->constant = 0;
3412 if (boundary > PARM_BOUNDARY)
3413 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3414 save_var);
3416 else
3417 {
3418 offset_ptr->constant = -sp_offset +
3419 #ifdef ARGS_GROW_DOWNWARD
3420 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3421 #else
3422 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3423 #endif
3424 if (boundary > PARM_BOUNDARY)
3425 alignment_pad->constant = offset_ptr->constant - save_constant;
3431 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3433 if (passed_mode != BLKmode)
3435 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3436 offset_ptr->constant
3437 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3438 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3439 - GET_MODE_SIZE (passed_mode));
3441 else
3442 {
3443 if (TREE_CODE (sizetree) != INTEGER_CST
3444 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3446 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3447 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3449 ADD_PARM_SIZE (*offset_ptr, s2);
3450 SUB_PARM_SIZE (*offset_ptr, sizetree);
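/* Worked example (editorial): with PARM_BOUNDARY == 32 and HImode
   (16 bits), the slot occupies 4 bytes but the value only 2, so the
   offset is moved up by ((16 + 31) / 32) * 32 / 8 - 2 == 2 bytes and
   the value ends up in the upper half of its slot.  */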
3456 /* True if register REGNO was alive at a place where `setjmp' was
3457 called and was set more than once or is an argument. Such regs may
3458 be clobbered by `longjmp'. */
3461 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3463 /* There appear to be cases where some local vars never reach the
3464 backend but have bogus regnos. */
3465 if (regno >= max_reg_num ())
3466 return false;
3468 return ((REG_N_SETS (regno) > 1
3469 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3470 && REGNO_REG_SET_P (setjmp_crosses, regno));
3473 /* Walk the tree of blocks describing the binding levels within a
3474 function and warn about variables that might be killed by setjmp or
3475 vfork. This is done after calling flow_analysis and before register
3476 allocation, since that will clobber the pseudo-regs to hard
3477 regs. */
3480 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3482 tree decl, sub;
3484 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3486 if (TREE_CODE (decl) == VAR_DECL
3487 && DECL_RTL_SET_P (decl)
3488 && REG_P (DECL_RTL (decl))
3489 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3490 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3491 " %<longjmp%> or %<vfork%>", decl);
3494 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3495 setjmp_vars_warning (setjmp_crosses, sub);
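/* Illustration only (editorial sketch): code of the following shape is
   what the walk above is meant to catch.  If X lives in a register
   across the setjmp, its value after a longjmp is unpredictable, and
   -Wclobbered warns about it.  */
#include <setjmp.h>
extern jmp_buf env;
extern void may_longjmp (void);
static int
risky (void)
{
  int x = 0;            /* not volatile: may be kept in a register  */
  if (setjmp (env) == 0)
    {
      x = 1;
      may_longjmp ();   /* might call longjmp (env, 1)  */
    }
  return x;             /* indeterminate after a longjmp  */
}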
3498 /* Do the appropriate part of setjmp_vars_warning
3499 but for arguments instead of local variables. */
3502 setjmp_args_warning (bitmap setjmp_crosses)
3504 tree decl;
3505 for (decl = DECL_ARGUMENTS (current_function_decl);
3506 decl; decl = TREE_CHAIN (decl))
3507 if (DECL_RTL (decl) != 0
3508 && REG_P (DECL_RTL (decl))
3509 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3510 warning (OPT_Wclobbered,
3511 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3512 decl);
3515 /* Generate warning messages for variables live across setjmp. */
3518 generate_setjmp_warnings (void)
3520 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3522 if (n_basic_blocks == NUM_FIXED_BLOCKS
3523 || bitmap_empty_p (setjmp_crosses))
3524 return;
3526 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3527 setjmp_args_warning (setjmp_crosses);
3531 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3532 and create duplicate blocks. */
3533 /* ??? Need an option to either create block fragments or to create
3534 abstract origin duplicates of a source block. It really depends
3535 on what optimization has been performed. */
3538 reorder_blocks (void)
3540 tree block = DECL_INITIAL (current_function_decl);
3541 VEC(tree,heap) *block_stack;
3543 if (block == NULL_TREE)
3544 return;
3546 block_stack = VEC_alloc (tree, heap, 10);
3548 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3549 clear_block_marks (block);
3551 /* Prune the old trees away, so that they don't get in the way. */
3552 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3553 BLOCK_CHAIN (block) = NULL_TREE;
3555 /* Recreate the block tree from the note nesting. */
3556 reorder_blocks_1 (get_insns (), block, &block_stack);
3557 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3559 VEC_free (tree, heap, block_stack);
3562 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3565 clear_block_marks (tree block)
3567 while (block)
3568 {
3569 TREE_ASM_WRITTEN (block) = 0;
3570 clear_block_marks (BLOCK_SUBBLOCKS (block));
3571 block = BLOCK_CHAIN (block);
3576 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3580 for (insn = insns; insn; insn = NEXT_INSN (insn))
3582 if (NOTE_P (insn))
3583 {
3584 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3586 tree block = NOTE_BLOCK (insn);
3587 tree origin;
3589 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3590 ? BLOCK_FRAGMENT_ORIGIN (block)
3591 : block);
3593 /* If we have seen this block before, that means it now
3594 spans multiple address regions. Create a new fragment. */
3595 if (TREE_ASM_WRITTEN (block))
3597 tree new_block = copy_node (block);
3599 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3600 BLOCK_FRAGMENT_CHAIN (new_block)
3601 = BLOCK_FRAGMENT_CHAIN (origin);
3602 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3604 NOTE_BLOCK (insn) = new_block;
3605 block = new_block;
3608 BLOCK_SUBBLOCKS (block) = 0;
3609 TREE_ASM_WRITTEN (block) = 1;
3610 /* When there's only one block for the entire function,
3611 current_block == block and we mustn't do this, it
3612 will cause infinite recursion. */
3613 if (block != current_block)
3615 if (block != origin)
3616 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3618 BLOCK_SUPERCONTEXT (block) = current_block;
3619 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3620 BLOCK_SUBBLOCKS (current_block) = block;
3621 current_block = origin;
3623 VEC_safe_push (tree, heap, *p_block_stack, block);
3625 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3627 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3628 BLOCK_SUBBLOCKS (current_block)
3629 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3630 current_block = BLOCK_SUPERCONTEXT (current_block);
3636 /* Reverse the order of elements in the chain T of blocks,
3637 and return the new head of the chain (old last element). */
3640 blocks_nreverse (tree t)
3642 tree prev = 0, decl, next;
3643 for (decl = t; decl; decl = next)
3645 next = BLOCK_CHAIN (decl);
3646 BLOCK_CHAIN (decl) = prev;
3647 prev = decl;
3648 }
3649 return prev;
3652 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3653 non-NULL, list them all into VECTOR, in a depth-first preorder
3654 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all the blocks. */
3658 all_blocks (tree block, tree *vector)
3660 int n_blocks = 0;
3662 while (block)
3663 {
3664 TREE_ASM_WRITTEN (block) = 0;
3666 /* Record this block. */
3667 if (vector)
3668 vector[n_blocks] = block;
3670 ++n_blocks;
3672 /* Record the subblocks, and their subblocks... */
3673 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3674 vector ? vector + n_blocks : 0);
3675 block = BLOCK_CHAIN (block);
3676 }
3678 return n_blocks;
3681 /* Return a vector containing all the blocks rooted at BLOCK. The
3682 number of elements in the vector is stored in N_BLOCKS_P. The
3683 vector is dynamically allocated; it is the caller's responsibility
3684 to call `free' on the pointer returned. */
3687 get_block_vector (tree block, int *n_blocks_p)
3689 tree *block_vector;
3691 *n_blocks_p = all_blocks (block, NULL);
3692 block_vector = XNEWVEC (tree, *n_blocks_p);
3693 all_blocks (block, block_vector);
3695 return block_vector;
3698 static GTY(()) int next_block_index = 2;
3700 /* Set BLOCK_NUMBER for all the blocks in FN. */
3703 number_blocks (tree fn)
3705 int i;
3706 int n_blocks;
3707 tree *block_vector;
3709 /* For SDB and XCOFF debugging output, we start numbering the blocks
3710 from 1 within each function, rather than keeping a running
3712 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3713 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3714 next_block_index = 1;
3715 #endif
3717 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3719 /* The top-level BLOCK isn't numbered at all. */
3720 for (i = 1; i < n_blocks; ++i)
3721 /* We number the blocks from two. */
3722 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3724 free (block_vector);
3729 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3732 debug_find_var_in_block_tree (tree var, tree block)
3734 tree t;
3736 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3737 if (t == var)
3738 return block;
3740 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3742 tree ret = debug_find_var_in_block_tree (var, t);
3743 if (ret)
3744 return ret;
3747 return NULL_TREE;
3750 /* Keep track of whether we're in a dummy function context. If we are,
3751 we don't want to invoke the set_current_function hook, because we'll
3752 get into trouble if the hook calls target_reinit () recursively or
3753 when the initial initialization is not yet complete. */
3755 static bool in_dummy_function;
3757 /* Invoke the target hook when setting cfun. */
3760 invoke_set_current_function_hook (tree fndecl)
3762 if (!in_dummy_function)
3763 targetm.set_current_function (fndecl);
3766 /* cfun should never be set directly; use this function. */
3769 set_cfun (struct function *new_cfun)
3771 if (cfun != new_cfun)
3773 cfun = new_cfun;
3774 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3778 /* Keep track of the cfun stack. */
3780 typedef struct function *function_p;
3782 DEF_VEC_P(function_p);
3783 DEF_VEC_ALLOC_P(function_p,heap);
3785 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3787 static VEC(function_p,heap) *cfun_stack;
3789 /* We save the value of in_system_header here when pushing the first
3790 function on the cfun stack, and we restore it from here when
3791 popping the last function. */
3793 static bool saved_in_system_header;
3795 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3798 push_cfun (struct function *new_cfun)
3800 if (cfun == NULL)
3801 saved_in_system_header = in_system_header;
3802 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3803 if (new_cfun)
3804 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3805 set_cfun (new_cfun);
3808 /* Pop cfun from the stack. */
3810 void
3811 pop_cfun (void)
3812 {
3813 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3814 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3815 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3816 set_cfun (new_cfun);
3819 /* Return the current value of funcdef_no and increase it. */
3821 get_next_funcdef_no (void)
3823 return funcdef_no++;
3826 /* Allocate a function structure for FNDECL and set its contents
3827 to the defaults. Set cfun to the newly-allocated object.
3828 Some of the helper functions invoked during initialization assume
3829 that cfun has already been set. Therefore, assign the new object
3830 directly into cfun and invoke the back end hook explicitly at the
3831 very end, rather than initializing a temporary and calling set_cfun
3834 ABSTRACT_P is true if this is a function that will never be seen by
3835 the middle-end. Such functions are front-end concepts (like C++
3836 function templates) that do not correspond directly to functions
3837 placed in object files. */
3840 allocate_struct_function (tree fndecl, bool abstract_p)
3842 tree result;
3843 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3845 cfun = ggc_alloc_cleared (sizeof (struct function));
3847 cfun->stack_alignment_needed = STACK_BOUNDARY;
3848 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3850 current_function_funcdef_no = get_next_funcdef_no ();
3852 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3854 init_eh_for_function ();
3856 if (init_machine_status)
3857 cfun->machine = (*init_machine_status) ();
3859 if (fndecl)
3860 {
3861 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3862 cfun->decl = fndecl;
3864 result = DECL_RESULT (fndecl);
3865 if (!abstract_p && aggregate_value_p (result, fndecl))
3867 #ifdef PCC_STATIC_STRUCT_RETURN
3868 current_function_returns_pcc_struct = 1;
3869 #endif
3870 current_function_returns_struct = 1;
3873 current_function_stdarg
3874 = (fntype
3875 && TYPE_ARG_TYPES (fntype) != 0
3876 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3877 != void_type_node));
3879 /* Assume all registers in stdarg functions need to be saved. */
3880 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3881 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3884 invoke_set_current_function_hook (fndecl);
3887 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3888 instead of just setting it. */
3891 push_struct_function (tree fndecl)
3894 saved_in_system_header = in_system_header;
3895 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3897 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3898 allocate_struct_function (fndecl, false);
3901 /* Reset cfun, and other non-struct-function variables to defaults as
3902 appropriate for emitting rtl at the start of a function. */
3905 prepare_function_start (void)
3907 gcc_assert (!crtl->emit.x_last_insn);
3909 init_varasm_status ();
3912 cse_not_expected = ! optimize;
3914 /* Caller save not needed yet. */
3915 caller_save_needed = 0;
3917 /* We haven't done register allocation yet. */
3918 reg_renumber = 0;
3920 /* Indicate that we have not instantiated virtual registers yet. */
3921 virtuals_instantiated = 0;
3923 /* Indicate that we want CONCATs now. */
3924 generating_concat_p = 1;
3926 /* Indicate we have no need of a frame pointer yet. */
3927 frame_pointer_needed = 0;
3930 /* Initialize the rtl expansion mechanism so that we can do simple things
3931 like generate sequences. This is used to provide a context during global
3932 initialization of some passes. You must call expand_dummy_function_end
3933 to exit this context. */
3936 init_dummy_function_start (void)
3938 gcc_assert (!in_dummy_function);
3939 in_dummy_function = true;
3940 push_struct_function (NULL_TREE);
3941 prepare_function_start ();
3944 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3945 and initialize static variables for generating RTL for the statements
3949 init_function_start (tree subr)
3951 if (subr && DECL_STRUCT_FUNCTION (subr))
3952 set_cfun (DECL_STRUCT_FUNCTION (subr));
3953 else
3954 allocate_struct_function (subr, false);
3955 prepare_function_start ();
3957 /* Warn if this value is an aggregate type,
3958 regardless of which calling convention we are using for it. */
3959 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3960 warning (OPT_Waggregate_return, "function returns an aggregate");
3963 /* Make sure all values used by the optimization passes have sane defaults. */
3966 init_function_for_compilation (void)
3970 /* No prologue/epilogue insns yet. Make sure that these vectors are empty. */
3972 gcc_assert (VEC_length (int, prologue) == 0);
3973 gcc_assert (VEC_length (int, epilogue) == 0);
3974 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3978 struct rtl_opt_pass pass_init_function =
3984 init_function_for_compilation, /* execute */
3987 0, /* static_pass_number */
3989 0, /* properties_required */
3990 0, /* properties_provided */
3991 0, /* properties_destroyed */
3992 0, /* todo_flags_start */
3993 0 /* todo_flags_finish */
3999 expand_main_function (void)
4001 #if (defined(INVOKE__main) \
4002 || (!defined(HAS_INIT_SECTION) \
4003 && !defined(INIT_SECTION_ASM_OP) \
4004 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4005 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4006 #endif
4009 /* Expand code to initialize the stack_protect_guard. This is invoked at
4010 the beginning of a function to be protected. */
4012 #ifndef HAVE_stack_protect_set
4013 # define HAVE_stack_protect_set 0
4014 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4015 #endif
4018 stack_protect_prologue (void)
4020 tree guard_decl = targetm.stack_protect_guard ();
4021 rtx x, y;
4023 /* Avoid expand_expr here, because we don't want guard_decl pulled
4024 into registers unless absolutely necessary. And we know that
4025 cfun->stack_protect_guard is a local stack slot, so this skips all the fluff. */
4027 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4028 y = validize_mem (DECL_RTL (guard_decl));
4030 /* Allow the target to copy from Y to X without leaking Y into a register. */
4032 if (HAVE_stack_protect_set)
4034 rtx insn = gen_stack_protect_set (x, y);
4035 if (insn)
4036 {
4037 emit_insn (insn);
4038 return;
4039 }
4042 /* Otherwise do a straight move. */
4043 emit_move_insn (x, y);
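/* In effect (editorial paraphrase), the prologue sequence built above
   is simply

       guard_slot = __stack_chk_guard;

   and stack_protect_epilogue below compares the slot against the same
   global again, reaching the failure handler only on a mismatch.  */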
4046 /* Expand code to verify the stack_protect_guard. This is invoked at
4047 the end of a function to be protected. */
4049 #ifndef HAVE_stack_protect_test
4050 # define HAVE_stack_protect_test 0
4051 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4052 #endif
4055 stack_protect_epilogue (void)
4057 tree guard_decl = targetm.stack_protect_guard ();
4058 rtx label = gen_label_rtx ();
4059 rtx x, y, tmp;
4061 /* Avoid expand_expr here, because we don't want guard_decl pulled
4062 into registers unless absolutely necessary. And we know that
4063 cfun->stack_protect_guard is a local stack slot, so this skips all the fluff. */
4065 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4066 y = validize_mem (DECL_RTL (guard_decl));
4068 /* Allow the target to compare Y with X without leaking either into a register. */
4070 switch (HAVE_stack_protect_test != 0)
4072 case 1:
4073 tmp = gen_stack_protect_test (x, y, label);
4074 if (tmp)
4075 {
4076 emit_insn (tmp);
4077 break;
4078 }
4081 default:
4082 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4083 break;
4086 /* The noreturn predictor has been moved to the tree level. The rtl-level
4087 predictors estimate this branch about 20%, which isn't enough to get
4088 things moved out of line. Since this is the only extant case of adding
4089 a noreturn function at the rtl level, it doesn't seem worth doing anything
4090 except adding the prediction by hand. */
4091 tmp = get_last_insn ();
4092 if (JUMP_P (tmp))
4093 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4095 expand_expr_stmt (targetm.stack_protect_fail ());
4097 emit_label (label);
4099 /* Start the RTL for a new function, and set variables used for emitting RTL.
4101 SUBR is the FUNCTION_DECL node.
4102 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4103 the function's parameters, which must be run at any return statement. */
4106 expand_function_start (tree subr)
4108 /* Make sure volatile mem refs aren't considered
4109 valid operands of arithmetic insns. */
4110 init_recog_no_volatile ();
4112 current_function_profile
4114 = (profile_flag && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4116 current_function_limit_stack
4117 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4119 /* Make the label for return statements to jump to. Do not special
4120 case machines with special return instructions -- they will be
4121 handled later during jump, ifcvt, or epilogue creation. */
4122 return_label = gen_label_rtx ();
4124 /* Initialize rtx used to return the value. */
4125 /* Do this before assign_parms so that we copy the struct value address
4126 before any library calls that assign parms might generate. */
4128 /* Decide whether to return the value in memory or in a register. */
4129 if (aggregate_value_p (DECL_RESULT (subr), subr))
4131 /* Returning something that won't go in a register. */
4132 rtx value_address = 0;
4134 #ifdef PCC_STATIC_STRUCT_RETURN
4135 if (current_function_returns_pcc_struct)
4137 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4138 value_address = assemble_static_space (size);
4143 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4144 /* Expect to be passed the address of a place to store the value.
4145 If it is passed as an argument, assign_parms will take care of it.  */
4149 value_address = gen_reg_rtx (Pmode);
4150 emit_move_insn (value_address, sv);
4155 rtx x = value_address;
4156 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4158 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4159 set_mem_attributes (x, DECL_RESULT (subr), 1);
4161 SET_DECL_RTL (DECL_RESULT (subr), x);
4164 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4165 /* If return mode is void, this decl rtl should not be used. */
4166 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4169 /* Compute the return values into a pseudo reg, which we will copy
4170 into the true return register after the cleanups are done. */
4171 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4172 if (TYPE_MODE (return_type) != BLKmode
4173 && targetm.calls.return_in_msb (return_type))
4174 /* expand_function_end will insert the appropriate padding in
4175 this case. Use the return value's natural (unpadded) mode
4176 within the function proper. */
4177 SET_DECL_RTL (DECL_RESULT (subr),
4178 gen_reg_rtx (TYPE_MODE (return_type)));
4181 /* In order to figure out what mode to use for the pseudo, we
4182 figure out what the mode of the eventual return register will
4183 actually be, and use that. */
4184 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4186 /* Structures that are returned in registers are not
4187 aggregate_value_p, so we may see a PARALLEL or a REG. */
4188 if (REG_P (hard_reg))
4189 SET_DECL_RTL (DECL_RESULT (subr),
4190 gen_reg_rtx (GET_MODE (hard_reg)));
4193 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4194 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4198 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4199 result to the real return register(s). */
4200 DECL_REGISTER (DECL_RESULT (subr)) = 1;
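/* Editor's illustration of the memory-vs-register split decided above,
   using the x86-64 SysV ABI purely as an example (an assumption for
   illustration, not a rule encoded here):

       struct big   { char b[64]; };   struct big   f (void);
       struct small { int i; };        struct small g (void);

   f's value is aggregate_value_p, so f is compiled much as if it were
   'void f (struct big *hidden)' and takes the value_address path;
   g's value fits in registers, so g takes the pseudo-register path
   and is copied to the hard return register in expand_function_end.  */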
4203 /* Initialize rtx for parameters and local variables.
4204 In some cases this requires emitting insns. */
4205 assign_parms (subr);
4207 /* If function gets a static chain arg, store it. */
4208 if (cfun->static_chain_decl)
4210 tree parm = cfun->static_chain_decl;
4211 rtx local = gen_reg_rtx (Pmode);
4213 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4214 SET_DECL_RTL (parm, local);
4215 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4217 emit_move_insn (local, static_chain_incoming_rtx);
4220 /* If the function receives a non-local goto, then store the
4221 bits we need to restore the frame pointer. */
4222 if (cfun->nonlocal_goto_save_area)
4227 /* ??? We need to do this save early.  Unfortunately, this point is
4228 before the frame variable gets declared.  Help out...  */
4229 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4230 if (!DECL_RTL_SET_P (var))
4233 t_save = build4 (ARRAY_REF, ptr_type_node,
4234 cfun->nonlocal_goto_save_area,
4235 integer_zero_node, NULL_TREE, NULL_TREE);
4236 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4237 r_save = convert_memory_address (Pmode, r_save);
4239 emit_move_insn (r_save, virtual_stack_vars_rtx);
4240 update_nonlocal_goto_save_area ();
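/* Editor's illustration: the save area behaves like a small array

       void *save_area[2];    save_area[0] = frame_pointer;

   written here in the prologue; a non-local goto that re-enters this
   frame reloads the frame pointer (and the stack pointer, stored in
   the rest of the area by update_nonlocal_goto_save_area) before
   jumping to the target label.  */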
4243 /* The following was moved from init_function_start.
4244 The move is supposed to make sdb output more accurate. */
4245 /* Indicate the beginning of the function body,
4246 as opposed to parm setup. */
4247 emit_note (NOTE_INSN_FUNCTION_BEG);
4249 gcc_assert (NOTE_P (get_last_insn ()));
4251 parm_birth_insn = get_last_insn ();
4253 if (current_function_profile)
4256 PROFILE_HOOK (current_function_funcdef_no);
4260 /* After the display initializations is where the stack checking probe should go.  */
4262 if (flag_stack_check)
4263 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4265 /* Make sure there is a line number after the function entry setup code. */
4266 force_next_line_note ();
4269 /* Undo the effects of init_dummy_function_start. */
4271 expand_dummy_function_end (void)
4273 gcc_assert (in_dummy_function);
4275 /* End any sequences that failed to be closed due to syntax errors. */
4276 while (in_sequence_p ()) end_sequence ();
4279 /* Outside function body, can't compute type's actual size
4280 until next function's body starts. */
4282 free_after_parsing (cfun);
4283 free_after_compilation (cfun);
4285 in_dummy_function = false;
4288 /* Call DOIT for each hard register used as a return value from
4289 the current function. */
4292 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4294 rtx outgoing = current_function_return_rtx;
4299 if (REG_P (outgoing))
4300 (*doit) (outgoing, arg);
4301 else if (GET_CODE (outgoing) == PARALLEL)
4305 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4307 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4309 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4316 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4318 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4322 clobber_return_register (void)
4324 diddle_return_value (do_clobber_return_reg, NULL);
4326 /* In case we use a pseudo to return the value, clobber it too.  */
4327 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4329 tree decl_result = DECL_RESULT (current_function_decl);
4330 rtx decl_rtl = DECL_RTL (decl_result);
4331 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4333 do_clobber_return_reg (decl_rtl, NULL);
4339 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4341 emit_insn (gen_rtx_USE (VOIDmode, reg));
4345 use_return_register (void)
4347 diddle_return_value (do_use_return_reg, NULL);
4350 /* Possibly warn about unused parameters. */
4352 do_warn_unused_parameter (tree fn)
4356 for (decl = DECL_ARGUMENTS (fn);
4357 decl; decl = TREE_CHAIN (decl))
4358 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4359 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4360 && !TREE_NO_WARNING (decl))
4361 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
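/* Editor's illustration: given

       int f (int used, int unused) { return used; }

   -Wunused-parameter warns for 'unused' only; unnamed parameters and
   artificial ones (such as the C++ 'this') are skipped by the checks
   above.  */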
4364 static GTY(()) rtx initial_trampoline;
4366 /* Generate RTL for the end of the current function. */
4369 expand_function_end (void)
4373 /* If arg_pointer_save_area was referenced only from a nested
4374 function, we will not have initialized it yet. Do that now. */
4375 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4376 get_arg_pointer_save_area ();
4378 /* If we are doing stack checking and this function makes calls,
4379 do a stack probe at the start of the function to ensure we have enough
4380 space for another stack frame. */
4381 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4385 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4389 probe_stack_range (STACK_CHECK_PROTECT,
4390 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4393 emit_insn_before (seq, stack_check_probe_note);
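/* Editor's note (a sketch of the intent, assuming the generic probing
   scheme controlled by STACK_CHECK_PROBE_INTERVAL): probe_stack_range
   touches one word of stack at regular intervals across the next
   STACK_CHECK_MAX_FRAME_SIZE bytes, conceptually

       for (off = first; off < first + size; off += interval)
         *(volatile char *) (sp - off) = 0;

   so that growing past the guard page faults here, at a known point,
   rather than somewhere inside a callee.  */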
4398 /* End any sequences that failed to be closed due to syntax errors. */
4399 while (in_sequence_p ()) end_sequence ();
4402 clear_pending_stack_adjust ();
4403 do_pending_stack_adjust ();
4405 /* Output a line number for the end of the function.
4406 SDB depends on this.  */
4407 force_next_line_note ();
4408 set_curr_insn_source_location (input_location);
4410 /* Before the return label (if any), clobber the return
4411 registers so that they are not propagated live to the rest of
4412 the function. This can only happen with functions that drop
4413 through; if there had been a return statement, there would
4414 have either been a return rtx, or a jump to the return label.
4416 We delay actual code generation after the current_function_value_rtx is computed.  */
4418 clobber_after = get_last_insn ();
4420 /* Output the label for the actual return from the function. */
4421 emit_label (return_label);
4423 if (USING_SJLJ_EXCEPTIONS)
4425 /* Let except.c know where it should emit the call to unregister
4426 the function context for sjlj exceptions. */
4427 if (flag_exceptions)
4428 sjlj_emit_function_exit_after (get_last_insn ());
4432 /* We want to ensure that instructions that may trap are not
4433 moved into the epilogue by scheduling, because we don't
4434 always emit unwind information for the epilogue. */
4435 if (flag_non_call_exceptions)
4436 emit_insn (gen_blockage ());
4439 /* If this is an implementation of throw, do what's necessary to
4440 communicate between __builtin_eh_return and the epilogue. */
4441 expand_eh_return ();
4443 /* If scalar return value was computed in a pseudo-reg, or was a named
4444 return value that got dumped to the stack, copy that to the hard return register now.  */
4446 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4448 tree decl_result = DECL_RESULT (current_function_decl);
4449 rtx decl_rtl = DECL_RTL (decl_result);
4451 if (REG_P (decl_rtl)
4452 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4453 : DECL_REGISTER (decl_result))
4455 rtx real_decl_rtl = current_function_return_rtx;
4457 /* This should be set in assign_parms. */
4458 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4460 /* If this is a BLKmode structure being returned in registers,
4461 then use the mode computed in expand_return. Note that if
4462 decl_rtl is memory, then its mode may have been changed,
4463 but that current_function_return_rtx has not. */
4464 if (GET_MODE (real_decl_rtl) == BLKmode)
4465 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4467 /* If a non-BLKmode return value should be padded at the least
4468 significant end of the register, shift it left by the appropriate
4469 amount.  BLKmode results are handled using the group load/store machinery.  */
4471 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4472 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4474 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4475 REGNO (real_decl_rtl)),
4477 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4479 /* If a named return value dumped decl_rtl to memory, then
4480 we may need to re-do the PROMOTE_MODE signed/unsigned coercion.  */
4482 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4484 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4486 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4487 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl), &unsignedp, 1);
4490 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4492 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4494 /* If expand_function_start has created a PARALLEL for decl_rtl,
4495 move the result to the real return registers. Otherwise, do
4496 a group load from decl_rtl for a named return. */
4497 if (GET_CODE (decl_rtl) == PARALLEL)
4498 emit_group_move (real_decl_rtl, decl_rtl);
4500 emit_group_load (real_decl_rtl, decl_rtl,
4501 TREE_TYPE (decl_result),
4502 int_size_in_bytes (TREE_TYPE (decl_result)));
4504 /* In the case of complex integer modes smaller than a word, we'll
4505 need to generate some non-trivial bitfield insertions. Do that
4506 on a pseudo and not the hard register. */
4507 else if (GET_CODE (decl_rtl) == CONCAT
4508 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4509 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4511 int old_generating_concat_p;
4514 old_generating_concat_p = generating_concat_p;
4515 generating_concat_p = 0;
4516 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4517 generating_concat_p = old_generating_concat_p;
4519 emit_move_insn (tmp, decl_rtl);
4520 emit_move_insn (real_decl_rtl, tmp);
4523 emit_move_insn (real_decl_rtl, decl_rtl);
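/* Editor's illustration of the CONCAT case above: for a function
   returning 'complex short' on a 32-bit target, decl_rtl is a CONCAT
   of two 16-bit halves while the hard return register is a single
   32-bit REG, so the halves are first assembled into a pseudo (via
   bitfield inserts) and only then moved to the hard register.  */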
4527 /* If returning a structure, arrange to return the address of the value
4528 in a place where debuggers expect to find it.
4530 If returning a structure PCC style,
4531 the caller also depends on this value.
4532 And current_function_returns_pcc_struct is not necessarily set. */
4533 if (current_function_returns_struct
4534 || current_function_returns_pcc_struct)
4536 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4537 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4540 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4541 type = TREE_TYPE (type);
4543 value_address = XEXP (value_address, 0);
4545 outgoing = targetm.calls.function_value (build_pointer_type (type),
4546 current_function_decl, true);
4548 /* Mark this as a function return value so integrate will delete the
4549 assignment and USE below when inlining this function. */
4550 REG_FUNCTION_VALUE_P (outgoing) = 1;
4552 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4553 value_address = convert_memory_address (GET_MODE (outgoing),
4556 emit_move_insn (outgoing, value_address);
4558 /* Show return register used to hold result (in this case the address of the structure).  */
4560 current_function_return_rtx = outgoing;
4563 /* Emit the actual code to clobber return register. */
4568 clobber_return_register ();
4569 expand_naked_return ();
4573 emit_insn_after (seq, clobber_after);
4576 /* Output the label for the naked return from the function. */
4577 emit_label (naked_return_label);
4579 /* @@@ This is a kludge. We want to ensure that instructions that
4580 may trap are not moved into the epilogue by scheduling, because
4581 we don't always emit unwind information for the epilogue. */
4582 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4583 emit_insn (gen_blockage ());
4585 /* If stack protection is enabled for this function, check the guard. */
4586 if (cfun->stack_protect_guard)
4587 stack_protect_epilogue ();
4589 /* If we had calls to alloca, and this machine needs
4590 an accurate stack pointer to exit the function,
4591 insert some code to save and restore the stack pointer. */
4592 if (! EXIT_IGNORE_STACK
4593 && current_function_calls_alloca)
4597 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4598 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4601 /* ??? This should no longer be necessary since stupid is no longer with
4602 us, but there are some parts of the compiler (e.g. reload_combine, and
4603 sh mach_dep_reorg) that still try to compute their own lifetime info
4604 instead of using the general framework.  */
4605 use_return_register ();
4609 get_arg_pointer_save_area (void)
4611 rtx ret = arg_pointer_save_area;
4615 if (! ret) ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4616 arg_pointer_save_area = ret;
4619 if (! cfun->arg_pointer_save_area_init)
4623 /* Save the arg pointer at the beginning of the function. The
4624 generated stack slot may not be a valid memory address, so we
4625 have to check it and fix it if necessary. */
4627 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4631 push_topmost_sequence ();
4632 emit_insn_after (seq, entry_of_function ());
4633 pop_topmost_sequence ();
4639 /* Extend a vector that records the INSN_UIDs of INSNS
4640 (a list of one or more insns). */
4643 record_insns (rtx insns, VEC(int,heap) **vecp)
4647 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4648 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4651 /* Set the locator of the insn chain starting at INSN to LOC. */
4653 set_insn_locators (rtx insn, int loc)
4655 while (insn != NULL_RTX)
4658 INSN_LOCATOR (insn) = loc;
4659 insn = NEXT_INSN (insn);
4663 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4664 be running after reorg, SEQUENCE rtl is possible. */
4667 contains (const_rtx insn, VEC(int,heap) **vec)
4671 if (NONJUMP_INSN_P (insn)
4672 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4675 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4676 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4677 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4678 == VEC_index (int, *vec, j))
4684 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4685 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4692 prologue_epilogue_contains (const_rtx insn)
4694 if (contains (insn, &prologue))
4696 if (contains (insn, &epilogue))
4702 sibcall_epilogue_contains (const_rtx insn)
4704 if (sibcall_epilogue)
4705 return contains (insn, &sibcall_epilogue);
4710 /* Insert gen_return at the end of block BB. This also means updating
4711 block_for_insn appropriately. */
4714 emit_return_into_block (basic_block bb)
4716 emit_jump_insn_after (gen_return (), BB_END (bb));
4718 #endif /* HAVE_return */
4720 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4721 this into place with notes indicating where the prologue ends and where
4722 the epilogue begins. Update the basic block information when possible. */
4725 thread_prologue_and_epilogue_insns (void)
4729 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4732 #if defined (HAVE_epilogue) || defined(HAVE_return)
4733 rtx epilogue_end = NULL_RTX;
4737 #ifdef HAVE_prologue
4741 seq = gen_prologue ();
4744 /* Insert an explicit USE for the frame pointer
4745 if profiling is on and the frame pointer is required.  */
4746 if (current_function_profile && frame_pointer_needed)
4747 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
4749 /* Retain a map of the prologue insns. */
4750 record_insns (seq, &prologue);
4751 emit_note (NOTE_INSN_PROLOGUE_END);
4753 #ifndef PROFILE_BEFORE_PROLOGUE
4754 /* Ensure that instructions are not moved into the prologue when
4755 profiling is on. The call to the profiling routine can be
4756 emitted within the live range of a call-clobbered register. */
4757 if (current_function_profile)
4758 emit_insn (gen_blockage ());
4763 set_insn_locators (seq, prologue_locator);
4765 /* Can't deal with multiple successors of the entry block
4766 at the moment.  Function should always have at least one entry point.  */
4768 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
4770 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
4775 /* If the exit block has no non-fake predecessors, we don't need an epilogue.  */
4777 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4778 if ((e->flags & EDGE_FAKE) == 0)
4784 if (optimize && HAVE_return)
4786 /* If we're allowed to generate a simple return instruction,
4787 then by definition we don't need a full epilogue. Examine
4788 the block that falls through to EXIT. If it does not
4789 contain any code, examine its predecessors and try to
4790 emit (conditional) return instructions. */
4795 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4796 if (e->flags & EDGE_FALLTHRU)
4802 /* Verify that there are no active instructions in the last block. */
4803 label = BB_END (last);
4804 while (label && !LABEL_P (label))
4806 if (active_insn_p (label))
4808 label = PREV_INSN (label);
4811 if (BB_HEAD (last) == label && LABEL_P (label))
4815 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
4817 basic_block bb = e->src;
4820 if (bb == ENTRY_BLOCK_PTR)
4827 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
4833 /* If we have an unconditional jump, we can replace that
4834 with a simple return instruction. */
4835 if (simplejump_p (jump))
4837 emit_return_into_block (bb);
4841 /* If we have a conditional jump, we can try to replace
4842 that with a conditional return instruction. */
4843 else if (condjump_p (jump))
4845 if (! redirect_jump (jump, 0, 0))
4851 /* If this block has only one successor, it both jumps
4852 and falls through to the fallthru block, so we can't delete it.  */
4854 if (single_succ_p (bb))
4866 /* Fix up the CFG for the successful change we just made. */
4867 redirect_edge_succ (e, EXIT_BLOCK_PTR);
4870 /* Emit a return insn for the exit fallthru block. Whether
4871 this is still reachable will be determined later. */
4873 emit_barrier_after (BB_END (last));
4874 emit_return_into_block (last);
4875 epilogue_end = BB_END (last);
4876 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
4881 /* Find the edge that falls through to EXIT. Other edges may exist
4882 due to RETURN instructions, but those don't need epilogues.
4883 There really shouldn't be a mixture -- either all should have
4884 been converted or none, however... */
4886 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4887 if (e->flags & EDGE_FALLTHRU)
4892 #ifdef HAVE_epilogue
4896 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
4897 seq = gen_epilogue ();
4898 emit_jump_insn (seq);
4900 /* Retain a map of the epilogue insns. */
4901 record_insns (seq, &epilogue);
4902 set_insn_locators (seq, epilogue_locator);
4907 insert_insn_on_edge (seq, e);
4915 if (! next_active_insn (BB_END (e->src)))
4917 /* We have a fall-through edge to the exit block, the source is not
4918 at the end of the function, and there will be an assembler epilogue
4919 at the end of the function.
4920 We can't use force_nonfallthru here, because that would try to
4921 use return. Inserting a jump 'by hand' is extremely messy, so
4922 we take advantage of cfg_layout_finalize using
4923 fixup_fallthru_exit_predecessor. */
4924 cfg_layout_initialize (0);
4925 FOR_EACH_BB (cur_bb)
4926 if (cur_bb->index >= NUM_FIXED_BLOCKS
4927 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
4928 cur_bb->aux = cur_bb->next_bb;
4929 cfg_layout_finalize ();
4935 commit_edge_insertions ();
4937 /* The epilogue insns we inserted may cause the exit edge to no longer be fallthru.  */
4939 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4941 if (((e->flags & EDGE_FALLTHRU) != 0)
4942 && returnjump_p (BB_END (e->src)))
4943 e->flags &= ~EDGE_FALLTHRU;
4947 #ifdef HAVE_sibcall_epilogue
4948 /* Emit sibling epilogues before any sibling call sites. */
4949 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
4951 basic_block bb = e->src;
4952 rtx insn = BB_END (bb);
4955 if (!CALL_P (insn) || ! SIBLING_CALL_P (insn))
4962 emit_insn (gen_sibcall_epilogue ());
4966 /* Retain a map of the epilogue insns. Used in life analysis to
4967 avoid getting rid of sibcall epilogue insns. Do this before we
4968 actually emit the sequence. */
4969 record_insns (seq, &sibcall_epilogue);
4970 set_insn_locators (seq, epilogue_locator);
4972 emit_insn_before (seq, insn);
4977 #ifdef HAVE_epilogue
4982 /* Similarly, move any line notes that appear after the epilogue.
4983 There is no need, however, to be quite so strict about the existence
4984 of such a note.  Also possibly move
4985 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug info generation.  */
4987 for (insn = epilogue_end; insn; insn = next)
4989 next = NEXT_INSN (insn);
4991 if (NOTE_P (insn) && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
4992 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
4997 /* Threading the prologue and epilogue changes the artificial refs
4998 in the entry and exit blocks. */
4999 epilogue_completed = 1;
5000 df_update_entry_exit_and_calls ();
5003 /* Reposition the prologue-end and epilogue-begin notes after instruction
5004 scheduling and delayed branch scheduling. */
5007 reposition_prologue_and_epilogue_notes (void)
5009 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5010 rtx insn, last, note;
5013 if ((len = VEC_length (int, prologue)) > 0)
5017 /* Scan from the beginning until we reach the last prologue insn.
5018 We apparently can't depend on basic_block_{head,end} after reorg has run.  */
5020 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5024 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5027 else if (contains (insn, &prologue))
5037 /* Find the prologue-end note if we haven't already, and
5038 move it to just after the last prologue insn. */
5041 for (note = last; (note = NEXT_INSN (note));)
5043 if (NOTE_P (note) && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5047 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5049 last = NEXT_INSN (last);
5050 reorder_insns (note, note, last);
5054 if ((len = VEC_length (int, epilogue)) > 0)
5058 /* Scan from the end until we reach the first epilogue insn.
5059 We apparently can't depend on basic_block_{head,end} after reorg has run.  */
5061 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5065 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5068 else if (contains (insn, &epilogue))
5078 /* Find the epilogue-begin note if we haven't already, and
5079 move it to just before the first epilogue insn. */
5082 for (note = insn; (note = PREV_INSN (note));)
5084 if (NOTE_P (note) && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5088 if (PREV_INSN (last) != note)
5089 reorder_insns (note, note, PREV_INSN (last));
5092 #endif /* HAVE_prologue or HAVE_epilogue */
5095 /* Returns the name of the current function. */
5097 current_function_name (void)
5099 return lang_hooks.decl_printable_name (cfun->decl, 2);
5102 /* Returns the raw (mangled) name of the current function. */
5104 current_function_assembler_name (void)
5106 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5111 rest_of_handle_check_leaf_regs (void)
5113 #ifdef LEAF_REGISTERS
5114 current_function_uses_only_leaf_regs
5115 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5120 /* Insert a TYPE into the used types hash table of CFUN. */
5122 used_types_insert_helper (tree type, struct function *func)
5124 if (type != NULL && func != NULL)
5128 if (func->used_types_hash == NULL)
5129 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5130 htab_eq_pointer, NULL);
5131 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5137 /* Given a type, insert it into the used-types hash table of cfun.  */
5139 used_types_insert (tree t)
5141 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) t = TREE_TYPE (t);
5143 t = TYPE_MAIN_VARIANT (t);
5144 if (debug_info_level > DINFO_LEVEL_NONE)
5145 used_types_insert_helper (t, cfun);
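/* Editor's illustration: for a declaration like

       struct s *matrix[8][8];

   the loop above strips the array and pointer wrappers, so what gets
   recorded is the main variant of 'struct s' itself.  */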
5148 struct rtl_opt_pass pass_leaf_regs =
5154 rest_of_handle_check_leaf_regs, /* execute */
5157 0, /* static_pass_number */
5159 0, /* properties_required */
5160 0, /* properties_provided */
5161 0, /* properties_destroyed */
5162 0, /* todo_flags_start */
5163 0 /* todo_flags_finish */
5168 rest_of_handle_thread_prologue_and_epilogue (void)
5171 cleanup_cfg (CLEANUP_EXPENSIVE);
5172 /* On some machines, the prologue and epilogue code, or parts thereof,
5173 can be represented as RTL. Doing so lets us schedule insns between
5174 it and the rest of the code and also allows delayed branch
5175 scheduling to operate in the epilogue. */
5177 thread_prologue_and_epilogue_insns ();
5181 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5185 "pro_and_epilogue", /* name */
5187 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5190 0, /* static_pass_number */
5191 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5192 0, /* properties_required */
5193 0, /* properties_provided */
5194 0, /* properties_destroyed */
5195 TODO_verify_flow, /* todo_flags_start */
5198 TODO_df_finish | TODO_verify_rtl_sharing |
5199 TODO_ggc_collect /* todo_flags_finish */
5204 /* This mini-pass fixes fall-out from SSA in asm statements that have
5205 in-out constraints. Say you start with
5208 asm ("": "+mr" (inout));
5211 which is transformed very early to use explicit output and match operands:
5214 asm ("": "=mr" (inout) : "0" (inout));
5217 Or, after SSA and copyprop,
5219 asm ("": "=mr" (inout_2) : "0" (inout_1));
5222 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5223 they represent two separate values, so they will get different pseudo
5224 registers during expansion. Then, since the two operands need to match
5225 per the constraints, but use different pseudo registers, reload can
5226 only fix this up by generating a reload for these operands.  But reloads can only be
5227 satisfied by hardregs, not by memory, so we need a register for this
5228 reload, just because we are presented with non-matching operands.
5229 So, even though we allow memory for this operand, no memory can be
5230 used for it, just because the two operands don't match. This can
5231 cause reload failures on register-starved targets.
5233 So it's a symptom of reload not being able to use memory for reloads
5234 or, alternatively it's also a symptom of both operands not coming into
5235 reload as matching (in which case the pseudo could go to memory just
5236 fine, as the alternative allows it, and no reload would be necessary).
5237 We fix the latter problem here, by transforming
5239 asm ("": "=mr" (inout_2) : "0" (inout_1));
5244 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5247 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5250 bool changed = false;
5251 rtx op = SET_SRC (p_sets[0]);
5252 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5253 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5254 bool *output_matched = alloca (noutputs * sizeof (bool));
5256 memset (output_matched, 0, noutputs * sizeof (bool));
5257 for (i = 0; i < ninputs; i++)
5259 rtx input, output, insns;
5260 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5264 match = strtoul (constraint, &end, 10);
5265 if (end == constraint)
5268 gcc_assert (match < noutputs);
5269 output = SET_DEST (p_sets[match]);
5270 input = RTVEC_ELT (inputs, i);
5271 /* Only do the transformation for pseudos. */
5272 if (! REG_P (output)
5273 || rtx_equal_p (output, input)
5274 || (GET_MODE (input) != VOIDmode
5275 && GET_MODE (input) != GET_MODE (output)))
5278 /* We can't do anything if the output is also used as input,
5279 as we're going to overwrite it. */
5280 for (j = 0; j < ninputs; j++)
5281 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5286 /* Avoid changing the same input several times. For
5287 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5288 only change 'in' once (to out1), rather than changing it
5289 first to out1 and afterwards to out2. */
5292 for (j = 0; j < noutputs; j++)
5293 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5298 output_matched[match] = true;
5301 emit_move_insn (output, input);
5302 insns = get_insns ();
5304 emit_insn_before (insns, insn);
5306 /* Now replace all mentions of the input with output. We can't
5307 just replace the occurrence in inputs[i], as the register might
5308 also be used in some other input (or even in an address of an
5309 output), which would mean possibly increasing the number of
5310 inputs by one (namely 'output' in addition), which might pose
5311 a too complicated problem for reload to solve. E.g. this situation:
5313 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5315 Here 'input' is used in two occurrences as input (once for the
5316 input operand, once for the address in the second output operand).
5317 If we replaced only the occurrence of the input operand (to
5318 make the operands match) we would be left with this:
5321 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5323 Now we suddenly have two different input values (containing the same
5324 value, but different pseudos) where we formerly had only one.
5325 With more complicated asms this might lead to reload failures
5326 which wouldn't have happened without this pass.  So, iterate over
5327 all operands and replace all occurrences of the register used. */
5328 for (j = 0; j < noutputs; j++)
5329 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5330 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5331 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5333 for (j = 0; j < ninputs; j++)
5334 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5335 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5342 df_insn_rescan (insn);
5346 rest_of_match_asm_constraints (void)
5349 rtx insn, pat, *p_sets;
5352 if (!cfun->has_asm_statement)
5355 df_set_flags (DF_DEFER_INSN_RESCAN);
5358 FOR_BB_INSNS (bb, insn)
5363 pat = PATTERN (insn);
5364 if (GET_CODE (pat) == PARALLEL)
5365 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5366 else if (GET_CODE (pat) == SET)
5367 p_sets = &PATTERN (insn), noutputs = 1;
5371 if (GET_CODE (*p_sets) == SET
5372 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5373 match_asm_constraints_1 (insn, p_sets, noutputs);
5377 return TODO_df_finish;
5380 struct rtl_opt_pass pass_match_asm_constraints =
5384 "asmcons", /* name */
5386 rest_of_match_asm_constraints, /* execute */
5389 0, /* static_pass_number */
5391 0, /* properties_required */
5392 0, /* properties_provided */
5393 0, /* properties_destroyed */
5394 0, /* todo_flags_start */
5395 TODO_dump_func /* todo_flags_finish */
5400 #include "gt-function.h"