/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
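
/* Illustrative sketch (not part of GCC): the shape of the per-function
   call sequence a front end follows, under simplifying assumptions; the
   driver name `expand_one_function' is made up for illustration.  */
#if 0 /* Example only, not compiled.  */
static void
expand_one_function (tree fndecl)
{
  /* Prepare per-function RTL state before expanding the body.  */
  expand_function_start (fndecl);

  /* ... expand each statement of the body to RTL; along the way,
     locals get stack slots via assign_stack_local and scratch space
     via assign_stack_temp ...  */

  /* Emit the return sequence and finish the function's RTL.  */
  expand_function_end ();
}
#endif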
38 #include "coretypes.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
53 #include "basic-block.h"
58 #include "integrate.h"
59 #include "langhooks.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63 #include "tree-pass.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
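
/* Worked example (not part of GCC): with ALIGN == 16 these reduce to bit
   masking, which is why they stay well defined for the negative offsets
   produced when FRAME_GROWS_DOWNWARD:
     FLOOR_ROUND (-20, 16) == -20 & ~15        == -32
     CEIL_ROUND  (-20, 16) == (-20 + 15) & ~15 == -16
   whereas -20 / 16 rounds in an implementation-defined direction in
   older C dialects.  */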
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* This variable holds a pointer to the function used to create
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
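
/* Illustrative sketch (not part of GCC): the nesting discipline a caller
   is expected to follow when expanding a statement that needs scratch
   stack space; the helper name is made up for illustration.  */
#if 0 /* Example only, not compiled.  */
static rtx
expand_with_scratch (enum machine_mode mode, HOST_WIDE_INT size)
{
  rtx slot;

  push_temp_slots ();           /* Enter a new nesting level.  */
  slot = assign_stack_temp (mode, size, 0);
  /* ... emit RTL that uses SLOT ...  */
  preserve_temp_slots (slot);   /* Keep SLOT alive past this level.  */
  pop_temp_slots ();            /* Frees everything else at this level.  */
  return slot;
}
#endif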
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  cfun->outer = outer_function_chain;
  outer_function_chain = cfun;
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = outer_function_chain;
  set_cfun (p);
  outer_function_chain = p->outer;
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment, alignment_in_bits;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment requests we cannot honor, given the expected
     alignment of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
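
/* Illustrative sketch (not part of GCC): the four ALIGN conventions
   documented above, shown as calls; the sizes and modes are examples
   only.  */
#if 0 /* Example only, not compiled.  */
static void
align_examples (void)
{
  rtx a = assign_stack_local (SImode, 4, 0);    /* Align per SImode.  */
  rtx b = assign_stack_local (BLKmode, 32, -1); /* BIGGEST_ALIGNMENT; size
						   rounded to it.  */
  rtx c = assign_stack_local (BLKmode, 3, -2);  /* Byte alignment.  */
  rtx d = assign_stack_local (BLKmode, 24, 64); /* 64-bit boundary.  */
}
#endif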
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->address = 0;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
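
/* Illustrative sketch (not part of GCC): how a caller might obtain a
   temporary for a structure value; `struct_type' is a made-up local
   for illustration.  */
#if 0 /* Example only, not compiled.  */
static rtx
temp_for_struct_value (tree struct_type)
{
  enum machine_mode mode = TYPE_MODE (struct_type);
  HOST_WIDE_INT size = int_size_in_bytes (struct_type);

  /* KEEP == 0: the slot may be reclaimed by the next free_temp_slots.  */
  return assign_stack_temp_for_type (mode, size, 0, struct_type);
}
#endif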
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate memory of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
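
/* Worked example (not part of GCC): the adjacency test above in numbers.
   A freed BLKmode slot P at base_offset 0 with full_size 16 and another
   slot Q at base_offset 16 satisfy
     p->base_offset + p->full_size == q->base_offset,
   so Q is merged into P, leaving one 32-byte slot that a later, larger
   temporary can reuse.  */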
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
	if (XEXP (p->slot, 0) == x
	    || p->address == x
	    || (GET_CODE (x) == PLUS
		&& XEXP (x, 0) == virtual_stack_vars_rtx
		&& GET_CODE (XEXP (x, 1)) == CONST_INT
		&& INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	  return p;

	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	  for (next = p->address; next; next = XEXP (next, 1))
	    if (XEXP (next, 0) == x)
	      return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

static void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS with a register in common between them, try a recursive call
     on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (REG_P (new))
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots are left
   untouched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;
	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}
      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;
	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}
      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;
	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }
	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      if (!p->keep)
	make_slot_available (p);
    }

  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
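
/* Worked example (not part of GCC): on a hypothetical target that
   accumulates outgoing arguments, reserves no register-parameter stack
   space, and leaves STACK_POINTER_OFFSET at 0, a function whose calls
   need at most 48 bytes of outgoing arguments gets
   STACK_DYNAMIC_OFFSET == 48, i.e. dynamically allocated memory starts
   just above the outgoing-argument area.  */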
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
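
/* Illustrative sketch (not part of GCC): once the offsets are computed
   in instantiate_virtual_regs, a reference such as
     (mem (plus virtual_stack_vars_rtx (const_int 8)))
   is rewritten into
     (mem (plus frame_pointer_rtx (const_int (8 + var_offset))))
   where var_offset is the target's STARTING_FRAME_OFFSET.  */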
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
	{
	  *loc = plus_constant (new, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
	{
	  new = plus_constant (new, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of INSN.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new);
	  if (x != new)
	    emit_move_insn (new, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && GET_CODE (recog_data.operand[2]) == CONST_INT
	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new = instantiate_new_reg (x, &offset);
	  if (new == NULL)
	    continue;
	  if (offset == 0)
	    x = new;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
					 GEN_INT (offset), NULL_RTX,
					 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
				   GET_MODE (new), SUBREG_BYTE (x));
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;

	instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
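
/* Illustrative sketch (not part of GCC): how a caller might use the
   predicate above when expanding a call; `fn' and `return_type' are
   made-up locals for illustration.  */
#if 0 /* Example only, not compiled.  */
  if (aggregate_value_p (return_type, fn))
    {
      /* Returned in memory: allocate a temporary to hold the result
	 and hand its address to the callee.  */
      rtx slot = assign_temp (return_type, 0, 1, 1);
      /* ... pass XEXP (slot, 0) as the hidden struct-return argument ...  */
    }
#endif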
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
			current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex
     arguments.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);

	  /* Splice it in; skip the new decl.  */
	  TREE_CHAIN (decl) = TREE_CHAIN (p);
	  TREE_CHAIN (p) = decl;
	  p = decl;
	}
    }

  return args;
}
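
/* Illustrative sketch (not part of GCC): on a target whose
   split_complex_arg hook returns true for _Complex double, a parameter
   list (_Complex double z) is rewritten by the code above into two
   synthetic PARM_DECLs of type double, holding the real and imaginary
   parts, before the parameters are assigned their RTL.  */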
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (TREE_CODE (passed_type) == UNION_TYPE
      && TYPE_TRANSPARENT_UNION (passed_type))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
				    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
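
/* Illustrative sketch (not part of GCC): on a target whose PROMOTE_MODE
   widens sub-word integers, a `short' parameter leaves this routine with
   nominal_mode == HImode (its mode inside the function) but
   promoted_mode == SImode (the mode in which the ABI actually passes it),
   so later code knows a conversion is needed on entry.  */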
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
2097 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2098 the incoming location of the current parameter. */
2101 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2102 struct assign_parm_data_one *data)
2104 HOST_WIDE_INT pretend_bytes = 0;
2108 if (data->promoted_mode == VOIDmode)
2110 data->entry_parm = data->stack_parm = const0_rtx;
2114 #ifdef FUNCTION_INCOMING_ARG
2115 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2116 data->passed_type, data->named_arg);
2118 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2119 data->passed_type, data->named_arg);
2122 if (entry_parm == 0)
2123 data->promoted_mode = data->passed_mode;
2125 /* Determine parm's home in the stack, in case it arrives in the stack
2126 or we should pretend it did. Compute the stack position and rtx where
2127 the argument arrives and its size.
2129 There is one complexity here: If this was a parameter that would
2130 have been passed in registers, but wasn't only because it is
2131 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2132 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2133 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2134 as it was the previous time. */
2135 in_regs = entry_parm != 0;
2136 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2139 if (!in_regs && !data->named_arg)
2141 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2144 #ifdef FUNCTION_INCOMING_ARG
2145 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2146 data->passed_type, true);
2148 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2149 data->passed_type, true);
2151 in_regs = tem != NULL;
2155 /* If this parameter was passed both in registers and in the stack, use
2156 the copy on the stack. */
2157 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2165 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2166 data->promoted_mode,
2169 data->partial = partial;
2171 /* The caller might already have allocated stack space for the
2172 register parameters. */
2173 if (partial != 0 && all->reg_parm_stack_space == 0)
2175 /* Part of this argument is passed in registers and part
2176 is passed on the stack. Ask the prologue code to extend
2177 the stack part so that we can recreate the full value.
2179 PRETEND_BYTES is the size of the registers we need to store.
2180 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2181 stack space that the prologue should allocate.
2183 Internally, gcc assumes that the argument pointer is aligned
2184 to STACK_BOUNDARY bits. This is used both for alignment
2185 optimizations (see init_emit) and to locate arguments that are
2186 aligned to more than PARM_BOUNDARY bits. We must preserve this
2187 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2188 a stack boundary. */
2190 /* We assume at most one partial arg, and it must be the first
2191 argument on the stack. */
2192 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2194 pretend_bytes = partial;
2195 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2197 /* We want to align relative to the actual stack pointer, so
2198 don't include this in the stack size until later. */
2199 all->extra_pretend_bytes = all->pretend_args_size;
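/* Worked example (illustrative): if 4 bytes of the first stack argument
   were passed in registers (PRETEND_BYTES == 4) and STACK_BYTES == 16,
   then CEIL_ROUND (4, 16) == ((4 + 15) & ~15) == 16, so the prologue
   allocates a full 16-byte pretend area and the argument pointer stays
   aligned to STACK_BOUNDARY.  */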
2203 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2204 entry_parm ? data->partial : 0, current_function_decl,
2205 &all->stack_args_size, &data->locate);
2207 /* Adjust offsets to include the pretend args. */
2208 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2209 data->locate.slot_offset.constant += pretend_bytes;
2210 data->locate.offset.constant += pretend_bytes;
2212 data->entry_parm = entry_parm;
2215 /* A subroutine of assign_parms. If there is actually space on the stack
2216 for this parm, count it in stack_args_size and return true. */
2219 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2220 struct assign_parm_data_one *data)
2222 /* Trivially true if we've no incoming register. */
2223 if (data->entry_parm == NULL)
2225 /* Also true if we're partially in registers and partially not,
2226 since we've arranged to drop the entire argument on the stack. */
2227 else if (data->partial != 0)
2229 /* Also true if the target says that it's passed in both registers
2230 and on the stack. */
2231 else if (GET_CODE (data->entry_parm) == PARALLEL
2232 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2234 /* Also true if the target says that there's stack allocated for
2235 all register parameters. */
2236 else if (all->reg_parm_stack_space > 0)
2238 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2242 all->stack_args_size.constant += data->locate.size.constant;
2243 if (data->locate.size.var)
2244 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2249 /* A subroutine of assign_parms. Given that this parameter is allocated
2250 stack space by the ABI, find it. */
2253 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2255 rtx offset_rtx, stack_parm;
2256 unsigned int align, boundary;
2258 /* If we're passing this arg using a reg, make its stack home the
2259 aligned stack slot. */
2260 if (data->entry_parm)
2261 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2263 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2265 stack_parm = crtl->args.internal_arg_pointer;
2266 if (offset_rtx != const0_rtx)
2267 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2268 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2270 set_mem_attributes (stack_parm, parm, 1);
2272 boundary = data->locate.boundary;
2273 align = BITS_PER_UNIT;
2275 /* If we're padding upward, we know that the alignment of the slot
2276 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2277 intentionally forcing upward padding. Otherwise we have to come
2278 up with a guess at the alignment based on OFFSET_RTX. */
2279 if (data->locate.where_pad != downward || data->entry_parm)
2281 else if (GET_CODE (offset_rtx) == CONST_INT)
2283 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2284 align = align & -align;
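/* Worked example (illustrative): for a constant offset of 12 bytes and
   BOUNDARY == 64, the expression above computes 12 * 8 | 64 == 96, and
   96 & -96 isolates the lowest set bit, yielding a provable alignment
   of 32 bits; the offset can only lower, never raise, that guess.  */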
2286 set_mem_align (stack_parm, align);
2288 if (data->entry_parm)
2289 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2291 data->stack_parm = stack_parm;
2294 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2295 always valid and contiguous. */
2298 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2300 rtx entry_parm = data->entry_parm;
2301 rtx stack_parm = data->stack_parm;
2303 /* If this parm was passed part in regs and part in memory, pretend it
2304 arrived entirely in memory by pushing the register-part onto the stack.
2305 In the special case of a DImode or DFmode that is split, we could put
2306 it together in a pseudoreg directly, but for now that's not worth bothering with.  */
2308 if (data->partial != 0)
2310 /* Handle calls that pass values in multiple non-contiguous
2311 locations. The Irix 6 ABI has examples of this. */
2312 if (GET_CODE (entry_parm) == PARALLEL)
2313 emit_group_store (validize_mem (stack_parm), entry_parm,
2315 int_size_in_bytes (data->passed_type));
2318 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2319 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2320 data->partial / UNITS_PER_WORD);
2323 entry_parm = stack_parm;
2326 /* If we didn't decide this parm came in a register, by default it came on the stack.  */
2328 else if (entry_parm == NULL)
2329 entry_parm = stack_parm;
2331 /* When an argument is passed in multiple locations, we can't make use
2332 of this information, but we can save some copying if the whole argument
2333 is passed in a single register. */
2334 else if (GET_CODE (entry_parm) == PARALLEL
2335 && data->nominal_mode != BLKmode
2336 && data->passed_mode != BLKmode)
2338 size_t i, len = XVECLEN (entry_parm, 0);
2340 for (i = 0; i < len; i++)
2341 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2342 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2343 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2344 == data->passed_mode)
2345 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2347 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2352 data->entry_parm = entry_parm;
2355 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2356 always valid and properly aligned. */
2359 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2361 rtx stack_parm = data->stack_parm;
2363 /* If we can't trust the parm stack slot to be aligned enough for its
2364 ultimate type, don't use that slot after entry. We'll make another
2365 stack slot, if we need one. */
2366 if (stack_parm
2367 && ((STRICT_ALIGNMENT
2368 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2369 || (data->nominal_type
2370 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2371 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2374 /* If parm was passed in memory, and we need to convert it on entry,
2375 don't store it back in that same slot. */
2376 else if (data->entry_parm == stack_parm
2377 && data->nominal_mode != BLKmode
2378 && data->nominal_mode != data->passed_mode)
2381 /* If stack protection is in effect for this function, don't leave any
2382 pointers in their passed stack slots. */
2383 else if (crtl->stack_protect_guard
2384 && (flag_stack_protect == 2
2385 || data->passed_pointer
2386 || POINTER_TYPE_P (data->nominal_type)))
2389 data->stack_parm = stack_parm;
2392 /* A subroutine of assign_parms. Return true if the current parameter
2393 should be stored as a BLKmode in the current frame. */
2396 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2398 if (data->nominal_mode == BLKmode)
2400 if (GET_CODE (data->entry_parm) == PARALLEL)
2403 #ifdef BLOCK_REG_PADDING
2404 /* Only assign_parm_setup_block knows how to deal with register arguments
2405 that are padded at the least significant end. */
2406 if (REG_P (data->entry_parm)
2407 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2408 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2409 == (BYTES_BIG_ENDIAN ? upward : downward)))
2416 /* A subroutine of assign_parms. Arrange for the parameter to be
2417 present and valid in DATA->STACK_RTL. */
2420 assign_parm_setup_block (struct assign_parm_data_all *all,
2421 tree parm, struct assign_parm_data_one *data)
2423 rtx entry_parm = data->entry_parm;
2424 rtx stack_parm = data->stack_parm;
2426 HOST_WIDE_INT size_stored;
2427 rtx orig_entry_parm = entry_parm;
2429 if (GET_CODE (entry_parm) == PARALLEL)
2430 entry_parm = emit_group_move_into_temps (entry_parm);
2432 /* If we've a non-block object that's nevertheless passed in parts,
2433 reconstitute it in register operations rather than on the stack. */
2434 if (GET_CODE (entry_parm) == PARALLEL
2435 && data->nominal_mode != BLKmode)
2437 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2439 if ((XVECLEN (entry_parm, 0) > 1
2440 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2441 && use_register_for_decl (parm))
2443 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2445 push_to_sequence2 (all->first_conversion_insn,
2446 all->last_conversion_insn);
2448 /* For values returned in multiple registers, handle possible
2449 incompatible calls to emit_group_store.
2451 For example, the following would be invalid, and would have to
2452 be fixed by the conditional below:
2454 emit_group_store ((reg:SF), (parallel:DF))
2455 emit_group_store ((reg:SI), (parallel:DI))
2457 An example of this are doubles in e500 v2:
2458 (parallel:DF (expr_list (reg:SI) (const_int 0))
2459 (expr_list (reg:SI) (const_int 4))). */
2460 if (data->nominal_mode != data->passed_mode)
2462 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2463 emit_group_store (t, entry_parm, NULL_TREE,
2464 GET_MODE_SIZE (GET_MODE (entry_parm)));
2465 convert_move (parmreg, t, 0);
2468 emit_group_store (parmreg, entry_parm, data->nominal_type,
2469 int_size_in_bytes (data->nominal_type));
2471 all->first_conversion_insn = get_insns ();
2472 all->last_conversion_insn = get_last_insn ();
2475 SET_DECL_RTL (parm, parmreg);
2480 size = int_size_in_bytes (data->passed_type);
2481 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
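/* Worked example (illustrative): a 10-byte BLKmode argument on a target
   with UNITS_PER_WORD == 4 gives size_stored == CEIL_ROUND (10, 4) == 12,
   so the slot holds three whole words and the register-sized stores
   below cannot overrun it.  */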
2482 if (stack_parm == 0)
2484 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2485 stack_parm = assign_stack_local (BLKmode, size_stored,
2487 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2488 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2489 set_mem_attributes (stack_parm, parm, 1);
2492 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2493 calls that pass values in multiple non-contiguous locations. */
2494 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2498 /* Note that we will be storing an integral number of words.
2499 So we have to be careful to ensure that we allocate an
2500 integral number of words. We do this above when we call
2501 assign_stack_local if space was not allocated in the argument
2502 list. If it was, this will not work if PARM_BOUNDARY is not
2503 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2504 if it becomes a problem. Exception is when BLKmode arrives
2505 with arguments not conforming to word_mode. */
2507 if (data->stack_parm == 0)
2509 else if (GET_CODE (entry_parm) == PARALLEL)
2512 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2514 mem = validize_mem (stack_parm);
2516 /* Handle values in multiple non-contiguous locations. */
2517 if (GET_CODE (entry_parm) == PARALLEL)
2519 push_to_sequence2 (all->first_conversion_insn,
2520 all->last_conversion_insn);
2521 emit_group_store (mem, entry_parm, data->passed_type, size);
2522 all->first_conversion_insn = get_insns ();
2523 all->last_conversion_insn = get_last_insn ();
2530 /* If SIZE is that of a mode no bigger than a word, just use
2531 that mode's store operation. */
2532 else if (size <= UNITS_PER_WORD)
2534 enum machine_mode mode
2535 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2538 #ifdef BLOCK_REG_PADDING
2539 && (size == UNITS_PER_WORD
2540 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2541 != (BYTES_BIG_ENDIAN ? upward : downward)))
2547 /* We are really truncating a word_mode value containing
2548 SIZE bytes into a value of mode MODE. If such an
2549 operation requires no actual instructions, we can refer
2550 to the value directly in mode MODE, otherwise we must
2551 start with the register in word_mode and explicitly convert it.  */
2553 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2554 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2557 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2558 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2560 emit_move_insn (change_address (mem, mode, 0), reg);
2563 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2564 machine must be aligned to the left before storing
2565 to memory. Note that the previous test doesn't
2566 handle all cases (e.g. SIZE == 3). */
2567 else if (size != UNITS_PER_WORD
2568 #ifdef BLOCK_REG_PADDING
2569 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2577 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2578 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2580 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2581 build_int_cst (NULL_TREE, by),
2583 tem = change_address (mem, word_mode, 0);
2584 emit_move_insn (tem, x);
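/* Worked example (illustrative): a 3-byte argument padded downward on a
   big-endian target with UNITS_PER_WORD == 4 arrives in the low-order
   bytes of its register; BY == (4 - 3) * 8 == 8, so shifting left by
   8 bits moves the payload into the high-order bytes, which are the
   first three bytes of the word_mode store on such a machine.  */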
2587 move_block_from_reg (REGNO (entry_parm), mem,
2588 size_stored / UNITS_PER_WORD);
2591 move_block_from_reg (REGNO (entry_parm), mem,
2592 size_stored / UNITS_PER_WORD);
2594 else if (data->stack_parm == 0)
2596 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2597 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2599 all->first_conversion_insn = get_insns ();
2600 all->last_conversion_insn = get_last_insn ();
2604 data->stack_parm = stack_parm;
2605 SET_DECL_RTL (parm, stack_parm);
2608 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2609 parameter. Get it there. Perform all ABI specified conversions. */
2612 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2613 struct assign_parm_data_one *data)
2616 enum machine_mode promoted_nominal_mode;
2617 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2618 bool did_conversion = false;
2620 /* Store the parm in a pseudoregister during the function, but we may
2621 need to do it in a wider mode. */
2623 /* This is not really promoting for a call. However we need to be
2624 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2625 promoted_nominal_mode
2626 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2628 parmreg = gen_reg_rtx (promoted_nominal_mode);
2630 if (!DECL_ARTIFICIAL (parm))
2631 mark_user_reg (parmreg);
2633 /* If this was an item that we received a pointer to,
2634 set DECL_RTL appropriately. */
2635 if (data->passed_pointer)
2637 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2638 set_mem_attributes (x, parm, 1);
2639 SET_DECL_RTL (parm, x);
2642 SET_DECL_RTL (parm, parmreg);
2644 /* Copy the value into the register. */
2645 if (data->nominal_mode != data->passed_mode
2646 || promoted_nominal_mode != data->promoted_mode)
2650 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2651 mode, by the caller. We now have to convert it to
2652 NOMINAL_MODE, if different. However, PARMREG may be in
2653 a different mode than NOMINAL_MODE if it is being stored promoted.
2656 If ENTRY_PARM is a hard register, it might be in a register
2657 not valid for operating in its mode (e.g., an odd-numbered
2658 register for a DFmode). In that case, moves are the only
2659 thing valid, so we can't do a convert from there. This
2660 occurs when the calling sequence allows such misaligned usages.
2663 In addition, the conversion may involve a call, which could
2664 clobber parameters which haven't been copied to pseudo
2665 registers yet. Therefore, we must first copy the parm to
2666 a pseudo reg here, and save the conversion until after all
2667 parameters have been moved. */
2669 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2671 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2673 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2674 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2676 if (GET_CODE (tempreg) == SUBREG
2677 && GET_MODE (tempreg) == data->nominal_mode
2678 && REG_P (SUBREG_REG (tempreg))
2679 && data->nominal_mode == data->passed_mode
2680 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2681 && GET_MODE_SIZE (GET_MODE (tempreg))
2682 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2684 /* The argument is already sign/zero extended, so note it into the subreg.  */
2686 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2687 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2690 /* TREE_USED gets set erroneously during expand_assignment. */
2691 save_tree_used = TREE_USED (parm);
2692 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2693 TREE_USED (parm) = save_tree_used;
2694 all->first_conversion_insn = get_insns ();
2695 all->last_conversion_insn = get_last_insn ();
2698 did_conversion = true;
2701 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2703 /* If we were passed a pointer but the actual value can safely live
2704 in a register, put it in one. */
2705 if (data->passed_pointer
2706 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2707 /* If by-reference argument was promoted, demote it. */
2708 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2709 || use_register_for_decl (parm)))
2711 /* We can't use nominal_mode, because it will have been set to
2712 Pmode above. We must use the actual mode of the parm. */
2713 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2714 mark_user_reg (parmreg);
2716 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2718 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2719 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2721 push_to_sequence2 (all->first_conversion_insn,
2722 all->last_conversion_insn);
2723 emit_move_insn (tempreg, DECL_RTL (parm));
2724 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2725 emit_move_insn (parmreg, tempreg);
2726 all->first_conversion_insn = get_insns ();
2727 all->last_conversion_insn = get_last_insn ();
2730 did_conversion = true;
2733 emit_move_insn (parmreg, DECL_RTL (parm));
2735 SET_DECL_RTL (parm, parmreg);
2737 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm.  */
2739 data->stack_parm = NULL;
2742 /* Mark the register as eliminable if we did no conversion and it was
2743 copied from memory at a fixed offset, and the arg pointer was not
2744 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2745 offset formed an invalid address, such memory-equivalences as we
2746 make here would screw up life analysis for it. */
2747 if (data->nominal_mode == data->passed_mode
2749 && data->stack_parm != 0
2750 && MEM_P (data->stack_parm)
2751 && data->locate.offset.var == 0
2752 && reg_mentioned_p (virtual_incoming_args_rtx,
2753 XEXP (data->stack_parm, 0)))
2755 rtx linsn = get_last_insn ();
2758 /* Mark complex types separately. */
2759 if (GET_CODE (parmreg) == CONCAT)
2761 enum machine_mode submode
2762 = GET_MODE_INNER (GET_MODE (parmreg));
2763 int regnor = REGNO (XEXP (parmreg, 0));
2764 int regnoi = REGNO (XEXP (parmreg, 1));
2765 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2766 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2767 GET_MODE_SIZE (submode));
2769 /* Scan backwards for the set of the real and imaginary parts.  */
2771 for (sinsn = linsn; sinsn != 0;
2772 sinsn = prev_nonnote_insn (sinsn))
2774 set = single_set (sinsn);
2778 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2779 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2780 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2781 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2784 else if ((set = single_set (linsn)) != 0
2785 && SET_DEST (set) == parmreg)
2786 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2789 /* For pointer data type, suggest pointer register. */
2790 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2791 mark_reg_pointer (parmreg,
2792 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2795 /* A subroutine of assign_parms. Allocate stack space to hold the current
2796 parameter. Get it there. Perform all ABI specified conversions. */
2799 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2800 struct assign_parm_data_one *data)
2802 /* Value must be stored in the stack slot STACK_PARM during function execution.  */
2804 bool to_conversion = false;
2806 if (data->promoted_mode != data->nominal_mode)
2808 /* Conversion is required. */
2809 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2811 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2813 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2814 to_conversion = true;
2816 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2817 TYPE_UNSIGNED (TREE_TYPE (parm)));
2819 if (data->stack_parm)
2820 /* ??? This may need a big-endian conversion on sparc64. */
2822 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2825 if (data->entry_parm != data->stack_parm)
2829 if (data->stack_parm == 0)
2832 = assign_stack_local (GET_MODE (data->entry_parm),
2833 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2834 TYPE_ALIGN (data->passed_type));
2835 set_mem_attributes (data->stack_parm, parm, 1);
2838 dest = validize_mem (data->stack_parm);
2839 src = validize_mem (data->entry_parm);
2843 /* Use a block move to handle potentially misaligned entry_parm. */
2845 push_to_sequence2 (all->first_conversion_insn,
2846 all->last_conversion_insn);
2847 to_conversion = true;
2849 emit_block_move (dest, src,
2850 GEN_INT (int_size_in_bytes (data->passed_type)),
2854 emit_move_insn (dest, src);
2859 all->first_conversion_insn = get_insns ();
2860 all->last_conversion_insn = get_last_insn ();
2864 SET_DECL_RTL (parm, data->stack_parm);
2867 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2868 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2871 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2874 tree orig_fnargs = all->orig_fnargs;
2876 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2878 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2879 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2881 rtx tmp, real, imag;
2882 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2884 real = DECL_RTL (fnargs);
2885 imag = DECL_RTL (TREE_CHAIN (fnargs));
2886 if (inner != GET_MODE (real))
2888 real = gen_lowpart_SUBREG (inner, real);
2889 imag = gen_lowpart_SUBREG (inner, imag);
2892 if (TREE_ADDRESSABLE (parm))
2895 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2897 /* split_complex_arg put the real and imag parts in
2898 pseudos. Move them to memory. */
2899 tmp = assign_stack_local (DECL_MODE (parm), size,
2900 TYPE_ALIGN (TREE_TYPE (parm)));
2901 set_mem_attributes (tmp, parm, 1);
2902 rmem = adjust_address_nv (tmp, inner, 0);
2903 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2904 push_to_sequence2 (all->first_conversion_insn,
2905 all->last_conversion_insn);
2906 emit_move_insn (rmem, real);
2907 emit_move_insn (imem, imag);
2908 all->first_conversion_insn = get_insns ();
2909 all->last_conversion_insn = get_last_insn ();
2913 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2914 SET_DECL_RTL (parm, tmp);
2916 real = DECL_INCOMING_RTL (fnargs);
2917 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2918 if (inner != GET_MODE (real))
2920 real = gen_lowpart_SUBREG (inner, real);
2921 imag = gen_lowpart_SUBREG (inner, imag);
2923 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2924 set_decl_incoming_rtl (parm, tmp, false);
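/* Illustration only, not from the sources: for a _Complex double
   parameter that the ABI splits, FNARGS carries two synthetic DFmode
   arguments whose rtl is recombined here into something like
     (concat:DC (reg:DF <real>) (reg:DF <imag>))
   so the rest of the compiler again sees one complex value.  */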
2925 fnargs = TREE_CHAIN (fnargs);
2929 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2930 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2932 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2933 instead of the copy of decl, i.e. FNARGS. */
2934 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2935 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2938 fnargs = TREE_CHAIN (fnargs);
2942 /* Assign RTL expressions to the function's parameters. This may involve
2943 copying them into registers and using those registers as the DECL_RTL. */
2946 assign_parms (tree fndecl)
2948 struct assign_parm_data_all all;
2951 crtl->args.internal_arg_pointer
2952 = targetm.calls.internal_arg_pointer ();
2954 assign_parms_initialize_all (&all);
2955 fnargs = assign_parms_augmented_arg_list (&all);
2957 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2959 struct assign_parm_data_one data;
2961 /* Extract the type of PARM; adjust it according to ABI. */
2962 assign_parm_find_data_types (&all, parm, &data);
2964 /* Early out for errors and void parameters. */
2965 if (data.passed_mode == VOIDmode)
2967 SET_DECL_RTL (parm, const0_rtx);
2968 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2972 if (cfun->stdarg && !TREE_CHAIN (parm))
2973 assign_parms_setup_varargs (&all, &data, false);
2975 /* Find out where the parameter arrives in this function. */
2976 assign_parm_find_entry_rtl (&all, &data);
2978 /* Find out where stack space for this parameter might be. */
2979 if (assign_parm_is_stack_parm (&all, &data))
2981 assign_parm_find_stack_rtl (parm, &data);
2982 assign_parm_adjust_entry_rtl (&data);
2985 /* Record permanently how this parm was passed. */
2986 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2988 /* Update info on where next arg arrives in registers. */
2989 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2990 data.passed_type, data.named_arg);
2992 assign_parm_adjust_stack_rtl (&data);
2994 if (assign_parm_setup_block_p (&data))
2995 assign_parm_setup_block (&all, parm, &data);
2996 else if (data.passed_pointer || use_register_for_decl (parm))
2997 assign_parm_setup_reg (&all, parm, &data);
2999 assign_parm_setup_stack (&all, parm, &data);
3002 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3003 assign_parms_unsplit_complex (&all, fnargs);
3005 /* Output all parameter conversion instructions (possibly including calls)
3006 now that all parameters have been copied out of hard registers. */
3007 emit_insn (all.first_conversion_insn);
3009 /* If we are receiving a struct value address as the first argument, set up
3010 the RTL for the function result. As this might require code to convert
3011 the transmitted address to Pmode, we do this here to ensure that possible
3012 preliminary conversions of the address have been emitted already. */
3013 if (all.function_result_decl)
3015 tree result = DECL_RESULT (current_function_decl);
3016 rtx addr = DECL_RTL (all.function_result_decl);
3019 if (DECL_BY_REFERENCE (result))
3023 addr = convert_memory_address (Pmode, addr);
3024 x = gen_rtx_MEM (DECL_MODE (result), addr);
3025 set_mem_attributes (x, result, 1);
3027 SET_DECL_RTL (result, x);
3030 /* We have aligned all the args, so add space for the pretend args. */
3031 crtl->args.pretend_args_size = all.pretend_args_size;
3032 all.stack_args_size.constant += all.extra_pretend_bytes;
3033 crtl->args.size = all.stack_args_size.constant;
3035 /* Adjust function incoming argument size for alignment and minimum size.  */
3038 #ifdef REG_PARM_STACK_SPACE
3039 crtl->args.size = MAX (crtl->args.size,
3040 REG_PARM_STACK_SPACE (fndecl));
3043 crtl->args.size = CEIL_ROUND (crtl->args.size,
3044 PARM_BOUNDARY / BITS_PER_UNIT);
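/* Worked example (illustrative): with 14 bytes of incoming stack
   arguments and PARM_BOUNDARY == 32, this rounds crtl->args.size to
   CEIL_ROUND (14, 32 / 8) == ((14 + 3) & ~3) == 16.  */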
3046 #ifdef ARGS_GROW_DOWNWARD
3047 crtl->args.arg_offset_rtx
3048 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3049 : expand_expr (size_diffop (all.stack_args_size.var,
3050 size_int (-all.stack_args_size.constant)),
3051 NULL_RTX, VOIDmode, 0));
3053 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3056 /* See how many bytes, if any, of its args a function should try to pop on return.  */
3059 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3062 /* For stdarg.h function, save info about
3063 regs and stack space used by the named args. */
3065 crtl->args.info = all.args_so_far;
3067 /* Set the rtx used for the function return value. Put this in its
3068 own variable so any optimizers that need this information don't have
3069 to include tree.h. Do this here so it gets done when an inlined
3070 function gets output. */
3073 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3074 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3076 /* If scalar return value was computed in a pseudo-reg, or was a named
3077 return value that got dumped to the stack, copy that to the hard return register.  */
3079 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3081 tree decl_result = DECL_RESULT (fndecl);
3082 rtx decl_rtl = DECL_RTL (decl_result);
3084 if (REG_P (decl_rtl)
3085 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3086 : DECL_REGISTER (decl_result))
3090 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3092 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3093 /* The delay slot scheduler assumes that crtl->return_rtx
3094 holds the hard register containing the return value, not a
3095 temporary pseudo. */
3096 crtl->return_rtx = real_decl_rtl;
3101 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3102 For all seen types, gimplify their sizes. */
3105 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3112 if (POINTER_TYPE_P (t))
3114 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3115 && !TYPE_SIZES_GIMPLIFIED (t))
3117 gimplify_type_sizes (t, (tree *) data);
3125 /* Gimplify the parameter list for current_function_decl. This involves
3126 evaluating SAVE_EXPRs of variable sized parameters and generating code
3127 to implement callee-copies reference parameters. Returns a list of
3128 statements to add to the beginning of the function, or NULL if nothing to do.  */
3132 gimplify_parameters (void)
3134 struct assign_parm_data_all all;
3135 tree fnargs, parm, stmts = NULL;
3137 assign_parms_initialize_all (&all);
3138 fnargs = assign_parms_augmented_arg_list (&all);
3140 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3142 struct assign_parm_data_one data;
3144 /* Extract the type of PARM; adjust it according to ABI. */
3145 assign_parm_find_data_types (&all, parm, &data);
3147 /* Early out for errors and void parameters. */
3148 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3151 /* Update info on where next arg arrives in registers. */
3152 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3153 data.passed_type, data.named_arg);
3155 /* ??? Once upon a time variable_size stuffed parameter list
3156 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3157 turned out to be less than manageable in the gimple world.
3158 Now we have to hunt them down ourselves. */
3159 walk_tree_without_duplicates (&data.passed_type,
3160 gimplify_parm_type, &stmts);
3162 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3164 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3165 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3168 if (data.passed_pointer)
3170 tree type = TREE_TYPE (data.passed_type);
3171 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3172 type, data.named_arg))
3176 /* For constant sized objects, this is trivial; for
3177 variable-sized objects, we have to play games. */
3178 if (TREE_CONSTANT (DECL_SIZE (parm)))
3180 local = create_tmp_var (type, get_name (parm));
3181 DECL_IGNORED_P (local) = 0;
3185 tree ptr_type, addr;
3187 ptr_type = build_pointer_type (type);
3188 addr = create_tmp_var (ptr_type, get_name (parm));
3189 DECL_IGNORED_P (addr) = 0;
3190 local = build_fold_indirect_ref (addr);
3192 t = built_in_decls[BUILT_IN_ALLOCA];
3193 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3194 t = fold_convert (ptr_type, t);
3195 t = build_gimple_modify_stmt (addr, t);
3196 gimplify_and_add (t, &stmts);
3199 t = build_gimple_modify_stmt (local, parm);
3200 gimplify_and_add (t, &stmts);
3202 SET_DECL_VALUE_EXPR (parm, local);
3203 DECL_HAS_VALUE_EXPR_P (parm) = 1;
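/* Illustration only, not from the sources: for a variable-sized
   callee-copied reference parameter PARM of byte size N, the
   statements built above amount to this pseudo-GIMPLE at entry:

     addr = (T *) __builtin_alloca (N);   <- fresh local storage
     *addr = PARM;                        <- callee makes its own copy

   and every later use of PARM resolves to *addr through its
   DECL_VALUE_EXPR.  Constant-sized parameters skip the alloca and use
   an ordinary temporary instead.  */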
3211 /* Compute the size and offset from the start of the stacked arguments for a
3212 parm passed in mode PASSED_MODE and with type TYPE.
3214 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
3217 The starting offset and size for this parm are returned in
3218 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3219 nonzero, the offset is that of the stack slot, which is returned in
3220 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3221 padding required from the initial offset ptr to the stack slot.
3223 IN_REGS is nonzero if the argument will be passed in registers. It will
3224 never be set if REG_PARM_STACK_SPACE is not defined.
3226 FNDECL is the function in which the argument was defined.
3228 There are two types of rounding that are done. The first, controlled by
3229 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3230 list to be aligned to the specific boundary (in bits). This rounding
3231 affects the initial and starting offsets, but not the argument size.
3233 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3234 optionally rounds the size of the parm to PARM_BOUNDARY. The
3235 initial offset is not affected by this rounding, while the size always
3236 is and the starting offset may be. */
3238 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3239 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3240 callers pass in the total size of args so far as
3241 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
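/* Worked example (illustrative): with args growing upward, a running
   offset of 6 bytes, and an SImode argument whose boundary is 32 bits,
   the offset is first padded from 6 to 8 (ALIGNMENT_PAD == 2),
   SLOT_OFFSET and OFFSET both become 8, SIZE is 4, and the running
   offset left for the next argument is 12.  */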
3244 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3245 int partial, tree fndecl ATTRIBUTE_UNUSED,
3246 struct args_size *initial_offset_ptr,
3247 struct locate_and_pad_arg_data *locate)
3250 enum direction where_pad;
3251 unsigned int boundary;
3252 int reg_parm_stack_space = 0;
3253 int part_size_in_regs;
3255 #ifdef REG_PARM_STACK_SPACE
3256 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3258 /* If we have found a stack parm before we reach the end of the
3259 area reserved for registers, skip that area. */
3262 if (reg_parm_stack_space > 0)
3264 if (initial_offset_ptr->var)
3266 initial_offset_ptr->var
3267 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3268 ssize_int (reg_parm_stack_space));
3269 initial_offset_ptr->constant = 0;
3271 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3272 initial_offset_ptr->constant = reg_parm_stack_space;
3275 #endif /* REG_PARM_STACK_SPACE */
3277 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3280 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3281 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3282 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3283 locate->where_pad = where_pad;
3284 locate->boundary = boundary;
3286 /* Remember if the outgoing parameter requires extra alignment on the
3287 calling function side. */
3288 if (boundary > PREFERRED_STACK_BOUNDARY)
3289 boundary = PREFERRED_STACK_BOUNDARY;
3290 if (crtl->stack_alignment_needed < boundary)
3291 crtl->stack_alignment_needed = boundary;
3293 #ifdef ARGS_GROW_DOWNWARD
3294 locate->slot_offset.constant = -initial_offset_ptr->constant;
3295 if (initial_offset_ptr->var)
3296 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3297 initial_offset_ptr->var);
3301 if (where_pad != none
3302 && (!host_integerp (sizetree, 1)
3303 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3304 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3305 SUB_PARM_SIZE (locate->slot_offset, s2);
3308 locate->slot_offset.constant += part_size_in_regs;
3311 #ifdef REG_PARM_STACK_SPACE
3312 || REG_PARM_STACK_SPACE (fndecl) > 0
3315 pad_to_arg_alignment (&locate->slot_offset, boundary,
3316 &locate->alignment_pad);
3318 locate->size.constant = (-initial_offset_ptr->constant
3319 - locate->slot_offset.constant);
3320 if (initial_offset_ptr->var)
3321 locate->size.var = size_binop (MINUS_EXPR,
3322 size_binop (MINUS_EXPR,
3324 initial_offset_ptr->var),
3325 locate->slot_offset.var);
3327 /* Pad_below needs the pre-rounded size to know how much to pad below.  */
3329 locate->offset = locate->slot_offset;
3330 if (where_pad == downward)
3331 pad_below (&locate->offset, passed_mode, sizetree);
3333 #else /* !ARGS_GROW_DOWNWARD */
3335 #ifdef REG_PARM_STACK_SPACE
3336 || REG_PARM_STACK_SPACE (fndecl) > 0
3339 pad_to_arg_alignment (initial_offset_ptr, boundary,
3340 &locate->alignment_pad);
3341 locate->slot_offset = *initial_offset_ptr;
3343 #ifdef PUSH_ROUNDING
3344 if (passed_mode != BLKmode)
3345 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3348 /* Pad_below needs the pre-rounded size to know how much to pad below
3349 so this must be done before rounding up. */
3350 locate->offset = locate->slot_offset;
3351 if (where_pad == downward)
3352 pad_below (&locate->offset, passed_mode, sizetree);
3354 if (where_pad != none
3355 && (!host_integerp (sizetree, 1)
3356 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3357 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3359 ADD_PARM_SIZE (locate->size, sizetree);
3361 locate->size.constant -= part_size_in_regs;
3362 #endif /* ARGS_GROW_DOWNWARD */
3365 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3366 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3369 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3370 struct args_size *alignment_pad)
3372 tree save_var = NULL_TREE;
3373 HOST_WIDE_INT save_constant = 0;
3374 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3375 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3377 #ifdef SPARC_STACK_BOUNDARY_HACK
3378 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3379 the real alignment of %sp. However, when it does this, the
3380 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3381 if (SPARC_STACK_BOUNDARY_HACK)
3385 if (boundary > PARM_BOUNDARY)
3387 save_var = offset_ptr->var;
3388 save_constant = offset_ptr->constant;
3391 alignment_pad->var = NULL_TREE;
3392 alignment_pad->constant = 0;
3394 if (boundary > BITS_PER_UNIT)
3396 if (offset_ptr->var)
3398 tree sp_offset_tree = ssize_int (sp_offset);
3399 tree offset = size_binop (PLUS_EXPR,
3400 ARGS_SIZE_TREE (*offset_ptr),
3402 #ifdef ARGS_GROW_DOWNWARD
3403 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3405 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3408 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3409 /* ARGS_SIZE_TREE includes constant term. */
3410 offset_ptr->constant = 0;
3411 if (boundary > PARM_BOUNDARY)
3412 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3417 offset_ptr->constant = -sp_offset +
3418 #ifdef ARGS_GROW_DOWNWARD
3419 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3421 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3423 if (boundary > PARM_BOUNDARY)
3424 alignment_pad->constant = offset_ptr->constant - save_constant;
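/* Worked example (illustrative): with STACK_POINTER_OFFSET == 8, a
   constant offset of 20, and a 128-bit (16-byte) boundary, args growing
   upward: CEIL_ROUND (20 + 8, 16) == 32, the new offset is 32 - 8 == 24,
   and sp + 8 + 24 == sp + 32 is 16-byte aligned as required.  */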
3430 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3432 if (passed_mode != BLKmode)
3434 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3435 offset_ptr->constant
3436 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3437 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3438 - GET_MODE_SIZE (passed_mode));
3442 if (TREE_CODE (sizetree) != INTEGER_CST
3443 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3445 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3446 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3448 ADD_PARM_SIZE (*offset_ptr, s2);
3449 SUB_PARM_SIZE (*offset_ptr, sizetree);
3455 /* True if register REGNO was alive at a place where `setjmp' was
3456 called and was set more than once or is an argument. Such regs may
3457 be clobbered by `longjmp'. */
3460 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3462 /* There appear to be cases where some local vars never reach the
3463 backend but have bogus regnos. */
3464 if (regno >= max_reg_num ())
3467 return ((REG_N_SETS (regno) > 1
3468 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3469 && REGNO_REG_SET_P (setjmp_crosses, regno));
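/* Illustration only, not from the sources: the classic case these
   warnings catch looks like

     #include <setjmp.h>
     jmp_buf env;
     int f (void)
     {
       int n = 1;          <- may live in a call-saved register
       if (setjmp (env))
         return n;         <- unpredictable: longjmp restores the
       n = 2;                 register to its value at setjmp time
       g ();                  if g() eventually calls longjmp
       return 0;
     }

   A register set more than once and live across the setjmp call can be
   rolled back by longjmp, hence the -Wclobbered warnings below.  */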
3472 /* Walk the tree of blocks describing the binding levels within a
3473 function and warn about variables that might be killed by setjmp or
3474 vfork.  This is done after calling flow_analysis before register
3475 allocation since that will clobber the pseudo-regs to hard regs.  */
3479 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3483 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3485 if (TREE_CODE (decl) == VAR_DECL
3486 && DECL_RTL_SET_P (decl)
3487 && REG_P (DECL_RTL (decl))
3488 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3489 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3490 " %<longjmp%> or %<vfork%>", decl);
3493 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3494 setjmp_vars_warning (setjmp_crosses, sub);
3497 /* Do the appropriate part of setjmp_vars_warning
3498 but for arguments instead of local variables. */
3501 setjmp_args_warning (bitmap setjmp_crosses)
3504 for (decl = DECL_ARGUMENTS (current_function_decl);
3505 decl; decl = TREE_CHAIN (decl))
3506 if (DECL_RTL (decl) != 0
3507 && REG_P (DECL_RTL (decl))
3508 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3509 warning (OPT_Wclobbered,
3510 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3514 /* Generate warning messages for variables live across setjmp. */
3517 generate_setjmp_warnings (void)
3519 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3521 if (n_basic_blocks == NUM_FIXED_BLOCKS
3522 || bitmap_empty_p (setjmp_crosses))
3525 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3526 setjmp_args_warning (setjmp_crosses);
3530 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3531 and create duplicate blocks. */
3532 /* ??? Need an option to either create block fragments or to create
3533 abstract origin duplicates of a source block. It really depends
3534 on what optimization has been performed. */
3537 reorder_blocks (void)
3539 tree block = DECL_INITIAL (current_function_decl);
3540 VEC(tree,heap) *block_stack;
3542 if (block == NULL_TREE)
3545 block_stack = VEC_alloc (tree, heap, 10);
3547 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3548 clear_block_marks (block);
3550 /* Prune the old trees away, so that they don't get in the way. */
3551 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3552 BLOCK_CHAIN (block) = NULL_TREE;
3554 /* Recreate the block tree from the note nesting. */
3555 reorder_blocks_1 (get_insns (), block, &block_stack);
3556 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3558 VEC_free (tree, heap, block_stack);
3561 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3564 clear_block_marks (tree block)
3568 TREE_ASM_WRITTEN (block) = 0;
3569 clear_block_marks (BLOCK_SUBBLOCKS (block));
3570 block = BLOCK_CHAIN (block);
3575 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3579 for (insn = insns; insn; insn = NEXT_INSN (insn))
3583 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3585 tree block = NOTE_BLOCK (insn);
3588 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3589 ? BLOCK_FRAGMENT_ORIGIN (block)
3592 /* If we have seen this block before, that means it now
3593 spans multiple address regions. Create a new fragment. */
3594 if (TREE_ASM_WRITTEN (block))
3596 tree new_block = copy_node (block);
3598 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3599 BLOCK_FRAGMENT_CHAIN (new_block)
3600 = BLOCK_FRAGMENT_CHAIN (origin);
3601 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3603 NOTE_BLOCK (insn) = new_block;
3607 BLOCK_SUBBLOCKS (block) = 0;
3608 TREE_ASM_WRITTEN (block) = 1;
3609 /* When there's only one block for the entire function,
3610 current_block == block and we mustn't do this, it
3611 will cause infinite recursion. */
3612 if (block != current_block)
3614 if (block != origin)
3615 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3617 BLOCK_SUPERCONTEXT (block) = current_block;
3618 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3619 BLOCK_SUBBLOCKS (current_block) = block;
3620 current_block = origin;
3622 VEC_safe_push (tree, heap, *p_block_stack, block);
3624 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3626 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3627 BLOCK_SUBBLOCKS (current_block)
3628 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3629 current_block = BLOCK_SUPERCONTEXT (current_block);
3635 /* Reverse the order of elements in the chain T of blocks,
3636 and return the new head of the chain (old last element). */
3639 blocks_nreverse (tree t)
3641 tree prev = 0, decl, next;
3642 for (decl = t; decl; decl = next)
3644 next = BLOCK_CHAIN (decl);
3645 BLOCK_CHAIN (decl) = prev;
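/* E.g. (illustrative): a chain B1 -> B2 -> B3 is relinked in place to
   B3 -> B2 -> B1 and B3 is returned as the new head.  */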
3651 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3652 non-NULL, list them all into VECTOR, in a depth-first preorder
3653 traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all blocks.  */
3657 all_blocks (tree block, tree *vector)
3663 TREE_ASM_WRITTEN (block) = 0;
3665 /* Record this block. */
3667 vector[n_blocks] = block;
3671 /* Record the subblocks, and their subblocks... */
3672 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3673 vector ? vector + n_blocks : 0);
3674 block = BLOCK_CHAIN (block);
3680 /* Return a vector containing all the blocks rooted at BLOCK. The
3681 number of elements in the vector is stored in N_BLOCKS_P. The
3682 vector is dynamically allocated; it is the caller's responsibility
3683 to call `free' on the pointer returned. */
3686 get_block_vector (tree block, int *n_blocks_p)
3690 *n_blocks_p = all_blocks (block, NULL);
3691 block_vector = XNEWVEC (tree, *n_blocks_p);
3692 all_blocks (block, block_vector);
3694 return block_vector;
3697 static GTY(()) int next_block_index = 2;
3699 /* Set BLOCK_NUMBER for all the blocks in FN. */
3702 number_blocks (tree fn)
3708 /* For SDB and XCOFF debugging output, we start numbering the blocks
3709 from 1 within each function, rather than keeping a running count.  */
3711 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3712 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3713 next_block_index = 1;
3716 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3718 /* The top-level BLOCK isn't numbered at all. */
3719 for (i = 1; i < n_blocks; ++i)
3720 /* We number the blocks from two. */
3721 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3723 free (block_vector);
3728 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3731 debug_find_var_in_block_tree (tree var, tree block)
3735 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3739 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3741 tree ret = debug_find_var_in_block_tree (var, t);
3749 /* Keep track of whether we're in a dummy function context. If we are,
3750 we don't want to invoke the set_current_function hook, because we'll
3751 get into trouble if the hook calls target_reinit () recursively or
3752 when the initial initialization is not yet complete. */
3754 static bool in_dummy_function;
3756 /* Invoke the target hook when setting cfun. */
3759 invoke_set_current_function_hook (tree fndecl)
3761 if (!in_dummy_function)
3762 targetm.set_current_function (fndecl);
3765 /* cfun should never be set directly; use this function. */
3768 set_cfun (struct function *new_cfun)
3770 if (cfun != new_cfun)
3773 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3777 /* Keep track of the cfun stack. */
3779 typedef struct function *function_p;
3781 DEF_VEC_P(function_p);
3782 DEF_VEC_ALLOC_P(function_p,heap);
3784 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3786 static VEC(function_p,heap) *cfun_stack;
3788 /* We save the value of in_system_header here when pushing the first
3789 function on the cfun stack, and we restore it from here when
3790 popping the last function. */
3792 static bool saved_in_system_header;
3794 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3797 push_cfun (struct function *new_cfun)
3800 saved_in_system_header = in_system_header;
3801 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3803 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3804 set_cfun (new_cfun);
3807 /* Pop cfun from the stack. */
3812 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3813 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3814 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3815 set_cfun (new_cfun);
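/* Usage sketch (illustrative, not from the sources): code that must
   temporarily emit into another function brackets its work as

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... examine or modify the other function ...
     pop_cfun ();

   keeping cfun, in_system_header and the target's per-function state
   consistent throughout.  */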
3818 /* Return the current funcdef number and increment it.  */
3820 get_next_funcdef_no (void)
3822 return funcdef_no++;
3825 /* Allocate a function structure for FNDECL and set its contents
3826 to the defaults. Set cfun to the newly-allocated object.
3827 Some of the helper functions invoked during initialization assume
3828 that cfun has already been set. Therefore, assign the new object
3829 directly into cfun and invoke the back end hook explicitly at the
3830 very end, rather than initializing a temporary and calling set_cfun on it.
3833 ABSTRACT_P is true if this is a function that will never be seen by
3834 the middle-end. Such functions are front-end concepts (like C++
3835 function templates) that do not correspond directly to functions
3836 placed in object files. */
3839 allocate_struct_function (tree fndecl, bool abstract_p)
3842 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3844 cfun = ggc_alloc_cleared (sizeof (struct function));
3846 current_function_funcdef_no = get_next_funcdef_no ();
3848 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3850 init_eh_for_function ();
3852 if (init_machine_status)
3853 cfun->machine = (*init_machine_status) ();
3855 #ifdef OVERRIDE_ABI_FORMAT
3856 OVERRIDE_ABI_FORMAT (fndecl);
3859 if (fndecl != NULL_TREE)
3861 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3862 cfun->decl = fndecl;
3864 result = DECL_RESULT (fndecl);
3865 if (!abstract_p && aggregate_value_p (result, fndecl))
3867 #ifdef PCC_STATIC_STRUCT_RETURN
3868 cfun->returns_pcc_struct = 1;
3870 cfun->returns_struct = 1;
3875 && TYPE_ARG_TYPES (fntype) != 0
3876 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3877 != void_type_node));
3879 /* Assume all registers in stdarg functions need to be saved. */
3880 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3881 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3884 invoke_set_current_function_hook (fndecl);
3887 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3888 instead of just setting it. */
3891 push_struct_function (tree fndecl)
3894 saved_in_system_header = in_system_header;
3895 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3897 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3898 allocate_struct_function (fndecl, false);
3901 /* Reset cfun, and other non-struct-function variables to defaults as
3902 appropriate for emitting rtl at the start of a function. */
3905 prepare_function_start (void)
3907 gcc_assert (!crtl->emit.x_last_insn);
3909 init_varasm_status ();
3912 cse_not_expected = ! optimize;
3914 /* Caller save not needed yet. */
3915 caller_save_needed = 0;
3917 /* We haven't done register allocation yet. */
3920 /* Indicate that we have not instantiated virtual registers yet. */
3921 virtuals_instantiated = 0;
3923 /* Indicate that we want CONCATs now. */
3924 generating_concat_p = 1;
3926 /* Indicate we have no need of a frame pointer yet. */
3927 frame_pointer_needed = 0;
3930 /* Initialize the rtl expansion mechanism so that we can do simple things
3931 like generate sequences. This is used to provide a context during global
3932 initialization of some passes. You must call expand_dummy_function_end
3933 to exit this context. */
3936 init_dummy_function_start (void)
3938 gcc_assert (!in_dummy_function);
3939 in_dummy_function = true;
3940 push_struct_function (NULL_TREE);
3941 prepare_function_start ();
3944 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3945 and initialize static variables for generating RTL for the statements of the function.  */
3949 init_function_start (tree subr)
3951 if (subr && DECL_STRUCT_FUNCTION (subr))
3952 set_cfun (DECL_STRUCT_FUNCTION (subr));
3954 allocate_struct_function (subr, false);
3955 prepare_function_start ();
3957 /* Warn if this value is an aggregate type,
3958 regardless of which calling convention we are using for it. */
3959 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3960 warning (OPT_Waggregate_return, "function returns an aggregate");
3963 /* Make sure all values used by the optimization passes have sane defaults.  */
3966 init_function_for_compilation (void)
3970 /* No prologue/epilogue insns yet.  Make sure that these vectors are empty.  */
3972 gcc_assert (VEC_length (int, prologue) == 0);
3973 gcc_assert (VEC_length (int, epilogue) == 0);
3974 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3978 struct rtl_opt_pass pass_init_function =
3984 init_function_for_compilation, /* execute */
3987 0, /* static_pass_number */
3989 0, /* properties_required */
3990 0, /* properties_provided */
3991 0, /* properties_destroyed */
3992 0, /* todo_flags_start */
3993 0 /* todo_flags_finish */
3999 expand_main_function (void)
4001 #if (defined(INVOKE__main) \
4002 || (!defined(HAS_INIT_SECTION) \
4003 && !defined(INIT_SECTION_ASM_OP) \
4004 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4005 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4009 /* Expand code to initialize the stack_protect_guard. This is invoked at
4010 the beginning of a function to be protected. */
4012 #ifndef HAVE_stack_protect_set
4013 # define HAVE_stack_protect_set 0
4014 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4018 stack_protect_prologue (void)
4020 tree guard_decl = targetm.stack_protect_guard ();
4023 /* Avoid expand_expr here, because we don't want guard_decl pulled
4024 into registers unless absolutely necessary. And we know that
4025 crtl->stack_protect_guard is a local stack slot, so this skips all the fluff.  */
4027 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4028 y = validize_mem (DECL_RTL (guard_decl));
4030 /* Allow the target to copy from Y to X without leaking Y into a register.  */
4032 if (HAVE_stack_protect_set)
4034 rtx insn = gen_stack_protect_set (x, y);
4042 /* Otherwise do a straight move. */
4043 emit_move_insn (x, y);
4046 /* Expand code to verify the stack_protect_guard. This is invoked at
4047 the end of a function to be protected. */
4049 #ifndef HAVE_stack_protect_test
4050 # define HAVE_stack_protect_test 0
4051 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4055 stack_protect_epilogue (void)
4057 tree guard_decl = targetm.stack_protect_guard ();
4058 rtx label = gen_label_rtx ();
4061 /* Avoid expand_expr here, because we don't want guard_decl pulled
4062 into registers unless absolutely necessary. And we know that
4063 crtl->stack_protect_guard is a local stack slot, so this skips all the fluff.  */
4065 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4066 y = validize_mem (DECL_RTL (guard_decl));
4068 /* Allow the target to compare Y with X without leaking either into a register.  */
4070 switch (HAVE_stack_protect_test != 0)
4073 tmp = gen_stack_protect_test (x, y, label);
4082 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4086 /* The noreturn predictor has been moved to the tree level. The rtl-level
4087 predictors estimate this branch about 20%, which isn't enough to get
4088 things moved out of line. Since this is the only extant case of adding
4089 a noreturn function at the rtl level, it doesn't seem worth doing anything
4090 except adding the prediction by hand. */
4091 tmp = get_last_insn ();
4093 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4095 expand_expr_stmt (targetm.stack_protect_fail ());
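/* Illustration only, not from the sources: together the two routines
   above bracket a protected function roughly like

     void f (void)
     {
       guard_slot = <guard value>;        <- stack_protect_prologue
       ... function body ...
       if (guard_slot != <guard value>)   <- stack_protect_epilogue
         __stack_chk_fail ();             <- noreturn
       return;
     }

   with GUARD_SLOT placed so that a local buffer overrun overwrites it
   before reaching the saved return address.  */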
4099 /* Start the RTL for a new function, and set variables used for emitting RTL.
4101 SUBR is the FUNCTION_DECL node.
4102 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4103 the function's parameters, which must be run at any return statement. */
4106 expand_function_start (tree subr)
4108 /* Make sure volatile mem refs aren't considered
4109 valid operands of arithmetic insns. */
4110 init_recog_no_volatile ();
4114 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4117 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4119 /* Make the label for return statements to jump to. Do not special
4120 case machines with special return instructions -- they will be
4121 handled later during jump, ifcvt, or epilogue creation. */
4122 return_label = gen_label_rtx ();
4124 /* Initialize rtx used to return the value. */
4125 /* Do this before assign_parms so that we copy the struct value address
4126 before any library calls that assign parms might generate. */
4128 /* Decide whether to return the value in memory or in a register. */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }
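  /* For illustration only (not part of GCC): the static chain is what
     lets a GNU C nested function reach the enclosing frame, e.g.

       int outer (int x)
       {
         int inner (int y) { return x + y; }   -- needs outer's frame
         return inner (1);
       }

     The pointer to outer's frame arrives in static_chain_incoming_rtx
     and is copied into a pseudo above.  */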
  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
	expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }
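  /* For illustration only (not part of GCC): the save area set up above
     supports non-local gotos out of nested functions, e.g.

       void outer (void)
       {
         __label__ failure;
         void inner (void) { goto failure; }   -- non-local jump
         middle (inner);
         return;
       failure:
         handle_error ();
       }

     MIDDLE and HANDLE_ERROR are hypothetical; the frame state saved
     here is what the jump back into outer restores.  */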
  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
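/* For illustration only (not part of GCC): after expand_function_start
   returns, the insn stream for a simple function looks roughly like

     ...parameter setup insns from assign_parms...
     (note NOTE_INSN_FUNCTION_BEG)    <- parm_birth_insn points here
     (note NOTE_INSN_DELETED)         <- stack_check_probe_note, present
                                         only with -fstack-check

   and expansion of the function body continues from that point.  */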
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
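/* For illustration only (not part of GCC): given

     int f (int used, int unused) { return used; }

   compiling with -Wunused-parameter warns about UNUSED.  The DECL_NAME
   check above is why unnamed parameters are never reported, and the
   TREE_NO_WARNING check suppresses duplicate diagnostics.  */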
static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */
void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);
  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation until after the
     current_function_value_rtx is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);
  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (flag_non_call_exceptions)
	emit_insn (gen_blockage ());
    }
  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();
  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     coercion.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
			      &unsignedp, 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the
	 address of the result).  */
      crtl->return_rtx = outgoing;
    }
  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
/* Set the locator of the insn chain starting at INSN to LOC.  */

static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (const_rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
	      == VEC_index (int, *vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VEC_index (int, *vec, j))
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (const_rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      emit_note (NOTE_INSN_PROLOGUE_END);

#ifndef PROFILE_BEFORE_PROLOGUE
      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (crtl->profile)
	emit_insn (gen_blockage ());
#endif

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 entry point.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */
      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      if (bb == ENTRY_BLOCK_PTR)
		{
		  ei_next (&ei2);
		  continue;
		}
	      jump = BB_END (bb);
	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
		{
		  ei_next (&ei2);
		  continue;
		}
	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb);
		  delete_insn (jump);
		}
	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    {
		      ei_next (&ei2);
		      continue;
		    }
		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (single_succ_p (bb))
		    {
		      ei_next (&ei2);
		      continue;
		    }
		}
	      else
		{
		  ei_next (&ei2);
		  continue;
		}
	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */
	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last);
	  epilogue_end = BB_END (last);
	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:

  if (inserted)
    {
      commit_edge_insertions ();

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, &prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, &epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

/* Returns the raw (mangled) name of the current function.  */
const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */

void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();
  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
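/* For illustration only (not part of GCC): a complete user-level
   function that produces the in-out situation described above is

     int
     twiddle (int inout)
     {
       asm ("" : "+mr" (inout));
       return inout;
     }

   After SSA, the asm's matched input and output become distinct
   pseudos, which the code below reconciles by emitting a move before
   the asm and rewriting the operands to use a single register.  */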
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = alloca (noutputs * sizeof (bool));

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned int
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
#include "gt-function.h"