1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
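/* A rough sketch of how these entry points fit together (an illustration
   of the description above, not a prescription): the front end calls
   `expand_function_start', expands the function body -- during which
   `assign_stack_local' and `assign_stack_temp' hand out frame slots --
   and finally calls `expand_function_end'.  `put_var_into_stack' may be
   called at any point in between, when a variable turns out to need a
   memory home after all.  */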
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
73 /* Some systems use __main in a way incompatible with its use in gcc, in these
74 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
75 give the same symbol without quotes for an alternative entry point. You
76 must define both, or neither. */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
85 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
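/* For example, with an alignment of 8 (a power of two), these macros give
     FLOOR_ROUND (37, 8) == 32     FLOOR_ROUND (-37, 8) == -40
     CEIL_ROUND (37, 8) == 40      CEIL_ROUND (32, 8) == 32
   (illustrative values only).  */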
91 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
92 during rtl generation. If they are different register numbers, this is
93 always true. It may also be true if
94 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
95 generation. See fix_lexical_addr for details. */
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
101 /* Number of bytes of args popped by function being compiled on its return.
102 Zero if no bytes are to be popped.
103 May affect compilation of return insn or of function epilogue. */
105 int current_function_pops_args;
107 /* Nonzero if function being compiled needs to be given an address
108 where the value should be stored. */
110 int current_function_returns_struct;
112 /* Nonzero if function being compiled needs to
113 return the address of where it has put a structure value. */
115 int current_function_returns_pcc_struct;
117 /* Nonzero if function being compiled needs to be passed a static chain. */
119 int current_function_needs_context;
121 /* Nonzero if function being compiled can call setjmp. */
123 int current_function_calls_setjmp;
125 /* Nonzero if function being compiled can call longjmp. */
127 int current_function_calls_longjmp;
129 /* Nonzero if function being compiled receives nonlocal gotos
130 from nested functions. */
132 int current_function_has_nonlocal_label;
/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */
137 int current_function_has_nonlocal_goto;
139 /* Nonzero if function being compiled contains nested functions. */
141 int current_function_contains_functions;
143 /* Nonzero if function being compiled doesn't modify the stack pointer
144 (ignoring the prologue and epilogue). This is only valid after
145 life_analysis has run. */
147 int current_function_sp_is_unchanging;
149 /* Nonzero if the function being compiled issues a computed jump. */
151 int current_function_has_computed_jump;
153 /* Nonzero if the current function is a thunk (a lightweight function that
154 just adjusts one of its arguments and forwards to another function), so
155 we should try to cut corners where we can. */
156 int current_function_is_thunk;
158 /* Nonzero if function being compiled can call alloca,
159 either as a subroutine or builtin. */
161 int current_function_calls_alloca;
163 /* Nonzero if the current function returns a pointer type */
165 int current_function_returns_pointer;
167 /* If some insns can be deferred to the delay slots of the epilogue, the
168 delay list for them is recorded here. */
170 rtx current_function_epilogue_delay_list;
/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */
176 int current_function_args_size;
178 /* # bytes the prologue should push and pretend that the caller pushed them.
179 The prologue must do this, but only if parms can be passed in registers. */
181 int current_function_pretend_args_size;
183 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
184 defined, the needed space is pushed by the prologue. */
186 int current_function_outgoing_args_size;
188 /* This is the offset from the arg pointer to the place where the first
189 anonymous arg can be found, if there is one. */
191 rtx current_function_arg_offset_rtx;
193 /* Nonzero if current function uses varargs.h or equivalent.
194 Zero for functions that use stdarg.h. */
196 int current_function_varargs;
198 /* Nonzero if current function uses stdarg.h or equivalent.
199 Zero for functions that use varargs.h. */
201 int current_function_stdarg;
203 /* Quantities of various kinds of registers
204 used for the current function's args. */
206 CUMULATIVE_ARGS current_function_args_info;
208 /* Name of function now being compiled. */
210 char *current_function_name;
212 /* If non-zero, an RTL expression for the location at which the current
213 function returns its result. If the current function returns its
214 result in a register, current_function_return_rtx will always be
215 the hard register containing the result. */
217 rtx current_function_return_rtx;
219 /* Nonzero if the current function uses the constant pool. */
221 int current_function_uses_const_pool;
223 /* Nonzero if the current function uses pic_offset_table_rtx. */
224 int current_function_uses_pic_offset_table;
226 /* The arg pointer hard register, or the pseudo into which it was copied. */
227 rtx current_function_internal_arg_pointer;
229 /* Language-specific reason why the current function cannot be made inline. */
230 char *current_function_cannot_inline;
/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
234 int current_function_instrument_entry_exit;
/* Nonzero if memory access checking should be enabled in the current
   function.  */
237 int current_function_check_memory_usage;
239 /* The FUNCTION_DECL for an inline function currently being expanded. */
240 tree inline_function_decl;
242 /* Number of function calls seen so far in current function. */
244 int function_call_count;
246 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */
250 tree nonlocal_labels;
252 /* List (chain of EXPR_LIST) of stack slots that hold the current handlers
253 for nonlocal gotos. There is one for every nonlocal label in the function;
254 this list matches the one in nonlocal_labels.
255 Zero when function does not have nonlocal labels. */
257 rtx nonlocal_goto_handler_slots;
/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */
262 rtx nonlocal_goto_handler_labels;
/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
266 Zero when function does not have nonlocal labels. */
268 rtx nonlocal_goto_stack_level;
270 /* Label that will go on parm cleanup code, if any.
271 Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;
276 /* Label that will go on function epilogue.
277 Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;
282 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

rtx save_expr_regs;
286 /* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */

rtx stack_slot_list;
/* Chain of all RTL_EXPRs that have insns in them.  */

tree rtl_expr_chain;
293 /* Label to jump back to for tail recursion, or 0 if we have
294 not yet needed one for this function. */
295 rtx tail_recursion_label;
297 /* Place after which to insert the tail_recursion_label if we need one. */
298 rtx tail_recursion_reentry;
300 /* Location at which to save the argument pointer if it will need to be
301 referenced. There are two cases where this is done: if nonlocal gotos
302 exist, or if vars stored at an offset from the argument pointer will be
303 needed by inner routines. */
305 rtx arg_pointer_save_area;
307 /* Offset to end of allocated area of stack frame.
308 If stack grows down, this is the address of the last stack slot allocated.
309 If stack grows up, this is the address for the next slot. */
310 HOST_WIDE_INT frame_offset;
312 /* List (chain of TREE_LISTs) of static chains for containing functions.
313 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
314 in an RTL_EXPR in the TREE_VALUE. */
315 static tree context_display;
317 /* List (chain of TREE_LISTs) of trampolines for nested functions.
318 The trampoline sets up the static chain and jumps to the function.
319 We supply the trampoline's address when the function's address is requested.
321 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
322 in an RTL_EXPR in the TREE_VALUE. */
323 static tree trampoline_list;
325 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
326 static rtx parm_birth_insn;
329 /* Nonzero if a stack slot has been generated whose address is not
330 actually valid. It means that the generated rtl must all be scanned
331 to detect and correct the invalid addresses where they occur. */
332 static int invalid_stack_slot;
335 /* Last insn of those whose job was to put parms into their nominal homes. */
336 static rtx last_parm_insn;
338 /* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */

int max_parm_reg;
342 /* Vector indexed by REGNO, containing location on stack in which
343 to put the parm which is nominally in pseudo register REGNO,
344 if we discover that that parm must go in the stack. The highest
345 element in this vector is one less than MAX_PARM_REG, above. */
346 rtx *parm_reg_stack_loc;
348 /* Nonzero once virtual register instantiation has been done.
349 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
350 static int virtuals_instantiated;
352 /* These variables hold pointers to functions to
353 save and restore machine-specific data,
354 in push_function_context and pop_function_context. */
355 void (*save_machine_status) PROTO((struct function *));
356 void (*restore_machine_status) PROTO((struct function *));
358 /* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */
362 extern int rtx_equal_function_value_matters;
363 extern tree sequence_rtl_expr;
365 /* In order to evaluate some expressions, such as function calls returning
366 structures in memory, we need to temporarily allocate stack locations.
367 We record each allocated temporary in the following structure.
369 Associated with each temporary slot is a nesting level. When we pop up
370 one level, all temporaries associated with the previous level are freed.
371 Normally, all temporaries are freed after the execution of the statement
372 in which they were created. However, if we are inside a ({...}) grouping,
373 the result may be in a temporary and hence must be preserved. If the
374 result could be in a temporary, we preserve it if we can determine which
375 one it is in. If we cannot determine which temporary may contain the
376 result, all temporaries are preserved. A temporary is preserved by
377 pretending it was allocated at the previous nesting level.
379 Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
381 free_temp_slots will not free them. */
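/* As an illustrative example of the levels described above: a temporary
   created while expanding an ordinary statement lives at the current
   temp_slot_level and dies when free_temp_slots runs at the end of that
   statement, whereas a temporary holding the value of a ({...}) grouping
   is "preserved" by pretending it belongs to the enclosing level, so the
   surrounding code can still read it.  */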
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
425 /* List of all temporaries allocated, both available and in use. */
427 struct temp_slot *temp_slots;
/* Current nesting level for temporaries.  */

int temp_slot_level;
433 /* Current nesting level for variables in a block. */
435 int var_temp_slot_level;
437 /* When temporaries are created by TARGET_EXPRs, they are created at
438 this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
441 int target_temp_slot_level;
443 /* This structure is used to record MEMs or pseudos used to replace VAR, any
444 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
445 maintain this list in case two operands of an insn were required to match;
446 in that case we must ensure we use the same replacement. */
struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
455 struct insns_for_mem_entry {
456 /* The KEY in HE will be a MEM. */
457 struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
462 /* Forward declarations. */
464 static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
465 int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
468 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
469 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int,
				       struct hash_table *));
473 static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
474 struct hash_table *));
475 static struct fixup_replacement
476 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
477 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
478 rtx, int, struct hash_table *));
479 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
480 struct fixup_replacement **));
481 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
482 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
483 static rtx fixup_stack_1 PROTO((rtx, rtx));
484 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
485 static void instantiate_decls PROTO((tree, int));
486 static void instantiate_decls_1 PROTO((tree, int));
487 static void instantiate_decl PROTO((rtx, int, int));
488 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
489 static void delete_handlers PROTO((void));
490 static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
498 static rtx round_trampoline_addr PROTO((rtx));
499 static tree blocks_nreverse PROTO((tree));
500 static int all_blocks PROTO((tree, tree *));
501 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
502 static int *record_insns PROTO((rtx));
503 static int contains PROTO((rtx, int *));
504 #endif /* HAVE_prologue || HAVE_epilogue */
505 static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
506 static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
507 struct hash_table *));
508 static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
511 static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
512 static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
513 static int insns_for_mem_walk PROTO ((rtx *, void *));
514 static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
517 /* Pointer to chain of `struct function' for containing functions. */
518 struct function *outer_function_chain;
520 /* Given a function decl for a containing function,
521 return the `struct function' for it. */
524 find_function_data (decl)
529 for (p = outer_function_chain; p; p = p->next)
536 /* Save the current context for compilation of a nested function.
537 This is called from language-specific code.
538 The caller is responsible for saving any language-specific status,
539 since this function knows only about language-independent variables. */
542 push_function_context_to (context)
545 struct function *p = (struct function *) xmalloc (sizeof (struct function));
547 p->next = outer_function_chain;
548 outer_function_chain = p;
550 p->name = current_function_name;
551 p->decl = current_function_decl;
552 p->pops_args = current_function_pops_args;
553 p->returns_struct = current_function_returns_struct;
554 p->returns_pcc_struct = current_function_returns_pcc_struct;
555 p->returns_pointer = current_function_returns_pointer;
556 p->needs_context = current_function_needs_context;
557 p->calls_setjmp = current_function_calls_setjmp;
558 p->calls_longjmp = current_function_calls_longjmp;
559 p->calls_alloca = current_function_calls_alloca;
560 p->has_nonlocal_label = current_function_has_nonlocal_label;
561 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
562 p->contains_functions = current_function_contains_functions;
563 p->has_computed_jump = current_function_has_computed_jump;
564 p->is_thunk = current_function_is_thunk;
565 p->args_size = current_function_args_size;
566 p->pretend_args_size = current_function_pretend_args_size;
567 p->arg_offset_rtx = current_function_arg_offset_rtx;
568 p->varargs = current_function_varargs;
569 p->stdarg = current_function_stdarg;
570 p->uses_const_pool = current_function_uses_const_pool;
571 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
572 p->internal_arg_pointer = current_function_internal_arg_pointer;
573 p->cannot_inline = current_function_cannot_inline;
574 p->max_parm_reg = max_parm_reg;
575 p->parm_reg_stack_loc = parm_reg_stack_loc;
576 p->outgoing_args_size = current_function_outgoing_args_size;
577 p->return_rtx = current_function_return_rtx;
578 p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
579 p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
580 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
581 p->nonlocal_labels = nonlocal_labels;
582 p->cleanup_label = cleanup_label;
583 p->return_label = return_label;
584 p->save_expr_regs = save_expr_regs;
585 p->stack_slot_list = stack_slot_list;
586 p->parm_birth_insn = parm_birth_insn;
587 p->frame_offset = frame_offset;
588 p->tail_recursion_label = tail_recursion_label;
589 p->tail_recursion_reentry = tail_recursion_reentry;
590 p->arg_pointer_save_area = arg_pointer_save_area;
591 p->rtl_expr_chain = rtl_expr_chain;
592 p->last_parm_insn = last_parm_insn;
593 p->context_display = context_display;
594 p->trampoline_list = trampoline_list;
595 p->function_call_count = function_call_count;
596 p->temp_slots = temp_slots;
597 p->temp_slot_level = temp_slot_level;
598 p->target_temp_slot_level = target_temp_slot_level;
599 p->var_temp_slot_level = var_temp_slot_level;
600 p->fixup_var_refs_queue = 0;
601 p->epilogue_delay_list = current_function_epilogue_delay_list;
602 p->args_info = current_function_args_info;
603 p->check_memory_usage = current_function_check_memory_usage;
604 p->instrument_entry_exit = current_function_instrument_entry_exit;
606 save_tree_status (p, context);
607 save_storage_status (p);
608 save_emit_status (p);
609 save_expr_status (p);
610 save_stmt_status (p);
611 save_varasm_status (p, context);
612 if (save_machine_status)
613 (*save_machine_status) (p);
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
622 /* Restore the last saved context, at the end of a nested function.
623 This function is called from language-specific code. */
626 pop_function_context_from (context)
629 struct function *p = outer_function_chain;
630 struct var_refs_queue *queue;
632 outer_function_chain = p->next;
634 current_function_contains_functions
635 = p->contains_functions || p->inline_obstacks
636 || context == current_function_decl;
637 current_function_has_computed_jump = p->has_computed_jump;
638 current_function_name = p->name;
639 current_function_decl = p->decl;
640 current_function_pops_args = p->pops_args;
641 current_function_returns_struct = p->returns_struct;
642 current_function_returns_pcc_struct = p->returns_pcc_struct;
643 current_function_returns_pointer = p->returns_pointer;
644 current_function_needs_context = p->needs_context;
645 current_function_calls_setjmp = p->calls_setjmp;
646 current_function_calls_longjmp = p->calls_longjmp;
647 current_function_calls_alloca = p->calls_alloca;
648 current_function_has_nonlocal_label = p->has_nonlocal_label;
649 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
650 current_function_is_thunk = p->is_thunk;
651 current_function_args_size = p->args_size;
652 current_function_pretend_args_size = p->pretend_args_size;
653 current_function_arg_offset_rtx = p->arg_offset_rtx;
654 current_function_varargs = p->varargs;
655 current_function_stdarg = p->stdarg;
656 current_function_uses_const_pool = p->uses_const_pool;
657 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
658 current_function_internal_arg_pointer = p->internal_arg_pointer;
659 current_function_cannot_inline = p->cannot_inline;
660 max_parm_reg = p->max_parm_reg;
661 parm_reg_stack_loc = p->parm_reg_stack_loc;
662 current_function_outgoing_args_size = p->outgoing_args_size;
663 current_function_return_rtx = p->return_rtx;
664 nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
665 nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
666 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
667 nonlocal_labels = p->nonlocal_labels;
668 cleanup_label = p->cleanup_label;
669 return_label = p->return_label;
670 save_expr_regs = p->save_expr_regs;
671 stack_slot_list = p->stack_slot_list;
672 parm_birth_insn = p->parm_birth_insn;
673 frame_offset = p->frame_offset;
674 tail_recursion_label = p->tail_recursion_label;
675 tail_recursion_reentry = p->tail_recursion_reentry;
676 arg_pointer_save_area = p->arg_pointer_save_area;
677 rtl_expr_chain = p->rtl_expr_chain;
678 last_parm_insn = p->last_parm_insn;
679 context_display = p->context_display;
680 trampoline_list = p->trampoline_list;
681 function_call_count = p->function_call_count;
682 temp_slots = p->temp_slots;
683 temp_slot_level = p->temp_slot_level;
684 target_temp_slot_level = p->target_temp_slot_level;
685 var_temp_slot_level = p->var_temp_slot_level;
686 current_function_epilogue_delay_list = p->epilogue_delay_list;
688 current_function_args_info = p->args_info;
689 current_function_check_memory_usage = p->check_memory_usage;
690 current_function_instrument_entry_exit = p->instrument_entry_exit;
692 restore_tree_status (p, context);
693 restore_storage_status (p);
694 restore_expr_status (p);
695 restore_emit_status (p);
696 restore_stmt_status (p);
697 restore_varasm_status (p);
699 if (restore_machine_status)
700 (*restore_machine_status) (p);
702 /* Finish doing put_var_into_stack for any of our variables
703 which became addressable during the nested function. */
704 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
705 fixup_var_refs (queue->modified, queue->promoted_mode,
706 queue->unsignedp, 0);
710 /* Reset variables that have known state during rtx generation. */
711 rtx_equal_function_value_matters = 1;
712 virtuals_instantiated = 0;
void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
720 /* Allocate fixed slots in the stack frame of the current function. */
722 /* Return size needed for stack frame based on slots so far allocated.
723 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
724 the caller may have to do that. */
HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
736 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
737 with machine mode MODE.
739 ALIGN controls the amount of alignment for the address of the slot:
740 0 means according to MODE,
741 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
742 positive specifies alignment boundary in bits.
744 We do not round to stack_boundary here. */
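/* Purely as an illustration of the interface described above: a call such
   as
     assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0)
   reserves a word in the current frame and yields a MEM whose address is
   an offset from virtual_stack_vars_rtx (or from frame_pointer_rtx once
   virtual registers have been instantiated).  */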
747 assign_stack_local (mode, size, align)
748 enum machine_mode mode;
752 register rtx x, addr;
753 int bigend_correction = 0;
760 alignment = GET_MODE_ALIGNMENT (mode);
762 alignment = BIGGEST_ALIGNMENT;
  /* Allow the target to (possibly) increase the alignment of this
     stack slot.  */
766 type = type_for_mode (mode, 0);
768 alignment = LOCAL_ALIGNMENT (type, alignment);
770 alignment /= BITS_PER_UNIT;
772 else if (align == -1)
774 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
775 size = CEIL_ROUND (size, alignment);
778 alignment = align / BITS_PER_UNIT;
780 /* Round frame offset to that alignment.
781 We must be careful here, since FRAME_OFFSET might be negative and
782 division with a negative dividend isn't as well defined as we might
783 like. So we instead assume that ALIGNMENT is a power of two and
784 use logical operations which are unambiguous. */
785 #ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif
791 /* On a big-endian machine, if we are allocating more space than we will use,
792 use the least significant bytes of those that are allocated. */
793 if (BYTES_BIG_ENDIAN && mode != BLKmode)
794 bigend_correction = size - GET_MODE_SIZE (mode);
796 #ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif
800 /* If we have already instantiated virtual registers, return the actual
801 address relative to the frame pointer. */
802 if (virtuals_instantiated)
803 addr = plus_constant (frame_pointer_rtx,
804 (frame_offset + bigend_correction
805 + STARTING_FRAME_OFFSET));
807 addr = plus_constant (virtual_stack_vars_rtx,
808 frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
821 /* Assign a stack slot in a containing function.
822 First three arguments are same as in preceding function.
823 The last argument specifies the function to allocate in. */
826 assign_outer_stack_local (mode, size, align, function)
827 enum machine_mode mode;
830 struct function *function;
832 register rtx x, addr;
833 int bigend_correction = 0;
836 /* Allocate in the memory associated with the function in whose frame
838 push_obstacks (function->function_obstack,
839 function->function_maybepermanent_obstack);
845 alignment = GET_MODE_ALIGNMENT (mode);
847 alignment = BIGGEST_ALIGNMENT;
  /* Allow the target to (possibly) increase the alignment of this
     stack slot.  */
851 type = type_for_mode (mode, 0);
853 alignment = LOCAL_ALIGNMENT (type, alignment);
855 alignment /= BITS_PER_UNIT;
857 else if (align == -1)
859 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
860 size = CEIL_ROUND (size, alignment);
863 alignment = align / BITS_PER_UNIT;
865 /* Round frame offset to that alignment. */
866 #ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif
872 /* On a big-endian machine, if we are allocating more space than we will use,
873 use the least significant bytes of those that are allocated. */
874 if (BYTES_BIG_ENDIAN && mode != BLKmode)
875 bigend_correction = size - GET_MODE_SIZE (mode);
877 #ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
880 addr = plus_constant (virtual_stack_vars_rtx,
881 function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.
899 MODE is the machine mode to be given to the returned rtx.
901 SIZE is the size in units of the space required. We do no rounding here
902 since assign_stack_local will do any required rounding.
904 KEEP is 1 if this slot is to be retained after a call to
905 free_temp_slots. Automatic variables for a block are allocated
906 with this flag. KEEP is 2 if we allocate a longer term temporary,
907 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
908 if we are to allocate something at an inner level to be treated as
909 a variable in the block (e.g., a SAVE_EXPR).
911 TYPE is the type that will be used for the stack slot. */
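/* For instance (illustrative only), a temporary to hold a structure
   returned by a function call might be obtained with
     assign_stack_temp_for_type (BLKmode, int_size_in_bytes (type), 0, type)
   and would be released by the next call to free_temp_slots, since KEEP
   is 0 and the slot is not tied to an RTL_EXPR.  */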
914 assign_stack_temp_for_type (mode, size, keep, type)
915 enum machine_mode mode;
922 struct temp_slot *p, *best_p = 0;
924 /* If SIZE is -1 it means that somebody tried to allocate a temporary
925 of a variable size. */
929 /* If we know the alias set for the memory that will be used, use
930 it. If there's no TYPE, then we don't know anything about the
931 alias set for the memory. */
933 alias_set = get_alias_set (type);
937 align = GET_MODE_ALIGNMENT (mode);
939 align = BIGGEST_ALIGNMENT;
942 type = type_for_mode (mode, 0);
944 align = LOCAL_ALIGNMENT (type, align);
946 /* Try to find an available, already-allocated temporary of the proper
947 mode which meets the size and alignment requirements. Choose the
948 smallest one with the closest alignment. */
949 for (p = temp_slots; p; p = p->next)
950 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
952 && (!flag_strict_aliasing
953 || (alias_set && p->alias_set == alias_set))
954 && (best_p == 0 || best_p->size > p->size
955 || (best_p->size == p->size && best_p->align > p->align)))
957 if (p->align == align && p->size == size)
965 /* Make our best, if any, the one to use. */
968 /* If there are enough aligned bytes left over, make them into a new
969 temp_slot so that the extra bytes don't get wasted. Do this only
970 for BLKmode slots, so that we can be sure of the alignment. */
971 if (GET_MODE (best_p->slot) == BLKmode
972 /* We can't split slots if -fstrict-aliasing because the
	 information about the alias set for the new slot will be
	 lost.  */
975 && !flag_strict_aliasing)
977 int alignment = best_p->align / BITS_PER_UNIT;
978 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
980 if (best_p->size - rounded_size >= alignment)
982 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
983 p->in_use = p->addr_taken = 0;
984 p->size = best_p->size - rounded_size;
985 p->base_offset = best_p->base_offset + rounded_size;
986 p->full_size = best_p->full_size - rounded_size;
987 p->slot = gen_rtx_MEM (BLKmode,
988 plus_constant (XEXP (best_p->slot, 0),
990 p->align = best_p->align;
993 p->next = temp_slots;
996 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
999 best_p->size = rounded_size;
1000 best_p->full_size = rounded_size;
1007 /* If we still didn't find one, make a new temporary. */
1010 HOST_WIDE_INT frame_offset_old = frame_offset;
1012 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
1014 /* We are passing an explicit alignment request to assign_stack_local.
1015 One side effect of that is assign_stack_local will not round SIZE
1016 to ensure the frame offset remains suitably aligned.
1018 So for requests which depended on the rounding of SIZE, we go ahead
1019 and round it now. We also make sure ALIGNMENT is at least
1020 BIGGEST_ALIGNMENT. */
1021 if (mode == BLKmode && align < (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
1023 p->slot = assign_stack_local (mode,
1025 ? CEIL_ROUND (size, align) : size,
1029 p->alias_set = alias_set;
1031 /* The following slot size computation is necessary because we don't
1032 know the actual size of the temporary slot until assign_stack_local
1033 has performed all the frame alignment and size rounding for the
1034 requested temporary. Note that extra space added for alignment
1035 can be either above or below this stack slot depending on which
1036 way the frame grows. We include the extra space if and only if it
1037 is above this slot. */
1038 #ifdef FRAME_GROWS_DOWNWARD
1039 p->size = frame_offset_old - frame_offset;
1044 /* Now define the fields used by combine_temp_slots. */
1045 #ifdef FRAME_GROWS_DOWNWARD
1046 p->base_offset = frame_offset;
1047 p->full_size = frame_offset_old - frame_offset;
1049 p->base_offset = frame_offset_old;
1050 p->full_size = frame_offset - frame_offset_old;
1053 p->next = temp_slots;
1059 p->rtl_expr = sequence_rtl_expr;
1063 p->level = target_temp_slot_level;
1068 p->level = var_temp_slot_level;
1073 p->level = temp_slot_level;
  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
1079 RTX_UNCHANGING_P (p->slot) = 0;
1080 MEM_IN_STRUCT_P (p->slot) = 0;
1081 MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;

  return p->slot;
}
1086 /* Allocate a temporary stack slot and record it for possible later
1087 reuse. First three arguments are same as in preceding function. */
1090 assign_stack_temp (mode, size, keep)
1091 enum machine_mode mode;
1095 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
1098 /* Assign a temporary of given TYPE.
1099 KEEP is as for assign_stack_temp.
1100 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
1101 it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */
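/* E.g. (an illustration, not a requirement of any particular caller):
   assign_temp (type, 0, 1, 0) always yields addressable stack memory for
   TYPE, whereas with MEMORY_REQUIRED == 0 a scalar TYPE may simply get a
   fresh pseudo from gen_reg_rtx.  */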
1106 assign_temp (type, keep, memory_required, dont_promote)
1109 int memory_required;
1112 enum machine_mode mode = TYPE_MODE (type);
1113 int unsignedp = TREE_UNSIGNED (type);
1115 if (mode == BLKmode || memory_required)
1117 HOST_WIDE_INT size = int_size_in_bytes (type);
1120 /* Unfortunately, we don't yet know how to allocate variable-sized
1121 temporaries. However, sometimes we have a fixed upper limit on
1122 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
1123 instead. This is the case for Chill variable-sized strings. */
1124 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
1125 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
1126 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
1127 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
1129 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1130 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
1134 #ifndef PROMOTE_FOR_CALL_ONLY
  mode = promote_mode (type, mode, &unsignedp, 0);
#endif
1139 return gen_reg_rtx (mode);
1142 /* Combine temporary stack slots which are adjacent on the stack.
1144 This allows for better use of already allocated stack space. This is only
1145 done for BLKmode slots because we can be sure that we won't have alignment
1146 problems in this case. */
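/* For example (purely illustrative), two free BLKmode slots whose
   base_offset/full_size are 16/8 and 24/8 are adjacent (16 + 8 == 24) and
   can be merged into a single free slot with full_size 16.  */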
1149 combine_temp_slots ()
1151 struct temp_slot *p, *q;
1152 struct temp_slot *prev_p, *prev_q;
1155 /* We can't combine slots, because the information about which slot
1156 is in which alias set will be lost. */
1157 if (flag_strict_aliasing)
1160 /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
1162 if (! flag_expensive_optimizations)
1163 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;
1167 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1171 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1172 for (q = p->next, prev_q = p; q; q = prev_q->next)
1175 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
1177 if (p->base_offset + p->full_size == q->base_offset)
1179 /* Q comes after P; combine Q into P. */
1181 p->full_size += q->full_size;
1184 else if (q->base_offset + q->full_size == p->base_offset)
1186 /* P comes after Q; combine P into Q. */
1188 q->full_size += p->full_size;
1193 /* Either delete Q or advance past it. */
1195 prev_q->next = q->next;
1199 /* Either delete P or advance past it. */
1203 prev_p->next = p->next;
1205 temp_slots = p->next;
1212 /* Find the temp slot corresponding to the object at address X. */
1214 static struct temp_slot *
1215 find_temp_slot_from_address (x)
1218 struct temp_slot *p;
1221 for (p = temp_slots; p; p = p->next)
1226 else if (XEXP (p->slot, 0) == x
1228 || (GET_CODE (x) == PLUS
1229 && XEXP (x, 0) == virtual_stack_vars_rtx
1230 && GET_CODE (XEXP (x, 1)) == CONST_INT
1231 && INTVAL (XEXP (x, 1)) >= p->base_offset
1232 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1235 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1236 for (next = p->address; next; next = XEXP (next, 1))
1237 if (XEXP (next, 0) == x)
1244 /* Indicate that NEW is an alternate way of referring to the temp slot
1245 that previously was known by OLD. */
1248 update_temp_slot_address (old, new)
1251 struct temp_slot *p = find_temp_slot_from_address (old);
1253 /* If none, return. Else add NEW as an alias. */
1256 else if (p->address == 0)
1260 if (GET_CODE (p->address) != EXPR_LIST)
1261 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1263 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1267 /* If X could be a reference to a temporary slot, mark the fact that its
1268 address was taken. */
1271 mark_temp_addr_taken (x)
1274 struct temp_slot *p;
1279 /* If X is not in memory or is at a constant address, it cannot be in
1280 a temporary slot. */
1281 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1284 p = find_temp_slot_from_address (XEXP (x, 0));
1289 /* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
1291 matched one of our slots, just mark that one. Otherwise, we can't
1292 easily predict which it is, so upgrade all of them. Kept slots
1293 need not be touched.
1295 This is called when an ({...}) construct occurs and a statement
1296 returns a value in memory. */
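/* A typical case (illustrative): in
     struct s x = ({ struct s tmp; ...; tmp; });
   the value of the grouping may live in a temporary slot, and
   preserve_temp_slots keeps that slot alive beyond the statement that
   created it.  */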
1299 preserve_temp_slots (x)
1302 struct temp_slot *p = 0;
  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
1308 for (p = temp_slots; p; p = p->next)
1309 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1315 /* If X is a register that is being used as a pointer, see if we have
1316 a temporary slot we know it points to. To be consistent with
1317 the code below, we really should preserve all non-kept slots
1318 if we can't find a match, but that seems to be much too costly. */
1319 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1320 p = find_temp_slot_from_address (x);
1322 /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
1325 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1327 for (p = temp_slots; p; p = p->next)
1328 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1334 /* First see if we can find a match. */
1336 p = find_temp_slot_from_address (XEXP (x, 0));
1340 /* Move everything at our level whose address was taken to our new
1341 level in case we used its address. */
1342 struct temp_slot *q;
1344 if (p->level == temp_slot_level)
1346 for (q = temp_slots; q; q = q->next)
1347 if (q != p && q->addr_taken && q->level == p->level)
1356 /* Otherwise, preserve all non-kept slots at this level. */
1357 for (p = temp_slots; p; p = p->next)
1358 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1362 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1363 with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */
1368 preserve_rtl_expr_result (x)
1371 struct temp_slot *p;
1373 /* If X is not in memory or is at a constant address, it cannot be in
1374 a temporary slot. */
1375 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
  /* If we can find a match, move it to our level unless it is already at
     a lower level.  */
1380 p = find_temp_slot_from_address (XEXP (x, 0));
1383 p->level = MIN (p->level, temp_slot_level);
1390 /* Free all temporaries used so far. This is normally called at the end
1391 of generating code for a statement. Don't free any temporaries
1392 currently in use for an RTL_EXPR that hasn't yet been emitted.
1393 We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
1400 struct temp_slot *p;
1402 for (p = temp_slots; p; p = p->next)
1403 if (p->in_use && p->level == temp_slot_level && ! p->keep
1404 && p->rtl_expr == 0)
1407 combine_temp_slots ();
1410 /* Free all temporary slots used in T, an RTL_EXPR node. */
1413 free_temps_for_rtl_expr (t)
1416 struct temp_slot *p;
1418 for (p = temp_slots; p; p = p->next)
1419 if (p->rtl_expr == t)
1422 combine_temp_slots ();
1425 /* Mark all temporaries ever allocated in this function as not suitable
1426 for reuse until the current level is exited. */
1429 mark_all_temps_used ()
1431 struct temp_slot *p;
1433 for (p = temp_slots; p; p = p->next)
1435 p->in_use = p->keep = 1;
1436 p->level = MIN (p->level, temp_slot_level);
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}
1448 /* Likewise, but save the new level as the place to allocate variables
1452 push_temp_slots_for_block ()
1456 var_temp_slot_level = temp_slot_level;
1459 /* Likewise, but save the new level as the place to allocate temporaries
1460 for TARGET_EXPRs. */
1463 push_temp_slots_for_target ()
1467 target_temp_slot_level = temp_slot_level;
1470 /* Set and get the value of target_temp_slot_level. The only
1471 permitted use of these functions is to save and restore this value. */
1474 get_target_temp_slot_level ()
1476 return target_temp_slot_level;
1480 set_target_temp_slot_level (level)
1483 target_temp_slot_level = level;
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
1492 struct temp_slot *p;
1494 for (p = temp_slots; p; p = p->next)
1495 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
1510 temp_slot_level = 0;
1511 var_temp_slot_level = 0;
1512 target_temp_slot_level = 0;
1515 /* Retroactively move an auto variable from a register to a stack slot.
1516 This is done when an address-reference to the variable is seen. */
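/* For instance (illustrative): given
     int i;  int *p = &i;
   the front end may first have given `i' a pseudo register; on seeing the
   address-of operator it calls this function for the decl of `i', so that
   `i' gets a real stack slot whose address can be taken.  */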
1519 put_var_into_stack (decl)
1523 enum machine_mode promoted_mode, decl_mode;
1524 struct function *function = 0;
1526 int can_use_addressof;
1528 context = decl_function_context (decl);
1530 /* Get the current rtl used for this object and its original mode. */
1531 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1533 /* No need to do anything if decl has no rtx yet
1534 since in that case caller is setting TREE_ADDRESSABLE
1535 and a stack slot will be assigned when the rtl is made. */
1539 /* Get the declared mode for this object. */
1540 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1541 : DECL_MODE (decl));
1542 /* Get the mode it's actually stored in. */
1543 promoted_mode = GET_MODE (reg);
1545 /* If this variable comes from an outer function,
1546 find that function's saved context. */
1547 if (context != current_function_decl && context != inline_function_decl)
1548 for (function = outer_function_chain; function; function = function->next)
1549 if (function->decl == context)
1552 /* If this is a variable-size object with a pseudo to address it,
1553 put that pseudo into the stack, if the var is nonlocal. */
1554 if (DECL_NONLOCAL (decl)
1555 && GET_CODE (reg) == MEM
1556 && GET_CODE (XEXP (reg, 0)) == REG
1557 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1559 reg = XEXP (reg, 0);
1560 decl_mode = promoted_mode = GET_MODE (reg);
1566 /* FIXME make it work for promoted modes too */
1567 && decl_mode == promoted_mode
1568 #ifdef NON_SAVING_SETJMP
1569 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
1575 if (! can_use_addressof && GET_CODE (reg) == MEM
1576 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1577 reg = XEXP (XEXP (reg, 0), 0);
1579 /* Now we should have a value that resides in one or more pseudo regs. */
1581 if (GET_CODE (reg) == REG)
1583 /* If this variable lives in the current function and we don't need
1584 to put things in the stack for the sake of setjmp, try to keep it
1585 in a register until we know we actually need the address. */
1586 if (can_use_addressof)
1587 gen_mem_addressof (reg, decl);
1589 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1590 promoted_mode, decl_mode,
1591 TREE_SIDE_EFFECTS (decl), 0,
1592 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1595 else if (GET_CODE (reg) == CONCAT)
1597 /* A CONCAT contains two pseudos; put them both in the stack.
1598 We do it so they end up consecutive. */
1599 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1600 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1601 #ifdef FRAME_GROWS_DOWNWARD
1602 /* Since part 0 should have a lower address, do it second. */
1603 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1604 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1605 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1607 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1608 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1609 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1612 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1613 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1614 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1616 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1617 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1618 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1622 /* Change the CONCAT into a combined MEM for both parts. */
1623 PUT_CODE (reg, MEM);
1624 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1625 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1627 /* The two parts are in memory order already.
1628 Use the lower parts address as ours. */
1629 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1630 /* Prevent sharing of rtl that might lose. */
1631 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1632 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1637 if (current_function_check_memory_usage)
1638 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1639 XEXP (reg, 0), ptr_mode,
1640 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1641 TYPE_MODE (sizetype),
1642 GEN_INT (MEMORY_USE_RW),
1643 TYPE_MODE (integer_type_node));
1646 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1647 into the stack frame of FUNCTION (0 means the current function).
1648 DECL_MODE is the machine mode of the user-level data type.
1649 PROMOTED_MODE is the machine mode of the register.
1650 VOLATILE_P is nonzero if this is for a "volatile" decl.
1651 USED_P is nonzero if this reg might have already been used in an insn. */
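/* Note, as a sketch of the effect implemented below: the pseudo REG is not
   replaced by a new rtx; its own rtx is rewritten in place into a MEM of
   DECL_MODE whose address comes from the chosen stack slot.  Existing
   references therefore see the memory form automatically, and the rest
   are repaired by fixup_var_refs.  */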
1654 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1655 original_regno, used_p, ht)
1656 struct function *function;
1659 enum machine_mode promoted_mode, decl_mode;
1663 struct hash_table *ht;
1666 int regno = original_regno;
1669 regno = REGNO (reg);
1673 if (regno < function->max_parm_reg)
1674 new = function->parm_reg_stack_loc[regno];
1676 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1681 if (regno < max_parm_reg)
1682 new = parm_reg_stack_loc[regno];
1684 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1687 PUT_MODE (reg, decl_mode);
1688 XEXP (reg, 0) = XEXP (new, 0);
1689 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1690 MEM_VOLATILE_P (reg) = volatile_p;
1691 PUT_CODE (reg, MEM);
1693 /* If this is a memory ref that contains aggregate components,
1694 mark it as such for cse and loop optimize. If we are reusing a
1695 previously generated stack slot, then we need to copy the bit in
1696 case it was set for other reasons. For instance, it is set for
1697 __builtin_va_alist. */
1698 MEM_SET_IN_STRUCT_P (reg,
1699 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1700 MEM_ALIAS_SET (reg) = get_alias_set (type);
1702 /* Now make sure that all refs to the variable, previously made
1703 when it was a register, are fixed up to be valid again. */
1705 if (used_p && function != 0)
1707 struct var_refs_queue *temp;
1709 /* Variable is inherited; fix it up when we get back to its function. */
1710 push_obstacks (function->function_obstack,
1711 function->function_maybepermanent_obstack);
1713 /* See comment in restore_tree_status in tree.c for why this needs to be
1714 on saveable obstack. */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1717 temp->modified = reg;
1718 temp->promoted_mode = promoted_mode;
1719 temp->unsignedp = TREE_UNSIGNED (type);
1720 temp->next = function->fixup_var_refs_queue;
1721 function->fixup_var_refs_queue = temp;
1725 /* Variable is local; fix it up now. */
1726 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1730 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1732 enum machine_mode promoted_mode;
1734 struct hash_table *ht;
1737 rtx first_insn = get_insns ();
1738 struct sequence_stack *stack = sequence_stack;
1739 tree rtl_exps = rtl_expr_chain;
1741 /* Must scan all insns for stack-refs that exceed the limit. */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
1744 /* If there's a hash table, it must record all uses of VAR. */
1748 /* Scan all pending sequences too. */
1749 for (; stack; stack = stack->next)
1751 push_to_sequence (stack->first);
1752 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1753 stack->first, stack->next != 0, 0);
1754 /* Update remembered end of sequence
1755 in case we added an insn at the end. */
1756 stack->last = get_last_insn ();
1760 /* Scan all waiting RTL_EXPRs too. */
1761 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1763 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1764 if (seq != const0_rtx && seq != 0)
1766 push_to_sequence (seq);
1767 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1773 /* Scan the catch clauses for exception handling too. */
1774 push_to_sequence (catch_clauses);
1775 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1780 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1781 some part of an insn. Return a struct fixup_replacement whose OLD
1782 value is equal to X. Allocate a new structure if no such entry exists. */
1784 static struct fixup_replacement *
1785 find_fixup_replacement (replacements, x)
1786 struct fixup_replacement **replacements;
1789 struct fixup_replacement *p;
1791 /* See if we have already replaced this. */
1792 for (p = *replacements; p && p->old != x; p = p->next)
1797 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1800 p->next = *replacements;
1807 /* Scan the insn-chain starting with INSN for refs to VAR
1808 and fix them up. TOPLEVEL is nonzero if this chain is the
1809 main chain of insns for the current function. */
1812 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1814 enum machine_mode promoted_mode;
1818 struct hash_table *ht;
1823 /* If we already know which INSNs reference VAR there's no need
1824 to walk the entire instruction chain. */
1827 insn_list = ((struct insns_for_mem_entry *)
1828 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1829 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1830 insn_list = XEXP (insn_list, 1);
1835 rtx next = NEXT_INSN (insn);
1836 rtx set, prev, prev_set;
1839 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1841 /* If this is a CLOBBER of VAR, delete it.
1843 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1844 and REG_RETVAL notes too. */
1845 if (GET_CODE (PATTERN (insn)) == CLOBBER
1846 && (XEXP (PATTERN (insn), 0) == var
1847 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1848 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1849 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1851 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1852 /* The REG_LIBCALL note will go away since we are going to
1853 turn INSN into a NOTE, so just delete the
1854 corresponding REG_RETVAL note. */
1855 remove_note (XEXP (note, 0),
1856 find_reg_note (XEXP (note, 0), REG_RETVAL,
1859 /* In unoptimized compilation, we shouldn't call delete_insn
1860 except in jump.c doing warnings. */
1861 PUT_CODE (insn, NOTE);
1862 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1863 NOTE_SOURCE_FILE (insn) = 0;
1866 /* The insn to load VAR from a home in the arglist
1867 is now a no-op. When we see it, just delete it.
1868 Similarly if this is storing VAR from a register from which
1869 it was loaded in the previous insn. This will occur
1870 when an ADDRESSOF was made for an arglist slot. */
1872 && (set = single_set (insn)) != 0
1873 && SET_DEST (set) == var
1874 /* If this represents the result of an insn group,
1875 don't delete the insn. */
1876 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1877 && (rtx_equal_p (SET_SRC (set), var)
1878 || (GET_CODE (SET_SRC (set)) == REG
1879 && (prev = prev_nonnote_insn (insn)) != 0
1880 && (prev_set = single_set (prev)) != 0
1881 && SET_DEST (prev_set) == SET_SRC (set)
1882 && rtx_equal_p (SET_SRC (prev_set), var))))
1884 /* In unoptimized compilation, we shouldn't call delete_insn
1885 except in jump.c doing warnings. */
1886 PUT_CODE (insn, NOTE);
1887 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1888 NOTE_SOURCE_FILE (insn) = 0;
1889 if (insn == last_parm_insn)
1890 last_parm_insn = PREV_INSN (next);
1894 struct fixup_replacement *replacements = 0;
1895 rtx next_insn = NEXT_INSN (insn);
1897 if (SMALL_REGISTER_CLASSES)
1899 /* If the insn that copies the results of a CALL_INSN
1900 into a pseudo now references VAR, we have to use an
1901 intermediate pseudo since we want the life of the
1902 return value register to be only a single insn.
1904 If we don't use an intermediate pseudo, such things as
1905 address computations to make the address of VAR valid
1906 if it is not, can be placed between the CALL_INSN and INSN.
1908 To make sure this doesn't happen, we record the destination
1909 of the CALL_INSN and see if the next insn uses both that
1910 and VAR.  */
1912 if (call_dest != 0 && GET_CODE (insn) == INSN
1913 && reg_mentioned_p (var, PATTERN (insn))
1914 && reg_mentioned_p (call_dest, PATTERN (insn)))
1916 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1918 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1920 PATTERN (insn) = replace_rtx (PATTERN (insn), var, temp);
1924 if (GET_CODE (insn) == CALL_INSN
1925 && GET_CODE (PATTERN (insn)) == SET)
1926 call_dest = SET_DEST (PATTERN (insn));
1927 else if (GET_CODE (insn) == CALL_INSN
1928 && GET_CODE (PATTERN (insn)) == PARALLEL
1929 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1930 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1935 /* See if we have to do anything to INSN now that VAR is in
1936 memory. If it needs to be loaded into a pseudo, use a single
1937 pseudo for the entire insn in case there is a MATCH_DUP
1938 between two operands. We pass a pointer to the head of
1939 a list of struct fixup_replacements. If fixup_var_refs_1
1940 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1941 it will record them in this list.
1943 If it allocated a pseudo for any replacement, we copy into it here.  */
1946 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, &replacements);
1949 /* If this is last_parm_insn, and any instructions were output
1950 after it to fix it up, then we must set last_parm_insn to
1951 the last such instruction emitted. */
1952 if (insn == last_parm_insn)
1953 last_parm_insn = PREV_INSN (next_insn);
1955 while (replacements)
1957 if (GET_CODE (replacements->new) == REG)
1962 /* OLD might be a (subreg (mem)). */
1963 if (GET_CODE (replacements->old) == SUBREG)
1964 replacements->old
1965 = fixup_memory_subreg (replacements->old, insn, 0);
1966 else
1967 replacements->old
1968 = fixup_stack_1 (replacements->old, insn);
1970 insert_before = insn;
1972 /* If we are changing the mode, do a conversion.
1973 This might be wasteful, but combine.c will
1974 eliminate much of the waste. */
1976 if (GET_MODE (replacements->new)
1977 != GET_MODE (replacements->old))
1978 {
1979 start_sequence ();
1980 convert_move (replacements->new,
1981 replacements->old, unsignedp);
1982 seq = gen_sequence ();
1983 end_sequence ();
1984 }
1985 else
1986 seq = gen_move_insn (replacements->new,
1987 replacements->old);
1989 emit_insn_before (seq, insert_before);
1992 replacements = replacements->next;
1996 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1997 But don't touch other insns referred to by reg-notes;
1998 we will get them elsewhere. */
1999 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2000 if (GET_CODE (note) != INSN_LIST)
2001 XEXP (note, 0)
2002 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
2009 insn = XEXP (insn_list, 0);
2010 insn_list = XEXP (insn_list, 1);
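/* Illustration, not part of the compiler: the two traversal modes of
   fixup_var_refs_insns above, on invented miniature types.  With a
   precomputed reference list (the hash-table fast path) only the
   recorded insns are visited; without one the whole chain is
   scanned.  */

struct mini_insn
{
  int uses_var;                 /* stands for "mentions VAR" */
  struct mini_insn *next;
};

struct mini_ref_list
{
  struct mini_insn *insn;
  struct mini_ref_list *next;
};

static void
visit_var_refs (struct mini_insn *chain, struct mini_ref_list *known,
                void (*fix) (struct mini_insn *))
{
  if (known)
    {
      struct mini_ref_list *l;

      /* Fast path: the per-variable list already names every user.  */
      for (l = known; l; l = l->next)
        fix (l->insn);
    }
  else
    {
      struct mini_insn *i;

      /* Slow path: examine the entire instruction chain.  */
      for (i = chain; i; i = i->next)
        if (i->uses_var)
          fix (i);
    }
}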
2017 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2018 See if the rtx expression at *LOC in INSN needs to be changed.
2020 REPLACEMENTS is a pointer to a list head that starts out zero, but may
2021 contain a list of original rtx's and replacements. If we find that we need
2022 to modify this insn by replacing a memory reference with a pseudo or by
2023 making a new MEM to implement a SUBREG, we consult that list to see if
2024 we have already chosen a replacement. If none has already been allocated,
2025 we allocate it and update the list. fixup_var_refs_insns will copy VAR
2026 or the SUBREG, as appropriate, to the pseudo. */
2029 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
2031 enum machine_mode promoted_mode;
2034 struct fixup_replacement **replacements;
2037 register rtx x = *loc;
2038 RTX_CODE code = GET_CODE (x);
2040 register rtx tem, tem1;
2041 struct fixup_replacement *replacement;
2046 if (XEXP (x, 0) == var)
2048 /* Prevent sharing of rtl that might lose. */
2049 rtx sub = copy_rtx (XEXP (var, 0));
2051 if (! validate_change (insn, loc, sub, 0))
2053 rtx y = gen_reg_rtx (GET_MODE (sub));
2056 /* We should be able to replace with a register or all is lost.
2057 Note that we can't use validate_change to verify this, since
2058 we're not caring for replacing all dups simultaneously. */
2059 if (! validate_replace_rtx (*loc, y, insn))
2062 /* Careful! First try to recognize a direct move of the
2063 value, mimicking how things are done in gen_reload wrt
2064 PLUS. Consider what happens when insn is a conditional
2065 move instruction and addsi3 clobbers flags. */
2067 start_sequence ();
2068 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
2069 seq = gen_sequence ();
2070 end_sequence ();
2072 if (recog_memoized (new_insn) < 0)
2073 {
2074 /* That failed.  Fall back on force_operand and hope.  */
2076 start_sequence ();
2077 force_operand (sub, y);
2078 seq = gen_sequence ();
2079 end_sequence ();
2080 }
2083 /* Don't separate setter from user. */
2084 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
2085 insn = PREV_INSN (insn);
2088 emit_insn_before (seq, insn);
2096 /* If we already have a replacement, use it. Otherwise,
2097 try to fix up this address in case it is invalid. */
2099 replacement = find_fixup_replacement (replacements, var);
2100 if (replacement->new)
2101 {
2102 *loc = replacement->new;
2103 return;
2104 }
2106 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2108 /* Unless we are forcing memory to register or we changed the mode,
2109 we can leave things the way they are if the insn is valid. */
2111 INSN_CODE (insn) = -1;
2112 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2113 && recog_memoized (insn) >= 0)
2116 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2120 /* If X contains VAR, we need to unshare it here so that we update
2121 each occurrence separately. But all identical MEMs in one insn
2122 must be replaced with the same rtx because of the possibility of
2123 MATCH_DUPs.  */
2125 if (reg_mentioned_p (var, x))
2127 replacement = find_fixup_replacement (replacements, x);
2128 if (replacement->new == 0)
2129 replacement->new = copy_most_rtx (x, var);
2131 *loc = x = replacement->new;
2147 /* Note that in some cases those types of expressions are altered
2148 by optimize_bit_field, and do not survive to get here. */
2149 if (XEXP (x, 0) == var
2150 || (GET_CODE (XEXP (x, 0)) == SUBREG
2151 && SUBREG_REG (XEXP (x, 0)) == var))
2153 /* Get TEM as a valid MEM in the mode presently in the insn.
2155 We don't worry about the possibility of MATCH_DUP here; it
2156 is highly unlikely and would be tricky to handle. */
2159 if (GET_CODE (tem) == SUBREG)
2161 if (GET_MODE_BITSIZE (GET_MODE (tem))
2162 > GET_MODE_BITSIZE (GET_MODE (var)))
2164 replacement = find_fixup_replacement (replacements, var);
2165 if (replacement->new == 0)
2166 replacement->new = gen_reg_rtx (GET_MODE (var));
2167 SUBREG_REG (tem) = replacement->new;
2170 tem = fixup_memory_subreg (tem, insn, 0);
2173 tem = fixup_stack_1 (tem, insn);
2175 /* Unless we want to load from memory, get TEM into the proper mode
2176 for an extract from memory. This can only be done if the
2177 extract is at a constant position and length. */
2179 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2180 && GET_CODE (XEXP (x, 2)) == CONST_INT
2181 && ! mode_dependent_address_p (XEXP (tem, 0))
2182 && ! MEM_VOLATILE_P (tem))
2184 enum machine_mode wanted_mode = VOIDmode;
2185 enum machine_mode is_mode = GET_MODE (tem);
2186 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2189 if (GET_CODE (x) == ZERO_EXTRACT)
2191 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2192 if (wanted_mode == VOIDmode)
2193 wanted_mode = word_mode;
2197 if (GET_CODE (x) == SIGN_EXTRACT)
2199 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2200 if (wanted_mode == VOIDmode)
2201 wanted_mode = word_mode;
2204 /* If we have a narrower mode, we can do something. */
2205 if (wanted_mode != VOIDmode
2206 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2208 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2209 rtx old_pos = XEXP (x, 2);
2212 /* If the bytes and bits are counted differently, we
2213 must adjust the offset. */
2214 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2215 offset = (GET_MODE_SIZE (is_mode)
2216 - GET_MODE_SIZE (wanted_mode) - offset);
2218 pos %= GET_MODE_BITSIZE (wanted_mode);
2220 newmem = gen_rtx_MEM (wanted_mode,
2221 plus_constant (XEXP (tem, 0), offset));
2222 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2223 MEM_COPY_ATTRIBUTES (newmem, tem);
2225 /* Make the change and see if the insn remains valid. */
2226 INSN_CODE (insn) = -1;
2227 XEXP (x, 0) = newmem;
2228 XEXP (x, 2) = GEN_INT (pos);
2230 if (recog_memoized (insn) >= 0)
2233 /* Otherwise, restore old position.  XEXP (x, 0) will be
2234 restored later.  */
2235 XEXP (x, 2) = old_pos;
2239 /* If we get here, the bitfield extract insn can't accept a memory
2240 reference. Copy the input into a register. */
2242 tem1 = gen_reg_rtx (GET_MODE (tem));
2243 emit_insn_before (gen_move_insn (tem1, tem), insn);
2250 if (SUBREG_REG (x) == var)
2252 /* If this is a special SUBREG made because VAR was promoted
2253 from a wider mode, replace it with VAR and call ourself
2254 recursively, this time saying that the object previously
2255 had its current mode (by virtue of the SUBREG). */
2257 if (SUBREG_PROMOTED_VAR_P (x))
2260 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2264 /* If this SUBREG makes VAR wider, it has become a paradoxical
2265 SUBREG with VAR in memory, but these aren't allowed at this
2266 stage of the compilation. So load VAR into a pseudo and take
2267 a SUBREG of that pseudo. */
2268 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2270 replacement = find_fixup_replacement (replacements, var);
2271 if (replacement->new == 0)
2272 replacement->new = gen_reg_rtx (GET_MODE (var));
2273 SUBREG_REG (x) = replacement->new;
2277 /* See if we have already found a replacement for this SUBREG.
2278 If so, use it. Otherwise, make a MEM and see if the insn
2279 is recognized. If not, or if we should force MEM into a register,
2280 make a pseudo for this SUBREG. */
2281 replacement = find_fixup_replacement (replacements, x);
2282 if (replacement->new)
2284 *loc = replacement->new;
2288 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2290 INSN_CODE (insn) = -1;
2291 if (! flag_force_mem && recog_memoized (insn) >= 0)
2294 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2300 /* First do special simplification of bit-field references. */
2301 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2302 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2303 optimize_bit_field (x, insn, 0);
2304 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2305 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2306 optimize_bit_field (x, insn, NULL_PTR);
2308 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2309 into a register and then store it back out. */
2310 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2311 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2312 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2313 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2314 > GET_MODE_SIZE (GET_MODE (var))))
2316 replacement = find_fixup_replacement (replacements, var);
2317 if (replacement->new == 0)
2318 replacement->new = gen_reg_rtx (GET_MODE (var));
2320 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2321 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2324 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2325 insn into a pseudo and store the low part of the pseudo into VAR. */
2326 if (GET_CODE (SET_DEST (x)) == SUBREG
2327 && SUBREG_REG (SET_DEST (x)) == var
2328 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2329 > GET_MODE_SIZE (GET_MODE (var))))
2331 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2332 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2333 tem)),
2334 insn);
2339 rtx dest = SET_DEST (x);
2340 rtx src = SET_SRC (x);
2342 rtx outerdest = dest;
2345 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2346 || GET_CODE (dest) == SIGN_EXTRACT
2347 || GET_CODE (dest) == ZERO_EXTRACT)
2348 dest = XEXP (dest, 0);
2350 if (GET_CODE (src) == SUBREG)
2351 src = XEXP (src, 0);
2353 /* If VAR does not appear at the top level of the SET
2354 just scan the lower levels of the tree. */
2356 if (src != var && dest != var)
2359 /* We will need to rerecognize this insn. */
2360 INSN_CODE (insn) = -1;
2363 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2365 /* Since this case will return, ensure we fixup all the
2366 operands here.  */
2367 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2368 insn, replacements);
2369 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2370 insn, replacements);
2371 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2372 insn, replacements);
2374 tem = XEXP (outerdest, 0);
2376 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2377 that may appear inside a ZERO_EXTRACT.
2378 This was legitimate when the MEM was a REG. */
2379 if (GET_CODE (tem) == SUBREG
2380 && SUBREG_REG (tem) == var)
2381 tem = fixup_memory_subreg (tem, insn, 0);
2383 tem = fixup_stack_1 (tem, insn);
2385 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2386 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2387 && ! mode_dependent_address_p (XEXP (tem, 0))
2388 && ! MEM_VOLATILE_P (tem))
2390 enum machine_mode wanted_mode;
2391 enum machine_mode is_mode = GET_MODE (tem);
2392 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2394 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2395 if (wanted_mode == VOIDmode)
2396 wanted_mode = word_mode;
2398 /* If we have a narrower mode, we can do something. */
2399 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2401 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2402 rtx old_pos = XEXP (outerdest, 2);
2405 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2406 offset = (GET_MODE_SIZE (is_mode)
2407 - GET_MODE_SIZE (wanted_mode) - offset);
2409 pos %= GET_MODE_BITSIZE (wanted_mode);
2411 newmem = gen_rtx_MEM (wanted_mode,
2412 plus_constant (XEXP (tem, 0), offset));
2413 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2414 MEM_COPY_ATTRIBUTES (newmem, tem);
2416 /* Make the change and see if the insn remains valid. */
2417 INSN_CODE (insn) = -1;
2418 XEXP (outerdest, 0) = newmem;
2419 XEXP (outerdest, 2) = GEN_INT (pos);
2421 if (recog_memoized (insn) >= 0)
2424 /* Otherwise, restore old position.  XEXP (x, 0) will be
2425 restored later.  */
2426 XEXP (outerdest, 2) = old_pos;
2430 /* If we get here, the bit-field store doesn't allow memory
2431 or isn't located at a constant position. Load the value into
2432 a register, do the store, and put it back into memory. */
2434 tem1 = gen_reg_rtx (GET_MODE (tem));
2435 emit_insn_before (gen_move_insn (tem1, tem), insn);
2436 emit_insn_after (gen_move_insn (tem, tem1), insn);
2437 XEXP (outerdest, 0) = tem1;
2442 /* STRICT_LOW_PART is a no-op on memory references
2443 and it can cause combinations to be unrecognizable,
2444 so eliminate it.  */
2446 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2447 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2449 /* A valid insn to copy VAR into or out of a register
2450 must be left alone, to avoid an infinite loop here.
2451 If the reference to VAR is by a subreg, fix that up,
2452 since SUBREG is not valid for a memref.
2453 Also fix up the address of the stack slot.
2455 Note that we must not try to recognize the insn until
2456 after we know that we have valid addresses and no
2457 (subreg (mem ...) ...) constructs, since these interfere
2458 with determining the validity of the insn. */
2460 if ((SET_SRC (x) == var
2461 || (GET_CODE (SET_SRC (x)) == SUBREG
2462 && SUBREG_REG (SET_SRC (x)) == var))
2463 && (GET_CODE (SET_DEST (x)) == REG
2464 || (GET_CODE (SET_DEST (x)) == SUBREG
2465 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2466 && GET_MODE (var) == promoted_mode
2467 && x == single_set (insn))
2471 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2472 if (replacement->new)
2473 SET_SRC (x) = replacement->new;
2474 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2475 SET_SRC (x) = replacement->new
2476 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2478 SET_SRC (x) = replacement->new
2479 = fixup_stack_1 (SET_SRC (x), insn);
2481 if (recog_memoized (insn) >= 0)
2484 /* INSN is not valid, but we know that we want to
2485 copy SET_SRC (x) to SET_DEST (x) in some way. So
2486 we generate the move and see whether it requires more
2487 than one insn. If it does, we emit those insns and
2488 delete INSN.  Otherwise, we can just replace the pattern
2489 of INSN; we have already verified above that INSN has
2490 no other function than to do X.  */
2492 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2493 if (GET_CODE (pat) == SEQUENCE)
2495 emit_insn_after (pat, insn);
2496 PUT_CODE (insn, NOTE);
2497 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2498 NOTE_SOURCE_FILE (insn) = 0;
2501 PATTERN (insn) = pat;
2506 if ((SET_DEST (x) == var
2507 || (GET_CODE (SET_DEST (x)) == SUBREG
2508 && SUBREG_REG (SET_DEST (x)) == var))
2509 && (GET_CODE (SET_SRC (x)) == REG
2510 || (GET_CODE (SET_SRC (x)) == SUBREG
2511 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2512 && GET_MODE (var) == promoted_mode
2513 && x == single_set (insn))
2517 if (GET_CODE (SET_DEST (x)) == SUBREG)
2518 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2520 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2522 if (recog_memoized (insn) >= 0)
2525 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2526 if (GET_CODE (pat) == SEQUENCE)
2528 emit_insn_after (pat, insn);
2529 PUT_CODE (insn, NOTE);
2530 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2531 NOTE_SOURCE_FILE (insn) = 0;
2534 PATTERN (insn) = pat;
2539 /* Otherwise, storing into VAR must be handled specially
2540 by storing into a temporary and copying that into VAR
2541 with a new insn after this one. Note that this case
2542 will be used when storing into a promoted scalar since
2543 the insn will now have different modes on the input
2544 and output and hence will be invalid (except for the case
2545 of setting it to a constant, which does not need any
2546 change if it is valid). We generate extra code in that case,
2547 but combine.c will eliminate it. */
2552 rtx fixeddest = SET_DEST (x);
2554 /* STRICT_LOW_PART can be discarded, around a MEM. */
2555 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2556 fixeddest = XEXP (fixeddest, 0);
2557 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2558 if (GET_CODE (fixeddest) == SUBREG)
2560 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2561 promoted_mode = GET_MODE (fixeddest);
2564 fixeddest = fixup_stack_1 (fixeddest, insn);
2566 temp = gen_reg_rtx (promoted_mode);
2568 emit_insn_after (gen_move_insn (fixeddest,
2569 gen_lowpart (GET_MODE (fixeddest),
2570 temp)),
2571 insn);
2573 SET_DEST (x) = temp;
2581 /* Nothing special about this RTX; fix its operands. */
2583 fmt = GET_RTX_FORMAT (code);
2584 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2587 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2591 for (j = 0; j < XVECLEN (x, i); j++)
2592 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2593 insn, replacements);
2598 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2599 return an rtx (MEM:m1 newaddr) which is equivalent.
2600 If any insns must be emitted to compute NEWADDR, put them before INSN.
2602 UNCRITICAL nonzero means accept paradoxical subregs.
2603 This is used for subregs found inside REG_NOTES. */
2606 fixup_memory_subreg (x, insn, uncritical)
2607 rtx x;
2608 rtx insn;
2609 int uncritical;
2610 {
2611 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2612 rtx addr = XEXP (SUBREG_REG (x), 0);
2613 enum machine_mode mode = GET_MODE (x);
2614 rtx result;
2616 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2617 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2618 && ! uncritical)
2619 abort ();
2621 if (BYTES_BIG_ENDIAN)
2622 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2623 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2624 addr = plus_constant (addr, offset);
2625 if (!flag_force_addr && memory_address_p (mode, addr))
2626 /* Shortcut if no insns need be emitted. */
2627 return change_address (SUBREG_REG (x), mode, addr);
2628 start_sequence ();
2629 result = change_address (SUBREG_REG (x), mode, addr);
2630 emit_insn_before (gen_sequence (), insn);
2631 end_sequence ();
2632 return result;
2633 }
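/* Illustration, not part of the compiler: the address arithmetic above
   in standalone form.  Turning (SUBREG:m1 (MEM:m2 addr) word) into
   (MEM:m1 (addr + offset)) needs a byte offset; WORD_BYTES and the two
   size parameters are example stand-ins for UNITS_PER_WORD and
   GET_MODE_SIZE.  */

#define WORD_BYTES 4

static int
subreg_mem_offset (int subreg_word, int outer_size, int inner_size,
                   int bytes_big_endian)
{
  int offset = subreg_word * WORD_BYTES;

  /* On big-endian targets a narrow value occupies the high-address end
     of its word, so step past the bytes the outer mode does not use.  */
  if (bytes_big_endian)
    offset += ((inner_size < WORD_BYTES ? inner_size : WORD_BYTES)
               - (outer_size < WORD_BYTES ? outer_size : WORD_BYTES));

  return offset;
}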
2635 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2636 Replace subexpressions of X in place.
2637 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2638 Otherwise return X, with its contents possibly altered.
2640 If any insns must be emitted to compute NEWADDR, put them before INSN.
2642 UNCRITICAL is as in fixup_memory_subreg. */
2645 walk_fixup_memory_subreg (x, insn, uncritical)
2650 register enum rtx_code code;
2657 code = GET_CODE (x);
2659 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2660 return fixup_memory_subreg (x, insn, uncritical);
2662 /* Nothing special about this RTX; fix its operands. */
2664 fmt = GET_RTX_FORMAT (code);
2665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2668 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2672 for (j = 0; j < XVECLEN (x, i); j++)
2673 XVECEXP (x, i, j)
2674 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2675 }
2677 return x;
2678 }
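/* Illustration, not part of the compiler: the in-place recursive
   rewrite pattern used by walk_fixup_memory_subreg above, on an
   invented binary tree.  Children are replaced through the parent's
   slot and the (possibly new) root is handed back to the caller,
   exactly the contract documented for the function.  */

struct tnode
{
  int is_target;                /* stands for SUBREG-of-MEM */
  struct tnode *kid[2];
};

/* Stand-in rewrite: mark the node handled.  In the real code this is
   fixup_memory_subreg, which builds a fresh MEM.  */

static struct tnode *
rewrite_target (struct tnode *t)
{
  t->is_target = 0;
  return t;
}

static struct tnode *
walk_rewrite (struct tnode *t)
{
  int i;

  if (t == 0)
    return 0;

  if (t->is_target)
    return rewrite_target (t);          /* replace this node wholesale */

  for (i = 0; i < 2; i++)
    t->kid[i] = walk_rewrite (t->kid[i]);  /* fix children in place */

  return t;
}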
2680 /* For each memory ref within X, if it refers to a stack slot
2681 with an out of range displacement, put the address in a temp register
2682 (emitting new insns before INSN to load these registers)
2683 and alter the memory ref to use that register.
2684 Replace each such MEM rtx with a copy, to avoid clobberage. */
2687 fixup_stack_1 (x, insn)
2692 register RTX_CODE code = GET_CODE (x);
2697 register rtx ad = XEXP (x, 0);
2698 /* If we have address of a stack slot but it's not valid
2699 (displacement is too large), compute the sum in a register. */
2700 if (GET_CODE (ad) == PLUS
2701 && GET_CODE (XEXP (ad, 0)) == REG
2702 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2703 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2704 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2705 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2706 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2708 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2709 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2710 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2711 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2714 if (memory_address_p (GET_MODE (x), ad))
2717 start_sequence ();
2718 temp = copy_to_reg (ad);
2719 seq = gen_sequence ();
2720 end_sequence ();
2721 emit_insn_before (seq, insn);
2722 return change_address (x, VOIDmode, temp);
2727 fmt = GET_RTX_FORMAT (code);
2728 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2731 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2735 for (j = 0; j < XVECLEN (x, i); j++)
2736 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2742 /* Optimization: a bit-field instruction whose field
2743 happens to be a byte or halfword in memory
2744 can be changed to a move instruction.
2746 We call here when INSN is an insn to examine or store into a bit-field.
2747 BODY is the SET-rtx to be altered.
2749 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2750 (Currently this is called only from function.c, and EQUIV_MEM
2751 is always zero.)  */
2754 optimize_bit_field (body, insn, equiv_mem)
2759 register rtx bitfield;
2762 enum machine_mode mode;
2764 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2765 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2766 bitfield = SET_DEST (body), destflag = 1;
2768 bitfield = SET_SRC (body), destflag = 0;
2770 /* First check that the field being stored has constant size and position
2771 and is in fact a byte or halfword suitably aligned. */
2773 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2774 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2775 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2777 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2779 register rtx memref = 0;
2781 /* Now check that the containing word is memory, not a register,
2782 and that it is safe to change the machine mode. */
2784 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2785 memref = XEXP (bitfield, 0);
2786 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2787 && equiv_mem != 0)
2788 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2789 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2790 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2791 memref = SUBREG_REG (XEXP (bitfield, 0));
2792 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2793 && equiv_mem != 0
2794 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2795 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2797 if (memref
2798 && ! mode_dependent_address_p (XEXP (memref, 0))
2799 && ! MEM_VOLATILE_P (memref))
2801 /* Now adjust the address, first for any subreg'ing
2802 that we are now getting rid of,
2803 and then for which byte of the word is wanted. */
2805 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2808 /* Adjust OFFSET to count bits from low-address byte. */
2809 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2810 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2811 - offset - INTVAL (XEXP (bitfield, 1)));
2813 /* Adjust OFFSET to count bytes from low-address byte. */
2814 offset /= BITS_PER_UNIT;
2815 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2816 {
2817 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2818 if (BYTES_BIG_ENDIAN)
2819 offset -= (MIN (UNITS_PER_WORD,
2820 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2821 - MIN (UNITS_PER_WORD,
2822 GET_MODE_SIZE (GET_MODE (memref))));
2823 }
2825 start_sequence ();
2826 memref = change_address (memref, mode,
2827 plus_constant (XEXP (memref, 0), offset));
2828 insns = get_insns ();
2829 end_sequence ();
2830 emit_insns_before (insns, insn);
2832 /* Store this memory reference where
2833 we found the bit field reference. */
2837 validate_change (insn, &SET_DEST (body), memref, 1);
2838 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2840 rtx src = SET_SRC (body);
2841 while (GET_CODE (src) == SUBREG
2842 && SUBREG_WORD (src) == 0)
2843 src = SUBREG_REG (src);
2844 if (GET_MODE (src) != GET_MODE (memref))
2845 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2846 validate_change (insn, &SET_SRC (body), src, 1);
2848 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2849 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2850 /* This shouldn't happen because anything that didn't have
2851 one of these modes should have got converted explicitly
2852 and then referenced through a subreg.
2853 This is so because the original bit-field was
2854 handled by agg_mode and so its tree structure had
2855 the same mode that memref now has. */
2860 rtx dest = SET_DEST (body);
2862 while (GET_CODE (dest) == SUBREG
2863 && SUBREG_WORD (dest) == 0
2864 && (GET_MODE_CLASS (GET_MODE (dest))
2865 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2866 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2868 dest = SUBREG_REG (dest);
2870 validate_change (insn, &SET_DEST (body), dest, 1);
2872 if (GET_MODE (dest) == GET_MODE (memref))
2873 validate_change (insn, &SET_SRC (body), memref, 1);
2876 /* Convert the mem ref to the destination mode. */
2877 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2880 convert_move (newreg, memref,
2881 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2885 validate_change (insn, &SET_SRC (body), newreg, 1);
2889 /* See if we can convert this extraction or insertion into
2890 a simple move insn. We might not be able to do so if this
2891 was, for example, part of a PARALLEL.
2893 If we succeed, write out any needed conversions. If we fail,
2894 it is hard to guess why we failed, so don't do anything
2895 special; just let the optimization be suppressed. */
2897 if (apply_change_group () && seq)
2898 emit_insns_before (seq, insn);
2899 }
2900 }
2901 }
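/* Illustration, not part of the compiler: the position arithmetic used
   above (and in fixup_var_refs_1) in standalone form.  A constant bit
   position is converted to a byte address, flipping the count when
   bits and bytes are numbered from opposite ends.  The parameters are
   example stand-ins for the machine-mode queries.  */

static long
bitfield_byte_offset (long bitpos, long bitsize, long container_bits,
                      int bits_big_endian, int bytes_big_endian)
{
  /* Make BITPOS count from the low-address byte, which is what the
     address arithmetic requires.  */
  if (bits_big_endian != bytes_big_endian)
    bitpos = container_bits - bitpos - bitsize;

  return bitpos / 8;            /* 8 standing in for BITS_PER_UNIT */
}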
2903 /* These routines are responsible for converting virtual register references
2904 to the actual hard register references once RTL generation is complete.
2906 The following four variables are used for communication between the
2907 routines. They contain the offsets of the virtual registers from their
2908 respective hard registers. */
2910 static int in_arg_offset;
2911 static int var_offset;
2912 static int dynamic_offset;
2913 static int out_arg_offset;
2914 static int cfa_offset;
2916 /* In most machines, the stack pointer register is equivalent to the bottom
2917 of the stack.  */
2919 #ifndef STACK_POINTER_OFFSET
2920 #define STACK_POINTER_OFFSET 0
2923 /* If not defined, pick an appropriate default for the offset of dynamically
2924 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2925 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2927 #ifndef STACK_DYNAMIC_OFFSET
2929 #ifdef ACCUMULATE_OUTGOING_ARGS
2930 /* The bottom of the stack points to the actual arguments. If
2931 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2932 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2933 stack space for register parameters is not pushed by the caller, but
2934 rather part of the fixed stack areas and hence not included in
2935 `current_function_outgoing_args_size'. Nevertheless, we must allow
2936 for it when allocating stack dynamic objects. */
2938 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2939 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2940 (current_function_outgoing_args_size \
2941 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2944 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2945 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2949 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2950 #endif
2951 #endif
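/* Illustration, not part of the compiler: the cases above reduce to
   simple sums.  The figures below are invented example values.  */

static long
example_dynamic_offset (void)
{
  long outgoing_args = 48;      /* current_function_outgoing_args_size */
  long reg_parm_space = 16;     /* REG_PARM_STACK_SPACE (fndecl)       */
  long sp_offset = 8;           /* STACK_POINTER_OFFSET                */

  /* ACCUMULATE_OUTGOING_ARGS with caller-allocated register-parameter
     space: dynamic allocations start 48 + 16 + 8 = 72 bytes above the
     stack pointer.  Without ACCUMULATE_OUTGOING_ARGS the answer would
     be just sp_offset.  */
  return outgoing_args + reg_parm_space + sp_offset;
}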
2953 /* On a few machines, the CFA coincides with the arg pointer. */
2955 #ifndef ARG_POINTER_CFA_OFFSET
2956 #define ARG_POINTER_CFA_OFFSET 0
2960 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2961 its address taken. DECL is the decl for the object stored in the
2962 register, for later use if we do need to force REG into the stack.
2963 REG is overwritten by the MEM like in put_reg_into_stack. */
2966 gen_mem_addressof (reg, decl)
2970 tree type = TREE_TYPE (decl);
2971 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2972 SET_ADDRESSOF_DECL (r, decl);
2973 /* If the original REG was a user-variable, then so is the REG whose
2974 address is being taken. */
2975 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2978 PUT_CODE (reg, MEM);
2979 PUT_MODE (reg, DECL_MODE (decl));
2980 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2981 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2982 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2984 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2985 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2987 return reg;
2988 }
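/* Illustration, not part of the compiler: why overwriting REG in place
   (the PUT_CODE/PUT_MODE calls above) is the right move.  rtx nodes
   are shared, so retagging the node updates every holder of the
   pointer at once; only the *uses* still need fixing, which is what
   the fixup_var_refs call handles.  The tagged struct below is an
   invented miniature of that idea.  */

enum mini_code { MINI_REG, MINI_MEM };

struct mini_node
{
  enum mini_code code;
  int payload;                  /* register number or address */
};

static void
retag_as_mem (struct mini_node *p, int addr)
{
  /* Every existing reference to P now sees a MEM; no renaming pass
     over the holders is required.  */
  p->code = MINI_MEM;
  p->payload = addr;
}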
2990 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2993 flush_addressof (decl)
2996 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2997 && DECL_RTL (decl) != 0
2998 && GET_CODE (DECL_RTL (decl)) == MEM
2999 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3000 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
3001 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3004 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
3007 put_addressof_into_stack (r, ht)
3009 struct hash_table *ht;
3011 tree decl = ADDRESSOF_DECL (r);
3012 rtx reg = XEXP (r, 0);
3014 if (GET_CODE (reg) != REG)
3017 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
3018 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
3019 ADDRESSOF_REGNO (r),
3020 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
3023 /* List of replacements made below in purge_addressof_1 when creating
3024 bitfield insertions. */
3025 static rtx purge_addressof_replacements;
3027 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3028 in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
3029 the stack.  */
3032 purge_addressof_1 (loc, insn, force, store, ht)
3036 struct hash_table *ht;
3043 /* Re-start here to avoid recursion in common cases. */
3050 code = GET_CODE (x);
3052 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3055 /* We must create a copy of the rtx because it was created by
3056 overwriting a REG rtx which is always shared. */
3057 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3059 if (validate_change (insn, loc, sub, 0)
3060 || validate_replace_rtx (x, sub, insn))
3064 sub = force_operand (sub, NULL_RTX);
3065 if (! validate_change (insn, loc, sub, 0)
3066 && ! validate_replace_rtx (x, sub, insn))
3069 insns = gen_sequence ();
3071 emit_insn_before (insns, insn);
3074 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3076 rtx sub = XEXP (XEXP (x, 0), 0);
3079 if (GET_CODE (sub) == MEM)
3081 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3082 MEM_COPY_ATTRIBUTES (sub2, sub);
3083 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3087 if (GET_CODE (sub) == REG
3088 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3090 put_addressof_into_stack (XEXP (x, 0), ht);
3093 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3095 int size_x, size_sub;
3099 /* When processing REG_NOTES look at the list of
3100 replacements done on the insn to find the register that X
3101 was replaced by.  */
3104 for (tem = purge_addressof_replacements; tem != NULL_RTX;
3105 tem = XEXP (XEXP (tem, 1), 1))
3107 rtx y = XEXP (tem, 0);
3108 if (GET_CODE (y) == MEM
3109 && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
3111 /* It can happen that the note may speak of things in
3112 a wider (or just different) mode than the code did.
3113 This is especially true of REG_RETVAL. */
3115 rtx z = XEXP (XEXP (tem, 1), 0);
3116 if (GET_MODE (x) != GET_MODE (y))
3118 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3119 z = SUBREG_REG (z);
3121 /* ??? If we'd gotten into any of the really complex
3122 cases below, I'm not sure we can do a proper
3123 replacement. Might we be able to delete the
3124 note in some cases? */
3125 if (GET_MODE_SIZE (GET_MODE (x))
3126 < GET_MODE_SIZE (GET_MODE (y)))
3129 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3130 && (GET_MODE_SIZE (GET_MODE (x))
3131 > GET_MODE_SIZE (GET_MODE (z))))
3133 /* This can occur as a result of invalid
3134 pointer casts, e.g. float f; ...
3135 *(long long int *)&f.
3136 ??? We could emit a warning here, but
3137 without a line number that wouldn't be
3138 very useful.  */
3139 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3142 z = gen_lowpart (GET_MODE (x), z);
3150 /* There should always be such a replacement. */
3154 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3155 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3157 /* Don't even consider working with paradoxical subregs,
3158 or the moral equivalent seen here. */
3159 if (size_x <= size_sub
3160 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3162 /* Do a bitfield insertion to mirror what would happen
3163 in memory.  */
3169 rtx p = PREV_INSN (insn);
3172 val = gen_reg_rtx (GET_MODE (x));
3173 if (! validate_change (insn, loc, val, 0))
3175 /* Discard the current sequence and put the
3176 ADDRESSOF on stack. */
3180 seq = gen_sequence ();
3182 emit_insn_before (seq, insn);
3183 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3187 store_bit_field (sub, size_x, 0, GET_MODE (x),
3188 val, GET_MODE_SIZE (GET_MODE (sub)),
3189 GET_MODE_SIZE (GET_MODE (sub)));
3191 /* Make sure to unshare any shared rtl that store_bit_field
3192 might have created. */
3193 for (p = get_insns(); p; p = NEXT_INSN (p))
3195 reset_used_flags (PATTERN (p));
3196 reset_used_flags (REG_NOTES (p));
3197 reset_used_flags (LOG_LINKS (p));
3199 unshare_all_rtl (get_insns ());
3201 seq = gen_sequence ();
3203 p = emit_insn_after (seq, insn);
3204 if (NEXT_INSN (insn))
3205 compute_insns_for_mem (NEXT_INSN (insn),
3206 p ? NEXT_INSN (p) : NULL_RTX,
3211 rtx p = PREV_INSN (insn);
3214 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3215 GET_MODE (x), GET_MODE (x),
3216 GET_MODE_SIZE (GET_MODE (sub)),
3217 GET_MODE_SIZE (GET_MODE (sub)));
3219 if (! validate_change (insn, loc, val, 0))
3221 /* Discard the current sequence and put the
3222 ADDRESSOF on stack. */
3227 seq = gen_sequence ();
3229 emit_insn_before (seq, insn);
3230 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3234 /* Remember the replacement so that the same one can be done
3235 on the REG_NOTES. */
3236 purge_addressof_replacements
3237 = gen_rtx_EXPR_LIST (VOIDmode, x,
3238 gen_rtx_EXPR_LIST (VOIDmode, val,
3239 purge_addressof_replacements));
3241 /* We replaced with a reg -- all done. */
3245 else if (validate_change (insn, loc, sub, 0))
3247 /* Remember the replacement so that the same one can be done
3248 on the REG_NOTES. */
3249 purge_addressof_replacements
3250 = gen_rtx_EXPR_LIST (VOIDmode, x,
3251 gen_rtx_EXPR_LIST (VOIDmode, sub,
3252 purge_addressof_replacements));
3256 /* else give up and put it into the stack */
3258 else if (code == ADDRESSOF)
3260 put_addressof_into_stack (x, ht);
3263 else if (code == SET)
3265 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3266 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3270 /* Scan all subexpressions. */
3271 fmt = GET_RTX_FORMAT (code);
3272 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3275 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3276 else if (*fmt == 'E')
3277 for (j = 0; j < XVECLEN (x, i); j++)
3278 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3282 /* Return a new hash table entry in HT. */
3284 static struct hash_entry *
3285 insns_for_mem_newfunc (he, ht, k)
3286 struct hash_entry *he;
3287 struct hash_table *ht;
3288 hash_table_key k ATTRIBUTE_UNUSED;
3290 struct insns_for_mem_entry *ifmhe;
3294 ifmhe = ((struct insns_for_mem_entry *)
3295 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3296 ifmhe->insns = NULL_RTX;
3301 /* Return a hash value for K, a REG. */
3303 static unsigned long
3304 insns_for_mem_hash (k)
3307 /* K is really a RTX. Just use the address as the hash value. */
3308 return (unsigned long) k;
3309 }
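/* Illustration, not part of the compiler: hashing by address is sound
   here only because REG rtxs are shared, so pointer identity coincides
   with key identity.  A standalone equivalent, with uintptr_t for the
   strictly portable cast:  */

#include <stdint.h>

static unsigned long
ptr_hash (const void *key)
{
  /* The object's address is distributed well enough to serve as its
     own hash value; no mixing is needed for correctness.  */
  return (unsigned long) (uintptr_t) key;
}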
3311 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3314 insns_for_mem_comp (k1, k2)
3321 struct insns_for_mem_walk_info {
3322 /* The hash table that we are using to record which INSNs use which
3323 MEMs.  */
3324 struct hash_table *ht;
3326 /* The INSN we are currently processing.  */
3329 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3330 to find the insns that use the REGs in the ADDRESSOFs. */
3334 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3335 that might be used in an ADDRESSOF expression, record this INSN in
3336 the hash table given by DATA (which is really a pointer to an
3337 insns_for_mem_walk_info structure). */
3340 insns_for_mem_walk (r, data)
3344 struct insns_for_mem_walk_info *ifmwi
3345 = (struct insns_for_mem_walk_info *) data;
3347 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3348 && GET_CODE (XEXP (*r, 0)) == REG)
3349 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3350 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3352 /* Look up this REG in the hashtable, creating it if necessary.  */
3353 struct insns_for_mem_entry *ifme
3354 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3359 /* If we have not already recorded this INSN, do so now.  Since
3360 we process the INSNs in order, we know that if we have
3361 recorded it, it must be at the front of the list.  */
3362 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3364 /* We do the allocation on the same obstack as is used for
3365 the hash table since this memory will not be used once
3366 the hash table is deallocated. */
3367 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3368 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3377 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3378 which REGs in HT. */
3381 compute_insns_for_mem (insns, last_insn, ht)
3384 struct hash_table *ht;
3387 struct insns_for_mem_walk_info ifmwi;
3390 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3391 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3392 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3393 {
3394 ifmwi.insn = insn;
3395 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3396 }
3397 }
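/* Illustration, not part of the compiler: the two-pass protocol of
   compute_insns_for_mem above, with arrays standing in for the insn
   chain and the hash table.  Pass 0 creates a table entry per key;
   pass 1 appends each user to its key's list, mirroring ifmwi->pass.
   All names here are invented.  */

#include <string.h>

#define MAX_KEYS 16
#define MAX_USERS 8

struct key_uses
{
  const char *key;
  int users[MAX_USERS];
  int n_users;
};

/* Find KEY in TAB, optionally creating it, like hash_lookup above.  */

static int
key_index (struct key_uses *tab, int *n_tab, const char *key, int create)
{
  int i;

  for (i = 0; i < *n_tab; i++)
    if (strcmp (tab[i].key, key) == 0)
      return i;

  if (! create || *n_tab >= MAX_KEYS)
    return -1;

  tab[*n_tab].key = key;
  tab[*n_tab].n_users = 0;
  return (*n_tab)++;
}

/* INSNS[i] names the key insn i uses (or is null).  Pass 0 registers
   the keys; pass 1 records the users.  */

static void
two_pass_index (const char **insns, int n_insns,
                struct key_uses *tab, int *n_tab)
{
  int pass, i;

  for (pass = 0; pass < 2; pass++)
    for (i = 0; i < n_insns; i++)
      if (insns[i])
        {
          int k = key_index (tab, n_tab, insns[i], pass == 0);

          if (pass == 1 && k >= 0 && tab[k].n_users < MAX_USERS)
            tab[k].users[tab[k].n_users++] = i;
        }
}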
3399 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3400 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3401 stack.  */
3404 purge_addressof (insns)
3408 struct hash_table ht;
3410 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3411 requires a fixup pass over the instruction stream to correct
3412 INSNs that depended on the REG being a REG, and not a MEM. But,
3413 these fixup passes are slow.  Furthermore, most MEMs are not
3414 mentioned in very many instructions. So, we speed up the process
3415 by pre-calculating which REGs occur in which INSNs; that allows
3416 us to perform the fixup passes much more quickly. */
3417 hash_table_init (&ht,
3418 insns_for_mem_newfunc,
3420 insns_for_mem_comp);
3421 compute_insns_for_mem (insns, NULL_RTX, &ht);
3423 for (insn = insns; insn; insn = NEXT_INSN (insn))
3424 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3425 || GET_CODE (insn) == CALL_INSN)
3427 purge_addressof_1 (&PATTERN (insn), insn,
3428 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3429 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3433 hash_table_free (&ht);
3434 purge_addressof_replacements = 0;
3437 /* Pass through the INSNS of function FNDECL and convert virtual register
3438 references to hard register references. */
3441 instantiate_virtual_regs (fndecl, insns)
3448 /* Compute the offsets to use for this function. */
3449 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3450 var_offset = STARTING_FRAME_OFFSET;
3451 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3452 out_arg_offset = STACK_POINTER_OFFSET;
3453 cfa_offset = ARG_POINTER_CFA_OFFSET;
3455 /* Scan all variables and parameters of this function. For each that is
3456 in memory, instantiate all virtual registers if the result is a valid
3457 address. If not, we do it later. That will handle most uses of virtual
3458 regs on many machines. */
3459 instantiate_decls (fndecl, 1);
3461 /* Initialize recognition, indicating that volatile is OK. */
3464 /* Scan through all the insns, instantiating every virtual register still
3465 present.  */
3466 for (insn = insns; insn; insn = NEXT_INSN (insn))
3467 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3468 || GET_CODE (insn) == CALL_INSN)
3470 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3471 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3474 /* Instantiate the stack slots for the parm registers, for later use in
3475 addressof elimination. */
3476 for (i = 0; i < max_parm_reg; ++i)
3477 if (parm_reg_stack_loc[i])
3478 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3480 /* Now instantiate the remaining register equivalences for debugging info.
3481 These will not be valid addresses. */
3482 instantiate_decls (fndecl, 0);
3484 /* Indicate that, from now on, assign_stack_local should use
3485 frame_pointer_rtx. */
3486 virtuals_instantiated = 1;
3487 }
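/* Illustration, not part of the compiler: the whole virtual-to-hard
   mapping applied above, laid out as a table.  The hard register names
   are symbolic stand-ins; the offsets are the five file-static values
   computed at the top of instantiate_virtual_regs.  */

struct vreg_map
{
  const char *hard_reg;         /* symbolic name of the replacement */
  int *offset;                  /* per-function displacement to add */
};

static const struct vreg_map vreg_table[] =
{
  { "arg_pointer",   &in_arg_offset },   /* virtual_incoming_args_rtx */
  { "frame_pointer", &var_offset },      /* virtual_stack_vars_rtx    */
  { "stack_pointer", &dynamic_offset },  /* virtual_stack_dynamic_rtx */
  { "stack_pointer", &out_arg_offset },  /* virtual_outgoing_args_rtx */
  { "arg_pointer",   &cfa_offset },      /* virtual_cfa_rtx           */
};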
3489 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3490 all virtual registers in their DECL_RTL's.
3492 If VALID_ONLY, do this only if the resulting address is still valid.
3493 Otherwise, always do it. */
3496 instantiate_decls (fndecl, valid_only)
3502 if (DECL_SAVED_INSNS (fndecl))
3503 /* When compiling an inline function, the obstack used for
3504 rtl allocation is the maybepermanent_obstack. Calling
3505 `resume_temporary_allocation' switches us back to that
3506 obstack while we process this function's parameters. */
3507 resume_temporary_allocation ();
3509 /* Process all parameters of the function. */
3510 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3512 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3514 instantiate_decl (DECL_RTL (decl), size, valid_only);
3516 /* If the parameter was promoted, then the incoming RTL mode may be
3517 larger than the declared type size.  We must use the larger of
3518 the two sizes.  */
3519 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3520 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3523 /* Now process all variables defined in the function or its subblocks. */
3524 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3526 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3528 /* Save all rtl allocated for this function by raising the
3529 high-water mark on the maybepermanent_obstack. */
3531 /* All further rtl allocation is now done in the current_obstack. */
3532 rtl_in_current_obstack ();
3536 /* Subroutine of instantiate_decls: Process all decls in the given
3537 BLOCK node and all its subblocks. */
3540 instantiate_decls_1 (let, valid_only)
3546 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3547 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3550 /* Process all subblocks. */
3551 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3552 instantiate_decls_1 (t, valid_only);
3555 /* Subroutine of the preceding procedures: Given RTL representing a
3556 decl and the size of the object, do any instantiation required.
3558 If VALID_ONLY is non-zero, it means that the RTL should only be
3559 changed if the new address is valid. */
3562 instantiate_decl (x, size, valid_only)
3567 enum machine_mode mode;
3570 /* If this is not a MEM, no need to do anything. Similarly if the
3571 address is a constant or a register that is not a virtual register. */
3573 if (x == 0 || GET_CODE (x) != MEM)
3577 if (CONSTANT_P (addr)
3578 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3579 || (GET_CODE (addr) == REG
3580 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3581 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3584 /* If we should only do this if the address is valid, copy the address.
3585 We need to do this so we can undo any changes that might make the
3586 address invalid.  This copy is unfortunate, but probably can't be
3587 avoided.  */
3590 addr = copy_rtx (addr);
3592 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3596 /* Now verify that the resulting address is valid for every integer or
3597 floating-point mode up to and including SIZE bytes long. We do this
3598 since the object might be accessed in any mode and frame addresses
3599 are shared.  */
3601 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3602 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3603 mode = GET_MODE_WIDER_MODE (mode))
3604 if (! memory_address_p (mode, addr))
3607 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3608 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3609 mode = GET_MODE_WIDER_MODE (mode))
3610 if (! memory_address_p (mode, addr))
3614 /* Put back the address now that we have updated it and we either know
3615 it is valid or we don't care whether it is valid.  */
3616 *loc = addr;
3617 }
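/* Illustration, not part of the compiler: the validation loop above in
   standalone form.  An address must be usable at every access size up
   to the object's size, since the object may later be referenced in
   any machine mode.  valid_at_size is an invented predicate standing
   in for memory_address_p, and the power-of-two walk approximates the
   GET_MODE_WIDER_MODE chains.  */

static int
address_ok_for_object (long addr, long size,
                       int (*valid_at_size) (long addr, long access))
{
  long access;

  for (access = 1; access <= size; access *= 2)
    if (! valid_at_size (addr, access))
      return 0;

  return 1;
}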
3620 /* Given a pointer to a piece of rtx and an optional pointer to the
3621 containing object, instantiate any virtual registers present in it.
3623 If EXTRA_INSNS, we always do the replacement and generate
3624 any extra insns before OBJECT.  If it is zero, we do nothing if replacement
3625 is not possible.
3627 Return 1 if we either had nothing to do or if we were able to do the
3628 needed replacement. Return 0 otherwise; we only return zero if
3629 EXTRA_INSNS is zero.
3631 We first try some simple transformations to avoid the creation of extra
3632 insns.  */
3635 instantiate_virtual_regs_1 (loc, object, extra_insns)
3643 HOST_WIDE_INT offset;
3649 /* Re-start here to avoid recursion in common cases. */
3656 code = GET_CODE (x);
3658 /* Check for some special cases. */
3675 /* We are allowed to set the virtual registers. This means that
3676 the actual register should receive the source minus the
3677 appropriate offset. This is used, for example, in the handling
3678 of non-local gotos. */
3679 if (SET_DEST (x) == virtual_incoming_args_rtx)
3680 new = arg_pointer_rtx, offset = - in_arg_offset;
3681 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3682 new = frame_pointer_rtx, offset = - var_offset;
3683 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3684 new = stack_pointer_rtx, offset = - dynamic_offset;
3685 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3686 new = stack_pointer_rtx, offset = - out_arg_offset;
3687 else if (SET_DEST (x) == virtual_cfa_rtx)
3688 new = arg_pointer_rtx, offset = - cfa_offset;
3692 /* The only valid sources here are PLUS or REG. Just do
3693 the simplest possible thing to handle them. */
3694 if (GET_CODE (SET_SRC (x)) != REG
3695 && GET_CODE (SET_SRC (x)) != PLUS)
3699 if (GET_CODE (SET_SRC (x)) != REG)
3700 temp = force_operand (SET_SRC (x), NULL_RTX);
3703 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3707 emit_insns_before (seq, object);
3710 if (! validate_change (object, &SET_SRC (x), temp, 0)
3717 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3722 /* Handle special case of virtual register plus constant. */
3723 if (CONSTANT_P (XEXP (x, 1)))
3725 rtx old, new_offset;
3727 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3728 if (GET_CODE (XEXP (x, 0)) == PLUS)
3730 rtx inner = XEXP (XEXP (x, 0), 0);
3732 if (inner == virtual_incoming_args_rtx)
3733 new = arg_pointer_rtx, offset = in_arg_offset;
3734 else if (inner == virtual_stack_vars_rtx)
3735 new = frame_pointer_rtx, offset = var_offset;
3736 else if (inner == virtual_stack_dynamic_rtx)
3737 new = stack_pointer_rtx, offset = dynamic_offset;
3738 else if (inner == virtual_outgoing_args_rtx)
3739 new = stack_pointer_rtx, offset = out_arg_offset;
3740 else if (inner == virtual_cfa_rtx)
3741 new = arg_pointer_rtx, offset = cfa_offset;
3748 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3750 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3753 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3754 new = arg_pointer_rtx, offset = in_arg_offset;
3755 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3756 new = frame_pointer_rtx, offset = var_offset;
3757 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3758 new = stack_pointer_rtx, offset = dynamic_offset;
3759 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3760 new = stack_pointer_rtx, offset = out_arg_offset;
3761 else if (XEXP (x, 0) == virtual_cfa_rtx)
3762 new = arg_pointer_rtx, offset = cfa_offset;
3765 /* We know the second operand is a constant. Unless the
3766 first operand is a REG (which has been already checked),
3767 it needs to be checked. */
3768 if (GET_CODE (XEXP (x, 0)) != REG)
3776 new_offset = plus_constant (XEXP (x, 1), offset);
3778 /* If the new constant is zero, try to replace the sum with just
3779 the register.  */
3780 if (new_offset == const0_rtx
3781 && validate_change (object, loc, new, 0))
3784 /* Next try to replace the register and new offset.
3785 There are two changes to validate here and we can't assume that
3786 in the case of old offset equals new just changing the register
3787 will yield a valid insn. In the interests of a little efficiency,
3788 however, we only call validate change once (we don't queue up the
3789 changes and then call apply_change_group). */
3793 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3794 : (XEXP (x, 0) = new,
3795 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3803 /* Otherwise copy the new constant into a register and replace
3804 the constant with that register.  */
3805 temp = gen_reg_rtx (Pmode);
3807 if (validate_change (object, &XEXP (x, 1), temp, 0))
3808 emit_insn_before (gen_move_insn (temp, new_offset), object);
3811 /* If that didn't work, replace this expression with a
3812 register containing the sum. */
3815 new = gen_rtx_PLUS (Pmode, new, new_offset);
3818 temp = force_operand (new, NULL_RTX);
3822 emit_insns_before (seq, object);
3823 if (! validate_change (object, loc, temp, 0)
3824 && ! validate_replace_rtx (x, temp, object))
3832 /* Fall through to generic two-operand expression case. */
3838 case DIV: case UDIV:
3839 case MOD: case UMOD:
3840 case AND: case IOR: case XOR:
3841 case ROTATERT: case ROTATE:
3842 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3844 case GE: case GT: case GEU: case GTU:
3845 case LE: case LT: case LEU: case LTU:
3846 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3847 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3852 /* Most cases of MEM that convert to valid addresses have already been
3853 handled by our scan of decls. The only special handling we
3854 need here is to make a copy of the rtx to ensure it isn't being
3855 shared if we have to change it to a pseudo.
3857 If the rtx is a simple reference to an address via a virtual register,
3858 it can potentially be shared. In such cases, first try to make it
3859 a valid address, which can also be shared. Otherwise, copy it and
3862 First check for common cases that need no processing. These are
3863 usually due to instantiation already being done on a previous instance
3864 of a shared rtx.  */
3867 if (CONSTANT_ADDRESS_P (temp)
3868 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3869 || temp == arg_pointer_rtx
3871 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3872 || temp == hard_frame_pointer_rtx
3874 || temp == frame_pointer_rtx)
3877 if (GET_CODE (temp) == PLUS
3878 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3879 && (XEXP (temp, 0) == frame_pointer_rtx
3880 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3881 || XEXP (temp, 0) == hard_frame_pointer_rtx
3883 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3884 || XEXP (temp, 0) == arg_pointer_rtx
3889 if (temp == virtual_stack_vars_rtx
3890 || temp == virtual_incoming_args_rtx
3891 || (GET_CODE (temp) == PLUS
3892 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3893 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3894 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3896 /* This MEM may be shared. If the substitution can be done without
3897 the need to generate new pseudos, we want to do it in place
3898 so all copies of the shared rtx benefit. The call below will
3899 only make substitutions if the resulting address is still
3902 Note that we cannot pass X as the object in the recursive call
3903 since the insn being processed may not allow all valid
3904 addresses.  However, if we were not passed an object, we can
3905 only modify X without copying it if X will have a valid
3906 address.
3908 ??? Also note that this can still lose if OBJECT is an insn that
3909 has fewer restrictions on an address than some other insn.
3910 In that case, we will modify the shared address. This case
3911 doesn't seem very likely, though. One case where this could
3912 happen is in the case of a USE or CLOBBER reference, but we
3913 take care of that below. */
3915 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3916 object ? object : x, 0))
3919 /* Otherwise make a copy and process that copy. We copy the entire
3920 RTL expression since it might be a PLUS which could also be
3921 shared.  */
3922 *loc = x = copy_rtx (x);
3925 /* Fall through to generic unary operation case. */
3927 case STRICT_LOW_PART:
3929 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3930 case SIGN_EXTEND: case ZERO_EXTEND:
3931 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3932 case FLOAT: case FIX:
3933 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3937 /* These cases either have just one operand or we know that we need not
3938 check the rest of the operands.  */
3944 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3945 go ahead and make the invalid one, but do it to a copy. For a REG,
3946 just make the recursive call, since there's no chance of a problem. */
3948 if ((GET_CODE (XEXP (x, 0)) == MEM
3949 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3951 || (GET_CODE (XEXP (x, 0)) == REG
3952 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3955 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3960 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3961 in front of this insn and substitute the temporary. */
3962 if (x == virtual_incoming_args_rtx)
3963 new = arg_pointer_rtx, offset = in_arg_offset;
3964 else if (x == virtual_stack_vars_rtx)
3965 new = frame_pointer_rtx, offset = var_offset;
3966 else if (x == virtual_stack_dynamic_rtx)
3967 new = stack_pointer_rtx, offset = dynamic_offset;
3968 else if (x == virtual_outgoing_args_rtx)
3969 new = stack_pointer_rtx, offset = out_arg_offset;
3970 else if (x == virtual_cfa_rtx)
3971 new = arg_pointer_rtx, offset = cfa_offset;
3975 temp = plus_constant (new, offset);
3976 if (!validate_change (object, loc, temp, 0))
3982 temp = force_operand (temp, NULL_RTX);
3986 emit_insns_before (seq, object);
3987 if (! validate_change (object, loc, temp, 0)
3988 && ! validate_replace_rtx (x, temp, object))
3996 if (GET_CODE (XEXP (x, 0)) == REG)
3999 else if (GET_CODE (XEXP (x, 0)) == MEM)
4001 /* If we have a (addressof (mem ..)), do any instantiation inside
4002 since we know we'll be making the inside valid when we finally
4003 remove the ADDRESSOF. */
4004 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4013 /* Scan all subexpressions. */
4014 fmt = GET_RTX_FORMAT (code);
4015 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4018 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4021 else if (*fmt == 'E')
4022 for (j = 0; j < XVECLEN (x, i); j++)
4023 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4030 /* Optimization: assuming this function does not receive nonlocal gotos,
4031 delete the handlers for such, as well as the insns to establish
4032 and disestablish them. */
4038 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4040 /* Delete the handler by turning off the flag that would
4041 prevent jump_optimize from deleting it.
4042 Also permit deletion of the nonlocal labels themselves
4043 if nothing local refers to them. */
4044 if (GET_CODE (insn) == CODE_LABEL)
4048 LABEL_PRESERVE_P (insn) = 0;
4050 /* Remove it from the nonlocal_label list, to avoid confusing flow. */
4052 for (t = nonlocal_labels, last_t = 0; t;
4053 last_t = t, t = TREE_CHAIN (t))
4054 if (DECL_RTL (TREE_VALUE (t)) == insn)
4059 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4061 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4064 if (GET_CODE (insn) == INSN)
4068 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4069 if (reg_mentioned_p (t, PATTERN (insn)))
4075 || (nonlocal_goto_stack_level != 0
4076 && reg_mentioned_p (nonlocal_goto_stack_level,
4083 /* Output a USE for any register use in RTL.
4084 This is used with -noreg to mark the extent of lifespan
4085 of any registers used in a user-visible variable's DECL_RTL. */
4091 if (GET_CODE (rtl) == REG)
4092 /* This is a register variable. */
4093 emit_insn (gen_rtx_USE (VOIDmode, rtl));
4094 else if (GET_CODE (rtl) == MEM
4095 && GET_CODE (XEXP (rtl, 0)) == REG
4096 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4097 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4098 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4099 /* This is a variable-sized structure. */
4100 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
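/* For illustration: if DECL_RTL is the pseudo (reg/v:SI 42), the call
   above emits a (use (reg/v:SI 42)) insn, which keeps pseudo 42 alive
   up to this point in the chain even after the insns computing it
   have been optimized away.  (A sketch; the exact printed form of the
   insn varies.)  */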
4103 /* Like use_variable except that it outputs the USEs after INSN
4104 instead of at the end of the insn-chain. */
4107 use_variable_after (rtl, insn)
4110 if (GET_CODE (rtl) == REG)
4111 /* This is a register variable. */
4112 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4113 else if (GET_CODE (rtl) == MEM
4114 && GET_CODE (XEXP (rtl, 0)) == REG
4115 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4116 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4117 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4118 /* This is a variable-sized structure. */
4119 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4125 return max_parm_reg;
4128 /* Return the first insn following those generated by `assign_parms'. */
4131 get_first_nonparm_insn ()
4134 return NEXT_INSN (last_parm_insn);
4135 return get_insns ();
4138 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4139 Crash if there is none. */
4142 get_first_block_beg ()
4144 register rtx searcher;
4145 register rtx insn = get_first_nonparm_insn ();
4147 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4148 if (GET_CODE (searcher) == NOTE
4149 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4152 abort (); /* Invalid call to this function. (See comments above.) */
4156 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4157 This means a type for which function calls must pass an address to the
4158 function or get an address back from the function.
4159 EXP may be a type node or an expression (whose type is tested). */
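/* Example (a sketch; the outcome is target-dependent): on a machine
   whose function-value register is a single call-clobbered word, an
   `int' result fits in that register and this returns 0, while a
   struct rejected by RETURN_IN_MEMORY, or one needing more
   consecutive hard regs than are call-clobbered, makes this return 1,
   so callers must pass an address for the returned value.  */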
4162 aggregate_value_p (exp)
4165 int i, regno, nregs;
4168 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4171 type = TREE_TYPE (exp);
4173 if (RETURN_IN_MEMORY (type))
4175 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4176 and thus can't be returned in registers. */
4177 if (TREE_ADDRESSABLE (type))
4179 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4181 /* Make sure we have suitable call-clobbered regs to return
4182 the value in; if not, we must return it in memory. */
4183 reg = hard_function_value (type, 0);
4185 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4187 if (GET_CODE (reg) != REG)
4190 regno = REGNO (reg);
4191 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4192 for (i = 0; i < nregs; i++)
4193 if (! call_used_regs[regno + i])
4198 /* Assign RTL expressions to the function's parameters.
4199 This may involve copying them into registers and using
4200 those registers as the RTL for them.
4202 If SECOND_TIME is non-zero it means that this function is being
4203 called a second time. This is done by integrate.c when a function's
4204 compilation is deferred. We need to come back here in case the
4205 FUNCTION_ARG macro computes items needed for the rest of the compilation
4206 (such as changing which registers are fixed or caller-saved). But suppress
4207 writing any insns or setting DECL_RTL of anything in this case. */
4210 assign_parms (fndecl, second_time)
4215 register rtx entry_parm = 0;
4216 register rtx stack_parm = 0;
4217 CUMULATIVE_ARGS args_so_far;
4218 enum machine_mode promoted_mode, passed_mode;
4219 enum machine_mode nominal_mode, promoted_nominal_mode;
4221 /* Total space needed so far for args on the stack,
4222 given as a constant and a tree-expression. */
4223 struct args_size stack_args_size;
4224 tree fntype = TREE_TYPE (fndecl);
4225 tree fnargs = DECL_ARGUMENTS (fndecl);
4226 /* This is used for the arg pointer when referring to stack args. */
4227 rtx internal_arg_pointer;
4228 /* This is a dummy PARM_DECL that we used for the function result if
4229 the function returns a structure. */
4230 tree function_result_decl = 0;
4231 #ifdef SETUP_INCOMING_VARARGS
4232 int varargs_setup = 0;
4234 rtx conversion_insns = 0;
4236 /* Nonzero if the last arg is named `__builtin_va_alist',
4237 which is used on some machines for old-fashioned non-ANSI varargs.h;
4238 this should be stuck onto the stack as if it had arrived there. */
4240 = (current_function_varargs
4242 && (parm = tree_last (fnargs)) != 0
4244 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4245 "__builtin_va_alist")));
4247 /* Nonzero if function takes extra anonymous args.
4248 This means the last named arg must be on the stack
4249 right before the anonymous ones. */
4251 = (TYPE_ARG_TYPES (fntype) != 0
4252 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4253 != void_type_node));
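/* For example: a prototype `int f (const char *fmt, ...)' yields a
   TYPE_ARG_TYPES chain whose last TREE_VALUE is the pointer type, not
   void_type_node, so STDARG is 1; for `int f (int a)' the chain ends
   in void_type_node and STDARG is 0.  An unprototyped function has a
   null TYPE_ARG_TYPES and is not stdarg.  */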
4255 current_function_stdarg = stdarg;
4257 /* If the reg that the virtual arg pointer will be translated into is
4258 not a fixed reg or is the stack pointer, make a copy of the virtual
4259 arg pointer, and address parms via the copy. The frame pointer is
4260 considered fixed even though it is not marked as such.
4262 The second time through, simply use ap to avoid generating rtx. */
4264 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4265 || ! (fixed_regs[ARG_POINTER_REGNUM]
4266 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4268 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4270 internal_arg_pointer = virtual_incoming_args_rtx;
4271 current_function_internal_arg_pointer = internal_arg_pointer;
4273 stack_args_size.constant = 0;
4274 stack_args_size.var = 0;
4276 /* If struct value address is treated as the first argument, make it so. */
4277 if (aggregate_value_p (DECL_RESULT (fndecl))
4278 && ! current_function_returns_pcc_struct
4279 && struct_value_incoming_rtx == 0)
4281 tree type = build_pointer_type (TREE_TYPE (fntype));
4283 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4285 DECL_ARG_TYPE (function_result_decl) = type;
4286 TREE_CHAIN (function_result_decl) = fnargs;
4287 fnargs = function_result_decl;
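/* Sketch of the effect: for `struct big f (int x)' returning in
   memory on a target with no struct_value_incoming_rtx, the list is
   rewritten as if the source had been `f (struct big *<retval>, int x)';
   the dummy PARM_DECL built above is then processed by the loop
   below like any ordinary parameter.  (`<retval>' is only a label for
   this sketch; the decl is anonymous.)  */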
4290 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4291 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4292 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4294 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4295 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4297 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4300 /* We haven't yet found an argument that we must push and pretend the caller did. */
4302 current_function_pretend_args_size = 0;
4304 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4306 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4307 struct args_size stack_offset;
4308 struct args_size arg_size;
4309 int passed_pointer = 0;
4310 int did_conversion = 0;
4311 tree passed_type = DECL_ARG_TYPE (parm);
4312 tree nominal_type = TREE_TYPE (parm);
4315 /* Set LAST_NAMED if this is the last named arg before some anonymous args. */
4317 int last_named = ((TREE_CHAIN (parm) == 0
4318 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4319 && (stdarg || current_function_varargs));
4320 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4321 most machines, if this is a varargs/stdarg function, then we treat
4322 the last named arg as if it were anonymous too. */
4323 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4325 if (TREE_TYPE (parm) == error_mark_node
4326 /* This can happen after weird syntax errors
4327 or if an enum type is defined among the parms. */
4328 || TREE_CODE (parm) != PARM_DECL
4329 || passed_type == NULL)
4331 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4332 = gen_rtx_MEM (BLKmode, const0_rtx);
4333 TREE_USED (parm) = 1;
4337 /* For varargs.h function, save info about regs and stack space
4338 used by the individual args, not including the va_alist arg. */
4339 if (hide_last_arg && last_named)
4340 current_function_args_info = args_so_far;
4342 /* Find mode of arg as it is passed, and mode of arg
4343 as it should be during execution of this function. */
4344 passed_mode = TYPE_MODE (passed_type);
4345 nominal_mode = TYPE_MODE (nominal_type);
4347 /* If the parm's mode is VOID, its value doesn't matter;
4348 avoid the usual things like emit_move_insn, which could crash. */
4349 if (nominal_mode == VOIDmode)
4351 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4355 /* If the parm is to be passed as a transparent union, use the
4356 type of the first field for the tests below. We have already
4357 verified that the modes are the same. */
4358 if (DECL_TRANSPARENT_UNION (parm)
4359 || TYPE_TRANSPARENT_UNION (passed_type))
4360 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4362 /* See if this arg was passed by invisible reference. It is if
4363 it is an object whose size depends on the contents of the
4364 object itself or if the machine requires these objects be passed that way. */
4367 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4368 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4369 || TREE_ADDRESSABLE (passed_type)
4370 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4371 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4372 passed_type, named_arg)
4376 passed_type = nominal_type = build_pointer_type (passed_type);
4378 passed_mode = nominal_mode = Pmode;
4381 promoted_mode = passed_mode;
4383 #ifdef PROMOTE_FUNCTION_ARGS
4384 /* Compute the mode to which the arg is actually extended. */
4385 unsignedp = TREE_UNSIGNED (passed_type);
4386 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4389 /* Let machine desc say which reg (if any) the parm arrives in.
4390 0 means it arrives on the stack. */
4391 #ifdef FUNCTION_INCOMING_ARG
4392 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4393 passed_type, named_arg);
4395 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4396 passed_type, named_arg);
4399 if (entry_parm == 0)
4400 promoted_mode = passed_mode;
4402 #ifdef SETUP_INCOMING_VARARGS
4403 /* If this is the last named parameter, do any required setup for
4404 varargs or stdargs. We need to know about the case of this being an
4405 addressable type, in which case we skip the registers it
4406 would have arrived in.
4408 For stdargs, LAST_NAMED will be set for two parameters, the one that
4409 is actually the last named, and the dummy parameter. We only
4410 want to do this action once.
4412 Also, indicate when RTL generation is to be suppressed. */
4413 if (last_named && !varargs_setup)
4415 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4416 current_function_pretend_args_size,
4422 /* Determine parm's home in the stack,
4423 in case it arrives in the stack or we should pretend it did.
4425 Compute the stack position and rtx where the argument arrives
4428 There is one complexity here: If this was a parameter that would
4429 have been passed in registers, but wasn't only because it is
4430 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4431 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4432 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4433 0 as it was the previous time. */
4435 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4436 locate_and_pad_parm (promoted_mode, passed_type,
4437 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4440 #ifdef FUNCTION_INCOMING_ARG
4441 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4443 pretend_named) != 0,
4445 FUNCTION_ARG (args_so_far, promoted_mode,
4447 pretend_named) != 0,
4450 fndecl, &stack_args_size, &stack_offset, &arg_size);
4454 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4456 if (offset_rtx == const0_rtx)
4457 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4459 stack_parm = gen_rtx_MEM (promoted_mode,
4460 gen_rtx_PLUS (Pmode,
4461 internal_arg_pointer,
4464 /* If this is a memory ref that contains aggregate components,
4465 mark it as such for cse and loop optimize. Likewise if it is readonly. */
4467 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4468 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4469 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4472 /* If this parameter was passed both in registers and in the stack,
4473 use the copy on the stack. */
4474 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4477 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4478 /* If this parm was passed part in regs and part in memory,
4479 pretend it arrived entirely in memory
4480 by pushing the register-part onto the stack.
4482 In the special case of a DImode or DFmode that is split,
4483 we could put it together in a pseudoreg directly,
4484 but for now that's not worth bothering with. */
4488 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4489 passed_type, named_arg);
4493 current_function_pretend_args_size
4494 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4495 / (PARM_BOUNDARY / BITS_PER_UNIT)
4496 * (PARM_BOUNDARY / BITS_PER_UNIT));
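/* Worked example with hypothetical target values: UNITS_PER_WORD == 4,
   PARM_BOUNDARY == 64, nregs == 3.  The register part is 3 * 4 = 12
   bytes, and the expression above rounds it up to the 8-byte parm
   boundary: ((12 + 8 - 1) / 8) * 8 == 16 bytes of pretend args.  */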
4500 /* Handle calls that pass values in multiple non-contiguous
4501 locations. The Irix 6 ABI has examples of this. */
4502 if (GET_CODE (entry_parm) == PARALLEL)
4503 emit_group_store (validize_mem (stack_parm), entry_parm,
4504 int_size_in_bytes (TREE_TYPE (parm)),
4505 (TYPE_ALIGN (TREE_TYPE (parm))
4508 move_block_from_reg (REGNO (entry_parm),
4509 validize_mem (stack_parm), nregs,
4510 int_size_in_bytes (TREE_TYPE (parm)));
4512 entry_parm = stack_parm;
4517 /* If we didn't decide this parm came in a register,
4518 by default it came on the stack. */
4519 if (entry_parm == 0)
4520 entry_parm = stack_parm;
4522 /* Record permanently how this parm was passed. */
4524 DECL_INCOMING_RTL (parm) = entry_parm;
4526 /* If there is actually space on the stack for this parm,
4527 count it in stack_args_size; otherwise set stack_parm to 0
4528 to indicate there is no preallocated stack slot for the parm. */
4530 if (entry_parm == stack_parm
4531 || (GET_CODE (entry_parm) == PARALLEL
4532 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4533 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4534 /* On some machines, even if a parm value arrives in a register
4535 there is still an (uninitialized) stack slot allocated for it.
4537 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4538 whether this parameter already has a stack slot allocated,
4539 because an arg block exists only if current_function_args_size
4540 is larger than some threshold, and we haven't calculated that
4541 yet. So, for now, we just assume that stack slots never exist in this case. */
4543 || REG_PARM_STACK_SPACE (fndecl) > 0
4547 stack_args_size.constant += arg_size.constant;
4549 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4552 /* No stack slot was pushed for this parm. */
4555 /* Update info on where next arg arrives in registers. */
4557 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4558 passed_type, named_arg);
4560 /* If this is our second time through, we are done with this parm. */
4564 /* If we can't trust the parm stack slot to be aligned enough
4565 for its ultimate type, don't use that slot after entry.
4566 We'll make another stack slot, if we need one. */
4568 int thisparm_boundary
4569 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4571 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4575 /* If parm was passed in memory, and we need to convert it on entry,
4576 don't store it back in that same slot. */
4578 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4582 /* Now adjust STACK_PARM to the mode and precise location
4583 where this parameter should live during execution,
4584 if we discover that it must live in the stack during execution.
4585 To make debuggers happier on big-endian machines, we store
4586 the value in the last bytes of the space available. */
4588 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4593 if (BYTES_BIG_ENDIAN
4594 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4595 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4596 - GET_MODE_SIZE (nominal_mode));
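/* Worked example: on a big-endian target with UNITS_PER_WORD == 8, a
   parm promoted to DImode (8 bytes) whose nominal mode is SImode (4
   bytes) has its meaningful bytes at the high-address end of the
   slot, so the offset is advanced by 8 - 4 == 4 to address the value
   itself.  */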
4598 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4599 if (offset_rtx == const0_rtx)
4600 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4602 stack_parm = gen_rtx_MEM (nominal_mode,
4603 gen_rtx_PLUS (Pmode,
4604 internal_arg_pointer,
4607 /* If this is a memory ref that contains aggregate components,
4608 mark it as such for cse and loop optimize. */
4609 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4614 /* We need this "use" info, because the gcc-register->stack-register
4615 converter in reg-stack.c needs to know which registers are active
4616 at the start of the function call. The actual parameter loading
4617 instructions are not always still available by then, since they might
4618 have been optimized away. */
4620 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4621 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4624 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4625 in the mode in which it arrives.
4626 STACK_PARM is an RTX for a stack slot where the parameter can live
4627 during the function (in case we want to put it there).
4628 STACK_PARM is 0 if no stack slot was pushed for it.
4630 Now output code if necessary to convert ENTRY_PARM to
4631 the type in which this function declares it,
4632 and store that result in an appropriate place,
4633 which may be a pseudo reg, may be STACK_PARM,
4634 or may be a local stack slot if STACK_PARM is 0.
4636 Set DECL_RTL to that place. */
4638 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4640 /* If a BLKmode arrives in registers, copy it to a stack slot.
4641 Handle calls that pass values in multiple non-contiguous
4642 locations. The Irix 6 ABI has examples of this. */
4643 if (GET_CODE (entry_parm) == REG
4644 || GET_CODE (entry_parm) == PARALLEL)
4647 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4650 /* Note that we will be storing an integral number of words.
4651 So we have to be careful to ensure that we allocate an
4652 integral number of words. We do this below in the
4653 assign_stack_local if space was not allocated in the argument
4654 list. If it was, this will not work if PARM_BOUNDARY is not
4655 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4656 if it becomes a problem. */
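/* E.g. (a sketch): a 10-byte BLKmode parm with UNITS_PER_WORD == 4
   gets CEIL_ROUND (10, 4) == 12 bytes, exactly 3 words, so the
   word-at-a-time copy below never writes past the slot.  */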
4658 if (stack_parm == 0)
4661 = assign_stack_local (GET_MODE (entry_parm),
4664 /* If this is a memory ref that contains aggregate
4665 components, mark it as such for cse and loop optimize. */
4666 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4669 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4672 if (TREE_READONLY (parm))
4673 RTX_UNCHANGING_P (stack_parm) = 1;
4675 /* Handle calls that pass values in multiple non-contiguous
4676 locations. The Irix 6 ABI has examples of this. */
4677 if (GET_CODE (entry_parm) == PARALLEL)
4678 emit_group_store (validize_mem (stack_parm), entry_parm,
4679 int_size_in_bytes (TREE_TYPE (parm)),
4680 (TYPE_ALIGN (TREE_TYPE (parm))
4683 move_block_from_reg (REGNO (entry_parm),
4684 validize_mem (stack_parm),
4685 size_stored / UNITS_PER_WORD,
4686 int_size_in_bytes (TREE_TYPE (parm)));
4688 DECL_RTL (parm) = stack_parm;
4690 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4691 && ! DECL_INLINE (fndecl))
4692 /* layout_decl may set this. */
4693 || TREE_ADDRESSABLE (parm)
4694 || TREE_SIDE_EFFECTS (parm)
4695 /* If -ffloat-store specified, don't put explicit
4696 float variables into registers. */
4697 || (flag_float_store
4698 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4699 /* Always assign pseudo to structure return or item passed
4700 by invisible reference. */
4701 || passed_pointer || parm == function_result_decl)
4703 /* Store the parm in a pseudoregister during the function, but we
4704 may need to do it in a wider mode. */
4706 register rtx parmreg;
4707 int regno, regnoi = 0, regnor = 0;
4709 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4711 promoted_nominal_mode
4712 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4714 parmreg = gen_reg_rtx (promoted_nominal_mode);
4715 mark_user_reg (parmreg);
4717 /* If this was an item that we received a pointer to, set DECL_RTL appropriately. */
4722 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4723 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4726 DECL_RTL (parm) = parmreg;
4728 /* Copy the value into the register. */
4729 if (nominal_mode != passed_mode
4730 || promoted_nominal_mode != promoted_mode)
4733 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4734 mode, by the caller. We now have to convert it to
4735 NOMINAL_MODE, if different. However, PARMREG may be in
4736 a different mode than NOMINAL_MODE if it is being stored promoted.
4739 If ENTRY_PARM is a hard register, it might be in a register
4740 not valid for operating in its mode (e.g., an odd-numbered
4741 register for a DFmode). In that case, moves are the only
4742 thing valid, so we can't do a convert from there. This
4743 occurs when the calling sequence allows such misaligned usages.
4746 In addition, the conversion may involve a call, which could
4747 clobber parameters which haven't been copied to pseudo
4748 registers yet. Therefore, we must first copy the parm to
4749 a pseudo reg here, and save the conversion until after all
4750 parameters have been moved. */
4752 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4754 emit_move_insn (tempreg, validize_mem (entry_parm));
4756 push_to_sequence (conversion_insns);
4757 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4759 /* TREE_USED gets set erroneously during expand_assignment. */
4760 save_tree_used = TREE_USED (parm);
4761 expand_assignment (parm,
4762 make_tree (nominal_type, tempreg), 0, 0);
4763 TREE_USED (parm) = save_tree_used;
4764 conversion_insns = get_insns ();
4769 emit_move_insn (parmreg, validize_mem (entry_parm));
4771 /* If we were passed a pointer but the actual value
4772 can safely live in a register, put it in one. */
4773 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4774 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4775 && ! DECL_INLINE (fndecl))
4776 /* layout_decl may set this. */
4777 || TREE_ADDRESSABLE (parm)
4778 || TREE_SIDE_EFFECTS (parm)
4779 /* If -ffloat-store specified, don't put explicit
4780 float variables into registers. */
4781 || (flag_float_store
4782 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4784 /* We can't use nominal_mode, because it will have been set to
4785 Pmode above. We must use the actual mode of the parm. */
4786 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4787 mark_user_reg (parmreg);
4788 emit_move_insn (parmreg, DECL_RTL (parm));
4789 DECL_RTL (parm) = parmreg;
4790 /* STACK_PARM is the pointer, not the parm, and PARMREG is now the parm. */
4794 #ifdef FUNCTION_ARG_CALLEE_COPIES
4795 /* If we are passed an arg by reference and it is our responsibility
4796 to make a copy, do it now.
4797 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4798 original argument, so we must recreate them in the call to
4799 FUNCTION_ARG_CALLEE_COPIES. */
4800 /* ??? Later, add code to skip the copy in the case where the
4801 argument isn't modified. */
4803 else if (passed_pointer
4804 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4805 TYPE_MODE (DECL_ARG_TYPE (parm)),
4806 DECL_ARG_TYPE (parm),
4808 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4811 tree type = DECL_ARG_TYPE (parm);
4813 /* This sequence may involve a library call perhaps clobbering
4814 registers that haven't been copied to pseudos yet. */
4816 push_to_sequence (conversion_insns);
4818 if (TYPE_SIZE (type) == 0
4819 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4820 /* This is a variable sized object. */
4821 copy = gen_rtx_MEM (BLKmode,
4822 allocate_dynamic_stack_space
4823 (expr_size (parm), NULL_RTX,
4824 TYPE_ALIGN (type)));
4826 copy = assign_stack_temp (TYPE_MODE (type),
4827 int_size_in_bytes (type), 1);
4828 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4829 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4831 store_expr (parm, copy, 0);
4832 emit_move_insn (parmreg, XEXP (copy, 0));
4833 if (current_function_check_memory_usage)
4834 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4835 XEXP (copy, 0), ptr_mode,
4836 GEN_INT (int_size_in_bytes (type)),
4837 TYPE_MODE (sizetype),
4838 GEN_INT (MEMORY_USE_RW),
4839 TYPE_MODE (integer_type_node));
4840 conversion_insns = get_insns ();
4844 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4846 /* In any case, record the parm's desired stack location
4847 in case we later discover it must live in the stack.
4849 If it is a COMPLEX value, store the stack location for both halves. */
4852 if (GET_CODE (parmreg) == CONCAT)
4853 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4855 regno = REGNO (parmreg);
4857 if (regno >= max_parm_reg)
4860 int old_max_parm_reg = max_parm_reg;
4862 /* It's slow to expand this one register at a time,
4863 but it's also rare and we need max_parm_reg to be
4864 precisely correct. */
4865 max_parm_reg = regno + 1;
4866 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4867 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4868 old_max_parm_reg * sizeof (rtx));
4869 bzero ((char *) (new + old_max_parm_reg),
4870 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4871 parm_reg_stack_loc = new;
4874 if (GET_CODE (parmreg) == CONCAT)
4876 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4878 regnor = REGNO (gen_realpart (submode, parmreg));
4879 regnoi = REGNO (gen_imagpart (submode, parmreg));
4881 if (stack_parm != 0)
4883 parm_reg_stack_loc[regnor]
4884 = gen_realpart (submode, stack_parm);
4885 parm_reg_stack_loc[regnoi]
4886 = gen_imagpart (submode, stack_parm);
4890 parm_reg_stack_loc[regnor] = 0;
4891 parm_reg_stack_loc[regnoi] = 0;
4895 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4897 /* Mark the register as eliminable if we did no conversion
4898 and it was copied from memory at a fixed offset,
4899 and the arg pointer was not copied to a pseudo-reg.
4900 If the arg pointer is a pseudo reg or the offset formed
4901 an invalid address, such memory-equivalences
4902 as we make here would screw up life analysis for it. */
4903 if (nominal_mode == passed_mode
4906 && GET_CODE (stack_parm) == MEM
4907 && stack_offset.var == 0
4908 && reg_mentioned_p (virtual_incoming_args_rtx,
4909 XEXP (stack_parm, 0)))
4911 rtx linsn = get_last_insn ();
4914 /* Mark complex types separately. */
4915 if (GET_CODE (parmreg) == CONCAT)
4916 /* Scan backwards for the set of the real and imaginary parts. */
4918 for (sinsn = linsn; sinsn != 0;
4919 sinsn = prev_nonnote_insn (sinsn))
4921 set = single_set (sinsn);
4923 && SET_DEST (set) == regno_reg_rtx [regnoi])
4925 = gen_rtx_EXPR_LIST (REG_EQUIV,
4926 parm_reg_stack_loc[regnoi],
4929 && SET_DEST (set) == regno_reg_rtx [regnor])
4931 = gen_rtx_EXPR_LIST (REG_EQUIV,
4932 parm_reg_stack_loc[regnor],
4935 else if ((set = single_set (linsn)) != 0
4936 && SET_DEST (set) == parmreg)
4938 = gen_rtx_EXPR_LIST (REG_EQUIV,
4939 stack_parm, REG_NOTES (linsn));
4942 /* For pointer data type, suggest pointer register. */
4943 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4944 mark_reg_pointer (parmreg,
4945 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4950 /* Value must be stored in the stack slot STACK_PARM
4951 during function execution. */
4953 if (promoted_mode != nominal_mode)
4955 /* Conversion is required. */
4956 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4958 emit_move_insn (tempreg, validize_mem (entry_parm));
4960 push_to_sequence (conversion_insns);
4961 entry_parm = convert_to_mode (nominal_mode, tempreg,
4962 TREE_UNSIGNED (TREE_TYPE (parm)));
4965 /* ??? This may need a big-endian conversion on sparc64. */
4966 stack_parm = change_address (stack_parm, nominal_mode,
4969 conversion_insns = get_insns ();
4974 if (entry_parm != stack_parm)
4976 if (stack_parm == 0)
4979 = assign_stack_local (GET_MODE (entry_parm),
4980 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4981 /* If this is a memory ref that contains aggregate components,
4982 mark it as such for cse and loop optimize. */
4983 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4986 if (promoted_mode != nominal_mode)
4988 push_to_sequence (conversion_insns);
4989 emit_move_insn (validize_mem (stack_parm),
4990 validize_mem (entry_parm));
4991 conversion_insns = get_insns ();
4995 emit_move_insn (validize_mem (stack_parm),
4996 validize_mem (entry_parm));
4998 if (current_function_check_memory_usage)
5000 push_to_sequence (conversion_insns);
5001 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5002 XEXP (stack_parm, 0), ptr_mode,
5003 GEN_INT (GET_MODE_SIZE (GET_MODE
5005 TYPE_MODE (sizetype),
5006 GEN_INT (MEMORY_USE_RW),
5007 TYPE_MODE (integer_type_node));
5009 conversion_insns = get_insns ();
5012 DECL_RTL (parm) = stack_parm;
5015 /* If this "parameter" was the place where we are receiving the
5016 function's incoming structure pointer, set up the result. */
5017 if (parm == function_result_decl)
5019 tree result = DECL_RESULT (fndecl);
5020 tree restype = TREE_TYPE (result);
5023 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5025 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5026 AGGREGATE_TYPE_P (restype));
5029 if (TREE_THIS_VOLATILE (parm))
5030 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5031 if (TREE_READONLY (parm))
5032 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5035 /* Output all parameter conversion instructions (possibly including calls)
5036 now that all parameters have been copied out of hard registers. */
5037 emit_insns (conversion_insns);
5039 last_parm_insn = get_last_insn ();
5041 current_function_args_size = stack_args_size.constant;
5043 /* Adjust function incoming argument size for alignment and minimum size. */
5046 #ifdef REG_PARM_STACK_SPACE
5047 #ifndef MAYBE_REG_PARM_STACK_SPACE
5048 current_function_args_size = MAX (current_function_args_size,
5049 REG_PARM_STACK_SPACE (fndecl));
5053 #ifdef PREFERRED_STACK_BOUNDARY
5054 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
5056 current_function_args_size
5057 = ((current_function_args_size + STACK_BYTES - 1)
5058 / STACK_BYTES) * STACK_BYTES;
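/* Worked example: with PREFERRED_STACK_BOUNDARY == 64, STACK_BYTES is
   8, and an args size of 20 becomes ((20 + 8 - 1) / 8) * 8 == 24, so
   the incoming argument block is a whole number of stack units.  */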
5061 #ifdef ARGS_GROW_DOWNWARD
5062 current_function_arg_offset_rtx
5063 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5064 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5065 size_int (-stack_args_size.constant)),
5066 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5068 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5071 /* See how many bytes, if any, of its args a function should try to pop
5074 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5075 current_function_args_size);
5077 /* For stdarg.h function, save info about
5078 regs and stack space used by the named args. */
5081 current_function_args_info = args_so_far;
5083 /* Set the rtx used for the function return value. Put this in its
5084 own variable so any optimizers that need this information don't have
5085 to include tree.h. Do this here so it gets done when an inlined
5086 function gets output. */
5088 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5091 /* Indicate whether REGNO is an incoming argument to the current function
5092 that was promoted to a wider mode. If so, return the RTX for the
5093 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5094 that REGNO is promoted from and whether the promotion was signed or unsigned. */
5097 #ifdef PROMOTE_FUNCTION_ARGS
5100 promoted_input_arg (regno, pmode, punsignedp)
5102 enum machine_mode *pmode;
5107 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5108 arg = TREE_CHAIN (arg))
5109 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5110 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5111 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5113 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5114 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5116 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5117 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5118 && mode != DECL_MODE (arg))
5120 *pmode = DECL_MODE (arg);
5121 *punsignedp = unsignedp;
5122 return DECL_INCOMING_RTL (arg);
5131 /* Compute the size and offset from the start of the stacked arguments for a
5132 parm passed in mode PASSED_MODE and with type TYPE.
5134 INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.
5137 The starting offset and size for this parm are returned in *OFFSET_PTR
5138 and *ARG_SIZE_PTR, respectively.
5140 IN_REGS is non-zero if the argument will be passed in registers. It will
5141 never be set if REG_PARM_STACK_SPACE is not defined.
5143 FNDECL is the function in which the argument was defined.
5145 There are two types of rounding that are done. The first, controlled by
5146 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5147 list to be aligned to the specific boundary (in bits). This rounding
5148 affects the initial and starting offsets, but not the argument size.
5150 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5151 optionally rounds the size of the parm to PARM_BOUNDARY. The
5152 initial offset is not affected by this rounding, while the size always
5153 is and the starting offset may be. */
5155 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5156 initial_offset_ptr is positive because locate_and_pad_parm's
5157 callers pass in the total size of args so far as
5158 initial_offset_ptr. arg_size_ptr is always positive. */
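/* Worked example for the upward-growing case (hypothetical numbers):
   with *INITIAL_OFFSET_PTR at 6 constant bytes, a FUNCTION_ARG_BOUNDARY
   of 32 bits, and a 4-byte SImode parm, the offset is first padded to
   CEIL_ROUND (6, 4) == 8, so *OFFSET_PTR ends up at 8 and
   *ARG_SIZE_PTR at 4; the caller's running total then advances to
   12.  */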
5161 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5162 initial_offset_ptr, offset_ptr, arg_size_ptr)
5163 enum machine_mode passed_mode;
5166 tree fndecl ATTRIBUTE_UNUSED;
5167 struct args_size *initial_offset_ptr;
5168 struct args_size *offset_ptr;
5169 struct args_size *arg_size_ptr;
5172 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5173 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5174 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5176 #ifdef REG_PARM_STACK_SPACE
5177 /* If we have found a stack parm before we reach the end of the
5178 area reserved for registers, skip that area. */
5181 int reg_parm_stack_space = 0;
5183 #ifdef MAYBE_REG_PARM_STACK_SPACE
5184 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5186 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5188 if (reg_parm_stack_space > 0)
5190 if (initial_offset_ptr->var)
5192 initial_offset_ptr->var
5193 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5194 size_int (reg_parm_stack_space));
5195 initial_offset_ptr->constant = 0;
5197 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5198 initial_offset_ptr->constant = reg_parm_stack_space;
5201 #endif /* REG_PARM_STACK_SPACE */
5203 arg_size_ptr->var = 0;
5204 arg_size_ptr->constant = 0;
5206 #ifdef ARGS_GROW_DOWNWARD
5207 if (initial_offset_ptr->var)
5209 offset_ptr->constant = 0;
5210 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5211 initial_offset_ptr->var);
5215 offset_ptr->constant = - initial_offset_ptr->constant;
5216 offset_ptr->var = 0;
5218 if (where_pad != none
5219 && (TREE_CODE (sizetree) != INTEGER_CST
5220 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5221 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5222 SUB_PARM_SIZE (*offset_ptr, sizetree);
5223 if (where_pad != downward)
5224 pad_to_arg_alignment (offset_ptr, boundary);
5225 if (initial_offset_ptr->var)
5227 arg_size_ptr->var = size_binop (MINUS_EXPR,
5228 size_binop (MINUS_EXPR,
5230 initial_offset_ptr->var),
5235 arg_size_ptr->constant = (- initial_offset_ptr->constant
5236 - offset_ptr->constant);
5238 #else /* !ARGS_GROW_DOWNWARD */
5239 pad_to_arg_alignment (initial_offset_ptr, boundary);
5240 *offset_ptr = *initial_offset_ptr;
5242 #ifdef PUSH_ROUNDING
5243 if (passed_mode != BLKmode)
5244 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5247 /* Pad_below needs the pre-rounded size to know how much to pad below,
5248 so this must be done before rounding up. */
5249 if (where_pad == downward
5250 /* However, BLKmode args passed in regs have their padding done elsewhere.
5251 The stack slot must be able to hold the entire register. */
5252 && !(in_regs && passed_mode == BLKmode))
5253 pad_below (offset_ptr, passed_mode, sizetree);
5255 if (where_pad != none
5256 && (TREE_CODE (sizetree) != INTEGER_CST
5257 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5258 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5260 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5261 #endif /* ARGS_GROW_DOWNWARD */
5264 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5265 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5268 pad_to_arg_alignment (offset_ptr, boundary)
5269 struct args_size *offset_ptr;
5272 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5274 if (boundary > BITS_PER_UNIT)
5276 if (offset_ptr->var)
5279 #ifdef ARGS_GROW_DOWNWARD
5284 (ARGS_SIZE_TREE (*offset_ptr),
5285 boundary / BITS_PER_UNIT);
5286 offset_ptr->constant = 0; /*?*/
5289 offset_ptr->constant =
5290 #ifdef ARGS_GROW_DOWNWARD
5291 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5293 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5298 #ifndef ARGS_GROW_DOWNWARD
5300 pad_below (offset_ptr, passed_mode, sizetree)
5301 struct args_size *offset_ptr;
5302 enum machine_mode passed_mode;
5305 if (passed_mode != BLKmode)
5307 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5308 offset_ptr->constant
5309 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5310 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5311 - GET_MODE_SIZE (passed_mode));
5315 if (TREE_CODE (sizetree) != INTEGER_CST
5316 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5318 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5319 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5321 ADD_PARM_SIZE (*offset_ptr, s2);
5322 SUB_PARM_SIZE (*offset_ptr, sizetree);
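/* E.g. (a sketch): a BLKmode parm of 10 bytes with PARM_BOUNDARY of
   32 bits gives S2 == round_up (10, 4) == 12, so the offset advances
   by 12 - 10 == 2 bytes and the value ends up in the high part of its
   padded slot.  */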
5328 #ifdef ARGS_GROW_DOWNWARD
5330 round_down (value, divisor)
5334 return size_binop (MULT_EXPR,
5335 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5336 size_int (divisor));
5340 /* Walk the tree of blocks describing the binding levels within a function
5341 and warn about uninitialized variables.
5342 This is done after calling flow_analysis and before global_alloc
5343 clobbers the pseudo-regs to hard regs. */
5346 uninitialized_vars_warning (block)
5349 register tree decl, sub;
5350 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5352 if (TREE_CODE (decl) == VAR_DECL
5353 /* These warnings are unreliable for aggregates
5354 because assigning the fields one by one can fail to convince
5355 flow.c that the entire aggregate was initialized.
5356 Unions are troublesome because members may be shorter. */
5357 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5358 && DECL_RTL (decl) != 0
5359 && GET_CODE (DECL_RTL (decl)) == REG
5360 /* Global optimizations can make it difficult to determine if a
5361 particular variable has been initialized. However, a VAR_DECL
5362 with a nonzero DECL_INITIAL had an initializer, so do not
5363 claim it is potentially uninitialized.
5365 We do not care about the actual value in DECL_INITIAL, so we do
5366 not worry that it may be a dangling pointer. */
5367 && DECL_INITIAL (decl) == NULL_TREE
5368 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5369 warning_with_decl (decl,
5370 "`%s' might be used uninitialized in this function");
5371 if (TREE_CODE (decl) == VAR_DECL
5372 && DECL_RTL (decl) != 0
5373 && GET_CODE (DECL_RTL (decl)) == REG
5374 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5375 warning_with_decl (decl,
5376 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5378 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5379 uninitialized_vars_warning (sub);
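/* A minimal example that triggers the first warning (assuming flow
   analysis cannot prove initialization):

       int f (int cond) { int x; if (cond) x = 1; return x; }

   Here `x' is a non-aggregate VAR_DECL held in a pseudo-reg with a
   null DECL_INITIAL, and regno_uninitialized reports that the pseudo
   may be used before it is set.  */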
5382 /* Do the appropriate part of uninitialized_vars_warning
5383 but for arguments instead of local variables. */
5386 setjmp_args_warning ()
5389 for (decl = DECL_ARGUMENTS (current_function_decl);
5390 decl; decl = TREE_CHAIN (decl))
5391 if (DECL_RTL (decl) != 0
5392 && GET_CODE (DECL_RTL (decl)) == REG
5393 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5394 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5397 /* If this function calls setjmp, put all vars into the stack
5398 unless they were declared `register'. */
5401 setjmp_protect (block)
5404 register tree decl, sub;
5405 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5406 if ((TREE_CODE (decl) == VAR_DECL
5407 || TREE_CODE (decl) == PARM_DECL)
5408 && DECL_RTL (decl) != 0
5409 && (GET_CODE (DECL_RTL (decl)) == REG
5410 || (GET_CODE (DECL_RTL (decl)) == MEM
5411 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5412 /* If this variable came from an inline function, it must be
5413 that its life doesn't overlap the setjmp. If there was a
5414 setjmp in the function, it would already be in memory. We
5415 must exclude such variables because their DECL_RTL might be
5416 set to strange things such as virtual_stack_vars_rtx. */
5417 && ! DECL_FROM_INLINE (decl)
5419 #ifdef NON_SAVING_SETJMP
5420 /* If longjmp doesn't restore the registers,
5421 don't put anything in them. */
5425 ! DECL_REGISTER (decl)))
5426 put_var_into_stack (decl);
5427 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5428 setjmp_protect (sub);
5431 /* Like the previous function, but for args instead of local variables. */
5434 setjmp_protect_args ()
5437 for (decl = DECL_ARGUMENTS (current_function_decl);
5438 decl; decl = TREE_CHAIN (decl))
5439 if ((TREE_CODE (decl) == VAR_DECL
5440 || TREE_CODE (decl) == PARM_DECL)
5441 && DECL_RTL (decl) != 0
5442 && (GET_CODE (DECL_RTL (decl)) == REG
5443 || (GET_CODE (DECL_RTL (decl)) == MEM
5444 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5446 /* If longjmp doesn't restore the registers,
5447 don't put anything in them. */
5448 #ifdef NON_SAVING_SETJMP
5452 ! DECL_REGISTER (decl)))
5453 put_var_into_stack (decl);
5456 /* Return the context-pointer register corresponding to DECL,
5457 or 0 if it does not need one. */
5460 lookup_static_chain (decl)
5463 tree context = decl_function_context (decl);
5467 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5470 /* We treat inline_function_decl as an alias for the current function
5471 because that is the inline function whose vars, types, etc.
5472 are being merged into the current function.
5473 See expand_inline_function. */
5474 if (context == current_function_decl || context == inline_function_decl)
5475 return virtual_stack_vars_rtx;
5477 for (link = context_display; link; link = TREE_CHAIN (link))
5478 if (TREE_PURPOSE (link) == context)
5479 return RTL_EXPR_RTL (TREE_VALUE (link));
5484 /* Convert a stack slot address ADDR for variable VAR
5485 (from a containing function)
5486 into an address valid in this function (using a static chain). */
5489 fix_lexical_addr (addr, var)
5494 HOST_WIDE_INT displacement;
5495 tree context = decl_function_context (var);
5496 struct function *fp;
5499 /* If this is the present function, we need not do anything. */
5500 if (context == current_function_decl || context == inline_function_decl)
5503 for (fp = outer_function_chain; fp; fp = fp->next)
5504 if (fp->decl == context)
5510 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5511 addr = XEXP (XEXP (addr, 0), 0);
5513 /* Decode given address as base reg plus displacement. */
5514 if (GET_CODE (addr) == REG)
5515 basereg = addr, displacement = 0;
5516 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5517 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5521 /* We accept vars reached via the containing function's
5522 incoming arg pointer and via its stack variables pointer. */
5523 if (basereg == fp->internal_arg_pointer)
5525 /* If reached via arg pointer, get the arg pointer value
5526 out of that function's stack frame.
5528 There are two cases: If a separate ap is needed, allocate a
5529 slot in the outer function for it and dereference it that way.
5530 This is correct even if the real ap is actually a pseudo.
5531 Otherwise, just adjust the offset from the frame pointer to compensate. */
5534 #ifdef NEED_SEPARATE_AP
5537 if (fp->arg_pointer_save_area == 0)
5538 fp->arg_pointer_save_area
5539 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5541 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5542 addr = memory_address (Pmode, addr);
5544 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5546 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5547 base = lookup_static_chain (var);
5551 else if (basereg == virtual_stack_vars_rtx)
5553 /* This is the same code as lookup_static_chain, duplicated here to
5554 avoid an extra call to decl_function_context. */
5557 for (link = context_display; link; link = TREE_CHAIN (link))
5558 if (TREE_PURPOSE (link) == context)
5560 base = RTL_EXPR_RTL (TREE_VALUE (link));
5568 /* Use same offset, relative to appropriate static chain or argument
5570 return plus_constant (base, displacement);
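/* Usage sketch: for a GNU C nested function that reads a local of its
   parent,

       int outer (void)
       {
         int v = 3;
         int inner (void) { return v; }
         return inner ();
       }

   the reference to `v' inside `inner' names a stack slot of `outer';
   this function rewrites its address as BASE (reached through the
   static chain or the saved arg pointer) plus the same
   displacement.  */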
5573 /* Return the address of the trampoline for entering nested fn FUNCTION.
5574 If necessary, allocate a trampoline (in the stack frame)
5575 and emit rtl to initialize its contents (at entry to this function). */
5578 trampoline_address (function)
5584 struct function *fp;
5587 /* Find an existing trampoline and return it. */
5588 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5589 if (TREE_PURPOSE (link) == function)
5591 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5593 for (fp = outer_function_chain; fp; fp = fp->next)
5594 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5595 if (TREE_PURPOSE (link) == function)
5597 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5599 return round_trampoline_addr (tramp);
5602 /* None exists; we must make one. */
5604 /* Find the `struct function' for the function containing FUNCTION. */
5606 fn_context = decl_function_context (function);
5607 if (fn_context != current_function_decl
5608 && fn_context != inline_function_decl)
5609 for (fp = outer_function_chain; fp; fp = fp->next)
5610 if (fp->decl == fn_context)
5613 /* Allocate run-time space for this trampoline
5614 (usually in the defining function's stack frame). */
5615 #ifdef ALLOCATE_TRAMPOLINE
5616 tramp = ALLOCATE_TRAMPOLINE (fp);
5618 /* If rounding is needed, allocate extra space
5619 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5620 #ifdef TRAMPOLINE_ALIGNMENT
5621 #define TRAMPOLINE_REAL_SIZE \
5622 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5624 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5627 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5629 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5632 /* Record the trampoline for reuse and note it for later initialization
5633 by expand_function_end. */
5636 push_obstacks (fp->function_maybepermanent_obstack,
5637 fp->function_maybepermanent_obstack);
5638 rtlexp = make_node (RTL_EXPR);
5639 RTL_EXPR_RTL (rtlexp) = tramp;
5640 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5645 /* Make the RTL_EXPR node temporary, not momentary, so that the
5646 trampoline_list doesn't become garbage. */
5647 int momentary = suspend_momentary ();
5648 rtlexp = make_node (RTL_EXPR);
5649 resume_momentary (momentary);
5651 RTL_EXPR_RTL (rtlexp) = tramp;
5652 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5655 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5656 return round_trampoline_addr (tramp);
5659 /* Given a trampoline address,
5660 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5663 round_trampoline_addr (tramp)
5666 #ifdef TRAMPOLINE_ALIGNMENT
5667 /* Round address up to desired boundary. */
5668 rtx temp = gen_reg_rtx (Pmode);
5669 temp = expand_binop (Pmode, add_optab, tramp,
5670 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5671 temp, 0, OPTAB_LIB_WIDEN);
5672 tramp = expand_binop (Pmode, and_optab, temp,
5673 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5674 temp, 0, OPTAB_LIB_WIDEN);
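/* E.g.: with TRAMPOLINE_ALIGNMENT == 64, the two expand_binop calls
   above compute TRAMP = (TRAMP + 7) & -8, rounding the address up to
   the next 8-byte boundary.  */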
5679 /* The functions identify_blocks and reorder_blocks provide a way to
5680 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5681 duplicate portions of the RTL code. Call identify_blocks before
5682 changing the RTL, and call reorder_blocks after. */
5684 /* Put all this function's BLOCK nodes including those that are chained
5685 onto the first block into a vector, and return it.
5686 Also store in each NOTE for the beginning or end of a block
5687 the index of that block in the vector.
5688 The arguments are BLOCK, the chain of top-level blocks of the function,
5689 and INSNS, the insn chain of the function. */
5692 identify_blocks (block, insns)
5700 int next_block_number = 1;
5701 int current_block_number = 1;
5707 n_blocks = all_blocks (block, 0);
5708 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5709 block_stack = (int *) alloca (n_blocks * sizeof (int));
5711 all_blocks (block, block_vector);
5713 for (insn = insns; insn; insn = NEXT_INSN (insn))
5714 if (GET_CODE (insn) == NOTE)
5716 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5718 block_stack[depth++] = current_block_number;
5719 current_block_number = next_block_number;
5720 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5722 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5724 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5725 current_block_number = block_stack[--depth];
5729 if (n_blocks != next_block_number)
5732 return block_vector;
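/* Illustration: for the note stream

       BEG(a) BEG(b) END(b) BEG(c) END(c) END(a)

   the loop above assigns block numbers 1, 2, 2, 3, 3, 1: each
   BLOCK_BEG note gets the next fresh index and the matching BLOCK_END
   gets that same index back off the stack, so afterward
   block_vector[NOTE_BLOCK_NUMBER (insn)] yields the BLOCK for any
   block note.  */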
5735 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5736 and a revised instruction chain, rebuild the tree structure
5737 of BLOCK nodes to correspond to the new order of RTL.
5738 The new block tree is inserted below TOP_BLOCK.
5739 Returns the current top-level block. */
5742 reorder_blocks (block_vector, block, insns)
5747 tree current_block = block;
5750 if (block_vector == 0)
5753 /* Prune the old trees away, so that they don't get in the way. */
5754 BLOCK_SUBBLOCKS (current_block) = 0;
5755 BLOCK_CHAIN (current_block) = 0;
5757 for (insn = insns; insn; insn = NEXT_INSN (insn))
5758 if (GET_CODE (insn) == NOTE)
5760 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5762 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5763 /* If we have seen this block before, copy it. */
5764 if (TREE_ASM_WRITTEN (block))
5765 block = copy_node (block);
5766 BLOCK_SUBBLOCKS (block) = 0;
5767 TREE_ASM_WRITTEN (block) = 1;
5768 BLOCK_SUPERCONTEXT (block) = current_block;
5769 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5770 BLOCK_SUBBLOCKS (current_block) = block;
5771 current_block = block;
5772 NOTE_SOURCE_FILE (insn) = 0;
5774 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5776 BLOCK_SUBBLOCKS (current_block)
5777 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5778 current_block = BLOCK_SUPERCONTEXT (current_block);
5779 NOTE_SOURCE_FILE (insn) = 0;
5783 BLOCK_SUBBLOCKS (current_block)
5784 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5785 return current_block;
5788 /* Reverse the order of elements in the chain T of blocks,
5789 and return the new head of the chain (old last element). */
5795 register tree prev = 0, decl, next;
5796 for (decl = t; decl; decl = next)
5798 next = BLOCK_CHAIN (decl);
5799 BLOCK_CHAIN (decl) = prev;
5805 /* Count the subblocks of the list starting with BLOCK, and list them
5806 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all blocks. */
5810 all_blocks (block, vector)
5818 TREE_ASM_WRITTEN (block) = 0;
5820 /* Record this block. */
5822 vector[n_blocks] = block;
5826 /* Record the subblocks, and their subblocks... */
5827 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5828 vector ? vector + n_blocks : 0);
5829 block = BLOCK_CHAIN (block);
5835 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5836 and initialize static variables for generating RTL for the statements of the function. */
5840 init_function_start (subr, filename, line)
5845 init_stmt_for_function ();
5847 cse_not_expected = ! optimize;
5849 /* Caller save not needed yet. */
5850 caller_save_needed = 0;
5852 /* No stack slots have been made yet. */
5853 stack_slot_list = 0;
5855 /* There is no stack slot for handling nonlocal gotos. */
5856 nonlocal_goto_handler_slots = 0;
5857 nonlocal_goto_stack_level = 0;
5859 /* No labels have been declared for nonlocal use. */
5860 nonlocal_labels = 0;
5861 nonlocal_goto_handler_labels = 0;
5863 /* No function calls so far in this function. */
5864 function_call_count = 0;
5866 /* No parm regs have been allocated.
5867 (This is important for output_inline_function.) */
5868 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5870 /* Initialize the RTL mechanism. */
5873 /* Initialize the queue of pending postincrement and postdecrements,
5874 and some other info in expr.c. */
5877 /* We haven't done register allocation yet. */
5880 init_const_rtx_hash_table ();
5882 current_function_name = (*decl_printable_name) (subr, 2);
5884 /* Nonzero if this is a nested function that uses a static chain. */
5886 current_function_needs_context
5887 = (decl_function_context (current_function_decl) != 0
5888 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5890 /* Set if a call to setjmp is seen. */
5891 current_function_calls_setjmp = 0;
5893 /* Set if a call to longjmp is seen. */
5894 current_function_calls_longjmp = 0;
5896 current_function_calls_alloca = 0;
5897 current_function_has_nonlocal_label = 0;
5898 current_function_has_nonlocal_goto = 0;
5899 current_function_contains_functions = 0;
5900 current_function_sp_is_unchanging = 0;
5901 current_function_has_computed_jump = 0;
5902 current_function_is_thunk = 0;
5904 current_function_returns_pcc_struct = 0;
5905 current_function_returns_struct = 0;
5906 current_function_epilogue_delay_list = 0;
5907 current_function_uses_const_pool = 0;
5908 current_function_uses_pic_offset_table = 0;
5909 current_function_cannot_inline = 0;
5911 /* We have not yet needed to make a label to jump to for tail-recursion. */
5912 tail_recursion_label = 0;
5914 /* We haven't had a need to make a save area for ap yet. */
5916 arg_pointer_save_area = 0;
5918 /* No stack slots allocated yet. */
5921 /* No SAVE_EXPRs in this function yet. */
5924 /* No RTL_EXPRs in this function yet. */
5927 /* Set up to allocate temporaries. */
5930 /* Within function body, compute a type's size as soon as it is laid out. */
5931 immediate_size_expand++;
5933 /* We haven't made any trampolines for this function yet. */
5934 trampoline_list = 0;
5936 init_pending_stack_adjust ();
5937 inhibit_defer_pop = 0;
5939 current_function_outgoing_args_size = 0;
  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }
  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
                     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
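/* Illustrative sketch, not part of the original file: on targets without
   an init section, the library call emitted above behaves as if main
   began with an explicit call to the runtime routine named by NAME__MAIN
   (used in the emit_library_call above), which runs global constructors.  */
#if 0
extern void __main (void);

int
main (int argc, char **argv)
{
  __main ();                    /* what expand_main_function arranges */
  return 0;
}
#endif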
extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */
void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
          || GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }
  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;
  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
                               AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }
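/* Illustrative sketch, not part of the original file: the branches above
   can be seen from declarations like the hypothetical ones below.  `g'
   satisfies aggregate_value_p, so its DECL_RESULT becomes a MEM at the
   address the caller supplies (or at static space under
   PCC_STATIC_STRUCT_RETURN); `h' is a plain scalar and gets the hard
   register named by FUNCTION_VALUE / FUNCTION_OUTGOING_VALUE; a function
   whose parameters need cleanups instead computes its value into a
   pseudo, which expand_function_end later copies into the real return
   register.  */
#if 0
struct big { int a[8]; };
struct big g (void);            /* aggregate: returned through memory  */
int h (void);                   /* scalar: returned in a hard register */
#endif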
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
         pointer into a pseudo.  If we have small register classes, copy
         the value from memory if static_chain_incoming_rtx is a REG.  If
         we do stupid register allocation, we use the stack address
         generated above.  */
      if (tem && ! obey_regdecls)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain thru stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
                                               memory_address (Pmode, last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                                save_expr_regs);
        }
    }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
                   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
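/* Illustrative sketch, not part of the original file: the pending sizes
   expanded in the loop above come from parameters whose types have
   non-constant size, as in this hypothetical GNU C function, where the
   size of `vec's type must be computed once at entry to `f'.  */
#if 0
void
f (int n, int vec[n][n])
{
  vec[0][0] = n;
}
#endif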
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif
#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif
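/* Illustrative sketch, not part of the original file: why setjmp_protect
   matters on a NON_SAVING_SETJMP target.  If `v' lived in a call-saved
   register that longjmp fails to restore, the final read could see the
   stale value 1; forcing `v' into a stack slot makes it read back as 2.  */
#if 0
#include <setjmp.h>

static jmp_buf env;

int
f (void)
{
  int v = 1;
  if (setjmp (env) == 0)
    {
      v = 2;
      longjmp (env, 1);
    }
  return v;                     /* must be 2 */
}
#endif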
  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
                      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
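/* Illustrative sketch, not part of the original file: trampolines are
   built for nested functions whose address escapes, as in this
   hypothetical GNU C example.  Taking `inner's address forces a stack
   trampoline; the loop above emits, at entry to `outer', the insns that
   copy TRAMPOLINE_TEMPLATE into the slot and let INITIALIZE_TRAMPOLINE
   plug in `inner's entry point and static chain.  */
#if 0
extern int apply (int (*fn) (int));

int
outer (int x)
{
  int inner (int y) { return x + y; }   /* needs the static chain */
  return apply (inner);                 /* address of `inner' escapes */
}
#endif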
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }
  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();
  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);
  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);
        emit_label (label);
      }
  }
  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a temporary
         pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }
  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();
  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif
  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */
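/* Illustrative sketch, not part of the original file: record_insns and
   contains are used as a pair.  thread_prologue_and_epilogue_insns saves
   the UIDs of the generated prologue (or epilogue), and
   reposition_prologue_and_epilogue_notes later walks the final insn
   stream, subtracting contains (insn, prologue) from the recorded count;
   when the count reaches zero the last prologue insn has been found and
   NOTE_INSN_PROLOGUE_END is moved to just after it.  The loops below are
   the real implementation of that scheme.  */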
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
          inserted = 1;
        }
      else
        emit_insn_after (seq, f);
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
         but via mark_regs_live_at_end, we could use insert_insn_on_edge
         and all of this ugliness would go away.  */

      /* If the exit block has no non-fake predecessors, we don't
         need an epilogue.  Furthermore, only pay attention to the
         fallthru predecessors; if (conditional) return insns were
         generated, by definition we do not need to emit epilogue
         insns.  */

      for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
        if ((e->flags & EDGE_FAKE) == 0
            && (e->flags & EDGE_FALLTHRU) != 0)
          break;
      if (e == NULL)
        goto epilogue_done;

      /* We can't handle multiple epilogues -- if one is needed,
         we won't be able to place it multiple times.

         ??? Fix epilogue expanders to not assume they are the
         last thing done compiling the function.  Either that
         or copy_rtx each insn.

         ??? Blah, it's not a simple expression to assert that
         we've exactly one fallthru exit edge.  */

      bb = e->src;
      tail = bb->end;

      /* ??? If the last insn of the basic block is a jump, then we
         are creating a new basic block.  Wimp out and leave these
         insns outside any block.  */
      if (GET_CODE (tail) == JUMP_INSN)
        bb = 0;

      {
        rtx prev, seq, first_use;

        /* Move the USE insns at the end of a function onto a list.  */
        prev = tail;
        if (GET_CODE (prev) == BARRIER
            || GET_CODE (prev) == NOTE)
          prev = prev_nonnote_insn (prev);

        first_use = 0;
        if (prev
            && GET_CODE (prev) == INSN
            && GET_CODE (PATTERN (prev)) == USE)
          {
            /* If the end of the block is the use, grab hold of something
               else so that we emit barriers etc in the right place.  */
            if (prev == tail)
              {
                do
                  tail = PREV_INSN (tail);
                while (GET_CODE (tail) == INSN
                       && GET_CODE (PATTERN (tail)) == USE);
              }

            do
              {
                rtx use = prev;
                prev = prev_nonnote_insn (prev);

                remove_insn (use);
                if (first_use)
                  {
                    NEXT_INSN (use) = first_use;
                    PREV_INSN (first_use) = use;
                  }
                else
                  NEXT_INSN (use) = NULL_RTX;
                first_use = use;
              }
            while (prev
                   && GET_CODE (prev) == INSN
                   && GET_CODE (PATTERN (prev)) == USE);
          }

        /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
           epilogue insns, the USE insns at the end of a function,
           the jump insn that returns, and then a BARRIER.  */

        if (GET_CODE (tail) != BARRIER)
          {
            prev = next_nonnote_insn (tail);
            if (!prev || GET_CODE (prev) != BARRIER)
              emit_barrier_after (tail);
          }

        seq = gen_epilogue ();
        prev = tail;
        tail = emit_jump_insn_after (seq, tail);

        /* Insert the USE insns immediately before the return insn, which
           must be the last instruction emitted in the sequence.  */
        if (first_use)
          emit_insns_before (first_use, tail);
        emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

        /* Update the tail of the basic block.  */
        if (bb)
          bb->end = tail;

        /* Retain a map of the epilogue insns.  */
        epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
      }
    }
epilogue_done:
#endif

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;

                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }
      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks-1) == insn)
                    BLOCK_HEAD (n_basic_blocks-1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */