/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
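
/* Editorial sketch (not part of the original sources; names are from this
   file, argument details elided).  The typical call sequence per function
   is roughly:

       expand_function_start (fndecl, ...);
       ... expand the body, obtaining slots via assign_stack_local
	   and temporaries via assign_stack_temp / assign_temp ...
       expand_function_end (...);

   put_var_into_stack is the retrofit path: it runs later, when a variable
   that already lives in a pseudo-register turns out to need a memory
   address after all.  */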
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
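
/* Worked example (editorial, not in the original sources): with ALIGN == 8,

       FLOOR_ROUND (13, 8)  ==   8	CEIL_ROUND (13, 8)  ==  16
       FLOOR_ROUND (-13, 8) == -16	CEIL_ROUND (-13, 8) ==  -8

   The bit-masking forms round toward minus/plus infinity even for negative
   values, whereas division would round toward zero.  */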
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in the current function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
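
/* Illustrative usage sketch (editorial, not in the original sources):
   expanding a statement-expression typically brackets the work as

       push_temp_slots ();		/- enter a new nesting level -/
       ... expand, allocating temporaries ...
       preserve_temp_slots (result);	/- keep the slot holding RESULT -/
       free_temp_slots ();		/- release everything else -/
       pop_temp_slots ();		/- leave the level -/

   preserve_temp_slots moves the result's slot up one nesting level, so
   the enclosing level, not this one, eventually frees it.  */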
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int, htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode, htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
				    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int, rtx));
static void fixup_var_refs_insns_with_hash
				PARAMS ((htab_t, rtx,
					 enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
					     int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
		    p->fixup_var_refs_queue->promoted_mode,
		    p->fixup_var_refs_queue->unsignedp,
		    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
	list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
	fixup_var_refs (queue->modified, queue->promoted_mode,
			queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
	= (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
	   + frame_phase);
#else
      function->x_frame_offset
	= (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
	   + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (function->x_frame_offset + bigend_correction,
			   Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
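
/* Usage sketch (editorial, not in the original sources): a caller needing
   a stack slot for one SImode value, aligned according to its mode, would
   write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Passing ALIGN == -1 instead requests BIGGEST_ALIGNMENT and rounds the
   size up to a multiple of it.  */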
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }

	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
				 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error_with_decl (decl, "size of variable `%s' is too large");
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
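
/* Usage sketch (editorial, not in the original sources): expanding a call
   that returns a structure in memory might obtain its scratch area with

       rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   KEEP == 0 lets free_temp_slots reclaim the slot at the end of the
   statement; MEMORY_REQUIRED == 1 forces a stack slot even for modes that
   would otherwise fit in a register.  */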
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;

	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }

	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }

      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
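
/* Worked example (editorial, not in the original sources): two free
   BLKmode slots with (base_offset, full_size) of (0, 16) and (16, 8)
   are adjacent, since 0 + 16 == 16, so the loop above merges them into
   a single slot of full_size 24 that later requests can reuse.  */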
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on the remaining values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
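
/* Worked example (editorial, not in the original sources): if OLD is
   (plus (reg A) (const_int 8)) with no slot recorded for it, and NEW is
   (plus (reg B) (const_int 8)), the constants match, so the code above
   recurses on (reg A) / (reg B); if (reg A) is a recorded temp address,
   (reg B) is then added as an alias for the same slot.  */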
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
	 ? SAVE_EXPR_RTL (decl)
	 : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
			    decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fixup references to the parts only after we fixup references
	 to the whole CONCAT, lest we do double fixups for the latter
	 case.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
	 already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
	SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
	SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
	 Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
	{
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    return;
}
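
/* Editorial note with a concrete case (not in the original sources): a
   complex value, e.g. SCmode held as (concat (reg REAL) (reg IMAG)), has
   both pseudos spilled back-to-back by the CONCAT arm above, and the
   CONCAT rtx itself is rewritten in place into a MEM at the lower part's
   address, so existing references to the whole value keep working.  */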
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
     htab_t ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
	abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
				      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
			stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
			    stack->next != 0, may_share);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
				may_share);
	  end_sequence ();
	}
    }
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
	 pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
	 the three sequences they (potentially) contain, and process
	 them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;

	  /* Look at the Normal call, sibling call and tail recursion
	     sequences attached to the CALL_PLACEHOLDER.  */
	  for (i = 0; i < 3; i++)
	    {
	      rtx seq = XEXP (PATTERN (insn), i);
	      if (seq)
		{
		  push_to_sequence (seq);
		  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
					may_share);
		  XEXP (PATTERN (insn), i) = get_insns ();
		  end_sequence ();
		}
	    }
	}

      else if (INSN_P (insn))
	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
			     may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
			   unsignedp, 1, may_share);
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
	  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
	      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
		  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
	/* The REG_LIBCALL note will go away since we are going to
	   turn INSN into a NOTE, so just delete the
	   corresponding REG_RETVAL note.  */
	remove_note (XEXP (note, 0),
		     find_reg_note (XEXP (note, 0), REG_RETVAL,
				    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
	   && (set = single_set (insn)) != 0
	   && SET_DEST (set) == var
	   /* If this represents the result of an insn group,
	      don't delete the insn.  */
	   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	   && (rtx_equal_p (SET_SRC (set), var)
	       || (GET_CODE (SET_SRC (set)) == REG
		   && (prev = prev_nonnote_insn (insn)) != 0
		   && (prev_set = single_set (prev)) != 0
		   && SET_DEST (prev_set) == SET_SRC (set)
		   && rtx_equal_p (SET_SRC (prev_set), var))))
    delete_insn (insn);

  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
	{
	  /* If the insn that copies the results of a CALL_INSN
	     into a pseudo now references VAR, we have to use an
	     intermediate pseudo since we want the life of the
	     return value register to be only a single insn.

	     If we don't use an intermediate pseudo, such things as
	     address computations to make the address of VAR valid
	     if it is not can be placed between the CALL_INSN and INSN.

	     To make sure this doesn't happen, we record the destination
	     of the CALL_INSN and see if the next insn uses both that
	     and VAR.  */

	  if (call_dest != 0 && GET_CODE (insn) == INSN
	      && reg_mentioned_p (var, PATTERN (insn))
	      && reg_mentioned_p (call_dest, PATTERN (insn)))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

	      emit_insn_before (gen_move_insn (temp, call_dest), insn);

	      PATTERN (insn) = replace_rtx (PATTERN (insn),
					    call_dest, temp);
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == SET)
	    call_dest = SET_DEST (PATTERN (insn));
	  else if (GET_CODE (insn) == CALL_INSN
		   && GET_CODE (PATTERN (insn)) == PARALLEL
		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	  else
	    call_dest = 0;
	}

      /* See if we have to do anything to INSN now that VAR is in
	 memory.  If it needs to be loaded into a pseudo, use a single
	 pseudo for the entire insn in case there is a MATCH_DUP
	 between two operands.  We pass a pointer to the head of
	 a list of struct fixup_replacements.  If fixup_var_refs_1
	 needs to allocate pseudos or replacement MEMs (for SUBREGs),
	 it will record them in this list.

	 If it allocated a pseudo for any replacement, we copy into
	 it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
			&replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
	 after it to fix it up, then we must set last_parm_insn to
	 the last such instruction emitted.  */
      if (insn == last_parm_insn)
	last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
	{
	  struct fixup_replacement *next;

	  if (GET_CODE (replacements->new) == REG)
	    {
	      rtx insert_before;
	      rtx seq;

	      /* OLD might be a (subreg (mem)).  */
	      if (GET_CODE (replacements->old) == SUBREG)
		replacements->old
		  = fixup_memory_subreg (replacements->old, insn,
					 promoted_mode, 0);
	      else
		replacements->old
		  = fixup_stack_1 (replacements->old, insn);

	      insert_before = insn;

	      /* If we are changing the mode, do a conversion.
		 This might be wasteful, but combine.c will
		 eliminate much of the waste.  */

	      if (GET_MODE (replacements->new)
		  != GET_MODE (replacements->old))
		{
		  start_sequence ();
		  convert_move (replacements->new,
				replacements->old, unsignedp);
		  seq = get_insns ();
		  end_sequence ();
		}
	      else
		seq = gen_move_insn (replacements->new,
				     replacements->old);

	      emit_insn_before (seq, insert_before);
	    }

	  next = replacements->next;
	  free (replacements);
	  replacements = next;
	}
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
	XEXP (note, 0)
	  = walk_fixup_memory_subreg (XEXP (note, 0), insn,
				      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}
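
/* Editorial example of the no-op deletion case above (not in the original
   sources): once VAR lives in its arglist slot, a pair like

       (set (reg 117) (mem:SI VAR-home))
       (set (mem:SI VAR-home) (reg 117))

   merely stores VAR back into its own home, so the second insn (like a
   direct VAR := VAR copy) is deleted.  */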
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insn will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
     rtx var;
     enum machine_mode promoted_mode;
     rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
     rtx no_share;
{
  int i;
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = get_insns ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  sub = force_operand (sub, y);
		  if (sub != y)
		    emit_insn (gen_move_insn (y, sub));
		  seq = get_insns ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, no_share);

	  *loc = x = replacement->new;
	  code = GET_CODE (x);
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
2011 /* Note that in some cases those types of expressions are altered
2012 by optimize_bit_field, and do not survive to get here. */
2013 if (XEXP (x, 0) == var
2014 || (GET_CODE (XEXP (x, 0)) == SUBREG
2015 && SUBREG_REG (XEXP (x, 0)) == var))
2017 /* Get TEM as a valid MEM in the mode presently in the insn.
2019 We don't worry about the possibility of MATCH_DUP here; it
2020 is highly unlikely and would be tricky to handle. */
2023 if (GET_CODE (tem) == SUBREG)
2025 if (GET_MODE_BITSIZE (GET_MODE (tem))
2026 > GET_MODE_BITSIZE (GET_MODE (var)))
2028 replacement = find_fixup_replacement (replacements, var);
2029 if (replacement->new == 0)
2030 replacement->new = gen_reg_rtx (GET_MODE (var));
2031 SUBREG_REG (tem) = replacement->new;
2033 /* The following code works only if we have a MEM, so we
2034 need to handle the subreg here. We directly substitute
2035 it assuming that a subreg must be OK here. We already
2036 scheduled a replacement to copy the mem into the
2042 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2045 tem = fixup_stack_1 (tem, insn);
2047 /* Unless we want to load from memory, get TEM into the proper mode
2048 for an extract from memory. This can only be done if the
2049 extract is at a constant position and length. */
2051 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2052 && GET_CODE (XEXP (x, 2)) == CONST_INT
2053 && ! mode_dependent_address_p (XEXP (tem, 0))
2054 && ! MEM_VOLATILE_P (tem))
2056 enum machine_mode wanted_mode = VOIDmode;
2057 enum machine_mode is_mode = GET_MODE (tem);
2058 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2060 if (GET_CODE (x) == ZERO_EXTRACT)
2062 enum machine_mode new_mode
2063 = mode_for_extraction (EP_extzv, 1);
2064 if (new_mode != MAX_MACHINE_MODE)
2065 wanted_mode = new_mode;
2067 else if (GET_CODE (x) == SIGN_EXTRACT)
2069 enum machine_mode new_mode
2070 = mode_for_extraction (EP_extv, 1);
2071 if (new_mode != MAX_MACHINE_MODE)
2072 wanted_mode = new_mode;
2075 /* If we have a narrower mode, we can do something. */
2076 if (wanted_mode != VOIDmode
2077 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2079 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2080 rtx old_pos = XEXP (x, 2);
2081 rtx newmem;
2083 /* If the bytes and bits are counted differently, we
2084 must adjust the offset. */
2085 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2086 offset = (GET_MODE_SIZE (is_mode)
2087 - GET_MODE_SIZE (wanted_mode) - offset);
2089 pos %= GET_MODE_BITSIZE (wanted_mode);
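/* A worked example (numbers hypothetical, not from the original
   sources): extracting at bit position POS = 17 from a 4-byte IS_MODE
   with a 1-byte WANTED_MODE gives OFFSET = 17 / 8 = 2.  If bytes and
   bits are numbered from opposite ends, the byte actually wanted is
   4 - 1 - 2 = 1 from the low address, and POS becomes 17 % 8 = 1
   within that byte.  */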
2091 newmem = adjust_address_nv (tem, wanted_mode, offset);
2093 /* Make the change and see if the insn remains valid. */
2094 INSN_CODE (insn) = -1;
2095 XEXP (x, 0) = newmem;
2096 XEXP (x, 2) = GEN_INT (pos);
2098 if (recog_memoized (insn) >= 0)
2099 return;
2101 /* Otherwise, restore old position. XEXP (x, 0) will be
2102 restored later. */
2103 XEXP (x, 2) = old_pos;
2107 /* If we get here, the bitfield extract insn can't accept a memory
2108 reference. Copy the input into a register. */
2110 tem1 = gen_reg_rtx (GET_MODE (tem));
2111 emit_insn_before (gen_move_insn (tem1, tem), insn);
2112 XEXP (x, 0) = tem1;
2113 return;
2118 if (SUBREG_REG (x) == var)
2120 /* If this is a special SUBREG made because VAR was promoted
2121 from a wider mode, replace it with VAR and call ourselves
2122 recursively, this time saying that the object previously
2123 had its current mode (by virtue of the SUBREG). */
2125 if (SUBREG_PROMOTED_VAR_P (x))
2126 {
2127 *loc = var;
2128 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2129 no_share);
2130 return;
2131 }
2133 /* If this SUBREG makes VAR wider, it has become a paradoxical
2134 SUBREG with VAR in memory, but these aren't allowed at this
2135 stage of the compilation. So load VAR into a pseudo and take
2136 a SUBREG of that pseudo. */
2137 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2139 replacement = find_fixup_replacement (replacements, var);
2140 if (replacement->new == 0)
2141 replacement->new = gen_reg_rtx (promoted_mode);
2142 SUBREG_REG (x) = replacement->new;
2143 return;
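/* For instance (modes illustrative): once VAR lives in memory,
   (subreg:DI (mem:SI ...) 0) would be paradoxical, so the SUBREG is
   rewritten as (subreg:DI (reg:SI NEW) 0) and the scheduled
   replacement later copies VAR's stack slot into NEW.  */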
2146 /* See if we have already found a replacement for this SUBREG.
2147 If so, use it. Otherwise, make a MEM and see if the insn
2148 is recognized. If not, or if we should force MEM into a register,
2149 make a pseudo for this SUBREG. */
2150 replacement = find_fixup_replacement (replacements, x);
2151 if (replacement->new)
2152 {
2153 *loc = replacement->new;
2154 return;
2155 }
2157 replacement->new = *loc = fixup_memory_subreg (x, insn,
2158 promoted_mode, 0);
2160 INSN_CODE (insn) = -1;
2161 if (! flag_force_mem && recog_memoized (insn) >= 0)
2162 return;
2164 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2170 /* First do special simplification of bit-field references. */
2171 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2172 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2173 optimize_bit_field (x, insn, 0);
2174 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2175 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2176 optimize_bit_field (x, insn, 0);
2178 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2179 into a register and then store it back out. */
2180 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2181 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2182 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2183 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2184 > GET_MODE_SIZE (GET_MODE (var))))
2186 replacement = find_fixup_replacement (replacements, var);
2187 if (replacement->new == 0)
2188 replacement->new = gen_reg_rtx (GET_MODE (var));
2190 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2191 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2194 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2195 insn into a pseudo and store the low part of the pseudo into VAR. */
2196 if (GET_CODE (SET_DEST (x)) == SUBREG
2197 && SUBREG_REG (SET_DEST (x)) == var
2198 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2199 > GET_MODE_SIZE (GET_MODE (var))))
2201 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2202 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2203 tem)),
2204 insn);
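/* Illustrative RTL for this rewrite (modes hypothetical): the insn
       (set (subreg:DI (mem:SI SLOT) 0) SRC)
   becomes
       (set (reg:DI TEM) SRC)
   with roughly
       (set (mem:SI SLOT) (subreg:SI (reg:DI TEM) 0))
   emitted just after it, so VAR still receives the low part of the
   result.  */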
2209 rtx dest = SET_DEST (x);
2210 rtx src = SET_SRC (x);
2211 rtx outerdest = dest;
2213 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2214 || GET_CODE (dest) == SIGN_EXTRACT
2215 || GET_CODE (dest) == ZERO_EXTRACT)
2216 dest = XEXP (dest, 0);
2218 if (GET_CODE (src) == SUBREG)
2219 src = SUBREG_REG (src);
2221 /* If VAR does not appear at the top level of the SET
2222 just scan the lower levels of the tree. */
2224 if (src != var && dest != var)
2225 break;
2227 /* We will need to rerecognize this insn. */
2228 INSN_CODE (insn) = -1;
2230 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2231 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2233 /* Since this case will return, ensure we fixup all the
2234 operands here. */
2235 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2236 insn, replacements, no_share);
2237 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2238 insn, replacements, no_share);
2239 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2240 insn, replacements, no_share);
2242 tem = XEXP (outerdest, 0);
2244 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2245 that may appear inside a ZERO_EXTRACT.
2246 This was legitimate when the MEM was a REG. */
2247 if (GET_CODE (tem) == SUBREG
2248 && SUBREG_REG (tem) == var)
2249 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2251 tem = fixup_stack_1 (tem, insn);
2253 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2254 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2255 && ! mode_dependent_address_p (XEXP (tem, 0))
2256 && ! MEM_VOLATILE_P (tem))
2258 enum machine_mode wanted_mode;
2259 enum machine_mode is_mode = GET_MODE (tem);
2260 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2262 wanted_mode = mode_for_extraction (EP_insv, 0);
2264 /* If we have a narrower mode, we can do something. */
2265 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2267 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2268 rtx old_pos = XEXP (outerdest, 2);
2269 rtx newmem;
2271 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2272 offset = (GET_MODE_SIZE (is_mode)
2273 - GET_MODE_SIZE (wanted_mode) - offset);
2275 pos %= GET_MODE_BITSIZE (wanted_mode);
2277 newmem = adjust_address_nv (tem, wanted_mode, offset);
2279 /* Make the change and see if the insn remains valid. */
2280 INSN_CODE (insn) = -1;
2281 XEXP (outerdest, 0) = newmem;
2282 XEXP (outerdest, 2) = GEN_INT (pos);
2284 if (recog_memoized (insn) >= 0)
2285 return;
2287 /* Otherwise, restore old position. XEXP (x, 0) will be
2288 restored later. */
2289 XEXP (outerdest, 2) = old_pos;
2293 /* If we get here, the bit-field store doesn't allow memory
2294 or isn't located at a constant position. Load the value into
2295 a register, do the store, and put it back into memory. */
2297 tem1 = gen_reg_rtx (GET_MODE (tem));
2298 emit_insn_before (gen_move_insn (tem1, tem), insn);
2299 emit_insn_after (gen_move_insn (tem, tem1), insn);
2300 XEXP (outerdest, 0) = tem1;
2304 /* STRICT_LOW_PART is a no-op on memory references
2305 and it can cause combinations to be unrecognizable,
2306 so eliminate it. */
2308 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2309 SET_DEST (x) = XEXP (SET_DEST (x), 0);
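/* E.g. (illustrative): (set (strict_low_part (mem:QI A)) SRC) simply
   becomes (set (mem:QI A) SRC); a QImode store to memory already
   affects only those bytes, so nothing is lost by dropping the
   wrapper.  */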
2311 /* A valid insn to copy VAR into or out of a register
2312 must be left alone, to avoid an infinite loop here.
2313 If the reference to VAR is by a subreg, fix that up,
2314 since SUBREG is not valid for a memref.
2315 Also fix up the address of the stack slot.
2317 Note that we must not try to recognize the insn until
2318 after we know that we have valid addresses and no
2319 (subreg (mem ...) ...) constructs, since these interfere
2320 with determining the validity of the insn. */
2322 if ((SET_SRC (x) == var
2323 || (GET_CODE (SET_SRC (x)) == SUBREG
2324 && SUBREG_REG (SET_SRC (x)) == var))
2325 && (GET_CODE (SET_DEST (x)) == REG
2326 || (GET_CODE (SET_DEST (x)) == SUBREG
2327 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2328 && GET_MODE (var) == promoted_mode
2329 && x == single_set (insn))
2333 if (GET_CODE (SET_SRC (x)) == SUBREG
2334 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2335 > GET_MODE_SIZE (GET_MODE (var))))
2337 /* This (subreg VAR) is now a paradoxical subreg. We need
2338 to replace VAR instead of the subreg. */
2339 replacement = find_fixup_replacement (replacements, var);
2340 if (replacement->new == NULL_RTX)
2341 replacement->new = gen_reg_rtx (GET_MODE (var));
2342 SUBREG_REG (SET_SRC (x)) = replacement->new;
2346 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2347 if (replacement->new)
2348 SET_SRC (x) = replacement->new;
2349 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2350 SET_SRC (x) = replacement->new
2351 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2352 0);
2353 else
2354 SET_SRC (x) = replacement->new
2355 = fixup_stack_1 (SET_SRC (x), insn);
2358 if (recog_memoized (insn) >= 0)
2359 return;
2361 /* INSN is not valid, but we know that we want to
2362 copy SET_SRC (x) to SET_DEST (x) in some way. So
2363 we generate the move and see whether it requires more
2364 than one insn. If it does, we emit those insns and
2365 delete INSN. Otherwise, we can just replace the pattern
2366 of INSN; we have already verified above that INSN has
2367 no other function than to do X. */
2369 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2370 if (NEXT_INSN (pat) != NULL_RTX)
2371 {
2372 last = emit_insn_before (pat, insn);
2374 /* INSN might have REG_RETVAL or other important notes, so
2375 we need to store the pattern of the last insn in the
2376 sequence into INSN similarly to the normal case. LAST
2377 should not have REG_NOTES, but we allow them if INSN has
2378 no REG_NOTES. */
2379 if (REG_NOTES (last) && REG_NOTES (insn))
2380 abort ();
2381 if (REG_NOTES (last))
2382 REG_NOTES (insn) = REG_NOTES (last);
2383 PATTERN (insn) = PATTERN (last);
2384 delete_insn (last);
2385 }
2386 else
2388 PATTERN (insn) = PATTERN (pat);
2390 return;
2393 if ((SET_DEST (x) == var
2394 || (GET_CODE (SET_DEST (x)) == SUBREG
2395 && SUBREG_REG (SET_DEST (x)) == var))
2396 && (GET_CODE (SET_SRC (x)) == REG
2397 || (GET_CODE (SET_SRC (x)) == SUBREG
2398 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2399 && GET_MODE (var) == promoted_mode
2400 && x == single_set (insn))
2404 if (GET_CODE (SET_DEST (x)) == SUBREG)
2405 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2406 promoted_mode, 0);
2407 else
2408 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2410 if (recog_memoized (insn) >= 0)
2411 return;
2413 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2414 if (NEXT_INSN (pat) != NULL_RTX)
2415 {
2416 last = emit_insn_before (pat, insn);
2418 /* INSN might have REG_RETVAL or other important notes, so
2419 we need to store the pattern of the last insn in the
2420 sequence into INSN similarly to the normal case. LAST
2421 should not have REG_NOTES, but we allow them if INSN has
2422 no REG_NOTES. */
2423 if (REG_NOTES (last) && REG_NOTES (insn))
2424 abort ();
2425 if (REG_NOTES (last))
2426 REG_NOTES (insn) = REG_NOTES (last);
2427 PATTERN (insn) = PATTERN (last);
2428 delete_insn (last);
2429 }
2430 else
2432 PATTERN (insn) = PATTERN (pat);
2434 return;
2437 /* Otherwise, storing into VAR must be handled specially
2438 by storing into a temporary and copying that into VAR
2439 with a new insn after this one. Note that this case
2440 will be used when storing into a promoted scalar since
2441 the insn will now have different modes on the input
2442 and output and hence will be invalid (except for the case
2443 of setting it to a constant, which does not need any
2444 change if it is valid). We generate extra code in that case,
2445 but combine.c will eliminate it. */
2450 rtx fixeddest = SET_DEST (x);
2451 enum machine_mode temp_mode;
2452 rtx temp;
2453 /* STRICT_LOW_PART can be discarded around a MEM. */
2454 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2455 fixeddest = XEXP (fixeddest, 0);
2456 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2457 if (GET_CODE (fixeddest) == SUBREG)
2458 {
2459 fixeddest = fixup_memory_subreg (fixeddest, insn,
2460 promoted_mode, 0);
2461 temp_mode = GET_MODE (fixeddest);
2462 }
2463 else
2464 {
2465 fixeddest = fixup_stack_1 (fixeddest, insn);
2466 temp_mode = promoted_mode;
2467 }
2469 temp = gen_reg_rtx (temp_mode);
2471 emit_insn_after (gen_move_insn (fixeddest,
2472 gen_lowpart (GET_MODE (fixeddest),
2473 temp)),
2474 insn);
2476 SET_DEST (x) = temp;
2484 /* Nothing special about this RTX; fix its operands. */
2486 fmt = GET_RTX_FORMAT (code);
2487 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2489 if (fmt[i] == 'e')
2490 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2491 no_share);
2492 else if (fmt[i] == 'E')
2495 for (j = 0; j < XVECLEN (x, i); j++)
2496 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2497 insn, replacements, no_share);
2502 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2503 The REG was placed on the stack, so X now has the form (SUBREG:m1
2504 (MEM:m2 ...)).
2506 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2507 must be emitted to compute NEWADDR, put them before INSN.
2509 UNCRITICAL nonzero means accept paradoxical subregs.
2510 This is used for subregs found inside REG_NOTES. */
2513 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2516 enum machine_mode promoted_mode;
2520 rtx mem = SUBREG_REG (x);
2521 rtx addr = XEXP (mem, 0);
2522 enum machine_mode mode = GET_MODE (x);
2523 HOST_WIDE_INT offset;
2524 rtx result, seq;
2525 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2526 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2527 abort ();
2529 offset = SUBREG_BYTE (x);
2530 if (BYTES_BIG_ENDIAN)
2531 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2532 the offset so that it points to the right location within the
2533 MEM. */
2534 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2536 if (!flag_force_addr
2537 && memory_address_p (mode, plus_constant (addr, offset)))
2538 /* Shortcut if no insns need be emitted. */
2539 return adjust_address (mem, mode, offset);
2541 start_sequence ();
2542 result = adjust_address (mem, mode, offset);
2543 seq = get_insns ();
2544 end_sequence ();
2546 emit_insn_before (seq, insn);
2547 return result;
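/* An example of the transformation this function performs (registers
   and offsets purely illustrative): (subreg:SI (mem:DI (reg A)) 4)
   becomes (mem:SI (plus (reg A) (const_int 4))), with the address
   computed into a temporary first when that sum is not a valid
   address on the target.  */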
2550 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2551 Replace subexpressions of X in place.
2552 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2553 Otherwise return X, with its contents possibly altered.
2555 INSN, PROMOTED_MODE and UNCRITICAL are as for
2556 fixup_memory_subreg. */
2559 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2562 enum machine_mode promoted_mode;
2572 code = GET_CODE (x);
2574 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2575 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2577 /* Nothing special about this RTX; fix its operands. */
2579 fmt = GET_RTX_FORMAT (code);
2580 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2582 if (fmt[i] == 'e')
2583 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2584 promoted_mode, uncritical);
2585 else if (fmt[i] == 'E')
2588 for (j = 0; j < XVECLEN (x, i); j++)
2589 XVECEXP (x, i, j)
2590 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2591 promoted_mode, uncritical);
2597 /* For each memory ref within X, if it refers to a stack slot
2598 with an out of range displacement, put the address in a temp register
2599 (emitting new insns before INSN to load these registers)
2600 and alter the memory ref to use that register.
2601 Replace each such MEM rtx with a copy, to avoid clobberage. */
2604 fixup_stack_1 (x, insn)
2609 RTX_CODE code = GET_CODE (x);
2614 rtx ad = XEXP (x, 0);
2615 /* If we have the address of a stack slot but it's not valid
2616 (displacement is too large), compute the sum in a register. */
2617 if (GET_CODE (ad) == PLUS
2618 && GET_CODE (XEXP (ad, 0)) == REG
2619 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2620 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2621 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2622 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2623 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2624 #endif
2625 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2626 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2627 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2628 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2630 rtx temp, seq;
2631 if (memory_address_p (GET_MODE (x), ad))
2632 return x;
2634 start_sequence ();
2635 temp = copy_to_reg (ad);
2636 seq = get_insns ();
2637 end_sequence ();
2638 emit_insn_before (seq, insn);
2639 return replace_equiv_address (x, temp);
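/* Sketch (displacement hypothetical): if (mem:SI (plus (reg fp)
   (const_int 40000))) is not a valid address on the target, we emit
   (set (reg TMP) (plus (reg fp) (const_int 40000))) before INSN and
   the reference becomes (mem:SI (reg TMP)).  */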
2644 fmt = GET_RTX_FORMAT (code);
2645 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2647 if (fmt[i] == 'e')
2648 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2649 else if (fmt[i] == 'E')
2652 for (j = 0; j < XVECLEN (x, i); j++)
2653 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2659 /* Optimization: a bit-field instruction whose field
2660 happens to be a byte or halfword in memory
2661 can be changed to a move instruction.
2663 We call here when INSN is an insn to examine or store into a bit-field.
2664 BODY is the SET-rtx to be altered.
2666 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2667 (Currently this is called only from function.c, and EQUIV_MEM
2668 is always 0.) */
2671 optimize_bit_field (body, insn, equiv_mem)
2679 enum machine_mode mode;
2681 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2682 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2683 bitfield = SET_DEST (body), destflag = 1;
2684 else
2685 bitfield = SET_SRC (body), destflag = 0;
2687 /* First check that the field being stored has constant size and position
2688 and is in fact a byte or halfword suitably aligned. */
2690 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2691 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2692 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2693 != BLKmode)
2694 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2698 /* Now check that the containing word is memory, not a register,
2699 and that it is safe to change the machine mode. */
2701 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2702 memref = XEXP (bitfield, 0);
2703 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2704 && equiv_mem != 0)
2705 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2706 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2707 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2708 memref = SUBREG_REG (XEXP (bitfield, 0));
2709 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2710 && equiv_mem != 0
2711 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2712 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2714 if (memref
2715 && ! mode_dependent_address_p (XEXP (memref, 0))
2716 && ! MEM_VOLATILE_P (memref))
2718 /* Now adjust the address, first for any subreg'ing
2719 that we are now getting rid of,
2720 and then for which byte of the word is wanted. */
2722 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2723 rtx insns;
2725 /* Adjust OFFSET to count bits from low-address byte. */
2726 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2727 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2728 - offset - INTVAL (XEXP (bitfield, 1)));
2730 /* Adjust OFFSET to count bytes from low-address byte. */
2731 offset /= BITS_PER_UNIT;
2732 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2734 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2735 / UNITS_PER_WORD) * UNITS_PER_WORD;
2736 if (BYTES_BIG_ENDIAN)
2737 offset -= (MIN (UNITS_PER_WORD,
2738 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2739 - MIN (UNITS_PER_WORD,
2740 GET_MODE_SIZE (GET_MODE (memref))));
2743 start_sequence ();
2744 memref = adjust_address (memref, mode, offset);
2745 insns = get_insns ();
2746 end_sequence ();
2747 emit_insn_before (insns, insn);
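/* A worked instance (values illustrative, assuming bit position 16
   and width 8 pass the checks above, with matching bit and byte
   endianness): MODE is QImode, OFFSET is 16 / 8 = 2, and a
   (sign_extract:SI (mem:SI A) (const_int 8) (const_int 16)) collapses
   into a plain (mem:QI (plus A 2)), i.e. an ordinary byte load.  */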
2749 /* Store this memory reference where
2750 we found the bit field reference. */
2754 validate_change (insn, &SET_DEST (body), memref, 1);
2755 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2757 rtx src = SET_SRC (body);
2758 while (GET_CODE (src) == SUBREG
2759 && SUBREG_BYTE (src) == 0)
2760 src = SUBREG_REG (src);
2761 if (GET_MODE (src) != GET_MODE (memref))
2762 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2763 validate_change (insn, &SET_SRC (body), src, 1);
2765 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2766 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2767 /* This shouldn't happen because anything that didn't have
2768 one of these modes should have got converted explicitly
2769 and then referenced through a subreg.
2770 This is so because the original bit-field was
2771 handled by agg_mode and so its tree structure had
2772 the same mode that memref now has. */
2773 abort ();
2777 rtx dest = SET_DEST (body);
2779 while (GET_CODE (dest) == SUBREG
2780 && SUBREG_BYTE (dest) == 0
2781 && (GET_MODE_CLASS (GET_MODE (dest))
2782 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2783 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2784 <= UNITS_PER_WORD))
2785 dest = SUBREG_REG (dest);
2787 validate_change (insn, &SET_DEST (body), dest, 1);
2789 if (GET_MODE (dest) == GET_MODE (memref))
2790 validate_change (insn, &SET_SRC (body), memref, 1);
2793 /* Convert the mem ref to the destination mode. */
2794 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2796 start_sequence ();
2797 convert_move (newreg, memref,
2798 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2799 seq = get_insns ();
2800 end_sequence ();
2802 validate_change (insn, &SET_SRC (body), newreg, 1);
2806 /* See if we can convert this extraction or insertion into
2807 a simple move insn. We might not be able to do so if this
2808 was, for example, part of a PARALLEL.
2810 If we succeed, write out any needed conversions. If we fail,
2811 it is hard to guess why we failed, so don't do anything
2812 special; just let the optimization be suppressed. */
2814 if (apply_change_group () && seq)
2815 emit_insn_before (seq, insn);
2820 /* These routines are responsible for converting virtual register references
2821 to the actual hard register references once RTL generation is complete.
2823 The following five variables are used for communication between the
2824 routines. They contain the offsets of the virtual registers from their
2825 respective hard registers. */
2827 static int in_arg_offset;
2828 static int var_offset;
2829 static int dynamic_offset;
2830 static int out_arg_offset;
2831 static int cfa_offset;
2833 /* In most machines, the stack pointer register is equivalent to the bottom
2834 of the stack. */
2836 #ifndef STACK_POINTER_OFFSET
2837 #define STACK_POINTER_OFFSET 0
2840 /* If not defined, pick an appropriate default for the offset of dynamically
2841 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2842 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2844 #ifndef STACK_DYNAMIC_OFFSET
2846 /* The bottom of the stack points to the actual arguments. If
2847 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2848 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2849 stack space for register parameters is not pushed by the caller, but
2850 rather part of the fixed stack areas and hence not included in
2851 `current_function_outgoing_args_size'. Nevertheless, we must allow
2852 for it when allocating stack dynamic objects. */
2854 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2855 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2856 ((ACCUMULATE_OUTGOING_ARGS \
2857 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2858 + (STACK_POINTER_OFFSET)) \
2861 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2862 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2863 + (STACK_POINTER_OFFSET))
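/* For instance (numbers hypothetical): with ACCUMULATE_OUTGOING_ARGS,
   16 bytes of outgoing arguments, and a STACK_POINTER_OFFSET of 8,
   dynamically allocated objects would start 16 + 8 = 24 bytes from
   the stack pointer under the second definition above.  */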
2867 /* On most machines, the CFA coincides with the first incoming parm. */
2869 #ifndef ARG_POINTER_CFA_OFFSET
2870 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2873 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2874 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2875 register, for later use if we do need to force REG into the stack. REG is
2876 overwritten by the MEM like in put_reg_into_stack. */
2879 gen_mem_addressof (reg, decl)
2883 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2884 REGNO (reg), decl);
2886 /* Calculate this before we start messing with decl's RTL. */
2887 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2889 /* If the original REG was a user-variable, then so is the REG whose
2890 address is being taken. Likewise for unchanging. */
2891 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2892 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2894 PUT_CODE (reg, MEM);
2895 MEM_ATTRS (reg) = 0;
2896 XEXP (reg, 0) = r;
2900 tree type = TREE_TYPE (decl);
2901 enum machine_mode decl_mode
2902 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2903 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2904 : DECL_RTL_IF_SET (decl));
2906 PUT_MODE (reg, decl_mode);
2908 /* Clear DECL_RTL momentarily so functions below will work
2909 properly, then set it again. */
2910 if (DECL_P (decl) && decl_rtl == reg)
2911 SET_DECL_RTL (decl, 0);
2913 set_mem_attributes (reg, decl, 1);
2914 set_mem_alias_set (reg, set);
2916 if (DECL_P (decl) && decl_rtl == reg)
2917 SET_DECL_RTL (decl, reg);
2919 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2920 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2923 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
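/* A sketch of the result (register numbers hypothetical, 32-bit
   target assumed): for an `int i' whose address is taken while i
   lives in (reg:SI 60), the REG is rewritten in place into roughly
       (mem:SI (addressof:SI (reg:SI 61) 60 <var_decl i>))
   so existing references stay valid, and the ADDRESSOF can still be
   purged later if the address turns out not to be needed.  */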
2928 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2931 flush_addressof (decl)
2934 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2935 && DECL_RTL (decl) != 0
2936 && GET_CODE (DECL_RTL (decl)) == MEM
2937 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2938 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2939 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2942 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2945 put_addressof_into_stack (r, ht)
2949 tree decl, type;
2950 int volatile_p, used_p;
2952 rtx reg = XEXP (r, 0);
2954 if (GET_CODE (reg) != REG)
2955 abort ();
2957 decl = ADDRESSOF_DECL (r);
2960 type = TREE_TYPE (decl);
2961 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2962 && TREE_THIS_VOLATILE (decl));
2963 used_p = (TREE_USED (decl)
2964 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2973 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2974 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2977 /* List of replacements made below in purge_addressof_1 when creating
2978 bitfield insertions. */
2979 static rtx purge_bitfield_addressof_replacements;
2981 /* List of replacements made below in purge_addressof_1 for patterns
2982 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2983 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2984 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2985 enough in complex cases, e.g. when some field values can be
2986 extracted by using a MEM with a narrower mode. */
2987 static rtx purge_addressof_replacements;
2989 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2990 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2991 the stack. If the function returns FALSE then the replacement could not
2992 be completed. */
2995 purge_addressof_1 (loc, insn, force, store, ht)
3007 /* Re-start here to avoid recursion in common cases. */
3014 code = GET_CODE (x);
3016 /* If we don't return in any of the cases below, we will recurse inside
3017 the RTX, which will normally result in any ADDRESSOF being forced into
3018 memory. */
3019 if (code == SET)
3020 {
3021 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3022 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3023 return result;
3024 }
3025 else if (code == ADDRESSOF)
3029 if (GET_CODE (XEXP (x, 0)) != MEM)
3030 put_addressof_into_stack (x, ht);
3032 /* We must create a copy of the rtx because it was created by
3033 overwriting a REG rtx which is always shared. */
3034 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3035 if (validate_change (insn, loc, sub, 0)
3036 || validate_replace_rtx (x, sub, insn))
3037 return true;
3039 start_sequence ();
3040 sub = force_operand (sub, NULL_RTX);
3041 if (! validate_change (insn, loc, sub, 0)
3042 && ! validate_replace_rtx (x, sub, insn))
3043 abort ();
3045 insns = get_insns ();
3046 end_sequence ();
3047 emit_insn_before (insns, insn);
3051 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3053 rtx sub = XEXP (XEXP (x, 0), 0);
3055 if (GET_CODE (sub) == MEM)
3056 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3057 else if (GET_CODE (sub) == REG
3058 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3059 ;
3060 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3062 int size_x, size_sub;
3066 /* When processing REG_NOTES look at the list of
3067 replacements done on the insn to find the register that X
3068 was replaced by. */
3071 for (tem = purge_bitfield_addressof_replacements;
3072 tem != NULL_RTX;
3073 tem = XEXP (XEXP (tem, 1), 1))
3074 if (rtx_equal_p (x, XEXP (tem, 0)))
3075 {
3076 *loc = XEXP (XEXP (tem, 1), 0);
3077 return true;
3078 }
3080 /* See comment for purge_addressof_replacements. */
3081 for (tem = purge_addressof_replacements;
3082 tem != NULL_RTX;
3083 tem = XEXP (XEXP (tem, 1), 1))
3084 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3086 rtx z = XEXP (XEXP (tem, 1), 0);
3088 if (GET_MODE (x) == GET_MODE (z)
3089 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3090 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3091 abort ();
3093 /* It can happen that the note may speak of things
3094 in a wider (or just different) mode than the
3095 code did. This is especially true of
3096 REG_RETVAL. */
3098 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3099 z = SUBREG_REG (z);
3101 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3102 && (GET_MODE_SIZE (GET_MODE (x))
3103 > GET_MODE_SIZE (GET_MODE (z))))
3105 /* This can occur as a result of invalid
3106 pointer casts, e.g. float f; ...
3107 *(long long int *)&f.
3108 ??? We could emit a warning here, but
3109 without a line number that wouldn't be
3110 very useful. */
3111 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3113 else
3114 z = gen_lowpart (GET_MODE (x), z);
3120 /* Sometimes we may not be able to find the replacement. For
3121 example when the original insn was a MEM in a wider mode,
3122 and the note is part of a sign extension of a narrowed
3123 version of that MEM. Gcc testcase compile/990829-1.c can
3124 generate an example of this situation. Rather than complain
3125 we return false, which will prompt our caller to remove the
3126 offending note. */
3130 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3131 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3133 /* Don't even consider working with paradoxical subregs,
3134 or the moral equivalent seen here. */
3135 if (size_x <= size_sub
3136 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3138 /* Do a bitfield insertion to mirror what would happen
3139 in memory. */
3145 rtx p = PREV_INSN (insn);
3147 start_sequence ();
3148 val = gen_reg_rtx (GET_MODE (x));
3149 if (! validate_change (insn, loc, val, 0))
3151 /* Discard the current sequence and put the
3152 ADDRESSOF on the stack. */
3158 emit_insn_before (seq, insn);
3159 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3160 insn, ht);
3162 start_sequence ();
3163 store_bit_field (sub, size_x, 0, GET_MODE (x),
3164 val, GET_MODE_SIZE (GET_MODE (sub)));
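/* The effect (sizes hypothetical): storing an SImode value through
   the purged address into a DImode pseudo SUB becomes a 32-bit
   bit-field insertion at bit offset 0 within SUB, mirroring what the
   store would have done to the underlying stack slot.  */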
3166 /* Make sure to unshare any shared rtl that store_bit_field
3167 might have created. */
3168 unshare_all_rtl_again (get_insns ());
3170 seq = get_insns ();
3171 end_sequence ();
3172 p = emit_insn_after (seq, insn);
3173 if (NEXT_INSN (insn))
3174 compute_insns_for_mem (NEXT_INSN (insn),
3175 p ? NEXT_INSN (p) : NULL_RTX,
3176 ht);
3180 rtx p = PREV_INSN (insn);
3182 start_sequence ();
3183 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3184 GET_MODE (x), GET_MODE (x),
3185 GET_MODE_SIZE (GET_MODE (sub)));
3187 if (! validate_change (insn, loc, val, 0))
3189 /* Discard the current sequence and put the
3190 ADDRESSOF on the stack. */
3197 emit_insn_before (seq, insn);
3198 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3199 insn, ht);
3202 /* Remember the replacement so that the same one can be done
3203 on the REG_NOTES. */
3204 purge_bitfield_addressof_replacements
3205 = gen_rtx_EXPR_LIST (VOIDmode, x,
3206 gen_rtx_EXPR_LIST
3207 (VOIDmode, val,
3208 purge_bitfield_addressof_replacements));
3210 /* We replaced with a reg -- all done. */
3215 else if (validate_change (insn, loc, sub, 0))
3217 /* Remember the replacement so that the same one can be done
3218 on the REG_NOTES. */
3219 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3223 for (tem = purge_addressof_replacements;
3224 tem != NULL_RTX;
3225 tem = XEXP (XEXP (tem, 1), 1))
3226 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3227 {
3228 XEXP (XEXP (tem, 1), 0) = sub;
3229 return true;
3230 }
3231 purge_addressof_replacements
3232 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3233 gen_rtx_EXPR_LIST (VOIDmode, sub,
3234 purge_addressof_replacements));
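/* The replacement lists built here are chained pairwise (shape
   illustrative):
       (expr_list KEY1 (expr_list VALUE1 (expr_list KEY2 ...)))
   which is why the searches above read the key from XEXP (tem, 0),
   the value from XEXP (XEXP (tem, 1), 0), and advance with
   XEXP (XEXP (tem, 1), 1).  */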
3242 /* Scan all subexpressions. */
3243 fmt = GET_RTX_FORMAT (code);
3244 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3246 if (*fmt == 'e')
3247 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3248 else if (*fmt == 'E')
3249 for (j = 0; j < XVECLEN (x, i); j++)
3250 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3256 /* Return a hash value for K, a REG. */
3259 insns_for_mem_hash (k)
3262 /* Use the address of the key for the hash value. */
3263 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3264 return htab_hash_pointer (m->key);
3267 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3270 insns_for_mem_comp (k1, k2)
3274 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3275 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3276 return m1->key == m2->key;
3279 struct insns_for_mem_walk_info
3281 /* The hash table that we are using to record which INSNs use which
3282 REGs. */
3283 htab_t ht;
3285 /* The INSN we are currently processing. */
3286 rtx insn;
3288 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3289 to find the insns that use the REGs in the ADDRESSOFs. */
3290 int pass;
3291 };
3293 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3294 that might be used in an ADDRESSOF expression, record this INSN in
3295 the hash table given by DATA (which is really a pointer to an
3296 insns_for_mem_walk_info structure). */
3299 insns_for_mem_walk (r, data)
3303 struct insns_for_mem_walk_info *ifmwi
3304 = (struct insns_for_mem_walk_info *) data;
3305 struct insns_for_mem_entry tmp;
3306 tmp.insns = NULL_RTX;
3308 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3309 && GET_CODE (XEXP (*r, 0)) == REG)
3312 tmp.key = XEXP (*r, 0);
3313 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3316 *e = ggc_alloc (sizeof (tmp));
3317 memcpy (*e, &tmp, sizeof (tmp));
3320 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3322 struct insns_for_mem_entry *ifme;
3323 tmp.key = *r;
3324 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3326 /* If we have not already recorded this INSN, do so now. Since
3327 we process the INSNs in order, we know that if we have
3328 recorded it, it must be at the front of the list. */
3329 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3330 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3331 ifme->insns);
3337 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3338 which REGs in HT. */
3341 compute_insns_for_mem (insns, last_insn, ht)
3347 struct insns_for_mem_walk_info ifmwi;
3348 rtx insn;
3349 ifmwi.ht = ht;
3350 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3351 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3352 if (INSN_P (insn))
3353 {
3354 ifmwi.insn = insn;
3355 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3356 }
3359 /* Helper function for purge_addressof called through for_each_rtx.
3360 Returns true iff the rtl is an ADDRESSOF. */
3363 is_addressof (rtl, data)
3365 void *data ATTRIBUTE_UNUSED;
3367 return GET_CODE (*rtl) == ADDRESSOF;
3370 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3371 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3372 stack. */
3375 purge_addressof (insns)
3381 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3382 requires a fixup pass over the instruction stream to correct
3383 INSNs that depended on the REG being a REG, and not a MEM. But,
3384 these fixup passes are slow. Furthermore, most MEMs are not
3385 mentioned in very many instructions. So, we speed up the process
3386 by pre-calculating which REGs occur in which INSNs; that allows
3387 us to perform the fixup passes much more quickly. */
3388 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3389 compute_insns_for_mem (insns, NULL_RTX, ht);
3391 for (insn = insns; insn; insn = NEXT_INSN (insn))
3394 if (! purge_addressof_1 (&PATTERN (insn), insn,
3395 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3396 /* If we could not replace the ADDRESSOFs in the insn,
3397 something is wrong. */
3398 abort ();
3400 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3402 /* If we could not replace the ADDRESSOFs in the insn's notes,
3403 we can just remove the offending notes instead. */
3406 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3408 /* If we find a REG_RETVAL note then the insn is a libcall.
3409 Such insns must have REG_EQUAL notes as well, in order
3410 for later passes of the compiler to work. So it is not
3411 safe to delete the notes here, and instead we abort. */
3412 if (REG_NOTE_KIND (note) == REG_RETVAL)
3413 abort ();
3414 if (for_each_rtx (&note, is_addressof, NULL))
3415 remove_note (insn, note);
3421 purge_bitfield_addressof_replacements = 0;
3422 purge_addressof_replacements = 0;
3424 /* REGs are shared. purge_addressof will destructively replace a REG