/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */

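/* A hedged sketch of the typical life cycle seen from a pass (the calls
   below are the standard GIMPLE/SSA API; STMT and NEW_RHS are hypothetical
   placeholders):

       gimple_assign_set_rhs1 (stmt, new_rhs);   <-- change an operand in place
       update_stmt (stmt);                       <-- marks STMT modified and
                                                     rebuilds this operand cache

       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
         ...                                     <-- walks the rebuilt real uses

   where USE_P is a use_operand_p and ITER an ssa_op_iter.  Nothing in the
   cache is reliable between the modification and the call to update_stmt.  */
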
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

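/* As a worked example (a sketch; the SSA names below are hypothetical):
   for the load "tmp_3 = *p_1" the walker records a real DEF of tmp_3, a
   real USE of p_1 and a VUSE of the memory state; for the store
   "a.b[i_2] = tmp_3" it records real USEs of i_2 and tmp_3 and a VDEF of
   the memory state, because 'a' is not a GIMPLE register.  Inside an
   address computation such as "&a.b[i_2]" only the index i_2 is scanned,
   under opf_no_vops, since the memory is not actually accessed.  */
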
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Stack of statements to change.  Every call to
   push_stmt_changes pushes the stmt onto the stack.  Calls to
   pop_stmt_changes pop a stmt off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(gimple_p,heap) *scb_stack;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}

/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (VAR_DECL, get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

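/* In tree dumps with virtual operands enabled (TDF_VOPS), the SSA names of
   this variable show up attached to memory statements, roughly like the
   following sketch (the version numbers are hypothetical):

       # .MEM_5 = VDEF <.MEM_4>
       a.x = t_3;

       # VUSE <.MEM_5>
       t_6 = a.x;

   Every store defines a new version of .MEM and every load uses the
   current one, giving a single factored use-def chain over all memory.  */
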
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))

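/* The figures above assume 8-byte pointers: a def_optype_d is two pointers
   (16 bytes) and a use_optype_d roughly five (40 bytes), so the 1016 usable
   bytes of OP_SIZE_1 hold about 1016/16 = 63 defs or 1016/40 = 25 uses.  */
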
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (gimple_p, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (gimple_p, scb_stack) == 0);
      VEC_free (gimple_p, heap, scb_stack);
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:           %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:     %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:      %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:      %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:       %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}

/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = (struct ssa_operand_memory_d *)
            ggc_alloc (sizeof (void *)
                       + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}

/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}

/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}

/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}

/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}

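/* Note that link_imm_use_stmt above is what threads each use onto the
   immediate-use list of the SSA name it references.  A hedged sketch of
   how a pass consumes that list (NAME and REPLACEMENT are hypothetical):

       FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
         FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
           SET_USE (use_p, replacement);

   with use_stmt a gimple, iter an imm_use_iterator and use_p a
   use_operand_p; unlink_stmt_vdef at the end of this file uses exactly
   this pattern.  */
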
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}

/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}

/* Start the process of building up the operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}

/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}

/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}

/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

/* Add the base address of REF to SET.  */

static void
add_to_addressable_set (tree ref, bitmap *set)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*set == NULL)
        *set = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_set_bit (*set, DECL_UID (var));
      TREE_ADDRESSABLE (var) = 1;
    }
}

/* Add the base address of REF to the set of addresses taken by STMT.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  If the base address of REF is a decl that has sub-variables,
   also add all of its sub-variables.  */

static void
gimple_add_to_addresses_taken (gimple stmt, tree ref)
{
  gcc_assert (gimple_has_ops (stmt));
  add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt));
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}

/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}

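/* Concretely (a sketch of the effect of the flag tests above): a call to an
   ECF_CONST function gets no virtual operands at all, a call to an ECF_PURE
   or ECF_NORETURN function gets only a VUSE of .MEM, a call marked
   ECF_NOVOPS gets nothing, and every other call gets a VDEF (and its
   implied VUSE) because it may both read and write memory.  */
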
/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            gimple_add_to_addresses_taken (stmt, t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            gimple_add_to_addresses_taken (stmt, t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
        {
          add_virtual_operand (stmt, opf_def);
          break;
        }
    }
}

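/* For instance (a sketch; 'buf' and 'n' are hypothetical locals):

       asm volatile ("" : "=m" (buf) : "r" (n) : "memory");

   the "=m" output makes the base of 'buf' addressable and is scanned with
   opf_def, the "r" input becomes a plain real use of 'n', and the "memory"
   clobber adds a VDEF of .MEM, so the asm acts as a barrier in the virtual
   operand web.  */
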
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;

        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (TREE_CODE (ref) == INDIRECT_REF)
          {
            get_indirect_ref_operands (stmt, ref, flags, false);
            flags |= opf_no_vops;
          }

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_use);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case CHANGE_DYNAMIC_TYPE_EXPR:
      gcc_unreachable ();

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
    case FILTER_EXPR:
    case EXC_PTR_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
        {
          get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
          start = 1;
        }

      for (i = start; i < gimple_num_ops (stmt); i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
        maybe_add_call_vops (stmt);
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  gimple_set_has_volatile_ops (stmt, false);

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (gimple_addresses_taken (stmt))
    bitmap_clear (gimple_addresses_taken (stmt));

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_ops (stmt))
    gimple_set_addresses_taken (stmt, NULL);

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}

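/* Passes normally do not call update_stmt_operands directly; they go
   through update_stmt () (which forces a rebuild) or
   update_stmt_if_modified (), both of which end up here.  A hedged sketch
   of a typical caller (GSI and NEW_LHS are the caller's hypothetical
   locals):

       gimple stmt = gsi_stmt (gsi);
       gimple_call_set_lhs (stmt, new_lhs);
       update_stmt (stmt);
*/
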
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
        else
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges of VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Push *STMT_P on the SCB_STACK.  This function is deprecated, do not
   introduce new uses of it.  */

void
push_stmt_changes (gimple *stmt_p)
{
  gimple stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  VEC_safe_push (gimple_p, heap, scb_stack, stmt_p);
}

/* Pop the top stmt from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  This function is deprecated, do not introduce
   new uses of it.  */

void
pop_stmt_changes (gimple *stmt_p)
{
  gimple *stmt2_p, stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  stmt2_p = VEC_pop (gimple_p, scb_stack);
  gcc_assert (stmt_p == stmt2_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  This also will mark the virtual operand
     for renaming if necessary.  */
  update_stmt (stmt);
}

/* Discard the topmost stmt from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.
   This function is deprecated, do not introduce new uses of it.  */

void
discard_stmt_changes (gimple *stmt_p)
{
  gimple *stmt2_p, stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  stmt2_p = VEC_pop (gimple_p, scb_stack);
  gcc_assert (stmt_p == stmt2_p);
}

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}

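/* A hedged sketch of the typical caller, e.g. a DCE-style pass deleting a
   dead store at iterator GSI (GSI and STMT are the caller's locals):

       unlink_stmt_vdef (stmt);
       gsi_remove (&gsi, true);
       release_defs (stmt);

   i.e. first reroute all consumers of the store's VDEF to its VUSE, then
   take the statement out of the IL and release its SSA names.  */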