1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "langhooks.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
37 #include "diagnostic.h"
39 #include "pointer-set.h"
40 #include "tree-flow.h"
42 #include "tree-inline.h"
46 #include "tree-dump.h"
47 #include "tree-pass.h"
50 /* Pointer map of variable mappings, keyed by edge. */
/* NOTE(review): created lazily on first insertion (see
   redirect_edge_var_map_add) and destroyed/reset to NULL by
   redirect_edge_var_map_destroy.  */
51 static struct pointer_map_t *edge_var_maps;
54 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
/* NOTE(review): this listing is lossy -- the embedded line numbers skip
   (e.g. 67 -> 70, 77 -> 80), so the return type, braces, the "slot"
   declaration and some statements are missing from view.  Comments
   below describe only the code that is visible.  */
57 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
60 edge_var_map_vector old_head, head;
61 edge_var_map new_node;
/* Create the edge -> vector map lazily on first use.  */
63 if (edge_var_maps == NULL)
64 edge_var_maps = pointer_map_create ();
66 slot = pointer_map_insert (edge_var_maps, e);
67 old_head = head = (edge_var_map_vector) *slot;
/* Presumably executed only when no vector exists yet for this edge;
   the guarding condition is not visible in this extract.  */
70 head = VEC_alloc (edge_var_map, heap, 5);
74 new_node.result = result;
75 new_node.locus = locus;
/* NOTE(review): no assignment of new_node.def is visible here --
   likely lost in extraction; confirm against the original source.  */
77 VEC_safe_push (edge_var_map, heap, head, &new_node);
80 /* The push did some reallocation. Update the pointer map. */
86 /* Clear the var mappings in edge E. */
89 redirect_edge_var_map_clear (edge e)
92 edge_var_map_vector head;
/* NOTE(review): the early return when edge_var_maps is NULL, and the
   NULL check on SLOT, are not visible in this extract.  */
97 slot = pointer_map_contains (edge_var_maps, e);
101 head = (edge_var_map_vector) *slot;
/* Free the per-edge vector; the slot itself is presumably reset on a
   line not visible here.  */
102 VEC_free (edge_var_map, heap, head);
108 /* Duplicate the redirected var mappings in OLDE in NEWE.
110 Since we can't remove a mapping, let's just duplicate it. This assumes a
111 pointer_map can have multiple edges mapping to the same var_map (many to
112 one mapping), since we don't remove the previous mappings. */
115 redirect_edge_var_map_dup (edge newe, edge olde)
117 void **new_slot, **old_slot;
118 edge_var_map_vector head;
/* NOTE(review): the guard for edge_var_maps == NULL is not visible in
   this extract.  */
123 new_slot = pointer_map_insert (edge_var_maps, newe);
124 old_slot = pointer_map_contains (edge_var_maps, olde);
/* If OLDE had mappings, copy them into NEWE's slot; otherwise allocate
   a fresh vector.  The if/else itself is missing from this extract.  */
127 head = (edge_var_map_vector) *old_slot;
130 *new_slot = VEC_copy (edge_var_map, heap, head);
132 *new_slot = VEC_alloc (edge_var_map, heap, 5);
136 /* Return the variable mappings for a given edge. If there is none, return
140 redirect_edge_var_map_vector (edge e)
144 /* Hey, what kind of idiot would... you'd be surprised. */
/* NOTE(review): the NULL checks alluded to by the comment above (map
   not created, edge not present) are not visible in this extract.  */
148 slot = pointer_map_contains (edge_var_maps, e);
152 return (edge_var_map_vector) *slot;
155 /* Used by redirect_edge_var_map_destroy to free all memory. */
/* Callback for pointer_map_traverse: frees the per-edge vector stored
   in *VALUE.  NOTE(review): the VALUE parameter's declaration line is
   missing from this extract (only KEY and DATA are visible).  */
158 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
160 void *data ATTRIBUTE_UNUSED)
162 edge_var_map_vector head = (edge_var_map_vector) *value;
163 VEC_free (edge_var_map, heap, head);
167 /* Clear the edge variable mappings. */
170 redirect_edge_var_map_destroy (void)
/* Free every per-edge vector via the traversal callback, destroy the
   map, and reset the global so a later add can re-create it.  */
174 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
175 pointer_map_destroy (edge_var_maps);
176 edge_var_maps = NULL;
181 /* Remove the corresponding arguments from the PHI nodes in E's
182 destination block and redirect it to DEST. Return redirected edge.
183 The list of removed arguments is stored in a vector accessed
184 through edge_var_maps. */
187 ssa_redirect_edge (edge e, basic_block dest)
189 gimple_stmt_iterator gsi;
/* Drop mappings left over from any previous redirection of E.  */
192 redirect_edge_var_map_clear (e);
194 /* Remove the appropriate PHI arguments in E's destination block. */
195 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
198 source_location locus ;
200 phi = gsi_stmt (gsi);
201 def = gimple_phi_arg_def (phi, e->dest_idx);
202 locus = gimple_phi_arg_location (phi, e->dest_idx);
204 if (def == NULL_TREE)
/* Record the removed argument so flush_pending_stmts can re-add it
   after the redirection.  */
207 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
210 e = redirect_edge_succ_nodup (e, dest);
/* NOTE(review): declarations of PHI/DEF and the final "return e;"
   appear to be missing from this extract.  */
216 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
220 flush_pending_stmts (edge e)
223 edge_var_map_vector v;
226 gimple_stmt_iterator gsi;
/* Fetch the mappings recorded by ssa_redirect_edge for E; the early
   return when there are none is not visible in this extract.  */
228 v = redirect_edge_var_map_vector (e);
/* Walk the destination block's PHIs in lockstep with the recorded
   vector, re-adding each argument on edge E with its location.  */
232 for (gsi = gsi_start_phis (e->dest), i = 0;
233 !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
234 gsi_next (&gsi), i++)
238 phi = gsi_stmt (gsi);
239 def = redirect_edge_var_map_def (vm);
240 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
/* The queued mappings are consumed; discard them.  */
243 redirect_edge_var_map_clear (e);
246 /* Given a tree for an expression for which we might want to emit
247 locations or values in debug information (generally a variable, but
248 we might deal with other kinds of trees in the future), return the
249 tree that should be used as the variable of a DEBUG_BIND STMT or
250 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
253 target_for_debug_bind (tree var)
255 if (!MAY_HAVE_DEBUG_STMTS)
/* Only user variables and parameters are trackable.
   NOTE(review): the "return NULL_TREE;" bodies of these guards are not
   visible in this extract.  */
258 if (TREE_CODE (var) != VAR_DECL
259 && TREE_CODE (var) != PARM_DECL)
/* Follow DECL_VALUE_EXPR to the underlying variable.  */
262 if (DECL_HAS_VALUE_EXPR_P (var))
263 return target_for_debug_bind (DECL_VALUE_EXPR (var));
265 if (DECL_IGNORED_P (var))
268 if (!is_gimple_reg (var))
/* NOTE(review): the final "return var;" appears to be missing.  */
274 /* Called via walk_tree, look for SSA_NAMEs that have already been
278 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
280 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
285 if (TREE_CODE (*tp) == SSA_NAME)
/* A name sitting on the free list has already been released.  */
287 if (SSA_NAME_IN_FREE_LIST (*tp))
292 else if (IS_TYPE_OR_DECL_P (*tp))
/* NOTE(review): the branch bodies (recording the hit in WI and
   pruning *WALK_SUBTREES) and the return value are not visible in
   this extract.  */
298 /* Given a VAR whose definition STMT is to be moved to the iterator
299 position TOGSIP in the TOBB basic block, verify whether we're
300 moving it across any of the debug statements that use it, and
301 adjust them as needed. If TOBB is NULL, then the definition is
302 understood as being removed, and TOGSIP is unused. */
/* NOTE(review): large gaps in this extract (e.g. 335 -> 348, 374 -> 403)
   mean whole statements, braces and the TOBB parameter line are
   missing; comments describe only what is visible.  */
304 propagate_var_def_into_debug_stmts (tree var,
306 const gimple_stmt_iterator *togsip)
308 imm_use_iterator imm_iter;
/* Lazily-computed replacement value for VAR; NO_VALUE remembers a
   failed attempt so we do not retry.  */
312 bool no_value = false;
314 if (!MAY_HAVE_DEBUG_STMTS)
/* Visit every immediate use of VAR, acting only on debug stmts.  */
317 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
320 gimple_stmt_iterator si;
322 if (!is_gimple_debug (stmt))
327 bb = gimple_bb (stmt);
/* When dominance info is available, a debug use dominated by the
   target block needs no adjustment.  */
331 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
332 if (dominated_by_p (CDI_DOMINATORS, bb, tobb))
/* Scan forward to STMT; loop header lost in extraction.  */
348 while (gsi_stmt (si) != stmt);
355 /* Here we compute (lazily) the value assigned to VAR, but we
356 remember if we tried before and failed, so that we don't try
358 if (!value && !no_value)
360 gimple def_stmt = SSA_NAME_DEF_STMT (var);
362 if (is_gimple_assign (def_stmt))
364 if (!dom_info_available_p (CDI_DOMINATORS))
366 struct walk_stmt_info wi;
368 memset (&wi, 0, sizeof (wi));
370 /* When removing blocks without following reverse
371 dominance order, we may sometimes encounter SSA_NAMEs
372 that have already been released, referenced in other
373 SSA_DEFs that we're about to release. Consider:
382 If we deleted BB X first, propagating the value of
383 w_2 won't do us any good. It's too late to recover
384 their original definition of v_1: when it was
385 deleted, it was only referenced in other DEFs, it
386 couldn't possibly know it should have been retained,
387 and propagating every single DEF just in case it
388 might have to be propagated into a DEBUG STMT would
389 probably be too wasteful.
391 When dominator information is not readily
392 available, we check for and accept some loss of
393 debug information. But if it is available,
394 there's no excuse for us to remove blocks in the
395 wrong order, so we don't even check for dead SSA
396 NAMEs. SSA verification shall catch any
398 if (!walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
/* Use the RHS of the defining assignment as the debug value.  */
403 value = gimple_assign_rhs_to_tree (def_stmt);
/* No usable value: reset the debug bind instead.  */
411 gimple_debug_bind_reset_value (stmt);
/* Replace each use of VAR in this stmt with an unshared copy of the
   computed value.  */
413 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
414 SET_USE (use_p, unshare_expr (value));
421 /* Given a STMT to be moved to the iterator position TOBSIP in the
422 TOBB basic block, verify whether we're moving it across any of the
423 debug statements that use it. If TOBB is NULL, then the definition
424 is understood as being removed, and TOBSIP is unused. */
427 propagate_defs_into_debug_stmts (gimple def, basic_block tobb,
428 const gimple_stmt_iterator *togsip)
/* NOTE(review): declarations of def_p/op_iter and braces are missing
   from this extract.  */
433 if (!MAY_HAVE_DEBUG_STMTS)
/* Delegate each SSA def of DEF to the per-variable worker.  */
436 FOR_EACH_SSA_DEF_OPERAND (def_p, def, op_iter, SSA_OP_DEF)
438 tree var = DEF_FROM_PTR (def_p);
440 if (TREE_CODE (var) != SSA_NAME)
443 propagate_var_def_into_debug_stmts (var, tobb, togsip);
447 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
448 dominated stmts before their dominators, so that release_ssa_defs
449 stands a chance of propagating DEFs into debug bind stmts. */
452 release_defs_bitset (bitmap toremove)
/* NOTE(review): declarations (j, bi, stmt, def_p, dit) and several
   statements are missing from this extract.  */
457 /* Performing a topological sort is probably overkill, this will
458 most likely run in slightly superlinear time, rather than the
459 pathological quadratic worst case. */
460 while (!bitmap_empty_p (toremove))
461 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
463 bool remove_now = true;
464 tree var = ssa_name (j);
466 imm_use_iterator uit;
/* Inspect every use of VAR to decide whether it may be removed on
   this pass or must wait for a dependent removal.  */
468 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
473 /* We can't propagate PHI nodes into debug stmts. */
474 if (gimple_code (stmt) == GIMPLE_PHI
475 || is_gimple_debug (stmt))
478 /* If we find another definition to remove that uses
479 the one we're looking at, defer the removal of this
480 one, so that it can be propagated into debug stmts
481 after the other is. */
482 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
484 tree odef = DEF_FROM_PTR (def_p);
486 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
494 BREAK_FROM_IMM_USE_STMT (uit);
/* Removal path: drop the defining stmt (PHI or ordinary).  */
499 gimple def = SSA_NAME_DEF_STMT (var);
500 gimple_stmt_iterator gsi = gsi_for_stmt (def);
502 if (gimple_code (def) == GIMPLE_PHI)
503 remove_phi_node (&gsi, true);
506 gsi_remove (&gsi, true);
510 bitmap_clear_bit (toremove, j);
515 /* Return true if SSA_NAME is malformed and mark it visited.
517 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
521 verify_ssa_name (tree ssa_name, bool is_virtual)
/* NOTE(review): the "return true;" after each error () and the final
   "return false;" are not visible in this extract.  */
523 if (TREE_CODE (ssa_name) != SSA_NAME)
525 error ("expected an SSA_NAME object");
529 if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
531 error ("type mismatch between an SSA_NAME and its symbol");
535 if (SSA_NAME_IN_FREE_LIST (ssa_name))
537 error ("found an SSA_NAME that had been released into the free pool");
541 if (is_virtual && is_gimple_reg (ssa_name))
543 error ("found a virtual definition for a GIMPLE register");
547 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
549 error ("virtual SSA name for non-VOP decl");
553 if (!is_virtual && !is_gimple_reg (ssa_name))
555 error ("found a real definition for a non-register");
559 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
560 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
562 error ("found a default name with a non-empty defining statement");
570 /* Return true if the definition of SSA_NAME at block BB is malformed.
572 STMT is the statement where SSA_NAME is created.
574 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
575 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
576 it means that the block in that array slot contains the
577 definition of SSA_NAME.
579 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
582 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
583 gimple stmt, bool is_virtual)
/* Delegate basic well-formedness checks first.  */
585 if (verify_ssa_name (ssa_name, is_virtual))
/* Each SSA name must be defined in exactly one block.  */
588 if (definition_block[SSA_NAME_VERSION (ssa_name)])
590 error ("SSA_NAME created in two different blocks %i and %i",
591 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
595 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
/* The name's recorded defining statement must be STMT itself.  */
597 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
599 error ("SSA_NAME_DEF_STMT is wrong");
600 fprintf (stderr, "Expected definition statement:\n");
601 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
602 fprintf (stderr, "\nActual definition statement:\n");
603 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
/* NOTE(review): error-exit label/context printing; the goto targets
   and returns are missing from this extract.  */
610 fprintf (stderr, "while verifying SSA_NAME ");
611 print_generic_expr (stderr, ssa_name, 0);
612 fprintf (stderr, " in statement\n");
613 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
619 /* Return true if the use of SSA_NAME at statement STMT in block BB is
622 DEF_BB is the block where SSA_NAME was found to be created.
624 IDOM contains immediate dominator information for the flowgraph.
626 CHECK_ABNORMAL is true if the caller wants to check whether this use
627 is flowing through an abnormal edge (only used when checking PHI
630 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
631 that are defined before STMT in basic block BB. */
634 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
635 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
/* NOTE(review): declarations (err, listvar) and several error-path
   statements are missing from this extract.  */
638 tree ssa_name = USE_FROM_PTR (use_p);
/* Verify the immediate-use links once per name.  */
640 if (!TREE_VISITED (ssa_name))
641 if (verify_imm_links (stderr, ssa_name))
644 TREE_VISITED (ssa_name) = 1;
646 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
647 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
648 ; /* Default definitions have empty statements. Nothing to do. */
651 error ("missing definition");
/* A use must be dominated by its definition...  */
654 else if (bb != def_bb
655 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
657 error ("definition in block %i does not dominate use in block %i",
658 def_bb->index, bb->index);
/* ...and within one block the definition must come first.  */
661 else if (bb == def_bb
662 && names_defined_in_bb != NULL
663 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
665 error ("definition in block %i follows the use", def_bb->index);
/* NOTE(review): the check_abnormal condition is truncated here.  */
670 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
672 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
676 /* Make sure the use is in an appropriate list by checking the previous
677 element to make sure it's the same. */
678 if (use_p->prev == NULL)
680 error ("no immediate_use list");
/* The list head stores the SSA name in loc.ssa_name; interior nodes
   are real uses.  */
686 if (use_p->prev->use == NULL)
687 listvar = use_p->prev->loc.ssa_name;
689 listvar = USE_FROM_PTR (use_p->prev);
690 if (listvar != ssa_name)
692 error ("wrong immediate use list");
/* Context printing on error; returns missing from this extract.  */
699 fprintf (stderr, "for SSA_NAME: ");
700 print_generic_expr (stderr, ssa_name, TDF_VOPS);
701 fprintf (stderr, " in statement:\n");
702 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
709 /* Return true if any of the arguments for PHI node PHI at block BB is
712 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
713 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
714 it means that the block in that array slot contains the
715 definition of SSA_NAME. */
718 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
/* NOTE(review): declarations of e and err, and several error-path
   statements, are missing from this extract.  */
722 size_t i, phi_num_args = gimple_phi_num_args (phi);
/* A PHI must have exactly one argument per incoming edge.  */
724 if (EDGE_COUNT (bb->preds) != phi_num_args)
726 error ("incoming edge count does not match number of PHI arguments");
731 for (i = 0; i < phi_num_args; i++)
733 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
734 tree op = USE_FROM_PTR (op_p);
736 e = EDGE_PRED (bb, i);
740 error ("PHI argument is missing for edge %d->%d",
747 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
749 error ("PHI argument is not SSA_NAME, or invariant");
/* SSA-name arguments get the full name/use verification; the
   virtual-ness is derived from the PHI result.  */
753 if (TREE_CODE (op) == SSA_NAME)
755 err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
756 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
757 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
/* Address-taken bases reached through an ADDR_EXPR argument must be
   marked TREE_ADDRESSABLE.  */
760 if (TREE_CODE (op) == ADDR_EXPR)
762 tree base = TREE_OPERAND (op, 0);
763 while (handled_component_p (base))
764 base = TREE_OPERAND (base, 0);
765 if ((TREE_CODE (base) == VAR_DECL
766 || TREE_CODE (base) == PARM_DECL
767 || TREE_CODE (base) == RESULT_DECL)
768 && !TREE_ADDRESSABLE (base))
770 error ("address taken, but ADDRESSABLE bit not set");
777 error ("wrong edge %d->%d for PHI argument",
778 e->src->index, e->dest->index);
/* Error-context printing; the labels/returns are missing here.  */
784 fprintf (stderr, "PHI argument\n");
785 print_generic_stmt (stderr, op, TDF_VOPS);
793 fprintf (stderr, "for PHI node\n");
794 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
802 /* Verify common invariants in the SSA web.
803 TODO: verify the variable annotations. */
/* NOTE(review): this extract is lossy -- loop headers (FOR_EACH_BB),
   declarations (e, ei, phi, op, iter, use_p, has_err), braces and goto
   labels are missing.  Comments describe only visible code.  */
806 verify_ssa (bool check_modified_stmt)
810 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
813 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
814 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
/* Verification only makes sense on up-to-date SSA form.  */
816 gcc_assert (!need_ssa_update_p (cfun));
820 timevar_push (TV_TREE_SSA_VERIFY);
822 /* Keep track of SSA names present in the IL. */
823 for (i = 1; i < num_ssa_names; i++)
825 tree name = ssa_name (i);
829 TREE_VISITED (name) = 0;
831 stmt = SSA_NAME_DEF_STMT (name);
832 if (!gimple_nop_p (stmt))
834 basic_block bb = gimple_bb (stmt);
835 verify_def (bb, definition_block,
836 name, stmt, !is_gimple_reg (name));
842 calculate_dominance_info (CDI_DOMINATORS);
844 /* Now verify all the uses and make sure they agree with the definitions
845 found in the previous pass. */
851 gimple_stmt_iterator gsi;
853 /* Make sure that all edges have a clear 'aux' field. */
854 FOR_EACH_EDGE (e, ei, bb->preds)
858 error ("AUX pointer initialized for edge %d->%d", e->src->index,
864 /* Verify the arguments for every PHI node in the block. */
865 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
867 phi = gsi_stmt (gsi);
868 if (verify_phi_args (phi, bb, definition_block))
/* PHI results count as defined at the top of the block.  */
871 bitmap_set_bit (names_defined_in_bb,
872 SSA_NAME_VERSION (gimple_phi_result (phi)));
875 /* Now verify all the uses and vuses in every statement of the block. */
876 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
878 gimple stmt = gsi_stmt (gsi);
882 if (check_modified_stmt && gimple_modified_p (stmt))
884 error ("stmt (%p) marked modified after optimization pass: ",
886 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
/* A non-SSA store must carry a VDEF.  */
890 if (is_gimple_assign (stmt)
891 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
893 tree lhs, base_address;
895 lhs = gimple_assign_lhs (stmt);
896 base_address = get_base_address (lhs);
899 && SSA_VAR_P (base_address)
900 && !gimple_vdef (stmt)
903 error ("statement makes a memory store, but has no VDEFS");
904 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
908 else if (gimple_debug_bind_p (stmt)
909 && !gimple_debug_bind_has_value_p (stmt))
912 /* Verify the single virtual operand and its constraints. */
914 if (gimple_vdef (stmt))
916 if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
918 error ("statement has VDEF operand not in defs list");
921 if (!gimple_vuse (stmt))
923 error ("statement has VDEF but no VUSE operand");
926 else if (SSA_NAME_VAR (gimple_vdef (stmt))
927 != SSA_NAME_VAR (gimple_vuse (stmt)))
929 error ("VDEF and VUSE do not use the same symbol");
932 has_err |= verify_ssa_name (gimple_vdef (stmt), true);
934 if (gimple_vuse (stmt))
936 if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
938 error ("statement has VUSE operand not in uses list");
941 has_err |= verify_ssa_name (gimple_vuse (stmt), true);
945 error ("in statement");
946 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
/* Verify every real use and def operand of the statement.  */
950 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
952 if (verify_ssa_name (op, false))
954 error ("in statement");
955 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
960 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
962 op = USE_FROM_PTR (use_p);
963 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
964 use_p, stmt, false, names_defined_in_bb))
968 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
970 if (SSA_NAME_DEF_STMT (op) != stmt)
972 error ("SSA_NAME_DEF_STMT is wrong");
973 fprintf (stderr, "Expected definition statement:\n");
974 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
975 fprintf (stderr, "\nActual definition statement:\n");
976 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
/* Record names defined so far in this block for the ordering check.  */
980 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
984 bitmap_clear (names_defined_in_bb);
987 free (definition_block);
989 /* Restore the dominance information to its prior known state, so
990 that we do not perturb the compiler's subsequent behavior. */
991 if (orig_dom_state == DOM_NONE)
992 free_dominance_info (CDI_DOMINATORS);
994 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
996 BITMAP_FREE (names_defined_in_bb);
997 timevar_pop (TV_TREE_SSA_VERIFY);
/* Fatal-error path (presumably a goto target for any failure).  */
1001 internal_error ("verify_ssa failed");
1004 /* Return true if the uid in both int tree maps are equal. */
/* htab equality callback for int_tree_map entries.  */
1007 int_tree_map_eq (const void *va, const void *vb)
1009 const struct int_tree_map *a = (const struct int_tree_map *) va;
1010 const struct int_tree_map *b = (const struct int_tree_map *) vb;
1011 return (a->uid == b->uid);
1014 /* Hash a UID in a int_tree_map. */
/* htab hash callback: the UID itself is the hash value.  */
1017 int_tree_map_hash (const void *item)
1019 return ((const struct int_tree_map *)item)->uid;
1022 /* Return true if the DECL_UID in both trees are equal. */
/* htab equality callback keyed on DECL_UID (accessed directly via
   decl_minimal.uid).  */
1025 uid_decl_map_eq (const void *va, const void *vb)
1027 const_tree a = (const_tree) va;
1028 const_tree b = (const_tree) vb;
1029 return (a->decl_minimal.uid == b->decl_minimal.uid);
1032 /* Hash a tree in a uid_decl_map. */
/* htab hash callback: DECL_UID of the decl is the hash value.  */
1035 uid_decl_map_hash (const void *item)
1037 return ((const_tree)item)->decl_minimal.uid;
1040 /* Return true if the DECL_UID in both trees are equal. */
/* htab equality callback for SSA names, comparing the DECL_UID of the
   underlying variable of each name.  */
1043 uid_ssaname_map_eq (const void *va, const void *vb)
1045 const_tree a = (const_tree) va;
1046 const_tree b = (const_tree) vb;
1047 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1050 /* Hash a tree in a uid_decl_map. */
/* htab hash callback: DECL_UID of the SSA name's variable.  */
1053 uid_ssaname_map_hash (const void *item)
1055 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1059 /* Initialize global DFA and SSA structures. */
1062 init_tree_ssa (struct function *fn)
/* Allocate the per-function gimple_df and its hash tables: referenced
   vars keyed by DECL_UID, default defs keyed by the SSA name's
   variable UID.  */
1064 fn->gimple_df = GGC_CNEW (struct gimple_df);
1065 fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
1066 uid_decl_map_eq, NULL);
1067 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1068 uid_ssaname_map_eq, NULL);
/* Start with empty points-to solutions and an empty SSA name table.  */
1069 pt_solution_reset (&fn->gimple_df->escaped);
1070 pt_solution_reset (&fn->gimple_df->callused);
1071 init_ssanames (fn, 0);
1076 /* Deallocate memory associated with SSA data structures for FNDECL. */
1079 delete_tree_ssa (void)
1081 referenced_var_iterator rvi;
/* NOTE(review): the declaration of VAR and some statements are missing
   from this extract (embedded numbers skip 1094 -> 1099).  */
1084 /* Remove annotations from every referenced local variable. */
1085 FOR_EACH_REFERENCED_VAR (var, rvi)
/* Global variables keep their annotations; skip them.  */
1087 if (is_global_var (var))
1090 ggc_free (var->base.ann);
1091 var->base.ann = NULL;
1093 htab_delete (gimple_referenced_vars (cfun));
1094 cfun->gimple_df->referenced_vars = NULL;
1099 /* We no longer maintain the SSA operand cache at this point. */
1100 if (ssa_operands_active ())
1101 fini_ssa_operands ();
1103 delete_alias_heapvars ();
/* Tear down the remaining gimple_df state in the reverse order of
   init_tree_ssa.  */
1105 htab_delete (cfun->gimple_df->default_defs);
1106 cfun->gimple_df->default_defs = NULL;
1107 pt_solution_reset (&cfun->gimple_df->escaped);
1108 pt_solution_reset (&cfun->gimple_df->callused);
1109 if (cfun->gimple_df->decls_to_pointers != NULL)
1110 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1111 cfun->gimple_df->decls_to_pointers = NULL;
1112 cfun->gimple_df->modified_noreturn_calls = NULL;
1113 cfun->gimple_df = NULL;
1115 /* We no longer need the edge variable maps. */
1116 redirect_edge_var_map_destroy ();
1119 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1120 useless type conversion, otherwise return false.
1122 This function implicitly defines the middle-end type system. With
1123 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1124 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1125 the following invariants shall be fulfilled:
1127 1) useless_type_conversion_p is transitive.
1128 If a < b and b < c then a < c.
1130 2) useless_type_conversion_p is not symmetric.
1131 From a < b does not follow a > b.
1133 3) Types define the available set of operations applicable to values.
1134 A type conversion is useless if the operations for the target type
1135 is a subset of the operations for the source type. For example
1136 casts to void* are useless, casts from void* are not (void* can't
1137 be dereferenced or offsetted, but copied, hence its set of operations
1138 is a strict subset of that of all other data pointer types). Casts
1139 to const T* are useless (can't be written to), casts from const T*
/* NOTE(review): this extract is lossy -- many "return true/false"
   lines and some conditions are missing (embedded numbers skip, e.g.
   1164 -> 1167, 1323 -> 1326).  Comments describe visible logic only.  */
1143 useless_type_conversion_p (tree outer_type, tree inner_type)
1145 /* Do the following before stripping toplevel qualifiers. */
1146 if (POINTER_TYPE_P (inner_type)
1147 && POINTER_TYPE_P (outer_type))
1149 /* If the outer type is (void *) or a pointer to an incomplete
1150 record type or a pointer to an unprototyped function,
1151 then the conversion is not necessary. */
1152 if (VOID_TYPE_P (TREE_TYPE (outer_type))
1153 || (AGGREGATE_TYPE_P (TREE_TYPE (outer_type))
1154 && TREE_CODE (TREE_TYPE (outer_type)) != ARRAY_TYPE
1155 && (TREE_CODE (TREE_TYPE (outer_type))
1156 == TREE_CODE (TREE_TYPE (inner_type)))
1157 && !COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
1158 || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
1159 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
1160 && (TREE_CODE (TREE_TYPE (outer_type))
1161 == TREE_CODE (TREE_TYPE (inner_type)))
1162 && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
1163 && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
1164 TREE_TYPE (TREE_TYPE (inner_type)))))
1167 /* Do not lose casts to restrict qualified pointers. */
1168 if ((TYPE_RESTRICT (outer_type)
1169 != TYPE_RESTRICT (inner_type))
1170 && TYPE_RESTRICT (outer_type))
1174 /* From now on qualifiers on value types do not matter. */
1175 inner_type = TYPE_MAIN_VARIANT (inner_type);
1176 outer_type = TYPE_MAIN_VARIANT (outer_type);
/* Identical main variants convert trivially.  */
1178 if (inner_type == outer_type)
1181 /* If we know the canonical types, compare them. */
1182 if (TYPE_CANONICAL (inner_type)
1183 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
1186 /* Changes in machine mode are never useless conversions unless we
1187 deal with aggregate types in which case we defer to later checks. */
1188 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
1189 && !AGGREGATE_TYPE_P (inner_type))
1192 /* If both the inner and outer types are integral types, then the
1193 conversion is not necessary if they have the same mode and
1194 signedness and precision, and both or neither are boolean. */
1195 if (INTEGRAL_TYPE_P (inner_type)
1196 && INTEGRAL_TYPE_P (outer_type))
1198 /* Preserve changes in signedness or precision. */
1199 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
1200 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
1203 /* We don't need to preserve changes in the types minimum or
1204 maximum value in general as these do not generate code
1205 unless the types precisions are different. */
1209 /* Scalar floating point types with the same mode are compatible. */
1210 else if (SCALAR_FLOAT_TYPE_P (inner_type)
1211 && SCALAR_FLOAT_TYPE_P (outer_type))
1214 /* Fixed point types with the same mode are compatible. */
1215 else if (FIXED_POINT_TYPE_P (inner_type)
1216 && FIXED_POINT_TYPE_P (outer_type))
1219 /* We need to take special care recursing to pointed-to types. */
1220 else if (POINTER_TYPE_P (inner_type)
1221 && POINTER_TYPE_P (outer_type))
1223 /* Don't lose casts between pointers to volatile and non-volatile
1224 qualified types. Doing so would result in changing the semantics
1225 of later accesses. For function types the volatile qualifier
1226 is used to indicate noreturn functions. */
1227 if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
1228 && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
1229 && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
1230 && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
1231 && (TYPE_VOLATILE (TREE_TYPE (outer_type))
1232 != TYPE_VOLATILE (TREE_TYPE (inner_type)))
1233 && TYPE_VOLATILE (TREE_TYPE (outer_type)))
1236 /* We require explicit conversions from incomplete target types. */
1237 if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
1238 && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
1241 /* Do not lose casts between pointers that when dereferenced access
1242 memory with different alias sets. */
1243 if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
1246 /* We do not care for const qualification of the pointed-to types
1247 as const qualification has no semantic value to the middle-end. */
1249 /* Otherwise pointers/references are equivalent if their pointed
1250 to types are effectively the same. We can strip qualifiers
1251 on pointed-to types for further comparison, which is done in
1252 the callee. Note we have to use true compatibility here
1253 because addresses are subject to propagation into dereferences
1254 and thus might get the original type exposed which is equivalent
1255 to a reverse conversion. */
1256 return types_compatible_p (TREE_TYPE (outer_type),
1257 TREE_TYPE (inner_type));
1260 /* Recurse for complex types. */
1261 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
1262 && TREE_CODE (outer_type) == COMPLEX_TYPE)
1263 return useless_type_conversion_p (TREE_TYPE (outer_type),
1264 TREE_TYPE (inner_type));
1266 /* Recurse for vector types with the same number of subparts. */
1267 else if (TREE_CODE (inner_type) == VECTOR_TYPE
1268 && TREE_CODE (outer_type) == VECTOR_TYPE
1269 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
1270 return useless_type_conversion_p (TREE_TYPE (outer_type),
1271 TREE_TYPE (inner_type));
1273 else if (TREE_CODE (inner_type) == ARRAY_TYPE
1274 && TREE_CODE (outer_type) == ARRAY_TYPE)
1276 /* Preserve string attributes. */
1277 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
1280 /* Conversions from array types with unknown extent to
1281 array types with known extent are not useless. */
1282 if (!TYPE_DOMAIN (inner_type)
1283 && TYPE_DOMAIN (outer_type))
1286 /* Nor are conversions from array types with non-constant size to
1287 array types with constant size or to different size. */
1288 if (TYPE_SIZE (outer_type)
1289 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
1290 && (!TYPE_SIZE (inner_type)
1291 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
1292 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
1293 TYPE_SIZE (inner_type))))
1296 /* Check conversions between arrays with partially known extents.
1297 If the array min/max values are constant they have to match.
1298 Otherwise allow conversions to unknown and variable extents.
1299 In particular this declares conversions that may change the
1300 mode to BLKmode as useless. */
1301 if (TYPE_DOMAIN (inner_type)
1302 && TYPE_DOMAIN (outer_type)
1303 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
1305 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
1306 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
1307 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
1308 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
1310 /* After gimplification a variable min/max value carries no
1311 additional information compared to a NULL value. All that
1312 matters has been lowered to be part of the IL. */
1313 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
1314 inner_min = NULL_TREE;
1315 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
1316 outer_min = NULL_TREE;
1317 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
1318 inner_max = NULL_TREE;
1319 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
1320 outer_max = NULL_TREE;
1322 /* Conversions NULL / variable <- cst are useless, but not
1323 the other way around. */
/* NOTE(review): the conditions these two lines belong to are
   truncated in this extract.  */
1326 || !tree_int_cst_equal (inner_min, outer_min)))
1330 || !tree_int_cst_equal (inner_max, outer_max)))
1334 /* Recurse on the element check. */
1335 return useless_type_conversion_p (TREE_TYPE (outer_type),
1336 TREE_TYPE (inner_type));
1339 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
1340 || TREE_CODE (inner_type) == METHOD_TYPE)
1341 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1343 tree outer_parm, inner_parm;
1345 /* If the return types are not compatible bail out. */
1346 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
1347 TREE_TYPE (inner_type)))
1350 /* Method types should belong to a compatible base class. */
1351 if (TREE_CODE (inner_type) == METHOD_TYPE
1352 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
1353 TYPE_METHOD_BASETYPE (inner_type)))
1356 /* A conversion to an unprototyped argument list is ok. */
1357 if (!TYPE_ARG_TYPES (outer_type))
1360 /* If the unqualified argument types are compatible the conversion
1362 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
/* Walk both argument lists in parallel, requiring pairwise useless
   conversions between unqualified parameter types.  */
1365 for (outer_parm = TYPE_ARG_TYPES (outer_type),
1366 inner_parm = TYPE_ARG_TYPES (inner_type);
1367 outer_parm && inner_parm;
1368 outer_parm = TREE_CHAIN (outer_parm),
1369 inner_parm = TREE_CHAIN (inner_parm))
1370 if (!useless_type_conversion_p
1371 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
1372 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
1375 /* If there is a mismatch in the number of arguments the functions
1376 are not compatible. */
1377 if (outer_parm || inner_parm)
1380 /* Defer to the target if necessary. */
1381 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
1382 return targetm.comp_type_attributes (outer_type, inner_type) != 0;
1387 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
1388 explicit conversions for types involving to be structurally
1390 else if (AGGREGATE_TYPE_P (inner_type)
1391 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1397 /* Return true if a conversion from either type of TYPE1 and TYPE2
1398 to the other is not required. Otherwise return false. */
1401 types_compatible_p (tree type1, tree type2)
1403 return (type1 == type2
1404 || (useless_type_conversion_p (type1, type2)
1405 && useless_type_conversion_p (type2, type1)));
1408 /* Return true if EXPR is a useless type conversion, otherwise return
1412 tree_ssa_useless_type_conversion (tree expr)
1414 /* If we have an assignment that merely uses a NOP_EXPR to change
1415 the top of the RHS to the type of the LHS and the type conversion
1416 is "safe", then strip away the type conversion so that we can
1417 enter LHS = RHS into the const_and_copies table. */
1418 if (CONVERT_EXPR_P (expr)
1419 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1420 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1421 return useless_type_conversion_p
1423 TREE_TYPE (TREE_OPERAND (expr, 0)));
1428 /* Strip conversions from EXP according to
1429 tree_ssa_useless_type_conversion and return the resulting
1433 tree_ssa_strip_useless_type_conversions (tree exp)
1435 while (tree_ssa_useless_type_conversion (exp))
1436 exp = TREE_OPERAND (exp, 0);
1441 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
1442 described in walk_use_def_chains.
1444 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1445 infinite loops. We used to have a bitmap for this to just mark
1446 SSA versions we had visited. But non-sparse bitmaps are way too
1447 expensive, while sparse bitmaps may cause quadratic behavior.
1449 IS_DFS is true if the caller wants to perform a depth-first search
1450 when visiting PHI nodes. A DFS will visit each PHI argument and
1451 call FN after each one. Otherwise, all the arguments are
1452 visited first and then FN is called with each of the visited
1453 arguments in a separate pass. */
1456 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
1457 struct pointer_set_t *visited, bool is_dfs)
1461 if (pointer_set_insert (visited, var))
1464 def_stmt = SSA_NAME_DEF_STMT (var);
1466 if (gimple_code (def_stmt) != GIMPLE_PHI)
1468 /* If we reached the end of the use-def chain, call FN. */
1469 return fn (var, def_stmt, data);
1475 /* When doing a breadth-first search, call FN before following the
1476 use-def links for each argument. */
1478 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1479 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1482 /* Follow use-def links out of each PHI argument. */
1483 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1485 tree arg = gimple_phi_arg_def (def_stmt, i);
1487 /* ARG may be NULL for newly introduced PHI nodes. */
1489 && TREE_CODE (arg) == SSA_NAME
1490 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
1494 /* When doing a depth-first search, call FN after following the
1495 use-def links for each argument. */
1497 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1498 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1507 /* Walk use-def chains starting at the SSA variable VAR. Call
1508 function FN at each reaching definition found. FN takes three
1509 arguments: VAR, its defining statement (DEF_STMT) and a generic
1510 pointer to whatever state information that FN may want to maintain
1511 (DATA). FN is able to stop the walk by returning true, otherwise
1512 in order to continue the walk, FN should return false.
1514 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1515 different. The first argument to FN is no longer the original
1516 variable VAR, but the PHI argument currently being examined. If FN
1517 wants to get at VAR, it should call PHI_RESULT (PHI).
1519 If IS_DFS is true, this function will:
1521 1- walk the use-def chains for all the PHI arguments, and,
1522 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1524 If IS_DFS is false, the two steps above are done in reverse order
1525 (i.e., a breadth-first search). */
1528 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1533 gcc_assert (TREE_CODE (var) == SSA_NAME);
1535 def_stmt = SSA_NAME_DEF_STMT (var);
1537 /* We only need to recurse if the reaching definition comes from a PHI
1539 if (gimple_code (def_stmt) != GIMPLE_PHI)
1540 (*fn) (var, def_stmt, data);
1543 struct pointer_set_t *visited = pointer_set_create ();
1544 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1545 pointer_set_destroy (visited);
1550 /* Return true if T, an SSA_NAME, has an undefined value. */
1553 ssa_undefined_value_p (tree t)
1555 tree var = SSA_NAME_VAR (t);
1557 /* Parameters get their initial value from the function entry. */
1558 if (TREE_CODE (var) == PARM_DECL)
1561 /* Hard register variables get their initial value from the ether. */
1562 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1565 /* The value is undefined iff its definition statement is empty. */
1566 return gimple_nop_p (SSA_NAME_DEF_STMT (t));
1569 /* Emit warnings for uninitialized variables. This is done in two passes.
1571 The first pass notices real uses of SSA names with undefined values.
1572 Such uses are unconditionally uninitialized, and we can be certain that
1573 such a use is a mistake. This pass is run before most optimizations,
1574 so that we catch as many as we can.
1576 The second pass follows PHI nodes to find uses that are potentially
1577 uninitialized. In this case we can't necessarily prove that the use
1578 is really uninitialized. This pass is run after most optimizations,
1580    so that we thread as many jumps as possible, and delete as much dead
1580 code as possible, in order to reduce false positives. We also look
1581 again for plain uninitialized variables, since optimization may have
1582 changed conditionally uninitialized to unconditionally uninitialized. */
1584 /* Emit a warning for T, an SSA_NAME, being uninitialized. The exact
1585 warning text is in MSGID and LOCUS may contain a location or be null. */
1588 warn_uninit (tree t, const char *gmsgid, void *data)
1590 tree var = SSA_NAME_VAR (t);
1591 gimple context = (gimple) data;
1592 location_t location;
1593 expanded_location xloc, floc;
1595 if (!ssa_undefined_value_p (t))
1598 /* TREE_NO_WARNING either means we already warned, or the front end
1599 wishes to suppress the warning. */
1600 if (TREE_NO_WARNING (var))
1603 /* Do not warn if it can be initialized outside this module. */
1604 if (is_global_var (var))
1607 location = (context != NULL && gimple_has_location (context))
1608 ? gimple_location (context)
1609 : DECL_SOURCE_LOCATION (var);
1610 xloc = expand_location (location);
1611 floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
1612 if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
1614 TREE_NO_WARNING (var) = 1;
1616 if (xloc.file != floc.file
1617 || xloc.line < floc.line
1618 || xloc.line > LOCATION_LINE (cfun->function_end_locus))
1619 inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
1625 bool always_executed;
1626 bool warn_possibly_uninitialized;
1629 /* Called via walk_tree, look for SSA_NAMEs that have empty definitions
1630 and warn about them. */
1633 warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
1635 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
1636 struct walk_data *data = (struct walk_data *) wi->info;
1639 /* We do not care about LHS. */
1642 /* Except for operands of INDIRECT_REF. */
1643 if (!INDIRECT_REF_P (t))
1645 t = TREE_OPERAND (t, 0);
1648 switch (TREE_CODE (t))
1651 /* Taking the address of an uninitialized variable does not
1652 count as using it. */
1658 /* A VAR_DECL in the RHS of a gimple statement may mean that
1659 this variable is loaded from memory. */
1663 /* If there is not gimple stmt,
1664 or alias information has not been computed,
1665 then we cannot check VUSE ops. */
1666 if (data->stmt == NULL)
1669 /* If the load happens as part of a call do not warn about it. */
1670 if (is_gimple_call (data->stmt))
1673 vuse = gimple_vuse_op (data->stmt);
1674 if (vuse == NULL_USE_OPERAND_P)
1677 op = USE_FROM_PTR (vuse);
1678 if (t != SSA_NAME_VAR (op)
1679 || !SSA_NAME_IS_DEFAULT_DEF (op))
1681 /* If this is a VUSE of t and it is the default definition,
1682 then warn about op. */
1684 /* Fall through into SSA_NAME. */
1688 /* We only do data flow with SSA_NAMEs, so that's all we
1690 if (data->always_executed)
1691 warn_uninit (t, "%qD is used uninitialized in this function",
1693 else if (data->warn_possibly_uninitialized)
1694 warn_uninit (t, "%qD may be used uninitialized in this function",
1701 /* The total store transformation performed during gimplification
1702 creates uninitialized variable uses. If all is well, these will
1703 be optimized away, so don't warn now. */
1704 if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1709 if (IS_TYPE_OR_DECL_P (t))
1717 /* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
1718 and warn about them. */
1721 warn_uninitialized_phi (gimple phi)
1723 size_t i, n = gimple_phi_num_args (phi);
1725 /* Don't look at memory tags. */
1726 if (!is_gimple_reg (gimple_phi_result (phi)))
1729 for (i = 0; i < n; ++i)
1731 tree op = gimple_phi_arg_def (phi, i);
1732 if (TREE_CODE (op) == SSA_NAME)
1733 warn_uninit (op, "%qD may be used uninitialized in this function",
1739 warn_uninitialized_vars (bool warn_possibly_uninitialized)
1741 gimple_stmt_iterator gsi;
1743 struct walk_data data;
1745 data.warn_possibly_uninitialized = warn_possibly_uninitialized;
1747 calculate_dominance_info (CDI_POST_DOMINATORS);
1751 data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
1752 single_succ (ENTRY_BLOCK_PTR), bb);
1753 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1755 struct walk_stmt_info wi;
1756 data.stmt = gsi_stmt (gsi);
1757 if (is_gimple_debug (data.stmt))
1759 memset (&wi, 0, sizeof (wi));
1761 walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
1765 /* Post-dominator information can not be reliably updated. Free it
1768 free_dominance_info (CDI_POST_DOMINATORS);
1773 execute_early_warn_uninitialized (void)
1775 /* Currently, this pass runs always but
1776 execute_late_warn_uninitialized only runs with optimization. With
1777 optimization we want to warn about possible uninitialized as late
1778 as possible, thus don't do it here. However, without
1779 optimization we need to warn here about "may be uninitialized".
1781 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
1786 execute_late_warn_uninitialized (void)
1789 gimple_stmt_iterator gsi;
1791 /* Re-do the plain uninitialized variable check, as optimization may have
1792 straightened control flow. Do this first so that we don't accidentally
1793 get a "may be" warning when we'd have seen an "is" warning later. */
1794 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
1797 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1798 warn_uninitialized_phi (gsi_stmt (gsi));
1804 gate_warn_uninitialized (void)
1806 return warn_uninitialized != 0;
1809 struct gimple_opt_pass pass_early_warn_uninitialized =
1814 gate_warn_uninitialized, /* gate */
1815 execute_early_warn_uninitialized, /* execute */
1818 0, /* static_pass_number */
1819 TV_NONE, /* tv_id */
1820 PROP_ssa, /* properties_required */
1821 0, /* properties_provided */
1822 0, /* properties_destroyed */
1823 0, /* todo_flags_start */
1824 0 /* todo_flags_finish */
1828 struct gimple_opt_pass pass_late_warn_uninitialized =
1833 gate_warn_uninitialized, /* gate */
1834 execute_late_warn_uninitialized, /* execute */
1837 0, /* static_pass_number */
1838 TV_NONE, /* tv_id */
1839 PROP_ssa, /* properties_required */
1840 0, /* properties_provided */
1841 0, /* properties_destroyed */
1842 0, /* todo_flags_start */
1843 0 /* todo_flags_finish */
1847 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1850 execute_update_addresses_taken (bool do_optimize)
1853 referenced_var_iterator rvi;
1854 gimple_stmt_iterator gsi;
1856 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1857 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1858 bool update_vops = false;
1860 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1861 the function body. */
1864 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1866 gimple stmt = gsi_stmt (gsi);
1867 enum gimple_code code = gimple_code (stmt);
1869 /* Note all addresses taken by the stmt. */
1870 gimple_ior_addresses_taken (addresses_taken, stmt);
1872 /* If we have a call or an assignment, see if the lhs contains
1873 a local decl that requires not to be a gimple register. */
1874 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1876 tree lhs = gimple_get_lhs (stmt);
1878 /* We may not rewrite TMR_SYMBOL to SSA. */
1879 if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
1880 && TMR_SYMBOL (lhs))
1881 bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));
1883 /* A plain decl does not need it set. */
1884 else if (lhs && handled_component_p (lhs))
1886 var = get_base_address (lhs);
1888 bitmap_set_bit (not_reg_needs, DECL_UID (var));
1893 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1896 gimple phi = gsi_stmt (gsi);
1898 for (i = 0; i < gimple_phi_num_args (phi); i++)
1900 tree op = PHI_ARG_DEF (phi, i), var;
1901 if (TREE_CODE (op) == ADDR_EXPR
1902 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1904 bitmap_set_bit (addresses_taken, DECL_UID (var));
1909 /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
1910 and mark variable for conversion into SSA. */
1911 if (optimize && do_optimize)
1912 FOR_EACH_REFERENCED_VAR (var, rvi)
1914 /* Global Variables, result decls cannot be changed. */
1915 if (is_global_var (var)
1916 || TREE_CODE (var) == RESULT_DECL
1917 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1920 if (TREE_ADDRESSABLE (var)
1921 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1922 a non-register. Otherwise we are confused and forget to
1923 add virtual operands for it. */
1924 && (!is_gimple_reg_type (TREE_TYPE (var))
1925 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1927 TREE_ADDRESSABLE (var) = 0;
1928 if (is_gimple_reg (var))
1929 mark_sym_for_renaming (var);
1933 fprintf (dump_file, "No longer having address taken ");
1934 print_generic_expr (dump_file, var, 0);
1935 fprintf (dump_file, "\n");
1938 if (!DECL_GIMPLE_REG_P (var)
1939 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1940 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1941 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1942 && !TREE_THIS_VOLATILE (var)
1943 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1945 DECL_GIMPLE_REG_P (var) = 1;
1946 mark_sym_for_renaming (var);
1950 fprintf (dump_file, "Decl is now a gimple register ");
1951 print_generic_expr (dump_file, var, 0);
1952 fprintf (dump_file, "\n");
1957 /* Operand caches needs to be recomputed for operands referencing the updated
1962 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1964 gimple stmt = gsi_stmt (gsi);
1966 if (gimple_references_memory_p (stmt)
1967 || is_gimple_debug (stmt))
1971 /* Update SSA form here, we are called as non-pass as well. */
1972 update_ssa (TODO_update_ssa);
1975 BITMAP_FREE (not_reg_needs);
1976 BITMAP_FREE (addresses_taken);
1979 struct gimple_opt_pass pass_update_address_taken =
1983 "addressables", /* name */
1988 0, /* static_pass_number */
1989 TV_NONE, /* tv_id */
1990 PROP_ssa, /* properties_required */
1991 0, /* properties_provided */
1992 0, /* properties_destroyed */
1993 0, /* todo_flags_start */
1994 TODO_update_address_taken
1995 | TODO_dump_func /* todo_flags_finish */