1 /* Convert a program in SSA form into Normal form.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Andrew Macleod <amacleod@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "diagnostic.h"
30 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-ssa-live.h"
34 #include "tree-pass.h"
38 /* Used to hold all the components required to do SSA PHI elimination.
39 The node and pred/succ list is a simple linear list of nodes and
40 edges represented as pairs of nodes.
42 The predecessor and successor list: Nodes are entered in pairs, where
43 [0] ->PRED, [1]->SUCC. All the even indexes in the array represent
44 predecessors, all the odd elements are successors.
47 When implemented as bitmaps, very large programs SSA->Normal times were
48 being dominated by clearing the interference graph.
50 Typically this list of edges is extremely small since it only includes
51 PHI results and uses from a single edge which have not coalesced with
52 each other. This means that no virtual PHI nodes are included, and
53 empirical evidence suggests that the number of edges rarely exceed
54 3, and in a bootstrap of GCC, the maximum size encountered was 7.
55 This also limits the number of possible nodes that are involved to
56 rarely more than 6, and in the bootstrap of gcc, the maximum number
57 of nodes encountered was 12. */
/* NOTE(review): several member declarations appear to be elided in this view
   (the size field, the visited-node stack, the visited sbitmap, the variable
   partition map, and the edge) -- later code references g->stack, g->visited,
   g->map and g->e.  Confirm against the full source.  */
59 typedef struct _elim_graph {
60 /* Size of the elimination vectors. */
63 /* List of nodes in the elimination graph. */
64 VEC(tree,heap) *nodes;
66 /* The predecessor and successor edge list. */
67 VEC(int,heap) *edge_list;
72 /* Stack for visited nodes. */
75 /* The variable partition map. */
78 /* Edge being eliminated by this graph. */
81 /* List of constant copies to emit. These are pushed on in pairs. */
82 VEC(tree,heap) *const_copies;
86 /* Create a temporary variable based on the type of variable T. Use T's name
   as the base for the new temporary's name, and propagate T's debug-expr,
   artificial/ignored flags and gimple-register status onto the copy.
   NOTE(review): the declaration line and several statements (e.g. the
   SSA_NAME unwrapping and the final return of TMP) are elided in this view. */
93 const char *name = NULL;
96 if (TREE_CODE (t) == SSA_NAME)
99 gcc_assert (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL);
101 type = TREE_TYPE (t);
104 name = IDENTIFIER_POINTER (tmp);
108 tmp = create_tmp_var (type, name);
110 if (DECL_DEBUG_EXPR_IS_FROM (t) && DECL_DEBUG_EXPR (t))
112 SET_DECL_DEBUG_EXPR (tmp, DECL_DEBUG_EXPR (t));
113 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1;
115 else if (!DECL_IGNORED_P (t))
117 SET_DECL_DEBUG_EXPR (tmp, t);
118 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1;
/* Mirror the source variable's attributes so the temporary behaves the
   same for debug output and optimization purposes.  */
120 DECL_ARTIFICIAL (tmp) = DECL_ARTIFICIAL (t);
121 DECL_IGNORED_P (tmp) = DECL_IGNORED_P (t);
122 DECL_GIMPLE_REG_P (tmp) = DECL_GIMPLE_REG_P (t);
123 add_referenced_var (tmp);
125 /* We should never have copied variables in non-automatic storage
126 or variables that have their address taken. So it is pointless
127 to try to copy call-clobber state here. */
128 gcc_assert (!may_be_aliased (t) && !is_global_var (t));
134 /* This helper function will insert a copy from a constant or variable SRC to
135 variable DEST on edge E.  The copy is queued with gsi_insert_on_edge and
   only materialized when edge insertions are committed later.  */
138 insert_copy_on_edge (edge e, tree dest, tree src)
142 copy = gimple_build_assign (dest, src);
/* Mark the underlying decl of SRC as used; look through ADDR_EXPR first. */
145 if (TREE_CODE (src) == ADDR_EXPR)
146 src = TREE_OPERAND (src, 0);
147 if (TREE_CODE (src) == VAR_DECL || TREE_CODE (src) == PARM_DECL)
150 if (dump_file && (dump_flags & TDF_DETAILS))
153 "Inserting a copy on edge BB%d->BB%d :",
156 print_gimple_stmt (dump_file, copy, 0, dump_flags);
157 fprintf (dump_file, "\n");
160 gsi_insert_on_edge (e, copy);
164 /* Create an elimination graph with SIZE nodes and associated data
   structures.  The initial vector capacities (30/20) are heuristics; the
   graphs are typically tiny (see the file-head comment).  */
168 new_elim_graph (int size)
170 elim_graph g = (elim_graph) xmalloc (sizeof (struct _elim_graph));
172 g->nodes = VEC_alloc (tree, heap, 30);
173 g->const_copies = VEC_alloc (tree, heap, 20);
174 g->edge_list = VEC_alloc (int, heap, 20);
175 g->stack = VEC_alloc (int, heap, 30);
/* The visited bitmap is sized by the partition count so partitions can be
   used directly as bit indices.  */
177 g->visited = sbitmap_alloc (size);
183 /* Empty elimination graph G.  Truncation keeps the allocated storage so
   the graph can be reused per-edge without reallocating.  */
186 clear_elim_graph (elim_graph g)
188 VEC_truncate (tree, g->nodes, 0);
189 VEC_truncate (int, g->edge_list, 0);
193 /* Delete elimination graph G, releasing all vectors and the visited
   bitmap allocated by new_elim_graph.  */
196 delete_elim_graph (elim_graph g)
198 sbitmap_free (g->visited);
199 VEC_free (int, heap, g->stack);
200 VEC_free (int, heap, g->edge_list);
201 VEC_free (tree, heap, g->const_copies);
202 VEC_free (tree, heap, g->nodes);
207 /* Return the number of nodes in graph G. */
210 elim_graph_size (elim_graph g)
212 return VEC_length (tree, g->nodes);
216 /* Add NODE to graph G, if it doesn't exist already.  Linear search is
   fine here: the node list rarely exceeds half a dozen entries.
   NOTE(review): the duplicate-check body of the loop is elided in this view. */
219 elim_graph_add_node (elim_graph g, tree node)
224 for (x = 0; VEC_iterate (tree, g->nodes, x, t); x++)
227 VEC_safe_push (tree, heap, g->nodes, node);
231 /* Add the edge PRED->SUCC to graph G.  Edges are stored as adjacent
   pairs in edge_list: even index = predecessor, odd index = successor.  */
234 elim_graph_add_edge (elim_graph g, int pred, int succ)
236 VEC_safe_push (int, heap, g->edge_list, pred);
237 VEC_safe_push (int, heap, g->edge_list, succ);
241 /* Remove an edge from graph G for which NODE is the predecessor, and
242 return the successor node. -1 is returned if there is no such edge.
   Removal is done by overwriting both pair slots with -1 rather than
   compacting the vector.  */
245 elim_graph_remove_succ_edge (elim_graph g, int node)
249 for (x = 0; x < VEC_length (int, g->edge_list); x += 2)
250 if (VEC_index (int, g->edge_list, x) == node)
252 VEC_replace (int, g->edge_list, x, -1);
253 y = VEC_index (int, g->edge_list, x + 1);
254 VEC_replace (int, g->edge_list, x + 1, -1);
261 /* Find all the nodes in GRAPH which are successors to NODE in the
262 edge list. VAR will hold the partition number found. CODE is the
263 code fragment executed for every node found.
   NOTE(review): the brace/continuation lines of both macros are elided
   in this view; only the skeleton of the loops is visible.  */
265 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, CODE) \
269 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
271 y_ = VEC_index (int, (GRAPH)->edge_list, x_); \
274 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \
280 /* Find all the nodes which are predecessors of NODE in the edge list for
281 GRAPH. VAR will hold the partition number found. CODE is the
282 code fragment executed for every node found. */
284 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, CODE) \
288 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
290 y_ = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \
293 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_); \
299 /* Add T to elimination graph G.  Thin wrapper over elim_graph_add_node. */
302 eliminate_name (elim_graph g, tree T)
304 elim_graph_add_node (g, T);
308 /* Build elimination graph G for basic block BB on incoming PHI edge
   G->e: for each PHI in B, record an edge from the result partition to
   the argument partition, or queue a constant copy when the argument
   stays out of the partition map.  */
312 eliminate_build (elim_graph g, basic_block B)
316 gimple_stmt_iterator gsi;
318 clear_elim_graph (g);
320 for (gsi = gsi_start_phis (B); !gsi_end_p (gsi); gsi_next (&gsi))
322 gimple phi = gsi_stmt (gsi);
324 T0 = var_to_partition_to_var (g->map, gimple_phi_result (phi));
326 /* Ignore results which are not in partitions. */
330 Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
332 /* If this argument is a constant, or a SSA_NAME which is being
333 left in SSA form, just queue a copy to be emitted on this
335 if (!phi_ssa_name_p (Ti)
336 || (TREE_CODE (Ti) == SSA_NAME
337 && var_to_partition (g->map, Ti) == NO_PARTITION))
339 /* Save constant copies until all other copies have been emitted
   -- pushed as a (dest, src) pair, popped in reverse by eliminate_phi. */
341 VEC_safe_push (tree, heap, g->const_copies, T0);
342 VEC_safe_push (tree, heap, g->const_copies, Ti);
346 Ti = var_to_partition_to_var (g->map, Ti);
/* Both endpoints go into the node list; the edge records the copy
   T0 <- Ti that must eventually be emitted on G->e.  */
349 eliminate_name (g, T0);
350 eliminate_name (g, Ti);
351 p0 = var_to_partition (g->map, T0);
352 pi = var_to_partition (g->map, Ti);
353 elim_graph_add_edge (g, p0, pi);
360 /* Push successors of T onto the elimination stack for G.  This is the
   forward DFS of the two-pass ordering; T is pushed after its unvisited
   successors have been processed.  */
363 elim_forward (elim_graph g, int T)
366 SET_BIT (g->visited, T);
367 FOR_EACH_ELIM_GRAPH_SUCC (g, T, S,
369 if (!TEST_BIT (g->visited, S))
372 VEC_safe_push (int, heap, g->stack, T);
376 /* Return 1 if there are unvisited predecessors of T in graph G.
   NOTE(review): the accumulator/return lines are elided in this view.  */
379 elim_unvisited_predecessor (elim_graph g, int T)
382 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
384 if (!TEST_BIT (g->visited, P))
390 /* Process predecessors first, and insert a copy.  Recursing into
   predecessors before emitting the copy P <- T guarantees each partition
   is read before it is overwritten.  */
393 elim_backward (elim_graph g, int T)
396 SET_BIT (g->visited, T);
397 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
399 if (!TEST_BIT (g->visited, P))
401 elim_backward (g, P);
402 insert_copy_on_edge (g->e,
403 partition_to_var (g->map, P),
404 partition_to_var (g->map, T));
409 /* Insert required copies for T in graph G. Check for a strongly connected
410 region, and create a temporary to break the cycle if one is found.  The
   temporary U saves T's value before the cycle overwrites it; unvisited
   predecessors then read from U instead of T.  */
413 elim_create (elim_graph g, int T)
418 if (elim_unvisited_predecessor (g, T))
420 U = create_temp (partition_to_var (g->map, T));
421 insert_copy_on_edge (g->e, U, partition_to_var (g->map, T));
422 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
424 if (!TEST_BIT (g->visited, P))
426 elim_backward (g, P);
427 insert_copy_on_edge (g->e, partition_to_var (g->map, P), U);
/* No cycle through T: emit the single copy T <- S if a successor edge
   remains.  */
433 S = elim_graph_remove_succ_edge (g, T);
436 SET_BIT (g->visited, T);
437 insert_copy_on_edge (g->e,
438 partition_to_var (g->map, T),
439 partition_to_var (g->map, S));
446 /* Eliminate all the phi nodes on edge E in graph G: build the graph,
   order the copies with a forward pass, then emit them (breaking cycles
   with temporaries) in the second pass, and finally flush any queued
   constant copies.  */
449 eliminate_phi (edge e, elim_graph g)
452 basic_block B = e->dest;
454 gcc_assert (VEC_length (tree, g->const_copies) == 0);
456 /* Abnormal edges already have everything coalesced. */
457 if (e->flags & EDGE_ABNORMAL)
462 eliminate_build (g, B);
464 if (elim_graph_size (g) != 0)
468 sbitmap_zero (g->visited);
469 VEC_truncate (int, g->stack, 0);
/* First pass: topological-order the partitions onto the stack.  */
471 for (x = 0; VEC_iterate (tree, g->nodes, x, var); x++)
473 int p = var_to_partition (g->map, var);
474 if (!TEST_BIT (g->visited, p))
/* Second pass: pop in order and emit copies via elim_create.  */
478 sbitmap_zero (g->visited);
479 while (VEC_length (int, g->stack) > 0)
481 x = VEC_pop (int, g->stack);
482 if (!TEST_BIT (g->visited, x))
487 /* If there are any pending constant copies, issue them now. */
488 while (VEC_length (tree, g->const_copies) > 0)
491 src = VEC_pop (tree, g->const_copies);
492 dest = VEC_pop (tree, g->const_copies);
493 insert_copy_on_edge (e, dest, src);
498 /* Take the ssa-name var_map MAP, and assign real variables to each
   partition: the root variable of the SSA name if it is still free,
   otherwise a fresh temporary created from it.  */
502 assign_vars (var_map map)
508 num = num_var_partitions (map);
509 for (x = 0; x < num; x++)
511 var = partition_to_var (map, x);
512 if (TREE_CODE (var) != SSA_NAME)
515 /* It must already be coalesced. */
516 gcc_assert (ann->out_of_ssa_tag == 1);
517 if (dump_file && (dump_flags & TDF_DETAILS))
519 fprintf (dump_file, "partition %d already has variable ", x);
520 print_generic_expr (dump_file, var, TDF_SLIM);
521 fprintf (dump_file, " assigned to it.\n");
526 root = SSA_NAME_VAR (var);
527 ann = var_ann (root);
528 /* If ROOT is already associated, create a new one. */
529 if (ann->out_of_ssa_tag)
531 root = create_temp (root);
532 ann = var_ann (root);
534 /* ROOT has not been coalesced yet, so use it. */
535 if (dump_file && (dump_flags & TDF_DETAILS))
537 fprintf (dump_file, "Partition %d is assigned to var ", x);
538 print_generic_stmt (dump_file, root, TDF_SLIM);
540 change_partition_var (map, root, x);
546 /* Replace use operand P with whatever variable it has been rewritten to based
547 on the partitions in MAP. EXPR is an optional expression vector over SSA
548 versions which is used to replace P with an expression instead of a variable.
549 If the stmt is changed, return true. */
552 replace_use_variable (var_map map, use_operand_p p, gimple *expr)
555 tree var = USE_FROM_PTR (p);
557 /* Check if we are replacing this variable with an expression (TER). */
560 int version = SSA_NAME_VERSION (var);
563 SET_USE (p, gimple_assign_rhs_to_tree (expr[version]));
/* Otherwise substitute the partition's representative variable.  */
568 new_var = var_to_partition_to_var (map, var);
571 SET_USE (p, new_var);
572 set_is_used (new_var);
579 /* Replace def operand DEF_P with whatever variable it has been rewritten to
580 based on the partitions in MAP. EXPR is an optional expression vector over
581 SSA versions which is used to replace DEF_P with an expression instead of a
582 variable. If the stmt is changed, return true. */
585 replace_def_variable (var_map map, def_operand_p def_p, tree *expr)
588 tree var = DEF_FROM_PTR (def_p);
590 /* Do nothing if we are replacing this variable with an expression. */
591 if (expr && expr[SSA_NAME_VERSION (var)])
594 new_var = var_to_partition_to_var (map, var);
597 SET_DEF (def_p, new_var);
598 set_is_used (new_var);
605 /* Remove each argument from PHI. If an arg was the last use of an SSA_NAME,
606 check to see if this allows another PHI node to be removed -- the
   recursion cascades dead-PHI removal backwards through def chains.  */
609 remove_gimple_phi_args (gimple phi)
614 if (dump_file && (dump_flags & TDF_DETAILS))
616 fprintf (dump_file, "Removing Dead PHI definition: ");
617 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
620 FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
622 tree arg = USE_FROM_PTR (arg_p);
623 if (TREE_CODE (arg) == SSA_NAME)
625 /* Remove the reference to the existing argument. */
626 SET_USE (arg_p, NULL_TREE);
627 if (has_zero_uses (arg))
630 gimple_stmt_iterator gsi;
632 stmt = SSA_NAME_DEF_STMT (arg);
634 /* Also remove the def if it is a PHI node. */
635 if (gimple_code (stmt) == GIMPLE_PHI)
637 remove_gimple_phi_args (stmt);
638 gsi = gsi_for_stmt (stmt);
639 remove_phi_node (&gsi, true);
647 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses. */
650 eliminate_useless_phis (void)
653 gimple_stmt_iterator gsi;
658 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
660 gimple phi = gsi_stmt (gsi);
661 result = gimple_phi_result (phi);
662 if (!is_gimple_reg (SSA_NAME_VAR (result)))
664 #ifdef ENABLE_CHECKING
666 /* There should be no arguments which are not virtual, or the
667 results will be incorrect. */
668 for (i = 0; i < gimple_phi_num_args (phi); i++)
670 tree arg = PHI_ARG_DEF (phi, i);
671 if (TREE_CODE (arg) == SSA_NAME
672 && is_gimple_reg (SSA_NAME_VAR (arg)))
674 fprintf (stderr, "Argument of PHI is not virtual (");
675 print_generic_expr (stderr, arg, TDF_SLIM);
676 fprintf (stderr, "), but the result is :");
677 print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
678 internal_error ("SSA corruption");
682 remove_phi_node (&gsi, true);
686 /* Also remove real PHIs with no uses. */
687 if (has_zero_uses (result))
689 remove_gimple_phi_args (phi);
690 remove_phi_node (&gsi, true);
700 /* This function will rewrite the current program using the variable mapping
701 found in MAP. If the replacement vector VALUES is provided, any
702 occurrences of partitions with non-null entries in the vector will be
703 replaced with the expression in the vector instead of its mapped
   variable (Temporary Expression Replacement).  */
707 rewrite_trees (var_map map, gimple *values)
711 gimple_stmt_iterator gsi;
716 #ifdef ENABLE_CHECKING
717 /* Search for PHIs where the destination has no partition, but one
718 or more arguments has a partition. This should not happen and can
719 create incorrect code. */
722 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
724 gimple phi = gsi_stmt (gsi);
725 tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
729 for (i = 0; i < gimple_phi_num_args (phi); i++)
731 tree arg = PHI_ARG_DEF (phi, i);
733 if (TREE_CODE (arg) == SSA_NAME
734 && var_to_partition (map, arg) != NO_PARTITION)
736 fprintf (stderr, "Argument of PHI is in a partition :(");
737 print_generic_expr (stderr, arg, TDF_SLIM);
738 fprintf (stderr, "), but the result is not :");
739 print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
740 internal_error ("SSA corruption");
748 /* Replace PHI nodes with any required copies. */
749 g = new_elim_graph (map->num_partitions);
753 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
755 gimple stmt = gsi_stmt (gsi);
756 use_operand_p use_p, copy_use_p;
758 bool remove = false, is_copy = false;
764 if (gimple_assign_copy_p (stmt))
/* Remember the (single) use operand of a copy so we can detect when
   the def and use coalesce to the same variable below.  */
767 copy_use_p = NULL_USE_OPERAND_P;
768 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
770 if (replace_use_variable (map, use_p, values))
779 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
783 /* Mark this stmt for removal if it is the list of replaceable
785 if (values && values[SSA_NAME_VERSION (DEF_FROM_PTR (def_p))])
789 if (replace_def_variable (map, def_p, NULL))
791 /* If both SSA_NAMEs coalesce to the same variable,
792 mark the now redundant copy for removal. */
795 gcc_assert (copy_use_p != NULL_USE_OPERAND_P);
796 if (DEF_FROM_PTR (def_p) == USE_FROM_PTR (copy_use_p))
802 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
803 if (replace_def_variable (map, def_p, NULL))
806 /* Remove any stmts marked for removal. */
808 gsi_remove (&gsi, true);
/* Removing a throwing stmt may leave dead EH edges; purge them.  */
812 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
813 gimple_purge_dead_eh_edges (bb);
818 phi = phi_nodes (bb);
822 FOR_EACH_EDGE (e, ei, bb->preds)
823 eliminate_phi (e, g);
827 delete_elim_graph (g);
830 /* These are the local work structures used to determine the best place to
831 insert the copies that were placed on edges by the SSA->normal pass..
   They are allocated/freed by init/fini_analyze_edges_for_bb and reset
   between basic blocks by analyze_edges_for_bb.  */
832 static VEC(edge,heap) *edge_leader;
833 static VEC(gimple_seq,heap) *stmt_list;
834 static bitmap leader_has_match = NULL;
/* Current leader edge; consulted by the same_stmt_list_p callback.  */
835 static edge leader_match = NULL;
838 /* Pass this function to make_forwarder_block so that all the edges with
839 matching PENDING_STMT lists to 'curr_stmt_list' get redirected. E is the
840 edge to test for a match. */
843 same_stmt_list_p (edge e)
845 return (e->aux == (PTR) leader_match) ? true : false;
849 /* Return TRUE if S1 and S2 are equivalent copies: same LHS decl and same
   RHS operand, compared by pointer identity.  */
852 identical_copies_p (const_gimple s1, const_gimple s2)
854 #ifdef ENABLE_CHECKING
855 gcc_assert (is_gimple_assign (s1));
856 gcc_assert (is_gimple_assign (s2));
857 gcc_assert (DECL_P (gimple_assign_lhs (s1)));
858 gcc_assert (DECL_P (gimple_assign_lhs (s2)));
861 if (gimple_assign_lhs (s1) != gimple_assign_lhs (s2))
864 if (gimple_assign_rhs1 (s1) != gimple_assign_rhs1 (s2))
871 /* Compare the PENDING_STMT list for edges E1 and E2. Return true if the lists
872 contain the same sequence of copies. */
875 identical_stmt_lists_p (const_edge e1, const_edge e2)
877 gimple_seq t1 = PENDING_STMT (e1);
878 gimple_seq t2 = PENDING_STMT (e2);
879 gimple_stmt_iterator gsi1, gsi2;
/* Walk both sequences in lock step, comparing element-wise.  */
881 for (gsi1 = gsi_start (t1), gsi2 = gsi_start (t2);
882 !gsi_end_p (gsi1) && !gsi_end_p (gsi2);
883 gsi_next (&gsi1), gsi_next (&gsi2))
885 if (!identical_copies_p (gsi_stmt (gsi1), gsi_stmt (gsi2)))
/* Unequal lengths: one iterator ended before the other.  */
889 if (!gsi_end_p (gsi1) || !gsi_end_p (gsi2))
896 /* Allocate data structures used in analyze_edges_for_bb. */
899 init_analyze_edges_for_bb (void)
901 edge_leader = VEC_alloc (edge, heap, 25);
902 stmt_list = VEC_alloc (gimple_seq, heap, 25);
903 leader_has_match = BITMAP_ALLOC (NULL);
907 /* Free data structures used in analyze_edges_for_bb. */
910 fini_analyze_edges_for_bb (void)
912 VEC_free (edge, heap, edge_leader);
913 VEC_free (gimple_seq, heap, stmt_list);
914 BITMAP_FREE (leader_has_match);
917 /* A helper function to be called via walk_tree. Return DATA if it is
918 contained in subtree TP.
   NOTE(review): the comparison/return body is elided in this view.  */
921 contains_tree_r (tree * tp, int *walk_subtrees, void *data)
932 /* A threshold for the number of insns contained in the latch block.
933 It is used to prevent blowing the loop with too many copies from
   the latch.  */
935 #define MAX_STMTS_IN_LATCH 2
937 /* Return TRUE if the stmts on SINGLE-EDGE can be moved to the
938 body of the loop. This should be permitted only if SINGLE-EDGE is a
939 single-basic-block latch edge and thus cleaning the latch will help
940 to create a single-basic-block loop. Otherwise return FALSE. */
943 process_single_block_loop_latch (edge single_edge)
946 basic_block b_exit, b_pheader, b_loop = single_edge->src;
949 gimple_stmt_iterator gsi, gsi_exit;
950 gimple_stmt_iterator tsi;
953 unsigned int count = 0;
/* Only handle a self-loop block with exactly one entry and one exit
   besides the latch edge.  */
955 if (single_edge == NULL || (single_edge->dest != single_edge->src)
956 || (EDGE_COUNT (b_loop->succs) != 2)
957 || (EDGE_COUNT (b_loop->preds) != 2))
960 /* Get the stmts on the latch edge. */
961 stmts = PENDING_STMT (single_edge);
963 /* Find the successor edge which is not the latch edge. */
964 FOR_EACH_EDGE (e, ei, b_loop->succs)
965 if (e->dest != b_loop)
970 /* Check that the exit block has only the loop as a predecessor,
971 and that there are no pending stmts on that edge as well. */
972 if (EDGE_COUNT (b_exit->preds) != 1 || PENDING_STMT (e))
975 /* Find the predecessor edge which is not the latch edge. */
976 FOR_EACH_EDGE (e, ei, b_loop->preds)
977 if (e->src != b_loop)
982 if (b_exit == b_pheader || b_exit == b_loop || b_pheader == b_loop)
985 gsi_exit = gsi_after_labels (b_exit);
987 /* Get the last stmt in the loop body. */
988 gsi = gsi_last_bb (single_edge->src);
989 stmt = gsi_stmt (gsi);
991 if (gimple_code (stmt) != GIMPLE_COND)
/* Rebuild the loop condition as a tree so walk_tree can scan it for
   uses of the latch-defined variables below.  */
995 expr = build2 (gimple_cond_code (stmt), boolean_type_node,
996 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
997 /* Iterate over the insns on the latch and count them. */
998 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
1000 gimple stmt1 = gsi_stmt (tsi);
1004 /* Check that the condition does not contain any new definition
1005 created in the latch as the stmts from the latch intended
1007 if (gimple_code (stmt1) != GIMPLE_ASSIGN)
1009 var = gimple_assign_lhs (stmt1);
1010 if (TREE_THIS_VOLATILE (var)
1011 || TYPE_VOLATILE (TREE_TYPE (var))
1012 || walk_tree (&expr, contains_tree_r, var, NULL))
1015 /* Check that the latch does not contain more than MAX_STMTS_IN_LATCH
1016 insns. The purpose of this restriction is to prevent blowing the
1017 loop with too many copies from the latch. */
1018 if (count > MAX_STMTS_IN_LATCH)
1021 /* Apply the transformation - clean up the latch block:
1026 if (cond) goto L2 else goto L3;
1040 if (cond) goto L1 else goto L2;
1045 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
1047 gimple stmt1 = gsi_stmt (tsi);
1051 /* Create a new variable to load back the value of var in case
1052 we exit the loop. */
1053 var = gimple_assign_lhs (stmt1);
1054 tmp_var = create_temp (var);
1055 copy = gimple_build_assign (tmp_var, var);
1056 set_is_used (tmp_var);
1057 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
1058 copy = gimple_build_assign (var, tmp_var);
1059 gsi_insert_before (&gsi_exit, copy, GSI_SAME_STMT);
/* The latch edge's pending list has been consumed; clear it so it is
   not committed a second time.  */
1062 PENDING_STMT (single_edge) = 0;
1063 /* Insert the new stmts to the loop body. */
1064 gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
1068 "\nCleaned-up latch block of loop with single BB: %d\n\n",
1069 single_edge->dest->index);
1074 /* Look at all the incoming edges to block BB, and decide where the best place
1075 to insert the stmts on each edge are, and perform those insertions.
   Edges carrying identical copy lists are grouped behind a "leader" edge
   and redirected through a single forwarder block to share one copy of
   the statements.  */
1078 analyze_edges_for_bb (basic_block bb)
1084 bool have_opportunity;
1085 gimple_stmt_iterator gsi;
1087 edge single_edge = NULL;
1093 /* Blocks which contain at least one abnormal edge cannot use
1094 make_forwarder_block. Look for these blocks, and commit any PENDING_STMTs
1095 found on edges in these block. */
1096 have_opportunity = true;
1097 FOR_EACH_EDGE (e, ei, bb->preds)
1098 if (e->flags & EDGE_ABNORMAL)
1100 have_opportunity = false;
1104 if (!have_opportunity)
1106 FOR_EACH_EDGE (e, ei, bb->preds)
1107 if (PENDING_STMT (e))
1108 gsi_commit_one_edge_insert (e, NULL);
1112 /* Find out how many edges there are with interesting pending stmts on them.
1113 Commit the stmts on edges we are not interested in. */
1114 FOR_EACH_EDGE (e, ei, bb->preds)
1116 if (PENDING_STMT (e))
1118 gcc_assert (!(e->flags & EDGE_ABNORMAL));
1119 if (e->flags & EDGE_FALLTHRU)
1121 gsi = gsi_start_bb (e->src);
1122 if (!gsi_end_p (gsi))
1124 stmt = gsi_stmt (gsi);
1126 gcc_assert (stmt != NULL);
1127 is_label = (gimple_code (stmt) == GIMPLE_LABEL);
1128 /* Punt if it has non-label stmts, or isn't local. */
1130 || DECL_NONLOCAL (gimple_label_label (stmt))
1131 || !gsi_end_p (gsi))
1133 gsi_commit_one_edge_insert (e, NULL);
1143 /* If there aren't at least 2 edges, no sharing will happen. */
1148 /* Add stmts to the edge unless processed specially as a
1149 single-block loop latch edge. */
1150 if (!process_single_block_loop_latch (single_edge))
1151 gsi_commit_one_edge_insert (single_edge, NULL);
1156 /* Ensure that we have empty worklists. */
1157 #ifdef ENABLE_CHECKING
1158 gcc_assert (VEC_length (edge, edge_leader) == 0);
1159 gcc_assert (VEC_length (gimple_seq, stmt_list) == 0);
1160 gcc_assert (bitmap_empty_p (leader_has_match));
1163 /* Find the "leader" block for each set of unique stmt lists. Preference is
1164 given to FALLTHRU blocks since they would need a GOTO to arrive at another
1165 block. The leader edge destination is the block which all the other edges
1166 with the same stmt list will be redirected to. */
1167 have_opportunity = false;
1168 FOR_EACH_EDGE (e, ei, bb->preds)
1170 if (PENDING_STMT (e))
1174 /* Look for the same stmt list in edge leaders list. */
1175 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1177 if (identical_stmt_lists_p (leader, e))
1179 /* Give this edge the same stmt list pointer. */
1180 PENDING_STMT (e) = NULL;
1182 bitmap_set_bit (leader_has_match, x);
1183 have_opportunity = found = true;
1188 /* If no similar stmt list, add this edge to the leader list. */
1191 VEC_safe_push (edge, heap, edge_leader, e);
1192 VEC_safe_push (gimple_seq, heap, stmt_list, PENDING_STMT (e));
1197 /* If there are no similar lists, just issue the stmts. */
1198 if (!have_opportunity)
1200 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1201 gsi_commit_one_edge_insert (leader, NULL);
1202 VEC_truncate (edge, edge_leader, 0);
1203 VEC_truncate (gimple_seq, stmt_list, 0);
1204 bitmap_clear (leader_has_match);
1209 fprintf (dump_file, "\nOpportunities in BB %d for stmt/block reduction:\n",
1212 /* For each common list, create a forwarding block and issue the stmt's
1214 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1215 if (bitmap_bit_p (leader_has_match, x))
1218 gimple_stmt_iterator gsi;
1219 gimple_seq curr_stmt_list;
1221 leader_match = leader;
1223 /* The tree_* cfg manipulation routines use the PENDING_EDGE field
1224 for various PHI manipulations, so it gets cleared when calls are
1225 made to make_forwarder_block(). So make sure the edge is clear,
1226 and use the saved stmt list. */
1227 PENDING_STMT (leader) = NULL;
1228 leader->aux = leader;
1229 curr_stmt_list = VEC_index (gimple_seq, stmt_list, x);
1231 new_edge = make_forwarder_block (leader->dest, same_stmt_list_p,
1233 bb = new_edge->dest;
1236 fprintf (dump_file, "Splitting BB %d for Common stmt list. ",
1237 leader->dest->index);
1238 fprintf (dump_file, "Original block is now BB%d.\n", bb->index);
1239 print_gimple_seq (dump_file, curr_stmt_list, 0, TDF_VOPS);
1242 FOR_EACH_EDGE (e, ei, new_edge->src->preds)
1246 fprintf (dump_file, " Edge (%d->%d) lands here.\n",
1247 e->src->index, e->dest->index);
/* Emit the shared statement list once, into the new forwarder block.  */
1250 gsi = gsi_last_bb (leader->dest);
1251 gsi_insert_seq_after (&gsi, curr_stmt_list, GSI_NEW_STMT);
1253 leader_match = NULL;
1254 /* We should never get a new block now. */
/* Unmatched leaders keep their saved list and are committed normally.  */
1258 PENDING_STMT (leader) = VEC_index (gimple_seq, stmt_list, x);
1259 gsi_commit_one_edge_insert (leader, NULL);
1263 /* Clear the working data structures. */
1264 VEC_truncate (edge, edge_leader, 0);
1265 VEC_truncate (gimple_seq, stmt_list, 0);
1266 bitmap_clear (leader_has_match);
1270 /* This function will analyze the insertions which were performed on edges,
1271 and decide whether they should be left on that edge, or whether it is more
1272 efficient to emit some subset of them in a single block. All stmts are
1273 inserted somewhere. */
1276 perform_edge_inserts (void)
1281 fprintf(dump_file, "Analyzing Edge Insertions.\n");
1283 /* analyze_edges_for_bb calls make_forwarder_block, which tries to
1284 incrementally update the dominator information. Since we don't
1285 need dominator information after this pass, go ahead and free the
1286 dominator information. */
1287 free_dominance_info (CDI_DOMINATORS);
1288 free_dominance_info (CDI_POST_DOMINATORS);
1290 /* Allocate data structures used in analyze_edges_for_bb. */
1291 init_analyze_edges_for_bb ();
1294 analyze_edges_for_bb (bb);
/* The exit block is not visited by the normal block walk; handle it
   explicitly so its predecessor edges are processed too.  */
1296 analyze_edges_for_bb (EXIT_BLOCK_PTR);
1298 /* Free data structures used in analyze_edges_for_bb. */
1299 fini_analyze_edges_for_bb ();
1301 #ifdef ENABLE_CHECKING
/* Sanity check: every pending stmt list must have been committed.  */
1307 FOR_EACH_EDGE (e, ei, bb->preds)
1309 if (PENDING_STMT (e))
1310 error (" Pending stmts not issued on PRED edge (%d, %d)\n",
1311 e->src->index, e->dest->index);
1313 FOR_EACH_EDGE (e, ei, bb->succs)
1315 if (PENDING_STMT (e))
1316 error (" Pending stmts not issued on SUCC edge (%d, %d)\n",
1317 e->src->index, e->dest->index);
1320 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1322 if (PENDING_STMT (e))
1323 error (" Pending stmts not issued on ENTRY edge (%d, %d)\n",
1324 e->src->index, e->dest->index);
1326 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1328 if (PENDING_STMT (e))
1329 error (" Pending stmts not issued on EXIT edge (%d, %d)\n",
1330 e->src->index, e->dest->index);
1337 /* Remove the ssa-names in the current function and translate them into normal
1338 compiler variables. PERFORM_TER is true if Temporary Expression Replacement
1339 should also be used. */
1342 remove_ssa_form (bool perform_ter)
1345 gimple *values = NULL;
1347 gimple_stmt_iterator gsi;
/* Coalesce SSA names into partitions first; each partition later gets
   one real variable.  */
1349 map = coalesce_ssa_name ();
1351 /* Return to viewing the variable list as just all reference variables after
1352 coalescing has been performed. */
1353 partition_view_normal (map, false);
1355 if (dump_file && (dump_flags & TDF_DETAILS))
1357 fprintf (dump_file, "After Coalescing:\n");
1358 dump_var_map (dump_file, map);
1363 values = find_replaceable_exprs (map);
1364 if (values && dump_file && (dump_flags & TDF_DETAILS))
1365 dump_replaceable_exprs (dump_file, values);
1368 /* Assign real variables to the partitions now. */
1371 if (dump_file && (dump_flags & TDF_DETAILS))
1373 fprintf (dump_file, "After Base variable replacement:\n");
1374 dump_var_map (dump_file, map);
1377 rewrite_trees (map, values);
1382 /* Remove PHI nodes which have been translated back to real variables. */
1384 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1385 remove_phi_node (&gsi, true);
1387 /* If any copies were inserted on edges, analyze and insert them now. */
1388 perform_edge_inserts ();
1390 delete_var_map (map);
1394 /* Search every PHI node for arguments associated with backedges which
1395 we can trivially determine will need a copy (the argument is either
1396 not an SSA_NAME or the argument has a different underlying variable
1397 than the PHI result).
1399 Insert a copy from the PHI argument to a new destination at the
1400 end of the block with the backedge to the top of the loop. Update
1401 the PHI argument to reference this new destination.  This avoids
   having out-of-SSA split the backedge later, which hurts performance.  */
1404 insert_backedge_copies (void)
1407 gimple_stmt_iterator gsi;
1411 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1413 gimple phi = gsi_stmt (gsi);
1414 tree result = gimple_phi_result (phi);
1418 if (!is_gimple_reg (result))
1421 result_var = SSA_NAME_VAR (result);
1422 for (i = 0; i < gimple_phi_num_args (phi); i++)
1424 tree arg = gimple_phi_arg_def (phi, i);
1425 edge e = gimple_phi_arg_edge (phi, i);
1427 /* If the argument is not an SSA_NAME, then we will need a
1428 constant initialization. If the argument is an SSA_NAME with
1429 a different underlying variable then a copy statement will be
1431 if ((e->flags & EDGE_DFS_BACK)
1432 && (TREE_CODE (arg) != SSA_NAME
1433 || SSA_NAME_VAR (arg) != result_var))
1436 gimple stmt, last = NULL;
1437 gimple_stmt_iterator gsi2;
1439 gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
1440 if (!gsi_end_p (gsi2))
1441 last = gsi_stmt (gsi2);
1443 /* In theory the only way we ought to get back to the
1444 start of a loop should be with a COND_EXPR or GOTO_EXPR.
1445 However, better safe than sorry.
1446 If the block ends with a control statement or
1447 something that might throw, then we have to
1448 insert this assignment before the last
1449 statement. Else insert it after the last statement. */
1450 if (last && stmt_ends_bb_p (last))
1452 /* If the last statement in the block is the definition
1453 site of the PHI argument, then we can't insert
1454 anything after it. */
1455 if (TREE_CODE (arg) == SSA_NAME
1456 && SSA_NAME_DEF_STMT (arg) == last)
1460 /* Create a new instance of the underlying variable of the
   PHI result, so the copy shares the result's base variable.  */
1462 stmt = gimple_build_assign (result_var,
1463 gimple_phi_arg_def (phi, i));
1464 name = make_ssa_name (result_var, stmt);
1465 gimple_assign_set_lhs (stmt, name);
1467 /* Insert the new statement into the block and update
   the PHI argument to use the copy's result.  */
1469 if (last && stmt_ends_bb_p (last))
1470 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
1472 gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
1473 SET_PHI_ARG_DEF (phi, i, name);
1480 /* Take the current function out of SSA form, translating PHIs as described in
1481 R. Morgan, ``Building an Optimizing Compiler'',
1482 Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */
1485 rewrite_out_of_ssa (void)
1487 /* If elimination of a PHI requires inserting a copy on a backedge,
1488 then we will have to split the backedge which has numerous
1489 undesirable performance effects.
1491 A significant number of such cases can be handled here by inserting
1492 copies into the loop itself. */
1493 insert_backedge_copies ();
1496 /* Eliminate PHIs which are of no use, such as virtual or dead phis. */
1497 eliminate_useless_phis ();
1499 if (dump_file && (dump_flags & TDF_DETAILS))
1500 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
/* TER is disabled under mudflap since it can rewrite memory accesses.  */
1502 remove_ssa_form (flag_tree_ter && !flag_mudflap);
1504 if (dump_file && (dump_flags & TDF_DETAILS))
1505 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1507 cfun->gimple_df->in_ssa_p = false;
1512 /* Define the parameters of the out of SSA pass.
   NOTE(review): several descriptor fields (gate, sub/next, properties
   braces) are elided in this view; confirm against the full source.  */
1514 struct gimple_opt_pass pass_del_ssa =
1518 "optimized", /* name */
1520 rewrite_out_of_ssa, /* execute */
1523 0, /* static_pass_number */
1524 TV_TREE_SSA_TO_NORMAL, /* tv_id */
1525 PROP_cfg | PROP_ssa, /* properties_required */
1526 0, /* properties_provided */
1527 /* ??? If TER is enabled, we also kill gimple. */
1528 PROP_ssa, /* properties_destroyed */
1529 TODO_verify_ssa | TODO_verify_flow
1530 | TODO_verify_stmts, /* todo_flags_start */
1533 | TODO_remove_unused_locals /* todo_flags_finish */