/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithms:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
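
/* As a concrete illustration (a sketch, not taken from any particular
   testcase), consider a diamond where a + b is computed on only one
   incoming path:

	  if (t_1)			   if (t_1)
	  /      \			   /      \
     x_2 = a + b;   ;	     ==>     x_2 = a + b;  x_3 = a + b;
	  \      /			   \      /
       y_4 = a + b;		       x_5 = PHI <x_2, x_3>
				       y_4 = x_5;

   a + b is ANTIC at the join (it could be computed there) and AVAIL in
   only one predecessor, so insertion places a copy in the other
   predecessor, a PHI merges the two, and elimination rewrites y_4 to
   use the PHI result.  */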
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
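
/* For example (a sketch): if b_2 and c_3 have been value numbered the
   same, then b_2, c_3 and an expression like b_2 + 0 all share one
   value number, represented by, say, value_id 7 with leader b_2.  The
   bitmap sets below then track the single bit 7 rather than one bit
   per SSA_NAME version.  */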
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    CONSTANT,
    NAME,
    NARY,
    REFERENCE
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  pre_expr_union u;
  unsigned int id;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
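
/* The two callbacks above plug into a libiberty hash table; a minimal
   sketch of the wiring (the real table is created during pass
   initialization, and the initial size shown here is only an
   assumption):

     expression_to_id = htab_create (num_ssa_names * 3,
				     pre_expr_hash, pre_expr_eq, NULL);

   With this, htab_find_slot can locate the canonical pre_expr for any
   structurally equal expression.  */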
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
	return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
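
/* A sketch of how this reverse mapping gets used: if value_id 7 is
   represented by the expressions with ids {3, 12}, then

     bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, 7);

   yields a set whose expression bitmap has bits 3 and 12 set, which
   lets bitmap_find_leader below intersect it with a dataflow set
   instead of scanning every expression in that set.  */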
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
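
/* These macros assume each basic block's AUX field points at a
   struct bb_bitmap_sets.  A minimal sketch of the setup (the real
   allocation happens during pass initialization and may differ):

     basic_block bb;
     FOR_ALL_BB (bb)
       bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

   after which EXP_GEN (bb), ANTIC_IN (bb) etc. are valid lvalues.  */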
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
					 gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */

static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */

static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}

/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
						 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);

  free (*slot);
  *slot = (void *) new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			     v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
	unsigned int id;
	id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	if (id == 0)
	  {
	    id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	    add_to_value (id, expr);
	  }
	return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
	}
    }

  return result;
}
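
/* For instance (a sketch): with value_ids assigned in dependence
   order, say a_1 + b_2 gets value_id 6 and (a_1 + b_2) * c_3 gets
   value_id 9, walking values 6 then 9 pushes the subexpression before
   the expression that uses it, which is exactly the topological order
   the insertion phase needs.  */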
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (&set->expressions, i))
	{
	  bitmap_clear_bit (&set->expressions, i);
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }
}
870 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
872 return bitmap_equal_p (&a->values, &b->values);
875 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
876 and add it otherwise. */
879 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
881 unsigned int val = get_expr_value_id (expr);
883 if (bitmap_set_contains_value (set, val))
884 bitmap_set_replace_value (set, val, expr);
886 bitmap_insert_into_set (set, expr);
889 /* Insert EXPR into SET if EXPR's value is not already present in
893 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
895 unsigned int val = get_expr_value_id (expr);
897 #ifdef ENABLE_CHECKING
898 gcc_assert (expr->id == get_or_alloc_expression_id (expr));
901 /* Constant values are always considered to be part of the set. */
902 if (value_id_constant_p (val))
905 /* If the value membership changed, add the expression. */
906 if (bitmap_set_bit (&set->values, val))
907 bitmap_set_bit (&set->expressions, expr->id);
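
/* E.g. (a sketch): if a_1 + b_2 (expression id 3) is already in SET
   with value_id 7, then bitmap_value_insert_into_set of c_4 + d_5
   (expression id 12) with the same value_id 7 is a no-op, while
   bitmap_value_replace_in_set would drop expression 3 and enter
   expression 12 instead; either way only one expression per value is
   kept in the set.  */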
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}
/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of
	 them have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_expression:
	    if (nary->opcode == TRUTH_NOT_EXPR)
	      goto do_unary;
	    if (nary->opcode != TRUTH_AND_EXPR
		&& nary->opcode != TRUTH_OR_EXPR
		&& nary->opcode != TRUTH_XOR_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
do_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &visited);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}
/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	struct vn_nary_op_s newnary;
	/* The NARY structure is only guaranteed to have been
	   allocated to the nary->length operands.  */
	memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
				 - sizeof (tree) * (4 - nary->length)));

	for (i = 0; i < newnary.length; i++)
	  {
	    if (TREE_CODE (newnary.op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary.op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary.op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary.length,
						    newnary.opcode,
						    newnary.type,
						    newnary.op[0],
						    newnary.op[1],
						    newnary.op[2],
						    newnary.op[3],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary.length,
						 newnary.opcode,
						 newnary.type,
						 newnary.op[0],
						 newnary.op[1],
						 newnary.op[2],
						 newnary.op[3],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false, same_valid = true;
	unsigned int i, j;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree oldop0 = operand->op0;
	    tree oldop1 = operand->op1;
	    tree oldop2 = operand->op2;
	    tree op0 = oldop0;
	    tree op1 = oldop1;
	    tree op2 = oldop2;
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;

	    if (op0 && TREE_CODE (op0) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op0)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op0 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op0 != oldop0;

	    if (op1 && TREE_CODE (op1) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op1)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op1 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op1 && !is_gimple_min_invariant (op1))
	      break;
	    changed |= op1 != oldop1;
	    if (op2 && TREE_CODE (op2) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op2)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op2 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op2 && !is_gimple_min_invariant (op2))
	      break;
	    changed |= op2 != oldop2;

	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
	      newop.opcode = TREE_CODE (op0);
	    newop.type = type;
	    newop.op0 = op0;
	    newop.op1 = op1;
	    newop.op2 = op2;
	    /* If it transforms a non-constant ARRAY_REF into a constant
	       one, adjust the constant offset.  */
	    if (newop.opcode == ARRAY_REF
		&& newop.off == -1
		&& TREE_CODE (op0) == INTEGER_CST
		&& TREE_CODE (op1) == INTEGER_CST
		&& TREE_CODE (op2) == INTEGER_CST)
	      {
		double_int off = tree_to_double_int (op0);
		off = double_int_add (off,
				      double_int_neg
				        (tree_to_double_int (op1)));
		off = double_int_mul (off, tree_to_double_int (op2));
		if (double_int_fits_in_shwi_p (off))
		  newop.off = off.low;
	      }
	    VEC_replace (vn_reference_op_s, newoperands, j, &newop);
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
		&& VEC_index (vn_reference_op_s,
			      newoperands, j - 1)->opcode == MEM_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;
	    bool converted = false;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, VN_WALK);
	    if (result)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      {
		result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
		converted = true;
	      }

	    if (result && is_gimple_min_invariant (result))
	      {
		gcc_assert (!newoperands);
		return get_or_alloc_expr_for_constant (result);
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (converted)
	      {
		vn_nary_op_t nary;
		tree nresult;

		gcc_assert (CONVERT_EXPR_P (result)
			    || TREE_CODE (result) == VIEW_CONVERT_EXPR);

		nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
						    TREE_TYPE (result),
						    TREE_OPERAND (result, 0),
						    NULL_TREE, NULL_TREE,
						    NULL_TREE,
						    &nary);
		if (nresult && is_gimple_min_invariant (nresult))
		  return get_or_alloc_expr_for_constant (nresult);

		expr->kind = NARY;
		if (nary)
		  {
		    PRE_EXPR_NARY (expr) = nary;
		    constant = fully_constant_expression (expr);
		    if (constant != expr)
		      return constant;

		    new_val_id = nary->value_id;
		    get_or_alloc_expression_id (expr);
		  }
		else
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id() + 1);
		    nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
						     TREE_TYPE (result),
						     TREE_OPERAND (result, 0),
						     NULL_TREE, NULL_TREE,
						     NULL_TREE, NULL_TREE,
						     new_val_id);
		    PRE_EXPR_NARY (expr) = nary;
		    constant = fully_constant_expression (expr);
		    if (constant != expr)
		      return constant;
		    get_or_alloc_expression_id (expr);
		  }
	      }
	    else if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id() + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	return expr;
      }
      break;

    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
	return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
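
/* A sketch of what translation does: with the PHI x_3 = PHI <x_1(T),
   x_2(F)> in PHIBLOCK, the expression x_3 + y_4 in a set flowing
   backwards across the edge from predecessor T becomes x_1 + y_4,
   which is then value numbered (and possibly folded) in its own
   right; NAME expressions translate directly through the PHI
   argument.  */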
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
				&set->expressions, 0, i, bi)
	{
	  pre_expr val = expression_for_id (i);
	  /* At the point where stmt is not null, there should always
	     be an SSA_NAME first in the list of expressions.  */
	  if (stmt)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
	      if (gimple_code (def_stmt) != GIMPLE_PHI
		  && gimple_bb (def_stmt) == gimple_bb (stmt)
		  /* PRE insertions are at the end of the basic-block
		     and have an UID 0.  */
		  && (gimple_uid (def_stmt) == 0
		      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
		continue;
	    }
	  return val;
	}
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
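
/* Note the encoding used by the EXPR_DIES cache above: bit ID * 2 set
   means the answer for expression ID in this block has been computed,
   and bit ID * 2 + 1 then holds the answer itself, so one bitmap
   serves as both the "valid" and the "value" half of the cache.  */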
#define union_contains_value(SET1, SET2, VAL)			\
  (bitmap_set_contains_value ((SET1), (VAL))			\
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))

/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */

static bool
vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
		   vn_reference_op_t vro)
{
  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op0;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op1;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op2;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
	       basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  {
	    if (TREE_CODE (nary->op[i]) == SSA_NAME)
	      {
		struct pre_expr_d temp;
		temp.kind = NAME;
		temp.id = 0;
		PRE_EXPR_NAME (&temp) = nary->op[i];
		temp.id = lookup_expression_id (&temp);
		if (temp.id == 0)
		  return false;
		if (!union_contains_value (set1, set2,
					   get_expr_value_id (&temp)))
		  return false;
	      }
	  }
	/* If the NARY may trap make sure the block does not contain
	   a possible exit point.
	   ??? This is overly conservative if we translate AVAIL_OUT
	   as the available expression might be after the exit point.  */
	if (BB_MAY_NOTRETURN (block)
	    && vn_nary_may_trap (nary))
	  return false;
	return true;
      }

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
	  {
	    if (!vro_valid_in_sets (set1, set2, vro))
	      return false;
	  }
	if (ref->vuse)
	  {
	    gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	    if (!gimple_nop_p (def_stmt)
		&& gimple_bb (def_stmt) != block
		&& !dominated_by_p (CDI_DOMINATORS,
				    block, gimple_bb (def_stmt)))
	      return false;
	  }
	return !value_dies_in_block_x (expr, block);
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set1, set2, expr, block))
	bitmap_remove_from_set (set1, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set, NULL, expr, block))
	bitmap_remove_from_set (set, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;

/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
			      basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      SET_BIT (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
   ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
   ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
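
/* A worked sketch on a diamond CFG (blocks B1 -> {B2, B3} -> B4),
   where B2 computes a_1 + b_2 and B4 uses a_1 + b_2:

     ANTIC_IN[B4]  = { a+b }	(generated locally, nothing killed)
     ANTIC_OUT[B2] = ANTIC_OUT[B3] = { a+b }	(single successor B4)
     ANTIC_IN[B2]  = ANTIC_IN[B3]  = { a+b }
     ANTIC_OUT[B1] = ANTIC_IN[B2] intersected with ANTIC_IN[B3]

   so a + b is anticipatable everywhere above the join, which is what
   licenses the insertion into B3 during the insert phase.  */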
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
     */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
					 block, succ_bb))
	{
	  changed = true;
	  goto maybe_dump_sets;
	}
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first = NULL;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e->dest;
	  else if (BB_VISITED (e->dest))
	    VEC_quick_push (basic_block, worklist, e->dest);
	}

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
	{
	  SET_BIT (changed_blocks, block->index);
	  BB_VISITED (block) = 0;
	  BB_DEFERRED (block) = 1;
	  changed = true;
	  VEC_free (basic_block, heap, worklist);
	  goto maybe_dump_sets;
	}

      if (!gimple_seq_empty_p (phi_nodes (first)))
	phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
	bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	{
	  fprintf (dump_file,
		   "Block %d was deferred for a future iteration.\n",
		   block->index);
	}
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
2411 /* Compute PARTIAL_ANTIC for BLOCK.
2413 If succs(BLOCK) > 1 then
2414 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2415 in ANTIC_OUT for all succ(BLOCK)
2416 else if succs(BLOCK) == 1 then
2417 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2419 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
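/* Hypothetical illustration: in the diamond sketched before
   compute_antic_aux, suppose only bb2 computed a_1 + b_1.  The
   intersection over the successors of bb1 would then fail, so the
   expression would not be in ANTIC_IN (bb1); the value-wise union
   above would still place it in PA_OUT (bb1), which is what makes
   partial-partial insertion possible for the paths through bb2.  */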
2424 compute_partial_antic_aux (basic_block block,
2425 bool block_has_abnormal_pred_edge)
2427 bool changed = false;
2428 bitmap_set_t old_PA_IN;
2429 bitmap_set_t PA_OUT;
2432 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2434 old_PA_IN = PA_OUT = NULL;
2436 /* If any edges from predecessors are abnormal, antic_in is empty,
2438 if (block_has_abnormal_pred_edge)
2439 goto maybe_dump_sets;
2441 /* If there are too many partially anticipatable values in the
2442 block, phi_translate_set can take an exponential time: stop
2443 before the translation starts. */
2445 && single_succ_p (block)
2446 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2447 goto maybe_dump_sets;
2449 old_PA_IN = PA_IN (block);
2450 PA_OUT = bitmap_set_new ();
2452 /* If the block has no successors, ANTIC_OUT is empty. */
2453 if (EDGE_COUNT (block->succs) == 0)
2455 /* If we have one successor, we could have some phi nodes to
2456 translate through. Note that we can't phi translate across DFS
2457 back edges in partial antic, because it uses a union operation on
2458 the successors. For recurrences like IVs, we would end up
2459 generating a new value in the set on each go around (i + 3 (VH.1),
2460 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2461 else if (single_succ_p (block))
2463 basic_block succ = single_succ (block);
2464 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2465 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2467 /* If we have multiple successors, we take the union of all of
2471 VEC(basic_block, heap) * worklist;
2475 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2476 FOR_EACH_EDGE (e, ei, block->succs)
2478 if (e->flags & EDGE_DFS_BACK)
2480 VEC_quick_push (basic_block, worklist, e->dest);
2482 if (VEC_length (basic_block, worklist) > 0)
2484 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2489 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2490 bitmap_value_insert_into_set (PA_OUT,
2491 expression_for_id (i));
2492 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2494 bitmap_set_t pa_in = bitmap_set_new ();
2495 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2496 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2497 bitmap_value_insert_into_set (PA_OUT,
2498 expression_for_id (i));
2499 bitmap_set_free (pa_in);
2502 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2503 bitmap_value_insert_into_set (PA_OUT,
2504 expression_for_id (i));
2507 VEC_free (basic_block, heap, worklist);
2510 /* PA_IN starts with PA_OUT - TMP_GEN.
2511 Then we subtract things from ANTIC_IN. */
2512 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2514 /* For partial antic, we want to put back in the phi results, since
2515 we will properly avoid making them partially antic over backedges. */
2516 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2517 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2519 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2520 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2522 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2524 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2527 SET_BIT (changed_blocks, block->index);
2528 FOR_EACH_EDGE (e, ei, block->preds)
2529 SET_BIT (changed_blocks, e->src->index);
2532 RESET_BIT (changed_blocks, block->index);
2535 if (dump_file && (dump_flags & TDF_DETAILS))
2538 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2540 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2543 bitmap_set_free (old_PA_IN);
2545 bitmap_set_free (PA_OUT);
2549 /* Compute ANTIC and partial ANTIC sets. */
2552 compute_antic (void)
2554 bool changed = true;
2555 int num_iterations = 0;
2559 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2560 We pre-build the map of blocks with incoming abnormal edges here. */
2561 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2562 sbitmap_zero (has_abnormal_preds);
2569 FOR_EACH_EDGE (e, ei, block->preds)
2571 e->flags &= ~EDGE_DFS_BACK;
2572 if (e->flags & EDGE_ABNORMAL)
2574 SET_BIT (has_abnormal_preds, block->index);
2579 BB_VISITED (block) = 0;
2580 BB_DEFERRED (block) = 0;
2582 /* While we are here, give empty ANTIC_IN sets to each block. */
2583 ANTIC_IN (block) = bitmap_set_new ();
2584 PA_IN (block) = bitmap_set_new ();
2587 /* At the exit block we anticipate nothing. */
2588 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2589 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2590 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2592 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2593 sbitmap_ones (changed_blocks);
2596 if (dump_file && (dump_flags & TDF_DETAILS))
2597 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2600 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2602 if (TEST_BIT (changed_blocks, postorder[i]))
2604 basic_block block = BASIC_BLOCK (postorder[i]);
2605 changed |= compute_antic_aux (block,
2606 TEST_BIT (has_abnormal_preds,
2610 #ifdef ENABLE_CHECKING
2611 /* Theoretically possible, but *highly* unlikely. */
2612 gcc_assert (num_iterations < 500);
2616 statistics_histogram_event (cfun, "compute_antic iterations",
2619 if (do_partial_partial)
2621 sbitmap_ones (changed_blocks);
2622 mark_dfs_back_edges ();
2627 if (dump_file && (dump_flags & TDF_DETAILS))
2628 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2631 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2633 if (TEST_BIT (changed_blocks, postorder[i]))
2635 basic_block block = BASIC_BLOCK (postorder[i]);
2637 |= compute_partial_antic_aux (block,
2638 TEST_BIT (has_abnormal_preds,
2642 #ifdef ENABLE_CHECKING
2643 /* Theoretically possible, but *highly* unlikely. */
2644 gcc_assert (num_iterations < 500);
2647 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2650 sbitmap_free (has_abnormal_preds);
2651 sbitmap_free (changed_blocks);
2654 /* Return true if we can value number the call in STMT. This is true
2655 if we have a pure or constant call. */
2658 can_value_number_call (gimple stmt)
2660 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
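/* For example (an illustrative sketch): for a pure function f,

     x_1 = f (a_1);
     ...
     x_2 = f (a_1);

   the two calls get the same value number as long as no intervening
   store could change what f reads, so the second call becomes a
   candidate for elimination.  A call that may clobber memory gets a
   fresh value each time and is rejected here.  */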
2665 /* Return true if OP is a tree which we can perform PRE on.
2666 This may not match the operations we can value number, but in
2667 a perfect world would. */
2670 can_PRE_operation (tree op)
2672 return UNARY_CLASS_P (op)
2673 || BINARY_CLASS_P (op)
2674 || COMPARISON_CLASS_P (op)
2675 || TREE_CODE (op) == MEM_REF
2676 || TREE_CODE (op) == COMPONENT_REF
2677 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2678 || TREE_CODE (op) == CALL_EXPR
2679 || TREE_CODE (op) == ARRAY_REF;
2683 /* Inserted expressions are placed onto this worklist, which is used
2684 for performing quick dead code elimination of insertions we made
2685 that didn't turn out to be necessary. */
2686 static bitmap inserted_exprs;
2688 /* Pool allocated fake store expressions are placed onto this
2689 worklist, which, after performing dead code elimination, is walked
2690 to see which expressions need to be put into GC'able memory. */
2691 static VEC(gimple, heap) *need_creation;
2693 /* The actual worker for create_component_ref_by_pieces. */
2696 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2697 unsigned int *operand, gimple_seq *stmts,
2700 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2704 switch (currop->opcode)
2708 tree folded, sc = NULL_TREE;
2709 unsigned int nargs = 0;
2711 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2715 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2716 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2722 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2723 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2727 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2728 ref->operands) - 1);
2729 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2731 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2741 folded = build_call_array (currop->type,
2742 (TREE_CODE (fn) == FUNCTION_DECL
2743 ? build_fold_addr_expr (fn) : fn),
2747 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2753 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2755 tree offset = currop->op0;
2758 if (TREE_CODE (baseop) == ADDR_EXPR
2759 && handled_component_p (TREE_OPERAND (baseop, 0)))
2763 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2766 offset = int_const_binop (PLUS_EXPR, offset,
2767 build_int_cst (TREE_TYPE (offset),
2769 baseop = build_fold_addr_expr (base);
2771 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2774 case TARGET_MEM_REF:
2776 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2779 tree genop0 = NULL_TREE;
2780 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2786 op0expr = get_or_alloc_expr_for (currop->op0);
2787 genop0 = find_or_generate_expression (block, op0expr,
2792 if (DECL_P (baseop))
2793 return build6 (TARGET_MEM_REF, currop->type,
2795 genop0, currop->op1, currop->op2,
2796 unshare_expr (nextop->op1));
2798 return build6 (TARGET_MEM_REF, currop->type,
2800 genop0, currop->op1, currop->op2,
2801 unshare_expr (nextop->op1));
2807 gcc_assert (is_gimple_min_invariant (currop->op0));
2813 case VIEW_CONVERT_EXPR:
2816 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2821 folded = fold_build1 (currop->opcode, currop->type,
2826 case ALIGN_INDIRECT_REF:
2827 case MISALIGNED_INDIRECT_REF:
2830 tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2835 genop1 = fold_convert (build_pointer_type (currop->type),
2838 if (currop->opcode == MISALIGNED_INDIRECT_REF)
2839 folded = fold_build2 (currop->opcode, currop->type,
2840 genop1, currop->op1);
2842 folded = fold_build1 (currop->opcode, currop->type,
2850 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2852 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2853 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2859 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2862 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2865 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2870 /* For array ref vn_reference_ops, operand 1 of the array ref
2871 is op0 of the reference op and operand 3 of the array ref is
2873 case ARRAY_RANGE_REF:
2877 tree genop1 = currop->op0;
2879 tree genop2 = currop->op1;
2881 tree genop3 = currop->op2;
2883 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2887 op1expr = get_or_alloc_expr_for (genop1);
2888 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2893 /* Drop zero minimum index. */
2894 if (tree_int_cst_equal (genop2, integer_zero_node))
2898 op2expr = get_or_alloc_expr_for (genop2);
2899 genop2 = find_or_generate_expression (block, op2expr, stmts,
2907 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2908 /* We can't always put a size in units of the element alignment
2909 here as the element alignment may not be visible. See
2910 PR43783. Simply drop the element size for constant
2912 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2916 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2917 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2918 op3expr = get_or_alloc_expr_for (genop3);
2919 genop3 = find_or_generate_expression (block, op3expr, stmts,
2925 return build4 (currop->opcode, currop->type, genop0, genop1,
2932 tree genop2 = currop->op1;
2934 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2938 /* op1 should be a FIELD_DECL, which are represented by
2943 op2expr = get_or_alloc_expr_for (genop2);
2944 genop2 = find_or_generate_expression (block, op2expr, stmts,
2950 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2956 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2957 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2978 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2979 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2980 trying to rename aggregates into SSA form directly, which is a no-no.
2982 Thus, this routine doesn't create temporaries; it just builds a
2983 single access expression for the array, calling
2984 find_or_generate_expression to build the innermost pieces.
2986 This function is a subroutine of create_expression_by_pieces, and
2987 should not be called on its own unless you really know what you
2991 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2992 gimple_seq *stmts, gimple domstmt)
2994 unsigned int op = 0;
2995 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
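/* For instance (informal sketch): a reference such as x.f[i_1] is
   stored outermost-first as the operand sequence

     { ARRAY_REF <i_1>, COMPONENT_REF <f>, <x> }

   and the recursion above rebuilds it inside out: the recursive call
   first produces the base x.f, find_or_generate_expression supplies
   a leader for the index i_1, and the ARRAY_REF is then wrapped
   around the result.  */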
2998 /* Find a leader for an expression, or generate one using
2999 create_expression_by_pieces if it's ANTIC but
3001 BLOCK is the basic_block we are looking for leaders in.
3002 EXPR is the expression to find a leader or generate for.
3003 STMTS is the statement list to put the inserted expressions on.
3004 Returns the SSA_NAME of the LHS of the generated expression or the
3006 DOMSTMT if non-NULL is a statement that should be dominated by
3007 all uses in the generated expression. If DOMSTMT is non-NULL this
3008 routine can fail and return NULL_TREE. Otherwise it will assert
3012 find_or_generate_expression (basic_block block, pre_expr expr,
3013 gimple_seq *stmts, gimple domstmt)
3015 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
3016 get_expr_value_id (expr), domstmt);
3020 if (leader->kind == NAME)
3021 genop = PRE_EXPR_NAME (leader);
3022 else if (leader->kind == CONSTANT)
3023 genop = PRE_EXPR_CONSTANT (leader);
3026 /* If it's still NULL, it must be a complex expression, so generate
3027 it recursively. Not so if inserting expressions for values generated
3032 bitmap_set_t exprset;
3033 unsigned int lookfor = get_expr_value_id (expr);
3034 bool handled = false;
3038 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
3039 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
3041 pre_expr temp = expression_for_id (i);
3042 if (temp->kind != NAME)
3045 genop = create_expression_by_pieces (block, temp, stmts,
3047 get_expr_type (expr));
3051 if (!handled && domstmt)
3054 gcc_assert (handled);
3059 #define NECESSARY GF_PLF_1
3061 /* Create an expression in pieces, so that we can handle very complex
3062 expressions that may be ANTIC, but not necessarily GIMPLE.
3063 BLOCK is the basic block the expression will be inserted into,
3064 EXPR is the expression to insert (in value form)
3065 STMTS is a statement list to append the necessary insertions into.
3067 This function will die if we hit some value that shouldn't be
3068 ANTIC but is (i.e. there is no leader for it or its components).
3069 This function may also generate expressions that are themselves
3070 partially or fully redundant. Those that are will be either made
3071 fully redundant during the next iteration of insert (for partially
3072 redundant ones), or eliminated by eliminate (for fully redundant
3075 If DOMSTMT is non-NULL then we make sure that all uses in the
3076 expressions dominate that statement. In this case the function
3077 can return NULL_TREE to signal failure. */
3080 create_expression_by_pieces (basic_block block, pre_expr expr,
3081 gimple_seq *stmts, gimple domstmt, tree type)
3085 gimple_seq forced_stmts = NULL;
3086 unsigned int value_id;
3087 gimple_stmt_iterator gsi;
3088 tree exprtype = type ? type : get_expr_type (expr);
3094 /* We may hit the NAME/CONSTANT case if we have to convert types
3095 that value numbering saw through. */
3097 folded = PRE_EXPR_NAME (expr);
3100 folded = PRE_EXPR_CONSTANT (expr);
3104 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3105 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3110 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3111 switch (nary->length)
3115 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3116 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3117 tree genop1 = find_or_generate_expression (block, op1,
3119 tree genop2 = find_or_generate_expression (block, op2,
3121 if (!genop1 || !genop2)
3123 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3124 may be a constant with the wrong type. */
3125 if (nary->opcode == POINTER_PLUS_EXPR)
3127 genop1 = fold_convert (nary->type, genop1);
3128 genop2 = fold_convert (sizetype, genop2);
3132 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3133 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3136 folded = fold_build2 (nary->opcode, nary->type,
3142 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3143 tree genop1 = find_or_generate_expression (block, op1,
3147 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3149 folded = fold_build1 (nary->opcode, nary->type,
3162 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3163 folded = fold_convert (exprtype, folded);
3165 /* Force the generated expression to be a sequence of GIMPLE
3167 We have to call unshare_expr because force_gimple_operand may
3168 modify the tree we pass to it. */
3169 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3172 /* If we have any intermediate expressions, add them to the value
3173 sets and chain them into the instruction stream. */
3176 gsi = gsi_start (forced_stmts);
3177 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3179 gimple stmt = gsi_stmt (gsi);
3180 tree forcedname = gimple_get_lhs (stmt);
3183 if (TREE_CODE (forcedname) == SSA_NAME)
3185 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3186 VN_INFO_GET (forcedname)->valnum = forcedname;
3187 VN_INFO (forcedname)->value_id = get_next_value_id ();
3188 nameexpr = get_or_alloc_expr_for_name (forcedname);
3189 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3191 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3192 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3194 mark_symbols_for_renaming (stmt);
3196 gimple_seq_add_seq (stmts, forced_stmts);
3199 /* Build and insert the assignment of the end result to the temporary
3200 that we will return. */
3201 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3203 pretemp = create_tmp_reg (exprtype, "pretmp");
3204 get_var_ann (pretemp);
3208 add_referenced_var (temp);
3210 newstmt = gimple_build_assign (temp, folded);
3211 name = make_ssa_name (temp, newstmt);
3212 gimple_assign_set_lhs (newstmt, name);
3213 gimple_set_plf (newstmt, NECESSARY, false);
3215 gimple_seq_add_stmt (stmts, newstmt);
3216 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3218 /* All the symbols in NEWEXPR should be put into SSA form. */
3219 mark_symbols_for_renaming (newstmt);
3221 /* Add a value number to the temporary.
3222 The value may already exist in either NEW_SETS or AVAIL_OUT, because
3223 we are creating the expression by pieces, and this particular piece of
3224 the expression may have been represented. There is no harm in replacing
3226 VN_INFO_GET (name)->valnum = name;
3227 value_id = get_expr_value_id (expr);
3228 VN_INFO (name)->value_id = value_id;
3229 nameexpr = get_or_alloc_expr_for_name (name);
3230 add_to_value (value_id, nameexpr);
3231 if (NEW_SETS (block))
3232 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3233 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3235 pre_stats.insertions++;
3236 if (dump_file && (dump_flags & TDF_DETAILS))
3238 fprintf (dump_file, "Inserted ");
3239 print_gimple_stmt (dump_file, newstmt, 0, 0);
3240 fprintf (dump_file, " in predecessor %d\n", block->index);
3247 /* Returns true if we want to inhibit the insertions of PHI nodes
3248 for the given EXPR for basic block BB (a member of a loop).
3249 We want to do this when we fear that the induction variable we
3250 create might inhibit vectorization. */
3253 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3255 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3256 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3257 vn_reference_op_t op;
3260 /* If we aren't going to vectorize we don't inhibit anything. */
3261 if (!flag_tree_vectorize)
3264 /* Otherwise we inhibit the insertion when the address of the
3265 memory reference is a simple induction variable. In other
3266 cases the vectorizer won't do anything anyway (either it's
3267 loop invariant or a complicated expression). */
3268 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3273 case ARRAY_RANGE_REF:
3274 if (TREE_CODE (op->op0) != SSA_NAME)
3279 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3281 /* Default defs are loop invariant. */
3284 /* Defined outside this loop, also loop invariant. */
3285 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3287 /* If it's a simple induction variable, inhibit insertion,
3288 the vectorizer might be interested in this one. */
3289 if (simple_iv (bb->loop_father, bb->loop_father,
3290 op->op0, &iv, true))
3292 /* No simple IV, vectorizer can't do anything, hence no
3293 reason to inhibit the transformation for this operand. */
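/* Hypothetical example of what this guards against: in

     for (i_1 = 0; i_1 < n_2; i_1++)
       sum_3 += a_4[i_1];

   making the load from a_4[i_1] available across the back edge would
   require a phi for its address, in effect a new induction variable
   in pointer form.  The vectorizer understands the a_4[i_1] form via
   simple_iv, so when the address is a simple IV we skip the phi
   insertion rather than risk obscuring the access pattern.  */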
3303 /* Insert the to-be-made-available values of expression EXPRNUM for each
3304 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3305 merge the result with a phi node, given the same value number as
3306 NODE. Return true if we have inserted new stuff. */
3309 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3312 pre_expr expr = expression_for_id (exprnum);
3314 unsigned int val = get_expr_value_id (expr);
3316 bool insertions = false;
3321 tree type = get_expr_type (expr);
3325 if (dump_file && (dump_flags & TDF_DETAILS))
3327 fprintf (dump_file, "Found partial redundancy for expression ");
3328 print_pre_expr (dump_file, expr);
3329 fprintf (dump_file, " (%04d)\n", val);
3332 /* Make sure we aren't creating an induction variable. */
3333 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3335 bool firstinsideloop = false;
3336 bool secondinsideloop = false;
3337 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3338 EDGE_PRED (block, 0)->src);
3339 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3340 EDGE_PRED (block, 1)->src);
3341 /* Induction variables only have one edge inside the loop. */
3342 if ((firstinsideloop ^ secondinsideloop)
3343 && (expr->kind != REFERENCE
3344 || inhibit_phi_insertion (block, expr)))
3346 if (dump_file && (dump_flags & TDF_DETAILS))
3347 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3352 /* Make the necessary insertions. */
3353 FOR_EACH_EDGE (pred, ei, block->preds)
3355 gimple_seq stmts = NULL;
3358 eprime = avail[bprime->index];
3360 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3362 builtexpr = create_expression_by_pieces (bprime,
3366 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3367 gsi_insert_seq_on_edge (pred, stmts);
3368 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3371 else if (eprime->kind == CONSTANT)
3373 /* Constants may not have the right type; fold_convert
3374 should give us back a constant with the right type.
3376 tree constant = PRE_EXPR_CONSTANT (eprime);
3377 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3379 tree builtexpr = fold_convert (type, constant);
3380 if (!is_gimple_min_invariant (builtexpr))
3382 tree forcedexpr = force_gimple_operand (builtexpr,
3385 if (!is_gimple_min_invariant (forcedexpr))
3387 if (forcedexpr != builtexpr)
3389 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3390 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3394 gimple_stmt_iterator gsi;
3395 gsi = gsi_start (stmts);
3396 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3398 gimple stmt = gsi_stmt (gsi);
3399 tree lhs = gimple_get_lhs (stmt);
3400 if (TREE_CODE (lhs) == SSA_NAME)
3401 bitmap_set_bit (inserted_exprs,
3402 SSA_NAME_VERSION (lhs));
3403 gimple_set_plf (stmt, NECESSARY, false);
3405 gsi_insert_seq_on_edge (pred, stmts);
3407 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3411 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3414 else if (eprime->kind == NAME)
3416 /* We may have to do a conversion because our value
3417 numbering can look through types in certain cases, but
3418 our IL requires all operands of a phi node have the same
3420 tree name = PRE_EXPR_NAME (eprime);
3421 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3425 builtexpr = fold_convert (type, name);
3426 forcedexpr = force_gimple_operand (builtexpr,
3430 if (forcedexpr != name)
3432 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3433 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3438 gimple_stmt_iterator gsi;
3439 gsi = gsi_start (stmts);
3440 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3442 gimple stmt = gsi_stmt (gsi);
3443 tree lhs = gimple_get_lhs (stmt);
3444 if (TREE_CODE (lhs) == SSA_NAME)
3445 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3446 gimple_set_plf (stmt, NECESSARY, false);
3448 gsi_insert_seq_on_edge (pred, stmts);
3450 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3454 /* If we didn't want a phi node, and we made insertions, we still have
3455 inserted new stuff, and thus return true. If we didn't want a phi node,
3456 and didn't make insertions, we haven't added anything new, so return
3458 if (nophi && insertions)
3460 else if (nophi && !insertions)
3463 /* Now build a phi for the new variable. */
3464 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3466 prephitemp = create_tmp_var (type, "prephitmp");
3467 get_var_ann (prephitemp);
3471 add_referenced_var (temp);
3473 if (TREE_CODE (type) == COMPLEX_TYPE
3474 || TREE_CODE (type) == VECTOR_TYPE)
3475 DECL_GIMPLE_REG_P (temp) = 1;
3476 phi = create_phi_node (temp, block);
3478 gimple_set_plf (phi, NECESSARY, false);
3479 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3480 VN_INFO (gimple_phi_result (phi))->value_id = val;
3481 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3482 FOR_EACH_EDGE (pred, ei, block->preds)
3484 pre_expr ae = avail[pred->src->index];
3485 gcc_assert (get_expr_type (ae) == type
3486 || useless_type_conversion_p (type, get_expr_type (ae)));
3487 if (ae->kind == CONSTANT)
3488 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3490 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3494 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3495 add_to_value (val, newphi);
3497 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3498 this insertion, since we test for the existence of this value in PHI_GEN
3499 before proceeding with the partial redundancy checks in insert_aux.
3501 The value may exist in AVAIL_OUT, in particular, it could be represented
3502 by the expression we are trying to eliminate, in which case we want the
3503 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3506 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3507 this block, because if it did, it would have existed in our dominator's
3508 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3511 bitmap_insert_into_set (PHI_GEN (block), newphi);
3512 bitmap_value_replace_in_set (AVAIL_OUT (block),
3514 bitmap_insert_into_set (NEW_SETS (block),
3517 if (dump_file && (dump_flags & TDF_DETAILS))
3519 fprintf (dump_file, "Created phi ");
3520 print_gimple_stmt (dump_file, phi, 0, 0);
3521 fprintf (dump_file, " in block %d\n", block->index);
3529 /* Perform insertion of partially redundant values.
3530 For BLOCK, do the following:
3531 1. Propagate the NEW_SETS of the dominator into the current block.
3532 If the block has multiple predecessors,
3533 2a. Iterate over the ANTIC expressions for the block to see if
3534 any of them are partially redundant.
3535 2b. If so, insert them into the necessary predecessors to make
3536 the expression fully redundant.
3537 2c. Insert a new PHI merging the values of the predecessors.
3538 2d. Insert the new PHI, and the new expressions, into the
3540 3. Recursively call ourselves on the dominator children of BLOCK.
3542 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3543 do_regular_insertion and do_partial_insertion.
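/* Worked illustration (hypothetical GIMPLE, not actual pass output):

     bb2:  x_1 = a_1 + b_1;     <- one predecessor computes the value
     bb3:  ;                    <- the other does not
     bb4:  y_2 = a_1 + b_1;     <- bb4 has predecessors bb2 and bb3

   a_1 + b_1 is ANTIC in bb4 but available only from bb2, so step 2b
   inserts pretmp_3 = a_1 + b_1 on the edge from bb3 and step 2c
   creates prephitmp_4 = PHI <x_1(bb2), pretmp_3(bb3)> in bb4.  The
   computation of y_2 is then fully redundant and elimination can
   replace it with prephitmp_4.  */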
3548 do_regular_insertion (basic_block block, basic_block dom)
3550 bool new_stuff = false;
3551 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3555 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3557 if (expr->kind != NAME)
3561 bool by_some = false;
3562 bool cant_insert = false;
3563 bool all_same = true;
3564 pre_expr first_s = NULL;
3567 pre_expr eprime = NULL;
3569 pre_expr edoubleprime = NULL;
3570 bool do_insertion = false;
3572 val = get_expr_value_id (expr);
3573 if (bitmap_set_contains_value (PHI_GEN (block), val))
3575 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3577 if (dump_file && (dump_flags & TDF_DETAILS))
3578 fprintf (dump_file, "Found fully redundant value\n");
3582 avail = XCNEWVEC (pre_expr, last_basic_block);
3583 FOR_EACH_EDGE (pred, ei, block->preds)
3585 unsigned int vprime;
3587 /* We should never run insertion for the exit block
3588 and so not come across fake pred edges. */
3589 gcc_assert (!(pred->flags & EDGE_FAKE));
3591 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3594 /* eprime will generally only be NULL if the
3595 value of the expression, translated
3596 through the PHI for this predecessor, is
3597 undefined. If that is the case, we can't
3598 make the expression fully redundant,
3599 because its value is undefined along a
3600 predecessor path. We can thus break out
3601 early because it doesn't matter what the
3602 rest of the results are. */
3609 eprime = fully_constant_expression (eprime);
3610 vprime = get_expr_value_id (eprime);
3611 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3613 if (edoubleprime == NULL)
3615 avail[bprime->index] = eprime;
3620 avail[bprime->index] = edoubleprime;
3622 /* We want to perform insertions to remove a redundancy on
3623 a path in the CFG we want to optimize for speed. */
3624 if (optimize_edge_for_speed_p (pred))
3625 do_insertion = true;
3626 if (first_s == NULL)
3627 first_s = edoubleprime;
3628 else if (!pre_expr_eq (first_s, edoubleprime))
3632 /* If we can insert it, it's not the same value
3633 already existing along every predecessor, and
3634 it's defined by some predecessor, then it is
3635 partially redundant. */
3636 if (!cant_insert && !all_same && by_some && do_insertion
3637 && dbg_cnt (treepre_insert))
3639 if (insert_into_preds_of_block (block, get_expression_id (expr),
3643 /* If all edges produce the same value and that value is
3644 an invariant, then the PHI has the same value on all
3645 edges. Note this. */
3646 else if (!cant_insert && all_same && eprime
3647 && (edoubleprime->kind == CONSTANT
3648 || edoubleprime->kind == NAME)
3649 && !value_id_constant_p (val))
3653 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3654 value_expressions, val);
3656 unsigned int new_val = get_expr_value_id (edoubleprime);
3657 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3659 pre_expr expr = expression_for_id (j);
3661 if (expr->kind == NAME)
3663 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3664 /* Just reset the value id and valnum so it is
3665 the same as the constant we have discovered. */
3666 if (edoubleprime->kind == CONSTANT)
3668 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3669 pre_stats.constified++;
3672 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3673 info->value_id = new_val;
3681 VEC_free (pre_expr, heap, exprs);
3686 /* Perform insertion for partially anticipatable expressions. There
3687 is only one case in which we will perform insertion for these:
3688 when the expression is partially anticipatable and fully available.
3689 In this case, we know that putting it earlier will enable us to
3690 remove the later computation. */
3694 do_partial_partial_insertion (basic_block block, basic_block dom)
3696 bool new_stuff = false;
3697 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3701 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3703 if (expr->kind != NAME)
3708 bool cant_insert = false;
3711 pre_expr eprime = NULL;
3714 val = get_expr_value_id (expr);
3715 if (bitmap_set_contains_value (PHI_GEN (block), val))
3717 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3720 avail = XCNEWVEC (pre_expr, last_basic_block);
3721 FOR_EACH_EDGE (pred, ei, block->preds)
3723 unsigned int vprime;
3724 pre_expr edoubleprime;
3726 /* We should never run insertion for the exit block
3727 and so not come across fake pred edges. */
3728 gcc_assert (!(pred->flags & EDGE_FAKE));
3730 eprime = phi_translate (expr, ANTIC_IN (block),
3734 /* eprime will generally only be NULL if the
3735 value of the expression, translated
3736 through the PHI for this predecessor, is
3737 undefined. If that is the case, we can't
3738 make the expression fully redundant,
3739 because its value is undefined along a
3740 predecessor path. We can thus break out
3741 early because it doesn't matter what the
3742 rest of the results are. */
3749 eprime = fully_constant_expression (eprime);
3750 vprime = get_expr_value_id (eprime);
3751 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3753 if (edoubleprime == NULL)
3759 avail[bprime->index] = edoubleprime;
3763 /* If we can insert it, it's not the same value
3764 already existing along every predecessor, and
3765 it's defined by some predecessor, then it is
3766 partially redundant. */
3767 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3769 pre_stats.pa_insert++;
3770 if (insert_into_preds_of_block (block, get_expression_id (expr),
3778 VEC_free (pre_expr, heap, exprs);
3783 insert_aux (basic_block block)
3786 bool new_stuff = false;
3791 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3796 bitmap_set_t newset = NEW_SETS (dom);
3799 /* Note that we need to value_replace both NEW_SETS and
3800 AVAIL_OUT. In both sets, the value may be represented by some
3801 non-simple expression that we want to replace with the new
3802 one. */
3803 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3805 pre_expr expr = expression_for_id (i);
3806 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3807 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3810 if (!single_pred_p (block))
3812 new_stuff |= do_regular_insertion (block, dom);
3813 if (do_partial_partial)
3814 new_stuff |= do_partial_partial_insertion (block, dom);
3818 for (son = first_dom_son (CDI_DOMINATORS, block);
3820 son = next_dom_son (CDI_DOMINATORS, son))
3822 new_stuff |= insert_aux (son);
3828 /* Perform insertion of partially redundant values. */
3833 bool new_stuff = true;
3835 int num_iterations = 0;
3838 NEW_SETS (bb) = bitmap_set_new ();
3843 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3845 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3849 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */
3852 add_to_exp_gen (basic_block block, tree op)
3857 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3859 result = get_or_alloc_expr_for_name (op);
3860 bitmap_value_insert_into_set (EXP_GEN (block), result);
3864 /* Create value ids for PHI in BLOCK. */
3867 make_values_for_phi (gimple phi, basic_block block)
3869 tree result = gimple_phi_result (phi);
3871 /* We have no need for virtual phis, as they don't represent
3872 actual computations. */
3873 if (is_gimple_reg (result))
3875 pre_expr e = get_or_alloc_expr_for_name (result);
3876 add_to_value (get_expr_value_id (e), e);
3877 bitmap_insert_into_set (PHI_GEN (block), e);
3878 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3882 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3884 tree arg = gimple_phi_arg_def (phi, i);
3885 if (TREE_CODE (arg) == SSA_NAME)
3887 e = get_or_alloc_expr_for_name (arg);
3888 add_to_value (get_expr_value_id (e), e);
3895 /* Compute the AVAIL set for all basic blocks.
3897 This function performs value numbering of the statements in each basic
3898 block. The AVAIL sets are built from information we glean while doing
3899 this value numbering, since the AVAIL sets contain only one entry per
3902 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3903 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
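/* For example (informal): if bb1 dominates bb2 and bb1 contains
   x_1 = a_1 + b_1, then x_1 is in TMP_GEN (bb1) and therefore in
   AVAIL_OUT (bb1); by the equations above it is also in
   AVAIL_OUT (bb2), so a later a_1 + b_1 in bb2 finds the leader x_1
   during elimination.  */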
3906 compute_avail (void)
3909 basic_block block, son;
3910 basic_block *worklist;
3914 /* We pretend that default definitions are defined in the entry block.
3915 This includes function arguments and the static chain decl. */
3916 for (i = 1; i < num_ssa_names; ++i)
3918 tree name = ssa_name (i);
3921 || !SSA_NAME_IS_DEFAULT_DEF (name)
3922 || has_zero_uses (name)
3923 || !is_gimple_reg (name))
3926 e = get_or_alloc_expr_for_name (name);
3927 add_to_value (get_expr_value_id (e), e);
3929 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3930 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3933 /* Allocate the worklist. */
3934 worklist = XNEWVEC (basic_block, n_basic_blocks);
3936 /* Seed the algorithm by putting the dominator children of the entry
3937 block on the worklist. */
3938 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3940 son = next_dom_son (CDI_DOMINATORS, son))
3941 worklist[sp++] = son;
3943 /* Loop until the worklist is empty. */
3946 gimple_stmt_iterator gsi;
3949 unsigned int stmt_uid = 1;
3951 /* Pick a block from the worklist. */
3952 block = worklist[--sp];
3954 /* Initially, the set of available values in BLOCK is that of
3955 its immediate dominator. */
3956 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3958 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3960 /* Generate values for PHI nodes. */
3961 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3962 make_values_for_phi (gsi_stmt (gsi), block);
3964 BB_MAY_NOTRETURN (block) = 0;
3966 /* Now compute value numbers and populate value sets with all
3967 the expressions computed in BLOCK. */
3968 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3973 stmt = gsi_stmt (gsi);
3974 gimple_set_uid (stmt, stmt_uid++);
3976 /* Cache whether the basic-block has any non-visible side-effect
3978 If this isn't a call or it is the last stmt in the
3979 basic-block then the CFG represents things correctly. */
3980 if (is_gimple_call (stmt)
3981 && !stmt_ends_bb_p (stmt))
3983 /* Non-looping const functions always return normally.
3984 Otherwise the call might not return or have side-effects
3985 that forbid hoisting possibly trapping expressions
3987 int flags = gimple_call_flags (stmt);
3988 if (!(flags & ECF_CONST)
3989 || (flags & ECF_LOOPING_CONST_OR_PURE))
3990 BB_MAY_NOTRETURN (block) = 1;
3993 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3995 pre_expr e = get_or_alloc_expr_for_name (op);
3997 add_to_value (get_expr_value_id (e), e);
3999 bitmap_insert_into_set (TMP_GEN (block), e);
4000 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
4003 if (gimple_has_volatile_ops (stmt)
4004 || stmt_could_throw_p (stmt))
4007 switch (gimple_code (stmt))
4010 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4011 add_to_exp_gen (block, op);
4018 vn_reference_op_t vro;
4019 pre_expr result = NULL;
4020 VEC(vn_reference_op_s, heap) *ops = NULL;
4022 if (!can_value_number_call (stmt))
4025 copy_reference_ops_from_call (stmt, &ops);
4026 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
4027 gimple_expr_type (stmt),
4029 VEC_free (vn_reference_op_s, heap, ops);
4033 for (i = 0; VEC_iterate (vn_reference_op_s,
4037 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4038 add_to_exp_gen (block, vro->op0);
4039 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4040 add_to_exp_gen (block, vro->op1);
4041 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4042 add_to_exp_gen (block, vro->op2);
4044 result = (pre_expr) pool_alloc (pre_expr_pool);
4045 result->kind = REFERENCE;
4047 PRE_EXPR_REFERENCE (result) = ref;
4049 get_or_alloc_expression_id (result);
4050 add_to_value (get_expr_value_id (result), result);
4052 bitmap_value_insert_into_set (EXP_GEN (block), result);
4058 pre_expr result = NULL;
4059 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4063 case tcc_comparison:
4068 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4069 gimple_assign_rhs_code (stmt),
4070 gimple_expr_type (stmt),
4071 gimple_assign_rhs1 (stmt),
4072 gimple_assign_rhs2 (stmt),
4073 NULL_TREE, NULL_TREE, &nary);
4078 for (i = 0; i < nary->length; i++)
4079 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4080 add_to_exp_gen (block, nary->op[i]);
4082 result = (pre_expr) pool_alloc (pre_expr_pool);
4083 result->kind = NARY;
4085 PRE_EXPR_NARY (result) = nary;
4089 case tcc_declaration:
4094 vn_reference_op_t vro;
4096 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4102 for (i = 0; VEC_iterate (vn_reference_op_s,
4106 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4107 add_to_exp_gen (block, vro->op0);
4108 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4109 add_to_exp_gen (block, vro->op1);
4110 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4111 add_to_exp_gen (block, vro->op2);
4113 result = (pre_expr) pool_alloc (pre_expr_pool);
4114 result->kind = REFERENCE;
4116 PRE_EXPR_REFERENCE (result) = ref;
4121 /* For any other statement that we don't
4122 recognize, simply add all referenced
4123 SSA_NAMEs to EXP_GEN. */
4124 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4125 add_to_exp_gen (block, op);
4129 get_or_alloc_expression_id (result);
4130 add_to_value (get_expr_value_id (result), result);
4132 bitmap_value_insert_into_set (EXP_GEN (block), result);
4141 /* Put the dominator children of BLOCK on the worklist of blocks
4142 to compute available sets for. */
4143 for (son = first_dom_son (CDI_DOMINATORS, block);
4145 son = next_dom_son (CDI_DOMINATORS, son))
4146 worklist[sp++] = son;
4152 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4153 than the available expressions for it. The insertion point is
4154 right before the first use in STMT. Returns the SSA_NAME that should
4155 be used for replacement. */
4158 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4160 basic_block bb = gimple_bb (stmt);
4161 gimple_stmt_iterator gsi;
4162 gimple_seq stmts = NULL;
4166 /* First create a value expression from the expression we want
4167 to insert and associate it with the value handle for SSA_VN. */
4168 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4172 /* Then use create_expression_by_pieces to generate a valid
4173 expression to insert at this point of the IL stream. */
4174 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4175 if (expr == NULL_TREE)
4177 gsi = gsi_for_stmt (stmt);
4178 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4183 /* Eliminate fully redundant computations. */
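/* Worked example (illustrative): given

     x_1 = a_1 + b_1;
     y_2 = a_1 + b_1;

   both left-hand sides carry the same value id, so the AVAIL_OUT
   lookup below finds x_1 as the leader for y_2's value and the
   second statement is rewritten into the copy y_2 = x_1 via
   propagate_tree_value_into_stmt.  */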
4188 VEC (gimple, heap) *to_remove = NULL;
4190 unsigned int todo = 0;
4191 gimple_stmt_iterator gsi;
4197 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4199 stmt = gsi_stmt (gsi);
4201 /* Lookup the RHS of the expression, see if we have an
4202 available computation for it. If so, replace the RHS with
4203 the available computation. */
4204 if (gimple_has_lhs (stmt)
4205 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4206 && !gimple_assign_ssa_name_copy_p (stmt)
4207 && (!gimple_assign_single_p (stmt)
4208 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4209 && !gimple_has_volatile_ops (stmt)
4210 && !has_zero_uses (gimple_get_lhs (stmt)))
4212 tree lhs = gimple_get_lhs (stmt);
4213 tree rhs = NULL_TREE;
4215 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4216 pre_expr sprimeexpr;
4218 if (gimple_assign_single_p (stmt))
4219 rhs = gimple_assign_rhs1 (stmt);
4221 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4222 get_expr_value_id (lhsexpr),
4227 if (sprimeexpr->kind == CONSTANT)
4228 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4229 else if (sprimeexpr->kind == NAME)
4230 sprime = PRE_EXPR_NAME (sprimeexpr);
4235 /* If there is no existing leader but SCCVN knows this
4236 value is constant, use that constant. */
4237 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4239 sprime = VN_INFO (lhs)->valnum;
4240 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4241 TREE_TYPE (sprime)))
4242 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4244 if (dump_file && (dump_flags & TDF_DETAILS))
4246 fprintf (dump_file, "Replaced ");
4247 print_gimple_expr (dump_file, stmt, 0, 0);
4248 fprintf (dump_file, " with ");
4249 print_generic_expr (dump_file, sprime, 0);
4250 fprintf (dump_file, " in ");
4251 print_gimple_stmt (dump_file, stmt, 0, 0);
4253 pre_stats.eliminations++;
4254 propagate_tree_value_into_stmt (&gsi, sprime);
4255 stmt = gsi_stmt (gsi);
4260 /* If there is no existing usable leader but SCCVN thinks
4261 it has an expression it wants to use as replacement,
4263 if (!sprime || sprime == lhs)
4265 tree val = VN_INFO (lhs)->valnum;
4267 && TREE_CODE (val) == SSA_NAME
4268 && VN_INFO (val)->needs_insertion
4269 && can_PRE_operation (vn_get_expr_for (val)))
4270 sprime = do_SCCVN_insertion (stmt, val);
4274 && (rhs == NULL_TREE
4275 || TREE_CODE (rhs) != SSA_NAME
4276 || may_propagate_copy (rhs, sprime)))
4278 gcc_assert (sprime != rhs);
4280 if (dump_file && (dump_flags & TDF_DETAILS))
4282 fprintf (dump_file, "Replaced ");
4283 print_gimple_expr (dump_file, stmt, 0, 0);
4284 fprintf (dump_file, " with ");
4285 print_generic_expr (dump_file, sprime, 0);
4286 fprintf (dump_file, " in ");
4287 print_gimple_stmt (dump_file, stmt, 0, 0);
4290 if (TREE_CODE (sprime) == SSA_NAME)
4291 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4293 /* We need to make sure the new and old types actually match,
4294 which may require adding a simple cast, which fold_convert
4296 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4297 && !useless_type_conversion_p (gimple_expr_type (stmt),
4298 TREE_TYPE (sprime)))
4299 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4301 pre_stats.eliminations++;
4302 propagate_tree_value_into_stmt (&gsi, sprime);
4303 stmt = gsi_stmt (gsi);
4306 /* If we removed EH side effects from the statement, clean
4307 its EH information. */
4308 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4310 bitmap_set_bit (need_eh_cleanup,
4311 gimple_bb (stmt)->index);
4312 if (dump_file && (dump_flags & TDF_DETAILS))
4313 fprintf (dump_file, " Removed EH side effects.\n");
4317 /* If the statement is a scalar store, see if the expression
4318 has the same value number as its rhs. If so, the store is
4320 else if (gimple_assign_single_p (stmt)
4321 && !is_gimple_reg (gimple_assign_lhs (stmt))
4322 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4323 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4325 tree rhs = gimple_assign_rhs1 (stmt);
4327 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4328 gimple_vuse (stmt), true, NULL);
4329 if (TREE_CODE (rhs) == SSA_NAME)
4330 rhs = VN_INFO (rhs)->valnum;
4332 && operand_equal_p (val, rhs, 0))
4334 if (dump_file && (dump_flags & TDF_DETAILS))
4336 fprintf (dump_file, "Deleted redundant store ");
4337 print_gimple_stmt (dump_file, stmt, 0, 0);
4340 /* Queue stmt for removal. */
4341 VEC_safe_push (gimple, heap, to_remove, stmt);
4344 /* Visit COND_EXPRs and fold the comparison with the
4345 available value-numbers. */
4346 else if (gimple_code (stmt) == GIMPLE_COND)
4348 tree op0 = gimple_cond_lhs (stmt);
4349 tree op1 = gimple_cond_rhs (stmt);
4352 if (TREE_CODE (op0) == SSA_NAME)
4353 op0 = VN_INFO (op0)->valnum;
4354 if (TREE_CODE (op1) == SSA_NAME)
4355 op1 = VN_INFO (op1)->valnum;
4356 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4358 if (result && TREE_CODE (result) == INTEGER_CST)
4360 if (integer_zerop (result))
4361 gimple_cond_make_false (stmt);
4363 gimple_cond_make_true (stmt);
4365 todo = TODO_cleanup_cfg;
4368 /* Visit indirect calls and turn them into direct calls if
4370 if (gimple_code (stmt) == GIMPLE_CALL
4371 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4373 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4374 if (TREE_CODE (fn) == ADDR_EXPR
4375 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4377 if (dump_file && (dump_flags & TDF_DETAILS))
4379 fprintf (dump_file, "Replacing call target with ");
4380 print_generic_expr (dump_file, fn, 0);
4381 fprintf (dump_file, " in ");
4382 print_gimple_stmt (dump_file, stmt, 0, 0);
4385 gimple_call_set_fn (stmt, fn);
4387 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4389 bitmap_set_bit (need_eh_cleanup,
4390 gimple_bb (stmt)->index);
4391 if (dump_file && (dump_flags & TDF_DETAILS))
4392 fprintf (dump_file, " Removed EH side effects.\n");
4395 /* Changing an indirect call to a direct call may
4396 have exposed different semantics. This may
4397 require an SSA update. */
4398 todo |= TODO_update_ssa_only_virtuals;
4403 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4405 gimple stmt, phi = gsi_stmt (gsi);
4406 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4407 pre_expr sprimeexpr, resexpr;
4408 gimple_stmt_iterator gsi2;
4410 /* We want to perform redundant PHI elimination. Do so by
4411 replacing the PHI with a single copy if possible.
4412 Do not touch inserted, single-argument or virtual PHIs. */
4413 if (gimple_phi_num_args (phi) == 1
4414 || !is_gimple_reg (res))
4420 resexpr = get_or_alloc_expr_for_name (res);
4421 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4422 get_expr_value_id (resexpr), NULL);
4425 if (sprimeexpr->kind == CONSTANT)
4426 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4427 else if (sprimeexpr->kind == NAME)
4428 sprime = PRE_EXPR_NAME (sprimeexpr);
4432 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4434 sprime = VN_INFO (res)->valnum;
4435 if (!useless_type_conversion_p (TREE_TYPE (res),
4436 TREE_TYPE (sprime)))
4437 sprime = fold_convert (TREE_TYPE (res), sprime);
4446 if (dump_file && (dump_flags & TDF_DETAILS))
4448 fprintf (dump_file, "Replaced redundant PHI node defining ");
4449 print_generic_expr (dump_file, res, 0);
4450 fprintf (dump_file, " with ");
4451 print_generic_expr (dump_file, sprime, 0);
4452 fprintf (dump_file, "\n");
4455 remove_phi_node (&gsi, false);
4457 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4458 && TREE_CODE (sprime) == SSA_NAME)
4459 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4461 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4462 sprime = fold_convert (TREE_TYPE (res), sprime);
4463 stmt = gimple_build_assign (res, sprime);
4464 SSA_NAME_DEF_STMT (res) = stmt;
4465 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4467 gsi2 = gsi_after_labels (b);
4468 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4469 /* Queue the copy for eventual removal. */
4470 VEC_safe_push (gimple, heap, to_remove, stmt);
4471 /* If we inserted this PHI node ourselves, it's not an elimination. */
4472 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4475 pre_stats.eliminations++;
4479 /* We cannot remove stmts during BB walk, especially not release SSA
4480 names there, as this confuses the VN machinery. The stmts ending
4481 up in to_remove are either stores or simple copies. */
4482 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4484 tree lhs = gimple_assign_lhs (stmt);
4485 tree rhs = gimple_assign_rhs1 (stmt);
4486 use_operand_p use_p;
4489 /* If there is a single use only, propagate the equivalency
4490 instead of keeping the copy. */
4491 if (TREE_CODE (lhs) == SSA_NAME
4492 && TREE_CODE (rhs) == SSA_NAME
4493 && single_imm_use (lhs, &use_p, &use_stmt)
4494 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4496 SET_USE (use_p, rhs);
4497 update_stmt (use_stmt);
4498 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4499 && TREE_CODE (rhs) == SSA_NAME)
4500 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4503 /* If this is a store or a now unused copy, remove it. */
4504 if (TREE_CODE (lhs) != SSA_NAME
4505 || has_zero_uses (lhs))
4507 gsi = gsi_for_stmt (stmt);
4508 unlink_stmt_vdef (stmt);
4509 gsi_remove (&gsi, true);
4510 if (TREE_CODE (lhs) == SSA_NAME)
4511 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4512 release_defs (stmt);
4515 VEC_free (gimple, heap, to_remove);
4520 /* Borrow a bit of tree-ssa-dce.c for the moment.
4521 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4522 this may be a bit faster, and we may want critical edges kept split. */
4524 /* If OP's defining statement has not already been determined to be necessary,
4525 mark that statement necessary. Return the stmt, if it is newly
4528 static inline gimple
4529 mark_operand_necessary (tree op)
4535 if (TREE_CODE (op) != SSA_NAME)
4538 stmt = SSA_NAME_DEF_STMT (op);
4541 if (gimple_plf (stmt, NECESSARY)
4542 || gimple_nop_p (stmt))
4545 gimple_set_plf (stmt, NECESSARY, true);
4549 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4550 to insert PHI nodes sometimes, and because value numbering of casts isn't
4551 perfect, we sometimes end up inserting dead code. This simple DCE-like
4552 pass removes any insertions we made that weren't actually used. */
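/* Illustrative scenario (hypothetical): insert may create
   pretmp_5 = a_1 + b_1 in a predecessor expecting a phi to consume
   it; if that phi later proves redundant and is removed, pretmp_5 is
   left without uses.  The mark phase below seeds the worklist only
   with inserted statements already flagged NECESSARY, so the
   definition of pretmp_5 is never marked and the sweep removes
   it.  */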
4555 remove_dead_inserted_code (void)
4562 worklist = BITMAP_ALLOC (NULL);
4563 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4565 t = SSA_NAME_DEF_STMT (ssa_name (i));
4566 if (gimple_plf (t, NECESSARY))
4567 bitmap_set_bit (worklist, i);
4569 while (!bitmap_empty_p (worklist))
4571 i = bitmap_first_set_bit (worklist);
4572 bitmap_clear_bit (worklist, i);
4573 t = SSA_NAME_DEF_STMT (ssa_name (i));
4575 /* PHI nodes are somewhat special in that each PHI alternative has
4576 data and control dependencies. All the statements feeding the
4577 PHI node's arguments are always necessary. */
4578 if (gimple_code (t) == GIMPLE_PHI)
4582 for (k = 0; k < gimple_phi_num_args (t); k++)
4584 tree arg = PHI_ARG_DEF (t, k);
4585 if (TREE_CODE (arg) == SSA_NAME)
4587 gimple n = mark_operand_necessary (arg);
4589 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4595 /* Propagate through the operands. Examine all the USE, VUSE and
4596 VDEF operands in this statement. Mark all the statements
4597 which feed this statement's uses as necessary. */
4601 /* The operands of VDEF expressions are also needed as they
4602 represent potential definitions that may reach this
4603 statement (VDEF operands allow us to follow def-def
4606 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4608 gimple n = mark_operand_necessary (use);
4610 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4615 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4617 t = SSA_NAME_DEF_STMT (ssa_name (i));
4618 if (!gimple_plf (t, NECESSARY))
4620 gimple_stmt_iterator gsi;
4622 if (dump_file && (dump_flags & TDF_DETAILS))
4624 fprintf (dump_file, "Removing unnecessary insertion:");
4625 print_gimple_stmt (dump_file, t, 0, 0);
4628 gsi = gsi_for_stmt (t);
4629 if (gimple_code (t) == GIMPLE_PHI)
4630 remove_phi_node (&gsi, true);
4633 gsi_remove (&gsi, true);
4638 BITMAP_FREE (worklist);
4641 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4642 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4643 the number of visited blocks. */
4646 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4648 edge_iterator *stack;
4650 int post_order_num = 0;
4653 if (include_entry_exit)
4654 post_order[post_order_num++] = EXIT_BLOCK;
4656 /* Allocate stack for back-tracking up CFG. */
4657 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4660 /* Allocate bitmap to track nodes that have been visited. */
4661 visited = sbitmap_alloc (last_basic_block);
4663 /* None of the nodes in the CFG have been visited yet. */
4664 sbitmap_zero (visited);
4666 /* Push the last edge on to the stack. */
4667 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4675 /* Look at the edge on the top of the stack. */
4677 src = ei_edge (ei)->src;
4678 dest = ei_edge (ei)->dest;
4680 /* Check if the edge destination has been visited yet. */
4681 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4683 /* Mark that we have visited the destination. */
4684 SET_BIT (visited, src->index);
4686 if (EDGE_COUNT (src->preds) > 0)
4687 /* Since the DEST node has been visited for the first
4688 time, check its successors. */
4689 stack[sp++] = ei_start (src->preds);
4691 post_order[post_order_num++] = src->index;
4695 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4696 post_order[post_order_num++] = dest->index;
4698 if (!ei_one_before_end_p (ei))
4699 ei_next (&stack[sp - 1]);
4705 if (include_entry_exit)
4706 post_order[post_order_num++] = ENTRY_BLOCK;
4709 sbitmap_free (visited);
4710 return post_order_num;
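/* Note the order produced here comes from a depth-first walk of the
   reversed CFG, following predecessor edges starting at EXIT, which is
   the natural iteration order for a backwards dataflow problem like
   ANTIC; it is not simply a forward post-order read backwards.  A
   minimal recursive sketch of the same walk on a plain adjacency-list
   graph (hypothetical -1-terminated PREDS arrays, not GCC's CFG API):

     static void
     rev_dfs_visit (int bb, int **preds, bool *visited,
		    int *order, int *n)
     {
       int *p;
       visited[bb] = true;
       for (p = preds[bb]; *p != -1; p++)
	 if (!visited[*p])
	   rev_dfs_visit (*p, preds, visited, order, n);
       order[(*n)++] = bb;
     }

   Seeding the walk at EXIT and skipping ENTRY reproduces, up to the
   order in which predecessor edges are visited, the order the
   iterative version above computes without recursion.  */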
/* Initialize data structures used by PRE.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			 get_max_value_id () + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = BITMAP_ALLOC (NULL);
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  FOR_ALL_BB (bb)
    bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
				  pre_expr_hash,
				  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);

  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  if (!do_fre)
    loop_optimizer_finalize ();
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre))
    {
      if (!do_fre)
	loop_optimizer_finalize ();

      return 0;
    }

  init_pre (do_fre);
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  clear_expression_ids ();
  free_scc_vn ();
  if (!do_fre)
    remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre (do_fre);

  return todo;
}
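/* As a worked example of the whole pipeline (hypothetical GIMPLE, for
   illustration only): given

     if (cond_1)
       x_2 = a_3 + b_4;
     ...
     y_5 = a_3 + b_4;

   the second computation is partially redundant: available from the
   then-edge, ANTIC at the join.  insert () makes it fully redundant by
   computing the value on the other incoming edge and merging with a PHI,

     pretmp_6 = a_3 + b_4;                on the else-edge
     prephitmp_7 = PHI <x_2, pretmp_6>    at the join

   after which eliminate () rewrites y_5's statement to use prephitmp_7.
   Any insertion that ends up with no uses is deleted again by
   remove_dead_inserted_code ().  */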
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};