/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
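
/* As a small worked example (illustrative only, not taken from the
   sources): in the CFG below the computation of a_5 + b_6 in bb4 is
   partially redundant, because it is AVAIL on the edge from bb2 but
   not on the edge from bb3, while being ANTIC in both predecessors:

       bb2: x_1 = a_5 + b_6;      bb3: <nothing>
                \                   /
              bb4: y_2 = a_5 + b_6;

   Insertion places the computation in bb3 and merges the results with
   a PHI node, after which elimination replaces the computation in bb4:

       bb2: x_1 = a_5 + b_6;      bb3: pretmp_8 = a_5 + b_6;
                \                   /
              bb4: prephitmp_9 = PHI <x_1(2), pretmp_8(3)>
                   y_2 = prephitmp_9;

   The SSA names pretmp_8 and prephitmp_9 are invented for the
   example.  */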
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
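
/* As an illustrative example (not from the sources): a set holding the
   expressions a_1 + b_2 (expression id 7) and t_3 (expression id 9),
   where both share value id 5, has bits 7 and 9 set in its expressions
   bitmap but only bit 5 set in its values bitmap.  Value membership
   tests consult the values bitmap; walks over the members iterate the
   expressions bitmap.  */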
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare expressions E1 and E2 and return true if they are equal.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash expression E.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;

  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;
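
/* Note (descriptive): NAME expressions are mapped to their ids through
   name_to_id, indexed by SSA_NAME_VERSION, while all other expression
   kinds go through the expression_to_id hash table.  For example, the
   id of the expression for a name with version 5 lives at
   name_to_id[5], so name lookups avoid the hash table entirely.  */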
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Look up the expression id for EXPR, returning 0 if it is not
   known.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

static bool in_fre = false;
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)  ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)     ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)  ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
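
/* For example (purely illustrative): once the expression for a_1 + b_2
   has been translated through the PHIs of a block along the edge from
   predecessor bb3, the tuple {e = <a_1 + b_2>, pred = bb3, v = <the
   translated expression>} is cached here, so later ANTIC iterations
   can reuse the result instead of re-running phi_translate_1.  */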
/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  free (*slot);
  *slot = (void *) new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (&set->expressions, i))
        {
          bitmap_clear_bit (&set->expressions, i);
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of
         them have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_expression:
            if (nary->opcode == TRUTH_NOT_EXPR)
              goto do_unary;
            if (nary->opcode != TRUTH_AND_EXPR
                && nary->opcode != TRUTH_OR_EXPR
                && nary->opcode != TRUTH_XOR_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
do_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        struct vn_nary_op_s newnary;
        /* The NARY structure is only guaranteed to have been
           allocated to the nary->length operands.  */
        memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
                                 - sizeof (tree) * (4 - nary->length)));

        for (i = 0; i < newnary.length; i++)
          {
            if (TREE_CODE (newnary.op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary.op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary.op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary.length,
                                                    newnary.opcode,
                                                    newnary.type,
                                                    newnary.op[0],
                                                    newnary.op[1],
                                                    newnary.op[2],
                                                    newnary.op[3],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
                nary = vn_nary_op_insert_pieces (newnary.length,
                                                 newnary.opcode,
                                                 newnary.type,
                                                 newnary.op[0],
                                                 newnary.op[1],
                                                 newnary.op[2],
                                                 newnary.op[3],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree oldop0 = operand->op0;
            tree oldop1 = operand->op1;
            tree oldop2 = operand->op2;
            tree op0 = oldop0;
            tree op1 = oldop1;
            tree op2 = oldop2;
            tree type = operand->type;
            vn_reference_op_s newop = *operand;

            if (op0 && TREE_CODE (op0) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op0)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op0 = name;
                  }
                else if (!opresult)
                  break;
              }
            changed |= op0 != oldop0;

            if (op1 && TREE_CODE (op1) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op1)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op1 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op1 && !is_gimple_min_invariant (op1))
              break;
            changed |= op1 != oldop1;
            if (op2 && TREE_CODE (op2) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op2)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op2 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op2 && !is_gimple_min_invariant (op2))
              break;
            changed |= op2 != oldop2;

            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
              newop.opcode = TREE_CODE (op0);
            newop.type = type;
            newop.op0 = op0;
            newop.op1 = op1;
            newop.op2 = op2;
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op0) == INTEGER_CST
                && TREE_CODE (op1) == INTEGER_CST
                && TREE_CODE (op2) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op0);
                off = double_int_add (off,
                                      double_int_neg
                                        (tree_to_double_int (op1)));
                off = double_int_mul (off, tree_to_double_int (op2));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;
            bool converted = false;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, true);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              {
                result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
                converted = true;
              }

            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
                return get_or_alloc_expr_for_constant (result);
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (converted)
              {
                vn_nary_op_t nary;
                tree nresult;

                gcc_assert (CONVERT_EXPR_P (result)
                            || TREE_CODE (result) == VIEW_CONVERT_EXPR);

                nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
                                                    TREE_TYPE (result),
                                                    TREE_OPERAND (result, 0),
                                                    NULL_TREE, NULL_TREE,
                                                    NULL_TREE,
                                                    &nary);
                if (nresult && is_gimple_min_invariant (nresult))
                  return get_or_alloc_expr_for_constant (nresult);

                expr->kind = NARY;
                if (nary)
                  {
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;

                    new_val_id = nary->value_id;
                    get_or_alloc_expression_id (expr);
                  }
                else
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                    nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
                                                     TREE_TYPE (result),
                                                     TREE_OPERAND (result, 0),
                                                     NULL_TREE, NULL_TREE,
                                                     NULL_TREE, NULL_TREE,
                                                     new_val_id);
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;
                    get_or_alloc_expression_id (expr);
                  }
              }
            else if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
                                &set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  && gimple_uid (def_stmt) >= gimple_uid (stmt))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
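
  /* Note on the cache encoding (descriptive): EXPR_DIES keeps two bits
     per expression id -- bit 2*ID records that the answer for ID has
     been computed, and bit 2*ID+1 holds the answer itself.  */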
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
#define union_contains_value(SET1, SET2, VAL) \
  (bitmap_set_contains_value ((SET1), (VAL)) \
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))

/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */

static bool
vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
                   vn_reference_op_t vro)
{
  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op0;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }
  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op1;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }
  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op2;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }

  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
               basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
          {
            if (TREE_CODE (nary->op[i]) == SSA_NAME)
              {
                struct pre_expr_d temp;
                temp.kind = NAME;
                temp.id = 0;
                PRE_EXPR_NAME (&temp) = nary->op[i];
                temp.id = lookup_expression_id (&temp);
                if (temp.id == 0)
                  return false;
                if (!union_contains_value (set1, set2,
                                           get_expr_value_id (&temp)))
                  return false;
              }
          }
        /* If the NARY may trap make sure the block does not contain
           a possible exit point.
           ??? This is overly conservative if we translate AVAIL_OUT
           as the available expression might be after the exit point.  */
        if (BB_MAY_NOTRETURN (block)
            && vn_nary_may_trap (nary))
          return false;
        return true;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;
        unsigned int i;

        for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
          {
            if (!vro_valid_in_sets (set1, set2, vro))
              return false;
          }
        if (ref->vuse)
          {
            gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
            if (!gimple_nop_p (def_stmt)
                && gimple_bb (def_stmt) != block
                && !dominated_by_p (CDI_DOMINATORS,
                                    block, gimple_bb (def_stmt)))
              return false;
          }
        return !value_dies_in_block_x (expr, block);
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set1, set2, expr, block))
        bitmap_remove_from_set (set1, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set, NULL, expr, block))
        bitmap_remove_from_set (set, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;

/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
                              basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      SET_BIT (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
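
/* A small worked example (illustrative, not from the sources): let
   block 4 have two successors, block 5 with ANTIC_IN = {a_1 + b_2} and
   block 6 with ANTIC_IN = {a_1 + b_2, c_3 + d_4}.  Then ANTIC_OUT[4]
   is the intersection {a_1 + b_2}.  If block 4 itself computes
   c_3 + d_4, that expression is in EXP_GEN[4] and the union yields
   {a_1 + b_2, c_3 + d_4}.  If block 4 also defined b_2, clean would
   remove a_1 + b_2 again, since an expression whose operand is only
   defined inside the block cannot be anticipated at its entry.  */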
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
         phi translate the maximal set, which is incredibly slow
         (since the maximal set often has 300+ members, even when you
         have a small number of blocks).
         Basically, we defer the computation of ANTIC for this block
         until we have processed its successor, which will inevitably
         have a *much* smaller set of values to phi translate once
         clean has been run on it.
         The cost of doing this is that we technically perform more
         iterations, however, they are lower cost iterations.

         Timings for PRE on tramp3d-v4:
         without maximal set fix: 11 seconds
         with maximal set fix/without deferring: 26 seconds
         with maximal set fix/with deferring: 11 seconds
      */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
                                         block, succ_bb))
        {
          changed = true;
          goto maybe_dump_sets;
        }
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first = NULL;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (!first
              && BB_VISITED (e->dest))
            first = e->dest;
          else if (BB_VISITED (e->dest))
            VEC_quick_push (basic_block, worklist, e->dest);
        }

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
        {
          SET_BIT (changed_blocks, block->index);
          BB_VISITED (block) = 0;
          BB_DEFERRED (block) = 1;
          changed = true;
          VEC_free (basic_block, heap, worklist);
          goto maybe_dump_sets;
        }

      if (!gimple_seq_empty_p (phi_nodes (first)))
        phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
        bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
        {
          if (!gimple_seq_empty_p (phi_nodes (bprime)))
            {
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
              bitmap_set_and (ANTIC_OUT, tmp);
              bitmap_set_free (tmp);
            }
          else
            bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
        }
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
                                          TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
                                  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
        SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
        {
          if (ANTIC_OUT)
            print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

          print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
                            block->index);

          if (S)
            print_bitmap_set (dump_file, S, "S", block->index);
        }
      else
        {
          fprintf (dump_file,
                   "Block %d was deferred for a future iteration.\n",
                   block->index);
        }
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
2408 /* Compute PARTIAL_ANTIC for BLOCK.
2410 If succs(BLOCK) > 1 then
2411 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2412 in ANTIC_OUT for all succ(BLOCK)
2413 else if succs(BLOCK) == 1 then
2414 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2416 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2417 - ANTIC_IN[BLOCK])
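/* Illustration (hedged, for exposition only): if one successor of
   BLOCK computes a_1 + b_1 and the other does not, the expression is
   missing from the intersection that ANTIC uses, but it is present
   in the value-wise union taken here, so a_1 + b_1 becomes partially
   anticipatable in BLOCK.  */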
2421 compute_partial_antic_aux (basic_block block,
2422 bool block_has_abnormal_pred_edge)
2424 bool changed = false;
2425 bitmap_set_t old_PA_IN;
2426 bitmap_set_t PA_OUT;
2429 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2431 old_PA_IN = PA_OUT = NULL;
2433 /* If any edges from predecessors are abnormal, PA_IN is empty,
2434 so do nothing. */
2435 if (block_has_abnormal_pred_edge)
2436 goto maybe_dump_sets;
2438 /* If there are too many partially anticipatable values in the
2439 block, phi_translate_set can take an exponential time: stop
2440 before the translation starts. */
2442 && single_succ_p (block)
2443 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2444 goto maybe_dump_sets;
2446 old_PA_IN = PA_IN (block);
2447 PA_OUT = bitmap_set_new ();
2449 /* If the block has no successors, PA_OUT is empty. */
2450 if (EDGE_COUNT (block->succs) == 0)
2452 /* If we have one successor, we could have some phi nodes to
2453 translate through. Note that we can't phi translate across DFS
2454 back edges in partial antic, because it uses a union operation on
2455 the successors. For recurrences like IVs, we will end up
2456 generating a new value in the set on each go around (i + 3 (VH.1),
2457 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2458 else if (single_succ_p (block))
2460 basic_block succ = single_succ (block);
2461 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2462 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2464 /* If we have multiple successors, we take the union of all of
2465 them. */
2468 VEC(basic_block, heap) * worklist;
2472 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2473 FOR_EACH_EDGE (e, ei, block->succs)
2475 if (e->flags & EDGE_DFS_BACK)
2477 VEC_quick_push (basic_block, worklist, e->dest);
2479 if (VEC_length (basic_block, worklist) > 0)
2481 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2486 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2487 bitmap_value_insert_into_set (PA_OUT,
2488 expression_for_id (i));
2489 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2491 bitmap_set_t pa_in = bitmap_set_new ();
2492 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2493 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2494 bitmap_value_insert_into_set (PA_OUT,
2495 expression_for_id (i));
2496 bitmap_set_free (pa_in);
2499 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2500 bitmap_value_insert_into_set (PA_OUT,
2501 expression_for_id (i));
2504 VEC_free (basic_block, heap, worklist);
2507 /* PA_IN starts with PA_OUT - TMP_GEN.
2508 Then we subtract things from ANTIC_IN. */
2509 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2511 /* For partial antic, we want to put back in the phi results, since
2512 we will properly avoid making them partially antic over backedges. */
2513 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2514 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2516 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2517 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2519 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2521 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2524 SET_BIT (changed_blocks, block->index);
2525 FOR_EACH_EDGE (e, ei, block->preds)
2526 SET_BIT (changed_blocks, e->src->index);
2529 RESET_BIT (changed_blocks, block->index);
2532 if (dump_file && (dump_flags & TDF_DETAILS))
2535 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2537 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2540 bitmap_set_free (old_PA_IN);
2542 bitmap_set_free (PA_OUT);
2546 /* Compute ANTIC and partial ANTIC sets. */
2549 compute_antic (void)
2551 bool changed = true;
2552 int num_iterations = 0;
2556 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2557 We pre-build the map of blocks with incoming abnormal edges here. */
2558 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2559 sbitmap_zero (has_abnormal_preds);
2566 FOR_EACH_EDGE (e, ei, block->preds)
2568 e->flags &= ~EDGE_DFS_BACK;
2569 if (e->flags & EDGE_ABNORMAL)
2571 SET_BIT (has_abnormal_preds, block->index);
2576 BB_VISITED (block) = 0;
2577 BB_DEFERRED (block) = 0;
2579 /* While we are here, give empty ANTIC_IN sets to each block. */
2580 ANTIC_IN (block) = bitmap_set_new ();
2581 PA_IN (block) = bitmap_set_new ();
2584 /* At the exit block we anticipate nothing. */
2585 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2586 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2587 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2589 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2590 sbitmap_ones (changed_blocks);
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2594 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2597 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2599 if (TEST_BIT (changed_blocks, postorder[i]))
2601 basic_block block = BASIC_BLOCK (postorder[i]);
2602 changed |= compute_antic_aux (block,
2603 TEST_BIT (has_abnormal_preds,
2607 #ifdef ENABLE_CHECKING
2608 /* Theoretically possible, but *highly* unlikely. */
2609 gcc_assert (num_iterations < 500);
2613 statistics_histogram_event (cfun, "compute_antic iterations",
2616 if (do_partial_partial)
2618 sbitmap_ones (changed_blocks);
2619 mark_dfs_back_edges ();
2624 if (dump_file && (dump_flags & TDF_DETAILS))
2625 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2628 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2630 if (TEST_BIT (changed_blocks, postorder[i]))
2632 basic_block block = BASIC_BLOCK (postorder[i]);
2634 |= compute_partial_antic_aux (block,
2635 TEST_BIT (has_abnormal_preds,
2639 #ifdef ENABLE_CHECKING
2640 /* Theoretically possible, but *highly* unlikely. */
2641 gcc_assert (num_iterations < 500);
2644 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2647 sbitmap_free (has_abnormal_preds);
2648 sbitmap_free (changed_blocks);
2651 /* Return true if we can value number the call in STMT. This is true
2652 if we have a pure or constant call. */
2655 can_value_number_call (gimple stmt)
2657 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2662 /* Return true if OP is a tree which we can perform PRE on.
2663 This may not match the operations we can value number, but in
2664 a perfect world would. */
2667 can_PRE_operation (tree op)
2669 return UNARY_CLASS_P (op)
2670 || BINARY_CLASS_P (op)
2671 || COMPARISON_CLASS_P (op)
2672 || TREE_CODE (op) == MEM_REF
2673 || TREE_CODE (op) == COMPONENT_REF
2674 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2675 || TREE_CODE (op) == CALL_EXPR
2676 || TREE_CODE (op) == ARRAY_REF;
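/* Illustrative examples of trees satisfying this predicate (added
   for exposition; not an exhaustive list): -a_1 (unary class),
   a_1 + b_1 (binary class), a_1 < b_1 (comparison class), *p_1
   (MEM_REF), s.f (COMPONENT_REF), VIEW_CONVERT_EXPR<int>(f_1),
   foo (a_1) (CALL_EXPR) and a_2[i_1] (ARRAY_REF).  */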
2680 /* Inserted expressions are placed onto this worklist, which is used
2681 for performing quick dead code elimination of insertions we made
2682 that didn't turn out to be necessary. */
2683 static bitmap inserted_exprs;
2685 /* Pool-allocated fake store expressions are placed onto this
2686 worklist, which, after performing dead code elimination, is walked
2687 to see which expressions need to be put into GC'able memory. */
2688 static VEC(gimple, heap) *need_creation;
2690 /* The actual worker for create_component_ref_by_pieces. */
2693 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2694 unsigned int *operand, gimple_seq *stmts,
2697 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2701 switch (currop->opcode)
2705 tree folded, sc = NULL_TREE;
2706 unsigned int nargs = 0;
2708 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2712 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2713 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2719 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2720 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2724 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2725 ref->operands) - 1);
2726 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2728 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2738 folded = build_call_array (currop->type,
2739 (TREE_CODE (fn) == FUNCTION_DECL
2740 ? build_fold_addr_expr (fn) : fn),
2744 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2750 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2752 tree offset = currop->op0;
2755 if (TREE_CODE (baseop) == ADDR_EXPR
2756 && handled_component_p (TREE_OPERAND (baseop, 0)))
2760 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2763 offset = int_const_binop (PLUS_EXPR, offset,
2764 build_int_cst (TREE_TYPE (offset),
2766 baseop = build_fold_addr_expr (base);
2768 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2771 case TARGET_MEM_REF:
2773 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2776 tree genop0 = NULL_TREE;
2777 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2783 op0expr = get_or_alloc_expr_for (currop->op0);
2784 genop0 = find_or_generate_expression (block, op0expr,
2789 if (DECL_P (baseop))
2790 return build6 (TARGET_MEM_REF, currop->type,
2792 genop0, currop->op1, currop->op2,
2793 unshare_expr (nextop->op1));
2795 return build6 (TARGET_MEM_REF, currop->type,
2797 genop0, currop->op1, currop->op2,
2798 unshare_expr (nextop->op1));
2804 gcc_assert (is_gimple_min_invariant (currop->op0));
2810 case VIEW_CONVERT_EXPR:
2813 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2818 folded = fold_build1 (currop->opcode, currop->type,
2823 case ALIGN_INDIRECT_REF:
2824 case MISALIGNED_INDIRECT_REF:
2827 tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2832 genop1 = fold_convert (build_pointer_type (currop->type),
2835 if (currop->opcode == MISALIGNED_INDIRECT_REF)
2836 folded = fold_build2 (currop->opcode, currop->type,
2837 genop1, currop->op1);
2839 folded = fold_build1 (currop->opcode, currop->type,
2847 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2849 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2850 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2856 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2859 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2862 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2867 /* For array ref vn_reference_op's, operand 1 of the array ref
2868 is op0 of the reference op and operand 3 of the array ref is
2869 op1. */
2870 case ARRAY_RANGE_REF:
2874 tree genop1 = currop->op0;
2876 tree genop2 = currop->op1;
2878 tree genop3 = currop->op2;
2880 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2884 op1expr = get_or_alloc_expr_for (genop1);
2885 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2890 /* Drop zero minimum index. */
2891 if (tree_int_cst_equal (genop2, integer_zero_node))
2895 op2expr = get_or_alloc_expr_for (genop2);
2896 genop2 = find_or_generate_expression (block, op2expr, stmts,
2904 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2905 /* We can't always put a size in units of the element alignment
2906 here as the element alignment may not be visible. See
2907 PR43783. Simply drop the element size for constant
2908 sizes. */
2909 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2913 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2914 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2915 op3expr = get_or_alloc_expr_for (genop3);
2916 genop3 = find_or_generate_expression (block, op3expr, stmts,
2922 return build4 (currop->opcode, currop->type, genop0, genop1,
2929 tree genop2 = currop->op1;
2931 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2935 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by
2936 themselves. */
2940 op2expr = get_or_alloc_expr_for (genop2);
2941 genop2 = find_or_generate_expression (block, op2expr, stmts,
2947 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2953 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2954 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2975 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2976 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2977 trying to rename aggregates into SSA form directly, which is a no-no.
2979 Thus, this routine doesn't create temporaries, it just builds a
2980 single access expression for the array, calling
2981 find_or_generate_expression to build the innermost pieces.
2983 This function is a subroutine of create_expression_by_pieces, and
2984 should not be called on its own unless you really know what you
2985 are doing. */
2988 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2989 gimple_seq *stmts, gimple domstmt)
2991 unsigned int op = 0;
2992 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2995 /* Find a leader for an expression, or generate one using
2996 create_expression_by_pieces if it's ANTIC but
2997 complex.
2998 BLOCK is the basic_block we are looking for leaders in.
2999 EXPR is the expression to find a leader or generate for.
3000 STMTS is the statement list to put the inserted expressions on.
3001 Returns the SSA_NAME of the LHS of the generated expression or the
3002 leader.
3003 DOMSTMT if non-NULL is a statement that should be dominated by
3004 all uses in the generated expression. If DOMSTMT is non-NULL this
3005 routine can fail and return NULL_TREE. Otherwise it will assert
3006 on failure. */
3009 find_or_generate_expression (basic_block block, pre_expr expr,
3010 gimple_seq *stmts, gimple domstmt)
3012 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
3013 get_expr_value_id (expr), domstmt);
3017 if (leader->kind == NAME)
3018 genop = PRE_EXPR_NAME (leader);
3019 else if (leader->kind == CONSTANT)
3020 genop = PRE_EXPR_CONSTANT (leader);
3023 /* If it's still NULL, it must be a complex expression, so generate
3024 it recursively. Not so for FRE though. */
3028 bitmap_set_t exprset;
3029 unsigned int lookfor = get_expr_value_id (expr);
3030 bool handled = false;
3034 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
3035 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
3037 pre_expr temp = expression_for_id (i);
3038 if (temp->kind != NAME)
3041 genop = create_expression_by_pieces (block, temp, stmts,
3043 get_expr_type (expr));
3047 if (!handled && domstmt)
3050 gcc_assert (handled);
3055 #define NECESSARY GF_PLF_1
3057 /* Create an expression in pieces, so that we can handle very complex
3058 expressions that may be ANTIC, but not necessarily GIMPLE.
3059 BLOCK is the basic block the expression will be inserted into,
3060 EXPR is the expression to insert (in value form)
3061 STMTS is a statement list to append the necessary insertions into.
3063 This function will die if we hit some value that shouldn't be
3064 ANTIC but is (i.e. there is no leader for it or its components).
3065 This function may also generate expressions that are themselves
3066 partially or fully redundant. Those that are will be either made
3067 fully redundant during the next iteration of insert (for partially
3068 redundant ones), or eliminated by eliminate (for fully redundant
3071 If DOMSTMT is non-NULL then we make sure that all uses in the
3072 expressions dominate that statement. In this case the function
3073 can return NULL_TREE to signal failure. */
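/* A minimal sketch of the recursion (the temporary names below are
   invented for exposition): to insert the value of (a_1 + b_1) * c_1
   we first find or generate a leader for the value of a_1 + b_1,
   say pretmp.1_5, and then emit

     pretmp.2_6 = pretmp.1_5 * c_1;

   onto STMTS, value-numbering each newly created SSA name as we go
   and returning pretmp.2_6.  */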
3076 create_expression_by_pieces (basic_block block, pre_expr expr,
3077 gimple_seq *stmts, gimple domstmt, tree type)
3081 gimple_seq forced_stmts = NULL;
3082 unsigned int value_id;
3083 gimple_stmt_iterator gsi;
3084 tree exprtype = type ? type : get_expr_type (expr);
3090 /* We may hit the NAME/CONSTANT case if we have to convert types
3091 that value numbering saw through. */
3093 folded = PRE_EXPR_NAME (expr);
3096 folded = PRE_EXPR_CONSTANT (expr);
3100 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3101 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3106 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3107 switch (nary->length)
3111 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3112 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3113 tree genop1 = find_or_generate_expression (block, op1,
3115 tree genop2 = find_or_generate_expression (block, op2,
3117 if (!genop1 || !genop2)
3119 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3120 may be a constant with the wrong type. */
3121 if (nary->opcode == POINTER_PLUS_EXPR)
3123 genop1 = fold_convert (nary->type, genop1);
3124 genop2 = fold_convert (sizetype, genop2);
3128 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3129 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3132 folded = fold_build2 (nary->opcode, nary->type,
3138 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3139 tree genop1 = find_or_generate_expression (block, op1,
3143 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3145 folded = fold_build1 (nary->opcode, nary->type,
3158 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3159 folded = fold_convert (exprtype, folded);
3161 /* Force the generated expression to be a sequence of GIMPLE
3162 statements.
3163 We have to call unshare_expr because force_gimple_operand may
3164 modify the tree we pass to it. */
3165 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3168 /* If we generated any intermediate expressions, add them to the
3169 value sets and chain them into the instruction stream. */
3172 gsi = gsi_start (forced_stmts);
3173 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3175 gimple stmt = gsi_stmt (gsi);
3176 tree forcedname = gimple_get_lhs (stmt);
3179 if (TREE_CODE (forcedname) == SSA_NAME)
3181 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3182 VN_INFO_GET (forcedname)->valnum = forcedname;
3183 VN_INFO (forcedname)->value_id = get_next_value_id ();
3184 nameexpr = get_or_alloc_expr_for_name (forcedname);
3185 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3187 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3188 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3190 mark_symbols_for_renaming (stmt);
3192 gimple_seq_add_seq (stmts, forced_stmts);
3195 /* Build and insert the assignment of the end result to the temporary
3196 that we will return. */
3197 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3199 pretemp = create_tmp_reg (exprtype, "pretmp");
3200 get_var_ann (pretemp);
3204 add_referenced_var (temp);
3206 newstmt = gimple_build_assign (temp, folded);
3207 name = make_ssa_name (temp, newstmt);
3208 gimple_assign_set_lhs (newstmt, name);
3209 gimple_set_plf (newstmt, NECESSARY, false);
3211 gimple_seq_add_stmt (stmts, newstmt);
3212 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3214 /* All the symbols in NEWEXPR should be put into SSA form. */
3215 mark_symbols_for_renaming (newstmt);
3217 /* Add a value number to the temporary.
3218 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3219 we are creating the expression by pieces, and this particular piece of
3220 the expression may have been represented. There is no harm in replacing
3221 here. */
3222 VN_INFO_GET (name)->valnum = name;
3223 value_id = get_expr_value_id (expr);
3224 VN_INFO (name)->value_id = value_id;
3225 nameexpr = get_or_alloc_expr_for_name (name);
3226 add_to_value (value_id, nameexpr);
3227 if (NEW_SETS (block))
3228 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3229 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3231 pre_stats.insertions++;
3232 if (dump_file && (dump_flags & TDF_DETAILS))
3234 fprintf (dump_file, "Inserted ");
3235 print_gimple_stmt (dump_file, newstmt, 0, 0);
3236 fprintf (dump_file, " in predecessor %d\n", block->index);
3243 /* Returns true if we want to inhibit the insertions of PHI nodes
3244 for the given EXPR for basic block BB (a member of a loop).
3245 We want to do this when we fear that the induction variable we
3246 create might inhibit vectorization. */
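/* For example (illustration only), given

     for (i = 0; i < n; ++i)
       sum += a[i];

   PREing the load from a[i] across the backedge would introduce a
   loop-carried PHI for the loaded value and effectively an induction
   variable for the access, obscuring the simple a[i] pattern the
   vectorizer looks for.  */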
3249 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3251 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3252 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3253 vn_reference_op_t op;
3256 /* If we aren't going to vectorize we don't inhibit anything. */
3257 if (!flag_tree_vectorize)
3260 /* Otherwise we inhibit the insertion when the address of the
3261 memory reference is a simple induction variable. In other
3262 cases the vectorizer won't do anything anyway (either it's
3263 loop invariant or a complicated expression). */
3264 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3269 case ARRAY_RANGE_REF:
3270 if (TREE_CODE (op->op0) != SSA_NAME)
3275 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3277 /* Default defs are loop invariant. */
3280 /* Defined outside this loop, also loop invariant. */
3281 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3283 /* If it's a simple induction variable inhibit insertion,
3284 the vectorizer might be interested in this one. */
3285 if (simple_iv (bb->loop_father, bb->loop_father,
3286 op->op0, &iv, true))
3288 /* No simple IV, vectorizer can't do anything, hence no
3289 reason to inhibit the transformation for this operand. */
3299 /* Insert the to-be-made-available values of expression EXPRNUM for each
3300 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3301 merge the result with a phi node, given the same value number as
3302 NODE. Return true if we have inserted new stuff. */
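/* Sketch of the result (names invented for exposition): if the value
   of a_1 + b_1 is available on one incoming edge as x_3 but not on
   the other, we generate pretmp.4_7 = a_1 + b_1 on the edge missing
   it and merge the two with

     prephitmp.5_8 = PHI <x_3 (e1), pretmp.4_7 (e2)>

   giving prephitmp.5_8 the same value number as the expression, so
   later uses can be rewritten to use it.  */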
3305 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3308 pre_expr expr = expression_for_id (exprnum);
3310 unsigned int val = get_expr_value_id (expr);
3312 bool insertions = false;
3317 tree type = get_expr_type (expr);
3321 if (dump_file && (dump_flags & TDF_DETAILS))
3323 fprintf (dump_file, "Found partial redundancy for expression ");
3324 print_pre_expr (dump_file, expr);
3325 fprintf (dump_file, " (%04d)\n", val);
3328 /* Make sure we aren't creating an induction variable. */
3329 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3331 bool firstinsideloop = false;
3332 bool secondinsideloop = false;
3333 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3334 EDGE_PRED (block, 0)->src);
3335 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3336 EDGE_PRED (block, 1)->src);
3337 /* Induction variables only have one edge inside the loop. */
3338 if ((firstinsideloop ^ secondinsideloop)
3339 && (expr->kind != REFERENCE
3340 || inhibit_phi_insertion (block, expr)))
3342 if (dump_file && (dump_flags & TDF_DETAILS))
3343 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3348 /* Make the necessary insertions. */
3349 FOR_EACH_EDGE (pred, ei, block->preds)
3351 gimple_seq stmts = NULL;
3354 eprime = avail[bprime->index];
3356 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3358 builtexpr = create_expression_by_pieces (bprime,
3362 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3363 gsi_insert_seq_on_edge (pred, stmts);
3364 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3367 else if (eprime->kind == CONSTANT)
3369 /* Constants may not have the right type; fold_convert
3370 should give us back a constant with the right type. */
3372 tree constant = PRE_EXPR_CONSTANT (eprime);
3373 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3375 tree builtexpr = fold_convert (type, constant);
3376 if (!is_gimple_min_invariant (builtexpr))
3378 tree forcedexpr = force_gimple_operand (builtexpr,
3381 if (!is_gimple_min_invariant (forcedexpr))
3383 if (forcedexpr != builtexpr)
3385 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3386 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3390 gimple_stmt_iterator gsi;
3391 gsi = gsi_start (stmts);
3392 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3394 gimple stmt = gsi_stmt (gsi);
3395 tree lhs = gimple_get_lhs (stmt);
3396 if (TREE_CODE (lhs) == SSA_NAME)
3397 bitmap_set_bit (inserted_exprs,
3398 SSA_NAME_VERSION (lhs));
3399 gimple_set_plf (stmt, NECESSARY, false);
3401 gsi_insert_seq_on_edge (pred, stmts);
3403 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3407 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3410 else if (eprime->kind == NAME)
3412 /* We may have to do a conversion because our value
3413 numbering can look through types in certain cases, but
3414 our IL requires that all operands of a phi node have the same
3415 type. */
3416 tree name = PRE_EXPR_NAME (eprime);
3417 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3421 builtexpr = fold_convert (type, name);
3422 forcedexpr = force_gimple_operand (builtexpr,
3426 if (forcedexpr != name)
3428 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3429 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3434 gimple_stmt_iterator gsi;
3435 gsi = gsi_start (stmts);
3436 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3438 gimple stmt = gsi_stmt (gsi);
3439 tree lhs = gimple_get_lhs (stmt);
3440 if (TREE_CODE (lhs) == SSA_NAME)
3441 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3442 gimple_set_plf (stmt, NECESSARY, false);
3444 gsi_insert_seq_on_edge (pred, stmts);
3446 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3450 /* If we didn't want a phi node, and we made insertions, we still have
3451 inserted new stuff, and thus return true. If we didn't want a phi node,
3452 and didn't make insertions, we haven't added anything new, so return
3453 false. */
3454 if (nophi && insertions)
3456 else if (nophi && !insertions)
3459 /* Now build a phi for the new variable. */
3460 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3462 prephitemp = create_tmp_var (type, "prephitmp");
3463 get_var_ann (prephitemp);
3467 add_referenced_var (temp);
3469 if (TREE_CODE (type) == COMPLEX_TYPE
3470 || TREE_CODE (type) == VECTOR_TYPE)
3471 DECL_GIMPLE_REG_P (temp) = 1;
3472 phi = create_phi_node (temp, block);
3474 gimple_set_plf (phi, NECESSARY, false);
3475 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3476 VN_INFO (gimple_phi_result (phi))->value_id = val;
3477 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3478 FOR_EACH_EDGE (pred, ei, block->preds)
3480 pre_expr ae = avail[pred->src->index];
3481 gcc_assert (get_expr_type (ae) == type
3482 || useless_type_conversion_p (type, get_expr_type (ae)));
3483 if (ae->kind == CONSTANT)
3484 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3486 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3490 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3491 add_to_value (val, newphi);
3493 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3494 this insertion, since we test for the existence of this value in PHI_GEN
3495 before proceeding with the partial redundancy checks in insert_aux.
3497 The value may exist in AVAIL_OUT, in particular, it could be represented
3498 by the expression we are trying to eliminate, in which case we want the
3499 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3500 added here.
3502 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3503 this block, because if it did, it would have existed in our dominator's
3504 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3507 bitmap_insert_into_set (PHI_GEN (block), newphi);
3508 bitmap_value_replace_in_set (AVAIL_OUT (block),
3510 bitmap_insert_into_set (NEW_SETS (block),
3513 if (dump_file && (dump_flags & TDF_DETAILS))
3515 fprintf (dump_file, "Created phi ");
3516 print_gimple_stmt (dump_file, phi, 0, 0);
3517 fprintf (dump_file, " in block %d\n", block->index);
3525 /* Perform insertion of partially redundant values.
3526 For BLOCK, do the following:
3527 1. Propagate the NEW_SETS of the dominator into the current block.
3528 If the block has multiple predecessors,
3529 2a. Iterate over the ANTIC expressions for the block to see if
3530 any of them are partially redundant.
3531 2b. If so, insert them into the necessary predecessors to make
3532 the expression fully redundant.
3533 2c. Insert a new PHI merging the values of the predecessors.
3534 2d. Insert the new PHI, and the new expressions, into the
3536 3. Recursively call ourselves on the dominator children of BLOCK.
3538 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3539 do_regular_insertion and do_partial_partial_insertion. */
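/* A worked illustration (for exposition, not part of the pass):

     if (p)
       x = a + b;    // a + b available from this predecessor
     else
       ...;          // but not from this one
     y = a + b;      // partially redundant

   Step 2b inserts a + b into the else-predecessor, step 2c merges
   the two copies with a PHI, and the later computation of y becomes
   fully redundant, to be cleaned up by eliminate.  */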
3544 do_regular_insertion (basic_block block, basic_block dom)
3546 bool new_stuff = false;
3547 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3551 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3553 if (expr->kind != NAME)
3557 bool by_some = false;
3558 bool cant_insert = false;
3559 bool all_same = true;
3560 pre_expr first_s = NULL;
3563 pre_expr eprime = NULL;
3565 pre_expr edoubleprime = NULL;
3566 bool do_insertion = false;
3568 val = get_expr_value_id (expr);
3569 if (bitmap_set_contains_value (PHI_GEN (block), val))
3571 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3573 if (dump_file && (dump_flags & TDF_DETAILS))
3574 fprintf (dump_file, "Found fully redundant value\n");
3578 avail = XCNEWVEC (pre_expr, last_basic_block);
3579 FOR_EACH_EDGE (pred, ei, block->preds)
3581 unsigned int vprime;
3583 /* We should never run insertion for the exit block
3584 and so should never come across fake pred edges. */
3585 gcc_assert (!(pred->flags & EDGE_FAKE));
3587 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3590 /* eprime will generally only be NULL if the
3591 value of the expression, translated
3592 through the PHI for this predecessor, is
3593 undefined. If that is the case, we can't
3594 make the expression fully redundant,
3595 because its value is undefined along a
3596 predecessor path. We can thus break out
3597 early because it doesn't matter what the
3598 rest of the results are. */
3605 eprime = fully_constant_expression (eprime);
3606 vprime = get_expr_value_id (eprime);
3607 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3609 if (edoubleprime == NULL)
3611 avail[bprime->index] = eprime;
3616 avail[bprime->index] = edoubleprime;
3618 /* We want to perform insertions to remove a redundancy on
3619 a path in the CFG we want to optimize for speed. */
3620 if (optimize_edge_for_speed_p (pred))
3621 do_insertion = true;
3622 if (first_s == NULL)
3623 first_s = edoubleprime;
3624 else if (!pre_expr_eq (first_s, edoubleprime))
3628 /* If we can insert it, it's not the same value
3629 already existing along every predecessor, and
3630 it's defined by some predecessor, then it is
3631 partially redundant. */
3632 if (!cant_insert && !all_same && by_some && do_insertion
3633 && dbg_cnt (treepre_insert))
3635 if (insert_into_preds_of_block (block, get_expression_id (expr),
3639 /* If all edges produce the same value and that value is
3640 an invariant, then the PHI has the same value on all
3641 edges. Note this. */
3642 else if (!cant_insert && all_same && eprime
3643 && (edoubleprime->kind == CONSTANT
3644 || edoubleprime->kind == NAME)
3645 && !value_id_constant_p (val))
3649 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3650 value_expressions, val);
3652 unsigned int new_val = get_expr_value_id (edoubleprime);
3653 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3655 pre_expr expr = expression_for_id (j);
3657 if (expr->kind == NAME)
3659 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3660 /* Just reset the value id and valnum so it is
3661 the same as the constant we have discovered. */
3662 if (edoubleprime->kind == CONSTANT)
3664 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3665 pre_stats.constified++;
3668 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3669 info->value_id = new_val;
3677 VEC_free (pre_expr, heap, exprs);
3682 /* Perform insertion for partially anticipatable expressions. There
3683 is only one case in which we will perform insertion for these: when
3684 the expression is partially anticipatable and fully available.
3685 In this case, we know that putting it earlier will enable us to
3686 remove the later computation. */
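/* A hedged example (for exposition): in

     t = a + b;          // available on entry to the loop
     while (...)
       if (p)
         u = a + b;      // only partially anticipatable at the head

   a + b is not ANTIC at the loop header, since it does not occur on
   every path through the body, but it is partially anticipatable and
   fully available there, so insertion makes the computation under
   the conditional fully redundant and removable.  */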
3690 do_partial_partial_insertion (basic_block block, basic_block dom)
3692 bool new_stuff = false;
3693 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3697 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3699 if (expr->kind != NAME)
3704 bool cant_insert = false;
3707 pre_expr eprime = NULL;
3710 val = get_expr_value_id (expr);
3711 if (bitmap_set_contains_value (PHI_GEN (block), val))
3713 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3716 avail = XCNEWVEC (pre_expr, last_basic_block);
3717 FOR_EACH_EDGE (pred, ei, block->preds)
3719 unsigned int vprime;
3720 pre_expr edoubleprime;
3722 /* We should never run insertion for the exit block
3723 and so should never come across fake pred edges. */
3724 gcc_assert (!(pred->flags & EDGE_FAKE));
3726 eprime = phi_translate (expr, ANTIC_IN (block),
3730 /* eprime will generally only be NULL if the
3731 value of the expression, translated
3732 through the PHI for this predecessor, is
3733 undefined. If that is the case, we can't
3734 make the expression fully redundant,
3735 because its value is undefined along a
3736 predecessor path. We can thus break out
3737 early because it doesn't matter what the
3738 rest of the results are. */
3745 eprime = fully_constant_expression (eprime);
3746 vprime = get_expr_value_id (eprime);
3747 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3749 if (edoubleprime == NULL)
3755 avail[bprime->index] = edoubleprime;
3759 /* If we can insert it and the value is available out of
3760 every predecessor, the expression is fully available here
3761 even though it is only partially anticipatable, so making
3762 it available removes the later computation. */
3763 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3765 pre_stats.pa_insert++;
3766 if (insert_into_preds_of_block (block, get_expression_id (expr),
3774 VEC_free (pre_expr, heap, exprs);
3779 insert_aux (basic_block block)
3782 bool new_stuff = false;
3787 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3792 bitmap_set_t newset = NEW_SETS (dom);
3795 /* Note that we need to value_replace both NEW_SETS and
3796 AVAIL_OUT. In both sets, the value may currently be
3797 represented by some non-simple expression that we want
3798 this one to replace. */
3799 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3801 pre_expr expr = expression_for_id (i);
3802 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3803 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3806 if (!single_pred_p (block))
3808 new_stuff |= do_regular_insertion (block, dom);
3809 if (do_partial_partial)
3810 new_stuff |= do_partial_partial_insertion (block, dom);
3814 for (son = first_dom_son (CDI_DOMINATORS, block);
3816 son = next_dom_son (CDI_DOMINATORS, son))
3818 new_stuff |= insert_aux (son);
3824 /* Perform insertion of partially redundant values. */
3829 bool new_stuff = true;
3831 int num_iterations = 0;
3834 NEW_SETS (bb) = bitmap_set_new ();
3839 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3841 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3845 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */
3848 add_to_exp_gen (basic_block block, tree op)
3853 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3855 result = get_or_alloc_expr_for_name (op);
3856 bitmap_value_insert_into_set (EXP_GEN (block), result);
3860 /* Create value ids for PHI in BLOCK. */
3863 make_values_for_phi (gimple phi, basic_block block)
3865 tree result = gimple_phi_result (phi);
3867 /* We have no need for virtual phis, as they don't represent
3868 actual computations. */
3869 if (is_gimple_reg (result))
3871 pre_expr e = get_or_alloc_expr_for_name (result);
3872 add_to_value (get_expr_value_id (e), e);
3873 bitmap_insert_into_set (PHI_GEN (block), e);
3874 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3878 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3880 tree arg = gimple_phi_arg_def (phi, i);
3881 if (TREE_CODE (arg) == SSA_NAME)
3883 e = get_or_alloc_expr_for_name (arg);
3884 add_to_value (get_expr_value_id (e), e);
3891 /* Compute the AVAIL set for all basic blocks.
3893 This function performs value numbering of the statements in each basic
3894 block. The AVAIL sets are built from information we glean while doing
3895 this value numbering, since the AVAIL sets contain only one entry per
3898 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3899 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
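/* Illustrative example (not from the original sources): if bb2
   dominates bb3 and bb2 contains x_1 = a_1 + b_1, then AVAIL_OUT (bb2)
   records x_1 as a leader for the value of a_1 + b_1, AVAIL_IN (bb3)
   inherits it, and a later computation of a_1 + b_1 in bb3 will find
   x_1 as its leader.  */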
3902 compute_avail (void)
3905 basic_block block, son;
3906 basic_block *worklist;
3910 /* We pretend that default definitions are defined in the entry block.
3911 This includes function arguments and the static chain decl. */
3912 for (i = 1; i < num_ssa_names; ++i)
3914 tree name = ssa_name (i);
3917 || !SSA_NAME_IS_DEFAULT_DEF (name)
3918 || has_zero_uses (name)
3919 || !is_gimple_reg (name))
3922 e = get_or_alloc_expr_for_name (name);
3923 add_to_value (get_expr_value_id (e), e);
3925 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3926 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3929 /* Allocate the worklist. */
3930 worklist = XNEWVEC (basic_block, n_basic_blocks);
3932 /* Seed the algorithm by putting the dominator children of the entry
3933 block on the worklist. */
3934 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3936 son = next_dom_son (CDI_DOMINATORS, son))
3937 worklist[sp++] = son;
3939 /* Loop until the worklist is empty. */
3942 gimple_stmt_iterator gsi;
3945 unsigned int stmt_uid = 1;
3947 /* Pick a block from the worklist. */
3948 block = worklist[--sp];
3950 /* Initially, the set of available values in BLOCK is that of
3951 its immediate dominator. */
3952 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3954 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3956 /* Generate values for PHI nodes. */
3957 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3958 make_values_for_phi (gsi_stmt (gsi), block);
3960 BB_MAY_NOTRETURN (block) = 0;
3962 /* Now compute value numbers and populate value sets with all
3963 the expressions computed in BLOCK. */
3964 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3969 stmt = gsi_stmt (gsi);
3970 gimple_set_uid (stmt, stmt_uid++);
3972 /* Cache whether the basic-block has any non-visible side-effect
3973 or control flow.
3974 If this isn't a call or it is the last stmt in the
3975 basic-block then the CFG represents things correctly. */
3976 if (is_gimple_call (stmt)
3977 && !stmt_ends_bb_p (stmt))
3979 /* Non-looping const functions always return normally.
3980 Otherwise the call might not return or have side-effects
3981 that forbid hoisting possibly trapping expressions
3982 before it. */
3983 int flags = gimple_call_flags (stmt);
3984 if (!(flags & ECF_CONST)
3985 || (flags & ECF_LOOPING_CONST_OR_PURE))
3986 BB_MAY_NOTRETURN (block) = 1;
3989 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3991 pre_expr e = get_or_alloc_expr_for_name (op);
3993 add_to_value (get_expr_value_id (e), e);
3995 bitmap_insert_into_set (TMP_GEN (block), e);
3996 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3999 if (gimple_has_volatile_ops (stmt)
4000 || stmt_could_throw_p (stmt))
4003 switch (gimple_code (stmt))
4006 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4007 add_to_exp_gen (block, op);
4014 vn_reference_op_t vro;
4015 pre_expr result = NULL;
4016 VEC(vn_reference_op_s, heap) *ops = NULL;
4018 if (!can_value_number_call (stmt))
4021 copy_reference_ops_from_call (stmt, &ops);
4022 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
4023 gimple_expr_type (stmt),
4025 VEC_free (vn_reference_op_s, heap, ops);
4029 for (i = 0; VEC_iterate (vn_reference_op_s,
4033 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4034 add_to_exp_gen (block, vro->op0);
4035 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4036 add_to_exp_gen (block, vro->op1);
4037 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4038 add_to_exp_gen (block, vro->op2);
4040 result = (pre_expr) pool_alloc (pre_expr_pool);
4041 result->kind = REFERENCE;
4043 PRE_EXPR_REFERENCE (result) = ref;
4045 get_or_alloc_expression_id (result);
4046 add_to_value (get_expr_value_id (result), result);
4048 bitmap_value_insert_into_set (EXP_GEN (block), result);
4054 pre_expr result = NULL;
4055 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4059 case tcc_comparison:
4064 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4065 gimple_assign_rhs_code (stmt),
4066 gimple_expr_type (stmt),
4067 gimple_assign_rhs1 (stmt),
4068 gimple_assign_rhs2 (stmt),
4069 NULL_TREE, NULL_TREE, &nary);
4074 for (i = 0; i < nary->length; i++)
4075 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4076 add_to_exp_gen (block, nary->op[i]);
4078 result = (pre_expr) pool_alloc (pre_expr_pool);
4079 result->kind = NARY;
4081 PRE_EXPR_NARY (result) = nary;
4085 case tcc_declaration:
4090 vn_reference_op_t vro;
4092 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4098 for (i = 0; VEC_iterate (vn_reference_op_s,
4102 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4103 add_to_exp_gen (block, vro->op0);
4104 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4105 add_to_exp_gen (block, vro->op1);
4106 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4107 add_to_exp_gen (block, vro->op2);
4109 result = (pre_expr) pool_alloc (pre_expr_pool);
4110 result->kind = REFERENCE;
4112 PRE_EXPR_REFERENCE (result) = ref;
4117 /* For any other statement that we don't
4118 recognize, simply add all referenced
4119 SSA_NAMEs to EXP_GEN. */
4120 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4121 add_to_exp_gen (block, op);
4125 get_or_alloc_expression_id (result);
4126 add_to_value (get_expr_value_id (result), result);
4128 bitmap_value_insert_into_set (EXP_GEN (block), result);
4137 /* Put the dominator children of BLOCK on the worklist of blocks
4138 to compute available sets for. */
4139 for (son = first_dom_son (CDI_DOMINATORS, block);
4141 son = next_dom_son (CDI_DOMINATORS, son))
4142 worklist[sp++] = son;
4148 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4149 than the available expressions for it. The insertion point is
4150 right before the first use in STMT. Returns the SSA_NAME that should
4151 be used for replacement. */
4154 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4156 basic_block bb = gimple_bb (stmt);
4157 gimple_stmt_iterator gsi;
4158 gimple_seq stmts = NULL;
4162 /* First create a value expression from the expression we want
4163 to insert and associate it with the value handle for SSA_VN. */
4164 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4168 /* Then use create_expression_by_pieces to generate a valid
4169 expression to insert at this point of the IL stream. */
4170 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4171 if (expr == NULL_TREE)
4173 gsi = gsi_for_stmt (stmt);
4174 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4179 /* Eliminate fully redundant computations. */
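/* A minimal sketch of the effect (names invented for exposition),
   assuming both right-hand sides get the same value number:

     x_1 = a_1 + b_1;
     ...
     y_2 = a_1 + b_1;   // rewritten to y_2 = x_1;

   bitmap_find_leader returns x_1 as the leader in AVAIL_OUT and the
   leader is propagated into the redundant statement.  */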
4184 VEC (gimple, heap) *to_remove = NULL;
4186 unsigned int todo = 0;
4187 gimple_stmt_iterator gsi;
4193 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4195 stmt = gsi_stmt (gsi);
4197 /* Lookup the RHS of the expression, see if we have an
4198 available computation for it. If so, replace the RHS with
4199 the available computation. */
4200 if (gimple_has_lhs (stmt)
4201 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4202 && !gimple_assign_ssa_name_copy_p (stmt)
4203 && (!gimple_assign_single_p (stmt)
4204 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4205 && !gimple_has_volatile_ops (stmt)
4206 && !has_zero_uses (gimple_get_lhs (stmt)))
4208 tree lhs = gimple_get_lhs (stmt);
4209 tree rhs = NULL_TREE;
4211 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4212 pre_expr sprimeexpr;
4214 if (gimple_assign_single_p (stmt))
4215 rhs = gimple_assign_rhs1 (stmt);
4217 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4218 get_expr_value_id (lhsexpr),
4223 if (sprimeexpr->kind == CONSTANT)
4224 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4225 else if (sprimeexpr->kind == NAME)
4226 sprime = PRE_EXPR_NAME (sprimeexpr);
4231 /* If there is no existing leader but SCCVN knows this
4232 value is constant, use that constant. */
4233 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4235 sprime = VN_INFO (lhs)->valnum;
4236 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4237 TREE_TYPE (sprime)))
4238 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4240 if (dump_file && (dump_flags & TDF_DETAILS))
4242 fprintf (dump_file, "Replaced ");
4243 print_gimple_expr (dump_file, stmt, 0, 0);
4244 fprintf (dump_file, " with ");
4245 print_generic_expr (dump_file, sprime, 0);
4246 fprintf (dump_file, " in ");
4247 print_gimple_stmt (dump_file, stmt, 0, 0);
4249 pre_stats.eliminations++;
4250 propagate_tree_value_into_stmt (&gsi, sprime);
4251 stmt = gsi_stmt (gsi);
4256 /* If there is no existing usable leader but SCCVN thinks
4257 it has an expression it wants to use as replacement,
4258 insert that. */
4259 if (!sprime || sprime == lhs)
4261 tree val = VN_INFO (lhs)->valnum;
4263 && TREE_CODE (val) == SSA_NAME
4264 && VN_INFO (val)->needs_insertion
4265 && can_PRE_operation (vn_get_expr_for (val)))
4266 sprime = do_SCCVN_insertion (stmt, val);
4270 && (rhs == NULL_TREE
4271 || TREE_CODE (rhs) != SSA_NAME
4272 || may_propagate_copy (rhs, sprime)))
4274 gcc_assert (sprime != rhs);
4276 if (dump_file && (dump_flags & TDF_DETAILS))
4278 fprintf (dump_file, "Replaced ");
4279 print_gimple_expr (dump_file, stmt, 0, 0);
4280 fprintf (dump_file, " with ");
4281 print_generic_expr (dump_file, sprime, 0);
4282 fprintf (dump_file, " in ");
4283 print_gimple_stmt (dump_file, stmt, 0, 0);
4286 if (TREE_CODE (sprime) == SSA_NAME)
4287 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4289 /* We need to make sure the new and old types actually match,
4290 which may require adding a simple cast, which fold_convert
4291 will do for us. */
4292 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4293 && !useless_type_conversion_p (gimple_expr_type (stmt),
4294 TREE_TYPE (sprime)))
4295 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4297 pre_stats.eliminations++;
4298 propagate_tree_value_into_stmt (&gsi, sprime);
4299 stmt = gsi_stmt (gsi);
4302 /* If we removed EH side effects from the statement, clean
4303 its EH information. */
4304 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4306 bitmap_set_bit (need_eh_cleanup,
4307 gimple_bb (stmt)->index);
4308 if (dump_file && (dump_flags & TDF_DETAILS))
4309 fprintf (dump_file, " Removed EH side effects.\n");
4313 /* If the statement is a scalar store, see if the expression
4314 has the same value number as its rhs. If so, the store is
4315 redundant. */
4316 else if (gimple_assign_single_p (stmt)
4317 && !is_gimple_reg (gimple_assign_lhs (stmt))
4318 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4319 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4321 tree rhs = gimple_assign_rhs1 (stmt);
4323 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4324 gimple_vuse (stmt), true, NULL);
4325 if (TREE_CODE (rhs) == SSA_NAME)
4326 rhs = VN_INFO (rhs)->valnum;
4328 && operand_equal_p (val, rhs, 0))
4330 if (dump_file && (dump_flags & TDF_DETAILS))
4332 fprintf (dump_file, "Deleted redundant store ");
4333 print_gimple_stmt (dump_file, stmt, 0, 0);
4336 /* Queue stmt for removal. */
4337 VEC_safe_push (gimple, heap, to_remove, stmt);
4340 /* Visit COND_EXPRs and fold the comparison with the
4341 available value-numbers. */
4342 else if (gimple_code (stmt) == GIMPLE_COND)
4344 tree op0 = gimple_cond_lhs (stmt);
4345 tree op1 = gimple_cond_rhs (stmt);
4348 if (TREE_CODE (op0) == SSA_NAME)
4349 op0 = VN_INFO (op0)->valnum;
4350 if (TREE_CODE (op1) == SSA_NAME)
4351 op1 = VN_INFO (op1)->valnum;
4352 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4354 if (result && TREE_CODE (result) == INTEGER_CST)
4356 if (integer_zerop (result))
4357 gimple_cond_make_false (stmt);
4359 gimple_cond_make_true (stmt);
4361 todo = TODO_cleanup_cfg;
4364 /* Visit indirect calls and turn them into direct calls if
4365 possible. */
4366 if (gimple_code (stmt) == GIMPLE_CALL
4367 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4369 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4370 if (TREE_CODE (fn) == ADDR_EXPR
4371 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4373 if (dump_file && (dump_flags & TDF_DETAILS))
4375 fprintf (dump_file, "Replacing call target with ");
4376 print_generic_expr (dump_file, fn, 0);
4377 fprintf (dump_file, " in ");
4378 print_gimple_stmt (dump_file, stmt, 0, 0);
4381 gimple_call_set_fn (stmt, fn);
4383 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4385 bitmap_set_bit (need_eh_cleanup,
4386 gimple_bb (stmt)->index);
4387 if (dump_file && (dump_flags & TDF_DETAILS))
4388 fprintf (dump_file, " Removed EH side effects.\n");
4391 /* Changing an indirect call to a direct call may
4392 have exposed different semantics. This may
4393 require an SSA update. */
4394 todo |= TODO_update_ssa_only_virtuals;
4399 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4401 gimple stmt, phi = gsi_stmt (gsi);
4402 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4403 pre_expr sprimeexpr, resexpr;
4404 gimple_stmt_iterator gsi2;
4406 /* We want to perform redundant PHI elimination. Do so by
4407 replacing the PHI with a single copy if possible.
4408 Do not touch inserted, single-argument or virtual PHIs. */
4409 if (gimple_phi_num_args (phi) == 1
4410 || !is_gimple_reg (res))
4416 resexpr = get_or_alloc_expr_for_name (res);
4417 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4418 get_expr_value_id (resexpr), NULL);
4421 if (sprimeexpr->kind == CONSTANT)
4422 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4423 else if (sprimeexpr->kind == NAME)
4424 sprime = PRE_EXPR_NAME (sprimeexpr);
4428 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4430 sprime = VN_INFO (res)->valnum;
4431 if (!useless_type_conversion_p (TREE_TYPE (res),
4432 TREE_TYPE (sprime)))
4433 sprime = fold_convert (TREE_TYPE (res), sprime);
4442 if (dump_file && (dump_flags & TDF_DETAILS))
4444 fprintf (dump_file, "Replaced redundant PHI node defining ");
4445 print_generic_expr (dump_file, res, 0);
4446 fprintf (dump_file, " with ");
4447 print_generic_expr (dump_file, sprime, 0);
4448 fprintf (dump_file, "\n");
4451 remove_phi_node (&gsi, false);
4453 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4454 && TREE_CODE (sprime) == SSA_NAME)
4455 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4457 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4458 sprime = fold_convert (TREE_TYPE (res), sprime);
4459 stmt = gimple_build_assign (res, sprime);
4460 SSA_NAME_DEF_STMT (res) = stmt;
4461 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4463 gsi2 = gsi_after_labels (b);
4464 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4465 /* Queue the copy for eventual removal. */
4466 VEC_safe_push (gimple, heap, to_remove, stmt);
4467 /* If we inserted this PHI node ourselves, it's not an elimination. */
4468 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4471 pre_stats.eliminations++;
4475 /* We cannot remove stmts during the BB walk, and especially cannot
4476 release SSA names there, as this confuses the VN machinery. The
4477 stmts ending up in to_remove are either stores or simple copies. */
4478 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4480 tree lhs = gimple_assign_lhs (stmt);
4481 tree rhs = gimple_assign_rhs1 (stmt);
4482 use_operand_p use_p;
4485 /* If there is a single use only, propagate the equivalency
4486 instead of keeping the copy. */
4487 if (TREE_CODE (lhs) == SSA_NAME
4488 && TREE_CODE (rhs) == SSA_NAME
4489 && single_imm_use (lhs, &use_p, &use_stmt)
4490 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4492 SET_USE (use_p, rhs);
4493 update_stmt (use_stmt);
4494 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4495 && TREE_CODE (rhs) == SSA_NAME)
4496 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4499 /* If this is a store or a now unused copy, remove it. */
4500 if (TREE_CODE (lhs) != SSA_NAME
4501 || has_zero_uses (lhs))
4503 gsi = gsi_for_stmt (stmt);
4504 unlink_stmt_vdef (stmt);
4505 gsi_remove (&gsi, true);
4506 if (TREE_CODE (lhs) == SSA_NAME)
4507 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4508 release_defs (stmt);
4511 VEC_free (gimple, heap, to_remove);
4516 /* Borrow a bit of tree-ssa-dce.c for the moment.
4517 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4518 this may be a bit faster, and we may want critical edges kept split. */
4520 /* If OP's defining statement has not already been determined to be necessary,
4521 mark that statement necessary. Return the stmt if it is newly
4522 necessary, or NULL otherwise. */
4524 static inline gimple
4525 mark_operand_necessary (tree op)
4531 if (TREE_CODE (op) != SSA_NAME)
4534 stmt = SSA_NAME_DEF_STMT (op);
4537 if (gimple_plf (stmt, NECESSARY)
4538 || gimple_nop_p (stmt))
4541 gimple_set_plf (stmt, NECESSARY, true);
4545 /* Because we don't follow the standard PRE algorithm exactly, and decide not
4546 to insert PHI nodes sometimes, and because value numbering of casts isn't
4547 perfect, we sometimes end up inserting dead code. This simple DCE-like
4548 pass removes any insertions we made that weren't actually used. */
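/* For example (illustration only, with an invented name): if insert
   created pretmp.6_9 = a_1 + b_1 on an edge but elimination never
   rewrote any use to pretmp.6_9, the definition never becomes
   NECESSARY and is removed here.  */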
4551 remove_dead_inserted_code (void)
4558 worklist = BITMAP_ALLOC (NULL);
4559 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4561 t = SSA_NAME_DEF_STMT (ssa_name (i));
4562 if (gimple_plf (t, NECESSARY))
4563 bitmap_set_bit (worklist, i);
4565 while (!bitmap_empty_p (worklist))
4567 i = bitmap_first_set_bit (worklist);
4568 bitmap_clear_bit (worklist, i);
4569 t = SSA_NAME_DEF_STMT (ssa_name (i));
4571 /* PHI nodes are somewhat special in that each PHI alternative has
4572 data and control dependencies. All the statements feeding the
4573 PHI node's arguments are always necessary. */
4574 if (gimple_code (t) == GIMPLE_PHI)
4578 for (k = 0; k < gimple_phi_num_args (t); k++)
4580 tree arg = PHI_ARG_DEF (t, k);
4581 if (TREE_CODE (arg) == SSA_NAME)
4583 gimple n = mark_operand_necessary (arg);
4585 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4591 /* Propagate through the operands. Examine all the USE, VUSE and
4592 VDEF operands in this statement. Mark all the statements
4593 which feed this statement's uses as necessary. */
4597 /* The operands of VDEF expressions are also needed as they
4598 represent potential definitions that may reach this
4599 statement (VDEF operands allow us to follow def-def
4600 links). */
4602 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4604 gimple n = mark_operand_necessary (use);
4606 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4611 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4613 t = SSA_NAME_DEF_STMT (ssa_name (i));
4614 if (!gimple_plf (t, NECESSARY))
4616 gimple_stmt_iterator gsi;
4618 if (dump_file && (dump_flags & TDF_DETAILS))
4620 fprintf (dump_file, "Removing unnecessary insertion:");
4621 print_gimple_stmt (dump_file, t, 0, 0);
4624 gsi = gsi_for_stmt (t);
4625 if (gimple_code (t) == GIMPLE_PHI)
4626 remove_phi_node (&gsi, true);
4629 gsi_remove (&gsi, true);
4634 BITMAP_FREE (worklist);
4637 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4638 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4639 the number of visited blocks. */
static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the last edge on to the stack.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
	{
	  /* Mark that we have visited the source.  */
	  SET_BIT (visited, src->index);

	  if (EDGE_COUNT (src->preds) > 0)
	    /* Since the SRC node has been visited for the first
	       time, check its predecessors.  */
	    stack[sp++] = ei_start (src->preds);
	  else
	    post_order[post_order_num++] = src->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
	    post_order[post_order_num++] = dest->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
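/* init_pre below stores the ordering computed here in POSTORDER;
   iterating blocks in that order means a block is typically visited
   before its predecessors, which should help the backwards ANTIC
   iteration converge in fewer passes.  */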
/* Initialize data structures used by PRE.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			 get_max_value_id () + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = BITMAP_ALLOC (NULL);
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  FOR_ALL_BB (bb)
    bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
				  pre_expr_hash,
				  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
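/* After init_pre, each basic block's AUX field points at a zeroed
   struct bb_bitmap_sets, which the EXP_GEN, PHI_GEN, TMP_GEN and
   AVAIL_OUT accessors used above presumably read through.  */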
/* Deallocate data structures used by PRE.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);

  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }
  BITMAP_FREE (need_eh_cleanup);

  if (!do_fre)
    loop_optimizer_finalize ();
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */
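/* The overall flow below: run SCCVN value numbering; compute AVAIL
   per block; for PRE proper, compute ANTIC and perform insertions;
   eliminate redundancies; and finally remove any dead code that the
   insertions created.  */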
static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre))
    {
      if (!do_fre)
	loop_optimizer_finalize ();
      return 0;
    }

  init_pre (do_fre);
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  clear_expression_ids ();
  free_scc_vn ();
  if (!do_fre)
    remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre (do_fre);

  return todo;
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};