/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion of the value of that expression in that block, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  A source-level sketch
   of the overall effect follows this comment.  */
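
/* As an illustration of the four steps above, here is a minimal
   source-level sketch.  It is not part of this pass and is kept under
   "#if 0"; the function names are hypothetical.  */

#if 0
int
example_before (int a, int b, int flag)
{
  int x = 0;
  if (flag)
    x = a + b;          /* a + b is available on this path only ...  */
  return x + (a + b);   /* ... so this computation is partially redundant.  */
}

/* After step three inserts a + b on the path where it was missing,
   step four can eliminate the now fully redundant computation.  */

int
example_after (int a, int b, int flag)
{
  int x = 0, t;
  if (flag)
    {
      t = a + b;
      x = t;
    }
  else
    t = a + b;          /* Inserted: a + b is now available on all paths.  */
  return x + t;         /* Fully redundant computation eliminated.  */
}
#endif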
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
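
/* As an illustration of this two-bitmap scheme (a simplified sketch,
   not compiled, with plain unsigned masks standing in for bitmaps and
   hypothetical names; ids are assumed to be smaller than 32): the
   value bitmap answers "does the set contain some expression with
   this value?" in a single bit test, while the expression bitmap
   records exactly which expressions those are.  */

#if 0
struct toy_bitmap_set
{
  unsigned expressions; /* Bit i set: expression with id i is in the set.  */
  unsigned values;      /* Bit v set: some expression with value id v is.  */
};

static void
toy_insert (struct toy_bitmap_set *set, unsigned expr_id, unsigned value_id)
{
  set->expressions |= 1u << expr_id;
  set->values |= 1u << value_id;
}

static int
toy_contains_value (const struct toy_bitmap_set *set, unsigned value_id)
{
  return (set->values >> value_id) & 1;
}
#endif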
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
  NAME,
  NARY,
  REFERENCE,
  CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare two pre_exprs for equality.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash a pre_expr.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Look up the expression id for EXPR; return 0 if it has none yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;
} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  free (*slot);
  *slot = (void *) new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   the set.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */

DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   one.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of
         them have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;

        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to VUSE, or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      add_referenced_var (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
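
        /* Translate each SSA_NAME operand of the copied NARY through
           the PRED->PHIBLOCK edge: find a leader for the operand's
           value, phi translate that leader, and substitute a
           representative name for the result, noting whether any
           operand actually changed.  */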
        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                /* Make sure we do not recursively translate ourselves
                   like for translating a[n_1] with the leader for
                   n_1 being a[n_1].  */
                if (get_expression_id (leader) != get_expression_id (expr))
                  {
                    opresult = phi_translate (leader, set1, set2,
                                              pred, phiblock);
                    if (!opresult)
                      break;
                    if (opresult != leader)
                      {
                        tree name = get_representative_for (opresult);
                        if (!name)
                          break;
                        changed |= name != op[n];
                        op[n] = name;
                      }
                  }
              }
            if (n != 3)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op[0]) == INTEGER_CST
                && TREE_CODE (op[1]) == INTEGER_CST
                && TREE_CODE (op[2]) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op[0]);
                off = double_int_add (off,
                                      double_int_neg
                                        (tree_to_double_int (op[1])));
                off = double_int_mul (off, tree_to_double_int (op[2]));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;
            bool converted = false;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, VN_WALK);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              {
                result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
                converted = true;
              }
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }

            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
                return get_or_alloc_expr_for_constant (result);
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (converted)
              {
                vn_nary_op_t nary;
                tree nresult;

                gcc_assert (CONVERT_EXPR_P (result)
                            || TREE_CODE (result) == VIEW_CONVERT_EXPR);

                nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
                                                    TREE_TYPE (result),
                                                    &TREE_OPERAND (result, 0),
                                                    &nary);
                if (nresult && is_gimple_min_invariant (nresult))
                  return get_or_alloc_expr_for_constant (nresult);

                expr->kind = NARY;
                if (nary)
                  {
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;

                    new_val_id = nary->value_id;
                    get_or_alloc_expression_id (expr);
                  }
                else
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                    nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
                                                     TREE_TYPE (result),
                                                     &TREE_OPERAND (result, 0),
                                                     NULL_TREE,
                                                     new_val_id);
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;
                    get_or_alloc_expression_id (expr);
                  }
              }
            else if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
                                &set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  /* PRE insertions are at the end of the basic-block
                     and have an uid 0.  */
                  && (gimple_uid (def_stmt) == 0
                      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;
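
  /* The EXPR_DIES cache stores two bits per expression: bit ID * 2
     records whether the answer has been computed for this block at
     all, and bit ID * 2 + 1 records the answer itself.  */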
  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
#define union_contains_value(SET1, SET2, VAL) \
  (bitmap_set_contains_value ((SET1), (VAL)) \
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))

/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */

static bool
vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
                   vn_reference_op_t vro)
{
  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
    {
      struct pre_expr_d temp;

      temp.kind = NAME;
      PRE_EXPR_NAME (&temp) = vro->op0;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }
  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
    {
      struct pre_expr_d temp;

      temp.kind = NAME;
      PRE_EXPR_NAME (&temp) = vro->op1;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }
  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
    {
      struct pre_expr_d temp;

      temp.kind = NAME;
      PRE_EXPR_NAME (&temp) = vro->op2;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
        return false;
      if (!union_contains_value (set1, set2,
                                 get_expr_value_id (&temp)))
        return false;
    }

  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
               basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
          {
            if (TREE_CODE (nary->op[i]) == SSA_NAME)
              {
                struct pre_expr_d temp;

                temp.kind = NAME;
                PRE_EXPR_NAME (&temp) = nary->op[i];
                temp.id = lookup_expression_id (&temp);
                if (temp.id == 0)
                  return false;
                if (!union_contains_value (set1, set2,
                                           get_expr_value_id (&temp)))
                  return false;
              }
          }
        /* If the NARY may trap make sure the block does not contain
           a possible exit point.
           ??? This is overly conservative if we translate AVAIL_OUT
           as the available expression might be after the exit point.  */
        if (BB_MAY_NOTRETURN (block)
            && vn_nary_may_trap (nary))
          return false;
        return true;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;
        unsigned int i;

        FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
          {
            if (!vro_valid_in_sets (set1, set2, vro))
              return false;
          }
        if (ref->vuse)
          {
            gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
            if (!gimple_nop_p (def_stmt)
                && gimple_bb (def_stmt) != block
                && !dominated_by_p (CDI_DOMINATORS,
                                    block, gimple_bb (def_stmt)))
              return false;
          }
        return !value_dies_in_block_x (expr, block);
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr, block))
        bitmap_remove_from_set (set1, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (!valid_in_sets (set, NULL, expr, block))
        bitmap_remove_from_set (set, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;

/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
                              basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      SET_BIT (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
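
/* A self-contained sketch (kept under "#if 0", not compiled, with
   hypothetical names) of the equations above, using unsigned masks
   for the value sets and omitting phi translation: we iterate to a
   fixpoint because, as described earlier, ANTIC_IN can shrink once
   leaders die past their definition point.  */

#if 0
#define N_SKETCH_BLOCKS 4

static unsigned sketch_antic_in[N_SKETCH_BLOCKS];

static void
antic_fixpoint_sketch (const unsigned exp_gen[N_SKETCH_BLOCKS],
                       const unsigned tmp_gen[N_SKETCH_BLOCKS],
                       const int succ[N_SKETCH_BLOCKS][2],
                       const int n_succ[N_SKETCH_BLOCKS])
{
  int changed = 1;
  int bb, i;

  while (changed)
    {
      changed = 0;
      /* Walk blocks so that successors tend to be seen first; the
         real pass walks a postorder of the CFG.  */
      for (bb = N_SKETCH_BLOCKS - 1; bb >= 0; bb--)
        {
          /* ANTIC_OUT = intersection of ANTIC_IN over all successors
             (phi translation omitted in this sketch).  */
          unsigned antic_out = n_succ[bb] ? ~0u : 0u;
          unsigned new_in;
          for (i = 0; i < n_succ[bb]; i++)
            antic_out &= sketch_antic_in[succ[bb][i]];
          /* ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN; the real pass
             additionally runs clean () on the result.  */
          new_in = (antic_out | exp_gen[bb]) & ~tmp_gen[bb];
          if (new_in != sketch_antic_in[bb])
            {
              sketch_antic_in[bb] = new_in;
              changed = 1;
            }
        }
    }
}
#endif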
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
         phi translate the maximal set, which is incredibly slow
         (since the maximal set often has 300+ members, even when you
         have a small number of blocks).
         Basically, we defer the computation of ANTIC for this block
         until we have processed its successor, which will inevitably
         have a *much* smaller set of values to phi translate once
         clean has been run on it.
         The cost of doing this is that we technically perform more
         iterations, however, they are lower cost iterations.

         Timings for PRE on tramp3d-v4:
         without maximal set fix: 11 seconds
         with maximal set fix/without deferring: 26 seconds
         with maximal set fix/with deferring: 11 seconds
     */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
                                         block, succ_bb))
        {
          changed = true;
          goto maybe_dump_sets;
        }
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first = NULL;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (!first
              && BB_VISITED (e->dest))
            first = e->dest;
          else if (BB_VISITED (e->dest))
            VEC_quick_push (basic_block, worklist, e->dest);
        }

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
        {
          SET_BIT (changed_blocks, block->index);
          BB_VISITED (block) = 0;
          BB_DEFERRED (block) = 1;
          changed = true;
          VEC_free (basic_block, heap, worklist);
          goto maybe_dump_sets;
        }

      if (!gimple_seq_empty_p (phi_nodes (first)))
        phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
        bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
        {
          if (!gimple_seq_empty_p (phi_nodes (bprime)))
            {
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
              bitmap_set_and (ANTIC_OUT, tmp);
              bitmap_set_free (tmp);
            }
          else
            bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
        }
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
                                          TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
                                  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
        SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
        {
          if (ANTIC_OUT)
            print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

          print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
                            block->index);

          if (S)
            print_bitmap_set (dump_file, S, "S", block->index);
        }
      else
        {
          fprintf (dump_file,
                   "Block %d was deferred for a future iteration.\n",
                   block->index);
        }
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
2378 /* Compute PARTIAL_ANTIC for BLOCK.
2380 If succs(BLOCK) > 1 then
2381 PA_OUT[BLOCK] = the value-wise union, over every successor b of BLOCK,
2382 of PA_IN[b] plus all of ANTIC_IN[b] not in ANTIC_OUT
2383 else if succs(BLOCK) == 1 then
2384 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2386 PA_IN[BLOCK] = dependent_clean (PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2387 - ANTIC_IN[BLOCK]) */
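/* Illustrative sketch (not from the sources): suppose BLOCK has two
   successors B1 and B2 with PA_IN[B1] = { a + b }, PA_IN[B2] = { },
   ANTIC_IN[B2] = { a + b }, and a + b not in ANTIC_OUT.  The union
   rule above yields PA_OUT[BLOCK] = { a + b }: the expression is
   anticipatable along some but not all paths, which is exactly the
   partial-anticipation property this function records.  */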
2391 compute_partial_antic_aux (basic_block block,
2392 bool block_has_abnormal_pred_edge)
2394 bool changed = false;
2395 bitmap_set_t old_PA_IN;
2396 bitmap_set_t PA_OUT;
2399 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2401 old_PA_IN = PA_OUT = NULL;
2403 /* If any edges from predecessors are abnormal, antic_in is empty,
2404 so do nothing. */
2405 if (block_has_abnormal_pred_edge)
2406 goto maybe_dump_sets;
2408 /* If there are too many partially anticipatable values in the
2409 block, phi_translate_set can take an exponential time: stop
2410 before the translation starts. */
2412 && single_succ_p (block)
2413 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2414 goto maybe_dump_sets;
2416 old_PA_IN = PA_IN (block);
2417 PA_OUT = bitmap_set_new ();
2419 /* If the block has no successors, PA_OUT is empty. */
2420 if (EDGE_COUNT (block->succs) == 0)
2422 /* If we have one successor, we could have some phi nodes to
2423 translate through. Note that we can't phi translate across DFS
2424 back edges in partial antic, because it uses a union operation on
2425 the successors. For recurrences like IVs, we will end up
2426 generating a new value in the set on each go-around (i + 3 (VH.1),
2427 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2428 else if (single_succ_p (block))
2430 basic_block succ = single_succ (block);
2431 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2432 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2434 /* If we have multiple successors, we take the union of all of
2435 them. */
2438 VEC(basic_block, heap) *worklist;
2442 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2443 FOR_EACH_EDGE (e, ei, block->succs)
2445 if (e->flags & EDGE_DFS_BACK)
2447 VEC_quick_push (basic_block, worklist, e->dest);
2449 if (VEC_length (basic_block, worklist) > 0)
2451 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2456 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2457 bitmap_value_insert_into_set (PA_OUT,
2458 expression_for_id (i));
2459 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2461 bitmap_set_t pa_in = bitmap_set_new ();
2462 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2463 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2464 bitmap_value_insert_into_set (PA_OUT,
2465 expression_for_id (i));
2466 bitmap_set_free (pa_in);
2469 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2470 bitmap_value_insert_into_set (PA_OUT,
2471 expression_for_id (i));
2474 VEC_free (basic_block, heap, worklist);
2477 /* PA_IN starts with PA_OUT - TMP_GEN.
2478 Then we subtract things from ANTIC_IN. */
2479 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2481 /* For partial antic, we want to put back in the phi results, since
2482 we will properly avoid making them partially antic over backedges. */
2483 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2484 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2486 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2487 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2489 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2491 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2494 SET_BIT (changed_blocks, block->index);
2495 FOR_EACH_EDGE (e, ei, block->preds)
2496 SET_BIT (changed_blocks, e->src->index);
2499 RESET_BIT (changed_blocks, block->index);
2502 if (dump_file && (dump_flags & TDF_DETAILS))
2505 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2507 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2510 bitmap_set_free (old_PA_IN);
2512 bitmap_set_free (PA_OUT);
2516 /* Compute ANTIC and partial ANTIC sets. */
2519 compute_antic (void)
2521 bool changed = true;
2522 int num_iterations = 0;
2526 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2527 We pre-build the map of blocks with incoming abnormal edges here. */
2528 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2529 sbitmap_zero (has_abnormal_preds);
2536 FOR_EACH_EDGE (e, ei, block->preds)
2538 e->flags &= ~EDGE_DFS_BACK;
2539 if (e->flags & EDGE_ABNORMAL)
2541 SET_BIT (has_abnormal_preds, block->index);
2546 BB_VISITED (block) = 0;
2547 BB_DEFERRED (block) = 0;
2549 /* While we are here, give empty ANTIC_IN sets to each block. */
2550 ANTIC_IN (block) = bitmap_set_new ();
2551 PA_IN (block) = bitmap_set_new ();
2554 /* At the exit block we anticipate nothing. */
2555 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2556 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2557 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2559 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2560 sbitmap_ones (changed_blocks);
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2564 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2565 /* ??? We need to clear our PHI translation cache here as the
2566 ANTIC sets shrink and we restrict valid translations to
2567 those having operands with leaders in ANTIC. Same below
2568 for PA ANTIC computation. */
2571 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2573 if (TEST_BIT (changed_blocks, postorder[i]))
2575 basic_block block = BASIC_BLOCK (postorder[i]);
2576 changed |= compute_antic_aux (block,
2577 TEST_BIT (has_abnormal_preds,
2581 /* Theoretically possible, but *highly* unlikely. */
2582 gcc_checking_assert (num_iterations < 500);
2585 statistics_histogram_event (cfun, "compute_antic iterations",
2588 if (do_partial_partial)
2590 sbitmap_ones (changed_blocks);
2591 mark_dfs_back_edges ();
2596 if (dump_file && (dump_flags & TDF_DETAILS))
2597 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2600 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2602 if (TEST_BIT (changed_blocks, postorder[i]))
2604 basic_block block = BASIC_BLOCK (postorder[i]);
2606 |= compute_partial_antic_aux (block,
2607 TEST_BIT (has_abnormal_preds,
2611 /* Theoretically possible, but *highly* unlikely. */
2612 gcc_checking_assert (num_iterations < 500);
2614 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2617 sbitmap_free (has_abnormal_preds);
2618 sbitmap_free (changed_blocks);
2621 /* Return true if we can value number the call in STMT. This is true
2622 if we have a pure or constant call to a real function. */
2625 can_value_number_call (gimple stmt)
2627 if (gimple_call_internal_p (stmt))
2629 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2634 /* Return true if OP is a tree which we can perform PRE on.
2635 This may not match the operations we can value number, but in
2636 a perfect world would. */
2639 can_PRE_operation (tree op)
2641 return UNARY_CLASS_P (op)
2642 || BINARY_CLASS_P (op)
2643 || COMPARISON_CLASS_P (op)
2644 || TREE_CODE (op) == MEM_REF
2645 || TREE_CODE (op) == COMPONENT_REF
2646 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2647 || TREE_CODE (op) == CALL_EXPR
2648 || TREE_CODE (op) == ARRAY_REF;
2652 /* Inserted expressions are placed onto this worklist, which is used
2653 for performing quick dead code elimination of insertions we made
2654 that didn't turn out to be necessary. */
2655 static bitmap inserted_exprs;
2657 /* Pool allocated fake store expressions are placed onto this
2658 worklist, which, after performing dead code elimination, is walked
2659 to see which expressions need to be put into GC'able memory. */
2660 static VEC(gimple, heap) *need_creation;
2662 /* The actual worker for create_component_ref_by_pieces. */
2665 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2666 unsigned int *operand, gimple_seq *stmts,
2669 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2673 switch (currop->opcode)
2677 tree folded, sc = NULL_TREE;
2678 unsigned int nargs = 0;
2680 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2684 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2685 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2691 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2692 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2696 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2697 ref->operands) - 1);
2698 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2700 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2710 folded = build_call_array (currop->type,
2711 (TREE_CODE (fn) == FUNCTION_DECL
2712 ? build_fold_addr_expr (fn) : fn),
2716 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2722 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2724 tree offset = currop->op0;
2727 if (TREE_CODE (baseop) == ADDR_EXPR
2728 && handled_component_p (TREE_OPERAND (baseop, 0)))
2732 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2735 offset = int_const_binop (PLUS_EXPR, offset,
2736 build_int_cst (TREE_TYPE (offset),
2738 baseop = build_fold_addr_expr (base);
2740 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2743 case TARGET_MEM_REF:
2745 pre_expr op0expr, op1expr;
2746 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2747 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2749 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2755 op0expr = get_or_alloc_expr_for (currop->op0);
2756 genop0 = find_or_generate_expression (block, op0expr,
2763 op1expr = get_or_alloc_expr_for (nextop->op0);
2764 genop1 = find_or_generate_expression (block, op1expr,
2769 return build5 (TARGET_MEM_REF, currop->type,
2770 baseop, currop->op2, genop0, currop->op1, genop1);
2776 gcc_assert (is_gimple_min_invariant (currop->op0));
2782 case VIEW_CONVERT_EXPR:
2785 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2790 folded = fold_build1 (currop->opcode, currop->type,
2798 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2800 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2801 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2807 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2810 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2813 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2818 /* For array ref vn_reference_op's, operand 1 of the array ref
2819 is op0 of the reference op and operand 3 of the array ref is
2820 op1. */
2821 case ARRAY_RANGE_REF:
2825 tree genop1 = currop->op0;
2827 tree genop2 = currop->op1;
2829 tree genop3 = currop->op2;
2831 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2835 op1expr = get_or_alloc_expr_for (genop1);
2836 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2841 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2842 /* Drop zero minimum index if redundant. */
2843 if (integer_zerop (genop2)
2845 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2849 op2expr = get_or_alloc_expr_for (genop2);
2850 genop2 = find_or_generate_expression (block, op2expr, stmts,
2858 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2859 /* We can't always put a size in units of the element alignment
2860 here, as the element alignment may not be visible. See
2861 PR43783. Simply drop the element size for constant
2862 element sizes. */
2863 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2867 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2868 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2869 op3expr = get_or_alloc_expr_for (genop3);
2870 genop3 = find_or_generate_expression (block, op3expr, stmts,
2876 return build4 (currop->opcode, currop->type, genop0, genop1,
2883 tree genop2 = currop->op1;
2885 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2889 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by
2890 themselves. */
2894 op2expr = get_or_alloc_expr_for (genop2);
2895 genop2 = find_or_generate_expression (block, op2expr, stmts,
2901 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2907 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2908 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2929 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2930 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2931 trying to rename aggregates into SSA form directly, which is a no-no.
2933 Thus, this routine doesn't create temporaries, it just builds a
2934 single access expression for the array, calling
2935 find_or_generate_expression to build the innermost pieces.
2937 This function is a subroutine of create_expression_by_pieces, and
2938 should not be called on its own unless you really know what you
2939 are doing. */
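/* As a hedged illustration (names invented): for a reference whose
   operand vector describes x->a[i].f, the pieces are reassembled from
   the innermost operand outwards, roughly

     COMPONENT_REF (ARRAY_REF (MEM_REF (x, 0), i), f)

   with find_or_generate_expression supplying SSA leaders for x and i
   rather than materializing intermediate aggregate temporaries.  */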
2942 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2943 gimple_seq *stmts, gimple domstmt)
2945 unsigned int op = 0;
2946 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2949 /* Find a leader for an expression, or generate one using
2950 create_expression_by_pieces if it's ANTIC but
2952 BLOCK is the basic_block we are looking for leaders in.
2953 EXPR is the expression to find a leader or generate for.
2954 STMTS is the statement list to put the inserted expressions on.
2955 Returns the SSA_NAME of the LHS of the generated expression or the
2956 leader.
2957 DOMSTMT if non-NULL is a statement that should be dominated by
2958 all uses in the generated expression. If DOMSTMT is non-NULL this
2959 routine can fail and return NULL_TREE. Otherwise it will assert
2960 on failure. */
2963 find_or_generate_expression (basic_block block, pre_expr expr,
2964 gimple_seq *stmts, gimple domstmt)
2966 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2967 get_expr_value_id (expr), domstmt);
2971 if (leader->kind == NAME)
2972 genop = PRE_EXPR_NAME (leader);
2973 else if (leader->kind == CONSTANT)
2974 genop = PRE_EXPR_CONSTANT (leader);
2977 /* If it's still NULL, it must be a complex expression, so generate
2978 it recursively. Not so if inserting expressions for values generated
2979 by SCCVN. */
2983 bitmap_set_t exprset;
2984 unsigned int lookfor = get_expr_value_id (expr);
2985 bool handled = false;
2989 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2990 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2992 pre_expr temp = expression_for_id (i);
2993 if (temp->kind != NAME)
2996 genop = create_expression_by_pieces (block, temp, stmts,
2998 get_expr_type (expr));
3002 if (!handled && domstmt)
3005 gcc_assert (handled);
3010 #define NECESSARY GF_PLF_1
3012 /* Create an expression in pieces, so that we can handle very complex
3013 expressions that may be ANTIC, but not necessarily GIMPLE.
3014 BLOCK is the basic block the expression will be inserted into,
3015 EXPR is the expression to insert (in value form)
3016 STMTS is a statement list to append the necessary insertions into.
3018 This function will die if we hit some value that shouldn't be
3019 ANTIC but is (i.e. there is no leader for it, or for its components).
3020 This function may also generate expressions that are themselves
3021 partially or fully redundant. Those that are will be either made
3022 fully redundant during the next iteration of insert (for partially
3023 redundant ones), or eliminated by eliminate (for fully redundant
3026 If DOMSTMT is non-NULL then we make sure that all uses in the
3027 expressions dominate that statement. In this case the function
3028 can return NULL_TREE to signal failure. */
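/* Illustrative sketch (SSA names invented): for the value-expression
   <a_1 + 5> this appends something like

     pretmp.4_6 = a_1 + 5;

   to STMTS, gives pretmp.4_6 the expression's value-id, registers it
   in NEW_SETS and AVAIL_OUT for BLOCK, and returns the new SSA_NAME.  */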
3031 create_expression_by_pieces (basic_block block, pre_expr expr,
3032 gimple_seq *stmts, gimple domstmt, tree type)
3036 gimple_seq forced_stmts = NULL;
3037 unsigned int value_id;
3038 gimple_stmt_iterator gsi;
3039 tree exprtype = type ? type : get_expr_type (expr);
3045 /* We may hit the NAME/CONSTANT case if we have to convert types
3046 that value numbering saw through. */
3048 folded = PRE_EXPR_NAME (expr);
3051 folded = PRE_EXPR_CONSTANT (expr);
3055 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3056 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3061 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3064 for (i = 0; i < nary->length; ++i)
3066 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3067 genop[i] = find_or_generate_expression (block, op,
3071 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3072 may have conversions stripped. */
3073 if (nary->opcode == POINTER_PLUS_EXPR)
3076 genop[i] = fold_convert (nary->type, genop[i]);
3078 genop[i] = convert_to_ptrofftype (genop[i]);
3081 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3083 if (nary->opcode == CONSTRUCTOR)
3085 VEC(constructor_elt,gc) *elts = NULL;
3086 for (i = 0; i < nary->length; ++i)
3087 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3088 folded = build_constructor (nary->type, elts);
3092 switch (nary->length)
3095 folded = fold_build1 (nary->opcode, nary->type,
3099 folded = fold_build2 (nary->opcode, nary->type,
3100 genop[0], genop[1]);
3103 folded = fold_build3 (nary->opcode, nary->type,
3104 genop[0], genop[1], genop[2]);
3116 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3117 folded = fold_convert (exprtype, folded);
3119 /* Force the generated expression to be a sequence of GIMPLE
3120 statements.
3121 We have to call unshare_expr because force_gimple_operand may
3122 modify the tree we pass to it. */
3123 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3126 /* If we have any intermediate expressions to the value sets, add them
3127 to the value sets and chain them in the instruction stream. */
3130 gsi = gsi_start (forced_stmts);
3131 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3133 gimple stmt = gsi_stmt (gsi);
3134 tree forcedname = gimple_get_lhs (stmt);
3137 if (TREE_CODE (forcedname) == SSA_NAME)
3139 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3140 VN_INFO_GET (forcedname)->valnum = forcedname;
3141 VN_INFO (forcedname)->value_id = get_next_value_id ();
3142 nameexpr = get_or_alloc_expr_for_name (forcedname);
3143 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3145 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3146 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3148 mark_symbols_for_renaming (stmt);
3150 gimple_seq_add_seq (stmts, forced_stmts);
3153 /* Build and insert the assignment of the end result to the temporary
3154 that we will return. */
3155 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3156 pretemp = create_tmp_reg (exprtype, "pretmp");
3159 add_referenced_var (temp);
3161 newstmt = gimple_build_assign (temp, folded);
3162 name = make_ssa_name (temp, newstmt);
3163 gimple_assign_set_lhs (newstmt, name);
3164 gimple_set_plf (newstmt, NECESSARY, false);
3166 gimple_seq_add_stmt (stmts, newstmt);
3167 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3169 /* All the symbols in NEWSTMT should be put into SSA form. */
3170 mark_symbols_for_renaming (newstmt);
3172 /* Fold the last statement. */
3173 gsi = gsi_last (*stmts);
3174 if (fold_stmt_inplace (&gsi))
3175 update_stmt (gsi_stmt (gsi));
3177 /* Add a value number to the temporary.
3178 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3179 we are creating the expression by pieces, and this particular piece of
3180 the expression may have been represented. There is no harm in replacing
3181 here. */
3182 VN_INFO_GET (name)->valnum = name;
3183 value_id = get_expr_value_id (expr);
3184 VN_INFO (name)->value_id = value_id;
3185 nameexpr = get_or_alloc_expr_for_name (name);
3186 add_to_value (value_id, nameexpr);
3187 if (NEW_SETS (block))
3188 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3189 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3191 pre_stats.insertions++;
3192 if (dump_file && (dump_flags & TDF_DETAILS))
3194 fprintf (dump_file, "Inserted ");
3195 print_gimple_stmt (dump_file, newstmt, 0, 0);
3196 fprintf (dump_file, " in predecessor %d\n", block->index);
3203 /* Returns true if we want to inhibit the insertions of PHI nodes
3204 for the given EXPR for basic block BB (a member of a loop).
3205 We want to do this when we fear that the induction variable we
3206 create might inhibit vectorization. */
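/* Hedged example (illustrative only): in

     for (i = 0; i < n; ++i)
       sum += a[i];

   the index of a[i] is a simple induction variable in the sense of
   simple_iv.  Inserting a PHI for the access here would materialize
   that IV and could keep the vectorizer from recognizing the loop,
   so the insertion is inhibited.  */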
3209 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3211 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3212 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3213 vn_reference_op_t op;
3216 /* If we aren't going to vectorize we don't inhibit anything. */
3217 if (!flag_tree_vectorize)
3220 /* Otherwise we inhibit the insertion when the address of the
3221 memory reference is a simple induction variable. In other
3222 cases the vectorizer won't do anything anyway (either it's
3223 loop invariant or a complicated expression). */
3224 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3229 case ARRAY_RANGE_REF:
3230 if (TREE_CODE (op->op0) != SSA_NAME)
3235 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3237 /* Default defs are loop invariant. */
3240 /* Defined outside this loop, also loop invariant. */
3241 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3243 /* If it's a simple induction variable inhibit insertion,
3244 the vectorizer might be interested in this one. */
3245 if (simple_iv (bb->loop_father, bb->loop_father,
3246 op->op0, &iv, true))
3248 /* No simple IV, vectorizer can't do anything, hence no
3249 reason to inhibit the transformation for this operand. */
3259 /* Insert the to-be-made-available values of expression EXPRNUM for each
3260 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3261 merge the result with a phi node, given the same value number as
3262 NODE. Return true if we have inserted new stuff. */
3265 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3268 pre_expr expr = expression_for_id (exprnum);
3270 unsigned int val = get_expr_value_id (expr);
3272 bool insertions = false;
3277 tree type = get_expr_type (expr);
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3283 fprintf (dump_file, "Found partial redundancy for expression ");
3284 print_pre_expr (dump_file, expr);
3285 fprintf (dump_file, " (%04d)\n", val);
3288 /* Make sure we aren't creating an induction variable. */
3289 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3291 bool firstinsideloop = false;
3292 bool secondinsideloop = false;
3293 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3294 EDGE_PRED (block, 0)->src);
3295 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3296 EDGE_PRED (block, 1)->src);
3297 /* Induction variables only have one edge inside the loop. */
3298 if ((firstinsideloop ^ secondinsideloop)
3299 && (expr->kind != REFERENCE
3300 || inhibit_phi_insertion (block, expr)))
3302 if (dump_file && (dump_flags & TDF_DETAILS))
3303 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3308 /* Make the necessary insertions. */
3309 FOR_EACH_EDGE (pred, ei, block->preds)
3311 gimple_seq stmts = NULL;
3314 eprime = avail[bprime->index];
3316 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3318 builtexpr = create_expression_by_pieces (bprime,
3322 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3323 gsi_insert_seq_on_edge (pred, stmts);
3324 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3327 else if (eprime->kind == CONSTANT)
3329 /* Constants may not have the right type, fold_convert
3330 should give us back a constant with the right type. */
3332 tree constant = PRE_EXPR_CONSTANT (eprime);
3333 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3335 tree builtexpr = fold_convert (type, constant);
3336 if (!is_gimple_min_invariant (builtexpr))
3338 tree forcedexpr = force_gimple_operand (builtexpr,
3341 if (!is_gimple_min_invariant (forcedexpr))
3343 if (forcedexpr != builtexpr)
3345 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3346 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3350 gimple_stmt_iterator gsi;
3351 gsi = gsi_start (stmts);
3352 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3354 gimple stmt = gsi_stmt (gsi);
3355 tree lhs = gimple_get_lhs (stmt);
3356 if (TREE_CODE (lhs) == SSA_NAME)
3357 bitmap_set_bit (inserted_exprs,
3358 SSA_NAME_VERSION (lhs));
3359 gimple_set_plf (stmt, NECESSARY, false);
3361 gsi_insert_seq_on_edge (pred, stmts);
3363 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3367 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3370 else if (eprime->kind == NAME)
3372 /* We may have to do a conversion because our value
3373 numbering can look through types in certain cases, but
3374 our IL requires all operands of a phi node have the same
3375 type. */
3376 tree name = PRE_EXPR_NAME (eprime);
3377 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3381 builtexpr = fold_convert (type, name);
3382 forcedexpr = force_gimple_operand (builtexpr,
3386 if (forcedexpr != name)
3388 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3389 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3394 gimple_stmt_iterator gsi;
3395 gsi = gsi_start (stmts);
3396 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3398 gimple stmt = gsi_stmt (gsi);
3399 tree lhs = gimple_get_lhs (stmt);
3400 if (TREE_CODE (lhs) == SSA_NAME)
3401 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3402 gimple_set_plf (stmt, NECESSARY, false);
3404 gsi_insert_seq_on_edge (pred, stmts);
3406 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3410 /* If we didn't want a phi node, and we made insertions, we still have
3411 inserted new stuff, and thus return true. If we didn't want a phi node,
3412 and didn't make insertions, we haven't added anything new, so return
3413 false. */
3414 if (nophi && insertions)
3416 else if (nophi && !insertions)
3419 /* Now build a phi for the new variable. */
3420 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3421 prephitemp = create_tmp_var (type, "prephitmp");
3424 add_referenced_var (temp);
3426 if (TREE_CODE (type) == COMPLEX_TYPE
3427 || TREE_CODE (type) == VECTOR_TYPE)
3428 DECL_GIMPLE_REG_P (temp) = 1;
3429 phi = create_phi_node (temp, block);
3431 gimple_set_plf (phi, NECESSARY, false);
3432 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3433 VN_INFO (gimple_phi_result (phi))->value_id = val;
3434 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3435 FOR_EACH_EDGE (pred, ei, block->preds)
3437 pre_expr ae = avail[pred->src->index];
3438 gcc_assert (get_expr_type (ae) == type
3439 || useless_type_conversion_p (type, get_expr_type (ae)));
3440 if (ae->kind == CONSTANT)
3441 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3443 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3447 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3448 add_to_value (val, newphi);
3450 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3451 this insertion, since we test for the existence of this value in PHI_GEN
3452 before proceeding with the partial redundancy checks in insert_aux.
3454 The value may exist in AVAIL_OUT, in particular, it could be represented
3455 by the expression we are trying to eliminate, in which case we want the
3456 replacement to occur. If it's not present in AVAIL_OUT, we want it
3457 added here.
3459 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3460 this block, because if it did, it would have existed in our dominator's
3461 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3464 bitmap_insert_into_set (PHI_GEN (block), newphi);
3465 bitmap_value_replace_in_set (AVAIL_OUT (block),
3467 bitmap_insert_into_set (NEW_SETS (block),
3470 if (dump_file && (dump_flags & TDF_DETAILS))
3472 fprintf (dump_file, "Created phi ");
3473 print_gimple_stmt (dump_file, phi, 0, 0);
3474 fprintf (dump_file, " in block %d\n", block->index);
3482 /* Perform insertion of partially redundant values.
3483 For BLOCK, do the following:
3484 1. Propagate the NEW_SETS of the dominator into the current block.
3485 If the block has multiple predecessors,
3486 2a. Iterate over the ANTIC expressions for the block to see if
3487 any of them are partially redundant.
3488 2b. If so, insert them into the necessary predecessors to make
3489 the expression fully redundant.
3490 2c. Insert a new PHI merging the values of the predecessors.
3491 2d. Insert the new PHI, and the new expressions, into the
3493 3. Recursively call ourselves on the dominator children of BLOCK.
3495 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3496 do_regular_insertion and do_partial_partial_insertion. An
3497 illustrative sketch of the transformation follows. */
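/* Sketch of the transformation (not taken from the sources):

     if (cond)
       x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   Step 2b inserts pretmp_5 = a_2 + b_3 on the predecessor edge where
   the value is missing, steps 2c/2d create
   prephitmp_6 = PHI <x_1, pretmp_5> in the join block, and the
   eliminator later rewrites y_4 to use prephitmp_6, making the
   original computation fully redundant.  */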
3501 do_regular_insertion (basic_block block, basic_block dom)
3503 bool new_stuff = false;
3504 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3508 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3510 if (expr->kind != NAME)
3514 bool by_some = false;
3515 bool cant_insert = false;
3516 bool all_same = true;
3517 pre_expr first_s = NULL;
3520 pre_expr eprime = NULL;
3522 pre_expr edoubleprime = NULL;
3523 bool do_insertion = false;
3525 val = get_expr_value_id (expr);
3526 if (bitmap_set_contains_value (PHI_GEN (block), val))
3528 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3530 if (dump_file && (dump_flags & TDF_DETAILS))
3531 fprintf (dump_file, "Found fully redundant value\n");
3535 avail = XCNEWVEC (pre_expr, last_basic_block);
3536 FOR_EACH_EDGE (pred, ei, block->preds)
3538 unsigned int vprime;
3540 /* We should never run insertion for the exit block
3541 and so not come across fake pred edges. */
3542 gcc_assert (!(pred->flags & EDGE_FAKE));
3544 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3547 /* eprime will generally only be NULL if the
3548 value of the expression, translated
3549 through the PHI for this predecessor, is
3550 undefined. If that is the case, we can't
3551 make the expression fully redundant,
3552 because its value is undefined along a
3553 predecessor path. We can thus break out
3554 early because it doesn't matter what the
3555 rest of the results are. */
3562 eprime = fully_constant_expression (eprime);
3563 vprime = get_expr_value_id (eprime);
3564 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3566 if (edoubleprime == NULL)
3568 avail[bprime->index] = eprime;
3573 avail[bprime->index] = edoubleprime;
3575 /* We want to perform insertions to remove a redundancy on
3576 a path in the CFG we want to optimize for speed. */
3577 if (optimize_edge_for_speed_p (pred))
3578 do_insertion = true;
3579 if (first_s == NULL)
3580 first_s = edoubleprime;
3581 else if (!pre_expr_eq (first_s, edoubleprime))
3585 /* If we can insert it, it's not the same value
3586 already existing along every predecessor, and
3587 it's defined by some predecessor, it is
3588 partially redundant. */
3589 if (!cant_insert && !all_same && by_some)
3593 if (dump_file && (dump_flags & TDF_DETAILS))
3595 fprintf (dump_file, "Skipping partial redundancy for "
3597 print_pre_expr (dump_file, expr);
3598 fprintf (dump_file, " (%04d), no redundancy on an edge to be "
3599 "optimized for speed\n", val);
3602 else if (dbg_cnt (treepre_insert)
3603 && insert_into_preds_of_block (block,
3604 get_expression_id (expr),
3608 /* If all edges produce the same value and that value is
3609 an invariant, then the PHI has the same value on all
3610 edges. Note this. */
3611 else if (!cant_insert && all_same && eprime
3612 && (edoubleprime->kind == CONSTANT
3613 || edoubleprime->kind == NAME)
3614 && !value_id_constant_p (val))
3618 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3619 value_expressions, val);
3621 unsigned int new_val = get_expr_value_id (edoubleprime);
3622 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3624 pre_expr expr = expression_for_id (j);
3626 if (expr->kind == NAME)
3628 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3629 /* Just reset the value id and valnum so it is
3630 the same as the constant we have discovered. */
3631 if (edoubleprime->kind == CONSTANT)
3633 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3634 pre_stats.constified++;
3637 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3638 info->value_id = new_val;
3646 VEC_free (pre_expr, heap, exprs);
3651 /* Perform insertion for partially anticipatable expressions. There
3652 is only one case in which we insert for these: when the
3653 expression is partially anticipatable and fully available.
3654 In this case, we know that putting it earlier will enable us to
3655 remove the later computation. A hedged sketch follows. */
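/* Sketch (illustrative only, names invented):

     if (c_1)
       t_2 = a_4 + b_5;
     else
       t_3 = a_4 + b_5;
     <join>
     if (rare_6)
       y_7 = a_4 + b_5;

   At the join a_4 + b_5 is only partially anticipatable (it is
   computed on just one outgoing path), so regular PRE will not touch
   it, but it is fully available from every predecessor; inserting
   PHI <t_2, t_3> there lets the later computation be removed.  */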
3659 do_partial_partial_insertion (basic_block block, basic_block dom)
3661 bool new_stuff = false;
3662 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3666 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3668 if (expr->kind != NAME)
3673 bool cant_insert = false;
3676 pre_expr eprime = NULL;
3679 val = get_expr_value_id (expr);
3680 if (bitmap_set_contains_value (PHI_GEN (block), val))
3682 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3685 avail = XCNEWVEC (pre_expr, last_basic_block);
3686 FOR_EACH_EDGE (pred, ei, block->preds)
3688 unsigned int vprime;
3689 pre_expr edoubleprime;
3691 /* We should never run insertion for the exit block
3692 and so not come across fake pred edges. */
3693 gcc_assert (!(pred->flags & EDGE_FAKE));
3695 eprime = phi_translate (expr, ANTIC_IN (block),
3699 /* eprime will generally only be NULL if the
3700 value of the expression, translated
3701 through the PHI for this predecessor, is
3702 undefined. If that is the case, we can't
3703 make the expression fully redundant,
3704 because its value is undefined along a
3705 predecessor path. We can thus break out
3706 early because it doesn't matter what the
3707 rest of the results are. */
3714 eprime = fully_constant_expression (eprime);
3715 vprime = get_expr_value_id (eprime);
3716 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3718 if (edoubleprime == NULL)
3724 avail[bprime->index] = edoubleprime;
3728 /* If we can insert it, it's not the same value
3729 already existing along every predecessor, and
3730 it's defined by some predecessor, it is
3731 partially redundant. */
3732 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3734 pre_stats.pa_insert++;
3735 if (insert_into_preds_of_block (block, get_expression_id (expr),
3743 VEC_free (pre_expr, heap, exprs);
3748 insert_aux (basic_block block)
3751 bool new_stuff = false;
3756 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3761 bitmap_set_t newset = NEW_SETS (dom);
3764 /* Note that we need to value_replace both NEW_SETS, and
3765 AVAIL_OUT. In both sets the value may currently be
3766 represented by some non-simple expression that we want
3767 to replace with this one. */
3768 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3770 pre_expr expr = expression_for_id (i);
3771 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3772 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3775 if (!single_pred_p (block))
3777 new_stuff |= do_regular_insertion (block, dom);
3778 if (do_partial_partial)
3779 new_stuff |= do_partial_partial_insertion (block, dom);
3783 for (son = first_dom_son (CDI_DOMINATORS, block);
3785 son = next_dom_son (CDI_DOMINATORS, son))
3787 new_stuff |= insert_aux (son);
3793 /* Perform insertion of partially redundant values. */
3798 bool new_stuff = true;
3800 int num_iterations = 0;
3803 NEW_SETS (bb) = bitmap_set_new ();
3808 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3810 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3814 /* Add OP to EXP_GEN (block). */
3817 add_to_exp_gen (basic_block block, tree op)
3822 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3824 result = get_or_alloc_expr_for_name (op);
3825 bitmap_value_insert_into_set (EXP_GEN (block), result);
3829 /* Create value ids for PHI in BLOCK. */
3832 make_values_for_phi (gimple phi, basic_block block)
3834 tree result = gimple_phi_result (phi);
3836 /* We have no need for virtual phis, as they don't represent
3837 actual computations. */
3838 if (is_gimple_reg (result))
3840 pre_expr e = get_or_alloc_expr_for_name (result);
3841 add_to_value (get_expr_value_id (e), e);
3842 bitmap_insert_into_set (PHI_GEN (block), e);
3843 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3847 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3849 tree arg = gimple_phi_arg_def (phi, i);
3850 if (TREE_CODE (arg) == SSA_NAME)
3852 e = get_or_alloc_expr_for_name (arg);
3853 add_to_value (get_expr_value_id (e), e);
3860 /* Compute the AVAIL set for all basic blocks.
3862 This function performs value numbering of the statements in each basic
3863 block. The AVAIL sets are built from information we glean while doing
3864 this value numbering, since the AVAIL sets contain only one entry per
3865 value.
3867 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3868 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
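/* For instance (a sketch): if the immediate dominator of BLOCK
   computes t_1 = a_2 + b_3, then AVAIL_IN[BLOCK] already contains
   t_1's value; if BLOCK itself computes t_4 = c_5 * 2, TMP_GEN[BLOCK]
   gains t_4 and AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U { t_4 }.  */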
3871 compute_avail (void)
3874 basic_block block, son;
3875 basic_block *worklist;
3879 /* We pretend that default definitions are defined in the entry block.
3880 This includes function arguments and the static chain decl. */
3881 for (i = 1; i < num_ssa_names; ++i)
3883 tree name = ssa_name (i);
3886 || !SSA_NAME_IS_DEFAULT_DEF (name)
3887 || has_zero_uses (name)
3888 || !is_gimple_reg (name))
3891 e = get_or_alloc_expr_for_name (name);
3892 add_to_value (get_expr_value_id (e), e);
3894 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3895 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3898 /* Allocate the worklist. */
3899 worklist = XNEWVEC (basic_block, n_basic_blocks);
3901 /* Seed the algorithm by putting the dominator children of the entry
3902 block on the worklist. */
3903 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3905 son = next_dom_son (CDI_DOMINATORS, son))
3906 worklist[sp++] = son;
3908 /* Loop until the worklist is empty. */
3911 gimple_stmt_iterator gsi;
3914 unsigned int stmt_uid = 1;
3916 /* Pick a block from the worklist. */
3917 block = worklist[--sp];
3919 /* Initially, the set of available values in BLOCK is that of
3920 its immediate dominator. */
3921 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3923 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3925 /* Generate values for PHI nodes. */
3926 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3927 make_values_for_phi (gsi_stmt (gsi), block);
3929 BB_MAY_NOTRETURN (block) = 0;
3931 /* Now compute value numbers and populate value sets with all
3932 the expressions computed in BLOCK. */
3933 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3938 stmt = gsi_stmt (gsi);
3939 gimple_set_uid (stmt, stmt_uid++);
3941 /* Cache whether the basic-block has any non-visible side-effect
3942 or control flow.
3943 If this isn't a call or it is the last stmt in the
3944 basic-block then the CFG represents things correctly. */
3945 if (is_gimple_call (stmt)
3946 && !stmt_ends_bb_p (stmt))
3948 /* Non-looping const functions always return normally.
3949 Otherwise the call might not return or have side-effects
3950 that forbid hoisting possibly trapping expressions
3951 before it. */
3952 int flags = gimple_call_flags (stmt);
3953 if (!(flags & ECF_CONST)
3954 || (flags & ECF_LOOPING_CONST_OR_PURE))
3955 BB_MAY_NOTRETURN (block) = 1;
3958 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3960 pre_expr e = get_or_alloc_expr_for_name (op);
3962 add_to_value (get_expr_value_id (e), e);
3964 bitmap_insert_into_set (TMP_GEN (block), e);
3965 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3968 if (gimple_has_volatile_ops (stmt)
3969 || stmt_could_throw_p (stmt))
3972 switch (gimple_code (stmt))
3975 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3976 add_to_exp_gen (block, op);
3983 vn_reference_op_t vro;
3984 pre_expr result = NULL;
3985 VEC(vn_reference_op_s, heap) *ops = NULL;
3987 if (!can_value_number_call (stmt))
3990 copy_reference_ops_from_call (stmt, &ops);
3991 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3992 gimple_expr_type (stmt),
3993 ops, &ref, VN_NOWALK);
3994 VEC_free (vn_reference_op_s, heap, ops);
3998 for (i = 0; VEC_iterate (vn_reference_op_s,
4002 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4003 add_to_exp_gen (block, vro->op0);
4004 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4005 add_to_exp_gen (block, vro->op1);
4006 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4007 add_to_exp_gen (block, vro->op2);
4009 result = (pre_expr) pool_alloc (pre_expr_pool);
4010 result->kind = REFERENCE;
4012 PRE_EXPR_REFERENCE (result) = ref;
4014 get_or_alloc_expression_id (result);
4015 add_to_value (get_expr_value_id (result), result);
4017 bitmap_value_insert_into_set (EXP_GEN (block), result);
4023 pre_expr result = NULL;
4024 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4028 case tcc_comparison:
4033 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4034 gimple_assign_rhs_code (stmt),
4035 gimple_expr_type (stmt),
4036 gimple_assign_rhs1_ptr (stmt),
4042 for (i = 0; i < nary->length; i++)
4043 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4044 add_to_exp_gen (block, nary->op[i]);
4046 result = (pre_expr) pool_alloc (pre_expr_pool);
4047 result->kind = NARY;
4049 PRE_EXPR_NARY (result) = nary;
4053 case tcc_declaration:
4058 vn_reference_op_t vro;
4060 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4066 for (i = 0; VEC_iterate (vn_reference_op_s,
4070 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4071 add_to_exp_gen (block, vro->op0);
4072 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4073 add_to_exp_gen (block, vro->op1);
4074 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4075 add_to_exp_gen (block, vro->op2);
4077 result = (pre_expr) pool_alloc (pre_expr_pool);
4078 result->kind = REFERENCE;
4080 PRE_EXPR_REFERENCE (result) = ref;
4085 /* For any other statement that we don't
4086 recognize, simply add all referenced
4087 SSA_NAMEs to EXP_GEN. */
4088 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4089 add_to_exp_gen (block, op);
4093 get_or_alloc_expression_id (result);
4094 add_to_value (get_expr_value_id (result), result);
4096 bitmap_value_insert_into_set (EXP_GEN (block), result);
4105 /* Put the dominator children of BLOCK on the worklist of blocks
4106 to compute available sets for. */
4107 for (son = first_dom_son (CDI_DOMINATORS, block);
4109 son = next_dom_son (CDI_DOMINATORS, son))
4110 worklist[sp++] = son;
4116 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4117 than the available expressions for it. The insertion point is
4118 right before the first use in STMT. Returns the SSA_NAME that should
4119 be used for replacement. */
4122 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4124 basic_block bb = gimple_bb (stmt);
4125 gimple_stmt_iterator gsi;
4126 gimple_seq stmts = NULL;
4130 /* First create a value expression from the expression we want
4131 to insert and associate it with the value handle for SSA_VN. */
4132 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4136 /* Then use create_expression_by_pieces to generate a valid
4137 expression to insert at this point of the IL stream. */
4138 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4139 if (expr == NULL_TREE)
4141 gsi = gsi_for_stmt (stmt);
4142 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4147 /* Eliminate fully redundant computations. */
4152 VEC (gimple, heap) *to_remove = NULL;
4153 VEC (gimple, heap) *to_update = NULL;
4155 unsigned int todo = 0;
4156 gimple_stmt_iterator gsi;
4162 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4164 tree lhs = NULL_TREE;
4165 tree rhs = NULL_TREE;
4167 stmt = gsi_stmt (gsi);
4169 if (gimple_has_lhs (stmt))
4170 lhs = gimple_get_lhs (stmt);
4172 if (gimple_assign_single_p (stmt))
4173 rhs = gimple_assign_rhs1 (stmt);
4175 /* Lookup the RHS of the expression, see if we have an
4176 available computation for it. If so, replace the RHS with
4177 the available computation.
4180 We don't replace a global register variable when it is the RHS of
4181 a single assignment. We do replace local register variables, since GCC
4182 does not guarantee that a local variable will be allocated in a register. */
4183 if (gimple_has_lhs (stmt)
4184 && TREE_CODE (lhs) == SSA_NAME
4185 && !gimple_assign_ssa_name_copy_p (stmt)
4186 && (!gimple_assign_single_p (stmt)
4187 || (!is_gimple_min_invariant (rhs)
4188 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4189 || !is_global_var (rhs)
4190 || !DECL_HARD_REGISTER (rhs))))
4191 && !gimple_has_volatile_ops (stmt)
4192 && !has_zero_uses (lhs))
4195 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4196 pre_expr sprimeexpr;
4197 gimple orig_stmt = stmt;
4199 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4200 get_expr_value_id (lhsexpr),
4205 if (sprimeexpr->kind == CONSTANT)
4206 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4207 else if (sprimeexpr->kind == NAME)
4208 sprime = PRE_EXPR_NAME (sprimeexpr);
4213 /* If there is no existing leader but SCCVN knows this
4214 value is constant, use that constant. */
4215 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4217 sprime = VN_INFO (lhs)->valnum;
4218 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4219 TREE_TYPE (sprime)))
4220 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4222 if (dump_file && (dump_flags & TDF_DETAILS))
4224 fprintf (dump_file, "Replaced ");
4225 print_gimple_expr (dump_file, stmt, 0, 0);
4226 fprintf (dump_file, " with ");
4227 print_generic_expr (dump_file, sprime, 0);
4228 fprintf (dump_file, " in ");
4229 print_gimple_stmt (dump_file, stmt, 0, 0);
4231 pre_stats.eliminations++;
4232 propagate_tree_value_into_stmt (&gsi, sprime);
4233 stmt = gsi_stmt (gsi);
4236 /* If we removed EH side-effects from the statement, clean
4237 its EH information. */
4238 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4240 bitmap_set_bit (need_eh_cleanup,
4241 gimple_bb (stmt)->index);
4242 if (dump_file && (dump_flags & TDF_DETAILS))
4243 fprintf (dump_file, " Removed EH side-effects.\n");
4248 /* If there is no existing usable leader but SCCVN thinks
4249 it has an expression it wants to use as replacement,
4250 insert that. */
4251 if (!sprime || sprime == lhs)
4253 tree val = VN_INFO (lhs)->valnum;
4255 && TREE_CODE (val) == SSA_NAME
4256 && VN_INFO (val)->needs_insertion
4257 && can_PRE_operation (vn_get_expr_for (val)))
4258 sprime = do_SCCVN_insertion (stmt, val);
4262 && (rhs == NULL_TREE
4263 || TREE_CODE (rhs) != SSA_NAME
4264 || may_propagate_copy (rhs, sprime)))
4266 bool can_make_abnormal_goto
4267 = is_gimple_call (stmt)
4268 && stmt_can_make_abnormal_goto (stmt);
4270 gcc_assert (sprime != rhs);
4272 if (dump_file && (dump_flags & TDF_DETAILS))
4274 fprintf (dump_file, "Replaced ");
4275 print_gimple_expr (dump_file, stmt, 0, 0);
4276 fprintf (dump_file, " with ");
4277 print_generic_expr (dump_file, sprime, 0);
4278 fprintf (dump_file, " in ");
4279 print_gimple_stmt (dump_file, stmt, 0, 0);
4282 if (TREE_CODE (sprime) == SSA_NAME)
4283 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4285 /* We need to make sure the new and old types actually match,
4286 which may require adding a simple cast, which fold_convert
4287 will do for us. */
4288 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4289 && !useless_type_conversion_p (gimple_expr_type (stmt),
4290 TREE_TYPE (sprime)))
4291 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4293 pre_stats.eliminations++;
4294 propagate_tree_value_into_stmt (&gsi, sprime);
4295 stmt = gsi_stmt (gsi);
4298 /* If we removed EH side-effects from the statement, clean
4299 its EH information. */
4300 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4302 bitmap_set_bit (need_eh_cleanup,
4303 gimple_bb (stmt)->index);
4304 if (dump_file && (dump_flags & TDF_DETAILS))
4305 fprintf (dump_file, " Removed EH side-effects.\n");
4308 /* Likewise for AB side-effects. */
4309 if (can_make_abnormal_goto
4310 && !stmt_can_make_abnormal_goto (stmt))
4312 bitmap_set_bit (need_ab_cleanup,
4313 gimple_bb (stmt)->index);
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4315 fprintf (dump_file, " Removed AB side-effects.\n");
4319 /* If the statement is a scalar store, see if the expression
4320 has the same value number as its rhs. If so, the store is
4321 dead. */
4322 else if (gimple_assign_single_p (stmt)
4323 && !is_gimple_reg (gimple_assign_lhs (stmt))
4324 && (TREE_CODE (rhs) == SSA_NAME
4325 || is_gimple_min_invariant (rhs)))
4328 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4329 gimple_vuse (stmt), VN_WALK, NULL);
4330 if (TREE_CODE (rhs) == SSA_NAME)
4331 rhs = VN_INFO (rhs)->valnum;
4333 && operand_equal_p (val, rhs, 0))
4335 if (dump_file && (dump_flags & TDF_DETAILS))
4337 fprintf (dump_file, "Deleted redundant store ");
4338 print_gimple_stmt (dump_file, stmt, 0, 0);
4341 /* Queue stmt for removal. */
4342 VEC_safe_push (gimple, heap, to_remove, stmt);
4345 /* Visit COND_EXPRs and fold the comparison with the
4346 available value-numbers. */
4347 else if (gimple_code (stmt) == GIMPLE_COND)
4349 tree op0 = gimple_cond_lhs (stmt);
4350 tree op1 = gimple_cond_rhs (stmt);
4353 if (TREE_CODE (op0) == SSA_NAME)
4354 op0 = VN_INFO (op0)->valnum;
4355 if (TREE_CODE (op1) == SSA_NAME)
4356 op1 = VN_INFO (op1)->valnum;
4357 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4359 if (result && TREE_CODE (result) == INTEGER_CST)
4361 if (integer_zerop (result))
4362 gimple_cond_make_false (stmt);
4364 gimple_cond_make_true (stmt);
4366 todo = TODO_cleanup_cfg;
4369 /* Visit indirect calls and turn them into direct calls if
4370 possible. */
4371 if (is_gimple_call (stmt))
4373 tree orig_fn = gimple_call_fn (stmt);
4377 if (TREE_CODE (orig_fn) == SSA_NAME)
4378 fn = VN_INFO (orig_fn)->valnum;
4379 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4380 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4381 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4384 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4385 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4388 bool can_make_abnormal_goto
4389 = stmt_can_make_abnormal_goto (stmt);
4390 bool was_noreturn = gimple_call_noreturn_p (stmt);
4392 if (dump_file && (dump_flags & TDF_DETAILS))
4394 fprintf (dump_file, "Replacing call target with ");
4395 print_generic_expr (dump_file, fn, 0);
4396 fprintf (dump_file, " in ");
4397 print_gimple_stmt (dump_file, stmt, 0, 0);
4400 gimple_call_set_fn (stmt, fn);
4401 VEC_safe_push (gimple, heap, to_update, stmt);
4403 /* When changing a call into a noreturn call, cfg cleanup
4404 is needed to fix up the noreturn call. */
4405 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4406 todo |= TODO_cleanup_cfg;
4408 /* If we removed EH side-effects from the statement, clean
4409 its EH information. */
4410 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4412 bitmap_set_bit (need_eh_cleanup,
4413 gimple_bb (stmt)->index);
4414 if (dump_file && (dump_flags & TDF_DETAILS))
4415 fprintf (dump_file, " Removed EH side-effects.\n");
4418 /* Likewise for AB side-effects. */
4419 if (can_make_abnormal_goto
4420 && !stmt_can_make_abnormal_goto (stmt))
4422 bitmap_set_bit (need_ab_cleanup,
4423 gimple_bb (stmt)->index);
4424 if (dump_file && (dump_flags & TDF_DETAILS))
4425 fprintf (dump_file, " Removed AB side-effects.\n");
4428 /* Changing an indirect call to a direct call may
4429 have exposed different semantics. This may
4430 require an SSA update. */
4431 todo |= TODO_update_ssa_only_virtuals;
4436 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4438 gimple stmt, phi = gsi_stmt (gsi);
4439 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4440 pre_expr sprimeexpr, resexpr;
4441 gimple_stmt_iterator gsi2;
4443 /* We want to perform redundant PHI elimination. Do so by
4444 replacing the PHI with a single copy if possible.
4445 Do not touch inserted, single-argument or virtual PHIs. */
4446 if (gimple_phi_num_args (phi) == 1
4447 || !is_gimple_reg (res))
4453 resexpr = get_or_alloc_expr_for_name (res);
4454 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4455 get_expr_value_id (resexpr), NULL);
4458 if (sprimeexpr->kind == CONSTANT)
4459 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4460 else if (sprimeexpr->kind == NAME)
4461 sprime = PRE_EXPR_NAME (sprimeexpr);
4465 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4467 sprime = VN_INFO (res)->valnum;
4468 if (!useless_type_conversion_p (TREE_TYPE (res),
4469 TREE_TYPE (sprime)))
4470 sprime = fold_convert (TREE_TYPE (res), sprime);
4479 if (dump_file && (dump_flags & TDF_DETAILS))
4481 fprintf (dump_file, "Replaced redundant PHI node defining ");
4482 print_generic_expr (dump_file, res, 0);
4483 fprintf (dump_file, " with ");
4484 print_generic_expr (dump_file, sprime, 0);
4485 fprintf (dump_file, "\n");
4488 remove_phi_node (&gsi, false);
4490 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4491 && TREE_CODE (sprime) == SSA_NAME)
4492 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4494 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4495 sprime = fold_convert (TREE_TYPE (res), sprime);
4496 stmt = gimple_build_assign (res, sprime);
4497 SSA_NAME_DEF_STMT (res) = stmt;
4498 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4500 gsi2 = gsi_after_labels (b);
4501 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4502 /* Queue the copy for eventual removal. */
4503 VEC_safe_push (gimple, heap, to_remove, stmt);
4504 /* If we inserted this PHI node ourselves, it's not an elimination. */
4505 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4508 pre_stats.eliminations++;
4512 /* We cannot remove stmts during BB walk, especially not release SSA
4513 names there as this confuses the VN machinery. The stmts ending
4514 up in to_remove are either stores or simple copies. */
4515 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4517 tree lhs = gimple_assign_lhs (stmt);
4518 tree rhs = gimple_assign_rhs1 (stmt);
4519 use_operand_p use_p;
4522 /* If there is a single use only, propagate the equivalency
4523 instead of keeping the copy. */
4524 if (TREE_CODE (lhs) == SSA_NAME
4525 && TREE_CODE (rhs) == SSA_NAME
4526 && single_imm_use (lhs, &use_p, &use_stmt)
4527 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4529 SET_USE (use_p, rhs);
4530 update_stmt (use_stmt);
4531 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4532 && TREE_CODE (rhs) == SSA_NAME)
4533 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4536 /* If this is a store or a now unused copy, remove it. */
4537 if (TREE_CODE (lhs) != SSA_NAME
4538 || has_zero_uses (lhs))
4540 basic_block bb = gimple_bb (stmt);
4541 gsi = gsi_for_stmt (stmt);
4542 unlink_stmt_vdef (stmt);
4543 gsi_remove (&gsi, true);
4544 /* ??? gsi_remove doesn't tell us whether the stmt was
4545 in EH tables and thus whether we need to purge EH edges.
4546 Simply schedule the block for a cleanup. */
4547 bitmap_set_bit (need_eh_cleanup, bb->index);
4548 if (TREE_CODE (lhs) == SSA_NAME)
4549 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4550 release_defs (stmt);
4553 VEC_free (gimple, heap, to_remove);
4555 /* We cannot update call statements with virtual operands during
4556 SSA walk. This might remove them which in turn makes our
4557 VN lattice invalid. */
4558 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4560 VEC_free (gimple, heap, to_update);
4565 /* Borrow a bit of tree-ssa-dce.c for the moment.
4566 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4567 this may be a bit faster, and we may want critical edges kept split. */
4569 /* If OP's defining statement has not already been determined to be necessary,
4570 mark that statement necessary. Return the stmt, if it is newly
4571 necessary. */
4573 static inline gimple
4574 mark_operand_necessary (tree op)
4580 if (TREE_CODE (op) != SSA_NAME)
4583 stmt = SSA_NAME_DEF_STMT (op);
4586 if (gimple_plf (stmt, NECESSARY)
4587 || gimple_nop_p (stmt))
4590 gimple_set_plf (stmt, NECESSARY, true);
4594 /* Because we don't follow the standard PRE algorithm exactly, sometimes
4595 decide not to insert PHI nodes, and because value numbering of casts isn't
4596 perfect, we sometimes end up inserting dead code. This simple DCE-like
4597 pass removes any insertions we made that weren't actually used. */
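/* Example of what this catches (a sketch, names invented): insert ()
   may have placed pretmp_7 = a_1 + b_2 on an edge on the expectation
   that a later iteration would exploit it; if the eliminator instead
   found a cheaper leader, pretmp_7 ends up with zero uses and is
   deleted by this walk.  */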
static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
	bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	    }
	}
    }
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  BITMAP_FREE (worklist);
}
/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
   true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
   the number of visited blocks.  */
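/* As an illustration (a hypothetical CFG, not from a real dump): for the
   diamond  A -> {B, C} -> D,  the walk over predecessor edges below fills
   *POST_ORDER with A first, then B and C in some order, then D -- a
   reverse post-order of the CFG.  Traversing that array back to front
   then visits D before B and C, and those before A, which is the
   direction a backwards dataflow problem such as ANTIC wants to iterate
   in.  */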
static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up the CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the last edge onto the stack.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
	{
	  /* Mark that we have visited the source.  */
	  SET_BIT (visited, src->index);

	  if (EDGE_COUNT (src->preds) > 0)
	    /* Since the SRC node has been visited for the first
	       time, check its predecessors.  */
	    stack[sp++] = ei_start (src->preds);
	  else
	    post_order[post_order_num++] = src->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
	    post_order[post_order_num++] = dest->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }
  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
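/* init_pre below calls this with INCLUDE_ENTRY_EXIT false, so the
   returned order covers only the normal basic blocks.  */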
/* Initialize data structures used by PRE.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			 get_max_value_id () + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = BITMAP_ALLOC (NULL);
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));
  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
				  pre_expr_hash,
				  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre (bool do_fre)
{
  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);

  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  if (!bitmap_empty_p (need_ab_cleanup))
    {
      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_ab_cleanup);

  if (!do_fre)
    loop_optimizer_finalize ();
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
    {
      if (!do_fre)
	loop_optimizer_finalize ();

      return 0;
    }

  init_pre (do_fre);
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out",
			    bb->index);
	}
    }
  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();
  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);
  clear_expression_ids ();
  if (!do_fre)
    {
      remove_dead_inserted_code ();
      todo |= TODO_verify_flow;
    }

  scev_finalize ();
  fini_pre (do_fre);

  if (!do_fre)
    /* TODO: tail_merge_optimize may merge all predecessors of a block, in
       which case we can merge the block with the remaining predecessor of
       the block.  It should either:
       - call merge_blocks after each tail merge iteration
       - call merge_blocks after all tail merge iterations
       - mark TODO_cleanup_cfg when necessary
       - share the cfg cleanup with fini_pre.  */
    todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  return todo;
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_ggc_collect
    | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};