/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
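/* As a small illustration (not part of the implementation itself),
   consider a diamond written as C-like pseudo-GIMPLE:

       if (c)                          if (c)
         x_1 = a_5 + b_6;                x_1 = a_5 + b_6;
       else                   ==>      else
         ;                               t_9 = a_5 + b_6;
       y_2 = a_5 + b_6;                x_3 = PHI <x_1, t_9>;
                                       y_2 = x_3;

   a_5 + b_6 is ANTIC in both predecessors of the join but AVAIL out
   of only one of them, so step three inserts t_9 = a_5 + b_6 into the
   empty arm, and step four replaces the now fully redundant
   computation of y_2 with the value merged by the PHI.  */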
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
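/* For example (purely illustrative): if a_2 + b_3 is value-numbered
   the same as x_1, the value number is represented by the SSA_NAME
   x_1 and might be assigned value_id 5.  The bitmap sets below then
   store the dense integer 5 instead of SSA_NAME_VERSION (x_1), and a
   fake SSA_NAME is only manufactured when a representative is
   actually required.  */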
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
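/* Illustration: a set holding x_1 (value_id 4) and the equal-valued
   expressions a_2 + b_3 and t_5 (both value_id 7) has bits 4 and 7
   set in its values bitmap and the three expression ids set in its
   expressions bitmap, so value membership and expression membership
   can each be tested with a single bit test.  */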
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    CONSTANT,
    NAME,
    NARY,
    REFERENCE
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare two pre_exprs for equality.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash a pre_expr.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
/* Allocation pool for pre_expr objects.  */
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
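/* A typical (illustrative) use of the iteration macros above:

     unsigned int id;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, id, bi)
       print_pre_expr (stderr, expression_for_id (id));

   ID is bound to each expression id present in SET, in increasing
   order.  */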
/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
					 gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}

/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
						 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			     v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
	unsigned int id;
	id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	if (id == 0)
	  {
	    id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	    add_to_value (id, expr);
	  }
	return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (set->values, val);
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (set->values, val);
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy the bitmapped set ORIG into the bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}
/* Generate a topologically-ordered array from bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
	{
	  if (bitmap_bit_p (set->expressions, j))
	    VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
	}
    }

  return result;
}
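/* Why walking values yields topological order (an illustrative note):
   value ids are handed out as new values are discovered, so the
   operands of an expression carry smaller value ids than the
   expression itself.  With a_1 carrying value id 3 and a_1 * 2
   carrying value id 9, the sorted array lists a_1 before a_1 * 2,
   which is the order insertion needs so leaders are generated before
   their users.  */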
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

      bitmap_and_into (dest->values, orig->values);
      bitmap_copy (temp, dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (dest->values, value_id))
	    bitmap_clear_bit (dest->expressions, i);
	}
      BITMAP_FREE (temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
		    orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
	{
	  bitmap_clear_bit (set->expressions, i);
	  bitmap_set_bit (set->expressions, get_expression_id (expr));
	  return;
	}
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (set->values, val))
    bitmap_set_bit (set->expressions, expr->id);
}
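/* Note the asymmetry between the last two functions (illustrative):
   if a set already holds t_7 for value 5, bitmap_value_insert_into_set
   keeps t_7 when asked to add another expression of value 5, whereas
   bitmap_value_replace_in_set swaps the stored expression.  Both
   therefore maintain a single leader expression per value in the sets
   built through them.  */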
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As they all
	 do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
/* Return the folded (CONSTANT) version of E if E, when folded, is a
   gimple min_invariant.  Otherwise, return E unchanged.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_expression:
	    if (nary->opcode == TRUTH_NOT_EXPR)
	      goto do_unary;
	    if (nary->opcode != TRUTH_AND_EXPR
		&& nary->opcode != TRUTH_OR_EXPR
		&& nary->opcode != TRUTH_XOR_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
do_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
}
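/* An illustrative run of the above: for the NARY {plus_expr, a_1, 9}
   where a_1 shares its value id with the constant 5, the tcc_binary
   path rewrites the operands to 5 and 9, fold_binary yields 14, and
   the CONSTANT pre_expr for 14 is returned in place of the NARY.  */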
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &visited);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
  return NULL;
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	struct vn_nary_op_s newnary;
	/* The NARY structure is only guaranteed to have been
	   allocated to the nary->length operands.  */
	memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
				 - sizeof (tree) * (4 - nary->length)));

	for (i = 0; i < newnary.length; i++)
	  {
	    if (TREE_CODE (newnary.op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary.op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary.op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary.length,
						    newnary.opcode,
						    newnary.type,
						    newnary.op[0],
						    newnary.op[1],
						    newnary.op[2],
						    newnary.op[3],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary.length,
						 newnary.opcode,
						 newnary.type,
						 newnary.op[0],
						 newnary.op[1],
						 newnary.op[2],
						 newnary.op[3],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false, same_valid = true;
	unsigned int i, j;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree oldop0 = operand->op0;
	    tree oldop1 = operand->op1;
	    tree oldop2 = operand->op2;
	    tree op0 = oldop0;
	    tree op1 = oldop1;
	    tree op2 = oldop2;
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;

	    if (op0 && TREE_CODE (op0) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op0)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op0 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op0 != oldop0;

	    if (op1 && TREE_CODE (op1) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op1)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op1 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op1 && !is_gimple_min_invariant (op1))
	      break;
	    changed |= op1 != oldop1;
	    if (op2 && TREE_CODE (op2) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op2)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op2 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op2 && !is_gimple_min_invariant (op2))
	      break;
	    changed |= op2 != oldop2;

	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
	      newop.opcode = TREE_CODE (op0);
	    newop.type = type;
	    newop.op0 = op0;
	    newop.op1 = op1;
	    newop.op2 = op2;
	    VEC_replace (vn_reference_op_s, newoperands, j, &newop);
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
		&& VEC_index (vn_reference_op_s,
			      newoperands, j - 1)->opcode == INDIRECT_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, true);
	    if (result)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    if (result && is_gimple_min_invariant (result))
	      {
		gcc_assert (!newoperands);
		return get_or_alloc_expr_for_constant (result);
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id() + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	return expr;
      }
      break;

    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
	return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
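/* A worked (illustrative) phi translation: given

     block_3:  x_5 = PHI <x_1 (block_1), x_2 (block_2)>
               y_6 = x_5 + 1;

   translating the NARY x_5 + 1 out of block_3 along the edge from
   block_2 substitutes x_2 for x_5, producing x_2 + 1 with either the
   value id SCCVN already recorded for that expression or a fresh one.
   Non-NAME results are memoized in phi_translate_table under the
   {expression, predecessor} pair.  */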
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
				set->expressions, 0, i, bi)
	{
	  pre_expr val = expression_for_id (i);
	  /* At the point where stmt is not null, there should always
	     be an SSA_NAME first in the list of expressions.  */
	  if (stmt)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
	      if (gimple_code (def_stmt) != GIMPLE_PHI
		  && gimple_bb (def_stmt) == gimple_bb (stmt)
		  && gimple_uid (def_stmt) >= gimple_uid (stmt))
		continue;
	    }
	  return val;
	}
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
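/* The EXPR_DIES cache above packs two bits per expression id: bit
   ID * 2 says the answer for this block has been computed, and bit
   ID * 2 + 1 stores the answer itself.  For instance, "computed, and
   the value does not die" is encoded as bit ID * 2 set with bit
   ID * 2 + 1 clear.  */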
#define union_contains_value(SET1, SET2, VAL) \
  (bitmap_set_contains_value ((SET1), (VAL)) \
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))

/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */

static bool
vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
		   vn_reference_op_t vro)
{
  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op0;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op1;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
    {
      struct pre_expr_d temp;
      temp.kind = NAME;
      temp.id = 0;
      PRE_EXPR_NAME (&temp) = vro->op2;
      temp.id = lookup_expression_id (&temp);
      if (temp.id == 0)
	return false;
      if (!union_contains_value (set1, set2,
				 get_expr_value_id (&temp)))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
	       basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  {
	    if (TREE_CODE (nary->op[i]) == SSA_NAME)
	      {
		struct pre_expr_d temp;
		temp.kind = NAME;
		temp.id = 0;
		PRE_EXPR_NAME (&temp) = nary->op[i];
		temp.id = lookup_expression_id (&temp);
		if (temp.id == 0)
		  return false;
		if (!union_contains_value (set1, set2,
					   get_expr_value_id (&temp)))
		  return false;
	      }
	  }
	/* If the NARY may trap make sure the block does not contain
	   a possible exit point.
	   ??? This is overly conservative if we translate AVAIL_OUT
	   as the available expression might be after the exit point.  */
	if (BB_MAY_NOTRETURN (block)
	    && vn_nary_may_trap (nary))
	  return false;
	return true;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
	  {
	    if (!vro_valid_in_sets (set1, set2, vro))
	      return false;
	  }
	if (ref->vuse)
	  {
	    gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	    if (!gimple_nop_p (def_stmt)
		&& gimple_bb (def_stmt) != block
		&& !dominated_by_p (CDI_DOMINATORS,
				    block, gimple_bb (def_stmt)))
	      return false;
	  }
	return !value_dies_in_block_x (expr, block);
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set1, set2, expr, block))
	bitmap_remove_from_set (set1, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (!valid_in_sets (set, NULL, expr, block))
	bitmap_remove_from_set (set, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */

static bool
defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
			      basic_block block, basic_block phiblock)
{
  if (!BB_VISITED (phiblock))
    {
      SET_BIT (changed_blocks, block->index);
      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 1;
      return false;
    }
  else
    phi_translate_set (dest, source, block, phiblock);
  return true;
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
      */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
					 block, succ_bb))
	{
	  changed = true;
	  goto maybe_dump_sets;
	}
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first = NULL;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e->dest;
	  else if (BB_VISITED (e->dest))
	    VEC_quick_push (basic_block, worklist, e->dest);
	}

      /* Of multiple successors we have to have visited one already.  */
      if (!first)
	{
	  SET_BIT (changed_blocks, block->index);
	  BB_VISITED (block) = 0;
	  BB_DEFERRED (block) = 1;
	  changed = true;
	  VEC_free (basic_block, heap, worklist);
	  goto maybe_dump_sets;
	}

      if (!gimple_seq_empty_p (phi_nodes (first)))
	phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
      else
	bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));

      for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  /* !old->expressions can happen when we deferred a block.  */
  if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	{
	  fprintf (dump_file,
		   "Block %d was deferred for a future iteration.\n",
		   block->index);
	}
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
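/* An illustrative evaluation of the ANTIC equations: if block_5 has
   successors block_6 and block_7 with

     ANTIC_IN[block_6] = { a_1 + b_2 }
     ANTIC_IN[block_7] = { a_1 + b_2, c_3 * 2 }

   then ANTIC_OUT[block_5] is the intersection { a_1 + b_2 }; if
   block_5 itself computes c_3 * 2 (so EXP_GEN contains it) and
   generates no temporary feeding either expression, ANTIC_IN[block_5]
   becomes clean ({ a_1 + b_2, c_3 * 2 }).  */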
2336 /* Compute PARTIAL_ANTIC for BLOCK.
2338 If succs(BLOCK) > 1 then
2339 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2340 in ANTIC_OUT for all succ(BLOCK)
2341 else if succs(BLOCK) == 1 then
2342 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2344 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2349 compute_partial_antic_aux (basic_block block,
2350 bool block_has_abnormal_pred_edge)
2352 bool changed = false;
2353 bitmap_set_t old_PA_IN;
2354 bitmap_set_t PA_OUT;
2357 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2359 old_PA_IN = PA_OUT = NULL;
2361 /* If any edges from predecessors are abnormal, antic_in is empty,
2363 if (block_has_abnormal_pred_edge)
2364 goto maybe_dump_sets;
2366 /* If there are too many partially anticipatable values in the
2367 block, phi_translate_set can take an exponential time: stop
2368 before the translation starts. */
2370 && single_succ_p (block)
2371 && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
2372 goto maybe_dump_sets;
2374 old_PA_IN = PA_IN (block);
2375 PA_OUT = bitmap_set_new ();
2377 /* If the block has no successors, PA_OUT is empty. */
2378 if (EDGE_COUNT (block->succs) == 0)
2380 /* If we have one successor, we could have some phi nodes to
2381 translate through. Note that we can't phi translate across DFS
2382 back edges in partial antic, because it uses a union operation on
2383 the successors. For recurrences like IV's, we will end up
2384 generating a new value in the set on each go around (i + 3 (VH.1),
2385 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2386 else if (single_succ_p (block))
2388 basic_block succ = single_succ (block);
2389 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2390 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2392 /* If we have multiple successors, we take the union of all of them. */
2396 VEC(basic_block, heap) * worklist;
2400 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2401 FOR_EACH_EDGE (e, ei, block->succs)
2403 if (e->flags & EDGE_DFS_BACK)
2405 VEC_quick_push (basic_block, worklist, e->dest);
2407 if (VEC_length (basic_block, worklist) > 0)
2409 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2414 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2415 bitmap_value_insert_into_set (PA_OUT,
2416 expression_for_id (i));
2417 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2419 bitmap_set_t pa_in = bitmap_set_new ();
2420 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2421 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2422 bitmap_value_insert_into_set (PA_OUT,
2423 expression_for_id (i));
2424 bitmap_set_free (pa_in);
2427 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2428 bitmap_value_insert_into_set (PA_OUT,
2429 expression_for_id (i));
2432 VEC_free (basic_block, heap, worklist);
2435 /* PA_IN starts with PA_OUT - TMP_GEN.
2436 Then we subtract things from ANTIC_IN. */
2437 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2439 /* For partial antic, we want to put back in the phi results, since
2440 we will properly avoid making them partially antic over backedges. */
2441 bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
2442 bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
2444 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2445 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2447 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2449 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2452 SET_BIT (changed_blocks, block->index);
2453 FOR_EACH_EDGE (e, ei, block->preds)
2454 SET_BIT (changed_blocks, e->src->index);
2457 RESET_BIT (changed_blocks, block->index);
2460 if (dump_file && (dump_flags & TDF_DETAILS))
2463 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2465 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2468 bitmap_set_free (old_PA_IN);
2470 bitmap_set_free (PA_OUT);
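/* A sketch of why PA_OUT uses a union where ANTIC_OUT uses an
   intersection (hypothetical CFG):

       if (c_1)            <- BLOCK, successors S1 and S2
         x_2 = a_3 + b_4;  <- S1 computes a_3 + b_4
       else
         ;                 <- S2 does not

   a_3 + b_4 is missing from ANTIC_IN (S2), so the intersection keeps
   it out of ANTIC_OUT (BLOCK) and regular PRE will not insert it.
   The union above still records it in PA_OUT (BLOCK): it is partially
   anticipatable there, which is exactly what
   do_partial_partial_insertion later exploits at -O3.  */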
2474 /* Compute ANTIC and partial ANTIC sets. */
2477 compute_antic (void)
2479 bool changed = true;
2480 int num_iterations = 0;
2484 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2485 We pre-build the map of blocks with incoming abnormal edges here. */
2486 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2487 sbitmap_zero (has_abnormal_preds);
2494 FOR_EACH_EDGE (e, ei, block->preds)
2496 e->flags &= ~EDGE_DFS_BACK;
2497 if (e->flags & EDGE_ABNORMAL)
2499 SET_BIT (has_abnormal_preds, block->index);
2504 BB_VISITED (block) = 0;
2505 BB_DEFERRED (block) = 0;
2507 /* While we are here, give empty ANTIC_IN sets to each block. */
2508 ANTIC_IN (block) = bitmap_set_new ();
2509 PA_IN (block) = bitmap_set_new ();
2512 /* At the exit block we anticipate nothing. */
2513 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2514 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2515 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2517 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2518 sbitmap_ones (changed_blocks);
2521 if (dump_file && (dump_flags & TDF_DETAILS))
2522 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2525 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2527 if (TEST_BIT (changed_blocks, postorder[i]))
2529 basic_block block = BASIC_BLOCK (postorder[i]);
2530 changed |= compute_antic_aux (block,
2531 TEST_BIT (has_abnormal_preds,
2535 #ifdef ENABLE_CHECKING
2536 /* Theoretically possible, but *highly* unlikely. */
2537 gcc_assert (num_iterations < 500);
2541 statistics_histogram_event (cfun, "compute_antic iterations",
2544 if (do_partial_partial)
2546 sbitmap_ones (changed_blocks);
2547 mark_dfs_back_edges ();
2552 if (dump_file && (dump_flags & TDF_DETAILS))
2553 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2556 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2558 if (TEST_BIT (changed_blocks, postorder[i]))
2560 basic_block block = BASIC_BLOCK (postorder[i]);
2562 |= compute_partial_antic_aux (block,
2563 TEST_BIT (has_abnormal_preds,
2567 #ifdef ENABLE_CHECKING
2568 /* Theoretically possible, but *highly* unlikely. */
2569 gcc_assert (num_iterations < 500);
2572 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2575 sbitmap_free (has_abnormal_preds);
2576 sbitmap_free (changed_blocks);
2579 /* Return true if we can value number the call in STMT. This is true
2580 if we have a pure or constant call. */
2583 can_value_number_call (gimple stmt)
2585 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
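/* Illustration (hypothetical declarations, not part of this file):

       extern int isq (int) __attribute__ ((const));

       x_1 = isq (a_2);
       y_3 = isq (a_2);   <- gets the same value number as x_1

   An ECF_CONST or ECF_PURE call depends only on its arguments (and,
   for pure, on memory it merely reads), so two calls with value-equal
   arguments and matching VUSEs receive the same value number and the
   second becomes redundant.  An unannotated call may clobber
   anything, so we refuse to value number it here.  */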
2590 /* Return true if OP is a tree which we can perform PRE on.
2591 This may not match the operations we can value number, but in
2592 a perfect world it would. */
2595 can_PRE_operation (tree op)
2597 return UNARY_CLASS_P (op)
2598 || BINARY_CLASS_P (op)
2599 || COMPARISON_CLASS_P (op)
2600 || TREE_CODE (op) == INDIRECT_REF
2601 || TREE_CODE (op) == COMPONENT_REF
2602 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2603 || TREE_CODE (op) == CALL_EXPR
2604 || TREE_CODE (op) == ARRAY_REF;
2608 /* Inserted expressions are placed onto this worklist, which is used
2609 for performing quick dead code elimination of insertions we made
2610 that didn't turn out to be necessary. */
2611 static VEC(gimple,heap) *inserted_exprs;
2612 static bitmap inserted_phi_names;
2614 /* Pool allocated fake store expressions are placed onto this
2615 worklist, which, after performing dead code elimination, is walked
2616 to see which expressions need to be put into GC'able memory. */
2617 static VEC(gimple, heap) *need_creation;
2619 /* The actual worker for create_component_ref_by_pieces. */
2622 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2623 unsigned int *operand, gimple_seq *stmts,
2626 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2630 switch (currop->opcode)
2634 tree folded, sc = currop->op1;
2635 unsigned int nargs = 0;
2636 tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2637 ref->operands) - 1);
2638 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2640 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2645 folded = build_call_array (currop->type,
2646 TREE_CODE (currop->op0) == FUNCTION_DECL
2647 ? build_fold_addr_expr (currop->op0)
2653 pre_expr scexpr = get_or_alloc_expr_for (sc);
2654 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2657 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2662 case TARGET_MEM_REF:
2664 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2667 tree genop0 = NULL_TREE;
2668 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2674 op0expr = get_or_alloc_expr_for (currop->op0);
2675 genop0 = find_or_generate_expression (block, op0expr,
2680 if (DECL_P (baseop))
2681 return build6 (TARGET_MEM_REF, currop->type,
2683 genop0, currop->op1, currop->op2,
2684 unshare_expr (nextop->op1));
2686 return build6 (TARGET_MEM_REF, currop->type,
2688 genop0, currop->op1, currop->op2,
2689 unshare_expr (nextop->op1));
2695 gcc_assert (is_gimple_min_invariant (currop->op0));
2701 case VIEW_CONVERT_EXPR:
2704 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2709 folded = fold_build1 (currop->opcode, currop->type,
2714 case ALIGN_INDIRECT_REF:
2715 case MISALIGNED_INDIRECT_REF:
2719 tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2724 genop1 = fold_convert (build_pointer_type (currop->type),
2727 if (currop->opcode == MISALIGNED_INDIRECT_REF)
2728 folded = fold_build2 (currop->opcode, currop->type,
2729 genop1, currop->op1);
2731 folded = fold_build1 (currop->opcode, currop->type,
2739 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2741 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2742 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2748 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2751 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2754 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2759 /* For array ref vn_reference_op's, operand 1 of the array ref
2760 is op0 of the reference op and operand 3 of the array ref is op2. */
2762 case ARRAY_RANGE_REF:
2766 tree genop1 = currop->op0;
2768 tree genop2 = currop->op1;
2770 tree genop3 = currop->op2;
2772 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2776 op1expr = get_or_alloc_expr_for (genop1);
2777 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2782 /* Drop zero minimum index. */
2783 if (tree_int_cst_equal (genop2, integer_zero_node))
2787 op2expr = get_or_alloc_expr_for (genop2);
2788 genop2 = find_or_generate_expression (block, op2expr, stmts,
2796 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2797 /* We can't always put a size in units of the element alignment
2798 here as the element alignment may not be visible. See
2799 PR43783. Simply drop the element size for constant sizes. */
2801 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2805 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2806 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2807 op3expr = get_or_alloc_expr_for (genop3);
2808 genop3 = find_or_generate_expression (block, op3expr, stmts,
2814 return build4 (currop->opcode, currop->type, genop0, genop1,
2821 tree genop2 = currop->op1;
2823 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2827 /* op1 should be a FIELD_DECL, which is represented by itself. */
2832 op2expr = get_or_alloc_expr_for (genop2);
2833 genop2 = find_or_generate_expression (block, op2expr, stmts,
2839 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2845 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2846 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2867 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2868 COMPONENT_REF, INDIRECT_REF or ARRAY_REF portion, because we'd end up
2869 trying to rename aggregates into SSA form directly, which is a no-no.
2871 Thus, this routine doesn't create temporaries, it just builds a
2872 single access expression for the array, calling
2873 find_or_generate_expression to build the innermost pieces.
2875 This function is a subroutine of create_expression_by_pieces, and
2876 should not be called on its own unless you really know what you
2880 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2881 gimple_seq *stmts, gimple domstmt)
2883 unsigned int op = 0;
2884 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
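/* A rough sketch of the recursion above (hypothetical reference): for
   s.a[i_1] the operand vector holds, outermost first, approximately

       ARRAY_REF <op0 = i_1>, COMPONENT_REF <op0 = a>, VAR_DECL s

   and create_component_ref_by_pieces_1 rebuilds the tree from those
   pieces, calling find_or_generate_expression only for scalar operands
   such as the index i_1 (which may therefore itself be a PRE-inserted
   temporary), while the aggregate skeleton is rebuilt wholesale and is
   never assigned to a temporary of its own.  */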
2887 /* Find a leader for an expression, or generate one using
2888 create_expression_by_pieces if it's ANTIC but
2890 BLOCK is the basic_block we are looking for leaders in.
2891 EXPR is the expression to find a leader or generate for.
2892 STMTS is the statement list to put the inserted expressions on.
2893 Returns the SSA_NAME of the LHS of the generated expression or the
2895 DOMSTMT if non-NULL is a statement that should be dominated by
2896 all uses in the generated expression. If DOMSTMT is non-NULL this
2897 routine can fail and return NULL_TREE. Otherwise it will assert
2901 find_or_generate_expression (basic_block block, pre_expr expr,
2902 gimple_seq *stmts, gimple domstmt)
2904 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2905 get_expr_value_id (expr), domstmt);
2909 if (leader->kind == NAME)
2910 genop = PRE_EXPR_NAME (leader);
2911 else if (leader->kind == CONSTANT)
2912 genop = PRE_EXPR_CONSTANT (leader);
2915 /* If it's still NULL, it must be a complex expression, so generate
2916 it recursively. Not so for FRE though. */
2920 bitmap_set_t exprset;
2921 unsigned int lookfor = get_expr_value_id (expr);
2922 bool handled = false;
2926 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2927 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2929 pre_expr temp = expression_for_id (i);
2930 if (temp->kind != NAME)
2933 genop = create_expression_by_pieces (block, temp, stmts,
2935 get_expr_type (expr));
2939 if (!handled && domstmt)
2942 gcc_assert (handled);
2947 #define NECESSARY GF_PLF_1
2949 /* Create an expression in pieces, so that we can handle very complex
2950 expressions that may be ANTIC, but not necessarily GIMPLE.
2951 BLOCK is the basic block the expression will be inserted into,
2952 EXPR is the expression to insert (in value form)
2953 STMTS is a statement list to append the necessary insertions into.
2955 This function will die if we hit some value that shouldn't be
2956 ANTIC but is (i.e. there is no leader for it, or for its components).
2957 This function may also generate expressions that are themselves
2958 partially or fully redundant. Those that are will be either made
2959 fully redundant during the next iteration of insert (for partially
2960 redundant ones), or eliminated by eliminate (for fully redundant
2963 If DOMSTMT is non-NULL then we make sure that all uses in the
2964 expressions dominate that statement. In this case the function
2965 can return NULL_TREE to signal failure. */
2968 create_expression_by_pieces (basic_block block, pre_expr expr,
2969 gimple_seq *stmts, gimple domstmt, tree type)
2973 gimple_seq forced_stmts = NULL;
2974 unsigned int value_id;
2975 gimple_stmt_iterator gsi;
2976 tree exprtype = type ? type : get_expr_type (expr);
2982 /* We may hit the NAME/CONSTANT case if we have to convert types
2983 that value numbering saw through. */
2985 folded = PRE_EXPR_NAME (expr);
2988 folded = PRE_EXPR_CONSTANT (expr);
2992 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2993 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
2998 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2999 switch (nary->length)
3003 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3004 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3005 tree genop1 = find_or_generate_expression (block, op1,
3007 tree genop2 = find_or_generate_expression (block, op2,
3009 if (!genop1 || !genop2)
3011 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3012 may be a constant with the wrong type. */
3013 if (nary->opcode == POINTER_PLUS_EXPR)
3015 genop1 = fold_convert (nary->type, genop1);
3016 genop2 = fold_convert (sizetype, genop2);
3020 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3021 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3024 folded = fold_build2 (nary->opcode, nary->type,
3030 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3031 tree genop1 = find_or_generate_expression (block, op1,
3035 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3037 folded = fold_build1 (nary->opcode, nary->type,
3050 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3051 folded = fold_convert (exprtype, folded);
3053 /* Force the generated expression to be a sequence of GIMPLE statements.
3055 We have to call unshare_expr because force_gimple_operand may
3056 modify the tree we pass to it. */
3057 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3060 /* If we have any intermediate expressions to the value sets, add them
3061 to the value sets and chain them in the instruction stream. */
3064 gsi = gsi_start (forced_stmts);
3065 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3067 gimple stmt = gsi_stmt (gsi);
3068 tree forcedname = gimple_get_lhs (stmt);
3071 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3072 if (TREE_CODE (forcedname) == SSA_NAME)
3074 VN_INFO_GET (forcedname)->valnum = forcedname;
3075 VN_INFO (forcedname)->value_id = get_next_value_id ();
3076 nameexpr = get_or_alloc_expr_for_name (forcedname);
3077 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3079 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3080 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3082 mark_symbols_for_renaming (stmt);
3084 gimple_seq_add_seq (stmts, forced_stmts);
3087 /* Build and insert the assignment of the end result to the temporary
3088 that we will return. */
3089 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3091 pretemp = create_tmp_var (exprtype, "pretmp");
3092 get_var_ann (pretemp);
3096 add_referenced_var (temp);
3098 if (TREE_CODE (exprtype) == COMPLEX_TYPE
3099 || TREE_CODE (exprtype) == VECTOR_TYPE)
3100 DECL_GIMPLE_REG_P (temp) = 1;
3102 newstmt = gimple_build_assign (temp, folded);
3103 name = make_ssa_name (temp, newstmt);
3104 gimple_assign_set_lhs (newstmt, name);
3105 gimple_set_plf (newstmt, NECESSARY, false);
3107 gimple_seq_add_stmt (stmts, newstmt);
3108 VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
3110 /* All the symbols in NEWEXPR should be put into SSA form. */
3111 mark_symbols_for_renaming (newstmt);
3113 /* Add a value number to the temporary.
3114 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3115 we are creating the expression by pieces, and this particular piece of
3116 the expression may have been represented. There is no harm in replacing it here. */
3118 VN_INFO_GET (name)->valnum = name;
3119 value_id = get_expr_value_id (expr);
3120 VN_INFO (name)->value_id = value_id;
3121 nameexpr = get_or_alloc_expr_for_name (name);
3122 add_to_value (value_id, nameexpr);
3124 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3125 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3127 pre_stats.insertions++;
3128 if (dump_file && (dump_flags & TDF_DETAILS))
3130 fprintf (dump_file, "Inserted ");
3131 print_gimple_stmt (dump_file, newstmt, 0, 0);
3132 fprintf (dump_file, " in predecessor %d\n", block->index);
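/* A hypothetical example of the output: asked to insert the value
   (a_1 + b_2) * c_3 where a_1 + b_2 has no leader in AVAIL_OUT, the
   recursion through find_or_generate_expression emits roughly

       pretmp.4_10 = a_1 + b_2;
       pretmp.4_11 = pretmp.4_10 * c_3;

   onto STMTS (the names are illustrative).  Each statement's LHS gets
   a value id and is entered into NEW_SETS and AVAIL_OUT immediately,
   so a later insertion in a dominated block finds pretmp.4_10 as a
   leader instead of re-creating the addition.  */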
3139 /* Returns true if we want to inhibit the insertions of PHI nodes
3140 for the given EXPR for basic block BB (a member of a loop).
3141 We want to do this when we fear that the induction variable we
3142 create might inhibit vectorization. */
3145 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3147 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3148 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3149 vn_reference_op_t op;
3152 /* If we aren't going to vectorize we don't inhibit anything. */
3153 if (!flag_tree_vectorize)
3156 /* Otherwise we inhibit the insertion when the address of the
3157 memory reference is a simple induction variable. In other
3158 cases the vectorizer won't do anything anyway (either it's
3159 loop invariant or a complicated expression). */
3160 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3165 case ARRAY_RANGE_REF:
3166 if (TREE_CODE (op->op0) != SSA_NAME)
3171 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3173 /* Default defs are loop invariant. */
3176 /* Defined outside this loop, also loop invariant. */
3177 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3179 /* If it's a simple induction variable, inhibit insertion:
3180 the vectorizer might be interested in this one. */
3181 if (simple_iv (bb->loop_father, bb->loop_father,
3182 op->op0, &iv, true))
3184 /* No simple IV, vectorizer can't do anything, hence no
3185 reason to inhibit the transformation for this operand. */
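/* Hypothetical case for the heuristic above:

       for (i_1 = 0; i_1 < n_2; i_1 = i_1 + 1)
         sum_3 = sum_3 + a_4[i_1];

   The load's address is governed by the simple IV i_1, so simple_iv ()
   succeeds and the PHI insertion is inhibited: materializing the
   address as its own PHI would hand the vectorizer a manually
   strength-reduced IV it copes with far less well than the plain
   a_4[i_1] form.  */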
3195 /* Insert the to-be-made-available values of expression EXPRNUM for each
3196 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3197 merge the result with a phi node, given the same value number as
3198 NODE. Return true if we have inserted new stuff. */
3201 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3204 pre_expr expr = expression_for_id (exprnum);
3206 unsigned int val = get_expr_value_id (expr);
3208 bool insertions = false;
3213 tree type = get_expr_type (expr);
3217 if (dump_file && (dump_flags & TDF_DETAILS))
3219 fprintf (dump_file, "Found partial redundancy for expression ");
3220 print_pre_expr (dump_file, expr);
3221 fprintf (dump_file, " (%04d)\n", val);
3224 /* Make sure we aren't creating an induction variable. */
3225 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3227 bool firstinsideloop = false;
3228 bool secondinsideloop = false;
3229 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3230 EDGE_PRED (block, 0)->src);
3231 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3232 EDGE_PRED (block, 1)->src);
3233 /* Induction variables only have one edge inside the loop. */
3234 if ((firstinsideloop ^ secondinsideloop)
3235 && (expr->kind != REFERENCE
3236 || inhibit_phi_insertion (block, expr)))
3238 if (dump_file && (dump_flags & TDF_DETAILS))
3239 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3244 /* Make the necessary insertions. */
3245 FOR_EACH_EDGE (pred, ei, block->preds)
3247 gimple_seq stmts = NULL;
3250 eprime = avail[bprime->index];
3252 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3254 builtexpr = create_expression_by_pieces (bprime,
3258 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3259 gsi_insert_seq_on_edge (pred, stmts);
3260 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3263 else if (eprime->kind == CONSTANT)
3265 /* Constants may not have the right type; fold_convert
3266 should give us back a constant with the right type.
3268 tree constant = PRE_EXPR_CONSTANT (eprime);
3269 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3271 tree builtexpr = fold_convert (type, constant);
3272 if (!is_gimple_min_invariant (builtexpr))
3274 tree forcedexpr = force_gimple_operand (builtexpr,
3277 if (!is_gimple_min_invariant (forcedexpr))
3279 if (forcedexpr != builtexpr)
3281 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3282 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3286 gimple_stmt_iterator gsi;
3287 gsi = gsi_start (stmts);
3288 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3290 gimple stmt = gsi_stmt (gsi);
3291 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3292 gimple_set_plf (stmt, NECESSARY, false);
3294 gsi_insert_seq_on_edge (pred, stmts);
3296 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3301 else if (eprime->kind == NAME)
3303 /* We may have to do a conversion because our value
3304 numbering can look through types in certain cases, but
3305 our IL requires all operands of a phi node have the same type. */
3307 tree name = PRE_EXPR_NAME (eprime);
3308 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3312 builtexpr = fold_convert (type, name);
3313 forcedexpr = force_gimple_operand (builtexpr,
3317 if (forcedexpr != name)
3319 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3320 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3325 gimple_stmt_iterator gsi;
3326 gsi = gsi_start (stmts);
3327 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3329 gimple stmt = gsi_stmt (gsi);
3330 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3331 gimple_set_plf (stmt, NECESSARY, false);
3333 gsi_insert_seq_on_edge (pred, stmts);
3335 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3339 /* If we didn't want a phi node, and we made insertions, we still have
3340 inserted new stuff, and thus return true. If we didn't want a phi node,
3341 and didn't make insertions, we haven't added anything new, so return false. */
3343 if (nophi && insertions)
3345 else if (nophi && !insertions)
3348 /* Now build a phi for the new variable. */
3349 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3351 prephitemp = create_tmp_var (type, "prephitmp");
3352 get_var_ann (prephitemp);
3356 add_referenced_var (temp);
3358 if (TREE_CODE (type) == COMPLEX_TYPE
3359 || TREE_CODE (type) == VECTOR_TYPE)
3360 DECL_GIMPLE_REG_P (temp) = 1;
3361 phi = create_phi_node (temp, block);
3363 gimple_set_plf (phi, NECESSARY, false);
3364 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3365 VN_INFO (gimple_phi_result (phi))->value_id = val;
3366 VEC_safe_push (gimple, heap, inserted_exprs, phi);
3367 bitmap_set_bit (inserted_phi_names,
3368 SSA_NAME_VERSION (gimple_phi_result (phi)));
3369 FOR_EACH_EDGE (pred, ei, block->preds)
3371 pre_expr ae = avail[pred->src->index];
3372 gcc_assert (get_expr_type (ae) == type
3373 || useless_type_conversion_p (type, get_expr_type (ae)));
3374 if (ae->kind == CONSTANT)
3375 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3377 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3381 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3382 add_to_value (val, newphi);
3384 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3385 this insertion, since we test for the existence of this value in PHI_GEN
3386 before proceeding with the partial redundancy checks in insert_aux.
3388 The value may exist in AVAIL_OUT, in particular, it could be represented
3389 by the expression we are trying to eliminate, in which case we want the
3390 replacement to occur. If it does not already exist in AVAIL_OUT, we want it added.
3393 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3394 this block, because if it did, it would have existed in our dominator's
3395 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3398 bitmap_insert_into_set (PHI_GEN (block), newphi);
3399 bitmap_value_replace_in_set (AVAIL_OUT (block),
3401 bitmap_insert_into_set (NEW_SETS (block),
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, "Created phi ");
3407 print_gimple_stmt (dump_file, phi, 0, 0);
3408 fprintf (dump_file, " in block %d\n", block->index);
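/* The canonical diamond this function handles (hypothetical GIMPLE):

       before                        after insertion
       if (c_1)                      if (c_1)
         x_2 = a_3 + b_4;              x_2 = a_3 + b_4;
       else                          else
         ;                             pretmp_9 = a_3 + b_4;
       y_5 = a_3 + b_4;              # prephitmp_10 = PHI <x_2, pretmp_9>
                                     y_5 = a_3 + b_4;

   The insertion into the else-predecessor plus the PHI make the value
   fully redundant at y_5; eliminate () later rewrites y_5's statement
   to use prephitmp_10.  */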
3416 /* Perform insertion of partially redundant values.
3417 For BLOCK, do the following:
3418 1. Propagate the NEW_SETS of the dominator into the current block.
3419 If the block has multiple predecessors,
3420 2a. Iterate over the ANTIC expressions for the block to see if
3421 any of them are partially redundant.
3422 2b. If so, insert them into the necessary predecessors to make
3423 the expression fully redundant.
3424 2c. Insert a new PHI merging the values of the predecessors.
3425 2d. Insert the new PHI, and the new expressions, into the NEW_SETS set.
3427 3. Recursively call ourselves on the dominator children of BLOCK.
3429 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3430 do_regular_insertion and do_partial_partial_insertion.
3435 do_regular_insertion (basic_block block, basic_block dom)
3437 bool new_stuff = false;
3438 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3442 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3444 if (expr->kind != NAME)
3448 bool by_some = false;
3449 bool cant_insert = false;
3450 bool all_same = true;
3451 pre_expr first_s = NULL;
3454 pre_expr eprime = NULL;
3456 pre_expr edoubleprime = NULL;
3457 bool do_insertion = false;
3459 val = get_expr_value_id (expr);
3460 if (bitmap_set_contains_value (PHI_GEN (block), val))
3462 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3464 if (dump_file && (dump_flags & TDF_DETAILS))
3465 fprintf (dump_file, "Found fully redundant value\n");
3469 avail = XCNEWVEC (pre_expr, last_basic_block);
3470 FOR_EACH_EDGE (pred, ei, block->preds)
3472 unsigned int vprime;
3474 /* We should never run insertion for the exit block
3475 and so should never come across fake pred edges. */
3476 gcc_assert (!(pred->flags & EDGE_FAKE));
3478 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3481 /* eprime will generally only be NULL if the
3482 value of the expression, translated
3483 through the PHI for this predecessor, is
3484 undefined. If that is the case, we can't
3485 make the expression fully redundant,
3486 because its value is undefined along a
3487 predecessor path. We can thus break out
3488 early because it doesn't matter what the
3489 rest of the results are. */
3496 eprime = fully_constant_expression (eprime);
3497 vprime = get_expr_value_id (eprime);
3498 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3500 if (edoubleprime == NULL)
3502 avail[bprime->index] = eprime;
3507 avail[bprime->index] = edoubleprime;
3509 /* We want to perform insertions to remove a redundancy on
3510 a path in the CFG we want to optimize for speed. */
3511 if (optimize_edge_for_speed_p (pred))
3512 do_insertion = true;
3513 if (first_s == NULL)
3514 first_s = edoubleprime;
3515 else if (!pre_expr_eq (first_s, edoubleprime))
3519 /* If we can insert it, it is not available with the
3520 same value along every predecessor, and it is
3521 defined by some predecessor, then it is
3522 partially redundant. */
3523 if (!cant_insert && !all_same && by_some && do_insertion
3524 && dbg_cnt (treepre_insert))
3526 if (insert_into_preds_of_block (block, get_expression_id (expr),
3530 /* If all edges produce the same value and that value is
3531 an invariant, then the PHI has the same value on all
3532 edges. Note this. */
3533 else if (!cant_insert && all_same && eprime
3534 && (edoubleprime->kind == CONSTANT
3535 || edoubleprime->kind == NAME)
3536 && !value_id_constant_p (val))
3540 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3541 value_expressions, val);
3543 unsigned int new_val = get_expr_value_id (edoubleprime);
3544 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3546 pre_expr expr = expression_for_id (j);
3548 if (expr->kind == NAME)
3550 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3551 /* Just reset the value id and valnum so it is
3552 the same as the constant we have discovered. */
3553 if (edoubleprime->kind == CONSTANT)
3555 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3556 pre_stats.constified++;
3559 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3560 info->value_id = new_val;
3568 VEC_free (pre_expr, heap, exprs);
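/* Example for the "all edges produce the same value" arm above
   (hypothetical): if value numbering proves that every predecessor's
   leader for the value is the constant 4, building a PHI <4, 4> would
   be pointless.  Instead each NAME carrying that value id gets its
   valnum reset to 4 (counted as pre_stats.constified) and
   eliminate () substitutes the constant directly.  */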
3573 /* Perform insertion for partially anticipatable expressions. There
3574 is only one case in which we perform insertion for these: when the
3575 expression is partially anticipatable, but fully available.
3576 In this case, we know that putting it earlier will enable us to
3577 remove the later computation. */
3581 do_partial_partial_insertion (basic_block block, basic_block dom)
3583 bool new_stuff = false;
3584 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3588 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3590 if (expr->kind != NAME)
3595 bool cant_insert = false;
3598 pre_expr eprime = NULL;
3601 val = get_expr_value_id (expr);
3602 if (bitmap_set_contains_value (PHI_GEN (block), val))
3604 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3607 avail = XCNEWVEC (pre_expr, last_basic_block);
3608 FOR_EACH_EDGE (pred, ei, block->preds)
3610 unsigned int vprime;
3611 pre_expr edoubleprime;
3613 /* We should never run insertion for the exit block
3614 and so should never come across fake pred edges. */
3615 gcc_assert (!(pred->flags & EDGE_FAKE));
3617 eprime = phi_translate (expr, ANTIC_IN (block),
3621 /* eprime will generally only be NULL if the
3622 value of the expression, translated
3623 through the PHI for this predecessor, is
3624 undefined. If that is the case, we can't
3625 make the expression fully redundant,
3626 because its value is undefined along a
3627 predecessor path. We can thus break out
3628 early because it doesn't matter what the
3629 rest of the results are. */
3636 eprime = fully_constant_expression (eprime);
3637 vprime = get_expr_value_id (eprime);
3638 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3640 if (edoubleprime == NULL)
3646 avail[bprime->index] = edoubleprime;
3650 /* If we can insert it and its value is fully
3651 available along every predecessor, inserting
3652 the merge here makes the later full
3653 computation redundant. */
3654 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3656 pre_stats.pa_insert++;
3657 if (insert_into_preds_of_block (block, get_expression_id (expr),
3665 VEC_free (pre_expr, heap, exprs);
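/* The one shape this function targets, on a hypothetical CFG:

       if (c_1)
         x_2 = a_3 + b_4;
       else
         y_5 = a_3 + b_4;
       <bb B>                 <- value of a_3 + b_4 available in BOTH preds
       if (d_6)
         z_7 = a_3 + b_4;     <- computed on only one path below B

   At B the expression is only in PA_IN (just one successor path uses
   it), so regular insertion skips it; but since it is available in
   all predecessors (by_all), inserting the merge PHI <x_2, y_5> at B
   costs nothing on any path and makes z_7's computation fully
   redundant.  */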
3670 insert_aux (basic_block block)
3673 bool new_stuff = false;
3678 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3683 bitmap_set_t newset = NEW_SETS (dom);
3686 /* Note that we need to value_replace both NEW_SETS and
3687 AVAIL_OUT. In both sets the value may currently be
3688 represented by some non-simple expression that we
3689 want to replace here. */
3690 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3692 pre_expr expr = expression_for_id (i);
3693 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3694 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3697 if (!single_pred_p (block))
3699 new_stuff |= do_regular_insertion (block, dom);
3700 if (do_partial_partial)
3701 new_stuff |= do_partial_partial_insertion (block, dom);
3705 for (son = first_dom_son (CDI_DOMINATORS, block);
3707 son = next_dom_son (CDI_DOMINATORS, son))
3709 new_stuff |= insert_aux (son);
3715 /* Perform insertion of partially redundant values. */
3720 bool new_stuff = true;
3722 int num_iterations = 0;
3725 NEW_SETS (bb) = bitmap_set_new ();
3730 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3732 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3736 /* Add OP to EXP_GEN (block). */
3739 add_to_exp_gen (basic_block block, tree op)
3744 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3746 result = get_or_alloc_expr_for_name (op);
3747 bitmap_value_insert_into_set (EXP_GEN (block), result);
3751 /* Create value ids for PHI in BLOCK. */
3754 make_values_for_phi (gimple phi, basic_block block)
3756 tree result = gimple_phi_result (phi);
3758 /* We have no need for virtual phis, as they don't represent
3759 actual computations. */
3760 if (is_gimple_reg (result))
3762 pre_expr e = get_or_alloc_expr_for_name (result);
3763 add_to_value (get_expr_value_id (e), e);
3764 bitmap_insert_into_set (PHI_GEN (block), e);
3765 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3769 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3771 tree arg = gimple_phi_arg_def (phi, i);
3772 if (TREE_CODE (arg) == SSA_NAME)
3774 e = get_or_alloc_expr_for_name (arg);
3775 add_to_value (get_expr_value_id (e), e);
3782 /* Compute the AVAIL set for all basic blocks.
3784 This function performs value numbering of the statements in each basic
3785 block. The AVAIL sets are built from information we glean while doing
3786 this value numbering, since the AVAIL sets contain only one entry per value.
3789 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3790 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3793 compute_avail (void)
3796 basic_block block, son;
3797 basic_block *worklist;
3801 /* We pretend that default definitions are defined in the entry block.
3802 This includes function arguments and the static chain decl. */
3803 for (i = 1; i < num_ssa_names; ++i)
3805 tree name = ssa_name (i);
3808 || !SSA_NAME_IS_DEFAULT_DEF (name)
3809 || has_zero_uses (name)
3810 || !is_gimple_reg (name))
3813 e = get_or_alloc_expr_for_name (name);
3814 add_to_value (get_expr_value_id (e), e);
3816 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3817 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3820 /* Allocate the worklist. */
3821 worklist = XNEWVEC (basic_block, n_basic_blocks);
3823 /* Seed the algorithm by putting the dominator children of the entry
3824 block on the worklist. */
3825 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3827 son = next_dom_son (CDI_DOMINATORS, son))
3828 worklist[sp++] = son;
3830 /* Loop until the worklist is empty. */
3833 gimple_stmt_iterator gsi;
3836 unsigned int stmt_uid = 1;
3838 /* Pick a block from the worklist. */
3839 block = worklist[--sp];
3841 /* Initially, the set of available values in BLOCK is that of
3842 its immediate dominator. */
3843 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3845 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3847 /* Generate values for PHI nodes. */
3848 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3849 make_values_for_phi (gsi_stmt (gsi), block);
3851 BB_MAY_NOTRETURN (block) = 0;
3853 /* Now compute value numbers and populate value sets with all
3854 the expressions computed in BLOCK. */
3855 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3860 stmt = gsi_stmt (gsi);
3861 gimple_set_uid (stmt, stmt_uid++);
3863 /* Cache whether the basic-block has any non-visible side-effect or control flow.
3865 If this isn't a call or it is the last stmt in the
3866 basic-block then the CFG represents things correctly. */
3867 if (is_gimple_call (stmt)
3868 && !stmt_ends_bb_p (stmt))
3870 /* Non-looping const functions always return normally.
3871 Otherwise the call might not return or might have side-effects
3872 that forbid hoisting possibly trapping expressions before it. */
3874 int flags = gimple_call_flags (stmt);
3875 if (!(flags & ECF_CONST)
3876 || (flags & ECF_LOOPING_CONST_OR_PURE))
3877 BB_MAY_NOTRETURN (block) = 1;
3880 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3882 pre_expr e = get_or_alloc_expr_for_name (op);
3884 add_to_value (get_expr_value_id (e), e);
3886 bitmap_insert_into_set (TMP_GEN (block), e);
3887 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3890 if (gimple_has_volatile_ops (stmt)
3891 || stmt_could_throw_p (stmt))
3894 switch (gimple_code (stmt))
3897 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3898 add_to_exp_gen (block, op);
3905 vn_reference_op_t vro;
3906 pre_expr result = NULL;
3907 VEC(vn_reference_op_s, heap) *ops = NULL;
3909 if (!can_value_number_call (stmt))
3912 copy_reference_ops_from_call (stmt, &ops);
3913 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3914 gimple_expr_type (stmt),
3916 VEC_free (vn_reference_op_s, heap, ops);
3920 for (i = 0; VEC_iterate (vn_reference_op_s,
3924 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3925 add_to_exp_gen (block, vro->op0);
3926 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3927 add_to_exp_gen (block, vro->op1);
3928 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3929 add_to_exp_gen (block, vro->op2);
3931 result = (pre_expr) pool_alloc (pre_expr_pool);
3932 result->kind = REFERENCE;
3934 PRE_EXPR_REFERENCE (result) = ref;
3936 get_or_alloc_expression_id (result);
3937 add_to_value (get_expr_value_id (result), result);
3939 bitmap_value_insert_into_set (EXP_GEN (block), result);
3945 pre_expr result = NULL;
3946 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3950 case tcc_comparison:
3955 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
3956 gimple_assign_rhs_code (stmt),
3957 gimple_expr_type (stmt),
3958 gimple_assign_rhs1 (stmt),
3959 gimple_assign_rhs2 (stmt),
3960 NULL_TREE, NULL_TREE, &nary);
3965 for (i = 0; i < nary->length; i++)
3966 if (TREE_CODE (nary->op[i]) == SSA_NAME)
3967 add_to_exp_gen (block, nary->op[i]);
3969 result = (pre_expr) pool_alloc (pre_expr_pool);
3970 result->kind = NARY;
3972 PRE_EXPR_NARY (result) = nary;
3976 case tcc_declaration:
3981 vn_reference_op_t vro;
3983 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3989 for (i = 0; VEC_iterate (vn_reference_op_s,
3993 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3994 add_to_exp_gen (block, vro->op0);
3995 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3996 add_to_exp_gen (block, vro->op1);
3997 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3998 add_to_exp_gen (block, vro->op2);
4000 result = (pre_expr) pool_alloc (pre_expr_pool);
4001 result->kind = REFERENCE;
4003 PRE_EXPR_REFERENCE (result) = ref;
4008 /* For any other statement that we don't
4009 recognize, simply add all referenced
4010 SSA_NAMEs to EXP_GEN. */
4011 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4012 add_to_exp_gen (block, op);
4016 get_or_alloc_expression_id (result);
4017 add_to_value (get_expr_value_id (result), result);
4019 bitmap_value_insert_into_set (EXP_GEN (block), result);
4028 /* Put the dominator children of BLOCK on the worklist of blocks
4029 to compute available sets for. */
4030 for (son = first_dom_son (CDI_DOMINATORS, block);
4032 son = next_dom_son (CDI_DOMINATORS, son))
4033 worklist[sp++] = son;
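/* Worked example of the equations in the comment before this function
   (hypothetical block B):

       # x_3 = PHI <...>     -> PHI_GEN (B) = { x_3 }
       t_4 = a_1 + b_2;      -> TMP_GEN (B) = { t_4 },
                                EXP_GEN (B) = { a_1 + b_2 }

   AVAIL_OUT (B) = AVAIL_OUT (dom (B)) U PHI_GEN (B) U TMP_GEN (B).
   Because the sets keep one leader per value, if t_4's value already
   has a leader in AVAIL_OUT (dom (B)), the union does not grow.  */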
4039 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4040 than the available expressions for it. The insertion point is
4041 right before the first use in STMT. Returns the SSA_NAME that should
4042 be used for replacement. */
4045 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4047 basic_block bb = gimple_bb (stmt);
4048 gimple_stmt_iterator gsi;
4049 gimple_seq stmts = NULL;
4053 /* First create a value expression from the expression we want
4054 to insert and associate it with the value handle for SSA_VN. */
4055 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4059 /* Then use create_expression_by_pieces to generate a valid
4060 expression to insert at this point of the IL stream. */
4061 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4062 if (expr == NULL_TREE)
4064 gsi = gsi_for_stmt (stmt);
4065 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4070 /* Eliminate fully redundant computations. */
4075 VEC (gimple, heap) *to_remove = NULL;
4077 unsigned int todo = 0;
4078 gimple_stmt_iterator gsi;
4084 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4086 stmt = gsi_stmt (gsi);
4088 /* Lookup the RHS of the expression, see if we have an
4089 available computation for it. If so, replace the RHS with
4090 the available computation. */
4091 if (gimple_has_lhs (stmt)
4092 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4093 && !gimple_assign_ssa_name_copy_p (stmt)
4094 && (!gimple_assign_single_p (stmt)
4095 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4096 && !gimple_has_volatile_ops (stmt)
4097 && !has_zero_uses (gimple_get_lhs (stmt)))
4099 tree lhs = gimple_get_lhs (stmt);
4100 tree rhs = NULL_TREE;
4102 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4103 pre_expr sprimeexpr;
4105 if (gimple_assign_single_p (stmt))
4106 rhs = gimple_assign_rhs1 (stmt);
4108 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4109 get_expr_value_id (lhsexpr),
4114 if (sprimeexpr->kind == CONSTANT)
4115 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4116 else if (sprimeexpr->kind == NAME)
4117 sprime = PRE_EXPR_NAME (sprimeexpr);
4122 /* If there is no existing leader but SCCVN knows this
4123 value is constant, use that constant. */
4124 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4126 sprime = VN_INFO (lhs)->valnum;
4127 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4128 TREE_TYPE (sprime)))
4129 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4131 if (dump_file && (dump_flags & TDF_DETAILS))
4133 fprintf (dump_file, "Replaced ");
4134 print_gimple_expr (dump_file, stmt, 0, 0);
4135 fprintf (dump_file, " with ");
4136 print_generic_expr (dump_file, sprime, 0);
4137 fprintf (dump_file, " in ");
4138 print_gimple_stmt (dump_file, stmt, 0, 0);
4140 pre_stats.eliminations++;
4141 propagate_tree_value_into_stmt (&gsi, sprime);
4142 stmt = gsi_stmt (gsi);
4147 /* If there is no existing usable leader but SCCVN thinks
4148 it has an expression it wants to use as replacement, insert that. */
4150 if (!sprime || sprime == lhs)
4152 tree val = VN_INFO (lhs)->valnum;
4154 && TREE_CODE (val) == SSA_NAME
4155 && VN_INFO (val)->needs_insertion
4156 && can_PRE_operation (vn_get_expr_for (val)))
4157 sprime = do_SCCVN_insertion (stmt, val);
4161 && (rhs == NULL_TREE
4162 || TREE_CODE (rhs) != SSA_NAME
4163 || may_propagate_copy (rhs, sprime)))
4165 gcc_assert (sprime != rhs);
4167 if (dump_file && (dump_flags & TDF_DETAILS))
4169 fprintf (dump_file, "Replaced ");
4170 print_gimple_expr (dump_file, stmt, 0, 0);
4171 fprintf (dump_file, " with ");
4172 print_generic_expr (dump_file, sprime, 0);
4173 fprintf (dump_file, " in ");
4174 print_gimple_stmt (dump_file, stmt, 0, 0);
4177 if (TREE_CODE (sprime) == SSA_NAME)
4178 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4180 /* We need to make sure the new and old types actually match,
4181 which may require adding a simple cast, which fold_convert will do for us. */
4183 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4184 && !useless_type_conversion_p (gimple_expr_type (stmt),
4185 TREE_TYPE (sprime)))
4186 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4188 pre_stats.eliminations++;
4189 propagate_tree_value_into_stmt (&gsi, sprime);
4190 stmt = gsi_stmt (gsi);
4193 /* If we removed EH side effects from the statement, clean
4194 its EH information. */
4195 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4197 bitmap_set_bit (need_eh_cleanup,
4198 gimple_bb (stmt)->index);
4199 if (dump_file && (dump_flags & TDF_DETAILS))
4200 fprintf (dump_file, " Removed EH side effects.\n");
4204 /* If the statement is a scalar store, see if the expression
4205 has the same value number as its rhs. If so, the store is dead. */
4207 else if (gimple_assign_single_p (stmt)
4208 && !is_gimple_reg (gimple_assign_lhs (stmt))
4209 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4210 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4212 tree rhs = gimple_assign_rhs1 (stmt);
4214 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4215 gimple_vuse (stmt), true, NULL);
4216 if (TREE_CODE (rhs) == SSA_NAME)
4217 rhs = VN_INFO (rhs)->valnum;
4219 && operand_equal_p (val, rhs, 0))
4221 if (dump_file && (dump_flags & TDF_DETAILS))
4223 fprintf (dump_file, "Deleted redundant store ");
4224 print_gimple_stmt (dump_file, stmt, 0, 0);
4227 /* Queue stmt for removal. */
4228 VEC_safe_push (gimple, heap, to_remove, stmt);
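/* A store that dies here (hypothetical, assuming no intervening
   clobber of *p_2):

       x_1 = *p_2;
       ...
       *p_2 = x_1;   <- vn_reference_lookup of the LHS returns x_1's
                        value, so the store writes back what is there

   Note the statement is only queued in TO_REMOVE; removal happens
   after the walk, below, so that SSA names are not released
   mid-walk.  */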
4231 /* Visit COND_EXPRs and fold the comparison with the
4232 available value-numbers. */
4233 else if (gimple_code (stmt) == GIMPLE_COND)
4235 tree op0 = gimple_cond_lhs (stmt);
4236 tree op1 = gimple_cond_rhs (stmt);
4239 if (TREE_CODE (op0) == SSA_NAME)
4240 op0 = VN_INFO (op0)->valnum;
4241 if (TREE_CODE (op1) == SSA_NAME)
4242 op1 = VN_INFO (op1)->valnum;
4243 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4245 if (result && TREE_CODE (result) == INTEGER_CST)
4247 if (integer_zerop (result))
4248 gimple_cond_make_false (stmt);
4250 gimple_cond_make_true (stmt);
4252 todo = TODO_cleanup_cfg;
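/* For instance (hypothetical), if SCCVN proved a_1 and b_2
   value-equal, a condition like "if (a_1 != b_2)" folds to integer
   zero here, the branch is made unconditionally false, and the
   requested TODO_cleanup_cfg pass later deletes the now-unreachable
   arm.  */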
4255 /* Visit indirect calls and turn them into direct calls if
4257 if (gimple_code (stmt) == GIMPLE_CALL
4258 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4260 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4261 if (TREE_CODE (fn) == ADDR_EXPR
4262 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4264 if (dump_file && (dump_flags & TDF_DETAILS))
4266 fprintf (dump_file, "Replacing call target with ");
4267 print_generic_expr (dump_file, fn, 0);
4268 fprintf (dump_file, " in ");
4269 print_gimple_stmt (dump_file, stmt, 0, 0);
4272 gimple_call_set_fn (stmt, fn);
4274 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4276 bitmap_set_bit (need_eh_cleanup,
4277 gimple_bb (stmt)->index);
4278 if (dump_file && (dump_flags & TDF_DETAILS))
4279 fprintf (dump_file, " Removed EH side effects.\n");
4282 /* Changing an indirect call to a direct call may
4283 have exposed different semantics. This may
4284 require an SSA update. */
4285 todo |= TODO_update_ssa_only_virtuals;
4290 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4292 gimple stmt, phi = gsi_stmt (gsi);
4293 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4294 pre_expr sprimeexpr, resexpr;
4295 gimple_stmt_iterator gsi2;
4297 /* We want to perform redundant PHI elimination. Do so by
4298 replacing the PHI with a single copy if possible.
4299 Do not touch inserted, single-argument or virtual PHIs. */
4300 if (gimple_phi_num_args (phi) == 1
4301 || !is_gimple_reg (res)
4302 || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
4308 resexpr = get_or_alloc_expr_for_name (res);
4309 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4310 get_expr_value_id (resexpr), NULL);
4313 if (sprimeexpr->kind == CONSTANT)
4314 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4315 else if (sprimeexpr->kind == NAME)
4316 sprime = PRE_EXPR_NAME (sprimeexpr);
4327 if (dump_file && (dump_flags & TDF_DETAILS))
4329 fprintf (dump_file, "Replaced redundant PHI node defining ");
4330 print_generic_expr (dump_file, res, 0);
4331 fprintf (dump_file, " with ");
4332 print_generic_expr (dump_file, sprime, 0);
4333 fprintf (dump_file, "\n");
4336 remove_phi_node (&gsi, false);
4338 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4339 sprime = fold_convert (TREE_TYPE (res), sprime);
4340 stmt = gimple_build_assign (res, sprime);
4341 SSA_NAME_DEF_STMT (res) = stmt;
4342 if (TREE_CODE (sprime) == SSA_NAME)
4343 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4345 gsi2 = gsi_after_labels (b);
4346 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4347 /* Queue the copy for eventual removal. */
4348 VEC_safe_push (gimple, heap, to_remove, stmt);
4349 pre_stats.eliminations++;
4353 /* We cannot remove stmts during the BB walk, and especially cannot release
4354 SSA names there, as this confuses the VN machinery. The stmts ending
4355 up in to_remove are either stores or simple copies. */
4356 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4358 tree lhs = gimple_assign_lhs (stmt);
4359 tree rhs = gimple_assign_rhs1 (stmt);
4360 use_operand_p use_p;
4363 /* If there is a single use only, propagate the equivalency
4364 instead of keeping the copy. */
4365 if (TREE_CODE (lhs) == SSA_NAME
4366 && TREE_CODE (rhs) == SSA_NAME
4367 && single_imm_use (lhs, &use_p, &use_stmt)
4368 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4370 SET_USE (use_p, gimple_assign_rhs1 (stmt));
4371 update_stmt (use_stmt);
4374 /* If this is a store or a now unused copy, remove it. */
4375 if (TREE_CODE (lhs) != SSA_NAME
4376 || has_zero_uses (lhs))
4378 gsi = gsi_for_stmt (stmt);
4379 unlink_stmt_vdef (stmt);
4380 gsi_remove (&gsi, true);
4381 release_defs (stmt);
4384 VEC_free (gimple, heap, to_remove);
4389 /* Borrow a bit of tree-ssa-dce.c for the moment.
4390 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4391 this may be a bit faster, and we may want critical edges kept split. */
4393 /* If OP's defining statement has not already been determined to be necessary,
4394 mark that statement necessary. Return the stmt, if it is newly necessary. */
4397 static inline gimple
4398 mark_operand_necessary (tree op)
4404 if (TREE_CODE (op) != SSA_NAME)
4407 stmt = SSA_NAME_DEF_STMT (op);
4410 if (gimple_plf (stmt, NECESSARY)
4411 || gimple_nop_p (stmt))
4414 gimple_set_plf (stmt, NECESSARY, true);
4418 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4419 to insert PHI nodes sometimes, and because value numbering of casts isn't
4420 perfect, we sometimes end up inserting dead code. This simple DCE-like
4421 pass removes any insertions we made that weren't actually used. */
4424 remove_dead_inserted_code (void)
4426 VEC(gimple,heap) *worklist = NULL;
4430 worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
4431 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4433 if (gimple_plf (t, NECESSARY))
4434 VEC_quick_push (gimple, worklist, t);
4436 while (VEC_length (gimple, worklist) > 0)
4438 t = VEC_pop (gimple, worklist);
4440 /* PHI nodes are somewhat special in that each PHI alternative has
4441 data and control dependencies. All the statements feeding the
4442 PHI node's arguments are always necessary. */
4443 if (gimple_code (t) == GIMPLE_PHI)
4447 VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
4448 for (k = 0; k < gimple_phi_num_args (t); k++)
4450 tree arg = PHI_ARG_DEF (t, k);
4451 if (TREE_CODE (arg) == SSA_NAME)
4453 gimple n = mark_operand_necessary (arg);
4455 VEC_quick_push (gimple, worklist, n);
4461 /* Propagate through the operands. Examine all the USE, VUSE and
4462 VDEF operands in this statement. Mark all the statements
4463 which feed this statement's uses as necessary. */
4467 /* The operands of VDEF expressions are also needed as they
4468 represent potential definitions that may reach this
4469 statement (VDEF operands allow us to follow def-def links). */
4472 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4474 gimple n = mark_operand_necessary (use);
4476 VEC_safe_push (gimple, heap, worklist, n);
4481 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4483 if (!gimple_plf (t, NECESSARY))
4485 gimple_stmt_iterator gsi;
4487 if (dump_file && (dump_flags & TDF_DETAILS))
4489 fprintf (dump_file, "Removing unnecessary insertion:");
4490 print_gimple_stmt (dump_file, t, 0, 0);
4493 gsi = gsi_for_stmt (t);
4494 if (gimple_code (t) == GIMPLE_PHI)
4495 remove_phi_node (&gsi, true);
4498 gsi_remove (&gsi, true);
4503 VEC_free (gimple, heap, worklist);
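/* Example of what the sweep above removes (hypothetical): suppose an
   insert () iteration emitted "pretmp_9 = a_1 + b_2" into a
   predecessor, but its only intended consumer was itself eliminated
   or never materialized, so no surviving statement uses pretmp_9.
   Its definition was never marked NECESSARY, the worklist closure
   never reaches it, and it is deleted here rather than lingering
   until a full DCE pass.  */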
4506 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4507 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4508 the number of visited blocks. */
4511 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4513 edge_iterator *stack;
4515 int post_order_num = 0;
4518 if (include_entry_exit)
4519 post_order[post_order_num++] = EXIT_BLOCK;
4521 /* Allocate stack for back-tracking up CFG. */
4522 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4525 /* Allocate bitmap to track nodes that have been visited. */
4526 visited = sbitmap_alloc (last_basic_block);
4528 /* None of the nodes in the CFG have been visited yet. */
4529 sbitmap_zero (visited);
4531 /* Push the last edge on to the stack. */
4532 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4540 /* Look at the edge on the top of the stack. */
4542 src = ei_edge (ei)->src;
4543 dest = ei_edge (ei)->dest;
4545 /* Check if the edge source has been visited yet. */
4546 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4548 /* Mark that we have visited the source. */
4549 SET_BIT (visited, src->index);
4551 if (EDGE_COUNT (src->preds) > 0)
4552 /* Since the SRC node has been visited for the first
4553 time, check its predecessors. */
4554 stack[sp++] = ei_start (src->preds);
4556 post_order[post_order_num++] = src->index;
4560 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4561 post_order[post_order_num++] = dest->index;
4563 if (!ei_one_before_end_p (ei))
4564 ei_next (&stack[sp - 1]);
4570 if (include_entry_exit)
4571 post_order[post_order_num++] = ENTRY_BLOCK;
4574 sbitmap_free (visited);
4575 return post_order_num;
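/* Illustration on a hypothetical diamond CFG ENTRY -> 2 -> {3,4} -> 5
   -> EXIT: the walk above stores 2, 3, 4, 5 (or 2, 4, 3, 5), i.e.,
   back edges aside, every block lands after its predecessors.
   compute_antic scans POSTORDER from the highest index down, so each
   block is visited after its successors and its ANTIC_OUT is built
   from already-current ANTIC_IN sets; only loop back edges then force
   extra fixpoint iterations.  */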
4579 /* Initialize data structures used by PRE. */
4582 init_pre (bool do_fre)
4586 next_expression_id = 1;
4588 VEC_safe_push (pre_expr, heap, expressions, NULL);
4589 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4590 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4591 get_max_value_id() + 1);
4596 inserted_exprs = NULL;
4597 need_creation = NULL;
4598 pretemp = NULL_TREE;
4599 storetemp = NULL_TREE;
4600 prephitemp = NULL_TREE;
4602 connect_infinite_loops_to_exit ();
4603 memset (&pre_stats, 0, sizeof (pre_stats));
4606 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4607 my_rev_post_order_compute (postorder, false);
4610 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
4612 calculate_dominance_info (CDI_POST_DOMINATORS);
4613 calculate_dominance_info (CDI_DOMINATORS);
4615 bitmap_obstack_initialize (&grand_bitmap_obstack);
4616 inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
4617 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4618 expr_pred_trans_eq, free);
4619 expression_to_id = htab_create (num_ssa_names * 3,
4622 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4623 sizeof (struct bitmap_set), 30);
4624 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4625 sizeof (struct pre_expr_d), 30);
4628 EXP_GEN (bb) = bitmap_set_new ();
4629 PHI_GEN (bb) = bitmap_set_new ();
4630 TMP_GEN (bb) = bitmap_set_new ();
4631 AVAIL_OUT (bb) = bitmap_set_new ();
4634 need_eh_cleanup = BITMAP_ALLOC (NULL);
4638 /* Deallocate data structures used by PRE. */
4641 fini_pre (bool do_fre)
4646 VEC_free (bitmap_set_t, heap, value_expressions);
4647 VEC_free (gimple, heap, inserted_exprs);
4648 VEC_free (gimple, heap, need_creation);
4649 bitmap_obstack_release (&grand_bitmap_obstack);
4650 free_alloc_pool (bitmap_set_pool);
4651 free_alloc_pool (pre_expr_pool);
4652 htab_delete (phi_translate_table);
4653 htab_delete (expression_to_id);
4654 VEC_free (unsigned, heap, name_to_id);
4662 free_dominance_info (CDI_POST_DOMINATORS);
4664 if (!bitmap_empty_p (need_eh_cleanup))
4666 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4667 cleanup_tree_cfg ();
4670 BITMAP_FREE (need_eh_cleanup);
4673 loop_optimizer_finalize ();
4676 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4677 only wants to do full redundancy elimination. */
4680 execute_pre (bool do_fre)
4682 unsigned int todo = 0;
4684 do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
4686 /* This has to happen before SCCVN runs because
4687 loop_optimizer_init may create new phis, etc. */
4689 loop_optimizer_init (LOOPS_NORMAL);
4691 if (!run_scc_vn (do_fre))
4695 remove_dead_inserted_code ();
4696 loop_optimizer_finalize ();
4705 /* Collect and value number expressions computed in each basic block. */
4708 if (dump_file && (dump_flags & TDF_DETAILS))
4714 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4715 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4716 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4717 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4721 /* Insert can get quite slow on an incredibly large number of basic
4722 blocks due to some quadratic behavior. Until this behavior is
4723 fixed, don't run it when we have an incredibly large number of
4724 bb's. If we aren't going to run insert, there is no point in
4725 computing ANTIC, either, even though it's plenty fast. */
4726 if (!do_fre && n_basic_blocks < 4000)
4732 /* Remove all the redundant expressions. */
4733 todo |= eliminate ();
4735 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4736 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4737 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4738 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4739 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4741 /* Make sure to remove fake edges before committing our inserts.
4742 This makes sure we don't end up with extra critical edges that
4743 we would need to split. */
4744 remove_fake_exit_edges ();
4745 gsi_commit_edge_inserts ();
4747 clear_expression_ids ();
4750 remove_dead_inserted_code ();
4758 /* Gate and execute functions for PRE. */
4763 return execute_pre (false);
4769 return flag_tree_pre != 0;
4772 struct gimple_opt_pass pass_pre =
4777 gate_pre, /* gate */
4778 do_pre, /* execute */
4781 0, /* static_pass_number */
4782 TV_TREE_PRE, /* tv_id */
4783 PROP_no_crit_edges | PROP_cfg
4784 | PROP_ssa, /* properties_required */
4785 0, /* properties_provided */
4786 0, /* properties_destroyed */
4787 TODO_rebuild_alias, /* todo_flags_start */
4788 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4789 | TODO_verify_ssa /* todo_flags_finish */
4794 /* Gate and execute functions for FRE. */
4799 return execute_pre (true);
4805 return flag_tree_fre != 0;
4808 struct gimple_opt_pass pass_fre =
4813 gate_fre, /* gate */
4814 execute_fre, /* execute */
4817 0, /* static_pass_number */
4818 TV_TREE_FRE, /* tv_id */
4819 PROP_cfg | PROP_ssa, /* properties_required */
4820 0, /* properties_provided */
4821 0, /* properties_destroyed */
4822 0, /* todo_flags_start */
4823 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */