/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
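
/* As an illustrative sketch (the SSA names below are hypothetical,
   not taken from any testcase), consider the loop cycle

     i_1 = PHI <0(entry), i_2(latch)>
     j_1 = PHI <0(entry), j_2(latch)>
     i_2 = i_1 + 1;
     j_2 = j_1 + 1;

   A plain RPO hash-based numbering cannot prove i_* == j_* because
   each PHI refers to a not-yet-numbered name on the back edge.  The
   optimistic SCC iteration first assumes the back edge arguments are
   VN_TOP, which makes i_1 and j_1 (and hence i_2 and j_2) hash equal,
   and the next iteration over the SCC confirms the assumption instead
   of invalidating it, so the j_* names can be replaced by i_*.  */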
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
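
/* For example (hypothetical SSA names), given

     a_2 = b_3 + c_4;
     d_5 = a_2;

   vn_get_expr_for (d_5) rebuilds the expression "b_3 + c_4" from the
   defining statement of d_5's value number a_2, so callers can fold
   and simplify based on the expression rather than the bare name.  */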
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      /* Sum up runs of constant offsets and hash only the sum, so that
         different decompositions of the same access hash alike.  */
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
          continue;
        }
      if (off != -1
          && off != 0)
        result = iterative_hash_hashval_t (off, result);
      off = -1;
      if (deref
          && vro->opcode == ADDR_EXPR)
        {
          if (vro->op0)
            {
              tree op = TREE_OPERAND (vro->op0, 0);
              result = iterative_hash_hashval_t (TREE_CODE (op), result);
              result = iterative_hash_expr (op, result);
            }
        }
      else
        result = vn_reference_op_compute_hash (vro, result);
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
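
/* As an illustrative sketch, the constant-offset summing above is what
   lets two different spellings of the same access compare (and hash)
   equal: a.b.c and MEM[&a.b].c decompose into different operand
   sequences, but as long as .b sits at a constant offset both runs of
   operands sum to the same byte offset.  The exact operand encoding is
   produced by copy_reference_ops_from_ref below.  */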
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
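
/* As an illustrative example (names hypothetical), the reference
   a.f[i_1] decomposes outermost-first into roughly the operand records

     { opcode ARRAY_REF,     op0 i_1, op1 low bound, op2 element size }
     { opcode COMPONENT_REF, op0 FIELD_DECL f }
     { opcode VAR_DECL,      op0 a }

   with the off field filled in for every piece whose offset from the
   containing object is a compile-time constant.  */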
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
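
/* For instance, given the ops of MEM[&a.b, 4] this rewrites the
   ADDR_EXPR &a.b into &a and folds the unit offset of .b into the
   MEM_REF offset, yielding MEM[&a, 4 + off] where off stands for the
   constant byte offset of field b (an illustrative sketch).  */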
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
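
/* A small example of the above (SSA names hypothetical): with

     p_1 = &a.b;
     ... = MEM[p_1, 4];

   the ADDR_EXPR definition of p_1 is forwarded so the ops describe
   MEM[&a, 4 + offset-of-b], and for p_1 = q_2 + 8 the constant 8 is
   likewise folded into the MEM_REF offset before recursing.  */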
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
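
/* E.g. a read of "foo"[1] (an ARRAY_REF with a constant index into a
   STRING_CST, the second case above) folds to the character constant
   'o'; the first case lets builtin calls with constant arguments fold
   to their compile-time value when build_call_expr can reduce them
   (an illustrative sketch, not an exhaustive description).  */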
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
                                                  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
          && !refs_may_alias_p_1 (ref, &lhs_ref, true))
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ???  LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
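
/* To illustrate case 3) above with a hypothetical fragment:

     b = a;
     x_1 = b.f;

   looking up b.f through the vdef of the aggregate copy rewrites the
   reference operands to describe a.f and continues the walk, so x_1
   can receive the value number of an earlier load from (or store to)
   a.f instead of stopping at the copy.  */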
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
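
/* A minimal usage sketch (assuming the VN tables are initialized and
   STMT is a load "x_1 = *p_2"):

     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt), VN_WALK, NULL);

   returns the value number of an equivalent earlier load if the walk
   over non-aliasing vuses finds one, and NULL_TREE otherwise.  */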
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VBO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Return the computed hashcode for nary operation P1.  */

hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree op0,
                             tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
      /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->length = gimple_num_ops (stmt) - 1;
  vno->type = gimple_expr_type (stmt);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = gimple_op (stmt, i + 1);
  if (vno->opcode == REALPART_EXPR
      || vno->opcode == IMAGPART_EXPR
      || vno->opcode == VIEW_CONVERT_EXPR)
    vno->op[0] = TREE_OPERAND (vno->op[0], 0);
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_op (&vno1, op);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_stmt (&vno1, stmt);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}
/* Return the size of a vn_nary_op_t with LENGTH operands.  */

static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
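
/* For example, two PHIs in the same block (hypothetical names)

     x_3 = PHI <a_1(2), VN_TOP(3)>
     y_4 = PHI <a_1(2), b_2(3)>

   compare equal here, because a VN_TOP argument (a not-yet-visited
   value on a back edge) is optimistically assumed to match anything;
   the SCC iteration revisits the PHIs once b_2 gets a value and can
   still invalidate the assumption.  */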
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  FOR_EACH_VEC_ELT (tree, scc, i, var)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from, 0);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to, 0);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
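
/* The lattice transitions this implements, as a sketch: a name starts
   at VN_TOP, may move to a constant or to another name's value, and
   bottoms out at itself (VARYING).  E.g. set_ssa_val_to (x_1, x_1)
   after x_1 had the value 5 marks x_1 varying and returns true, while
   a second identical call returns false, which is how the SCC
   iteration detects that a fixpoint has been reached.  */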
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}

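/* A sketch of the type-punning path above (hypothetical C source):

     union { int i; float f; } u;
     u.i = n_1;
     f_2 = u.f;

   The load of u.f matches the store to u.i by offset and size, so the
   lookup returns the int-valued n_1 and the code above wraps it in
   VIEW_CONVERT_EXPR <float> before value numbering f_2 with it.  */
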
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

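  /* For example (illustrative GIMPLE with virtual operands):

       # .MEM_3 = VDEF <.MEM_2>
       *p_1 = x_4;
       ...
       # .MEM_5 = VDEF <.MEM_3>
       *p_1 = x_4;

     The second store looks up *p_1 in memory state .MEM_3, finds the
     first store's value x_4, and so value numbers .MEM_5 to .MEM_3:
     the store changes nothing and is redundant.  */
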
  result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	sameval = def;
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

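/* Two illustrative cases:

     # x_3 = PHI <x_1(2), x_1(3)>

   value numbers all arguments to x_1, so x_3 becomes a copy of x_1,
   while

     # x_3 = PHI <a_1(2), b_2(3)>

   has differing arguments and is instead looked up among (and, if not
   found, inserted into) the PHI nodes already hashed for this block.  */
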
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}

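/* E.g. (illustrative): for

     a_1 = b_2 + 1;
     c_3 = a_1 + 2;

   a_1 has_constants, so its recorded expression b_2 + 1 is substituted
   and fold_binary combines (b_2 + 1) + 2 into b_2 + 3.  A non-constant
   a_1 would instead be replaced only by its value number, avoiding the
   unbounded growth described above.  */
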
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case; there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}

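/* For instance (illustrative), a read from a constant aggregate such
   as

     static const int tbl[2] = { 1, 2 };
     x_1 = tbl[1];

   is reduced to the constant 2 by fold_const_aggregate_ref, without
   a full reference lookup.  */
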
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (gimple_assign_single_p (stmt)
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt),
						stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		    case GIMPLE_BINARY_RHS:
		    case GIMPLE_TERNARY_RHS:
		      changed = visit_nary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_reference:
			  /* VOP-less references can go through unary case.  */
			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
			    {
			      changed = visit_nary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			case tcc_declaration:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_nary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ??? We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ??? We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}

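/* A rough map from statement shape to handler above (illustrative
   statements; pure_fn is a hypothetical pure function):

     x_1 = y_2;          SSA copy          -> visit_copy
     x_1 = y_2 * 2;      GIMPLE_BINARY_RHS -> visit_nary_op
     x_1 = *p_2;         load              -> visit_reference_op_load
     *p_2 = x_1;         store             -> visit_reference_op_store
     x_1 = pure_fn ();   const/pure call   -> visit_reference_op_call

   Volatile, throwing and otherwise unhandled statements fall back to
   defs_to_varying.  */
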
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  VEC_qsort (tree, scc, compare_ops);
}

/* Insert the no-longer-used nary operation ONARY into the hash table INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no-longer-used phi OPHI into the hash table INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no-longer-used reference OREF into the hash table INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
				   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
	changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}

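/* A sketch of why the optimistic iteration pays off (illustrative):

     x_1 = PHI <0(2), x_2(3)>      y_3 = PHI <0(2), y_4(3)>
     x_2 = x_1 + 1;                y_4 = y_3 + 1;

   Starting from VN_TOP, iterating the SCC until nothing changes lets
   the two PHIs hash identically, proving y_3 equivalent to x_1 and
   y_4 to x_2, which a single non-iterating pass over the cycle could
   not conclude.  */
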
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push (tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

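/* The walk above is an iterative form of Tarjan's SCC algorithm with
   an explicit name/iterator stack.  E.g. (illustrative) for the cycle
   a_1 -> b_2 -> a_1, the use of a_1 seen while walking b_2 is already
   on the SCC stack, so b_2's (and transitively a_1's) low value stays
   at a_1's dfsnum, and both names are popped as a single SCC when the
   walk of a_1 finishes.  */
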
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
			 num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

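  /* E.g. (illustrative): if the order computed above is
     rpo_numbers_temp = { 2, 4, 3 }, the loop leaves rpo_numbers[4] == 1,
     i.e. basic block 4 is the second block in reverse postorder.  */
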
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     VN_TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	*id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
	*id = get_or_alloc_constant_value_id (result);
    }
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}