/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it does not change).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

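/* As a small illustrative sketch (an addition, not part of the
   original comment), consider the SSA cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   Walking the SCC {i_1, i_2} with the optimistic table, the PHI first
   sees 0 as its only non-VN_TOP argument, so i_1 is optimistically
   value numbered to 0; i_2 = i_1 + 0 then simplifies to 0 as well,
   and the next iteration reproduces the same numbers, so the
   optimistic assumption is proven and both names end up with value
   number 0.  */
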
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;
    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;
    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;
    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */
static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */
static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */
static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */
static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
				   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result = vn_reference_op_compute_hash (vro, result);
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
			     vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
	base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && temp.op1 == NULL_TREE
	      && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
	      && host_integerp (DECL_SIZE (temp.op0), 0))
	    temp.op0 = DECL_SIZE (temp.op0);
	  break;
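	  /* Illustrative note (an addition, not from the original
	     sources): given
	       union U { int i; float f; };
	     on a typical 32-bit layout the accesses u.i and u.f both
	     start at bit offset zero with DECL_SIZE 32, so recording
	     the size instead of the field decl lets the two reference
	     ops compare equal and the loads value-number alike.  */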
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  break;
	case STRING_CST: case INTEGER_CST: case COMPLEX_CST:
	case VECTOR_CST: case REAL_CST: case CONSTRUCTOR:
	case VAR_DECL: case PARM_DECL: case CONST_DECL:
	case RESULT_DECL: case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
	size_tree = op->op0;
      else
	size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case CALL_EXPR:
	case ADDR_EXPR:
	  return false;

	/* Record the base objects.  */
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;
	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;
	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;
	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    /* Our union trick, done for offset zero only.  */
	    if (TREE_CODE (field) == INTEGER_CST)
	      ;
	    else if (op->op1
		     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;
	case REALPART_EXPR:
	  break;
	case IMAGPART_EXPR:
	  offset += size;
	  break;
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ???  If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
	  && TYPE_MIN_VALUE (dom))
	aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = NULL_TREE;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
      /* ???  There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
	VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}

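/* Illustrative note (an addition, not from the original sources): if
   an SSA operand valueizes to &a.b, a preceding
   { INDIRECT_REF, ADDR_EXPR } pair in the operand vector is collapsed
   by the function above into the ops for a.b itself, so *&a.b and a
   plain a.b access end up with identical reference ops.  */
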
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	  /* If it transforms from an SSA_NAME to an address, fold with
	     a preceding indirect reference.  */
	  if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
	      && VEC_index (vn_reference_op_s,
			    orig, i - 1)->opcode == INDIRECT_REF)
	    {
	      vn_reference_fold_indirect (&orig, &i);
	      continue;
	    }
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}

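/* Illustrative note (an addition, not from the original sources): for
   a load from a[i_3] where SSA_VAL (i_3) == i_1, valueize_refs
   rewrites the ARRAY_REF index operand to i_1, so the hash table
   lookup can hit a previously recorded access a[i_1] even though the
   two statements use different SSA names.  */
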
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = fold_convert (vr->type, integer_zero_node);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = fold_convert (vr->type, integer_zero_node);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
					       vr->operands, i),
				    VEC_index (vn_reference_op_s, lhs, j)))
	{
	  i--;
	  j--;
	}

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
	 ???  LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
		     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;
  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;
  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;
  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result, unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only things we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */
static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
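
  /* For example (an illustrative sketch, an addition to the original
     comment), in

       # .MEM_3 = VDEF <.MEM_2>
       a.x = b_1;
       ...
       # .MEM_7 = VDEF <.MEM_6>
       a.x = b_1;

     if .MEM_6 value numbers to .MEM_3, the second store changes
     nothing, so its vdef .MEM_7 is value numbered to .MEM_6 below
     instead of representing a new memory state.  */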

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	sameval = def;
      else if (!expressions_equal_p (def, sameval))
	{
	  allsame = false;
	  break;
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));

    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
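/* Illustrative sketch of valueization (hypothetical names, compiled
   out).  As in valueize_expr above, each operand of an expression is
   replaced in place by its current value number so that folding sees
   the most constant form available.  */
#if 0
struct toy_expr
{
  int op[2];	/* Operands, given as names.  */
  int nops;
};

/* VAL maps each name to its value number; a name that is still TOP
   simply maps to itself in this toy.  */
static void
toy_valueize (struct toy_expr *e, const int *val)
{
  int i;
  for (i = 0; i < e->nops; i++)
    e->op[i] = val[e->op[i]];
}
#endif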
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }
  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);
  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
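/* Illustrative sketch of the goal stated in the comment inside
   simplify_binary_expression (hypothetical names, compiled out):
   combine constants *across* statements without expanding non-constant
   expressions.  With x = a + 1 recorded, y = x + 2 folds to a + 3 once
   x's expression is substituted for x.  */
#if 0
struct toy_binop
{
  int var;	/* Name of the variable operand.  */
  int cst;	/* Constant addend.  */
};

/* Given that VAR is known to be INNER.var + INNER.cst, fold
   VAR + CST2 into a single addition.  */
static struct toy_binop
toy_combine_plus (struct toy_binop inner, int cst2)
{
  struct toy_binop res;
  res.var = inner.var;
  res.cst = inner.cst + cst2;	/* (a + 1) + 2 becomes a + 3.  */
  return res;
}
#endif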
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);
  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }
  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }
  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified
	      && dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, stmt, 0, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		fprintf (dump_file, " has constants %d\n",
			 expr_has_constants (simplified));
	      else
		fprintf (dump_file, "\n");
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valuizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }
	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (gimple_assign_single_p (stmt)
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		      changed = visit_unary_op (lhs, stmt);
		      break;
		    case GIMPLE_BINARY_RHS:
		      changed = visit_binary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_reference:
			  /* VOP-less references can go through unary case.  */
			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			case tcc_declaration:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			  break;
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ??? We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ??? We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }

done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
	 VEC_length (tree, scc),
	 sizeof (tree),
	 compare_ops);
}
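/* Illustrative usage sketch (hypothetical, compiled out): compare_ops
   follows the usual qsort contract of returning a negative, zero or
   positive result, so sorting any array by rank looks just like
   sort_scc above.  */
#if 0
#include <stdlib.h>

static int
toy_rank_cmp (const void *pa, const void *pb)
{
  return *(const int *) pa - *(const int *) pb;
}

static void
toy_sort (int *a, unsigned n)
{
  qsort (a, n, sizeof (int), toy_rank_cmp);
}
#endif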
/* Insert the no longer used nary *ENTRY to the current hash.  */

static int
copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
{
  vn_nary_op_t onary = (vn_nary_op_t) *entry;
  size_t size = (sizeof (struct vn_nary_op_s)
		 - sizeof (tree) * (4 - onary->length));
  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
						    size);
  void **slot;
  memcpy (nary, onary, size);
  slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
				   INSERT);
  gcc_assert (!*slot);
  *slot = nary;
  return 1;
}
/* Insert the no longer used phi *ENTRY to the current hash.  */

static int
copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
{
  vn_phi_t ophi = (vn_phi_t) *entry;
  vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
				   INSERT);
  gcc_assert (!*slot);
  *slot = phi;
  return 1;
}
/* Insert the no longer used reference *ENTRY to the current hash.  */

static int
copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
{
  vn_reference_t oref = (vn_reference_t) *entry;
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (current_info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (current_info->references, ref,
				   ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
  return 1;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* As we are value-numbering optimistically we have to
	     clear the expression tables and the simplified expressions
	     in each iteration until we converge.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    VN_INFO (var)->expr = NULL_TREE;
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, copy the contents of the no longer used optimistic
	 table to the valid table.  */
      current_info = valid_info;
      htab_traverse (optimistic_info->nary, copy_nary, NULL);
      htab_traverse (optimistic_info->phis, copy_phis, NULL);
      htab_traverse (optimistic_info->references, copy_references, NULL);
    }
}
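/* Illustrative sketch of the loop in process_scc above (hypothetical,
   compiled out): optimistic value numbering revisits the members of a
   cycle, wiping the per-iteration tables each time, until no value
   number changes.  Termination is guaranteed because a value number
   can only move down a finite lattice.  */
#if 0
/* RECOMPUTE revisits member I and returns nonzero if its value
   number in VN changed.  */
static void
toy_iterate_scc (int *vn, int n, int (*recompute) (int *vn, int i))
{
  int changed = 1;
  while (changed)
    {
      int i;
      changed = 0;
      /* A real implementation clears its optimistic tables here.  */
      for (i = 0; i < n; i++)
	changed |= recompute (vn, i);
    }
}
#endif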
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);
  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);
  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}
      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push (tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
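/* Illustrative sketch (hypothetical, compiled out): the recursive form
   of Tarjan's SCC algorithm that the iterative walk above implements
   with explicit name and iterator stacks to avoid deep recursion.  */
#if 0
#define TOY_N 16
static int toy_succ[TOY_N][TOY_N];	/* Successor lists.  */
static int toy_nsucc[TOY_N];
static int toy_dfsnum[TOY_N];		/* 0 means "not yet visited".  */
static int toy_low[TOY_N];
static int toy_onstack[TOY_N];
static int toy_stack[TOY_N];
static int toy_sp;
static int toy_next_dfs = 1;

static void
toy_dfs (int v)
{
  int i;

  toy_dfsnum[v] = toy_low[v] = toy_next_dfs++;
  toy_stack[toy_sp++] = v;
  toy_onstack[v] = 1;

  for (i = 0; i < toy_nsucc[v]; i++)
    {
      int w = toy_succ[v][i];
      if (!toy_dfsnum[w])
	{
	  toy_dfs (w);
	  if (toy_low[w] < toy_low[v])
	    toy_low[v] = toy_low[w];
	}
      else if (toy_onstack[w] && toy_dfsnum[w] < toy_low[v])
	toy_low[v] = toy_dfsnum[w];
    }

  /* V is the root of an SCC: pop its members, which would be
     processed here, in topological order of the SCC DAG.  */
  if (toy_low[v] == toy_dfsnum[v])
    {
      int w;
      do
	{
	  w = toy_stack[--toy_sp];
	  toy_onstack[w] = 0;
	}
      while (w != v);
    }
}
#endif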
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
/* Initialize the SCCVN data structures.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;
  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);
  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;
  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
			 num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();
  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
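/* Illustrative sketch of the RPO rearrangement in init_scc_vn above
   (hypothetical, compiled out): turning an order array, where
   order[i] is the i'th element, into a rank array, where rank[e] is
   the position of e, is just inverting a permutation.  */
#if 0
static void
toy_invert_order (const int *order, int *rank, int n)
{
  int i;
  for (i = 0; i < n; i++)
    rank[order[i]] = i;
}
#endif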
/* Free the SCCVN data structures.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    {
      if (vno->result)
	{
	  if (TREE_CODE (vno->result) == SSA_NAME)
	    vno->value_id = VN_INFO (vno->result)->value_id;
	  else if (is_gimple_min_invariant (vno->result))
	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
	}
    }
  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    {
      if (vp->result)
	{
	  if (TREE_CODE (vp->result) == SSA_NAME)
	    vp->value_id = VN_INFO (vp->result)->value_id;
	  else if (is_gimple_min_invariant (vp->result))
	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    {
      if (vr->result)
	{
	  if (TREE_CODE (vr->result) == SSA_NAME)
	    vr->value_id = VN_INFO (vr->result)->value_id;
	  else if (is_gimple_min_invariant (vr->result))
	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
	}
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }
  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
  /* Propagate the value ids until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
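/* Illustrative sketch of the propagation loop in run_scc_vn above
   (hypothetical, compiled out): any name whose value is another name
   copies that name's id until the ids reach a fixed point.  */
#if 0
static void
toy_propagate_ids (const int *valnum, int *id, int n)
{
  int changed = 1;
  while (changed)
    {
      int i;
      changed = 0;
      for (i = 0; i < n; i++)
	if (valnum[i] != i && id[i] != id[valnum[i]])
	  {
	    id[i] = id[valnum[i]];
	    changed = 1;
	  }
    }
}
#endif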
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}
/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
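/* Illustrative example of what the predicate above must catch
   (hypothetical, compiled out): integer division traps on a zero
   divisor and, on two's complement targets, on INT_MIN / -1.  */
#if 0
#include <limits.h>

static int
toy_div_may_trap (int a, int b)
{
  return b == 0 || (a == INT_MIN && b == -1);
}
#endif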