1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
48 /* This algorithm is based on the SCC algorithm presented by Keith
49 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
50 (http://citeseer.ist.psu.edu/41805.html). In
51 straight line code, it is equivalent to a regular hash based value
52 numbering that is performed in reverse postorder.
54 For code with cycles, there are two alternatives, both of which
55 require keeping the hashtables separate from the actual list of
56 value numbers for SSA names.
58 1. Iterate value numbering in an RPO walk of the blocks, removing
59 all the entries from the hashtable after each iteration (but
60 keeping the SSA name->value number mapping between iterations).
61 Iterate until it does not change.
63 2. Perform value numbering as part of an SCC walk on the SSA graph,
64 iterating only the cycles in the SSA graph until they do not change
65 (using a separate, optimistic hashtable for value numbering the SCC
68 The second is not just faster in practice (because most SSA graph
69 cycles do not involve all the variables in the graph), it also has
72 One of these nice properties is that when we pop an SCC off the
73 stack, we are guaranteed to have processed all the operands coming from
74 *outside of that SCC*, so we do not need to do anything special to
75 ensure they have value numbers.
77 Another nice property is that the SCC walk is done as part of a DFS
78 of the SSA graph, which makes it easy to perform combining and
79 simplifying operations at the same time.
81 The code below is deliberately written in a way that makes it easy
82 to separate the SCC walk from the other work it does.
84 In order to propagate constants through the code, we track which
85 expressions contain constants, and use those while folding. In
86 theory, we could also track expressions whose value numbers are
87 replaced, in case we end up folding based on expression
90 In order to value number memory, we assign value numbers to vuses.
91 This enables us to note that, for example, stores to the same
92 address of the same value from the same starting memory states are
96 1. We can iterate only the changing portions of the SCC's, but
97 I have not seen an SCC big enough for this to be a win.
98 2. If you differentiate between phi nodes for loops and phi nodes
99 for if-then-else, you can properly consider phi nodes in different
100 blocks for equivalence.
101 3. We could value number vuses in more cases, particularly, whole
105 /* The set of hashtables and alloc_pool's for their items. */
/* NOTE(review): this excerpt is missing lines (embedded numbering jumps
   105 -> 107 -> 112); the struct's hash-table members and the closing
   "} *vn_tables_t;" are not visible here.  Code kept verbatim.  */
107 typedef struct vn_tables_s
112 struct obstack nary_obstack;
113 alloc_pool phis_pool;
114 alloc_pool references_pool;
/* Map from constant tree to its value id, plus the set of value ids
   known to denote constants (queried by value_id_constant_p below).  */
117 static htab_t constant_to_value_id;
118 static bitmap constant_value_ids;
121 /* Valid hashtables storing information we have proven to be
124 static vn_tables_t valid_info;
126 /* Optimistic hashtables storing information we are making assumptions about
127 during iterations. */
129 static vn_tables_t optimistic_info;
131 /* Pointer to the set of hashtables that is currently being used.
132 Should always point to either the optimistic_info, or the
135 static vn_tables_t current_info;
138 /* Reverse post order index for each basic block. */
140 static int *rpo_numbers;
/* Shorthand for the value number recorded for SSA name X.  */
142 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144 /* This represents the top of the VN lattice, which is the universal
149 /* Unique counter for our value ids. */
151 static unsigned int next_value_id;
153 /* Next DFS number and the stack for strongly connected component
156 static unsigned int next_dfs_num;
157 static VEC (tree, heap) *sccstack;
160 DEF_VEC_P(vn_ssa_aux_t);
161 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
163 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
164 are allocated on an obstack for locality reasons, and to free them
165 without looping over the VEC. */
167 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
168 static struct obstack vn_ssa_aux_obstack;
170 /* Return the value numbering information for a given SSA name. */
/* NOTE(review): the function signature (presumably VN_INFO, as used by
   the SSA_VAL macro above) and the return statement are missing from
   this excerpt.  Indexes vn_ssa_aux_table by SSA name version and
   asserts the entry exists.  */
175 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
176 SSA_NAME_VERSION (name));
177 gcc_checking_assert (res);
181 /* Set the value numbering info for a given SSA name to a given
/* NOTE(review): return type, opening brace and closing brace are
   missing from this excerpt.  Overwrites the table slot for NAME's
   version with VALUE.  */
185 VN_INFO_SET (tree name, vn_ssa_aux_t value)
187 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
188 SSA_NAME_VERSION (name), value);
191 /* Initialize the value numbering info for a given SSA name.
192 This should be called just once for every SSA name. */
/* NOTE(review): return type, braces and the final "return newinfo;"
   appear to be missing from this excerpt.  Allocates a zeroed
   vn_ssa_aux on the obstack, grows the table if NAME's version is
   beyond its current length, and stores the new record.  */
195 VN_INFO_GET (tree name)
197 vn_ssa_aux_t newinfo;
199 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
200 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
201 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
202 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
203 SSA_NAME_VERSION (name) + 1);
204 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
205 SSA_NAME_VERSION (name), newinfo);
210 /* Get the representative expression for the SSA_NAME NAME. Returns
211 the representative SSA_NAME if there is no expression associated with it. */
/* NOTE(review): many lines are elided in this excerpt (return type,
   braces, early returns, and the switch's case labels — presumably
   tcc_reference, tcc_unary and tcc_binary given the fold_build1/
   fold_build2 bodies below).  Code kept verbatim.  */
214 vn_get_expr_for (tree name)
216 vn_ssa_aux_t vn = VN_INFO (name);
218 tree expr = NULL_TREE;
/* VN_TOP: no value number computed yet.  */
220 if (vn->valnum == VN_TOP)
223 /* If the value-number is a constant it is the representative
225 if (TREE_CODE (vn->valnum) != SSA_NAME)
228 /* Get to the information of the value of this SSA_NAME. */
229 vn = VN_INFO (vn->valnum);
231 /* If the value-number is a constant it is the representative
233 if (TREE_CODE (vn->valnum) != SSA_NAME)
236 /* Else if we have an expression, return it. */
237 if (vn->expr != NULL_TREE)
240 /* Otherwise use the defining statement to build the expression. */
241 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
243 /* If the value number is a default-definition or a PHI result
245 if (gimple_nop_p (def_stmt)
246 || gimple_code (def_stmt) == GIMPLE_PHI)
249 if (!is_gimple_assign (def_stmt))
252 /* FIXME tuples. This is incomplete and likely will miss some
254 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
/* Only rebuild reference-class RHSs for the cheap wrappers around an
   SSA name (VIEW_CONVERT/REALPART/IMAGPART).  */
257 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
258 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
259 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
260 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
261 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
262 gimple_expr_type (def_stmt),
263 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
267 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
268 gimple_expr_type (def_stmt),
269 gimple_assign_rhs1 (def_stmt));
273 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
274 gimple_expr_type (def_stmt),
275 gimple_assign_rhs1 (def_stmt),
276 gimple_assign_rhs2 (def_stmt));
281 if (expr == NULL_TREE)
284 /* Cache the expression. */
291 /* Free a phi operation structure VP. */
/* NOTE(review): signature and braces missing from this excerpt;
   releases the heap vector of PHI arguments.  */
296 vn_phi_t phi = (vn_phi_t) vp;
297 VEC_free (tree, heap, phi->phiargs);
300 /* Free a reference operation structure VP. */
/* NOTE(review): return type and braces missing from this excerpt;
   releases the heap vector of reference operands.  */
303 free_reference (void *vp)
305 vn_reference_t vr = (vn_reference_t) vp;
306 VEC_free (vn_reference_op_s, heap, vr->operands);
309 /* Hash table equality function for vn_constant_t. */
/* NOTE(review): return type, braces and the "return false;" after the
   hashcode mismatch test are missing from this excerpt.  Cheap
   hashcode comparison first, then full constant+type comparison.  */
312 vn_constant_eq (const void *p1, const void *p2)
314 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
315 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
317 if (vc1->hashcode != vc2->hashcode)
320 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
323 /* Hash table hash function for vn_constant_t. */
/* NOTE(review): return type and braces missing from this excerpt;
   simply returns the precomputed hashcode.  */
326 vn_constant_hash (const void *p1)
328 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
329 return vc1->hashcode;
332 /* Lookup a value id for CONSTANT and return it. If it does not
/* NOTE(review): excerpt is missing the rest of the comment, the
   signature's return type, the slot declaration, and the guard that
   presumably returns 0 when the NO_INSERT lookup finds no slot —
   as written, *slot would be dereferenced unconditionally.  */
336 get_constant_value_id (tree constant)
339 struct vn_constant_s vc;
341 vc.hashcode = vn_hash_constant_with_type (constant);
342 vc.constant = constant;
343 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
344 vc.hashcode, NO_INSERT);
346 return ((vn_constant_t)*slot)->value_id;
350 /* Lookup a value id for CONSTANT, and if it does not exist, create a
351 new one and return it. If it does exist, return it. */
/* NOTE(review): the early "return" at embedded line 365 is presumably
   guarded by an "if (*slot)" that is missing from this excerpt;
   otherwise the allocation path below would be unreachable.  On the
   miss path a new vn_constant_s is allocated, given the next value id,
   stored in the INSERT slot, and recorded in constant_value_ids.  */
354 get_or_alloc_constant_value_id (tree constant)
357 struct vn_constant_s vc;
360 vc.hashcode = vn_hash_constant_with_type (constant);
361 vc.constant = constant;
362 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
363 vc.hashcode, INSERT);
365 return ((vn_constant_t)*slot)->value_id;
367 vcp = XNEW (struct vn_constant_s);
368 vcp->hashcode = vc.hashcode;
369 vcp->constant = constant;
370 vcp->value_id = get_next_value_id ();
371 *slot = (void *) vcp;
372 bitmap_set_bit (constant_value_ids, vcp->value_id);
373 return vcp->value_id;
376 /* Return true if V is a value id for a constant. */
/* NOTE(review): return type and braces missing from this excerpt;
   membership test on the constant_value_ids bitmap.  */
379 value_id_constant_p (unsigned int v)
381 return bitmap_bit_p (constant_value_ids, v);
384 /* Compare two reference operands P1 and P2 for equality. Return true if
385 they are equal, and false otherwise. */
/* NOTE(review): return type and braces missing from this excerpt.
   Two reference operands are equal when opcode, (compatible) type and
   all three operand trees match.  */
388 vn_reference_op_eq (const void *p1, const void *p2)
390 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
391 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
393 return vro1->opcode == vro2->opcode
394 && types_compatible_p (vro1->type, vro2->type)
395 && expressions_equal_p (vro1->op0, vro2->op0)
396 && expressions_equal_p (vro1->op1, vro2->op1)
397 && expressions_equal_p (vro1->op2, vro2->op2);
400 /* Compute the hash for a reference operand VRO1. */
/* NOTE(review): return type, braces, the final "return result;" and
   the null guards presumably preceding each iterative_hash_expr call
   are missing from this excerpt.  Mixes opcode and up to three operand
   expressions into RESULT.  */
403 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
405 result = iterative_hash_hashval_t (vro1->opcode, result);
407 result = iterative_hash_expr (vro1->op0, result);
409 result = iterative_hash_expr (vro1->op1, result);
411 result = iterative_hash_expr (vro1->op2, result);
415 /* Return the hashcode for a given reference operation P1. */
/* NOTE(review): return type and braces missing from this excerpt;
   returns the hashcode cached on the vn_reference_t.  */
418 vn_reference_hash (const void *p1)
420 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
421 return vr1->hashcode;
424 /* Compute a hash for the reference operation VR1 and return it. */
/* NOTE(review): this excerpt elides large parts of the loop body
   (embedded numbering jumps 439 -> 451 and 454 -> 458), including the
   conditions that accumulate and flush the running offset OFF and the
   final "return result;".  The visible logic: MEM_REF/ADDR_EXPR
   operands fold their constant offsets into OFF; an ADDR_EXPR of a
   decl hashes the pointed-to object directly; everything else goes
   through vn_reference_op_compute_hash.  The vuse's SSA version is
   added at the end so lookups can adjust it incrementally (see
   vn_reference_lookup_2).  */
427 vn_reference_compute_hash (const vn_reference_t vr1)
429 hashval_t result = 0;
431 vn_reference_op_t vro;
432 HOST_WIDE_INT off = -1;
435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
437 if (vro->opcode == MEM_REF)
439 else if (vro->opcode != ADDR_EXPR)
451 result = iterative_hash_hashval_t (off, result);
454 && vro->opcode == ADDR_EXPR)
458 tree op = TREE_OPERAND (vro->op0, 0);
459 result = iterative_hash_hashval_t (TREE_CODE (op), result);
460 result = iterative_hash_expr (op, result);
464 result = vn_reference_op_compute_hash (vro, result);
468 result += SSA_NAME_VERSION (vr1->vuse);
473 /* Return true if reference operations P1 and P2 are equivalent. This
474 means they have the same set of operands and vuses. */
/* NOTE(review): many lines are elided (early returns, loop-control
   lines around the do/while, the i/j declarations).  Also note the
   hashcode is compared twice below (embedded lines 483 and 487) — the
   second comparison is redundant with the first; whether intervening
   missing code changes that cannot be confirmed from this excerpt.  */
477 vn_reference_eq (const void *p1, const void *p2)
481 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
482 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
483 if (vr1->hashcode != vr2->hashcode)
486 /* Early out if this is not a hash collision. */
487 if (vr1->hashcode != vr2->hashcode)
490 /* The VOP needs to be the same. */
491 if (vr1->vuse != vr2->vuse)
494 /* If the operands are the same we are done. */
495 if (vr1->operands == vr2->operands)
/* Access sizes and, for integral types, precisions must agree.  */
498 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
501 if (INTEGRAL_TYPE_P (vr1->type)
502 && INTEGRAL_TYPE_P (vr2->type))
504 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
507 else if (INTEGRAL_TYPE_P (vr1->type)
508 && (TYPE_PRECISION (vr1->type)
509 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
511 else if (INTEGRAL_TYPE_P (vr2->type)
512 && (TYPE_PRECISION (vr2->type)
513 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
/* Walk both operand vectors in lock-step; a MEM_REF followed by an
   ADDR_EXPR is canonicalized into a temporary direct operand (tem1/
   tem2) so *&decl compares equal to the plain decl access.  */
520 HOST_WIDE_INT off1 = 0, off2 = 0;
521 vn_reference_op_t vro1, vro2;
522 vn_reference_op_s tem1, tem2;
523 bool deref1 = false, deref2 = false;
524 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
526 if (vro1->opcode == MEM_REF)
532 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
534 if (vro2->opcode == MEM_REF)
542 if (deref1 && vro1->opcode == ADDR_EXPR)
544 memset (&tem1, 0, sizeof (tem1));
545 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
546 tem1.type = TREE_TYPE (tem1.op0);
547 tem1.opcode = TREE_CODE (tem1.op0);
550 if (deref2 && vro2->opcode == ADDR_EXPR)
552 memset (&tem2, 0, sizeof (tem2));
553 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
554 tem2.type = TREE_TYPE (tem2.op0);
555 tem2.opcode = TREE_CODE (tem2.op0);
558 if (!vn_reference_op_eq (vro1, vro2))
/* Both vectors must be fully consumed for the references to match.  */
563 while (VEC_length (vn_reference_op_s, vr1->operands) != i
564 || VEC_length (vn_reference_op_s, vr2->operands) != j);
569 /* Copy the operations present in load/store REF into RESULT, a vector of
570 vn_reference_op_s's. */
/* NOTE(review): this excerpt elides the enclosing while-loop, most
   case labels of the switch over TREE_CODE (ref), braces and "break"
   statements.  Visible structure: a TARGET_MEM_REF is flattened into
   two ops (the TMR itself plus its base); otherwise each level of the
   reference chain is pushed as one vn_reference_op_s, recording a
   constant byte offset in temp.off where it can be computed, and REF
   is peeled one operand at a time until a non-handled-component is
   reached.  */
573 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
575 if (TREE_CODE (ref) == TARGET_MEM_REF)
577 vn_reference_op_s temp;
580 base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
582 base = null_pointer_node;
584 memset (&temp, 0, sizeof (temp));
585 /* We do not care for spurious type qualifications. */
586 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
587 temp.opcode = TREE_CODE (ref);
588 temp.op0 = TMR_INDEX (ref);
589 temp.op1 = TMR_STEP (ref);
590 temp.op2 = TMR_OFFSET (ref);
592 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
594 memset (&temp, 0, sizeof (temp));
595 temp.type = NULL_TREE;
596 temp.opcode = TREE_CODE (base);
598 temp.op1 = TMR_ORIGINAL (ref);
600 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
604 /* For non-calls, store the information that makes up the address. */
608 vn_reference_op_s temp;
610 memset (&temp, 0, sizeof (temp));
611 /* We do not care for spurious type qualifications. */
612 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
613 temp.opcode = TREE_CODE (ref);
618 case MISALIGNED_INDIRECT_REF:
619 temp.op0 = TREE_OPERAND (ref, 1);
622 /* The base address gets its own vn_reference_op_s structure. */
623 temp.op0 = TREE_OPERAND (ref, 1);
624 if (host_integerp (TREE_OPERAND (ref, 1), 0))
625 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
628 /* Record bits and position. */
629 temp.op0 = TREE_OPERAND (ref, 1);
630 temp.op1 = TREE_OPERAND (ref, 2);
633 /* The field decl is enough to unambiguously specify the field,
634 a matching type is not necessary and a mismatching type
635 is always a spurious difference. */
636 temp.type = NULL_TREE;
637 temp.op0 = TREE_OPERAND (ref, 1);
638 temp.op1 = TREE_OPERAND (ref, 2);
/* COMPONENT_REF: fold DECL_FIELD_OFFSET plus the byte part of
   DECL_FIELD_BIT_OFFSET into temp.off when both are constant.  */
640 tree this_offset = component_ref_field_offset (ref);
642 && TREE_CODE (this_offset) == INTEGER_CST)
644 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
645 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
648 = double_int_add (tree_to_double_int (this_offset),
650 (tree_to_double_int (bit_offset),
651 uhwi_to_double_int (BITS_PER_UNIT),
653 if (double_int_fits_in_shwi_p (off))
659 case ARRAY_RANGE_REF:
661 /* Record index as operand. */
662 temp.op0 = TREE_OPERAND (ref, 1);
663 /* Always record lower bounds and element size. */
664 temp.op1 = array_ref_low_bound (ref);
665 temp.op2 = array_ref_element_size (ref);
/* Constant index: off = (index - low_bound) * element_size.  */
666 if (TREE_CODE (temp.op0) == INTEGER_CST
667 && TREE_CODE (temp.op1) == INTEGER_CST
668 && TREE_CODE (temp.op2) == INTEGER_CST)
670 double_int off = tree_to_double_int (temp.op0);
671 off = double_int_add (off,
673 (tree_to_double_int (temp.op1)));
674 off = double_int_mul (off, tree_to_double_int (temp.op2));
675 if (double_int_fits_in_shwi_p (off))
693 if (is_gimple_min_invariant (ref))
699 /* These are only interesting for their operands, their
700 existence, and their type. They will never be the last
701 ref in the chain of references (IE they require an
702 operand), so we don't have to put anything
703 for op* as it will be handled by the iteration */
705 case VIEW_CONVERT_EXPR:
709 /* This is only interesting for its constant offset. */
710 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
715 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the next level of the reference chain.  */
717 if (REFERENCE_CLASS_P (ref)
718 || (TREE_CODE (ref) == ADDR_EXPR
719 && !is_gimple_min_invariant (ref)))
720 ref = TREE_OPERAND (ref, 0);
726 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
727 operands in *OPS, the reference alias set SET and the reference type TYPE.
728 Return true if something useful was produced. */
/* NOTE(review): this excerpt elides the switch's case labels, "goto"/
   "break" statements, several declarations (i, op0_p) and the failure
   returns.  Visible structure: derive the access size from the
   outermost op, then walk all ops accumulating a cumulative bit
   OFFSET and rebuilding a skeletal base expression via op0_p, finally
   filling in the ao_ref fields.  */
731 ao_ref_init_from_vn_reference (ao_ref *ref,
732 alias_set_type set, tree type,
733 VEC (vn_reference_op_s, heap) *ops)
735 vn_reference_op_t op;
737 tree base = NULL_TREE;
739 HOST_WIDE_INT offset = 0;
740 HOST_WIDE_INT max_size;
741 HOST_WIDE_INT size = -1;
742 tree size_tree = NULL_TREE;
743 alias_set_type base_alias_set = -1;
745 /* First get the final access size from just the outermost expression. */
746 op = VEC_index (vn_reference_op_s, ops, 0);
747 if (op->opcode == COMPONENT_REF)
748 size_tree = DECL_SIZE (op->op0);
749 else if (op->opcode == BIT_FIELD_REF)
753 enum machine_mode mode = TYPE_MODE (type);
755 size_tree = TYPE_SIZE (type);
757 size = GET_MODE_BITSIZE (mode);
759 if (size_tree != NULL_TREE)
761 if (!host_integerp (size_tree, 1))
764 size = TREE_INT_CST_LOW (size_tree);
767 /* Initially, maxsize is the same as the accessed element size.
768 In the following it will only grow (or become -1). */
771 /* Compute cumulative bit-offset for nested component-refs and array-refs,
772 and find the ultimate containing object. */
773 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
777 /* These may be in the reference ops, but we cannot do anything
778 sensible with them here. */
780 /* Apart from ADDR_EXPR arguments to MEM_REF. */
781 if (base != NULL_TREE
782 && TREE_CODE (base) == MEM_REF
784 && DECL_P (TREE_OPERAND (op->op0, 0)))
786 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
787 base = TREE_OPERAND (op->op0, 0);
794 offset += pop->off * BITS_PER_UNIT;
802 /* Record the base objects. */
803 case MISALIGNED_INDIRECT_REF:
804 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
806 op0_p = &TREE_OPERAND (*op0_p, 0);
810 base_alias_set = get_deref_alias_set (op->op0);
811 *op0_p = build2 (MEM_REF, op->type,
813 op0_p = &TREE_OPERAND (*op0_p, 0);
824 /* And now the usual component-reference style ops. */
826 offset += tree_low_cst (op->op1, 0);
831 tree field = op->op0;
832 /* We do not have a complete COMPONENT_REF tree here so we
833 cannot use component_ref_field_offset. Do the interesting
837 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
841 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
843 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
848 case ARRAY_RANGE_REF:
850 /* We recorded the lower bound and the element size. */
851 if (!host_integerp (op->op0, 0)
852 || !host_integerp (op->op1, 0)
853 || !host_integerp (op->op2, 0))
857 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
858 hindex -= TREE_INT_CST_LOW (op->op1);
859 hindex *= TREE_INT_CST_LOW (op->op2);
860 hindex *= BITS_PER_UNIT;
872 case VIEW_CONVERT_EXPR:
889 if (base == NULL_TREE)
/* Fill in the ao_ref result fields.  */
892 ref->ref = NULL_TREE;
894 ref->offset = offset;
896 ref->max_size = max_size;
897 ref->ref_alias_set = set;
898 if (base_alias_set != -1)
899 ref->base_alias_set = base_alias_set;
901 ref->base_alias_set = get_alias_set (base);
906 /* Copy the operations present in load/store/call REF into RESULT, a vector of
907 vn_reference_op_s's. */
/* NOTE(review): braces, the declaration of i and the loop body's
   braces are missing from this excerpt.  Pushes one CALL_EXPR op
   (return type, callee, static chain), then flattens every argument
   through copy_reference_ops_from_ref.  */
910 copy_reference_ops_from_call (gimple call,
911 VEC(vn_reference_op_s, heap) **result)
913 vn_reference_op_s temp;
916 /* Copy the type, opcode, function being called and static chain. */
917 memset (&temp, 0, sizeof (temp));
918 temp.type = gimple_call_return_type (call);
919 temp.opcode = CALL_EXPR;
920 temp.op0 = gimple_call_fn (call);
921 temp.op1 = gimple_call_chain (call);
923 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
925 /* Copy the call arguments. As they can be references as well,
926 just chain them together. */
927 for (i = 0; i < gimple_call_num_args (call); ++i)
929 tree callarg = gimple_call_arg (call, i);
930 copy_reference_ops_from_ref (callarg, result);
934 /* Create a vector of vn_reference_op_s structures from REF, a
935 REFERENCE_CLASS_P tree. The vector is not shared. */
/* NOTE(review): braces and the final "return result;" are missing
   from this excerpt.  Fresh (caller-owned) heap vector wrapper around
   copy_reference_ops_from_ref.  */
937 static VEC(vn_reference_op_s, heap) *
938 create_reference_ops_from_ref (tree ref)
940 VEC (vn_reference_op_s, heap) *result = NULL;
942 copy_reference_ops_from_ref (ref, &result);
946 /* Create a vector of vn_reference_op_s structures from CALL, a
947 call statement. The vector is not shared. */
/* NOTE(review): braces and the final "return result;" are missing
   from this excerpt.  Fresh (caller-owned) heap vector wrapper around
   copy_reference_ops_from_call.  */
949 static VEC(vn_reference_op_s, heap) *
950 create_reference_ops_from_call (gimple call)
952 VEC (vn_reference_op_s, heap) *result = NULL;
954 copy_reference_ops_from_call (call, &result);
958 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
959 *I_P to point to the last element of the replacement. */
/* NOTE(review): the i_p parameter line, braces and some declarations
   (addr_base) are missing from this excerpt.  OP is expected to be an
   ADDR_EXPR operand whose predecessor is a MEM_REF (see the callers
   in valueize_refs); the constant part of the address is folded into
   the MEM_REF's offset operand.  */
961 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
964 unsigned int i = *i_p;
965 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
966 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
968 HOST_WIDE_INT addr_offset;
970 /* The only thing we have to do is from &OBJ.foo.bar add the offset
971 from .foo.bar to the preceding MEM_REF offset and replace the
972 address with &OBJ. */
973 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
975 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
976 if (addr_base != op->op0)
/* Sign-extend the MEM_REF offset to its type's precision before
   adding the peeled-off address offset.  */
978 double_int off = tree_to_double_int (mem_op->op0);
979 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
980 off = double_int_add (off, shwi_to_double_int (addr_offset));
981 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
982 op->op0 = build_fold_addr_expr (addr_base);
983 if (host_integerp (mem_op->op0, 0))
984 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
990 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
991 *I_P to point to the last element of the replacement. */
/* NOTE(review): the i_p parameter line, early "return"s, braces and
   some declarations (off, ptr, ptroff) are missing from this excerpt.
   Follows the SSA definition of the MEM_REF's address operand: for an
   ADDR_EXPR of a MEM_REF, or a POINTER_PLUS with constant increment,
   the constant part is folded into the preceding MEM_REF offset and
   the address operand replaced; then recurses/folds again if the new
   operand allows it.  */
993 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
996 unsigned int i = *i_p;
997 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
998 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1000 enum tree_code code;
1003 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1004 if (!is_gimple_assign (def_stmt))
1007 code = gimple_assign_rhs_code (def_stmt);
1008 if (code != ADDR_EXPR
1009 && code != POINTER_PLUS_EXPR)
1012 off = tree_to_double_int (mem_op->op0);
1013 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1015 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1016 from .foo.bar to the preceding MEM_REF offset and replace the
1017 address with &OBJ. */
1018 if (code == ADDR_EXPR)
1020 tree addr, addr_base;
1021 HOST_WIDE_INT addr_offset;
1023 addr = gimple_assign_rhs1 (def_stmt);
1024 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1027 || TREE_CODE (addr_base) != MEM_REF)
1030 off = double_int_add (off, shwi_to_double_int (addr_offset));
1031 off = double_int_add (off, mem_ref_offset (addr_base));
1032 op->op0 = TREE_OPERAND (addr_base, 0);
/* POINTER_PLUS_EXPR path: fold a constant pointer increment.  */
1037 ptr = gimple_assign_rhs1 (def_stmt);
1038 ptroff = gimple_assign_rhs2 (def_stmt);
1039 if (TREE_CODE (ptr) != SSA_NAME
1040 || TREE_CODE (ptroff) != INTEGER_CST)
1043 off = double_int_add (off, tree_to_double_int (ptroff));
1047 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1048 if (host_integerp (mem_op->op0, 0))
1049 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1052 if (TREE_CODE (op->op0) == SSA_NAME)
1054 op->op0 = SSA_VAL (op->op0);
1055 if (TREE_CODE (op->op0) != SSA_NAME)
1056 op->opcode = TREE_CODE (op->op0);
/* Re-attempt folding with the updated address operand.  */
1060 if (TREE_CODE (op->op0) == SSA_NAME)
1061 vn_reference_maybe_forwprop_address (ops, i_p)
1062 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1063 vn_reference_fold_indirect (ops, i_p);
1066 /* Optimize the reference REF to a constant if possible or return
1067 NULL_TREE if not. */
/* NOTE(review): braces, several conditions (the "if (anyconst)" guard
   presumably wrapping the build_call_expr, the STRIP_USELESS/NOP
   lines) and the final "return NULL_TREE;" are missing from this
   excerpt.  Two visible simplifications: (1) fold a builtin call
   whose one-or-two arguments are constants; (2) fold a constant-index
   read from a STRING_CST into the character constant.  */
1070 fully_constant_vn_reference_p (vn_reference_t ref)
1072 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1073 vn_reference_op_t op;
1075 /* Try to simplify the translated expression if it is
1076 a call to a builtin function with at most two arguments. */
1077 op = VEC_index (vn_reference_op_s, operands, 0);
1078 if (op->opcode == CALL_EXPR
1079 && TREE_CODE (op->op0) == ADDR_EXPR
1080 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1081 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1082 && VEC_length (vn_reference_op_s, operands) >= 2
1083 && VEC_length (vn_reference_op_s, operands) <= 3)
1085 vn_reference_op_t arg0, arg1 = NULL;
1086 bool anyconst = false;
1087 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1088 if (VEC_length (vn_reference_op_s, operands) > 2)
1089 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1090 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1091 || (arg0->opcode == ADDR_EXPR
1092 && is_gimple_min_invariant (arg0->op0)))
1095 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1096 || (arg1->opcode == ADDR_EXPR
1097 && is_gimple_min_invariant (arg1->op0))))
1101 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1104 arg1 ? arg1->op0 : NULL);
1106 && TREE_CODE (folded) == NOP_EXPR)
1107 folded = TREE_OPERAND (folded, 0);
1109 && is_gimple_min_invariant (folded))
1114 /* Simplify reads from constant strings. */
1115 else if (op->opcode == ARRAY_REF
1116 && TREE_CODE (op->op0) == INTEGER_CST
1117 && integer_zerop (op->op1)
1118 && VEC_length (vn_reference_op_s, operands) == 2)
1120 vn_reference_op_t arg0;
1121 arg0 = VEC_index (vn_reference_op_s, operands, 1);
/* Only fold single-byte integer-mode reads within the string's
   bounds.  */
1122 if (arg0->opcode == STRING_CST
1123 && (TYPE_MODE (op->type)
1124 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1125 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1126 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1127 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1128 return build_int_cst_type (op->type,
1129 (TREE_STRING_POINTER (arg0->op0)
1130 [TREE_INT_CST_LOW (op->op0)]));
1136 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1137 structures into their value numbers. This is done in-place, and
1138 the vector passed in is returned. */
/* NOTE(review): braces, the declaration of i, parts of two else-if
   conditions (embedded lines 1164 and 1169), the off-store line after
   the fits_in_shwi check, and the final "return orig;" are missing
   from this excerpt.  Replaces op0/op1/op2 SSA names with SSA_VAL,
   then opportunistically folds *&-style patterns with a preceding
   MEM_REF, and recomputes the constant offset of an ARRAY_REF whose
   index just became constant.  */
1140 static VEC (vn_reference_op_s, heap) *
1141 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1143 vn_reference_op_t vro;
1146 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
1148 if (vro->opcode == SSA_NAME
1149 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1151 vro->op0 = SSA_VAL (vro->op0);
1152 /* If it transforms from an SSA_NAME to a constant, update
1154 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1155 vro->opcode = TREE_CODE (vro->op0);
1157 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1158 vro->op1 = SSA_VAL (vro->op1);
1159 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1160 vro->op2 = SSA_VAL (vro->op2);
1161 /* If it transforms from an SSA_NAME to an address, fold with
1162 a preceding indirect reference. */
1165 && TREE_CODE (vro->op0) == ADDR_EXPR
1166 && VEC_index (vn_reference_op_s,
1167 orig, i - 1)->opcode == MEM_REF)
1168 vn_reference_fold_indirect (&orig, &i);
1170 && vro->opcode == SSA_NAME
1171 && VEC_index (vn_reference_op_s,
1172 orig, i - 1)->opcode == MEM_REF)
1173 vn_reference_maybe_forwprop_address (&orig, &i);
1174 /* If it transforms a non-constant ARRAY_REF into a constant
1175 one, adjust the constant offset. */
1176 else if (vro->opcode == ARRAY_REF
1178 && TREE_CODE (vro->op0) == INTEGER_CST
1179 && TREE_CODE (vro->op1) == INTEGER_CST
1180 && TREE_CODE (vro->op2) == INTEGER_CST)
1182 double_int off = tree_to_double_int (vro->op0);
1183 off = double_int_add (off,
1185 (tree_to_double_int (vro->op1)));
1186 off = double_int_mul (off, tree_to_double_int (vro->op2));
1187 if (double_int_fits_in_shwi_p (off))
/* Shared scratch vector reused by the valueize_shared_* helpers below
   to avoid reallocating per lookup; callers must not retain it.  */
1195 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1197 /* Create a vector of vn_reference_op_s structures from REF, a
1198 REFERENCE_CLASS_P tree. The vector is shared among all callers of
/* NOTE(review): the opening brace and a probable null-REF early
   return are missing from this excerpt.  */
1201 static VEC(vn_reference_op_s, heap) *
1202 valueize_shared_reference_ops_from_ref (tree ref)
1206 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1207 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1208 shared_lookup_references = valueize_refs (shared_lookup_references);
1209 return shared_lookup_references;
1212 /* Create a vector of vn_reference_op_s structures from CALL, a
1213 call statement. The vector is shared among all callers of
/* NOTE(review): the opening brace and a probable null-CALL early
   return are missing from this excerpt.  Mirrors the _from_ref
   variant above using copy_reference_ops_from_call.  */
1216 static VEC(vn_reference_op_s, heap) *
1217 valueize_shared_reference_ops_from_call (gimple call)
1221 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1222 copy_reference_ops_from_call (call, &shared_lookup_references);
1223 shared_lookup_references = valueize_refs (shared_lookup_references);
1224 return shared_lookup_references;
1227 /* Lookup a SCCVN reference operation VR in the current hash table.
1228 Returns the resulting value number if it exists in the hash table,
1229 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1230 vn_reference_t stored in the hashtable if something is found. */
/* NOTE(review): the slot/hash declarations, the NO_INSERT arguments,
   the final "return NULL_TREE;" and the guards around slot/vnresult
   are missing from this excerpt.  During optimistic iteration a miss
   in the optimistic table falls back to the valid table.  */
1233 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1238 hash = vr->hashcode;
1239 slot = htab_find_slot_with_hash (current_info->references, vr,
1241 if (!slot && current_info == optimistic_info)
1242 slot = htab_find_slot_with_hash (valid_info->references, vr,
1247 *vnresult = (vn_reference_t)*slot;
1248 return ((vn_reference_t)*slot)->result;
/* When non-NULL, records the last vuse visited during a vuse walk.  */
1254 static tree *last_vuse_ptr;
1256 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1257 with the current VUSE and performs the expression lookup. */
/* NOTE(review): the slot/hash declarations, NO_INSERT arguments, the
   guard around last_vuse_ptr, and the found/not-found returns are
   missing from this excerpt.  The hash is updated incrementally:
   vn_reference_compute_hash adds SSA_NAME_VERSION (vuse) last, so the
   old version is subtracted and the new (valueized) one added.  */
1260 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1262 vn_reference_t vr = (vn_reference_t)vr_;
1267 *last_vuse_ptr = vuse;
1269 /* Fixup vuse and hash. */
1271 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1272 vr->vuse = SSA_VAL (vuse);
1274 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1276 hash = vr->hashcode;
1277 slot = htab_find_slot_with_hash (current_info->references, vr,
1279 if (!slot && current_info == optimistic_info)
1280 slot = htab_find_slot_with_hash (valid_info->references, vr,
1288 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1289 from the statement defining VUSE and if not successful tries to
1290 translate *REFP and VR_ through an aggregate copy at the definition
1294 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1296 vn_reference_t vr = (vn_reference_t)vr_;
1297 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1300 HOST_WIDE_INT offset, maxsize;
1302 /* First try to disambiguate after value-replacing in the definitions LHS. */
1303 if (is_gimple_assign (def_stmt))
1305 tree lhs = gimple_assign_lhs (def_stmt);
1307 VEC (vn_reference_op_s, heap) *operands = NULL;
1309 copy_reference_ops_from_ref (lhs, &operands);
1310 operands = valueize_refs (operands);
1311 if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
1312 TREE_TYPE (lhs), operands))
1313 res = refs_may_alias_p_1 (ref, &ref1, true);
1314 VEC_free (vn_reference_op_s, heap, operands);
1319 base = ao_ref_base (ref);
1320 offset = ref->offset;
1321 maxsize = ref->max_size;
1323 /* If we cannot constrain the size of the reference we cannot
1324 test if anything kills it. */
1328 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1329 from that defintion.
1331 if (is_gimple_reg_type (vr->type)
1332 && is_gimple_call (def_stmt)
1333 && (fndecl = gimple_call_fndecl (def_stmt))
1334 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1335 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1336 && integer_zerop (gimple_call_arg (def_stmt, 1))
1337 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1338 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1340 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1342 HOST_WIDE_INT offset2, size2, maxsize2;
1343 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1344 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1345 if ((unsigned HOST_WIDE_INT)size2 / 8
1346 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1347 && operand_equal_p (base, base2, 0)
1348 && offset2 <= offset
1349 && offset2 + size2 >= offset + maxsize)
1351 tree val = fold_convert (vr->type, integer_zero_node);
1352 unsigned int value_id = get_or_alloc_constant_value_id (val);
1353 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1354 VEC_copy (vn_reference_op_s,
1355 heap, vr->operands),
1360 /* 2) Assignment from an empty CONSTRUCTOR. */
1361 else if (is_gimple_reg_type (vr->type)
1362 && gimple_assign_single_p (def_stmt)
1363 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1364 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1367 HOST_WIDE_INT offset2, size2, maxsize2;
1368 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1369 &offset2, &size2, &maxsize2);
1370 if (operand_equal_p (base, base2, 0)
1371 && offset2 <= offset
1372 && offset2 + size2 >= offset + maxsize)
1374 tree val = fold_convert (vr->type, integer_zero_node);
1375 unsigned int value_id = get_or_alloc_constant_value_id (val);
1376 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1377 VEC_copy (vn_reference_op_s,
1378 heap, vr->operands),
1383 /* For aggregate copies translate the reference through them if
1384 the copy kills ref. */
1385 else if (gimple_assign_single_p (def_stmt)
1386 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1387 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1388 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1391 HOST_WIDE_INT offset2, size2, maxsize2;
1393 VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
1394 vn_reference_op_t vro;
1397 /* See if the assignment kills REF. */
1398 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1399 &offset2, &size2, &maxsize2);
1400 if (!operand_equal_p (base, base2, 0)
1402 || offset2 + size2 < offset + maxsize)
1405 /* Find the common base of ref and the lhs. */
1406 copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
1407 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1408 j = VEC_length (vn_reference_op_s, lhs) - 1;
1409 while (j >= 0 && i >= 0
1410 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1412 VEC_index (vn_reference_op_s, lhs, j)))
1418 VEC_free (vn_reference_op_s, heap, lhs);
1419 /* i now points to the first additional op.
1420 ??? LHS may not be completely contained in VR, one or more
1421 VIEW_CONVERT_EXPRs could be in its way. We could at least
1422 try handling outermost VIEW_CONVERT_EXPRs. */
1426 /* Now re-write REF to be based on the rhs of the assignment. */
1427 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1428 /* We need to pre-pend vr->operands[0..i] to rhs. */
1429 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1430 > VEC_length (vn_reference_op_s, vr->operands))
1432 VEC (vn_reference_op_s, heap) *old = vr->operands;
1433 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1434 i + 1 + VEC_length (vn_reference_op_s, rhs));
1435 if (old == shared_lookup_references
1436 && vr->operands != old)
1437 shared_lookup_references = NULL;
1440 VEC_truncate (vn_reference_op_s, vr->operands,
1441 i + 1 + VEC_length (vn_reference_op_s, rhs));
1442 for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
1443 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1444 VEC_free (vn_reference_op_s, heap, rhs);
1445 vr->hashcode = vn_reference_compute_hash (vr);
1447 /* Adjust *ref from the new operands. */
1448 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1450 /* This can happen with bitfields. */
1451 if (ref->size != r.size)
1455 /* Do not update last seen VUSE after translating. */
1456 last_vuse_ptr = NULL;
1458 /* Keep looking for the adjusted *REF / VR pair. */
1462 /* Bail out and stop walking. */
1466 /* Lookup a reference operation by it's parts, in the current hash table.
1467 Returns the resulting value number if it exists in the hash table,
1468 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1469 vn_reference_t stored in the hashtable if something is found. */
1472 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1473 VEC (vn_reference_op_s, heap) *operands,
1474 vn_reference_t *vnresult, bool maywalk)
1476 struct vn_reference_s vr1;
1484 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Valueize a copy of OPERANDS in the shared lookup buffer so the
   caller's vector is left untouched.  */
1485 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1486 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1487 VEC_length (vn_reference_op_s, operands));
1488 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1489 VEC_address (vn_reference_op_s, operands),
1490 sizeof (vn_reference_op_s)
1491 * VEC_length (vn_reference_op_s, operands));
1492 vr1.operands = operands = shared_lookup_references
1493 = valueize_refs (shared_lookup_references);
1496 vr1.hashcode = vn_reference_compute_hash (&vr1);
1497 if ((cst = fully_constant_vn_reference_p (&vr1)))
1500 vn_reference_lookup_1 (&vr1, vnresult);
1506 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1508 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1509 vn_reference_lookup_2,
1510 vn_reference_lookup_3, &vr1);
/* The walk may have replaced the operand vector (see
   vn_reference_lookup_3); free the replacement if so.  */
1511 if (vr1.operands != operands)
1512 VEC_free (vn_reference_op_s, heap, vr1.operands);
1516 return (*vnresult)->result;
1521 /* Lookup OP in the current hash table, and return the resulting value
1522 number if it exists in the hash table. Return NULL_TREE if it does
1523 not exist in the hash table or if the result field of the structure
1524 was NULL.. VNRESULT will be filled in with the vn_reference_t
1525 stored in the hashtable if one exists. */
1528 vn_reference_lookup (tree op, tree vuse, bool maywalk,
1529 vn_reference_t *vnresult)
1531 VEC (vn_reference_op_s, heap) *operands;
1532 struct vn_reference_s vr1;
1538 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1539 vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1540 vr1.type = TREE_TYPE (op);
1541 vr1.set = get_alias_set (op);
1542 vr1.hashcode = vn_reference_compute_hash (&vr1);
1543 if ((cst = fully_constant_vn_reference_p (&vr1)))
/* Walk the virtual use-def chain to find an earlier store providing
   the value (presumably gated on MAYWALK — the guard line is not
   visible here).  */
1549 vn_reference_t wvnresult;
1551 ao_ref_init (&r, op);
1553 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1554 vn_reference_lookup_2,
1555 vn_reference_lookup_3, &vr1);
/* The walk may have replaced the shared operand vector; free the
   replacement if so.  */
1556 if (vr1.operands != operands)
1557 VEC_free (vn_reference_op_s, heap, vr1.operands);
1561 *vnresult = wvnresult;
1562 return wvnresult->result;
1568 return vn_reference_lookup_1 (&vr1, vnresult);
1572 /* Insert OP into the current hash table with a value number of
1573 RESULT, and return the resulting reference structure we created. */
1576 vn_reference_insert (tree op, tree result, tree vuse)
1581 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1582 if (TREE_CODE (result) == SSA_NAME)
1583 vr1->value_id = VN_INFO (result)->value_id;
1585 vr1->value_id = get_or_alloc_constant_value_id (result);
1586 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1587 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1588 vr1->type = TREE_TYPE (op);
1589 vr1->set = get_alias_set (op);
1590 vr1->hashcode = vn_reference_compute_hash (vr1);
/* Record the valueized RESULT, not RESULT itself.  */
1591 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1593 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1596 /* Because we lookup stores using vuses, and value number failures
1597 using the vdefs (see visit_reference_op_store for how and why),
1598 it's possible that on failure we may try to insert an already
1599 inserted store. This is not wrong, there is no ssa name for a
1600 store that we could use as a differentiator anyway. Thus, unlike
1601 the other lookup functions, you cannot gcc_assert (!*slot)
1604 /* But free the old slot in case of a collision. */
1606 free_reference (*slot);
1612 /* Insert a reference by it's pieces into the current hash table with
1613 a value number of RESULT. Return the resulting reference
1614 structure we created. */
1617 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1618 VEC (vn_reference_op_s, heap) *operands,
1619 tree result, unsigned int value_id)
1625 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1626 vr1->value_id = value_id;
1627 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Takes ownership of OPERANDS; they are valueized in place.  */
1628 vr1->operands = valueize_refs (operands);
1631 vr1->hashcode = vn_reference_compute_hash (vr1);
1632 if (result && TREE_CODE (result) == SSA_NAME)
1633 result = SSA_VAL (result);
1634 vr1->result = result;
1636 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1639 /* At this point we should have all the things inserted that we have
1640 seen before, and we should never try inserting something that
/* NOTE(review): the free_reference below appears unreachable after the
   assert; presumably it sits under a guard on a line not visible here —
   confirm against the full source.  */
1642 gcc_assert (!*slot);
1644 free_reference (*slot);
1650 /* Compute and return the hash value for nary operation VBO1. */
1653 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
/* Valueize all SSA-name operands in place before hashing so equal
   values hash equally.  */
1658 for (i = 0; i < vno1->length; ++i)
1659 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1660 vno1->op[i] = SSA_VAL (vno1->op[i]);
/* Canonicalize commutative binary operations by operand order; note
   this mutates VNO1.  */
1662 if (vno1->length == 2
1663 && commutative_tree_code (vno1->opcode)
1664 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1666 tree temp = vno1->op[0];
1667 vno1->op[0] = vno1->op[1];
1671 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1672 for (i = 0; i < vno1->length; ++i)
1673 hash = iterative_hash_expr (vno1->op[i], hash);
1678 /* Return the computed hashcode for nary operation P1. */
1681 vn_nary_op_hash (const void *p1)
1683 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1684 return vno1->hashcode;
1687 /* Compare nary operations P1 and P2 and return true if they are
1691 vn_nary_op_eq (const void *p1, const void *p2)
1693 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1694 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
/* Cached hashcodes give a cheap early reject.  */
1697 if (vno1->hashcode != vno2->hashcode)
1700 if (vno1->opcode != vno2->opcode
1701 || !types_compatible_p (vno1->type, vno2->type))
1704 for (i = 0; i < vno1->length; ++i)
1705 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1711 /* Lookup a n-ary operation by its pieces and return the resulting value
1712 number if it exists in the hash table. Return NULL_TREE if it does
1713 not exist in the hash table or if the result field of the operation
1714 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1718 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1719 tree type, tree op0, tree op1, tree op2,
1720 tree op3, vn_nary_op_t *vnresult)
1723 struct vn_nary_op_s vno1;
1727 vno1.length = length;
1733 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
/* Probe current table, falling back to the valid table during an
   optimistic iteration.  */
1734 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1736 if (!slot && current_info == optimistic_info)
1737 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1742 *vnresult = (vn_nary_op_t)*slot;
1743 return ((vn_nary_op_t)*slot)->result;
1746 /* Lookup OP in the current hash table, and return the resulting value
1747 number if it exists in the hash table. Return NULL_TREE if it does
1748 not exist in the hash table or if the result field of the operation
1749 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1753 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1756 struct vn_nary_op_s vno1;
/* Build the lookup key from the tree expression's code and operands.  */
1761 vno1.opcode = TREE_CODE (op);
1762 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1763 vno1.type = TREE_TYPE (op);
1764 for (i = 0; i < vno1.length; ++i)
1765 vno1.op[i] = TREE_OPERAND (op, i);
1766 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1767 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1769 if (!slot && current_info == optimistic_info)
1770 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1775 *vnresult = (vn_nary_op_t)*slot;
1776 return ((vn_nary_op_t)*slot)->result;
1779 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1780 value number if it exists in the hash table. Return NULL_TREE if
1781 it does not exist in the hash table. VNRESULT will contain the
1782 vn_nary_op_t from the hashtable if it exists. */
1785 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1788 struct vn_nary_op_s vno1;
/* Build the key from the assignment's rhs code and operands 1..N.  */
1793 vno1.opcode = gimple_assign_rhs_code (stmt);
1794 vno1.length = gimple_num_ops (stmt) - 1;
1795 vno1.type = gimple_expr_type (stmt);
1796 for (i = 0; i < vno1.length; ++i)
1797 vno1.op[i] = gimple_op (stmt, i + 1);
/* For these codes the interesting operand is wrapped inside the
   expression tree, so strip one level.  */
1798 if (vno1.opcode == REALPART_EXPR
1799 || vno1.opcode == IMAGPART_EXPR
1800 || vno1.opcode == VIEW_CONVERT_EXPR)
1801 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1802 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1803 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1805 if (!slot && current_info == optimistic_info)
1806 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1811 *vnresult = (vn_nary_op_t)*slot;
1812 return ((vn_nary_op_t)*slot)->result;
1815 /* Insert a n-ary operation into the current hash table using it's
1816 pieces. Return the vn_nary_op_t structure we created and put in
1820 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1821 tree type, tree op0,
1822 tree op1, tree op2, tree op3,
1824 unsigned int value_id)
/* Allocate only as many trailing op slots as LENGTH requires (the
   struct declares room for 4).  */
1829 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1830 (sizeof (struct vn_nary_op_s)
1831 - sizeof (tree) * (4 - length)));
1832 vno1->value_id = value_id;
1833 vno1->opcode = code;
1834 vno1->length = length;
1844 vno1->result = result;
1845 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1846 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1848 gcc_assert (!*slot);
1855 /* Insert OP into the current hash table with a value number of
1856 RESULT. Return the vn_nary_op_t structure we created and put in
1860 vn_nary_op_insert (tree op, tree result)
1862 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
/* Allocate only as many trailing op slots as LENGTH requires.  */
1867 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1868 (sizeof (struct vn_nary_op_s)
1869 - sizeof (tree) * (4 - length)))
1870 vno1->value_id = VN_INFO (result)->value_id;
1871 vno1->opcode = TREE_CODE (op);
1872 vno1->length = length;
1873 vno1->type = TREE_TYPE (op);
1874 for (i = 0; i < vno1->length; ++i)
1875 vno1->op[i] = TREE_OPERAND (op, i);
1876 vno1->result = result;
1877 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1878 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1880 gcc_assert (!*slot);
1886 /* Insert the rhs of STMT into the current hash table with a value number of
1890 vn_nary_op_insert_stmt (gimple stmt, tree result)
1892 unsigned length = gimple_num_ops (stmt) - 1;
1897 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1898 (sizeof (struct vn_nary_op_s)
1899 - sizeof (tree) * (4 - length)))
1900 vno1->value_id = VN_INFO (result)->value_id;
1901 vno1->opcode = gimple_assign_rhs_code (stmt);
1902 vno1->length = length;
1903 vno1->type = gimple_expr_type (stmt);
1904 for (i = 0; i < vno1->length; ++i)
1905 vno1->op[i] = gimple_op (stmt, i + 1);
/* Mirror the stripping done in vn_nary_op_lookup_stmt so lookup and
   insert agree on the key.  */
1906 if (vno1->opcode == REALPART_EXPR
1907 || vno1->opcode == IMAGPART_EXPR
1908 || vno1->opcode == VIEW_CONVERT_EXPR)
1909 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1910 vno1->result = result;
1911 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1912 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1914 gcc_assert (!*slot);
1920 /* Compute a hashcode for PHI operation VP1 and return it. */
1922 static inline hashval_t
1923 vn_phi_compute_hash (vn_phi_t vp1)
/* Seed the hash with the PHI's basic-block index.  */
1930 result = vp1->block->index;
1932 /* If all PHI arguments are constants we need to distinguish
1933 the PHI node via its type. */
1934 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1935 result += (INTEGRAL_TYPE_P (type)
1936 + (INTEGRAL_TYPE_P (type)
1937 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
/* VN_TOP arguments are excluded from the hash (they compare equal to
   anything in vn_phi_eq, so they must not perturb it).  */
1939 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1941 if (phi1op == VN_TOP)
1943 result = iterative_hash_expr (phi1op, result);
1949 /* Return the computed hashcode for phi operation P1. */
1952 vn_phi_hash (const void *p1)
1954 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1955 return vp1->hashcode;
1958 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1961 vn_phi_eq (const void *p1, const void *p2)
1963 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1964 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
/* Cheap hashcode early reject.  */
1966 if (vp1->hashcode != vp2->hashcode)
/* Only PHIs in the same block can be equal.  */
1969 if (vp1->block == vp2->block)
1974 /* If the PHI nodes do not have compatible types
1975 they are not the same. */
1976 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1977 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1980 /* Any phi in the same block will have it's arguments in the
1981 same edge order, because of how we store phi nodes. */
1982 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1984 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP is equivalent to everything, so skip such positions.  */
1985 if (phi1op == VN_TOP || phi2op == VN_TOP)
1987 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by vn_phi_lookup to hold the valueized PHI
   arguments, avoiding a heap allocation per lookup.  */
1995 static VEC(tree, heap) *shared_lookup_phiargs;
1997 /* Lookup PHI in the current hash table, and return the resulting
1998 value number if it exists in the hash table. Return NULL_TREE if
1999 it does not exist in the hash table. */
2002 vn_phi_lookup (gimple phi)
2005 struct vn_phi_s vp1;
2008 VEC_truncate (tree, shared_lookup_phiargs, 0);
2010 /* Canonicalize the SSA_NAME's to their value number. */
2011 for (i = 0; i < gimple_phi_num_args (phi); i++)
2013 tree def = PHI_ARG_DEF (phi, i);
2014 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2015 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2017 vp1.phiargs = shared_lookup_phiargs;
2018 vp1.block = gimple_bb (phi);
2019 vp1.hashcode = vn_phi_compute_hash (&vp1);
/* Probe current table, falling back to the valid table during an
   optimistic iteration.  */
2020 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2022 if (!slot && current_info == optimistic_info)
2023 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2027 return ((vn_phi_t)*slot)->result;
2030 /* Insert PHI into the current hash table with a value number of
2034 vn_phi_insert (gimple phi, tree result)
2037 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2039 VEC (tree, heap) *args = NULL;
2041 /* Canonicalize the SSA_NAME's to their value number. */
2042 for (i = 0; i < gimple_phi_num_args (phi); i++)
2044 tree def = PHI_ARG_DEF (phi, i);
2045 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2046 VEC_safe_push (tree, heap, args, def);
2048 vp1->value_id = VN_INFO (result)->value_id;
2049 vp1->phiargs = args;
2050 vp1->block = gimple_bb (phi);
2051 vp1->result = result;
2052 vp1->hashcode = vn_phi_compute_hash (vp1);
2054 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2057 /* Because we iterate over phi operations more than once, it's
2058 possible the slot might already exist here, hence no assert.*/
2064 /* Print set of components in strongly connected component SCC to OUT. */
2067 print_scc (FILE *out, VEC (tree, heap) *scc)
2072 fprintf (out, "SCC consists of: ");
2073 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2075 print_generic_expr (out, var, 0);
2078 fprintf (out, "\n");
2081 /* Set the value number of FROM to TO, return true if it has changed
2085 set_ssa_val_to (tree from, tree to)
/* Never use an SSA name occurring in an abnormal PHI as a value
   number (presumably TO is reset to FROM here; the line is not
   visible in this excerpt).  */
2090 && TREE_CODE (to) == SSA_NAME
2091 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2094 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2095 and invariants. So assert that here. */
2096 gcc_assert (to != NULL_TREE
2098 || TREE_CODE (to) == SSA_NAME
2099 || is_gimple_min_invariant (to)));
2101 if (dump_file && (dump_flags & TDF_DETAILS))
2103 fprintf (dump_file, "Setting value number of ");
2104 print_generic_expr (dump_file, from, 0);
2105 fprintf (dump_file, " to ");
2106 print_generic_expr (dump_file, to, 0);
2109 currval = SSA_VAL (from);
/* Only record a change when the new value differs from the current one;
   OEP_PURE_SAME treats equal pure expressions as unchanged.  */
2111 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2113 VN_INFO (from)->valnum = to;
2114 if (dump_file && (dump_flags & TDF_DETAILS))
2115 fprintf (dump_file, " (changed)\n");
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2119 fprintf (dump_file, "\n");
2123 /* Set all definitions in STMT to value number to themselves.
2124 Return true if a value number changed. */
2127 defs_to_varying (gimple stmt)
2129 bool changed = false;
2133 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2135 tree def = DEF_FROM_PTR (defp);
/* A def valued as itself is the "varying" representation here.  */
2137 VN_INFO (def)->use_processed = true;
2138 changed |= set_ssa_val_to (def, def);
2143 static bool expr_has_constants (tree expr);
2144 static tree valueize_expr (tree expr);
2146 /* Visit a copy between LHS and RHS, return true if the value number
2150 visit_copy (tree lhs, tree rhs)
2152 /* Follow chains of copies to their destination. */
2153 while (TREE_CODE (rhs) == SSA_NAME
2154 && SSA_VAL (rhs) != rhs)
2155 rhs = SSA_VAL (rhs);
2157 /* The copy may have a more interesting constant filled expression
2158 (we don't, since we know our RHS is just an SSA name). */
/* Propagate cached expression/constant info from RHS to LHS.  */
2159 if (TREE_CODE (rhs) == SSA_NAME)
2161 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2162 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2165 return set_ssa_val_to (lhs, rhs);
2168 /* Visit a unary operator RHS, value number it, and return true if the
2169 value number of LHS has changed as a result. */
2172 visit_unary_op (tree lhs, gimple stmt)
2174 bool changed = false;
2175 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2179 changed = set_ssa_val_to (lhs, result);
/* Not seen before: LHS values itself and the expression is recorded.  */
2183 changed = set_ssa_val_to (lhs, lhs);
2184 vn_nary_op_insert_stmt (stmt, lhs);
2190 /* Visit a binary operator RHS, value number it, and return true if the
2191 value number of LHS has changed as a result. */
2194 visit_binary_op (tree lhs, gimple stmt)
2196 bool changed = false;
2197 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2201 changed = set_ssa_val_to (lhs, result);
/* Not seen before: LHS values itself and the expression is recorded.  */
2205 changed = set_ssa_val_to (lhs, lhs);
2206 vn_nary_op_insert_stmt (stmt, lhs);
2212 /* Visit a call STMT storing into LHS. Return true if the value number
2213 of the LHS has changed as a result. */
2216 visit_reference_op_call (tree lhs, gimple stmt)
2218 bool changed = false;
2219 struct vn_reference_s vr1;
2221 tree vuse = gimple_vuse (stmt);
/* Key the call on its valueized VUSE plus its operands.  */
2223 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2224 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2225 vr1.type = gimple_expr_type (stmt);
2227 vr1.hashcode = vn_reference_compute_hash (&vr1);
2228 result = vn_reference_lookup_1 (&vr1, NULL);
2231 changed = set_ssa_val_to (lhs, result);
2232 if (TREE_CODE (result) == SSA_NAME
2233 && VN_INFO (result)->has_constants)
2234 VN_INFO (lhs)->has_constants = true;
/* Not found: value LHS to itself and insert a fresh entry (with its
   own operand vector, since vr1 used the shared one).  */
2240 changed = set_ssa_val_to (lhs, lhs);
2241 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2242 vr2->vuse = vr1.vuse;
2243 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2244 vr2->type = vr1.type;
2246 vr2->hashcode = vr1.hashcode;
2248 slot = htab_find_slot_with_hash (current_info->references,
2249 vr2, vr2->hashcode, INSERT);
2251 free_reference (*slot);
2258 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2259 and return true if the value number of the LHS has changed as a result. */
2262 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2264 bool changed = false;
/* Track the last VUSE the walk visits so the insertion below records
   the load against the vuse it was actually satisfied from.  */
2268 last_vuse = gimple_vuse (stmt);
2269 last_vuse_ptr = &last_vuse;
2270 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2271 last_vuse_ptr = NULL;
2273 /* If we have a VCE, try looking up its operand as it might be stored in
2274 a different type. */
2275 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2276 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2279 /* We handle type-punning through unions by value-numbering based
2280 on offset and size of the access. Be prepared to handle a
2281 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2283 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2285 /* We will be setting the value number of lhs to the value number
2286 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2287 So first simplify and lookup this expression to see if it
2288 is already available. */
2289 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2290 if ((CONVERT_EXPR_P (val)
2291 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2292 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME
2294 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2295 if ((CONVERT_EXPR_P (tem)
2296 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2297 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2298 TREE_TYPE (val), tem)))
2302 if (!is_gimple_min_invariant (val)
2303 && TREE_CODE (val) != SSA_NAME)
2304 result = vn_nary_op_lookup (val, NULL);
2305 /* If the expression is not yet available, value-number lhs to
2306 a new SSA_NAME we create. */
2309 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2310 /* Initialize value-number information properly. */
2311 VN_INFO_GET (result)->valnum = result;
2312 VN_INFO (result)->value_id = get_next_value_id ();
2313 VN_INFO (result)->expr = val;
2314 VN_INFO (result)->has_constants = expr_has_constants (val);
2315 VN_INFO (result)->needs_insertion = true;
2316 /* As all "inserted" statements are singleton SCCs, insert
2317 to the valid table. This is strictly needed to
2318 avoid re-generating new value SSA_NAMEs for the same
2319 expression during SCC iteration over and over (the
2320 optimistic table gets cleared after each iteration).
2321 We do not need to insert into the optimistic table, as
2322 lookups there will fall back to the valid table. */
2323 if (current_info == optimistic_info)
2325 current_info = valid_info;
2326 vn_nary_op_insert (val, result);
2327 current_info = optimistic_info;
2330 vn_nary_op_insert (val, result);
2331 if (dump_file && (dump_flags & TDF_DETAILS))
2333 fprintf (dump_file, "Inserting name ");
2334 print_generic_expr (dump_file, result, 0);
2335 fprintf (dump_file, " for expression ");
2336 print_generic_expr (dump_file, val, 0);
2337 fprintf (dump_file, "\n");
2344 changed = set_ssa_val_to (lhs, result);
2345 if (TREE_CODE (result) == SSA_NAME
2346 && VN_INFO (result)->has_constants)
2348 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2349 VN_INFO (lhs)->has_constants = true;
/* No earlier value found: LHS values itself, and the load is recorded
   against the last VUSE the walk reached.  */
2354 changed = set_ssa_val_to (lhs, lhs);
2355 vn_reference_insert (op, lhs, last_vuse);
2362 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2363 and return true if the value number of the LHS has changed as a result. */
2366 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2368 bool changed = false;
2370 bool resultsame = false;
2372 /* First we want to lookup using the *vuses* from the store and see
2373 if there the last store to this location with the same address
2376 The vuses represent the memory state before the store. If the
2377 memory state, address, and value of the store is the same as the
2378 last store to this location, then this store will produce the
2379 same memory state as that store.
2381 In this case the vdef versions for this store are value numbered to those
2382 vuse versions, since they represent the same memory state after
2385 Otherwise, the vdefs for the store are used when inserting into
2386 the table, since the store generates a new memory state. */
2388 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
/* Compare the valueized previous value against the stored value.  */
2392 if (TREE_CODE (result) == SSA_NAME)
2393 result = SSA_VAL (result);
2394 if (TREE_CODE (op) == SSA_NAME)
2396 resultsame = expressions_equal_p (result, op);
2399 if (!result || !resultsame)
2403 if (dump_file && (dump_flags & TDF_DETAILS))
2405 fprintf (dump_file, "No store match\n");
2406 fprintf (dump_file, "Value numbering store ");
2407 print_generic_expr (dump_file, lhs, 0);
2408 fprintf (dump_file, " to ");
2409 print_generic_expr (dump_file, op, 0);
2410 fprintf (dump_file, "\n");
2412 /* Have to set value numbers before insert, since insert is
2413 going to valueize the references in-place. */
2414 if ((vdef = gimple_vdef (stmt)))
2416 VN_INFO (vdef)->use_processed = true;
2417 changed |= set_ssa_val_to (vdef, vdef);
2420 /* Do not insert structure copies into the tables. */
2421 if (is_gimple_min_invariant (op)
2422 || is_gimple_reg (op))
2423 vn_reference_insert (lhs, op, vdef);
2427 /* We had a match, so value number the vdef to have the value
2428 number of the vuse it came from. */
2431 if (dump_file && (dump_flags & TDF_DETAILS))
2432 fprintf (dump_file, "Store matched earlier value,"
2433 "value numbering store vdefs to matching vuses.\n");
2435 def = gimple_vdef (stmt);
2436 use = gimple_vuse (stmt);
2438 VN_INFO (def)->use_processed = true;
2439 changed |= set_ssa_val_to (def, SSA_VAL (use));
2445 /* Visit and value number PHI, return true if the value number
2449 visit_phi (gimple phi)
2451 bool changed = false;
2453 tree sameval = VN_TOP;
2454 bool allsame = true;
2457 /* TODO: We could check for this in init_sccvn, and replace this
2458 with a gcc_assert. */
2459 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2460 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2462 /* See if all non-TOP arguments have the same value. TOP is
2463 equivalent to everything, so we can ignore it. */
2464 for (i = 0; i < gimple_phi_num_args (phi); i++)
2466 tree def = PHI_ARG_DEF (phi, i);
2468 if (TREE_CODE (def) == SSA_NAME)
2469 def = SSA_VAL (def);
2472 if (sameval == VN_TOP)
2478 if (!expressions_equal_p (def, sameval))
2486 /* If all value numbered to the same value, the phi node has that
2490 if (is_gimple_min_invariant (sameval))
2492 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2493 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2497 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2498 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2501 if (TREE_CODE (sameval) == SSA_NAME)
2502 return visit_copy (PHI_RESULT (phi), sameval);
2504 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2507 /* Otherwise, see if it is equivalent to a phi node in this block. */
2508 result = vn_phi_lookup (phi);
2511 if (TREE_CODE (result) == SSA_NAME)
2512 changed = visit_copy (PHI_RESULT (phi), result);
2514 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* Nothing equivalent found: the PHI values itself and is recorded for
   later lookups.  */
2518 vn_phi_insert (phi, PHI_RESULT (phi));
2519 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2520 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2521 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2527 /* Return true if EXPR contains constants. */
2530 expr_has_constants (tree expr)
2532 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* tcc_unary: test the single operand.  */
2535 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* tcc_binary: test either operand.  */
2538 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2539 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2540 /* Constants inside reference ops are rarely interesting, but
2541 it can take a lot of looking to find them. */
2543 case tcc_declaration:
2546 return is_gimple_min_invariant (expr);
2551 /* Return true if STMT contains constants. */
/* GIMPLE-level counterpart of expr_has_constants: inspects the RHS
   operands of an assignment by rhs class.  Non-assignments report
   false (the early-out below).  */
2554 stmt_has_constants (gimple stmt)
2556 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2559 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2561 case GIMPLE_UNARY_RHS:
2562 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2564 case GIMPLE_BINARY_RHS:
2565 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2566 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2567 case GIMPLE_TERNARY_RHS:
2568 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2569 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2570 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2571 case GIMPLE_SINGLE_RHS:
2572 /* Constants inside reference ops are rarely interesting, but
2573 it can take a lot of looking to find them. */
2574 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2581 /* Replace SSA_NAMES in expr with their value numbers, and return the
2583 This is performed in place. */
/* Only operands whose value number is known (not VN_TOP) are replaced;
   mutates EXPR directly via TREE_OPERAND stores.  NOTE(review): the
   unary/binary case labels are elided from this excerpt.  */
2586 valueize_expr (tree expr)
2588 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: valueize operand 0.  */
2591 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2592 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2593 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: valueize both operands.  */
2596 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2597 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2598 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2599 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2600 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2601 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2609 /* Simplify the binary expression RHS, and return the result if
/* Valueizes both operands of the assignment's binary RHS, then tries
   fold_binary on them; returns the folded tree only if it is a valid
   GIMPLE rhs, NULL_TREE otherwise (final return elided in excerpt).  */
2613 simplify_binary_expression (gimple stmt)
2615 tree result = NULL_TREE;
2616 tree op0 = gimple_assign_rhs1 (stmt);
2617 tree op1 = gimple_assign_rhs2 (stmt);
2619 /* This will not catch every single case we could combine, but will
2620 catch those with constants. The goal here is to simultaneously
2621 combine constants between expressions, but avoid infinite
2622 expansion of expressions during simplification. */
2623 if (TREE_CODE (op0) == SSA_NAME)
/* Expand op0 to its defining expression when it carries constants, or
   always for comparisons (to enable tree combining); otherwise just
   substitute its value number.  */
2625 if (VN_INFO (op0)->has_constants
2626 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2627 op0 = valueize_expr (vn_get_expr_for (op0));
2628 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2629 op0 = SSA_VAL (op0);
2632 if (TREE_CODE (op1) == SSA_NAME)
2634 if (VN_INFO (op1)->has_constants)
2635 op1 = valueize_expr (vn_get_expr_for (op1));
2636 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2637 op1 = SSA_VAL (op1);
2640 /* Avoid folding if nothing changed. */
2641 if (op0 == gimple_assign_rhs1 (stmt)
2642 && op1 == gimple_assign_rhs2 (stmt))
/* Overflow warnings from folding are deferred and only emitted if the
   folded result is actually usable.  */
2645 fold_defer_overflow_warnings ();
2647 result = fold_binary (gimple_assign_rhs_code (stmt),
2648 gimple_expr_type (stmt), op0, op1);
2650 STRIP_USELESS_TYPE_CONVERSION (result);
2652 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2655 /* Make sure result is not a complex expression consisting
2656 of operators of operators (IE (a + b) + (a + c))
2657 Otherwise, we will end up with unbounded expressions if
2658 fold does anything at all. */
2659 if (result && valid_gimple_rhs_p (result))
2665 /* Simplify the unary expression RHS, and return the result if
/* Like simplify_binary_expression but for unary / conversion-like RHS,
   including the REALPART/IMAGPART/VIEW_CONVERT reference codes that are
   GIMPLE_SINGLE_RHS.  Uses fold_unary_ignore_overflow on the valueized
   operand.  NOTE(review): excerpt — several interior lines elided.  */
2669 simplify_unary_expression (gimple stmt)
2671 tree result = NULL_TREE;
2672 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2674 /* We handle some tcc_reference codes here that are all
2675 GIMPLE_ASSIGN_SINGLE codes. */
2676 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2677 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2678 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
/* For these codes the interesting operand is inside the reference.  */
2679 op0 = TREE_OPERAND (op0, 0);
2681 if (TREE_CODE (op0) != SSA_NAME)
2685 if (VN_INFO (op0)->has_constants)
2686 op0 = valueize_expr (vn_get_expr_for (op0));
2687 else if (gimple_assign_cast_p (stmt)
2688 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2689 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2690 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2692 /* We want to do tree-combining on conversion-like expressions.
2693 Make sure we feed only SSA_NAMEs or constants to fold though. */
2694 tree tem = valueize_expr (vn_get_expr_for (op0));
2695 if (UNARY_CLASS_P (tem)
2696 || BINARY_CLASS_P (tem)
2697 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2698 || TREE_CODE (tem) == SSA_NAME
2699 || is_gimple_min_invariant (tem))
2703 /* Avoid folding if nothing changed, but remember the expression. */
2704 if (op0 == orig_op0)
2707 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2708 gimple_expr_type (stmt), op0);
2711 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only hand back results that are directly usable as a GIMPLE rhs.  */
2712 if (valid_gimple_rhs_p (result))
2719 /* Try to simplify RHS using equivalences and constant folding. */
/* Dispatches on the tree code class of the assignment's RHS:
   declarations try a known-constant symbol value, references try
   constant-aggregate folding, unary/comparison codes go through the
   dedicated simplifiers above.  NOTE(review): excerpt — the case labels
   between the visible arms are partly elided.  */
2722 try_to_simplify (gimple stmt)
2726 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2727 in this case, there is no point in doing extra work. */
2728 if (gimple_assign_copy_p (stmt)
2729 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2732 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2734 case tcc_declaration:
2735 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2741 /* Do not do full-blown reference lookup here, but simplify
2742 reads from constant aggregates. */
2743 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2747 /* Fallthrough for some codes that can operate on registers. */
2748 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2749 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2750 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2752 /* We could do a little more with unary ops, if they expand
2753 into binary ops, but it's debatable whether it is worth it. */
2755 return simplify_unary_expression (stmt);
2757 case tcc_comparison:
2759 return simplify_binary_expression (stmt);
2768 /* Visit and value number USE, return true if the value number
/* Central dispatcher of the value-numbering walk: looks at the defining
   statement of USE and routes to visit_phi, visit_copy, the unary /
   binary / reference visitors, or defs_to_varying as a safe fallback.
   NOTE(review): excerpt — many interior lines (braces, else-arms,
   declarations such as 'simplified') are elided.  */
2772 visit_use (tree use)
2774 bool changed = false;
2775 gimple stmt = SSA_NAME_DEF_STMT (use);
2777 VN_INFO (use)->use_processed = true;
2779 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2780 if (dump_file && (dump_flags & TDF_DETAILS)
2781 && !SSA_NAME_IS_DEFAULT_DEF (use))
2783 fprintf (dump_file, "Value numbering ");
2784 print_generic_expr (dump_file, use, 0);
2785 fprintf (dump_file, " stmt = ");
2786 print_gimple_stmt (dump_file, stmt, 0, 0);
2789 /* Handle uninitialized uses. */
/* Default defs (e.g. uninitialized or parameter values) number to
   themselves.  */
2790 if (SSA_NAME_IS_DEFAULT_DEF (use))
2791 changed = set_ssa_val_to (use, use);
2794 if (gimple_code (stmt) == GIMPLE_PHI)
2795 changed = visit_phi (stmt);
/* Statements without an lhs, with volatile operands, or that may throw
   cannot be value numbered; force their defs to VARYING.  */
2796 else if (!gimple_has_lhs (stmt)
2797 || gimple_has_volatile_ops (stmt)
2798 || stmt_could_throw_p (stmt))
2799 changed = defs_to_varying (stmt);
2800 else if (is_gimple_assign (stmt))
2802 tree lhs = gimple_assign_lhs (stmt);
2805 /* Shortcut for copies. Simplifying copies is pointless,
2806 since we copy the expression and value they represent. */
2807 if (gimple_assign_copy_p (stmt)
2808 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2809 && TREE_CODE (lhs) == SSA_NAME)
2811 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2814 simplified = try_to_simplify (stmt);
2817 if (dump_file && (dump_flags & TDF_DETAILS))
2819 fprintf (dump_file, "RHS ");
2820 print_gimple_expr (dump_file, stmt, 0, 0);
2821 fprintf (dump_file, " simplified to ");
2822 print_generic_expr (dump_file, simplified, 0);
2823 if (TREE_CODE (lhs) == SSA_NAME)
2824 fprintf (dump_file, " has constants %d\n",
2825 expr_has_constants (simplified));
2827 fprintf (dump_file, "\n");
2830 /* Setting value numbers to constants will occasionally
2831 screw up phi congruence because constants are not
2832 uniquely associated with a single ssa name that can be
/* Simplified to an invariant: record it as the lhs value.  */
2835 && is_gimple_min_invariant (simplified)
2836 && TREE_CODE (lhs) == SSA_NAME)
2838 VN_INFO (lhs)->expr = simplified;
2839 VN_INFO (lhs)->has_constants = true;
2840 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
2844 && TREE_CODE (simplified) == SSA_NAME
2845 && TREE_CODE (lhs) == SSA_NAME)
2847 changed = visit_copy (lhs, simplified);
2850 else if (simplified)
2852 if (TREE_CODE (lhs) == SSA_NAME)
2854 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2855 /* We have to unshare the expression or else
2856 valuizing may change the IL stream. */
2857 VN_INFO (lhs)->expr = unshare_expr (simplified);
2860 else if (stmt_has_constants (stmt)
2861 && TREE_CODE (lhs) == SSA_NAME)
2862 VN_INFO (lhs)->has_constants = true;
2863 else if (TREE_CODE (lhs) == SSA_NAME)
2865 /* We reset expr and constantness here because we may
2866 have been value numbering optimistically, and
2867 iterating. They may become non-constant in this case,
2868 even if they were optimistically constant. */
2870 VN_INFO (lhs)->has_constants = false;
2871 VN_INFO (lhs)->expr = NULL_TREE;
/* Abnormal-edge liveness constrains substitution: such names (and
   stores/copies from them) must be forced VARYING.  */
2874 if ((TREE_CODE (lhs) == SSA_NAME
2875 /* We can substitute SSA_NAMEs that are live over
2876 abnormal edges with their constant value. */
2877 && !(gimple_assign_copy_p (stmt)
2878 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2880 && is_gimple_min_invariant (simplified))
2881 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2882 /* Stores or copies from SSA_NAMEs that are live over
2883 abnormal edges are a problem. */
2884 || (gimple_assign_single_p (stmt)
2885 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2886 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2887 changed = defs_to_varying (stmt);
2888 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2890 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2892 else if (TREE_CODE (lhs) == SSA_NAME)
2894 if ((gimple_assign_copy_p (stmt)
2895 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2897 && is_gimple_min_invariant (simplified)))
2899 VN_INFO (lhs)->has_constants = true;
2901 changed = set_ssa_val_to (lhs, simplified);
2903 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
/* General case: dispatch on the rhs class of the assignment.  */
2907 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2909 case GIMPLE_UNARY_RHS:
2910 changed = visit_unary_op (lhs, stmt);
2912 case GIMPLE_BINARY_RHS:
2913 changed = visit_binary_op (lhs, stmt);
2915 case GIMPLE_SINGLE_RHS:
2916 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2919 /* VOP-less references can go through unary case. */
2920 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2921 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2922 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2923 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2925 changed = visit_unary_op (lhs, stmt);
2929 case tcc_declaration:
2930 changed = visit_reference_op_load
2931 (lhs, gimple_assign_rhs1 (stmt), stmt);
2933 case tcc_expression:
2934 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2936 changed = visit_unary_op (lhs, stmt);
2941 changed = defs_to_varying (stmt);
2945 changed = defs_to_varying (stmt);
2951 changed = defs_to_varying (stmt);
2953 else if (is_gimple_call (stmt))
2955 tree lhs = gimple_call_lhs (stmt);
2957 /* ??? We could try to simplify calls. */
2959 if (stmt_has_constants (stmt)
2960 && TREE_CODE (lhs) == SSA_NAME)
2961 VN_INFO (lhs)->has_constants = true;
2962 else if (TREE_CODE (lhs) == SSA_NAME)
2964 /* We reset expr and constantness here because we may
2965 have been value numbering optimistically, and
2966 iterating. They may become non-constant in this case,
2967 even if they were optimistically constant. */
2968 VN_INFO (lhs)->has_constants = false;
2969 VN_INFO (lhs)->expr = NULL_TREE;
2972 if (TREE_CODE (lhs) == SSA_NAME
2973 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2974 changed = defs_to_varying (stmt);
2975 /* ??? We should handle stores from calls. */
2976 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const calls can be value numbered as references.  */
2978 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2979 changed = visit_reference_op_call (lhs, stmt);
2981 changed = defs_to_varying (stmt);
2984 changed = defs_to_varying (stmt);
2991 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names.  Ordering: empty-def-stmt (nop) names
   first among themselves by version; same-block defs ordered PHIs first,
   then by statement UID, then version; different blocks by RPO number.  */
2994 compare_ops (const void *pa, const void *pb)
2996 const tree opa = *((const tree *)pa);
2997 const tree opb = *((const tree *)pb);
2998 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2999 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3003 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3004 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3005 else if (gimple_nop_p (opstmta))
3007 else if (gimple_nop_p (opstmtb))
3010 bba = gimple_bb (opstmta);
3011 bbb = gimple_bb (opstmtb);
3014 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Same basic block: PHIs sort before ordinary statements.  */
3022 if (gimple_code (opstmta) == GIMPLE_PHI
3023 && gimple_code (opstmtb) == GIMPLE_PHI)
3024 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3025 else if (gimple_code (opstmta) == GIMPLE_PHI)
3027 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3029 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3030 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3032 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Different blocks: fall back to the precomputed RPO numbering.  */
3034 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3037 /* Sort an array containing members of a strongly connected component
3038 SCC so that the members are ordered by RPO number.
3039 This means that when the sort is complete, iterating through the
3040 array will give you the members in RPO order. */
/* Thin wrapper: qsort the VEC's backing array with compare_ops.  */
3043 sort_scc (VEC (tree, heap) *scc)
3045 qsort (VEC_address (tree, scc),
3046 VEC_length (tree, scc),
3051 /* Insert the no longer used nary ONARY to the hash INFO. */
/* Deep-copies the variably-sized nary into INFO's obstack and inserts
   it by its precomputed hashcode; asserts the slot was empty.  */
3054 copy_nary (vn_nary_op_t onary, vn_tables_t info)
/* Size accounts for the trailing op[] array: 4 slots declared, only
   'length' used.  */
3056 size_t size = (sizeof (struct vn_nary_op_s)
3057 - sizeof (tree) * (4 - onary->length));
3058 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
3060 memcpy (nary, onary, size);
3061 slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
3062 gcc_assert (!*slot);
3066 /* Insert the no longer used phi OPHI to the hash INFO. */
/* Copies the phi record into INFO's pool; ownership of the phiargs
   vector transfers to the copy (the original's pointer is cleared so a
   later free of OPHI does not release it).  */
3069 copy_phi (vn_phi_t ophi, vn_tables_t info)
3071 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3073 memcpy (phi, ophi, sizeof (*phi));
3074 ophi->phiargs = NULL;
3075 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3076 gcc_assert (!*slot);
3080 /* Insert the no longer used reference OREF to the hash INFO. */
/* Like copy_phi for references: ownership of the operands vector moves
   to the copy.  Unlike the other copiers, an occupied slot is allowed —
   the existing entry is freed and replaced.  */
3083 copy_reference (vn_reference_t oref, vn_tables_t info)
3087 ref = (vn_reference_t) pool_alloc (info->references_pool);
3088 memcpy (ref, oref, sizeof (*ref));
3089 oref->operands = NULL;
3090 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3093 free_reference (*slot);
3097 /* Process a strongly connected component in the SSA graph. */
/* Single-member SCCs are visited once against the valid table.  Larger
   SCCs iterate to a fixed point against the optimistic table (cleared
   each round), then the converged optimistic entries are copied into
   the valid table.  NOTE(review): excerpt — loop framing and several
   declarations are elided.  */
3100 process_scc (VEC (tree, heap) *scc)
3104 unsigned int iterations = 0;
3105 bool changed = true;
3111 /* If the SCC has a single member, just visit it. */
3112 if (VEC_length (tree, scc) == 1)
3114 tree use = VEC_index (tree, scc, 0);
3115 if (!VN_INFO (use)->use_processed)
3120 /* Iterate over the SCC with the optimistic table until it stops
3122 current_info = optimistic_info;
3127 /* As we are value-numbering optimistically we have to
3128 clear the expression tables and the simplified expressions
3129 in each iteration until we converge. */
3130 htab_empty (optimistic_info->nary);
3131 htab_empty (optimistic_info->phis);
3132 htab_empty (optimistic_info->references);
/* The nary obstack cannot be emptied in place: free it wholesale and
   re-initialize.  */
3133 obstack_free (&optimistic_info->nary_obstack, NULL);
3134 gcc_obstack_init (&optimistic_info->nary_obstack);
3135 empty_alloc_pool (optimistic_info->phis_pool);
3136 empty_alloc_pool (optimistic_info->references_pool);
3137 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3138 VN_INFO (var)->expr = NULL_TREE;
3139 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3140 changed |= visit_use (var);
3143 statistics_histogram_event (cfun, "SCC iterations", iterations);
3145 /* Finally, copy the contents of the no longer used optimistic
3146 table to the valid table. */
3147 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3148 copy_nary (nary, valid_info);
3149 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3150 copy_phi (phi, valid_info);
3151 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3152 copy_reference (ref, valid_info);
3154 current_info = valid_info;
/* Instantiate a heap-allocated VEC of ssa_op_iter (by-value, 'O' =
   object) used by the iterative DFS below to save/restore use-walking
   state instead of recursing.  */
3157 DEF_VEC_O(ssa_op_iter);
3158 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3160 /* Pop the components of the found SCC for NAME off the SCC stack
3161 and process them. Returns true if all went well, false if
3162 we run into resource limits. */
3165 extract_and_process_scc_for_name (tree name)
3167 VEC (tree, heap) *scc = NULL;
3170 /* Found an SCC, pop the components off the SCC stack and
/* Pop until NAME itself comes off — NAME is the SCC root, so everything
   above it on the stack belongs to this component.  */
3174 x = VEC_pop (tree, sccstack);
3176 VN_INFO (x)->on_sccstack = false;
3177 VEC_safe_push (tree, heap, scc, x);
3178 } while (x != name);
3180 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3181 if (VEC_length (tree, scc)
3182 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3185 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3186 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3187 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs are sorted (RPO) before processing — see sort_scc.  */
3191 if (VEC_length (tree, scc) > 1)
3194 if (dump_file && (dump_flags & TDF_DETAILS))
3195 print_scc (dump_file, scc);
3199 VEC_free (tree, heap, scc);
3204 /* Depth first search on NAME to discover and process SCC's in the SSA
3206 Execution of this algorithm relies on the fact that the SCC's are
3207 popped off the stack in topological order.
3208 Returns true if successful, false if we stopped processing SCC's due
3209 to resource constraints. */
/* Iterative Tarjan-style SCC walk: instead of recursing, the current
   (name, use-iterator) pair is pushed onto namevec/itervec and restored
   when a child walk finishes.  NOTE(review): excerpt — the function
   header line and some labels (e.g. the target of 'goto
   continue_walking') are elided.  */
3214 VEC(ssa_op_iter, heap) *itervec = NULL;
3215 VEC(tree, heap) *namevec = NULL;
3216 use_operand_p usep = NULL;
/* Standard Tarjan bookkeeping: dfs number, visited flag, low-link.  */
3223 VN_INFO (name)->dfsnum = next_dfs_num++;
3224 VN_INFO (name)->visited = true;
3225 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3227 VEC_safe_push (tree, heap, sccstack, name);
3228 VN_INFO (name)->on_sccstack = true;
3229 defstmt = SSA_NAME_DEF_STMT (name);
3231 /* Recursively DFS on our operands, looking for SCC's. */
3232 if (!gimple_nop_p (defstmt))
3234 /* Push a new iterator. */
3235 if (gimple_code (defstmt) == GIMPLE_PHI)
3236 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3238 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3241 clear_and_done_ssa_iter (&iter);
3245 /* If we are done processing uses of a name, go up the stack
3246 of iterators and process SCCs as we found them. */
3247 if (op_iter_done (&iter))
3249 /* See if we found an SCC. */
/* NAME is an SCC root iff its low-link equals its own dfs number.  */
3250 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3251 if (!extract_and_process_scc_for_name (name))
3253 VEC_free (tree, heap, namevec);
3254 VEC_free (ssa_op_iter, heap, itervec);
3258 /* Check if we are done. */
3259 if (VEC_empty (tree, namevec))
3261 VEC_free (tree, heap, namevec);
3262 VEC_free (ssa_op_iter, heap, itervec);
3266 /* Restore the last use walker and continue walking there. */
3268 name = VEC_pop (tree, namevec);
3269 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3270 sizeof (ssa_op_iter));
3271 VEC_pop (ssa_op_iter, itervec);
3272 goto continue_walking;
3275 use = USE_FROM_PTR (usep);
3277 /* Since we handle phi nodes, we will sometimes get
3278 invariants in the use expression. */
3279 if (TREE_CODE (use) == SSA_NAME)
3281 if (! (VN_INFO (use)->visited))
3283 /* Recurse by pushing the current use walking state on
3284 the stack and starting over. */
3285 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3286 VEC_safe_push(tree, heap, namevec, name);
3291 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3292 VN_INFO (use)->low);
/* Back-edge to a name still on the SCC stack: update low-link.  */
3294 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3295 && VN_INFO (use)->on_sccstack)
3297 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3298 VN_INFO (name)->low);
3302 usep = op_iter_next_use (&iter);
3306 /* Allocate a value number table. */
/* Creates the three hashtables (phis, nary ops, references) plus the
   backing obstack and alloc pools for one vn_tables_s.  */
3309 allocate_vn_table (vn_tables_t table)
3311 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3312 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3313 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3316 gcc_obstack_init (&table->nary_obstack);
3317 table->phis_pool = create_alloc_pool ("VN phis",
3318 sizeof (struct vn_phi_s),
3320 table->references_pool = create_alloc_pool ("VN references",
3321 sizeof (struct vn_reference_s),
3325 /* Free a value number table. */
/* Mirror of allocate_vn_table: tear down hashtables, obstack, pools.  */
3328 free_vn_table (vn_tables_t table)
3330 htab_delete (table->phis);
3331 htab_delete (table->nary);
3332 htab_delete (table->references);
3333 obstack_free (&table->nary_obstack, NULL);
3334 free_alloc_pool (table->phis_pool);
3335 free_alloc_pool (table->references_pool);
/* Body of init_scc_vn (header line elided in this excerpt): sets up all
   SCCVN global state — constant-value-id table, per-SSA-name aux info,
   RPO numbering of basic blocks, the VN_TOP sentinel, stmt UIDs, and
   the valid/optimistic table pair.  */
3343 int *rpo_numbers_temp;
3345 calculate_dominance_info (CDI_DOMINATORS);
3347 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3350 constant_value_ids = BITMAP_ALLOC (NULL);
3355 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3356 /* VEC_alloc doesn't actually grow it to the right size, it just
3357 preallocates the space to do so. */
3358 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3359 gcc_obstack_init (&vn_ssa_aux_obstack);
3361 shared_lookup_phiargs = NULL;
3362 shared_lookup_references = NULL;
3363 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3364 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3365 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3367 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3368 the i'th block in RPO order is bb. We want to map bb's to RPO
3369 numbers, so we need to rearrange this array. */
3370 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3371 rpo_numbers[rpo_numbers_temp[j]] = j;
3373 XDELETE (rpo_numbers_temp);
/* VN_TOP is a distinguished dummy variable used as the "unknown"
   lattice value.  */
3375 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3377 /* Create the VN_INFO structures, and initialize value numbers to
3379 for (i = 0; i < num_ssa_names; i++)
3381 tree name = ssa_name (i);
3384 VN_INFO_GET (name)->valnum = VN_TOP;
3385 VN_INFO (name)->expr = NULL_TREE;
3386 VN_INFO (name)->value_id = 0;
/* Fresh UIDs so compare_ops can order statements within a block.  */
3390 renumber_gimple_stmt_uids ();
3392 /* Create the valid and optimistic value numbering tables. */
3393 valid_info = XCNEW (struct vn_tables_s);
3394 allocate_vn_table (valid_info);
3395 optimistic_info = XCNEW (struct vn_tables_s);
3396 allocate_vn_table (optimistic_info);
/* Body of free_scc_vn (header line elided in this excerpt): releases
   everything init_scc_vn allocated, also releasing SSA names that
   SCCVN itself inserted.  */
3404 htab_delete (constant_to_value_id);
3405 BITMAP_FREE (constant_value_ids);
3406 VEC_free (tree, heap, shared_lookup_phiargs);
3407 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3408 XDELETEVEC (rpo_numbers);
3410 for (i = 0; i < num_ssa_names; i++)
3412 tree name = ssa_name (i);
/* Only names flagged as inserted by VN are released here.  */
3414 && VN_INFO (name)->needs_insertion)
3415 release_ssa_name (name);
3417 obstack_free (&vn_ssa_aux_obstack, NULL);
3418 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3420 VEC_free (tree, heap, sccstack);
3421 free_vn_table (valid_info);
3422 XDELETE (valid_info);
3423 free_vn_table (optimistic_info);
3424 XDELETE (optimistic_info);
3427 /* Set the value ids in the valid hash tables. */
/* After value numbering converges, stamp each nary/phi/reference entry
   with the value id of its result: the SSA name's assigned id, or a
   (possibly fresh) constant value id for invariants.  */
3430 set_hashtable_value_ids (void)
3437 /* Now set the value ids of the things we had put in the hash
3440 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3441 vno, vn_nary_op_t, hi)
3445 if (TREE_CODE (vno->result) == SSA_NAME)
3446 vno->value_id = VN_INFO (vno->result)->value_id;
3447 else if (is_gimple_min_invariant (vno->result))
3448 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3452 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3457 if (TREE_CODE (vp->result) == SSA_NAME)
3458 vp->value_id = VN_INFO (vp->result)->value_id;
3459 else if (is_gimple_min_invariant (vp->result))
3460 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3464 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3465 vr, vn_reference_t, hi)
3469 if (TREE_CODE (vr->result) == SSA_NAME)
3470 vr->value_id = VN_INFO (vr->result)->value_id;
3471 else if (is_gimple_min_invariant (vr->result))
3472 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3477 /* Do SCCVN. Returns true if it finished, false if we bailed out
3478 due to resource constraints. */
/* Driver (header line elided in this excerpt): seed parameter default
   defs with themselves, DFS every used SSA name, then assign value ids,
   propagate them to a fixed point, stamp the hashtables, and dump.  */
3485 bool changed = true;
3488 current_info = valid_info;
3490 for (param = DECL_ARGUMENTS (current_function_decl);
3492 param = DECL_CHAIN (param))
3494 if (gimple_default_def (cfun, param) != NULL)
3496 tree def = gimple_default_def (cfun, param);
/* Parameters value-number to themselves.  */
3497 VN_INFO (def)->valnum = def;
3501 for (i = 1; i < num_ssa_names; ++i)
3503 tree name = ssa_name (i);
3505 && VN_INFO (name)->visited == false
3506 && !has_zero_uses (name))
3514 /* Initialize the value ids. */
3516 for (i = 1; i < num_ssa_names; ++i)
3518 tree name = ssa_name (i);
3522 info = VN_INFO (name);
/* Self-valued or still-TOP names get fresh ids; invariant-valued names
   share the constant's id.  */
3523 if (info->valnum == name
3524 || info->valnum == VN_TOP)
3525 info->value_id = get_next_value_id ();
3526 else if (is_gimple_min_invariant (info->valnum))
3527 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3530 /* Propagate until they stop changing. */
3534 for (i = 1; i < num_ssa_names; ++i)
3536 tree name = ssa_name (i);
3540 info = VN_INFO (name);
3541 if (TREE_CODE (info->valnum) == SSA_NAME
3542 && info->valnum != name
3543 && info->value_id != VN_INFO (info->valnum)->value_id)
3546 info->value_id = VN_INFO (info->valnum)->value_id;
3551 set_hashtable_value_ids ();
3553 if (dump_file && (dump_flags & TDF_DETAILS))
3555 fprintf (dump_file, "Value numbers:\n");
3556 for (i = 0; i < num_ssa_names; i++)
3558 tree name = ssa_name (i);
3560 && VN_INFO (name)->visited
3561 && SSA_VAL (name) != name)
3563 print_generic_expr (dump_file, name, 0);
3564 fprintf (dump_file, " = ");
3565 print_generic_expr (dump_file, SSA_VAL (name), 0);
3566 fprintf (dump_file, "\n");
3574 /* Return the maximum value id we have ever seen. */
/* Read-only accessor for the global counter; does not allocate.  */
3577 get_max_value_id (void)
3579 return next_value_id;
3582 /* Return the next unique value id. */
/* Post-increment: returns the current id and advances the counter.  */
3585 get_next_value_id (void)
3587 return next_value_id++;
3591 /* Compare two expressions E1 and E2 and return true if they are equal. */
/* Pointer identity first, then NULL asymmetry, then structural equality
   via operand_equal_p with OEP_PURE_SAME (pure-function calls with
   equal arguments compare equal).  */
3594 expressions_equal_p (tree e1, tree e2)
3596 /* The obvious case. */
3600 /* If only one of them is null, they cannot be equal. */
3604 /* Now perform the actual comparison. */
3605 if (TREE_CODE (e1) == TREE_CODE (e2)
3606 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3613 /* Return true if the nary operation NARY may trap. This is a copy
3614 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3617 vn_nary_may_trap (vn_nary_op_t nary)
3620 tree rhs2 = NULL_TREE;
3621 bool honor_nans = false;
3622 bool honor_snans = false;
3623 bool fp_operation = false;
3624 bool honor_trapv = false;
3628 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3629 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3630 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3633 fp_operation = FLOAT_TYPE_P (type);
3636 honor_nans = flag_trapping_math && !flag_finite_math_only;
3637 honor_snans = flag_signaling_nans != 0;
3639 else if (INTEGRAL_TYPE_P (type)
3640 && TYPE_OVERFLOW_TRAPS (type))
3643 if (nary->length >= 2)
3645 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3647 honor_nans, honor_snans, rhs2,
3653 for (i = 0; i < nary->length; ++i)
3654 if (tree_could_trap_p (nary->op[i]))