/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

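/* As a small illustration of alternative 2, consider the loop

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   i_1 and i_2 form a cycle in the SSA graph and are processed as one
   SCC: the optimistic table first assumes the PHI equals its only
   non-VN_TOP argument, the members are re-visited until nothing
   changes, and both names end up value numbered to the constant 0.  */
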
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

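/* For example, for an access like a.b.c[2] whose component and array
   offsets are all constant, the offsets accumulate in OFF above and
   only the combined value is hashed, so differently structured but
   equal-offset access paths still hash the same.  */
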
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	}
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
	 || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
	base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = double_int_add (tree_to_double_int (this_offset),
					double_int_sdiv
					  (tree_to_double_int (bit_offset),
					   uhwi_to_double_int (BITS_PER_UNIT),
					   TRUNC_DIV_EXPR));
		    if (double_int_fits_in_shwi_p (off))
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off = double_int_add (off,
				    double_int_neg
				      (tree_to_double_int (temp.op1)));
	      off = double_int_mul (off, tree_to_double_int (temp.op2));
	      if (double_int_fits_in_shwi_p (off))
		temp.off = off.low;
	    }
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

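/* For instance, a load from s.f[i] is decomposed outside-in into
   roughly the following operand records:

     { ARRAY_REF, op0 = i, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL for f }
     { VAR_DECL, op0 = s }

   with temp.off filled in for the pieces whose offset is constant.  */
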
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case ALIGN_INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

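/* E.g. a read like "abc"[1] satisfies the checks above and folds to
   the character constant 'b', and a builtin call such as
   strlen ("abc") with only constant arguments can likewise be folded
   to a constant here.  */
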
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && VEC_index (vn_reference_op_s,
			orig, i - 1)->opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off = double_int_add (off,
				double_int_neg
				  (tree_to_double_int (vro->op1)));
	  off = double_int_mul (off, tree_to_double_int (vro->op2));
	  if (double_int_fits_in_shwi_p (off))
	    vro->off = off.low;
	}
    }

  return orig;
}

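/* For example, once i_3 has been value numbered to the constant 1,
   an operand record for a[i_3] is rewritten in place into the
   constant-indexed form a[1], and the branch above then computes its
   constant offset from the lower bound and element size.  */
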
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      ao_ref ref1;
      VEC (vn_reference_op_s, heap) *operands = NULL;
      bool res = true;
      copy_reference_ops_from_ref (lhs, &operands);
      operands = valueize_refs (operands);
      if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
					 TREE_TYPE (lhs), operands))
	res = refs_may_alias_p_1 (ref, &ref1, true);
      VEC_free (vn_reference_op_s, heap, operands);
      if (!res)
	return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = fold_convert (vr->type, integer_zero_node);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = fold_convert (vr->type, integer_zero_node);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
					       vr->operands, i),
				    VEC_index (vn_reference_op_s, lhs, j)))
	{
	  i--;
	  j--;
	}

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

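/* So, for example, when the walk reaches

     memset (&a, 0, sizeof (a));
     ...
     x = a.f;

   case 1) above derives the constant 0 for the load from a.f, while a
   walk reaching an aggregate copy b = a re-writes a lookup of b.f
   into a lookup of a.f and keeps walking.  */
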
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
		     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

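/* Because of the canonical operand order established above,
   commutative expressions like a_1 + b_2 and b_2 + a_1 produce
   identical hashes and therefore meet in the same hash table slot.  */
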
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}

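/* Note that a VN_TOP argument is compatible with anything above:
   during the optimistic iteration TOP stands for "not yet known", so
   a PHI whose argument is still VN_TOP may be equal to one whose
   corresponding argument already carries a value.  */
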
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address stored
     the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
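  /* For example (hypothetical GIMPLE, virtual operands shown),

       # .MEM_2 = VDEF <.MEM_1>
       *p_1 = x_3;
       ...
       # .MEM_5 = VDEF <.MEM_4>
       *p_1 = x_3;

     if looking up *p_1 in the memory state .MEM_4 yields x_3, the
     second store changes nothing and .MEM_5 can be value numbered
     to .MEM_4.  */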
  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */
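/* For instance (hypothetical GIMPLE), in

     x_3 = PHI <y_2(2), y_2(3)>

   every argument value numbers to y_2, so x_3 is value numbered to
   y_2 as well; arguments that are still VN_TOP are ignored by this
   test.  */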
static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	sameval = def;
      else if (!expressions_equal_p (def, sameval))
	{
	  allsame = false;
	  break;
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);
      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	     || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
    /* Constants inside reference ops are rarely interesting, but
       it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.  This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
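  /* E.g. (hypothetical SSA names): if x_1 has the expression a_2 + 1
     recorded and we visit y_3 = x_1 + 2, op0 becomes a_2 + 1 below and
     fold can combine the constants into a_2 + 3.  */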
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;
    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
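      /* E.g. (hypothetical), this turns a read like c[1] from

	   static const int c[2] = { 1, 2 };

	 into the constant 2.  */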
      if (tem)
	return tem;
      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (gimple_assign_single_p (stmt)
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt),
						stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		      changed = visit_unary_op (lhs, stmt);
		      break;
		    case GIMPLE_BINARY_RHS:
		      changed = visit_binary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_reference:
			  /* VOP-less references can go through unary case.  */
			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			case tcc_declaration:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			  break;
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ???  We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ???  We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */
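/* The resulting order: names without a defining statement (default
   definitions) come first, then names from earlier blocks in RPO
   order; within a single block, PHI results precede other definitions,
   which are ordered by statement uid, and remaining ties fall back to
   the SSA name version so the order is stable.  */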
static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
	 VEC_length (tree, scc),
	 sizeof (tree),
	 compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = (sizeof (struct vn_nary_op_s)
		 - sizeof (tree) * (4 - onary->length));
  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
  void **slot;
  /* The operands are stored inline, so a plain memcpy copies them too.  */
  memcpy (nary, onary, size);
  slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = nary;
}

/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  /* The phiargs vector is now owned by the new entry; clear it in the
     old one so freeing the optimistic table does not free it too.  */
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  /* The operand vector moves to the new entry as well.  */
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
				   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */
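/* A typical multi-member SCC comes from a loop (hypothetical GIMPLE):

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 1;

   i_1 and i_2 depend on each other, so they are value numbered
   together, starting from the optimistic assumption VN_TOP and
   iterating until the value numbers stop changing.  */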
static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
      return;
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	VN_INFO (var)->expr = NULL_TREE;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */
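/* Note that the walk is implemented iteratively rather than
   recursively: before descending into a use, the current name and its
   operand iterator are pushed onto namevec and itervec and the walk
   restarts at start_over, which avoids overflowing the host stack on
   very deep SSA use-def chains.  */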
static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push (tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
				       VN_INFO (name)->low);
	}

      usep = op_iter_next_use (&iter);
    }
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);
  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
			 num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* pre_and_rev_post_order_compute fills rpo_numbers_temp with the
     basic block indices in RPO order: rpo_numbers_temp[i] is the index
     of the i'th block in that order.  We want the inverse mapping from
     block index to RPO number, so rearrange the array accordingly.  */
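  /* For instance (hypothetical), if the computed order is
     rpo_numbers_temp = { 2, 4, 3 }, the loop below stores
     rpo_numbers[2] = 0, rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */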
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */
  FOR_EACH_HTAB_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hi)
    if (vno->result)
      {
	if (TREE_CODE (vno->result) == SSA_NAME)
	  vno->value_id = VN_INFO (vno->result)->value_id;
	else if (is_gimple_min_invariant (vno->result))
	  vno->value_id = get_or_alloc_constant_value_id (vno->result);
      }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis, vp, vn_phi_t, hi)
    if (vp->result)
      {
	if (TREE_CODE (vp->result) == SSA_NAME)
	  vp->value_id = VN_INFO (vp->result)->value_id;
	else if (is_gimple_min_invariant (vp->result))
	  vp->value_id = get_or_alloc_constant_value_id (vp->result);
      }

  FOR_EACH_HTAB_ELEMENT (valid_info->references, vr, vn_reference_t, hi)
    if (vr->result)
      {
	if (TREE_CODE (vr->result) == SSA_NAME)
	  vr->value_id = VN_INFO (vr->result)->value_id;
	else if (is_gimple_min_invariant (vr->result))
	  vr->value_id = get_or_alloc_constant_value_id (vr->result);
      }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;
  bool changed = true;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */
unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */
unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */
bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;
  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;
  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;
  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}