1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
/* Map from constant tree to its value id, and the set of value ids
   that name constants.  */
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
    correct.  */
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
    valid_info.  */
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
/* Shorthand for the current value number of an SSA name.  */
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
    value.  NOTE(review): the declaration of VN_TOP itself appears to
    have been lost in extraction of this file.  */
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
    detection.  */
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
164 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
165 are allocated on an obstack for locality reasons, and to free them
166 without looping over the VEC. */
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
171 /* Return the value numbering information for a given SSA name. */
176 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 SSA_NAME_VERSION (name));
178 gcc_checking_assert (res);
182 /* Set the value numbering info for a given SSA name to a given
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
188 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 SSA_NAME_VERSION (name), value);
192 /* Initialize the value numbering info for a given SSA name.
193 This should be called just once for every SSA name. */
196 VN_INFO_GET (tree name)
198 vn_ssa_aux_t newinfo;
200 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 SSA_NAME_VERSION (name) + 1);
205 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name), newinfo);
211 /* Get the representative expression for the SSA_NAME NAME. Returns
212 the representative SSA_NAME if there is no expression associated with it. */
/* NOTE(review): the function signature, braces, returns and case labels
   were lost when this file was extracted; surviving lines are kept
   byte-identical below.  */
215 vn_get_expr_for (tree name)
217 vn_ssa_aux_t vn = VN_INFO (name);
219 tree expr = NULL_TREE;
/* VN_TOP means the name has not been visited; no representative.  */
221 if (vn->valnum == VN_TOP)
224 /* If the value-number is a constant it is the representative
    expression.  */
226 if (TREE_CODE (vn->valnum) != SSA_NAME)
229 /* Get to the information of the value of this SSA_NAME. */
230 vn = VN_INFO (vn->valnum);
232 /* If the value-number is a constant it is the representative
    expression.  */
234 if (TREE_CODE (vn->valnum) != SSA_NAME)
237 /* Else if we have an expression, return it. */
238 if (vn->expr != NULL_TREE)
241 /* Otherwise use the defining statement to build the expression. */
242 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
244 /* If the value number is a default-definition or a PHI result
    there is no expression to build -- presumably the dropped lines
    returned the name itself; confirm against upstream.  */
246 if (gimple_nop_p (def_stmt)
247 || gimple_code (def_stmt) == GIMPLE_PHI)
250 if (!is_gimple_assign (def_stmt))
253 /* FIXME tuples. This is incomplete and likely will miss some
    cases.  */
255 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
/* References wrapping a plain SSA name can be rebuilt as a unary op.  */
258 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
259 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
260 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
261 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
262 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
263 gimple_expr_type (def_stmt),
264 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
/* tcc_unary: rebuild the single-operand expression.  */
268 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
269 gimple_expr_type (def_stmt),
270 gimple_assign_rhs1 (def_stmt));
/* tcc_binary: rebuild the two-operand expression.  */
274 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
275 gimple_expr_type (def_stmt),
276 gimple_assign_rhs1 (def_stmt),
277 gimple_assign_rhs2 (def_stmt));
282 if (expr == NULL_TREE)
285 /* Cache the expression. */
292 /* Free a phi operation structure VP. */
297 vn_phi_t phi = (vn_phi_t) vp;
298 VEC_free (tree, heap, phi->phiargs);
301 /* Free a reference operation structure VP. */
304 free_reference (void *vp)
306 vn_reference_t vr = (vn_reference_t) vp;
307 VEC_free (vn_reference_op_s, heap, vr->operands);
310 /* Hash table equality function for vn_constant_t. */
313 vn_constant_eq (const void *p1, const void *p2)
315 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
316 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
318 if (vc1->hashcode != vc2->hashcode)
321 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
324 /* Hash table hash function for vn_constant_t. */
327 vn_constant_hash (const void *p1)
329 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
330 return vc1->hashcode;
333 /* Lookup a value id for CONSTANT and return it. If it does not
337 get_constant_value_id (tree constant)
340 struct vn_constant_s vc;
342 vc.hashcode = vn_hash_constant_with_type (constant);
343 vc.constant = constant;
344 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
345 vc.hashcode, NO_INSERT);
347 return ((vn_constant_t)*slot)->value_id;
351 /* Lookup a value id for CONSTANT, and if it does not exist, create a
352 new one and return it. If it does exist, return it. */
355 get_or_alloc_constant_value_id (tree constant)
358 struct vn_constant_s vc;
361 vc.hashcode = vn_hash_constant_with_type (constant);
362 vc.constant = constant;
363 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
364 vc.hashcode, INSERT);
366 return ((vn_constant_t)*slot)->value_id;
368 vcp = XNEW (struct vn_constant_s);
369 vcp->hashcode = vc.hashcode;
370 vcp->constant = constant;
371 vcp->value_id = get_next_value_id ();
372 *slot = (void *) vcp;
373 bitmap_set_bit (constant_value_ids, vcp->value_id);
374 return vcp->value_id;
377 /* Return true if V is a value id for a constant. */
380 value_id_constant_p (unsigned int v)
382 return bitmap_bit_p (constant_value_ids, v);
385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */
389 vn_reference_op_eq (const void *p1, const void *p2)
391 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
392 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
394 return (vro1->opcode == vro2->opcode
395 /* We do not care for differences in type qualification. */
396 && (vro1->type == vro2->type
397 || (vro1->type && vro2->type
398 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
399 TYPE_MAIN_VARIANT (vro2->type))))
400 && expressions_equal_p (vro1->op0, vro2->op0)
401 && expressions_equal_p (vro1->op1, vro2->op1)
402 && expressions_equal_p (vro1->op2, vro2->op2));
405 /* Compute the hash for a reference operand VRO1. */
408 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
410 result = iterative_hash_hashval_t (vro1->opcode, result);
412 result = iterative_hash_expr (vro1->op0, result);
414 result = iterative_hash_expr (vro1->op1, result);
416 result = iterative_hash_expr (vro1->op2, result);
420 /* Return the hashcode for a given reference operation P1. */
423 vn_reference_hash (const void *p1)
425 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
426 return vr1->hashcode;
429 /* Compute a hash for the reference operation VR1 and return it. */
/* NOTE(review): braces and the MEM_REF/ADDR_EXPR offset-merging lines
   were lost in extraction; surviving lines kept byte-identical.  */
432 vn_reference_compute_hash (const vn_reference_t vr1)
434 hashval_t result = 0;
436 vn_reference_op_t vro;
/* Running constant byte offset accumulated over mergeable ops;
   -1 means "no constant offset so far".  */
437 HOST_WIDE_INT off = -1;
440 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
442 if (vro->opcode == MEM_REF)
444 else if (vro->opcode != ADDR_EXPR)
456 result = iterative_hash_hashval_t (off, result);
459 && vro->opcode == ADDR_EXPR)
/* Hash the ADDR_EXPR's operand directly so equal addresses hash
   equal regardless of the wrapping expression.  */
463 tree op = TREE_OPERAND (vro->op0, 0);
464 result = iterative_hash_hashval_t (TREE_CODE (op), result);
465 result = iterative_hash_expr (op, result);
469 result = vn_reference_op_compute_hash (vro, result);
/* Mix in the vuse so references through different memory states
   hash differently.  */
473 result += SSA_NAME_VERSION (vr1->vuse);
478 /* Return true if reference operations P1 and P2 are equivalent. This
479 means they have the same set of operands and vuses. */
/* NOTE(review): signature, braces and several early-return lines were
   lost in extraction; surviving lines kept byte-identical.  Also note
   the hashcode test below appears twice -- the second is redundant.  */
482 vn_reference_eq (const void *p1, const void *p2)
486 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
487 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
488 if (vr1->hashcode != vr2->hashcode)
491 /* Early out if this is not a hash collision. */
492 if (vr1->hashcode != vr2->hashcode)
495 /* The VOP needs to be the same. */
496 if (vr1->vuse != vr2->vuse)
499 /* If the operands are the same we are done. */
500 if (vr1->operands == vr2->operands)
/* Accesses of different sizes can never be equal.  */
503 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
/* For integral types the precision must match as well, and a type
   whose precision differs from its size is never interchangeable.  */
506 if (INTEGRAL_TYPE_P (vr1->type)
507 && INTEGRAL_TYPE_P (vr2->type))
509 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
512 else if (INTEGRAL_TYPE_P (vr1->type)
513 && (TYPE_PRECISION (vr1->type)
514 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
516 else if (INTEGRAL_TYPE_P (vr2->type)
517 && (TYPE_PRECISION (vr2->type)
518 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
/* Walk both operand vectors in lock step, treating a MEM_REF followed
   by an ADDR_EXPR as a plain dereference so equivalent spellings of
   the same access compare equal.  */
525 HOST_WIDE_INT off1 = 0, off2 = 0;
526 vn_reference_op_t vro1, vro2;
527 vn_reference_op_s tem1, tem2;
528 bool deref1 = false, deref2 = false;
529 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
531 if (vro1->opcode == MEM_REF)
537 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
539 if (vro2->opcode == MEM_REF)
/* Replace the &decl operand with a synthesized op for decl itself.  */
547 if (deref1 && vro1->opcode == ADDR_EXPR)
549 memset (&tem1, 0, sizeof (tem1));
550 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
551 tem1.type = TREE_TYPE (tem1.op0);
552 tem1.opcode = TREE_CODE (tem1.op0);
555 if (deref2 && vro2->opcode == ADDR_EXPR)
557 memset (&tem2, 0, sizeof (tem2));
558 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
559 tem2.type = TREE_TYPE (tem2.op0);
560 tem2.opcode = TREE_CODE (tem2.op0);
563 if (!vn_reference_op_eq (vro1, vro2))
/* Both vectors must be fully consumed for the references to match.  */
568 while (VEC_length (vn_reference_op_s, vr1->operands) != i
569 || VEC_length (vn_reference_op_s, vr2->operands) != j);
574 /* Copy the operations present in load/store REF into RESULT, a vector of
575 vn_reference_op_s's. */
/* NOTE(review): this function's switch over TREE_CODE (ref) lost its
   case labels, braces and several statements in extraction; surviving
   lines kept byte-identical.  */
578 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* TARGET_MEM_REF is flattened into three synthetic ops: the
   index/step/offset triple, the second index, and the base.  */
580 if (TREE_CODE (ref) == TARGET_MEM_REF)
582 vn_reference_op_s temp;
584 memset (&temp, 0, sizeof (temp));
585 temp.type = TREE_TYPE (ref);
586 temp.opcode = TREE_CODE (ref);
587 temp.op0 = TMR_INDEX (ref);
588 temp.op1 = TMR_STEP (ref);
589 temp.op2 = TMR_OFFSET (ref);
591 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
593 memset (&temp, 0, sizeof (temp));
594 temp.type = NULL_TREE;
595 temp.opcode = ERROR_MARK;
596 temp.op0 = TMR_INDEX2 (ref);
598 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
600 memset (&temp, 0, sizeof (temp));
601 temp.type = NULL_TREE;
602 temp.opcode = TREE_CODE (TMR_BASE (ref));
603 temp.op0 = TMR_BASE (ref);
605 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
609 /* For non-calls, store the information that makes up the address. */
613 vn_reference_op_s temp;
615 memset (&temp, 0, sizeof (temp));
616 temp.type = TREE_TYPE (ref);
617 temp.opcode = TREE_CODE (ref);
/* MEM_REF case: record the constant offset operand.  */
623 /* The base address gets its own vn_reference_op_s structure. */
624 temp.op0 = TREE_OPERAND (ref, 1);
625 if (host_integerp (TREE_OPERAND (ref, 1), 0))
626 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
629 /* Record bits and position. */
630 temp.op0 = TREE_OPERAND (ref, 1);
631 temp.op1 = TREE_OPERAND (ref, 2);
634 /* The field decl is enough to unambiguously specify the field,
635 a matching type is not necessary and a mismatching type
636 is always a spurious difference. */
637 temp.type = NULL_TREE;
638 temp.op0 = TREE_OPERAND (ref, 1);
639 temp.op1 = TREE_OPERAND (ref, 2);
/* Try to derive a constant byte offset for the field access.  */
641 tree this_offset = component_ref_field_offset (ref);
643 && TREE_CODE (this_offset) == INTEGER_CST)
645 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
646 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
649 = double_int_add (tree_to_double_int (this_offset),
651 (tree_to_double_int (bit_offset),
653 ? 3 : exact_log2 (BITS_PER_UNIT),
654 HOST_BITS_PER_DOUBLE_INT, true));
655 if (double_int_fits_in_shwi_p (off))
661 case ARRAY_RANGE_REF:
663 /* Record index as operand. */
664 temp.op0 = TREE_OPERAND (ref, 1);
665 /* Always record lower bounds and element size. */
666 temp.op1 = array_ref_low_bound (ref);
667 temp.op2 = array_ref_element_size (ref);
668 if (TREE_CODE (temp.op0) == INTEGER_CST
669 && TREE_CODE (temp.op1) == INTEGER_CST
670 && TREE_CODE (temp.op2) == INTEGER_CST)
/* off = (index - low_bound) * element_size, when all constant.  */
672 double_int off = tree_to_double_int (temp.op0);
673 off = double_int_add (off,
675 (tree_to_double_int (temp.op1)));
676 off = double_int_mul (off, tree_to_double_int (temp.op2));
677 if (double_int_fits_in_shwi_p (off))
682 if (DECL_HARD_REGISTER (ref))
691 /* Canonicalize decls to MEM[&decl] which is what we end up with
692 when valueizing MEM[ptr] with ptr = &decl. */
693 temp.opcode = MEM_REF;
694 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
696 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
697 temp.opcode = ADDR_EXPR;
698 temp.op0 = build_fold_addr_expr (ref);
699 temp.type = TREE_TYPE (temp.op0);
712 if (is_gimple_min_invariant (ref))
718 /* These are only interesting for their operands, their
719 existence, and their type. They will never be the last
720 ref in the chain of references (IE they require an
721 operand), so we don't have to put anything
722 for op* as it will be handled by the iteration */
724 case VIEW_CONVERT_EXPR:
728 /* This is only interesting for its constant offset. */
729 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
734 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Walk inward to the next handled component or address operand.  */
736 if (REFERENCE_CLASS_P (ref)
737 || (TREE_CODE (ref) == ADDR_EXPR
738 && !is_gimple_min_invariant (ref)))
739 ref = TREE_OPERAND (ref, 0);
745 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
746 operands in *OPS, the reference alias set SET and the reference type TYPE.
747 Return true if something useful was produced. */
/* NOTE(review): case labels, braces, returns and several statements of
   this function were lost in extraction; surviving lines kept
   byte-identical.  */
750 ao_ref_init_from_vn_reference (ao_ref *ref,
751 alias_set_type set, tree type,
752 VEC (vn_reference_op_s, heap) *ops)
754 vn_reference_op_t op;
756 tree base = NULL_TREE;
758 HOST_WIDE_INT offset = 0;
759 HOST_WIDE_INT max_size;
760 HOST_WIDE_INT size = -1;
761 tree size_tree = NULL_TREE;
762 alias_set_type base_alias_set = -1;
764 /* First get the final access size from just the outermost expression. */
765 op = VEC_index (vn_reference_op_s, ops, 0);
766 if (op->opcode == COMPONENT_REF)
767 size_tree = DECL_SIZE (op->op0);
768 else if (op->opcode == BIT_FIELD_REF)
772 enum machine_mode mode = TYPE_MODE (type);
774 size_tree = TYPE_SIZE (type);
776 size = GET_MODE_BITSIZE (mode);
778 if (size_tree != NULL_TREE)
780 if (!host_integerp (size_tree, 1))
783 size = TREE_INT_CST_LOW (size_tree);
786 /* Initially, maxsize is the same as the accessed element size.
787 In the following it will only grow (or become -1). */
790 /* Compute cumulative bit-offset for nested component-refs and array-refs,
791 and find the ultimate containing object. */
792 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
796 /* These may be in the reference ops, but we cannot do anything
797 sensible with them here. */
799 /* Apart from ADDR_EXPR arguments to MEM_REF. */
800 if (base != NULL_TREE
801 && TREE_CODE (base) == MEM_REF
803 && DECL_P (TREE_OPERAND (op->op0, 0)))
805 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
806 base = TREE_OPERAND (op->op0, 0);
813 offset += pop->off * BITS_PER_UNIT;
821 /* Record the base objects. */
823 base_alias_set = get_deref_alias_set (op->op0);
824 *op0_p = build2 (MEM_REF, op->type,
826 op0_p = &TREE_OPERAND (*op0_p, 0);
837 /* And now the usual component-reference style ops. */
839 offset += tree_low_cst (op->op1, 0);
843 tree field = op->op0;
844 /* We do not have a complete COMPONENT_REF tree here so we
845 cannot use component_ref_field_offset. Do the interesting
    parts manually -- presumably the dropped text said so; confirm.  */
850 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
854 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
856 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
861 case ARRAY_RANGE_REF:
863 /* We recorded the lower bound and the element size. */
864 if (!host_integerp (op->op0, 0)
865 || !host_integerp (op->op1, 0)
866 || !host_integerp (op->op2, 0))
/* hindex = (index - low_bound) * element_size, in bits.  */
870 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
871 hindex -= TREE_INT_CST_LOW (op->op1);
872 hindex *= TREE_INT_CST_LOW (op->op2);
873 hindex *= BITS_PER_UNIT;
885 case VIEW_CONVERT_EXPR:
902 if (base == NULL_TREE)
905 ref->ref = NULL_TREE;
907 ref->offset = offset;
909 ref->max_size = max_size;
910 ref->ref_alias_set = set;
911 if (base_alias_set != -1)
912 ref->base_alias_set = base_alias_set;
914 ref->base_alias_set = get_alias_set (base);
919 /* Copy the operations present in load/store/call REF into RESULT, a vector of
920 vn_reference_op_s's. */
923 copy_reference_ops_from_call (gimple call,
924 VEC(vn_reference_op_s, heap) **result)
926 vn_reference_op_s temp;
929 /* Copy the type, opcode, function being called and static chain. */
930 memset (&temp, 0, sizeof (temp));
931 temp.type = gimple_call_return_type (call);
932 temp.opcode = CALL_EXPR;
933 temp.op0 = gimple_call_fn (call);
934 temp.op1 = gimple_call_chain (call);
936 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
938 /* Copy the call arguments. As they can be references as well,
939 just chain them together. */
940 for (i = 0; i < gimple_call_num_args (call); ++i)
942 tree callarg = gimple_call_arg (call, i);
943 copy_reference_ops_from_ref (callarg, result);
947 /* Create a vector of vn_reference_op_s structures from REF, a
948 REFERENCE_CLASS_P tree. The vector is not shared. */
950 static VEC(vn_reference_op_s, heap) *
951 create_reference_ops_from_ref (tree ref)
953 VEC (vn_reference_op_s, heap) *result = NULL;
955 copy_reference_ops_from_ref (ref, &result);
959 /* Create a vector of vn_reference_op_s structures from CALL, a
960 call statement. The vector is not shared. */
962 static VEC(vn_reference_op_s, heap) *
963 create_reference_ops_from_call (gimple call)
965 VEC (vn_reference_op_s, heap) *result = NULL;
967 copy_reference_ops_from_call (call, &result);
971 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
972 *I_P to point to the last element of the replacement. */
/* NOTE(review): the signature's second parameter, braces and some
   statements were lost in extraction; surviving lines kept
   byte-identical.  */
974 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
977 unsigned int i = *i_p;
/* op is the ADDR_EXPR operand, mem_op the MEM_REF just before it.  */
978 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
979 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
981 HOST_WIDE_INT addr_offset;
983 /* The only thing we have to do is from &OBJ.foo.bar add the offset
984 from .foo.bar to the preceeding MEM_REF offset and replace the
985 address with &OBJ. */
986 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
988 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
989 if (addr_base != op->op0)
/* Fold the stripped offset into the MEM_REF's constant offset,
   with sign extension at the offset type's precision.  */
991 double_int off = tree_to_double_int (mem_op->op0);
992 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
993 off = double_int_add (off, shwi_to_double_int (addr_offset));
994 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
995 op->op0 = build_fold_addr_expr (addr_base);
/* Keep the cached shwi offset in sync with the tree offset.  */
996 if (host_integerp (mem_op->op0, 0))
997 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1003 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1004 *I_P to point to the last element of the replacement. */
/* NOTE(review): the second parameter, braces, early returns and some
   declarations were lost in extraction; surviving lines kept
   byte-identical.  */
1006 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1009 unsigned int i = *i_p;
1010 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1011 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1013 enum tree_code code;
/* Only ADDR_EXPR and POINTER_PLUS_EXPR definitions can be propagated
   into the preceding MEM_REF.  */
1016 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1017 if (!is_gimple_assign (def_stmt))
1020 code = gimple_assign_rhs_code (def_stmt);
1021 if (code != ADDR_EXPR
1022 && code != POINTER_PLUS_EXPR)
/* Start from the MEM_REF's current offset, sign-extended.  */
1025 off = tree_to_double_int (mem_op->op0);
1026 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1028 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1029 from .foo.bar to the preceeding MEM_REF offset and replace the
1030 address with &OBJ. */
1031 if (code == ADDR_EXPR)
1033 tree addr, addr_base;
1034 HOST_WIDE_INT addr_offset;
1036 addr = gimple_assign_rhs1 (def_stmt);
1037 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1040 || TREE_CODE (addr_base) != MEM_REF)
1043 off = double_int_add (off, shwi_to_double_int (addr_offset));
1044 off = double_int_add (off, mem_ref_offset (addr_base));
1045 op->op0 = TREE_OPERAND (addr_base, 0);
/* POINTER_PLUS_EXPR case: fold a constant pointer increment.  */
1050 ptr = gimple_assign_rhs1 (def_stmt);
1051 ptroff = gimple_assign_rhs2 (def_stmt);
1052 if (TREE_CODE (ptr) != SSA_NAME
1053 || TREE_CODE (ptroff) != INTEGER_CST)
1056 off = double_int_add (off, tree_to_double_int (ptroff));
/* Write back the merged offset and keep the cached shwi in sync.  */
1060 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1061 if (host_integerp (mem_op->op0, 0))
1062 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
/* Valueize the new address operand and update the opcode if it
   became a constant.  */
1065 if (TREE_CODE (op->op0) == SSA_NAME)
1066 op->op0 = SSA_VAL (op->op0);
1067 if (TREE_CODE (op->op0) != SSA_NAME)
1068 op->opcode = TREE_CODE (op->op0);
/* Recurse: the propagated address may itself be foldable.  */
1071 if (TREE_CODE (op->op0) == SSA_NAME)
1072 vn_reference_maybe_forwprop_address (ops, i_p);
1073 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1074 vn_reference_fold_indirect (ops, i_p);
1077 /* Optimize the reference REF to a constant if possible or return
1078 NULL_TREE if not. */
/* NOTE(review): braces, the anyconst checks and the final return were
   lost in extraction; surviving lines kept byte-identical.  */
1081 fully_constant_vn_reference_p (vn_reference_t ref)
1083 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1084 vn_reference_op_t op;
1086 /* Try to simplify the translated expression if it is
1087 a call to a builtin function with at most two arguments. */
1088 op = VEC_index (vn_reference_op_s, operands, 0);
1089 if (op->opcode == CALL_EXPR
1090 && TREE_CODE (op->op0) == ADDR_EXPR
1091 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1092 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1093 && VEC_length (vn_reference_op_s, operands) >= 2
1094 && VEC_length (vn_reference_op_s, operands) <= 3)
1096 vn_reference_op_t arg0, arg1 = NULL;
/* Only bother folding when at least one argument is constant.  */
1097 bool anyconst = false;
1098 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1099 if (VEC_length (vn_reference_op_s, operands) > 2)
1100 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1101 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1102 || (arg0->opcode == ADDR_EXPR
1103 && is_gimple_min_invariant (arg0->op0)))
1106 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1107 || (arg1->opcode == ADDR_EXPR
1108 && is_gimple_min_invariant (arg1->op0))))
1112 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1115 arg1 ? arg1->op0 : NULL);
/* Strip a wrapping NOP_EXPR so the invariance test sees the value.  */
1117 && TREE_CODE (folded) == NOP_EXPR)
1118 folded = TREE_OPERAND (folded, 0);
1120 && is_gimple_min_invariant (folded))
1125 /* Simplify reads from constant strings. */
1126 else if (op->opcode == ARRAY_REF
1127 && TREE_CODE (op->op0) == INTEGER_CST
1128 && integer_zerop (op->op1)
1129 && VEC_length (vn_reference_op_s, operands) == 2)
1131 vn_reference_op_t arg0;
1132 arg0 = VEC_index (vn_reference_op_s, operands, 1);
/* A single-byte in-bounds read from a STRING_CST folds to the
   character constant.  */
1133 if (arg0->opcode == STRING_CST
1134 && (TYPE_MODE (op->type)
1135 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1136 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1137 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1138 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1139 return build_int_cst_type (op->type,
1140 (TREE_STRING_POINTER (arg0->op0)
1141 [TREE_INT_CST_LOW (op->op0)]));
1147 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1148 structures into their value numbers. This is done in-place, and
1149 the vector passed in is returned. */
/* NOTE(review): braces, some conditions and the final return were lost
   in extraction; surviving lines kept byte-identical.  */
1151 static VEC (vn_reference_op_s, heap) *
1152 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1154 vn_reference_op_t vro;
1157 FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1159 if (vro->opcode == SSA_NAME
1160 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1162 vro->op0 = SSA_VAL (vro->op0);
1163 /* If it transforms from an SSA_NAME to a constant, update
    the opcode.  */
1165 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1166 vro->opcode = TREE_CODE (vro->op0);
1168 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1169 vro->op1 = SSA_VAL (vro->op1);
1170 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1171 vro->op2 = SSA_VAL (vro->op2);
1172 /* If it transforms from an SSA_NAME to an address, fold with
1173 a preceding indirect reference. */
1176 && TREE_CODE (vro->op0) == ADDR_EXPR
1177 && VEC_index (vn_reference_op_s,
1178 orig, i - 1)->opcode == MEM_REF)
1179 vn_reference_fold_indirect (&orig, &i);
/* Otherwise try to forward-propagate the address definition of a
   still-SSA operand into the preceding MEM_REF.  */
1181 && vro->opcode == SSA_NAME
1182 && VEC_index (vn_reference_op_s,
1183 orig, i - 1)->opcode == MEM_REF)
1184 vn_reference_maybe_forwprop_address (&orig, &i);
1185 /* If it transforms a non-constant ARRAY_REF into a constant
1186 one, adjust the constant offset. */
1187 else if (vro->opcode == ARRAY_REF
1189 && TREE_CODE (vro->op0) == INTEGER_CST
1190 && TREE_CODE (vro->op1) == INTEGER_CST
1191 && TREE_CODE (vro->op2) == INTEGER_CST)
/* off = (index - low_bound) * element_size, mirrors
   copy_reference_ops_from_ref's ARRAY_REF handling.  */
1193 double_int off = tree_to_double_int (vro->op0);
1194 off = double_int_add (off,
1196 (tree_to_double_int (vro->op1)));
1197 off = double_int_mul (off, tree_to_double_int (vro->op2));
1198 if (double_int_fits_in_shwi_p (off))
/* Scratch vector reused by the valueize_shared_reference_ops_from_*
   helpers below to avoid re-allocating on every lookup.  */
1206 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1208 /* Create a vector of vn_reference_op_s structures from REF, a
1209 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1212 static VEC(vn_reference_op_s, heap) *
1213 valueize_shared_reference_ops_from_ref (tree ref)
1217 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1218 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1219 shared_lookup_references = valueize_refs (shared_lookup_references);
1220 return shared_lookup_references;
1223 /* Create a vector of vn_reference_op_s structures from CALL, a
1224 call statement. The vector is shared among all callers of
1227 static VEC(vn_reference_op_s, heap) *
1228 valueize_shared_reference_ops_from_call (gimple call)
1232 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1233 copy_reference_ops_from_call (call, &shared_lookup_references);
1234 shared_lookup_references = valueize_refs (shared_lookup_references);
1235 return shared_lookup_references;
1238 /* Lookup a SCCVN reference operation VR in the current hash table.
1239 Returns the resulting value number if it exists in the hash table,
1240 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1241 vn_reference_t stored in the hashtable if something is found. */
1244 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1249 hash = vr->hashcode;
1250 slot = htab_find_slot_with_hash (current_info->references, vr,
1252 if (!slot && current_info == optimistic_info)
1253 slot = htab_find_slot_with_hash (valid_info->references, vr,
1258 *vnresult = (vn_reference_t)*slot;
1259 return ((vn_reference_t)*slot)->result;
/* If non-NULL, updated with each vuse visited while walking
   non-aliased vuses (see vn_reference_lookup_2).  */
1265 static tree *last_vuse_ptr;
/* Current and default kind of lookup walk to perform.  */
1266 static vn_lookup_kind vn_walk_kind;
1267 static vn_lookup_kind default_vn_walk_kind;
1269 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1270 with the current VUSE and performs the expression lookup. */
1273 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1275 vn_reference_t vr = (vn_reference_t)vr_;
1280 *last_vuse_ptr = vuse;
1282 /* Fixup vuse and hash. */
1284 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1285 vr->vuse = SSA_VAL (vuse);
1287 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1289 hash = vr->hashcode;
1290 slot = htab_find_slot_with_hash (current_info->references, vr,
1292 if (!slot && current_info == optimistic_info)
1293 slot = htab_find_slot_with_hash (valid_info->references, vr,
1301 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1302 from the statement defining VUSE and if not successful tries to
1303 translate *REFP and VR_ through an aggregate copy at the definition
/* NOTE(review): sampled excerpt -- many interior lines (braces, early
   returns, conditions) are missing from this listing; code kept
   byte-identical.  Visible structure: disambiguate against the
   valueized LHS, then four cases: (1) memset of zero, (2) empty
   CONSTRUCTOR store, (3) aggregate copy, (4) memcpy-family copy.  */
1307 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1309 vn_reference_t vr = (vn_reference_t)vr_;
1310 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1312 HOST_WIDE_INT offset, maxsize;
1313 static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1315 bool lhs_ref_ok = false;
1317 /* First try to disambiguate after value-replacing in the definitions LHS. */
1318 if (is_gimple_assign (def_stmt))
1320 VEC (vn_reference_op_s, heap) *tem;
1321 tree lhs = gimple_assign_lhs (def_stmt);
1322 /* Avoid re-allocation overhead. */
1323 VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1324 copy_reference_ops_from_ref (lhs, &lhs_ops);
1326 lhs_ops = valueize_refs (lhs_ops);
1327 gcc_assert (lhs_ops == tem);
1328 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
1329 TREE_TYPE (lhs), lhs_ops);
1331 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1335 base = ao_ref_base (ref);
1336 offset = ref->offset;
1337 maxsize = ref->max_size;
1339 /* If we cannot constrain the size of the reference we cannot
1340 test if anything kills it. */
1344 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1345 from that definition.
/* 1) Call to a memset storing zero into the destination.  */
1347 if (is_gimple_reg_type (vr->type)
1348 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1349 && integer_zerop (gimple_call_arg (def_stmt, 1))
1350 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1351 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1353 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1355 HOST_WIDE_INT offset2, size2, maxsize2;
1356 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1357 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
/* The division check guards against overflow of the *8 above.  */
1358 if ((unsigned HOST_WIDE_INT)size2 / 8
1359 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1361 && operand_equal_p (base, base2, 0)
1362 && offset2 <= offset
1363 && offset2 + size2 >= offset + maxsize)
1365 tree val = build_zero_cst (vr->type);
1366 unsigned int value_id = get_or_alloc_constant_value_id (val);
1367 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1368 VEC_copy (vn_reference_op_s,
1369 heap, vr->operands),
1374 /* 2) Assignment from an empty CONSTRUCTOR. */
1375 else if (is_gimple_reg_type (vr->type)
1376 && gimple_assign_single_p (def_stmt)
1377 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1378 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1381 HOST_WIDE_INT offset2, size2, maxsize2;
1382 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1383 &offset2, &size2, &maxsize2);
1385 && operand_equal_p (base, base2, 0)
1386 && offset2 <= offset
1387 && offset2 + size2 >= offset + maxsize)
1389 tree val = build_zero_cst (vr->type);
1390 unsigned int value_id = get_or_alloc_constant_value_id (val);
1391 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1392 VEC_copy (vn_reference_op_s,
1393 heap, vr->operands),
1398 /* 3) For aggregate copies translate the reference through them if
1399 the copy kills ref. */
1400 else if (vn_walk_kind == VN_WALKREWRITE
1401 && gimple_assign_single_p (def_stmt)
1402 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1403 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1404 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1407 HOST_WIDE_INT offset2, size2, maxsize2;
1409 VEC (vn_reference_op_s, heap) *rhs = NULL;
1410 vn_reference_op_t vro;
1416 /* See if the assignment kills REF. */
1417 base2 = ao_ref_base (&lhs_ref);
1418 offset2 = lhs_ref.offset;
1419 size2 = lhs_ref.size;
1420 maxsize2 = lhs_ref.max_size;
1422 || (base != base2 && !operand_equal_p (base, base2, 0))
1424 || offset2 + size2 < offset + maxsize)
1427 /* Find the common base of ref and the lhs. lhs_ops already
1428 contains valueized operands for the lhs. */
1429 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1430 j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
/* Walk both operand vectors from the innermost component outward
   while they match.  */
1431 while (j >= 0 && i >= 0
1432 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1434 VEC_index (vn_reference_op_s, lhs_ops, j)))
1440 /* i now points to the first additional op.
1441 ??? LHS may not be completely contained in VR, one or more
1442 VIEW_CONVERT_EXPRs could be in its way. We could at least
1443 try handling outermost VIEW_CONVERT_EXPRs. */
1447 /* Now re-write REF to be based on the rhs of the assignment. */
1448 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1449 /* We need to pre-pend vr->operands[0..i] to rhs. */
1450 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1451 > VEC_length (vn_reference_op_s, vr->operands))
1453 VEC (vn_reference_op_s, heap) *old = vr->operands;
1454 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1455 i + 1 + VEC_length (vn_reference_op_s, rhs));
/* Growing may reallocate; if the shared vector moved, drop the stale
   global pointer so it is not freed/reused behind our back.  */
1456 if (old == shared_lookup_references
1457 && vr->operands != old)
1458 shared_lookup_references = NULL;
1461 VEC_truncate (vn_reference_op_s, vr->operands,
1462 i + 1 + VEC_length (vn_reference_op_s, rhs));
1463 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1464 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1465 VEC_free (vn_reference_op_s, heap, rhs);
1466 vr->hashcode = vn_reference_compute_hash (vr);
1468 /* Adjust *ref from the new operands. */
1469 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1471 /* This can happen with bitfields. */
1472 if (ref->size != r.size)
1476 /* Do not update last seen VUSE after translating. */
1477 last_vuse_ptr = NULL;
1479 /* Keep looking for the adjusted *REF / VR pair. */
1483 /* 4) For memcpy copies translate the reference through them if
1484 the copy kills ref. */
1485 else if (vn_walk_kind == VN_WALKREWRITE
1486 && is_gimple_reg_type (vr->type)
1487 /* ??? Handle BCOPY as well. */
1488 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1489 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1490 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1491 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1492 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1493 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1494 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1495 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1499 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1500 vn_reference_op_s op;
1504 /* Only handle non-variable, addressable refs. */
1505 if (ref->size != maxsize
1506 || offset % BITS_PER_UNIT != 0
1507 || ref->size % BITS_PER_UNIT != 0)
1510 /* Extract a pointer base and an offset for the destination. */
1511 lhs = gimple_call_arg (def_stmt, 0)
1513 if (TREE_CODE (lhs) == SSA_NAME)
1514 lhs = SSA_VAL (lhs);
1515 if (TREE_CODE (lhs) == ADDR_EXPR)
1517 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1521 if (TREE_CODE (tem) == MEM_REF
1522 && host_integerp (TREE_OPERAND (tem, 1), 1))
1524 lhs = TREE_OPERAND (tem, 0);
1525 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1527 else if (DECL_P (tem))
1528 lhs = build_fold_addr_expr (tem);
1532 if (TREE_CODE (lhs) != SSA_NAME
1533 && TREE_CODE (lhs) != ADDR_EXPR)
1536 /* Extract a pointer base and an offset for the source. */
1537 rhs = gimple_call_arg (def_stmt, 1);
1539 if (TREE_CODE (rhs) == SSA_NAME)
1540 rhs = SSA_VAL (rhs);
1541 if (TREE_CODE (rhs) == ADDR_EXPR)
1543 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1547 if (TREE_CODE (tem) == MEM_REF
1548 && host_integerp (TREE_OPERAND (tem, 1), 1))
1550 rhs = TREE_OPERAND (tem, 0);
1551 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1553 else if (DECL_P (tem))
1554 rhs = build_fold_addr_expr (tem);
1558 if (TREE_CODE (rhs) != SSA_NAME
1559 && TREE_CODE (rhs) != ADDR_EXPR)
1562 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1564 /* The bases of the destination and the references have to agree. */
1565 if ((TREE_CODE (base) != MEM_REF
1567 || (TREE_CODE (base) == MEM_REF
1568 && (TREE_OPERAND (base, 0) != lhs
1569 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1571 && (TREE_CODE (lhs) != ADDR_EXPR
1572 || TREE_OPERAND (lhs, 0) != base)))
1575 /* And the access has to be contained within the memcpy destination. */
1576 at = offset / BITS_PER_UNIT;
1577 if (TREE_CODE (base) == MEM_REF)
1578 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1580 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1583 /* Make room for 2 operands in the new reference. */
1584 if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1586 VEC (vn_reference_op_s, heap) *old = vr->operands;
1587 VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
/* Same shared-vector invalidation dance as in the aggregate-copy
   case above.  */
1588 if (old == shared_lookup_references
1589 && vr->operands != old)
1590 shared_lookup_references = NULL;
1593 VEC_truncate (vn_reference_op_s, vr->operands, 2);
1595 /* The looked-through reference is a simple MEM_REF. */
1596 memset (&op, 0, sizeof (op));
1598 op.opcode = MEM_REF;
1599 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1600 op.off = at - lhs_offset + rhs_offset;
1601 VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1602 op.type = TREE_TYPE (rhs);
1603 op.opcode = TREE_CODE (rhs);
1606 VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1607 vr->hashcode = vn_reference_compute_hash (vr);
1609 /* Adjust *ref from the new operands. */
1610 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1612 /* This can happen with bitfields. */
1613 if (ref->size != r.size)
1617 /* Do not update last seen VUSE after translating. */
1618 last_vuse_ptr = NULL;
1620 /* Keep looking for the adjusted *REF / VR pair. */
1624 /* Bail out and stop walking. */
1628 /* Lookup a reference operation by its parts, in the current hash table.
1629 Returns the resulting value number if it exists in the hash table,
1630 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1631 vn_reference_t stored in the hashtable if something is found. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  OPERANDS are copied into the shared lookup vector
   and valueized there, leaving the caller's vector untouched.  */
1634 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1635 VEC (vn_reference_op_s, heap) *operands,
1636 vn_reference_t *vnresult, vn_lookup_kind kind)
1638 struct vn_reference_s vr1;
1646 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1647 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1648 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1649 VEC_length (vn_reference_op_s, operands));
1650 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1651 VEC_address (vn_reference_op_s, operands),
1652 sizeof (vn_reference_op_s)
1653 * VEC_length (vn_reference_op_s, operands));
1654 vr1.operands = operands = shared_lookup_references
1655 = valueize_refs (shared_lookup_references);
1658 vr1.hashcode = vn_reference_compute_hash (&vr1);
1659 if ((cst = fully_constant_vn_reference_p (&vr1)))
1662 vn_reference_lookup_1 (&vr1, vnresult);
1664 && kind != VN_NOWALK
1668 vn_walk_kind = kind;
1669 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1671 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1672 vn_reference_lookup_2,
1673 vn_reference_lookup_3, &vr1);
/* The walk callbacks may have replaced vr1.operands; free the copy.  */
1674 if (vr1.operands != operands)
1675 VEC_free (vn_reference_op_s, heap, vr1.operands);
1679 return (*vnresult)->result;
1684 /* Lookup OP in the current hash table, and return the resulting value
1685 number if it exists in the hash table. Return NULL_TREE if it does
1686 not exist in the hash table or if the result field of the structure
1687 was NULL. VNRESULT will be filled in with the vn_reference_t
1688 stored in the hashtable if one exists. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  */
1691 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1692 vn_reference_t *vnresult)
1694 VEC (vn_reference_op_s, heap) *operands;
1695 struct vn_reference_s vr1;
1701 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1702 vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1703 vr1.type = TREE_TYPE (op);
1704 vr1.set = get_alias_set (op);
1705 vr1.hashcode = vn_reference_compute_hash (&vr1);
1706 if ((cst = fully_constant_vn_reference_p (&vr1)))
1709 if (kind != VN_NOWALK
1712 vn_reference_t wvnresult;
1714 /* Make sure to use a valueized reference ... */
1715 if (!ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type, vr1.operands))
1716 ao_ref_init (&r, op);
1718 /* ... but also preserve a full reference tree for advanced TBAA. */
1720 vn_walk_kind = kind;
1722 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1723 vn_reference_lookup_2,
1724 vn_reference_lookup_3, &vr1);
/* Free the operand vector if the walk replaced the shared one.  */
1725 if (vr1.operands != operands)
1726 VEC_free (vn_reference_op_s, heap, vr1.operands);
1730 *vnresult = wvnresult;
1731 return wvnresult->result;
1737 return vn_reference_lookup_1 (&vr1, vnresult);
1741 /* Insert OP into the current hash table with a value number of
1742 RESULT, and return the resulting reference structure we created. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  */
1745 vn_reference_insert (tree op, tree result, tree vuse)
1750 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
/* Constants get a shared constant value id; SSA names carry their own.  */
1751 if (TREE_CODE (result) == SSA_NAME)
1752 vr1->value_id = VN_INFO (result)->value_id;
1754 vr1->value_id = get_or_alloc_constant_value_id (result);
1755 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1756 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1757 vr1->type = TREE_TYPE (op);
1758 vr1->set = get_alias_set (op);
1759 vr1->hashcode = vn_reference_compute_hash (vr1);
1760 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1762 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1765 /* Because we lookup stores using vuses, and value number failures
1766 using the vdefs (see visit_reference_op_store for how and why),
1767 it's possible that on failure we may try to insert an already
1768 inserted store. This is not wrong, there is no ssa name for a
1769 store that we could use as a differentiator anyway. Thus, unlike
1770 the other lookup functions, you cannot gcc_assert (!*slot)
1773 /* But free the old slot in case of a collision. */
1775 free_reference (*slot);
1781 /* Insert a reference by its pieces into the current hash table with
1782 a value number of RESULT. Return the resulting reference
1783 structure we created. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Takes ownership of OPERANDS (stored after
   valueization).  */
1786 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1787 VEC (vn_reference_op_s, heap) *operands,
1788 tree result, unsigned int value_id)
1794 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1795 vr1->value_id = value_id;
1796 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1797 vr1->operands = valueize_refs (operands);
1800 vr1->hashcode = vn_reference_compute_hash (vr1);
1801 if (result && TREE_CODE (result) == SSA_NAME)
1802 result = SSA_VAL (result);
1803 vr1->result = result;
1805 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1808 /* At this point we should have all the things inserted that we have
1809 seen before, and we should never try inserting something that
1811 gcc_assert (!*slot);
1813 free_reference (*slot);
1819 /* Compute and return the hash value for nary operation VBO1. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Side effects: valueizes the operands in place and
   canonicalizes commutative operand order before hashing.  */
1822 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1827 for (i = 0; i < vno1->length; ++i)
1828 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1829 vno1->op[i] = SSA_VAL (vno1->op[i]);
1831 if (vno1->length == 2
1832 && commutative_tree_code (vno1->opcode)
1833 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1835 tree temp = vno1->op[0];
1836 vno1->op[0] = vno1->op[1];
1840 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1841 for (i = 0; i < vno1->length; ++i)
1842 hash = iterative_hash_expr (vno1->op[i], hash);
1847 /* Return the computed hashcode for nary operation P1. */
/* htab hash callback: the hash is precomputed and cached.  */
1850 vn_nary_op_hash (const void *p1)
1852 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1853 return vno1->hashcode;
1856 /* Compare nary operations P1 and P2 and return true if they are
/* htab equality callback: cheap hashcode check first, then opcode,
   type compatibility and operand-wise comparison.  NOTE(review):
   sampled excerpt -- return statements are missing from this listing.  */
1860 vn_nary_op_eq (const void *p1, const void *p2)
1862 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1863 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1866 if (vno1->hashcode != vno2->hashcode)
1869 if (vno1->opcode != vno2->opcode
1870 || !types_compatible_p (vno1->type, vno2->type))
1873 for (i = 0; i < vno1->length; ++i)
1874 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1880 /* Initialize VNO from the pieces provided. */
/* NOTE(review): sampled excerpt -- the switch header and braces are
   missing; code kept byte-identical.  Fills op[0..length-1] via
   deliberate case fallthrough.  */
1883 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1884 enum tree_code code, tree type, tree op0,
1885 tree op1, tree op2, tree op3)
1888 vno->length = length;
1892 /* The fallthrus here are deliberate. */
1893 case 4: vno->op[3] = op3;
1894 case 3: vno->op[2] = op2;
1895 case 2: vno->op[1] = op1;
1896 case 1: vno->op[0] = op0;
1902 /* Initialize VNO from OP. */
/* Derives opcode, arity and type from the tree expression OP and
   copies its operands.  */
1905 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1909 vno->opcode = TREE_CODE (op);
1910 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1911 vno->type = TREE_TYPE (op);
1912 for (i = 0; i < vno->length; ++i)
1913 vno->op[i] = TREE_OPERAND (op, i);
1916 /* Initialize VNO from STMT. */
/* Uses the assign rhs code and ops 1..n of STMT.  For wrapping codes
   (REALPART/IMAGPART/VIEW_CONVERT) op[0] is peeled to the inner
   operand so equivalent expressions hash the same.  */
1919 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1923 vno->opcode = gimple_assign_rhs_code (stmt);
1924 vno->length = gimple_num_ops (stmt) - 1;
1925 vno->type = gimple_expr_type (stmt);
1926 for (i = 0; i < vno->length; ++i)
1927 vno->op[i] = gimple_op (stmt, i + 1);
1928 if (vno->opcode == REALPART_EXPR
1929 || vno->opcode == IMAGPART_EXPR
1930 || vno->opcode == VIEW_CONVERT_EXPR)
1931 vno->op[0] = TREE_OPERAND (vno->op[0], 0);
1934 /* Compute the hashcode for VNO and look for it in the hash table;
1935 return the resulting value number if it exists in the hash table.
1936 Return NULL_TREE if it does not exist in the hash table or if the
1937 result field of the operation is NULL. VNRESULT will contain the
1938 vn_nary_op_t from the hashtable if it exists. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Like the reference lookup, falls back from the
   optimistic to the valid table.  */
1941 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
1948 vno->hashcode = vn_nary_op_compute_hash (vno);
1949 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
1951 if (!slot && current_info == optimistic_info)
1952 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
1957 *vnresult = (vn_nary_op_t)*slot;
1958 return ((vn_nary_op_t)*slot)->result;
1961 /* Lookup a n-ary operation by its pieces and return the resulting value
1962 number if it exists in the hash table. Return NULL_TREE if it does
1963 not exist in the hash table or if the result field of the operation
1964 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
/* Thin wrapper: build a stack vn_nary_op_s from the pieces and defer
   to vn_nary_op_lookup_1.  */
1968 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1969 tree type, tree op0, tree op1, tree op2,
1970 tree op3, vn_nary_op_t *vnresult)
1972 struct vn_nary_op_s vno1;
1973 init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
1974 return vn_nary_op_lookup_1 (&vno1, vnresult);
1977 /* Lookup OP in the current hash table, and return the resulting value
1978 number if it exists in the hash table. Return NULL_TREE if it does
1979 not exist in the hash table or if the result field of the operation
1980 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
/* Thin wrapper: initialize from the tree expression and defer to
   vn_nary_op_lookup_1.  */
1984 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1986 struct vn_nary_op_s vno1;
1987 init_vn_nary_op_from_op (&vno1, op);
1988 return vn_nary_op_lookup_1 (&vno1, vnresult);
1991 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1992 value number if it exists in the hash table. Return NULL_TREE if
1993 it does not exist in the hash table. VNRESULT will contain the
1994 vn_nary_op_t from the hashtable if it exists. */
/* Thin wrapper: initialize from the statement and defer to
   vn_nary_op_lookup_1.  */
1997 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1999 struct vn_nary_op_s vno1;
2000 init_vn_nary_op_from_stmt (&vno1, stmt);
2001 return vn_nary_op_lookup_1 (&vno1, vnresult);
2004 /* Return the size of a vn_nary_op_t with LENGTH operands. */
/* The struct declares room for 4 operands; trim the unused tail.  */
2007 sizeof_vn_nary_op (unsigned int length)
2009 return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
2012 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
/* Raw obstack allocation; caller is responsible for initialization.  */
2015 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2017 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2020 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
/* NOTE(review): sampled excerpt -- some lines are missing; the only
   code change here repairs an encoding corruption: "&current_info"
   had been mangled to the mojibake "¤t_info" (the "&curren"
   prefix was swallowed as an HTML currency-sign entity).  */
2024 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2026 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2027 &current_info->nary_obstack);
2029 vno1->value_id = value_id;
2030 vno1->length = length;
2031 vno1->result = result;
2036 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2037 VNO->HASHCODE first. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Asserts the entry is not already present.  */
2040 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2045 vno->hashcode = vn_nary_op_compute_hash (vno);
2047 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2048 gcc_assert (!*slot);
2054 /* Insert a n-ary operation into the current hash table using its
2055 pieces. Return the vn_nary_op_t structure we created and put in
/* Allocate, initialize from pieces, then insert (hash computed by the
   insert helper).  */
2059 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2060 tree type, tree op0,
2061 tree op1, tree op2, tree op3,
2063 unsigned int value_id)
2067 vno1 = alloc_vn_nary_op (length, result, value_id);
2068 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
2069 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2072 /* Insert OP into the current hash table with a value number of
2073 RESULT. Return the vn_nary_op_t structure we created and put in
/* Allocate, initialize from the tree expression, then insert.  */
2077 vn_nary_op_insert (tree op, tree result)
2079 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2082 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2083 init_vn_nary_op_from_op (vno1, op);
2084 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2087 /* Insert the rhs of STMT into the current hash table with a value number of
/* Allocate, initialize from the statement, then insert.  */
2091 vn_nary_op_insert_stmt (gimple stmt, tree result)
2093 unsigned length = gimple_num_ops (stmt) - 1;
2096 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2097 init_vn_nary_op_from_stmt (vno1, stmt);
2098 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2101 /* Compute a hashcode for PHI operation VP1 and return it. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  The hash seeds with the basic-block index, mixes in
   type precision/signedness to separate all-constant PHIs, then folds
   in each argument (VN_TOP is skipped).  */
2103 static inline hashval_t
2104 vn_phi_compute_hash (vn_phi_t vp1)
2111 result = vp1->block->index;
2113 /* If all PHI arguments are constants we need to distinguish
2114 the PHI node via its type. */
2115 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2116 result += (INTEGRAL_TYPE_P (type)
2117 + (INTEGRAL_TYPE_P (type)
2118 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2120 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2122 if (phi1op == VN_TOP)
2124 result = iterative_hash_expr (phi1op, result);
2130 /* Return the computed hashcode for phi operation P1. */
/* htab hash callback: the hash is precomputed and cached.  */
2133 vn_phi_hash (const void *p1)
2135 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2136 return vp1->hashcode;
2139 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
/* NOTE(review): sampled excerpt -- return statements and braces are
   missing; code kept byte-identical.  */
2142 vn_phi_eq (const void *p1, const void *p2)
2144 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2145 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2147 if (vp1->hashcode != vp2->hashcode)
2150 if (vp1->block == vp2->block)
2155 /* If the PHI nodes do not have compatible types
2156 they are not the same. */
2157 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2158 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2161 /* Any phi in the same block will have its arguments in the
2162 same edge order, because of how we store phi nodes. */
2163 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2165 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2166 if (phi1op == VN_TOP || phi2op == VN_TOP)
2168 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by vn_phi_lookup to avoid reallocating the
   argument list on every lookup.  */
2176 static VEC(tree, heap) *shared_lookup_phiargs;
2178 /* Lookup PHI in the current hash table, and return the resulting
2179 value number if it exists in the hash table. Return NULL_TREE if
2180 it does not exist in the hash table. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Falls back from the optimistic to the valid table.  */
2183 vn_phi_lookup (gimple phi)
2186 struct vn_phi_s vp1;
2189 VEC_truncate (tree, shared_lookup_phiargs, 0);
2191 /* Canonicalize the SSA_NAME's to their value number. */
2192 for (i = 0; i < gimple_phi_num_args (phi); i++)
2194 tree def = PHI_ARG_DEF (phi, i);
2195 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2196 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2198 vp1.phiargs = shared_lookup_phiargs;
2199 vp1.block = gimple_bb (phi);
2200 vp1.hashcode = vn_phi_compute_hash (&vp1);
2201 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2203 if (!slot && current_info == optimistic_info)
2204 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2208 return ((vn_phi_t)*slot)->result;
2211 /* Insert PHI into the current hash table with a value number of
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Unlike lookup, this builds a fresh heap vector of
   valueized arguments that is owned by the inserted entry.  */
2215 vn_phi_insert (gimple phi, tree result)
2218 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2220 VEC (tree, heap) *args = NULL;
2222 /* Canonicalize the SSA_NAME's to their value number. */
2223 for (i = 0; i < gimple_phi_num_args (phi); i++)
2225 tree def = PHI_ARG_DEF (phi, i);
2226 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2227 VEC_safe_push (tree, heap, args, def);
2229 vp1->value_id = VN_INFO (result)->value_id;
2230 vp1->phiargs = args;
2231 vp1->block = gimple_bb (phi);
2232 vp1->result = result;
2233 vp1->hashcode = vn_phi_compute_hash (vp1);
2235 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2238 /* Because we iterate over phi operations more than once, it's
2239 possible the slot might already exist here, hence no assert. */
2245 /* Print set of components in strongly connected component SCC to OUT. */
/* Debug-dump helper; output goes to OUT (typically the dump file).  */
2248 print_scc (FILE *out, VEC (tree, heap) *scc)
2253 fprintf (out, "SCC consists of: ");
2254 FOR_EACH_VEC_ELT (tree, scc, i, var)
2256 print_generic_expr (out, var, 0);
2259 fprintf (out, "\n");
2262 /* Set the value number of FROM to TO, return true if it has changed
/* NOTE(review): sampled excerpt -- some branches and returns are
   missing; code kept byte-identical.  Visible behavior: a name already
   numbered to itself (VARYING) is never changed; abnormal-PHI SSA
   names are rejected as value numbers; change detection uses
   operand_equal_p with OEP_PURE_SAME.  */
2266 set_ssa_val_to (tree from, tree to)
2268 tree currval = SSA_VAL (from);
2272 if (currval == from)
2274 if (dump_file && (dump_flags & TDF_DETAILS))
2276 fprintf (dump_file, "Not changing value number of ");
2277 print_generic_expr (dump_file, from, 0);
2278 fprintf (dump_file, " from VARYING to ");
2279 print_generic_expr (dump_file, to, 0);
2280 fprintf (dump_file, "\n");
2284 else if (TREE_CODE (to) == SSA_NAME
2285 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2289 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2290 and invariants. So assert that here. */
2291 gcc_assert (to != NULL_TREE
2293 || TREE_CODE (to) == SSA_NAME
2294 || is_gimple_min_invariant (to)));
2296 if (dump_file && (dump_flags & TDF_DETAILS))
2298 fprintf (dump_file, "Setting value number of ");
2299 print_generic_expr (dump_file, from, 0);
2300 fprintf (dump_file, " to ");
2301 print_generic_expr (dump_file, to, 0);
2304 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2306 VN_INFO (from)->valnum = to;
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2308 fprintf (dump_file, " (changed)\n");
2311 if (dump_file && (dump_flags & TDF_DETAILS))
2312 fprintf (dump_file, "\n");
2316 /* Set all definitions in STMT to value number to themselves.
2317 Return true if a value number changed. */
/* Numbering a def to itself marks it VARYING in this scheme.  */
2320 defs_to_varying (gimple stmt)
2322 bool changed = false;
2326 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2328 tree def = DEF_FROM_PTR (defp);
2330 VN_INFO (def)->use_processed = true;
2331 changed |= set_ssa_val_to (def, def);
/* Forward declarations for helpers defined later in the file.  */
2336 static bool expr_has_constants (tree expr);
2337 static tree valueize_expr (tree expr);
2339 /* Visit a copy between LHS and RHS, return true if the value number
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  */
2343 visit_copy (tree lhs, tree rhs)
2345 /* Follow chains of copies to their destination. */
2346 while (TREE_CODE (rhs) == SSA_NAME
2347 && SSA_VAL (rhs) != rhs)
2348 rhs = SSA_VAL (rhs);
2350 /* The copy may have a more interesting constant filled expression
2351 (we don't, since we know our RHS is just an SSA name). */
2352 if (TREE_CODE (rhs) == SSA_NAME)
2354 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2355 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2358 return set_ssa_val_to (lhs, rhs);
2361 /* Visit a nary operator RHS, value number it, and return true if the
2362 value number of LHS has changed as a result. */
/* On a table hit, LHS gets the cached result; on a miss, LHS becomes
   its own value number and the expression is recorded.  */
2365 visit_nary_op (tree lhs, gimple stmt)
2367 bool changed = false;
2368 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2371 changed = set_ssa_val_to (lhs, result);
2374 changed = set_ssa_val_to (lhs, lhs);
2375 vn_nary_op_insert_stmt (stmt, lhs);
2381 /* Visit a call STMT storing into LHS. Return true if the value number
2382 of the LHS has changed as a result. */
/* NOTE(review): sampled excerpt -- some lines are missing; code kept
   byte-identical.  Lookup hit: propagate result (and has_constants);
   miss: number LHS to itself and insert a fresh reference entry.  */
2385 visit_reference_op_call (tree lhs, gimple stmt)
2387 bool changed = false;
2388 struct vn_reference_s vr1;
2390 tree vuse = gimple_vuse (stmt);
2392 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2393 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2394 vr1.type = gimple_expr_type (stmt);
2396 vr1.hashcode = vn_reference_compute_hash (&vr1);
2397 result = vn_reference_lookup_1 (&vr1, NULL);
2400 changed = set_ssa_val_to (lhs, result);
2401 if (TREE_CODE (result) == SSA_NAME
2402 && VN_INFO (result)->has_constants)
2403 VN_INFO (lhs)->has_constants = true;
2409 changed = set_ssa_val_to (lhs, lhs);
2410 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2411 vr2->vuse = vr1.vuse;
2412 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2413 vr2->type = vr1.type;
2415 vr2->hashcode = vr1.hashcode;
2417 slot = htab_find_slot_with_hash (current_info->references,
2418 vr2, vr2->hashcode, INSERT);
2420 free_reference (*slot);
2427 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2428 and return true if the value number of the LHS has changed as a result. */
/* NOTE(review): sampled excerpt -- many lines (braces, else-arms) are
   missing; code kept byte-identical.  Visible flow: lookup through the
   alias walk (tracking the last VUSE), retry through a VIEW_CONVERT,
   patch up type-punned hits by wrapping in VIEW_CONVERT_EXPR and
   possibly inserting a fresh SSA name, else insert the load.  */
2431 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2433 bool changed = false;
2437 last_vuse = gimple_vuse (stmt);
2438 last_vuse_ptr = &last_vuse;
2439 result = vn_reference_lookup (op, gimple_vuse (stmt),
2440 default_vn_walk_kind, NULL);
2441 last_vuse_ptr = NULL;
2443 /* If we have a VCE, try looking up its operand as it might be stored in
2444 a different type. */
2445 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2446 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2447 default_vn_walk_kind, NULL);
2449 /* We handle type-punning through unions by value-numbering based
2450 on offset and size of the access. Be prepared to handle a
2451 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2453 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2455 /* We will be setting the value number of lhs to the value number
2456 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2457 So first simplify and lookup this expression to see if it
2458 is already available. */
2459 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2460 if ((CONVERT_EXPR_P (val)
2461 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2462 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2464 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2465 if ((CONVERT_EXPR_P (tem)
2466 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2467 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2468 TREE_TYPE (val), tem)))
2472 if (!is_gimple_min_invariant (val)
2473 && TREE_CODE (val) != SSA_NAME)
2474 result = vn_nary_op_lookup (val, NULL);
2475 /* If the expression is not yet available, value-number lhs to
2476 a new SSA_NAME we create. */
2479 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2480 /* Initialize value-number information properly. */
2481 VN_INFO_GET (result)->valnum = result;
2482 VN_INFO (result)->value_id = get_next_value_id ();
2483 VN_INFO (result)->expr = val;
2484 VN_INFO (result)->has_constants = expr_has_constants (val);
2485 VN_INFO (result)->needs_insertion = true;
2486 /* As all "inserted" statements are singleton SCCs, insert
2487 to the valid table. This is strictly needed to
2488 avoid re-generating new value SSA_NAMEs for the same
2489 expression during SCC iteration over and over (the
2490 optimistic table gets cleared after each iteration).
2491 We do not need to insert into the optimistic table, as
2492 lookups there will fall back to the valid table. */
2493 if (current_info == optimistic_info)
2495 current_info = valid_info;
2496 vn_nary_op_insert (val, result);
2497 current_info = optimistic_info;
2500 vn_nary_op_insert (val, result);
2501 if (dump_file && (dump_flags & TDF_DETAILS))
2503 fprintf (dump_file, "Inserting name ");
2504 print_generic_expr (dump_file, result, 0);
2505 fprintf (dump_file, " for expression ");
2506 print_generic_expr (dump_file, val, 0);
2507 fprintf (dump_file, "\n");
2514 changed = set_ssa_val_to (lhs, result);
2515 if (TREE_CODE (result) == SSA_NAME
2516 && VN_INFO (result)->has_constants)
2518 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2519 VN_INFO (lhs)->has_constants = true;
2524 changed = set_ssa_val_to (lhs, lhs);
/* Insert under the last VUSE seen during the walk (not the original)
   so later lookups through the same chain hit.  */
2525 vn_reference_insert (op, lhs, last_vuse);
2532 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2533 and return true if the value number of the LHS has changed as a result. */
/* NOTE(review): this listing is sampled -- the function header line and
   several interior lines (braces, declarations of RESULT/VDEF/DEF/USE) are
   missing.  All tokens below are kept byte-identical to the listing.  */
2536 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2538 bool changed = false;
2540 bool resultsame = false;
2542 /* First we want to lookup using the *vuses* from the store and see
2543 if there the last store to this location with the same address
2546 The vuses represent the memory state before the store. If the
2547 memory state, address, and value of the store is the same as the
2548 last store to this location, then this store will produce the
2549 same memory state as that store.
2551 In this case the vdef versions for this store are value numbered to those
2552 vuse versions, since they represent the same memory state after
2555 Otherwise, the vdefs for the store are used when inserting into
2556 the table, since the store generates a new memory state. */
/* Exact-match lookup of the stored-to location in the pre-store memory
   state; VN_NOWALK disables the alias-oracle walk for this query.  */
2558 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2562 if (TREE_CODE (result) == SSA_NAME)
2563 result = SSA_VAL (result);
2564 if (TREE_CODE (op) == SSA_NAME)
2566 resultsame = expressions_equal_p (result, op);
/* No earlier identical store: record the new memory state.  */
2569 if (!result || !resultsame)
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2575 fprintf (dump_file, "No store match\n");
2576 fprintf (dump_file, "Value numbering store ");
2577 print_generic_expr (dump_file, lhs, 0);
2578 fprintf (dump_file, " to ");
2579 print_generic_expr (dump_file, op, 0);
2580 fprintf (dump_file, "\n");
2582 /* Have to set value numbers before insert, since insert is
2583 going to valueize the references in-place. */
2584 if ((vdef = gimple_vdef (stmt)))
2586 VN_INFO (vdef)->use_processed = true;
2587 changed |= set_ssa_val_to (vdef, vdef);
2590 /* Do not insert structure copies into the tables. */
2591 if (is_gimple_min_invariant (op)
2592 || is_gimple_reg (op))
2593 vn_reference_insert (lhs, op, vdef);
2597 /* We had a match, so value number the vdef to have the value
2598 number of the vuse it came from. */
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2602 fprintf (dump_file, "Store matched earlier value,"
2603 "value numbering store vdefs to matching vuses.\n");
2605 def = gimple_vdef (stmt);
2606 use = gimple_vuse (stmt);
/* Redundant store: the post-store memory state equals the pre-store one.  */
2608 VN_INFO (def)->use_processed = true;
2609 changed |= set_ssa_val_to (def, SSA_VAL (use));
2615 /* Visit and value number PHI, return true if the value number
/* NOTE(review): sampled listing -- the header line, loop braces and the
   ALLSAME bookkeeping lines are missing; tokens kept byte-identical.  */
2619 visit_phi (gimple phi)
2621 bool changed = false;
2623 tree sameval = VN_TOP;
2624 bool allsame = true;
2627 /* TODO: We could check for this in init_sccvn, and replace this
2628 with a gcc_assert. */
/* PHIs live over abnormal edges are value-numbered to themselves.  */
2629 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2630 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2632 /* See if all non-TOP arguments have the same value. TOP is
2633 equivalent to everything, so we can ignore it. */
2634 for (i = 0; i < gimple_phi_num_args (phi); i++)
2636 tree def = PHI_ARG_DEF (phi, i);
2638 if (TREE_CODE (def) == SSA_NAME)
2639 def = SSA_VAL (def);
2642 if (sameval == VN_TOP)
2648 if (!expressions_equal_p (def, sameval))
2656 /* If all value numbered to the same value, the phi node has that
/* All arguments agree: the PHI is a copy of SAMEVAL.  */
2660 if (is_gimple_min_invariant (sameval))
2662 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2663 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2667 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2668 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2671 if (TREE_CODE (sameval) == SSA_NAME)
2672 return visit_copy (PHI_RESULT (phi), sameval);
2674 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2677 /* Otherwise, see if it is equivalent to a phi node in this block. */
2678 result = vn_phi_lookup (phi);
2681 if (TREE_CODE (result) == SSA_NAME)
2682 changed = visit_copy (PHI_RESULT (phi), result);
2684 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI found: record this one and number it to itself.  */
2688 vn_phi_insert (phi, PHI_RESULT (phi));
2689 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2690 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2691 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2697 /* Return true if EXPR contains constants. */
2700 expr_has_constants (tree expr)
2702 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2705 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2708 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2709 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2710 /* Constants inside reference ops are rarely interesting, but
2711 it can take a lot of looking to find them. */
2713 case tcc_declaration:
2716 return is_gimple_min_invariant (expr);
2721 /* Return true if STMT contains constants. */
2724 stmt_has_constants (gimple stmt)
2726 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2729 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2731 case GIMPLE_UNARY_RHS:
2732 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2734 case GIMPLE_BINARY_RHS:
2735 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2736 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2737 case GIMPLE_TERNARY_RHS:
2738 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2739 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2740 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2741 case GIMPLE_SINGLE_RHS:
2742 /* Constants inside reference ops are rarely interesting, but
2743 it can take a lot of looking to find them. */
2744 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2751 /* Replace SSA_NAMES in expr with their value numbers, and return the
2753 This is performed in place. */
2756 valueize_expr (tree expr)
2758 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2761 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2762 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2763 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2766 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2767 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2768 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2769 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2770 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2771 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2779 /* Simplify the binary expression RHS, and return the result if
/* NOTE(review): sampled listing -- the header line, several braces and the
   final return statements are missing; tokens kept byte-identical.  */
2783 simplify_binary_expression (gimple stmt)
2785 tree result = NULL_TREE;
2786 tree op0 = gimple_assign_rhs1 (stmt);
2787 tree op1 = gimple_assign_rhs2 (stmt);
2789 /* This will not catch every single case we could combine, but will
2790 catch those with constants. The goal here is to simultaneously
2791 combine constants between expressions, but avoid infinite
2792 expansion of expressions during simplification. */
/* Substitute each SSA operand by either its (valueized) defining
   expression when it carries constants, or by its value number.  */
2793 if (TREE_CODE (op0) == SSA_NAME)
2795 if (VN_INFO (op0)->has_constants
2796 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2797 op0 = valueize_expr (vn_get_expr_for (op0));
2798 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2799 op0 = SSA_VAL (op0);
2802 if (TREE_CODE (op1) == SSA_NAME)
2804 if (VN_INFO (op1)->has_constants)
2805 op1 = valueize_expr (vn_get_expr_for (op1));
2806 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2807 op1 = SSA_VAL (op1);
2810 /* Pointer plus constant can be represented as invariant address.
2811 Do so to allow further propatation, see also tree forwprop. */
2812 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2813 && host_integerp (op1, 1)
2814 && TREE_CODE (op0) == ADDR_EXPR
2815 && is_gimple_min_invariant (op0))
2816 return build_invariant_address (TREE_TYPE (op0),
2817 TREE_OPERAND (op0, 0),
2818 TREE_INT_CST_LOW (op1));
2820 /* Avoid folding if nothing changed. */
2821 if (op0 == gimple_assign_rhs1 (stmt)
2822 && op1 == gimple_assign_rhs2 (stmt))
/* Defer overflow warnings so fold does not warn on speculative folds.  */
2825 fold_defer_overflow_warnings ();
2827 result = fold_binary (gimple_assign_rhs_code (stmt),
2828 gimple_expr_type (stmt), op0, op1);
2830 STRIP_USELESS_TYPE_CONVERSION (result);
2832 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2835 /* Make sure result is not a complex expression consisting
2836 of operators of operators (IE (a + b) + (a + c))
2837 Otherwise, we will end up with unbounded expressions if
2838 fold does anything at all. */
2839 if (result && valid_gimple_rhs_p (result))
2845 /* Simplify the unary expression RHS, and return the result if
/* NOTE(review): sampled listing -- the header line, braces, the
   ORIG_OP0 capture and the final returns are missing; tokens kept
   byte-identical.  */
2849 simplify_unary_expression (gimple stmt)
2851 tree result = NULL_TREE;
2852 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2854 /* We handle some tcc_reference codes here that are all
2855 GIMPLE_ASSIGN_SINGLE codes. */
2856 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2857 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2858 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2859 op0 = TREE_OPERAND (op0, 0);
2861 if (TREE_CODE (op0) != SSA_NAME)
/* Replace OP0 by its constant-carrying expression, or, for
   conversion-like codes, by its defining expression when that keeps
   fold's input restricted to SSA names and invariants.  */
2865 if (VN_INFO (op0)->has_constants)
2866 op0 = valueize_expr (vn_get_expr_for (op0));
2867 else if (gimple_assign_cast_p (stmt)
2868 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2869 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2870 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2872 /* We want to do tree-combining on conversion-like expressions.
2873 Make sure we feed only SSA_NAMEs or constants to fold though. */
2874 tree tem = valueize_expr (vn_get_expr_for (op0));
2875 if (UNARY_CLASS_P (tem)
2876 || BINARY_CLASS_P (tem)
2877 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2878 || TREE_CODE (tem) == SSA_NAME
2879 || is_gimple_min_invariant (tem))
2883 /* Avoid folding if nothing changed, but remember the expression. */
2884 if (op0 == orig_op0)
2887 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2888 gimple_expr_type (stmt), op0);
2891 STRIP_USELESS_TYPE_CONVERSION (result);
2892 if (valid_gimple_rhs_p (result))
2899 /* Valueize NAME if it is an SSA name, otherwise just return it. */
2902 vn_valueize (tree name)
2904 if (TREE_CODE (name) == SSA_NAME)
2906 tree tem = SSA_VAL (name);
2907 return tem == VN_TOP ? name : tem;
2912 /* Try to simplify RHS using equivalences and constant folding. */
/* NOTE(review): sampled listing -- header line, case labels (tcc_reference,
   tcc_unary, tcc_binary) and default return are missing; tokens kept
   byte-identical.  */
2915 try_to_simplify (gimple stmt)
2919 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2920 in this case, there is no point in doing extra work. */
2921 if (gimple_assign_copy_p (stmt)
2922 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2925 /* First try constant folding based on our current lattice. */
2926 tem = gimple_fold_stmt_to_constant (stmt, vn_valueize)
2930 /* If that didn't work try combining multiple statements. */
2931 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2934 /* Fallthrough for some codes that can operate on registers. */
2935 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2936 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2937 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2939 /* We could do a little more with unary ops, if they expand
2940 into binary ops, but it's debatable whether it is worth it. */
2942 return simplify_unary_expression (stmt);
2944 case tcc_comparison:
2946 return simplify_binary_expression (stmt);
2955 /* Visit and value number USE, return true if the value number
/* NOTE(review): sampled listing -- the header line, braces, the
   SIMPLIFIED declaration, several 'else' keywords and 'break'
   statements are missing; tokens below kept byte-identical.
   This is the central dispatcher: it classifies the defining
   statement of USE and forwards to visit_phi / visit_copy /
   visit_nary_op / visit_reference_op_* / defs_to_varying.  */
2959 visit_use (tree use)
2961 bool changed = false;
2962 gimple stmt = SSA_NAME_DEF_STMT (use);
2964 VN_INFO (use)->use_processed = true;
2966 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2967 if (dump_file && (dump_flags & TDF_DETAILS)
2968 && !SSA_NAME_IS_DEFAULT_DEF (use))
2970 fprintf (dump_file, "Value numbering ");
2971 print_generic_expr (dump_file, use, 0);
2972 fprintf (dump_file, " stmt = ");
2973 print_gimple_stmt (dump_file, stmt, 0, 0);
2976 /* Handle uninitialized uses. */
2977 if (SSA_NAME_IS_DEFAULT_DEF (use))
2978 changed = set_ssa_val_to (use, use);
2981 if (gimple_code (stmt) == GIMPLE_PHI)
2982 changed = visit_phi (stmt);
/* Statements without an lhs, with volatile operands or that may
   throw cannot be value numbered; force their defs to VARYING.  */
2983 else if (!gimple_has_lhs (stmt)
2984 || gimple_has_volatile_ops (stmt)
2985 || stmt_could_throw_p (stmt))
2986 changed = defs_to_varying (stmt);
2987 else if (is_gimple_assign (stmt))
2989 tree lhs = gimple_assign_lhs (stmt);
2992 /* Shortcut for copies. Simplifying copies is pointless,
2993 since we copy the expression and value they represent. */
2994 if (gimple_assign_copy_p (stmt)
2995 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2996 && TREE_CODE (lhs) == SSA_NAME)
2998 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
3001 simplified = try_to_simplify (stmt);
3004 if (dump_file && (dump_flags & TDF_DETAILS))
3006 fprintf (dump_file, "RHS ");
3007 print_gimple_expr (dump_file, stmt, 0, 0);
3008 fprintf (dump_file, " simplified to ");
3009 print_generic_expr (dump_file, simplified, 0);
3010 if (TREE_CODE (lhs) == SSA_NAME)
3011 fprintf (dump_file, " has constants %d\n",
3012 expr_has_constants (simplified));
3014 fprintf (dump_file, "\n");
3017 /* Setting value numbers to constants will occasionally
3018 screw up phi congruence because constants are not
3019 uniquely associated with a single ssa name that can be
3022 && is_gimple_min_invariant (simplified)
3023 && TREE_CODE (lhs) == SSA_NAME)
3025 VN_INFO (lhs)->expr = simplified;
3026 VN_INFO (lhs)->has_constants = true;
3027 changed = set_ssa_val_to (lhs, simplified);
3031 && TREE_CODE (simplified) == SSA_NAME
3032 && TREE_CODE (lhs) == SSA_NAME)
3034 changed = visit_copy (lhs, simplified);
3037 else if (simplified)
3039 if (TREE_CODE (lhs) == SSA_NAME)
3041 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3042 /* We have to unshare the expression or else
3043 valuizing may change the IL stream. */
3044 VN_INFO (lhs)->expr = unshare_expr (simplified);
3047 else if (stmt_has_constants (stmt)
3048 && TREE_CODE (lhs) == SSA_NAME)
3049 VN_INFO (lhs)->has_constants = true;
3050 else if (TREE_CODE (lhs) == SSA_NAME)
3052 /* We reset expr and constantness here because we may
3053 have been value numbering optimistically, and
3054 iterating. They may become non-constant in this case,
3055 even if they were optimistically constant. */
3057 VN_INFO (lhs)->has_constants = false;
3058 VN_INFO (lhs)->expr = NULL_TREE;
3061 if ((TREE_CODE (lhs) == SSA_NAME
3062 /* We can substitute SSA_NAMEs that are live over
3063 abnormal edges with their constant value. */
3064 && !(gimple_assign_copy_p (stmt)
3065 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3067 && is_gimple_min_invariant (simplified))
3068 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3069 /* Stores or copies from SSA_NAMEs that are live over
3070 abnormal edges are a problem. */
3071 || (gimple_assign_single_p (stmt)
3072 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3073 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
3074 changed = defs_to_varying (stmt);
3075 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
3077 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
3079 else if (TREE_CODE (lhs) == SSA_NAME)
3081 if ((gimple_assign_copy_p (stmt)
3082 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3084 && is_gimple_min_invariant (simplified)))
3086 VN_INFO (lhs)->has_constants = true;
3088 changed = set_ssa_val_to (lhs, simplified);
3090 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
/* Non-copy RHS: dispatch on the rhs class.  */
3094 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3096 case GIMPLE_UNARY_RHS:
3097 case GIMPLE_BINARY_RHS:
3098 case GIMPLE_TERNARY_RHS:
3099 changed = visit_nary_op (lhs, stmt);
3101 case GIMPLE_SINGLE_RHS:
3102 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3105 /* VOP-less references can go through unary case. */
3106 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
3107 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
3108 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3109 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
3111 changed = visit_nary_op (lhs, stmt);
3115 case tcc_declaration:
3116 changed = visit_reference_op_load
3117 (lhs, gimple_assign_rhs1 (stmt), stmt);
3119 case tcc_expression:
3120 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3122 changed = visit_nary_op (lhs, stmt);
3127 changed = defs_to_varying (stmt);
3131 changed = defs_to_varying (stmt);
3137 changed = defs_to_varying (stmt);
3139 else if (is_gimple_call (stmt))
3141 tree lhs = gimple_call_lhs (stmt);
3143 /* ??? We could try to simplify calls. */
3145 if (stmt_has_constants (stmt)
3146 && TREE_CODE (lhs) == SSA_NAME)
3147 VN_INFO (lhs)->has_constants = true;
3148 else if (TREE_CODE (lhs) == SSA_NAME)
3150 /* We reset expr and constantness here because we may
3151 have been value numbering optimistically, and
3152 iterating. They may become non-constant in this case,
3153 even if they were optimistically constant. */
3154 VN_INFO (lhs)->has_constants = false;
3155 VN_INFO (lhs)->expr = NULL_TREE;
3158 if (TREE_CODE (lhs) == SSA_NAME
3159 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3160 changed = defs_to_varying (stmt);
3161 /* ??? We should handle stores from calls. */
3162 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const non-internal calls can be value numbered.  */
3164 if (!gimple_call_internal_p (stmt)
3165 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3166 changed = visit_reference_op_call (lhs, stmt);
3168 changed = defs_to_varying (stmt);
3171 changed = defs_to_varying (stmt);
3178 /* Compare two operands by reverse postorder index */
/* NOTE(review): sampled listing -- the header line, BBA/BBB declarations,
   the -1/1 return statements and the NULL-basic-block checks are missing;
   tokens kept byte-identical.  qsort comparator: orders SSA names by the
   RPO number of their defining block, with stable tie-breaks on PHI-ness,
   statement uid and SSA version.  */
3181 compare_ops (const void *pa, const void *pb)
3183 const tree opa = *((const tree *)pa);
3184 const tree opb = *((const tree *)pb);
3185 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3186 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3190 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3191 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3192 else if (gimple_nop_p (opstmta))
3194 else if (gimple_nop_p (opstmtb))
3197 bba = gimple_bb (opstmta);
3198 bbb = gimple_bb (opstmtb);
3201 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Same basic block: PHIs sort before normal statements, then uid order.  */
3209 if (gimple_code (opstmta) == GIMPLE_PHI
3210 && gimple_code (opstmtb) == GIMPLE_PHI)
3211 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3212 else if (gimple_code (opstmta) == GIMPLE_PHI)
3214 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3216 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3217 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3219 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3221 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3224 /* Sort an array containing members of a strongly connected component
3225 SCC so that the members are ordered by RPO number.
3226 This means that when the sort is complete, iterating through the
3227 array will give you the members in RPO order. */
3230 sort_scc (VEC (tree, heap) *scc)
3232 VEC_qsort (tree, scc, compare_ops);
3235 /* Insert the no longer used nary ONARY to the hash INFO. */
3238 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3240 size_t size = sizeof_vn_nary_op (onary->length);
3241 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3242 &info->nary_obstack);
3243 memcpy (nary, onary, size);
3244 vn_nary_op_insert_into (nary, info->nary, false);
3247 /* Insert the no longer used phi OPHI to the hash INFO. */
3250 copy_phi (vn_phi_t ophi, vn_tables_t info)
3252 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3254 memcpy (phi, ophi, sizeof (*phi));
3255 ophi->phiargs = NULL;
3256 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3257 gcc_assert (!*slot);
3261 /* Insert the no longer used reference OREF to the hash INFO. */
3264 copy_reference (vn_reference_t oref, vn_tables_t info)
3268 ref = (vn_reference_t) pool_alloc (info->references_pool);
3269 memcpy (ref, oref, sizeof (*ref));
3270 oref->operands = NULL;
3271 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3274 free_reference (*slot);
3278 /* Process a strongly connected component in the SSA graph. */
/* NOTE(review): sampled listing -- the header line, iterator variable
   declarations (var, nary, phi, ref, hi), braces and the do/while loop
   structure are missing; tokens kept byte-identical.  */
3281 process_scc (VEC (tree, heap) *scc)
3285 unsigned int iterations = 0;
3286 bool changed = true;
3292 /* If the SCC has a single member, just visit it. */
3293 if (VEC_length (tree, scc) == 1)
3295 tree use = VEC_index (tree, scc, 0);
3296 if (VN_INFO (use)->use_processed)
3298 /* We need to make sure it doesn't form a cycle itself, which can
3299 happen for self-referential PHI nodes. In that case we would
3300 end up inserting an expression with VN_TOP operands into the
3301 valid table which makes us derive bogus equivalences later.
3302 The cheapest way to check this is to assume it for all PHI nodes. */
3303 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3304 /* Fallthru to iteration. */ ;
3312 /* Iterate over the SCC with the optimistic table until it stops
3314 current_info = optimistic_info;
3319 if (dump_file && (dump_flags & TDF_DETAILS))
3320 fprintf (dump_file, "Starting iteration %d\n", iterations);
3321 /* As we are value-numbering optimistically we have to
3322 clear the expression tables and the simplified expressions
3323 in each iteration until we converge. */
3324 htab_empty (optimistic_info->nary);
3325 htab_empty (optimistic_info->phis);
3326 htab_empty (optimistic_info->references);
3327 obstack_free (&optimistic_info->nary_obstack, NULL);
3328 gcc_obstack_init (&optimistic_info->nary_obstack);
3329 empty_alloc_pool (optimistic_info->phis_pool);
3330 empty_alloc_pool (optimistic_info->references_pool);
3331 FOR_EACH_VEC_ELT (tree, scc, i, var)
3332 VN_INFO (var)->expr = NULL_TREE;
3333 FOR_EACH_VEC_ELT (tree, scc, i, var)
3334 changed |= visit_use (var);
3337 statistics_histogram_event (cfun, "SCC iterations", iterations);
3339 /* Finally, copy the contents of the no longer used optimistic
3340 table to the valid table. */
3341 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3342 copy_nary (nary, valid_info);
3343 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3344 copy_phi (phi, valid_info);
3345 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3346 copy_reference (ref, valid_info);
/* Processing of subsequent SCCs uses the valid table directly.  */
3348 current_info = valid_info;
3351 DEF_VEC_O(ssa_op_iter);
3352 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3354 /* Pop the components of the found SCC for NAME off the SCC stack
3355 and process them. Returns true if all went well, false if
3356 we run into resource limits. */
/* NOTE(review): sampled listing -- the header line, the declaration of X,
   the do/while braces, the sort_scc/process_scc calls and the returns are
   missing; tokens kept byte-identical.  */
3359 extract_and_process_scc_for_name (tree name)
3361 VEC (tree, heap) *scc = NULL;
3364 /* Found an SCC, pop the components off the SCC stack and
/* Pop until NAME itself comes off: NAME is the SCC root.  */
3368 x = VEC_pop (tree, sccstack);
3370 VN_INFO (x)->on_sccstack = false;
3371 VEC_safe_push (tree, heap, scc, x);
3372 } while (x != name);
3374 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3375 if (VEC_length (tree, scc)
3376 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3379 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3380 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3381 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3385 if (VEC_length (tree, scc) > 1)
3388 if (dump_file && (dump_flags & TDF_DETAILS))
3389 print_scc (dump_file, scc);
3393 VEC_free (tree, heap, scc);
3398 /* Depth first search on NAME to discover and process SCC's in the SSA
3400 Execution of this algorithm relies on the fact that the SCC's are
3401 popped off the stack in topological order.
3402 Returns true if successful, false if we stopped processing SCC's due
3403 to resource constraints. */
/* NOTE(review): sampled listing -- the function header line itself
   ("static bool DFS (tree name)"), braces, labels (start_over,
   continue_walking), the ITER/DEFSTMT/USE declarations and several
   returns are missing; tokens kept byte-identical.  This is an
   iterative Tarjan SCC walk using explicit name/iterator stacks
   instead of recursion.  */
3408 VEC(ssa_op_iter, heap) *itervec = NULL;
3409 VEC(tree, heap) *namevec = NULL;
3410 use_operand_p usep = NULL;
/* Tarjan bookkeeping: dfs number, low link, on-stack flag.  */
3417 VN_INFO (name)->dfsnum = next_dfs_num++;
3418 VN_INFO (name)->visited = true;
3419 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3421 VEC_safe_push (tree, heap, sccstack, name);
3422 VN_INFO (name)->on_sccstack = true;
3423 defstmt = SSA_NAME_DEF_STMT (name);
3425 /* Recursively DFS on our operands, looking for SCC's. */
3426 if (!gimple_nop_p (defstmt))
3428 /* Push a new iterator. */
3429 if (gimple_code (defstmt) == GIMPLE_PHI)
3430 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
3432 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES)
3435 clear_and_done_ssa_iter (&iter);
3439 /* If we are done processing uses of a name, go up the stack
3440 of iterators and process SCCs as we found them. */
3441 if (op_iter_done (&iter))
3443 /* See if we found an SCC. */
3444 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3445 if (!extract_and_process_scc_for_name (name))
3447 VEC_free (tree, heap, namevec);
3448 VEC_free (ssa_op_iter, heap, itervec);
3452 /* Check if we are done. */
3453 if (VEC_empty (tree, namevec))
3455 VEC_free (tree, heap, namevec);
3456 VEC_free (ssa_op_iter, heap, itervec);
3460 /* Restore the last use walker and continue walking there. */
3462 name = VEC_pop (tree, namevec);
3463 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3464 sizeof (ssa_op_iter));
3465 VEC_pop (ssa_op_iter, itervec);
3466 goto continue_walking;
3469 use = USE_FROM_PTR (usep);
3471 /* Since we handle phi nodes, we will sometimes get
3472 invariants in the use expression. */
3473 if (TREE_CODE (use) == SSA_NAME)
3475 if (! (VN_INFO (use)->visited))
3477 /* Recurse by pushing the current use walking state on
3478 the stack and starting over. */
3479 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3480 VEC_safe_push(tree, heap, namevec, name);
/* After the simulated recursion returns, propagate the low link.  */
3485 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3486 VN_INFO (use)->low);
3488 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3489 && VN_INFO (use)->on_sccstack)
3491 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3492 VN_INFO (name)->low);
3496 usep = op_iter_next_use (&iter);
3500 /* Allocate a value number table. */
3503 allocate_vn_table (vn_tables_t table)
3505 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3506 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3507 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3510 gcc_obstack_init (&table->nary_obstack);
3511 table->phis_pool = create_alloc_pool ("VN phis",
3512 sizeof (struct vn_phi_s),
3514 table->references_pool = create_alloc_pool ("VN references",
3515 sizeof (struct vn_reference_s),
3519 /* Free a value number table. */
3522 free_vn_table (vn_tables_t table)
3524 htab_delete (table->phis);
3525 htab_delete (table->nary);
3526 htab_delete (table->references);
3527 obstack_free (&table->nary_obstack, NULL);
3528 free_alloc_pool (table->phis_pool);
3529 free_alloc_pool (table->references_pool);
/* NOTE(review): sampled listing -- this is the body of init_scc_vn; its
   header line ("static void init_scc_vn (void)") and the I/J loop
   variable declarations were dropped by the sampling.  Tokens kept
   byte-identical.  Sets up all per-pass global state for SCCVN.  */
3537 int *rpo_numbers_temp;
3539 calculate_dominance_info (CDI_DOMINATORS);
3541 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3544 constant_value_ids = BITMAP_ALLOC (NULL);
3549 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3550 /* VEC_alloc doesn't actually grow it to the right size, it just
3551 preallocates the space to do so. */
3552 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3553 gcc_obstack_init (&vn_ssa_aux_obstack);
3555 shared_lookup_phiargs = NULL;
3556 shared_lookup_references = NULL;
3557 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3558 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3559 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3561 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3562 the i'th block in RPO order is bb. We want to map bb's to RPO
3563 numbers, so we need to rearrange this array. */
3564 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3565 rpo_numbers[rpo_numbers_temp[j]] = j;
3567 XDELETE (rpo_numbers_temp);
/* VN_TOP is a unique sentinel tree for "not yet computed".  */
3569 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3571 /* Create the VN_INFO structures, and initialize value numbers to
3573 for (i = 0; i < num_ssa_names; i++)
3575 tree name = ssa_name (i);
3578 VN_INFO_GET (name)->valnum = VN_TOP;
3579 VN_INFO (name)->expr = NULL_TREE;
3580 VN_INFO (name)->value_id = 0;
3584 renumber_gimple_stmt_uids ();
3586 /* Create the valid and optimistic value numbering tables. */
3587 valid_info = XCNEW (struct vn_tables_s);
3588 allocate_vn_table (valid_info);
3589 optimistic_info = XCNEW (struct vn_tables_s);
3590 allocate_vn_table (optimistic_info);
/* NOTE(review): sampled listing -- this is the body of free_scc_vn; its
   header line ("void free_scc_vn (void)") and the I declaration were
   dropped by the sampling.  Tokens kept byte-identical.  Tears down all
   state created by init_scc_vn, releasing SSA names SCCVN inserted.  */
3598 htab_delete (constant_to_value_id);
3599 BITMAP_FREE (constant_value_ids);
3600 VEC_free (tree, heap, shared_lookup_phiargs);
3601 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3602 XDELETEVEC (rpo_numbers);
3604 for (i = 0; i < num_ssa_names; i++)
3606 tree name = ssa_name (i);
/* Release only the names SCCVN itself created (needs_insertion).  */
3608 && VN_INFO (name)->needs_insertion)
3609 release_ssa_name (name);
3611 obstack_free (&vn_ssa_aux_obstack, NULL);
3612 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3614 VEC_free (tree, heap, sccstack);
3615 free_vn_table (valid_info);
3616 XDELETE (valid_info);
3617 free_vn_table (optimistic_info);
3618 XDELETE (optimistic_info);
3621 /* Set *ID if we computed something useful in RESULT. */
3624 set_value_id_for_result (tree result, unsigned int *id)
3628 if (TREE_CODE (result) == SSA_NAME)
3629 *id = VN_INFO (result)->value_id;
3630 else if (is_gimple_min_invariant (result))
3631 *id = get_or_alloc_constant_value_id (result);
3635 /* Set the value ids in the valid hash tables. */
/* NOTE(review): sampled listing -- the header line and the declarations of
   VNO, VP, VR and the htab iterator HI are missing; tokens kept
   byte-identical.  Walks all three valid-table hashes and stamps each
   entry's value_id from its result via set_value_id_for_result.  */
3638 set_hashtable_value_ids (void)
3645 /* Now set the value ids of the things we had put in the hash
3648 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3649 vno, vn_nary_op_t, hi)
3650 set_value_id_for_result (vno->result, &vno->value_id);
3652 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3654 set_value_id_for_result (vp->result, &vp->value_id);
3656 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3657 vr, vn_reference_t, hi)
3658 set_value_id_for_result (vr->result, &vr->value_id);
3661 /* Do SCCVN. Returns true if it finished, false if we bailed out
3662 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3663 how we use the alias oracle walking during the VN process. */
/* NOTE(review): sampled listing -- the header line, init_scc_vn call,
   the DFS invocation/bail-out handling, loop braces and the final
   return are missing; tokens kept byte-identical.  */
3666 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3670 bool changed = true;
3672 default_vn_walk_kind = default_vn_walk_kind_;
3675 current_info = valid_info;
/* Default defs of parameters are value numbered to themselves.  */
3677 for (param = DECL_ARGUMENTS (current_function_decl);
3679 param = DECL_CHAIN (param))
3681 if (gimple_default_def (cfun, param) != NULL)
3683 tree def = gimple_default_def (cfun, param);
3684 VN_INFO (def)->valnum = def;
/* Drive the SCC walk from every unvisited, used SSA name.  */
3688 for (i = 1; i < num_ssa_names; ++i)
3690 tree name = ssa_name (i);
3692 && VN_INFO (name)->visited == false
3693 && !has_zero_uses (name))
3701 /* Initialize the value ids. */
3703 for (i = 1; i < num_ssa_names; ++i)
3705 tree name = ssa_name (i);
3709 info = VN_INFO (name);
3710 if (info->valnum == name
3711 || info->valnum == VN_TOP)
3712 info->value_id = get_next_value_id ();
3713 else if (is_gimple_min_invariant (info->valnum))
3714 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3717 /* Propagate until they stop changing. */
3721 for (i = 1; i < num_ssa_names; ++i)
3723 tree name = ssa_name (i);
3727 info = VN_INFO (name);
3728 if (TREE_CODE (info->valnum) == SSA_NAME
3729 && info->valnum != name
3730 && info->value_id != VN_INFO (info->valnum)->value_id)
3733 info->value_id = VN_INFO (info->valnum)->value_id;
3738 set_hashtable_value_ids ();
3740 if (dump_file && (dump_flags & TDF_DETAILS))
3742 fprintf (dump_file, "Value numbers:\n");
3743 for (i = 0; i < num_ssa_names; i++)
3745 tree name = ssa_name (i);
3747 && VN_INFO (name)->visited
3748 && SSA_VAL (name) != name)
3750 print_generic_expr (dump_file, name, 0);
3751 fprintf (dump_file, " = ");
3752 print_generic_expr (dump_file, SSA_VAL (name), 0);
3753 fprintf (dump_file, "\n");
3761 /* Return the maximum value id we have ever seen. */
3764 get_max_value_id (void)
3766 return next_value_id;
3769 /* Return the next unique value id. */
3772 get_next_value_id (void)
3774 return next_value_id++;
3778 /* Compare two expressions E1 and E2 and return true if they are equal. */
3781 expressions_equal_p (tree e1, tree e2)
3783 /* The obvious case. */
3787 /* If only one of them is null, they cannot be equal. */
3791 /* Now perform the actual comparison. */
3792 if (TREE_CODE (e1) == TREE_CODE (e2)
3793 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3800 /* Return true if the nary operation NARY may trap. This is a copy
3801 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3804 vn_nary_may_trap (vn_nary_op_t nary)
3807 tree rhs2 = NULL_TREE;
3808 bool honor_nans = false;
3809 bool honor_snans = false;
3810 bool fp_operation = false;
3811 bool honor_trapv = false;
3815 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3816 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3817 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3820 fp_operation = FLOAT_TYPE_P (type);
3823 honor_nans = flag_trapping_math && !flag_finite_math_only;
3824 honor_snans = flag_signaling_nans != 0;
3826 else if (INTEGRAL_TYPE_P (type)
3827 && TYPE_OVERFLOW_TRAPS (type))
3830 if (nary->length >= 2)
3832 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3834 honor_nans, honor_snans, rhs2,
3840 for (i = 0; i < nary->length; ++i)
3841 if (tree_could_trap_p (nary->op[i]))