1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
/* NOTE(review): this is a numbered listing with dropped lines.  The struct
   below is missing its hashtable members (nary/phis/references in upstream)
   and its closing "} *vn_tables_t;" — restore from the original file.  */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
/* Map from a constant tree to its value id, and the set of value ids
   known to denote constants.  */
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
/* Shorthand for the current value number of SSA name X.  */
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
164 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
165 are allocated on an obstack for locality reasons, and to free them
166 without looping over the VEC. */
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
171 /* Return the value numbering information for a given SSA name. */
176 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 SSA_NAME_VERSION (name));
178 gcc_checking_assert (res);
182 /* Set the value numbering info for a given SSA name to a given
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
188 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 SSA_NAME_VERSION (name), value);
192 /* Initialize the value numbering info for a given SSA name.
193 This should be called just once for every SSA name. */
196 VN_INFO_GET (tree name)
198 vn_ssa_aux_t newinfo;
200 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 SSA_NAME_VERSION (name) + 1);
205 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name), newinfo);
211 /* Get the representative expression for the SSA_NAME NAME. Returns
212 the representative SSA_NAME if there is no expression associated with it. */
/* NOTE(review): numbered listing with dropped lines — the signature's return
   type, braces, several early returns, the declarations of DEF_STMT and CODE,
   the case labels of the switch, and the tail that caches and returns EXPR
   are all missing from this view.  Restore from the original file.  */
215 vn_get_expr_for (tree name)
217 vn_ssa_aux_t vn = VN_INFO (name);
219 tree expr = NULL_TREE;
/* VN_TOP means "no value known yet" — presumably returns NAME itself
   here; the return is among the dropped lines (confirm upstream).  */
222 if (vn->valnum == VN_TOP)
225 /* If the value-number is a constant it is the representative
227 if (TREE_CODE (vn->valnum) != SSA_NAME)
230 /* Get to the information of the value of this SSA_NAME. */
231 vn = VN_INFO (vn->valnum);
233 /* If the value-number is a constant it is the representative
235 if (TREE_CODE (vn->valnum) != SSA_NAME)
238 /* Else if we have an expression, return it. */
239 if (vn->expr != NULL_TREE)
242 /* Otherwise use the defining statement to build the expression. */
243 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
245 /* If the value number is not an assignment use it directly. */
246 if (!is_gimple_assign (def_stmt))
249 /* FIXME tuples. This is incomplete and likely will miss some
/* Dispatch on the class of the RHS code to rebuild a tree expression.  */
251 code = gimple_assign_rhs_code (def_stmt);
252 switch (TREE_CODE_CLASS (code))
255 if ((code == REALPART_EXPR
256 || code == IMAGPART_EXPR
257 || code == VIEW_CONVERT_EXPR)
258 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
260 expr = fold_build1 (code,
261 gimple_expr_type (def_stmt),
262 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
266 expr = fold_build1 (code,
267 gimple_expr_type (def_stmt),
268 gimple_assign_rhs1 (def_stmt));
272 expr = fold_build2 (code,
273 gimple_expr_type (def_stmt),
274 gimple_assign_rhs1 (def_stmt),
275 gimple_assign_rhs2 (def_stmt));
278 case tcc_exceptional:
279 if (code == CONSTRUCTOR
281 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
282 expr = gimple_assign_rhs1 (def_stmt);
287 if (expr == NULL_TREE)
290 /* Cache the expression. */
297 /* Free a phi operation structure VP. */
302 vn_phi_t phi = (vn_phi_t) vp;
303 VEC_free (tree, heap, phi->phiargs);
306 /* Free a reference operation structure VP. */
309 free_reference (void *vp)
311 vn_reference_t vr = (vn_reference_t) vp;
312 VEC_free (vn_reference_op_s, heap, vr->operands);
315 /* Hash table equality function for vn_constant_t. */
318 vn_constant_eq (const void *p1, const void *p2)
320 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
321 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
323 if (vc1->hashcode != vc2->hashcode)
326 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
329 /* Hash table hash function for vn_constant_t. */
332 vn_constant_hash (const void *p1)
334 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
335 return vc1->hashcode;
338 /* Lookup a value id for CONSTANT and return it. If it does not
342 get_constant_value_id (tree constant)
345 struct vn_constant_s vc;
347 vc.hashcode = vn_hash_constant_with_type (constant);
348 vc.constant = constant;
349 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
350 vc.hashcode, NO_INSERT);
352 return ((vn_constant_t)*slot)->value_id;
356 /* Lookup a value id for CONSTANT, and if it does not exist, create a
357 new one and return it. If it does exist, return it. */
360 get_or_alloc_constant_value_id (tree constant)
363 struct vn_constant_s vc;
366 vc.hashcode = vn_hash_constant_with_type (constant);
367 vc.constant = constant;
368 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
369 vc.hashcode, INSERT);
371 return ((vn_constant_t)*slot)->value_id;
373 vcp = XNEW (struct vn_constant_s);
374 vcp->hashcode = vc.hashcode;
375 vcp->constant = constant;
376 vcp->value_id = get_next_value_id ();
377 *slot = (void *) vcp;
378 bitmap_set_bit (constant_value_ids, vcp->value_id);
379 return vcp->value_id;
382 /* Return true if V is a value id for a constant. */
385 value_id_constant_p (unsigned int v)
387 return bitmap_bit_p (constant_value_ids, v);
390 /* Compare two reference operands P1 and P2 for equality. Return true if
391 they are equal, and false otherwise. */
394 vn_reference_op_eq (const void *p1, const void *p2)
396 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
397 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
399 return (vro1->opcode == vro2->opcode
400 /* We do not care for differences in type qualification. */
401 && (vro1->type == vro2->type
402 || (vro1->type && vro2->type
403 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
404 TYPE_MAIN_VARIANT (vro2->type))))
405 && expressions_equal_p (vro1->op0, vro2->op0)
406 && expressions_equal_p (vro1->op1, vro2->op1)
407 && expressions_equal_p (vro1->op2, vro2->op2));
410 /* Compute the hash for a reference operand VRO1. */
413 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
415 result = iterative_hash_hashval_t (vro1->opcode, result);
417 result = iterative_hash_expr (vro1->op0, result);
419 result = iterative_hash_expr (vro1->op1, result);
421 result = iterative_hash_expr (vro1->op2, result);
425 /* Return the hashcode for a given reference operation P1. */
428 vn_reference_hash (const void *p1)
430 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
431 return vr1->hashcode;
434 /* Compute a hash for the reference operation VR1 and return it. */
/* NOTE(review): numbered listing with dropped lines — the return type,
   braces, the loop-index declaration, the MEM_REF/ADDR_EXPR offset
   accumulation branches, the vuse NULL-guard and the final return are
   missing from this view.  Restore from the original file.  */
437 vn_reference_compute_hash (const vn_reference_t vr1)
439 hashval_t result = 0;
441 vn_reference_op_t vro;
442 HOST_WIDE_INT off = -1;
445 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
447 if (vro->opcode == MEM_REF)
449 else if (vro->opcode != ADDR_EXPR)
/* Accumulated constant offset is folded into the hash once known.  */
461 result = iterative_hash_hashval_t (off, result);
464 && vro->opcode == ADDR_EXPR)
468 tree op = TREE_OPERAND (vro->op0, 0);
469 result = iterative_hash_hashval_t (TREE_CODE (op), result);
470 result = iterative_hash_expr (op, result);
474 result = vn_reference_op_compute_hash (vro, result);
/* The vuse version participates in the hash (confirm the surrounding
   NULL-guard upstream).  */
478 result += SSA_NAME_VERSION (vr1->vuse);
483 /* Return true if reference operations P1 and P2 are equivalent. This
484 means they have the same set of operands and vuses. */
/* NOTE(review): numbered listing with dropped lines — the return type,
   braces, all the "return false/true" statements, the loop-index
   declarations and the deref bookkeeping are missing from this view.
   Also note the hashcode guard appears twice (listing lines 493 and
   497); upstream has it once — verify against the original file.  */
487 vn_reference_eq (const void *p1, const void *p2)
491 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
492 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
493 if (vr1->hashcode != vr2->hashcode)
496 /* Early out if this is not a hash collision. */
497 if (vr1->hashcode != vr2->hashcode)
500 /* The VOP needs to be the same. */
501 if (vr1->vuse != vr2->vuse)
504 /* If the operands are the same we are done. */
505 if (vr1->operands == vr2->operands)
/* Access sizes and integral precisions must agree for equivalence.  */
508 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
511 if (INTEGRAL_TYPE_P (vr1->type)
512 && INTEGRAL_TYPE_P (vr2->type))
514 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
517 else if (INTEGRAL_TYPE_P (vr1->type)
518 && (TYPE_PRECISION (vr1->type)
519 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
521 else if (INTEGRAL_TYPE_P (vr2->type)
522 && (TYPE_PRECISION (vr2->type)
523 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
/* Walk both operand vectors in lock-step, treating MEM_REF[&decl]
   and plain decl accesses as equivalent via the TEM substitutes.  */
530 HOST_WIDE_INT off1 = 0, off2 = 0;
531 vn_reference_op_t vro1, vro2;
532 vn_reference_op_s tem1, tem2;
533 bool deref1 = false, deref2 = false;
534 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
536 if (vro1->opcode == MEM_REF)
542 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
544 if (vro2->opcode == MEM_REF)
552 if (deref1 && vro1->opcode == ADDR_EXPR)
554 memset (&tem1, 0, sizeof (tem1));
555 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
556 tem1.type = TREE_TYPE (tem1.op0);
557 tem1.opcode = TREE_CODE (tem1.op0);
561 if (deref2 && vro2->opcode == ADDR_EXPR)
563 memset (&tem2, 0, sizeof (tem2));
564 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
565 tem2.type = TREE_TYPE (tem2.op0);
566 tem2.opcode = TREE_CODE (tem2.op0);
570 if (deref1 != deref2)
572 if (!vn_reference_op_eq (vro1, vro2))
/* Both vectors must be exhausted for the references to be equal.  */
577 while (VEC_length (vn_reference_op_s, vr1->operands) != i
578 || VEC_length (vn_reference_op_s, vr2->operands) != j);
583 /* Copy the operations present in load/store REF into RESULT, a vector of
584 vn_reference_op_s's. */
/* NOTE(review): numbered listing with dropped lines — the return type,
   braces, the enclosing "while (ref)" walk, the switch statement and its
   case labels, several temp.off assignments and break statements are
   missing from this view.  Restore from the original file before use.  */
587 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* TARGET_MEM_REF is decomposed into three ops: the TMR itself, the
   second index, and the base.  */
589 if (TREE_CODE (ref) == TARGET_MEM_REF)
591 vn_reference_op_s temp;
593 memset (&temp, 0, sizeof (temp));
594 temp.type = TREE_TYPE (ref);
595 temp.opcode = TREE_CODE (ref);
596 temp.op0 = TMR_INDEX (ref);
597 temp.op1 = TMR_STEP (ref);
598 temp.op2 = TMR_OFFSET (ref);
600 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
602 memset (&temp, 0, sizeof (temp));
603 temp.type = NULL_TREE;
604 temp.opcode = ERROR_MARK;
605 temp.op0 = TMR_INDEX2 (ref);
607 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
609 memset (&temp, 0, sizeof (temp));
610 temp.type = NULL_TREE;
611 temp.opcode = TREE_CODE (TMR_BASE (ref));
612 temp.op0 = TMR_BASE (ref);
614 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
618 /* For non-calls, store the information that makes up the address. */
622 vn_reference_op_s temp;
624 memset (&temp, 0, sizeof (temp));
625 temp.type = TREE_TYPE (ref);
626 temp.opcode = TREE_CODE (ref);
/* Per-opcode operand recording (switch cases dropped by the listing).  */
632 temp.op0 = TREE_OPERAND (ref, 1);
636 /* The base address gets its own vn_reference_op_s structure. */
637 temp.op0 = TREE_OPERAND (ref, 1);
638 if (host_integerp (TREE_OPERAND (ref, 1), 0))
639 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
642 /* Record bits and position. */
643 temp.op0 = TREE_OPERAND (ref, 1);
644 temp.op1 = TREE_OPERAND (ref, 2);
647 /* The field decl is enough to unambiguously specify the field,
648 a matching type is not necessary and a mismatching type
649 is always a spurious difference. */
650 temp.type = NULL_TREE;
651 temp.op0 = TREE_OPERAND (ref, 1);
652 temp.op1 = TREE_OPERAND (ref, 2);
654 tree this_offset = component_ref_field_offset (ref);
656 && TREE_CODE (this_offset) == INTEGER_CST)
658 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
/* Fold the byte-aligned part of the bit offset into temp.off.  */
659 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
662 = double_int_add (tree_to_double_int (this_offset),
664 (tree_to_double_int (bit_offset),
666 ? 3 : exact_log2 (BITS_PER_UNIT),
667 HOST_BITS_PER_DOUBLE_INT, true));
668 if (double_int_fits_in_shwi_p (off))
674 case ARRAY_RANGE_REF:
676 /* Record index as operand. */
677 temp.op0 = TREE_OPERAND (ref, 1);
678 /* Always record lower bounds and element size. */
679 temp.op1 = array_ref_low_bound (ref);
680 temp.op2 = array_ref_element_size (ref);
681 if (TREE_CODE (temp.op0) == INTEGER_CST
682 && TREE_CODE (temp.op1) == INTEGER_CST
683 && TREE_CODE (temp.op2) == INTEGER_CST)
685 double_int off = tree_to_double_int (temp.op0);
686 off = double_int_add (off,
688 (tree_to_double_int (temp.op1)));
689 off = double_int_mul (off, tree_to_double_int (temp.op2));
690 if (double_int_fits_in_shwi_p (off))
695 if (DECL_HARD_REGISTER (ref))
704 /* Canonicalize decls to MEM[&decl] which is what we end up with
705 when valueizing MEM[ptr] with ptr = &decl. */
706 temp.opcode = MEM_REF;
707 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
709 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
710 temp.opcode = ADDR_EXPR;
711 temp.op0 = build_fold_addr_expr (ref);
712 temp.type = TREE_TYPE (temp.op0);
726 if (is_gimple_min_invariant (ref))
732 /* These are only interesting for their operands, their
733 existence, and their type. They will never be the last
734 ref in the chain of references (IE they require an
735 operand), so we don't have to put anything
736 for op* as it will be handled by the iteration */
738 case VIEW_CONVERT_EXPR:
742 /* This is only interesting for its constant offset. */
743 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
748 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the operand chain until a non-reference is reached.  */
750 if (REFERENCE_CLASS_P (ref)
751 || TREE_CODE (ref) == WITH_SIZE_EXPR
752 || (TREE_CODE (ref) == ADDR_EXPR
753 && !is_gimple_min_invariant (ref)))
754 ref = TREE_OPERAND (ref, 0);
760 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
761 operands in *OPS, the reference alias set SET and the reference type TYPE.
762 Return true if something useful was produced. */
/* NOTE(review): numbered listing with dropped lines — the return type,
   braces, the switch/case labels of the operand walk, the op0_p plumbing,
   the "return false" bailouts and the final "return true" are missing from
   this view.  Restore from the original file before use.  */
765 ao_ref_init_from_vn_reference (ao_ref *ref,
766 alias_set_type set, tree type,
767 VEC (vn_reference_op_s, heap) *ops)
769 vn_reference_op_t op;
771 tree base = NULL_TREE;
773 HOST_WIDE_INT offset = 0;
774 HOST_WIDE_INT max_size;
775 HOST_WIDE_INT size = -1;
776 tree size_tree = NULL_TREE;
777 alias_set_type base_alias_set = -1;
779 /* First get the final access size from just the outermost expression. */
780 op = VEC_index (vn_reference_op_s, ops, 0);
781 if (op->opcode == COMPONENT_REF)
782 size_tree = DECL_SIZE (op->op0);
783 else if (op->opcode == BIT_FIELD_REF)
787 enum machine_mode mode = TYPE_MODE (type);
789 size_tree = TYPE_SIZE (type);
791 size = GET_MODE_BITSIZE (mode);
793 if (size_tree != NULL_TREE)
795 if (!host_integerp (size_tree, 1))
798 size = TREE_INT_CST_LOW (size_tree);
801 /* Initially, maxsize is the same as the accessed element size.
802 In the following it will only grow (or become -1). */
805 /* Compute cumulative bit-offset for nested component-refs and array-refs,
806 and find the ultimate containing object. */
807 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
811 /* These may be in the reference ops, but we cannot do anything
812 sensible with them here. */
814 /* Apart from ADDR_EXPR arguments to MEM_REF. */
815 if (base != NULL_TREE
816 && TREE_CODE (base) == MEM_REF
818 && DECL_P (TREE_OPERAND (op->op0, 0)))
820 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
821 base = TREE_OPERAND (op->op0, 0);
828 offset += pop->off * BITS_PER_UNIT;
836 /* Record the base objects. */
838 base_alias_set = get_deref_alias_set (op->op0);
839 *op0_p = build2 (MEM_REF, op->type,
841 op0_p = &TREE_OPERAND (*op0_p, 0);
852 /* And now the usual component-reference style ops. */
854 offset += tree_low_cst (op->op1, 0);
859 tree field = op->op0;
860 /* We do not have a complete COMPONENT_REF tree here so we
861 cannot use component_ref_field_offset. Do the interesting
865 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
869 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
871 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
876 case ARRAY_RANGE_REF:
878 /* We recorded the lower bound and the element size. */
879 if (!host_integerp (op->op0, 0)
880 || !host_integerp (op->op1, 0)
881 || !host_integerp (op->op2, 0))
885 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
886 hindex -= TREE_INT_CST_LOW (op->op1);
887 hindex *= TREE_INT_CST_LOW (op->op2);
888 hindex *= BITS_PER_UNIT;
900 case VIEW_CONVERT_EXPR:
/* Without a base nothing useful was produced.  */
917 if (base == NULL_TREE)
920 ref->ref = NULL_TREE;
922 ref->offset = offset;
924 ref->max_size = max_size;
925 ref->ref_alias_set = set;
926 if (base_alias_set != -1)
927 ref->base_alias_set = base_alias_set;
929 ref->base_alias_set = get_alias_set (base);
930 /* We discount volatiles from value-numbering elsewhere. */
931 ref->volatile_p = false;
936 /* Copy the operations present in load/store/call REF into RESULT, a vector of
937 vn_reference_op_s's. */
940 copy_reference_ops_from_call (gimple call,
941 VEC(vn_reference_op_s, heap) **result)
943 vn_reference_op_s temp;
946 /* Copy the type, opcode, function being called and static chain. */
947 memset (&temp, 0, sizeof (temp));
948 temp.type = gimple_call_return_type (call);
949 temp.opcode = CALL_EXPR;
950 temp.op0 = gimple_call_fn (call);
951 temp.op1 = gimple_call_chain (call);
953 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
955 /* Copy the call arguments. As they can be references as well,
956 just chain them together. */
957 for (i = 0; i < gimple_call_num_args (call); ++i)
959 tree callarg = gimple_call_arg (call, i);
960 copy_reference_ops_from_ref (callarg, result);
964 /* Create a vector of vn_reference_op_s structures from REF, a
965 REFERENCE_CLASS_P tree. The vector is not shared. */
967 static VEC(vn_reference_op_s, heap) *
968 create_reference_ops_from_ref (tree ref)
970 VEC (vn_reference_op_s, heap) *result = NULL;
972 copy_reference_ops_from_ref (ref, &result);
976 /* Create a vector of vn_reference_op_s structures from CALL, a
977 call statement. The vector is not shared. */
979 static VEC(vn_reference_op_s, heap) *
980 create_reference_ops_from_call (gimple call)
982 VEC (vn_reference_op_s, heap) *result = NULL;
984 copy_reference_ops_from_call (call, &result);
988 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
989 *I_P to point to the last element of the replacement. */
991 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
994 unsigned int i = *i_p;
995 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
996 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
998 HOST_WIDE_INT addr_offset;
1000 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1001 from .foo.bar to the preceeding MEM_REF offset and replace the
1002 address with &OBJ. */
1003 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1005 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1006 if (addr_base != op->op0)
1008 double_int off = tree_to_double_int (mem_op->op0);
1009 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1010 off = double_int_add (off, shwi_to_double_int (addr_offset));
1011 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1012 op->op0 = build_fold_addr_expr (addr_base);
1013 if (host_integerp (mem_op->op0, 0))
1014 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1020 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1021 *I_P to point to the last element of the replacement. */
1023 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1026 unsigned int i = *i_p;
1027 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1028 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1030 enum tree_code code;
1033 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1034 if (!is_gimple_assign (def_stmt))
1037 code = gimple_assign_rhs_code (def_stmt);
1038 if (code != ADDR_EXPR
1039 && code != POINTER_PLUS_EXPR)
1042 off = tree_to_double_int (mem_op->op0);
1043 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1045 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1046 from .foo.bar to the preceeding MEM_REF offset and replace the
1047 address with &OBJ. */
1048 if (code == ADDR_EXPR)
1050 tree addr, addr_base;
1051 HOST_WIDE_INT addr_offset;
1053 addr = gimple_assign_rhs1 (def_stmt);
1054 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1057 || TREE_CODE (addr_base) != MEM_REF)
1060 off = double_int_add (off, shwi_to_double_int (addr_offset));
1061 off = double_int_add (off, mem_ref_offset (addr_base));
1062 op->op0 = TREE_OPERAND (addr_base, 0);
1067 ptr = gimple_assign_rhs1 (def_stmt);
1068 ptroff = gimple_assign_rhs2 (def_stmt);
1069 if (TREE_CODE (ptr) != SSA_NAME
1070 || TREE_CODE (ptroff) != INTEGER_CST)
1073 off = double_int_add (off, tree_to_double_int (ptroff));
1077 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1078 if (host_integerp (mem_op->op0, 0))
1079 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1082 if (TREE_CODE (op->op0) == SSA_NAME)
1083 op->op0 = SSA_VAL (op->op0);
1084 if (TREE_CODE (op->op0) != SSA_NAME)
1085 op->opcode = TREE_CODE (op->op0);
1088 if (TREE_CODE (op->op0) == SSA_NAME)
1089 vn_reference_maybe_forwprop_address (ops, i_p);
1090 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1091 vn_reference_fold_indirect (ops, i_p);
1094 /* Optimize the reference REF to a constant if possible or return
1095 NULL_TREE if not. */
/* NOTE(review): numbered listing with dropped lines — the return type,
   braces, the ANYCONST updates, the fold_builtin call-site, several
   guards and the final "return NULL_TREE" are missing from this view.
   Restore from the original file before use.  */
1098 fully_constant_vn_reference_p (vn_reference_t ref)
1100 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1101 vn_reference_op_t op;
1103 /* Try to simplify the translated expression if it is
1104 a call to a builtin function with at most two arguments. */
1105 op = VEC_index (vn_reference_op_s, operands, 0);
1106 if (op->opcode == CALL_EXPR
1107 && TREE_CODE (op->op0) == ADDR_EXPR
1108 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1109 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1110 && VEC_length (vn_reference_op_s, operands) >= 2
1111 && VEC_length (vn_reference_op_s, operands) <= 3)
1113 vn_reference_op_t arg0, arg1 = NULL;
1114 bool anyconst = false;
1115 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1116 if (VEC_length (vn_reference_op_s, operands) > 2)
1117 arg1 = VEC_index (vn_reference_op_s, operands, 2);
/* Only fold when at least one argument is a constant or an invariant
   address (the ANYCONST flag; its assignments were dropped).  */
1118 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1119 || (arg0->opcode == ADDR_EXPR
1120 && is_gimple_min_invariant (arg0->op0)))
1123 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1124 || (arg1->opcode == ADDR_EXPR
1125 && is_gimple_min_invariant (arg1->op0))))
1129 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1132 arg1 ? arg1->op0 : NULL);
/* Strip a wrapping NOP and only accept invariant results.  */
1134 && TREE_CODE (folded) == NOP_EXPR)
1135 folded = TREE_OPERAND (folded, 0);
1137 && is_gimple_min_invariant (folded))
1142 /* Simplify reads from constant strings. */
1143 else if (op->opcode == ARRAY_REF
1144 && TREE_CODE (op->op0) == INTEGER_CST
1145 && integer_zerop (op->op1)
1146 && VEC_length (vn_reference_op_s, operands) == 2)
1148 vn_reference_op_t arg0;
1149 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1150 if (arg0->opcode == STRING_CST
1151 && (TYPE_MODE (op->type)
1152 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1153 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1154 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1155 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1156 return build_int_cst_type (op->type,
1157 (TREE_STRING_POINTER (arg0->op0)
1158 [TREE_INT_CST_LOW (op->op0)]));
1164 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1165 structures into their value numbers. This is done in-place, and
1166 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1167 whether any operands were valueized. */
/* NOTE(review): numbered listing with dropped lines — the braces, the
   loop-index declaration, the "vro->op0 = tem" style assignments after
   each SSA_VAL lookup, parts of the fold-with-MEM_REF conditions, the
   vro->off updates and the final "return orig" are missing from this
   view.  Restore from the original file before use.  */
1169 static VEC (vn_reference_op_s, heap) *
1170 valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
1172 vn_reference_op_t vro;
1175 *valueized_anything = false;
1177 FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1179 if (vro->opcode == SSA_NAME
1180 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1182 tree tem = SSA_VAL (vro->op0);
1183 if (tem != vro->op0)
1185 *valueized_anything = true;
1188 /* If it transforms from an SSA_NAME to a constant, update
1190 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1191 vro->opcode = TREE_CODE (vro->op0);
1193 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1195 tree tem = SSA_VAL (vro->op1);
1196 if (tem != vro->op1)
1198 *valueized_anything = true;
1202 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1204 tree tem = SSA_VAL (vro->op2);
1205 if (tem != vro->op2)
1207 *valueized_anything = true;
1211 /* If it transforms from an SSA_NAME to an address, fold with
1212 a preceding indirect reference. */
1215 && TREE_CODE (vro->op0) == ADDR_EXPR
1216 && VEC_index (vn_reference_op_s,
1217 orig, i - 1)->opcode == MEM_REF)
1218 vn_reference_fold_indirect (&orig, &i);
1220 && vro->opcode == SSA_NAME
1221 && VEC_index (vn_reference_op_s,
1222 orig, i - 1)->opcode == MEM_REF)
1223 vn_reference_maybe_forwprop_address (&orig, &i);
1224 /* If it transforms a non-constant ARRAY_REF into a constant
1225 one, adjust the constant offset. */
1226 else if (vro->opcode == ARRAY_REF
1228 && TREE_CODE (vro->op0) == INTEGER_CST
1229 && TREE_CODE (vro->op1) == INTEGER_CST
1230 && TREE_CODE (vro->op2) == INTEGER_CST)
1232 double_int off = tree_to_double_int (vro->op0);
1233 off = double_int_add (off,
1235 (tree_to_double_int (vro->op1)));
1236 off = double_int_mul (off, tree_to_double_int (vro->op2));
1237 if (double_int_fits_in_shwi_p (off))
1245 static VEC (vn_reference_op_s, heap) *
1246 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1249 return valueize_refs_1 (orig, &tem);
1252 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1254 /* Create a vector of vn_reference_op_s structures from REF, a
1255 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1256 this function. *VALUEIZED_ANYTHING will specify whether any
1257 operands were valueized. */
1259 static VEC(vn_reference_op_s, heap) *
1260 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1264 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1265 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1266 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1267 valueized_anything);
1268 return shared_lookup_references;
1271 /* Create a vector of vn_reference_op_s structures from CALL, a
1272 call statement. The vector is shared among all callers of
1275 static VEC(vn_reference_op_s, heap) *
1276 valueize_shared_reference_ops_from_call (gimple call)
1280 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1281 copy_reference_ops_from_call (call, &shared_lookup_references);
1282 shared_lookup_references = valueize_refs (shared_lookup_references);
1283 return shared_lookup_references;
1286 /* Lookup a SCCVN reference operation VR in the current hash table.
1287 Returns the resulting value number if it exists in the hash table,
1288 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1289 vn_reference_t stored in the hashtable if something is found. */
1292 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1297 hash = vr->hashcode;
1298 slot = htab_find_slot_with_hash (current_info->references, vr,
1300 if (!slot && current_info == optimistic_info)
1301 slot = htab_find_slot_with_hash (valid_info->references, vr,
1306 *vnresult = (vn_reference_t)*slot;
1307 return ((vn_reference_t)*slot)->result;
/* If non-NULL, *last_vuse_ptr is updated with the last VUSE visited
   during a reference walk; vn_walk_kind / default_vn_walk_kind select
   the walking mode for reference lookups (NOTE(review): the comment
   originally preceding these in the file was dropped by the listing —
   confirm wording against the original).  */
1313 static tree *last_vuse_ptr;
1314 static vn_lookup_kind vn_walk_kind;
1315 static vn_lookup_kind default_vn_walk_kind;
1317 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1318 with the current VUSE and performs the expression lookup. */
1321 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1323 vn_reference_t vr = (vn_reference_t)vr_;
/* Remember the vuse we are visiting for the caller, when requested.  */
1328 *last_vuse_ptr = vuse;
1330 /* Fixup vuse and hash. */
/* Incremental rehash: remove the old vuse's SSA version from the hash,
   install the valueized vuse, then mix the new version back in.  This
   mirrors how vn_reference_compute_hash folds in the vuse.  */
1332 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1333 vr->vuse = SSA_VAL (vuse);
1335 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1337 hash = vr->hashcode;
1338 slot = htab_find_slot_with_hash (current_info->references, vr,
1340 if (!slot && current_info == optimistic_info)
1341 slot = htab_find_slot_with_hash (valid_info->references, vr,
1349 /* Lookup an existing or insert a new vn_reference entry into the
1350 value table for the VUSE, SET, TYPE, OPERANDS reference which
1351 has the value VALUE which is either a constant or an SSA name. */
1353 static vn_reference_t
1354 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1357 VEC (vn_reference_op_s,
1361 struct vn_reference_s vr1;
1362 vn_reference_t result;
1365 vr1.operands = operands;
1368 vr1.hashcode = vn_reference_compute_hash (&vr1);
/* If the reference is already in the table return the existing entry.  */
1369 if (vn_reference_lookup_1 (&vr1, &result))
/* Otherwise insert a new entry.  OPERANDS is copied because the table
   takes ownership of the vector it stores.  */
1371 if (TREE_CODE (value) == SSA_NAME)
1372 value_id = VN_INFO (value)->value_id;
1374 value_id = get_or_alloc_constant_value_id (value);
1375 return vn_reference_insert_pieces (vuse, set, type,
1376 VEC_copy (vn_reference_op_s, heap,
1377 operands), value, value_id);
1380 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1381 from the statement defining VUSE and if not successful tries to
1382 translate *REFP and VR_ through an aggregate copy at the definition
/* of VUSE.
   NOTE(review): this extract is fragmentary (many lines, braces and
   return statements are missing); the comments below describe only
   what the visible code establishes.  */
1386 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1388 vn_reference_t vr = (vn_reference_t)vr_;
1389 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1391 HOST_WIDE_INT offset, maxsize;
/* Static to avoid re-allocating the operand vector on every call.  */
1392 static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1394 bool lhs_ref_ok = false;
1396 /* First try to disambiguate after value-replacing in the definitions LHS. */
1397 if (is_gimple_assign (def_stmt))
1399 VEC (vn_reference_op_s, heap) *tem;
1400 tree lhs = gimple_assign_lhs (def_stmt);
1401 bool valueized_anything = false;
1402 /* Avoid re-allocation overhead. */
1403 VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1404 copy_reference_ops_from_ref (lhs, &lhs_ops);
1406 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
/* Valueization must not have re-allocated the vector.  */
1407 gcc_assert (lhs_ops == tem)
1408 if (valueized_anything)
1410 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1411 get_alias_set (lhs),
1412 TREE_TYPE (lhs), lhs_ops);
1414 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1419 ao_ref_init (&lhs_ref, lhs);
1424 base = ao_ref_base (ref);
1425 offset = ref->offset;
1426 maxsize = ref->max_size;
1428 /* If we cannot constrain the size of the reference we cannot
1429 test if anything kills it. */
1433 /* We can't deduce anything useful from clobbers. */
1434 if (gimple_clobber_p (def_stmt))
1437 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1438 from that definition.
/* 1) Memset of zero that covers the whole of *REF -- the value is zero.  */
1440 if (is_gimple_reg_type (vr->type)
1441 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1442 && integer_zerop (gimple_call_arg (def_stmt, 1))
1443 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1444 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1446 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1448 HOST_WIDE_INT offset2, size2, maxsize2;
1449 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1450 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
/* The division check guards against overflow of the byte-to-bit
   multiplication above.  */
1451 if ((unsigned HOST_WIDE_INT)size2 / 8
1452 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1454 && operand_equal_p (base, base2, 0)
1455 && offset2 <= offset
1456 && offset2 + size2 >= offset + maxsize)
1458 tree val = build_zero_cst (vr->type);
1459 return vn_reference_lookup_or_insert_for_pieces
1460 (vuse, vr->set, vr->type, vr->operands, val);
1464 /* 2) Assignment from an empty CONSTRUCTOR. */
1465 else if (is_gimple_reg_type (vr->type)
1466 && gimple_assign_single_p (def_stmt)
1467 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1468 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1471 HOST_WIDE_INT offset2, size2, maxsize2;
1472 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1473 &offset2, &size2, &maxsize2);
1475 && operand_equal_p (base, base2, 0)
1476 && offset2 <= offset
1477 && offset2 + size2 >= offset + maxsize)
1479 tree val = build_zero_cst (vr->type);
1480 return vn_reference_lookup_or_insert_for_pieces
1481 (vuse, vr->set, vr->type, vr->operands, val);
1485 /* 3) Assignment from a constant. We can use folds native encode/interpret
1486 routines to extract the assigned bits. */
1487 else if (vn_walk_kind == VN_WALKREWRITE
1488 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1489 && ref->size == maxsize
1490 && maxsize % BITS_PER_UNIT == 0
1491 && offset % BITS_PER_UNIT == 0
1492 && is_gimple_reg_type (vr->type)
1493 && gimple_assign_single_p (def_stmt)
1494 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1497 HOST_WIDE_INT offset2, size2, maxsize2;
1498 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1499 &offset2, &size2, &maxsize2);
1501 && maxsize2 == size2
1502 && size2 % BITS_PER_UNIT == 0
1503 && offset2 % BITS_PER_UNIT == 0
1504 && operand_equal_p (base, base2, 0)
1505 && offset2 <= offset
1506 && offset2 + size2 >= offset + maxsize)
1508 /* We support up to 512-bit values (for V8DFmode). */
1509 unsigned char buffer[64];
1512 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1513 buffer, sizeof (buffer));
/* Re-interpret the encoded bytes at the offset of *REF within the
   store to obtain the loaded value.  */
1516 tree val = native_interpret_expr (vr->type,
1518 + ((offset - offset2)
1520 ref->size / BITS_PER_UNIT);
1522 return vn_reference_lookup_or_insert_for_pieces
1523 (vuse, vr->set, vr->type, vr->operands, val);
1528 /* 4) Assignment from an SSA name which definition we may be able
1529 to access pieces from. */
1530 else if (ref->size == maxsize
1531 && is_gimple_reg_type (vr->type)
1532 && gimple_assign_single_p (def_stmt)
1533 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1535 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1536 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1537 if (is_gimple_assign (def_stmt2)
1538 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1539 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1540 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1543 HOST_WIDE_INT offset2, size2, maxsize2, off;
1544 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1545 &offset2, &size2, &maxsize2);
1546 off = offset - offset2;
1548 && maxsize2 == size2
1549 && operand_equal_p (base, base2, 0)
1550 && offset2 <= offset
1551 && offset2 + size2 >= offset + maxsize)
1553 tree val = NULL_TREE;
/* ELSZ is the element size in bits of the complex/vector element type.  */
1555 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1556 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
/* Offset 0 selects the real part, ELSZ the imaginary part.  */
1559 val = gimple_assign_rhs1 (def_stmt2);
1560 else if (off == elsz)
1561 val = gimple_assign_rhs2 (def_stmt2);
1563 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1566 tree ctor = gimple_assign_rhs1 (def_stmt2);
1567 unsigned i = off / elsz;
1568 if (i < CONSTRUCTOR_NELTS (ctor))
1570 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
/* Only use the element if its index matches the position.  */
1571 if (compare_tree_int (elt->index, i) == 0)
1576 return vn_reference_lookup_or_insert_for_pieces
1577 (vuse, vr->set, vr->type, vr->operands, val);
1582 /* 5) For aggregate copies translate the reference through them if
1583 the copy kills ref. */
1584 else if (vn_walk_kind == VN_WALKREWRITE
1585 && gimple_assign_single_p (def_stmt)
1586 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1587 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1588 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1591 HOST_WIDE_INT offset2, size2, maxsize2;
1593 VEC (vn_reference_op_s, heap) *rhs = NULL;
1594 vn_reference_op_t vro;
1600 /* See if the assignment kills REF. */
1601 base2 = ao_ref_base (&lhs_ref);
1602 offset2 = lhs_ref.offset;
1603 size2 = lhs_ref.size;
1604 maxsize2 = lhs_ref.max_size;
1606 || (base != base2 && !operand_equal_p (base, base2, 0))
1608 || offset2 + size2 < offset + maxsize)
1611 /* Find the common base of ref and the lhs. lhs_ops already
1612 contains valueized operands for the lhs. */
1613 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1614 j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
/* Walk both operand vectors from the outermost component inwards while
   the components are equal.  */
1615 while (j >= 0 && i >= 0
1616 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1618 VEC_index (vn_reference_op_s, lhs_ops, j)))
1624 /* ??? The innermost op should always be a MEM_REF and we already
1625 checked that the assignment to the lhs kills vr. Thus for
1626 aggregate copies using char[] types the vn_reference_op_eq
1627 may fail when comparing types for compatibility. But we really
1628 don't care here - further lookups with the rewritten operands
1629 will simply fail if we messed up types too badly. */
1630 if (j == 0 && i >= 0
1631 && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1632 && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1633 && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1634 == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1637 /* i now points to the first additional op.
1638 ??? LHS may not be completely contained in VR, one or more
1639 VIEW_CONVERT_EXPRs could be in its way. We could at least
1640 try handling outermost VIEW_CONVERT_EXPRs. */
1644 /* Now re-write REF to be based on the rhs of the assignment. */
1645 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1646 /* We need to pre-pend vr->operands[0..i] to rhs. */
1647 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1648 > VEC_length (vn_reference_op_s, vr->operands))
1650 VEC (vn_reference_op_s, heap) *old = vr->operands;
1651 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1652 i + 1 + VEC_length (vn_reference_op_s, rhs));
/* If the grow re-allocated the shared vector, detach the shared
   pointer so the stale storage is not reused elsewhere.  */
1653 if (old == shared_lookup_references
1654 && vr->operands != old)
1655 shared_lookup_references = NULL;
1658 VEC_truncate (vn_reference_op_s, vr->operands,
1659 i + 1 + VEC_length (vn_reference_op_s, rhs));
1660 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1661 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1662 VEC_free (vn_reference_op_s, heap, rhs);
1663 vr->operands = valueize_refs (vr->operands);
1664 vr->hashcode = vn_reference_compute_hash (vr);
1666 /* Adjust *ref from the new operands. */
1667 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1669 /* This can happen with bitfields. */
1670 if (ref->size != r.size)
1674 /* Do not update last seen VUSE after translating. */
1675 last_vuse_ptr = NULL;
1677 /* Keep looking for the adjusted *REF / VR pair. */
1681 /* 6) For memcpy copies translate the reference through them if
1682 the copy kills ref. */
1683 else if (vn_walk_kind == VN_WALKREWRITE
1684 && is_gimple_reg_type (vr->type)
1685 /* ??? Handle BCOPY as well. */
1686 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1687 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1688 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1689 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1690 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1691 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1692 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1693 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1697 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1698 vn_reference_op_s op;
1702 /* Only handle non-variable, addressable refs. */
1703 if (ref->size != maxsize
1704 || offset % BITS_PER_UNIT != 0
1705 || ref->size % BITS_PER_UNIT != 0)
1708 /* Extract a pointer base and an offset for the destination. */
1709 lhs = gimple_call_arg (def_stmt, 0);
1711 if (TREE_CODE (lhs) == SSA_NAME)
1712 lhs = SSA_VAL (lhs);
1713 if (TREE_CODE (lhs) == ADDR_EXPR)
1715 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1719 if (TREE_CODE (tem) == MEM_REF
1720 && host_integerp (TREE_OPERAND (tem, 1), 1))
1722 lhs = TREE_OPERAND (tem, 0);
1723 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1725 else if (DECL_P (tem))
1726 lhs = build_fold_addr_expr (tem);
1730 if (TREE_CODE (lhs) != SSA_NAME
1731 && TREE_CODE (lhs) != ADDR_EXPR)
1734 /* Extract a pointer base and an offset for the source. */
1735 rhs = gimple_call_arg (def_stmt, 1);
1737 if (TREE_CODE (rhs) == SSA_NAME)
1738 rhs = SSA_VAL (rhs);
1739 if (TREE_CODE (rhs) == ADDR_EXPR)
1741 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1745 if (TREE_CODE (tem) == MEM_REF
1746 && host_integerp (TREE_OPERAND (tem, 1), 1))
1748 rhs = TREE_OPERAND (tem, 0);
1749 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1751 else if (DECL_P (tem))
1752 rhs = build_fold_addr_expr (tem);
1756 if (TREE_CODE (rhs) != SSA_NAME
1757 && TREE_CODE (rhs) != ADDR_EXPR)
1760 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1762 /* The bases of the destination and the references have to agree. */
1763 if ((TREE_CODE (base) != MEM_REF
1765 || (TREE_CODE (base) == MEM_REF
1766 && (TREE_OPERAND (base, 0) != lhs
1767 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1769 && (TREE_CODE (lhs) != ADDR_EXPR
1770 || TREE_OPERAND (lhs, 0) != base)))
1773 /* And the access has to be contained within the memcpy destination. */
1774 at = offset / BITS_PER_UNIT;
1775 if (TREE_CODE (base) == MEM_REF)
1776 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1778 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1781 /* Make room for 2 operands in the new reference. */
1782 if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1784 VEC (vn_reference_op_s, heap) *old = vr->operands;
1785 VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
/* Same shared-vector detachment as in case 5 above.  */
1786 if (old == shared_lookup_references
1787 && vr->operands != old)
1788 shared_lookup_references = NULL;
1791 VEC_truncate (vn_reference_op_s, vr->operands, 2);
1793 /* The looked-through reference is a simple MEM_REF. */
1794 memset (&op, 0, sizeof (op));
1796 op.opcode = MEM_REF;
/* NOTE(review): op0 is built from AT - RHS_OFFSET while OFF uses
   AT - LHS_OFFSET + RHS_OFFSET -- the asymmetry looks suspicious;
   verify against the upstream revision before touching this.  */
1797 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1798 op.off = at - lhs_offset + rhs_offset;
1799 VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1800 op.type = TREE_TYPE (rhs);
1801 op.opcode = TREE_CODE (rhs);
1804 VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1805 vr->hashcode = vn_reference_compute_hash (vr);
1807 /* Adjust *ref from the new operands. */
1808 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1810 /* This can happen with bitfields. */
1811 if (ref->size != r.size)
1815 /* Do not update last seen VUSE after translating. */
1816 last_vuse_ptr = NULL;
1818 /* Keep looking for the adjusted *REF / VR pair. */
1822 /* Bail out and stop walking. */
1826 /* Lookup a reference operation by its parts, in the current hash table.
1827 Returns the resulting value number if it exists in the hash table,
1828 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1829 vn_reference_t stored in the hashtable if something is found. */
1832 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1833 VEC (vn_reference_op_s, heap) *operands,
1834 vn_reference_t *vnresult, vn_lookup_kind kind)
1836 struct vn_reference_s vr1;
1844 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Copy OPERANDS into the shared vector so valueization does not
   clobber the caller's vector.  */
1845 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1846 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1847 VEC_length (vn_reference_op_s, operands));
1848 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1849 VEC_address (vn_reference_op_s, operands),
1850 sizeof (vn_reference_op_s)
1851 * VEC_length (vn_reference_op_s, operands));
1852 vr1.operands = operands = shared_lookup_references
1853 = valueize_refs (shared_lookup_references);
1856 vr1.hashcode = vn_reference_compute_hash (&vr1);
/* A reference that folds to a constant needs no table lookup.  */
1857 if ((cst = fully_constant_vn_reference_p (&vr1)))
1860 vn_reference_lookup_1 (&vr1, vnresult);
/* On a miss, optionally walk the vuse chain looking for a providing
   definition (see vn_reference_lookup_2/_3).  */
1862 && kind != VN_NOWALK
1866 vn_walk_kind = kind;
1867 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1869 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1870 vn_reference_lookup_2,
1871 vn_reference_lookup_3, &vr1);
/* The walk may have replaced the operand vector; free the copy.  */
1872 if (vr1.operands != operands)
1873 VEC_free (vn_reference_op_s, heap, vr1.operands);
1877 return (*vnresult)->result;
1882 /* Lookup OP in the current hash table, and return the resulting value
1883 number if it exists in the hash table. Return NULL_TREE if it does
1884 not exist in the hash table or if the result field of the structure
1885 was NULL. VNRESULT will be filled in with the vn_reference_t
1886 stored in the hashtable if one exists. */
1889 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1890 vn_reference_t *vnresult)
1892 VEC (vn_reference_op_s, heap) *operands;
1893 struct vn_reference_s vr1;
1895 bool valuezied_anything;
1900 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1901 vr1.operands = operands
1902 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1903 vr1.type = TREE_TYPE (op);
1904 vr1.set = get_alias_set (op);
1905 vr1.hashcode = vn_reference_compute_hash (&vr1);
/* A fully constant reference needs no lookup.  */
1906 if ((cst = fully_constant_vn_reference_p (&vr1)))
1909 if (kind != VN_NOWALK
1912 vn_reference_t wvnresult;
1914 /* Make sure to use a valueized reference if we valueized anything.
1915 Otherwise preserve the full reference for advanced TBAA. */
1916 if (!valuezied_anything
1917 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1919 ao_ref_init (&r, op);
1920 vn_walk_kind = kind;
1922 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1923 vn_reference_lookup_2,
1924 vn_reference_lookup_3, &vr1);
/* The walk may have rewritten the shared operand vector; free any
   replacement it left behind.  */
1925 if (vr1.operands != operands)
1926 VEC_free (vn_reference_op_s, heap, vr1.operands);
1930 *vnresult = wvnresult;
1931 return wvnresult->result;
/* No walk (or walk failed): fall back to the plain table lookup.  */
1937 return vn_reference_lookup_1 (&vr1, vnresult);
1941 /* Insert OP into the current hash table with a value number of
1942 RESULT, and return the resulting reference structure we created. */
1945 vn_reference_insert (tree op, tree result, tree vuse)
1950 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1951 if (TREE_CODE (result) == SSA_NAME)
1952 vr1->value_id = VN_INFO (result)->value_id;
1954 vr1->value_id = get_or_alloc_constant_value_id (result);
1955 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1956 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1957 vr1->type = TREE_TYPE (op);
1958 vr1->set = get_alias_set (op);
1959 vr1->hashcode = vn_reference_compute_hash (vr1);
1960 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1962 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1965 /* Because we lookup stores using vuses, and value number failures
1966 using the vdefs (see visit_reference_op_store for how and why),
1967 it's possible that on failure we may try to insert an already
1968 inserted store. This is not wrong, there is no ssa name for a
1969 store that we could use as a differentiator anyway. Thus, unlike
1970 the other lookup functions, you cannot gcc_assert (!*slot)
1973 /* But free the old slot in case of a collision. */
1975 free_reference (*slot);
1981 /* Insert a reference by its pieces into the current hash table with
1982 a value number of RESULT. Return the resulting reference
1983 structure we created. Takes ownership of OPERANDS. */
1986 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1987 VEC (vn_reference_op_s, heap) *operands,
1988 tree result, unsigned int value_id)
1994 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1995 vr1->value_id = value_id;
1996 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1997 vr1->operands = valueize_refs (operands);
2000 vr1->hashcode = vn_reference_compute_hash (vr1);
2001 if (result && TREE_CODE (result) == SSA_NAME)
2002 result = SSA_VAL (result);
2003 vr1->result = result;
2005 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2008 /* At this point we should have all the things inserted that we have
2009 seen before, and we should never try inserting something that
2011 gcc_assert (!*slot);
/* NOTE(review): the guard around this free (an `if (*slot)` for
   checking-disabled builds) appears to be missing from this extract.  */
2013 free_reference (*slot);
2019 /* Compute and return the hash value for nary operation VBO1. */
2022 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
/* First valueize all SSA-name operands in place.  */
2027 for (i = 0; i < vno1->length; ++i)
2028 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2029 vno1->op[i] = SSA_VAL (vno1->op[i]);
/* Canonicalize commutative binary operations by operand order so that
   a OP b and b OP a hash (and compare) the same.  */
2031 if (vno1->length == 2
2032 && commutative_tree_code (vno1->opcode)
2033 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2035 tree temp = vno1->op[0];
2036 vno1->op[0] = vno1->op[1];
2040 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2041 for (i = 0; i < vno1->length; ++i)
2042 hash = iterative_hash_expr (vno1->op[i], hash);
2047 /* Return the computed hashcode for nary operation P1. */
/* htab hash callback: the hashcode is precomputed and cached.  */
2050 vn_nary_op_hash (const void *p1)
2052 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2053 return vno1->hashcode;
2056 /* Compare nary operations P1 and P2 and return true if they are
/* equivalent (htab equality callback).  Cheap checks -- cached hash,
   length, opcode, type -- come before the per-operand comparison.  */
2060 vn_nary_op_eq (const void *p1, const void *p2)
2062 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2063 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2066 if (vno1->hashcode != vno2->hashcode)
2069 if (vno1->length != vno2->length)
2072 if (vno1->opcode != vno2->opcode
2073 || !types_compatible_p (vno1->type, vno2->type))
2076 for (i = 0; i < vno1->length; ++i)
2077 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2083 /* Initialize VNO from the pieces provided. */
2086 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2087 enum tree_code code, tree type, tree *ops)
2090 vno->length = length;
/* OPS must point at least LENGTH trees; they are copied by value.  */
2092 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2095 /* Initialize VNO from OP. */
/* Derives opcode, arity and type directly from the tree expression.  */
2098 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2102 vno->opcode = TREE_CODE (op);
2103 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2104 vno->type = TREE_TYPE (op);
2105 for (i = 0; i < vno->length; ++i)
2106 vno->op[i] = TREE_OPERAND (op, i);
2109 /* Return the number of operands for a vn_nary ops structure from STMT. */
2112 vn_nary_length_from_stmt (gimple stmt)
2114 switch (gimple_assign_rhs_code (stmt))
/* Unary wrappers count their single operand; constructors count their
   elements; everything else counts the statement's rhs operands.  */
2118 case VIEW_CONVERT_EXPR:
2122 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2125 return gimple_num_ops (stmt) - 1;
2129 /* Initialize VNO from STMT. */
/* Mirrors vn_nary_length_from_stmt: the two must agree on arity.  */
2132 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2136 vno->opcode = gimple_assign_rhs_code (stmt);
2137 vno->type = gimple_expr_type (stmt);
2138 switch (vno->opcode)
2142 case VIEW_CONVERT_EXPR:
/* Strip the wrapper and record only its operand.  */
2144 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2148 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2149 for (i = 0; i < vno->length; ++i)
2150 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2154 vno->length = gimple_num_ops (stmt) - 1;
2155 for (i = 0; i < vno->length; ++i)
2156 vno->op[i] = gimple_op (stmt, i + 1);
2160 /* Compute the hashcode for VNO and look for it in the hash table;
2161 return the resulting value number if it exists in the hash table.
2162 Return NULL_TREE if it does not exist in the hash table or if the
2163 result field of the operation is NULL. VNRESULT will contain the
2164 vn_nary_op_t from the hashtable if it exists. */
2167 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2174 vno->hashcode = vn_nary_op_compute_hash (vno);
/* Probe the current table, falling back to the valid table during
   the optimistic iteration -- same pattern as vn_reference_lookup_1.  */
2175 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2177 if (!slot && current_info == optimistic_info)
2178 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2183 *vnresult = (vn_nary_op_t)*slot;
2184 return ((vn_nary_op_t)*slot)->result;
2187 /* Lookup a n-ary operation by its pieces and return the resulting value
2188 number if it exists in the hash table. Return NULL_TREE if it does
2189 not exist in the hash table or if the result field of the operation
2190 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2194 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2195 tree type, tree *ops, vn_nary_op_t *vnresult)
/* Stack-allocate a temporary key sized for LENGTH operands.  */
2197 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2198 sizeof_vn_nary_op (length));
2199 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2200 return vn_nary_op_lookup_1 (vno1, vnresult);
2203 /* Lookup OP in the current hash table, and return the resulting value
2204 number if it exists in the hash table. Return NULL_TREE if it does
2205 not exist in the hash table or if the result field of the operation
2206 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2210 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
/* Stack-allocated key sized for OP's arity.  */
2213 = XALLOCAVAR (struct vn_nary_op_s,
2214 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2215 init_vn_nary_op_from_op (vno1, op);
2216 return vn_nary_op_lookup_1 (vno1, vnresult);
2219 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2220 value number if it exists in the hash table. Return NULL_TREE if
2221 it does not exist in the hash table. VNRESULT will contain the
2222 vn_nary_op_t from the hashtable if it exists. */
2225 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
/* Stack-allocated key sized from the statement's rhs.  */
2228 = XALLOCAVAR (struct vn_nary_op_s,
2229 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2230 init_vn_nary_op_from_stmt (vno1, stmt);
2231 return vn_nary_op_lookup_1 (vno1, vnresult);
2234 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
/* Raw obstack allocation -- the caller fills in all fields.  */
2237 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2239 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2242 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2246 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2248 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
/* NOTE(review): "¤t_info" below is mis-encoded text -- the original
   token is almost certainly "&current_info"; fix the encoding, do not
   compile this line as-is.  */
2249 ¤t_info->nary_obstack);
2251 vno1->value_id = value_id;
2252 vno1->length = length;
2253 vno1->result = result;
2258 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2259 VNO->HASHCODE first. */
2262 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2267 vno->hashcode = vn_nary_op_compute_hash (vno);
2269 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
/* Unlike reference inserts, a nary operation is never inserted twice.  */
2270 gcc_assert (!*slot);
2276 /* Insert a n-ary operation into the current hash table using its
2277 pieces. Return the vn_nary_op_t structure we created and put in
2281 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2282 tree type, tree *ops,
2283 tree result, unsigned int value_id)
2285 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2286 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2287 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2290 /* Insert OP into the current hash table with a value number of
2291 RESULT. Return the vn_nary_op_t structure we created and put in
2295 vn_nary_op_insert (tree op, tree result)
2297 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
/* RESULT must be an SSA name here -- its value_id is read directly.  */
2300 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2301 init_vn_nary_op_from_op (vno1, op);
2302 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2305 /* Insert the rhs of STMT into the current hash table with a value number of
/* RESULT; return the created vn_nary_op_t.  */
2309 vn_nary_op_insert_stmt (gimple stmt, tree result)
2312 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2313 result, VN_INFO (result)->value_id);
2314 init_vn_nary_op_from_stmt (vno1, stmt);
2315 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2318 /* Compute a hashcode for PHI operation VP1 and return it. */
2320 static inline hashval_t
2321 vn_phi_compute_hash (vn_phi_t vp1)
/* Seed with the basic-block index so PHIs in different blocks differ.  */
2328 result = vp1->block->index;
2330 /* If all PHI arguments are constants we need to distinguish
2331 the PHI node via its type. */
2332 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2333 result += (INTEGRAL_TYPE_P (type)
2334 + (INTEGRAL_TYPE_P (type)
2335 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2337 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
/* VN_TOP arguments are wildcards and must not affect the hash,
   matching vn_phi_eq which ignores them.  */
2339 if (phi1op == VN_TOP)
2341 result = iterative_hash_expr (phi1op, result);
2347 /* Return the computed hashcode for phi operation P1. */
/* htab hash callback: returns the cached hashcode.  */
2350 vn_phi_hash (const void *p1)
2352 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2353 return vp1->hashcode;
2356 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2359 vn_phi_eq (const void *p1, const void *p2)
2361 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2362 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2364 if (vp1->hashcode != vp2->hashcode)
/* Only PHIs in the same basic block can be equal.  */
2367 if (vp1->block == vp2->block)
2372 /* If the PHI nodes do not have compatible types
2373 they are not the same. */
2374 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2375 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2378 /* Any phi in the same block will have its arguments in the
2379 same edge order, because of how we store phi nodes. */
2380 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2382 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP on either side acts as a wildcard argument.  */
2383 if (phi1op == VN_TOP || phi2op == VN_TOP)
2385 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by vn_phi_lookup for building the argument key.  */
2393 static VEC(tree, heap) *shared_lookup_phiargs;
2395 /* Lookup PHI in the current hash table, and return the resulting
2396 value number if it exists in the hash table. Return NULL_TREE if
2397 it does not exist in the hash table. */
2400 vn_phi_lookup (gimple phi)
2403 struct vn_phi_s vp1;
2406 VEC_truncate (tree, shared_lookup_phiargs, 0);
2408 /* Canonicalize the SSA_NAME's to their value number. */
2409 for (i = 0; i < gimple_phi_num_args (phi); i++)
2411 tree def = PHI_ARG_DEF (phi, i);
2412 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2413 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
/* Build a stack key aliasing the shared argument vector.  */
2415 vp1.phiargs = shared_lookup_phiargs;
2416 vp1.block = gimple_bb (phi);
2417 vp1.hashcode = vn_phi_compute_hash (&vp1);
2418 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
/* Optimistic phase falls back to the valid table on a miss.  */
2420 if (!slot && current_info == optimistic_info)
2421 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2425 return ((vn_phi_t)*slot)->result;
2428 /* Insert PHI into the current hash table with a value number of
2432 vn_phi_insert (gimple phi, tree result)
2435 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
/* Unlike vn_phi_lookup this builds a fresh heap vector, since the
   table entry outlives the call.  */
2437 VEC (tree, heap) *args = NULL;
2439 /* Canonicalize the SSA_NAME's to their value number. */
2440 for (i = 0; i < gimple_phi_num_args (phi); i++)
2442 tree def = PHI_ARG_DEF (phi, i);
2443 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2444 VEC_safe_push (tree, heap, args, def);
2446 vp1->value_id = VN_INFO (result)->value_id;
2447 vp1->phiargs = args;
2448 vp1->block = gimple_bb (phi);
2449 vp1->result = result;
2450 vp1->hashcode = vn_phi_compute_hash (vp1);
2452 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2455 /* Because we iterate over phi operations more than once, it's
2456 possible the slot might already exist here, hence no assert.*/
2462 /* Print set of components in strongly connected component SCC to OUT. */
2465 print_scc (FILE *out, VEC (tree, heap) *scc)
2470 fprintf (out, "SCC consists of:");
2471 FOR_EACH_VEC_ELT (tree, scc, i, var)
2474 print_generic_expr (out, var, 0);
2476 fprintf (out, "\n");
2479 /* Set the value number of FROM to TO, return true if it has changed
/* as a result.  This is the iteration's change-detection primitive.  */
2483 set_ssa_val_to (tree from, tree to)
2485 tree currval = SSA_VAL (from);
/* FROM valued to itself means VARYING; never move away from VARYING
   (lattice values only go downwards).  */
2489 if (currval == from)
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, "Not changing value number of ");
2494 print_generic_expr (dump_file, from, 0);
2495 fprintf (dump_file, " from VARYING to ");
2496 print_generic_expr (dump_file, to, 0);
2497 fprintf (dump_file, "\n");
/* Never value-number to an SSA name occurring in an abnormal PHI.  */
2501 else if (TREE_CODE (to) == SSA_NAME
2502 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2506 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2507 and invariants. So assert that here. */
2508 gcc_assert (to != NULL_TREE
2510 || TREE_CODE (to) == SSA_NAME
2511 || is_gimple_min_invariant (to)));
2513 if (dump_file && (dump_flags & TDF_DETAILS))
2515 fprintf (dump_file, "Setting value number of ");
2516 print_generic_expr (dump_file, from, 0);
2517 fprintf (dump_file, " to ");
2518 print_generic_expr (dump_file, to, 0);
/* Report a change only when the new value differs structurally.  */
2521 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2523 VN_INFO (from)->valnum = to;
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2525 fprintf (dump_file, " (changed)\n");
2528 if (dump_file && (dump_flags & TDF_DETAILS))
2529 fprintf (dump_file, "\n");
/* Pessimization helper: every SSA def of STMT is value-numbered to
   itself (the VARYING representation in this pass) and marked
   processed.  Returns whether any def's value number changed.  */
2533 /* Set all definitions in STMT to value number to themselves.
2534 Return true if a value number changed. */
2537 defs_to_varying (gimple stmt)
2539 bool changed = false;
2543 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2545 tree def = DEF_FROM_PTR (defp);
2547 VN_INFO (def)->use_processed = true;
2548 changed |= set_ssa_val_to (def, def);
/* Forward declarations for helpers defined later in this file.  */
2553 static bool expr_has_constants (tree expr);
2554 static tree valueize_expr (tree expr);
/* Value-number a copy LHS = RHS.  Collapses chains of SSA copies to the
   final value, inherits the copied name's cached expression and
   constant-ness, and records the result via set_ssa_val_to.  */
2556 /* Visit a copy between LHS and RHS, return true if the value number
2560 visit_copy (tree lhs, tree rhs)
2562 /* Follow chains of copies to their destination. */
2563 while (TREE_CODE (rhs) == SSA_NAME
2564 && SSA_VAL (rhs) != rhs)
2565 rhs = SSA_VAL (rhs);
2567 /* The copy may have a more interesting constant filled expression
2568 (we don't, since we know our RHS is just an SSA name). */
2569 if (TREE_CODE (rhs) == SSA_NAME)
2571 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2572 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2575 return set_ssa_val_to (lhs, rhs);
/* Value-number an n-ary operation: look STMT up in the nary hash table;
   on a hit, LHS takes the found value, otherwise LHS is numbered to
   itself and the statement is inserted for future lookups.  */
2578 /* Visit a nary operator RHS, value number it, and return true if the
2579 value number of LHS has changed as a result. */
2582 visit_nary_op (tree lhs, gimple stmt)
2584 bool changed = false;
2585 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2588 changed = set_ssa_val_to (lhs, result);
2591 changed = set_ssa_val_to (lhs, lhs);
2592 vn_nary_op_insert_stmt (stmt, lhs);
/* Value-number a call with a result (reached only for pure/const calls,
   see the caller's ECF_PURE|ECF_CONST check).  A vn_reference keyed on
   the valueized vuse, operands and type is looked up; a miss allocates
   a persistent copy into the current table.  */
2598 /* Visit a call STMT storing into LHS. Return true if the value number
2599 of the LHS has changed as a result. */
2602 visit_reference_op_call (tree lhs, gimple stmt)
2604 bool changed = false;
2605 struct vn_reference_s vr1;
2607 tree vuse = gimple_vuse (stmt);
/* Key on the value number of the vuse, not the vuse itself.  */
2609 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2610 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2611 vr1.type = gimple_expr_type (stmt);
2613 vr1.hashcode = vn_reference_compute_hash (&vr1);
2614 result = vn_reference_lookup_1 (&vr1, NULL);
2617 changed = set_ssa_val_to (lhs, result);
2618 if (TREE_CODE (result) == SSA_NAME
2619 && VN_INFO (result)->has_constants)
2620 VN_INFO (lhs)->has_constants = true;
/* Lookup miss: number LHS to itself and insert a heap copy, since vr1's
   operands used a shared scratch vector.  */
2626 changed = set_ssa_val_to (lhs, lhs);
2627 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2628 vr2->vuse = vr1.vuse;
2629 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2630 vr2->type = vr1.type;
2632 vr2->hashcode = vr1.hashcode;
2634 slot = htab_find_slot_with_hash (current_info->references,
2635 vr2, vr2->hashcode, INSERT);
2637 free_reference (*slot);
/* Value-number a load OP into LHS.  Tries a reference lookup (possibly
   walking the virtual use-def chain), handles type-punned results via
   VIEW_CONVERT_EXPR, and may synthesize a fresh SSA name (marked
   needs_insertion) to stand for a not-yet-available converted value.
   NOTE(review): elided listing -- some branch conditions are missing.  */
2644 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2645 and return true if the value number of the LHS has changed as a result. */
2648 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2650 bool changed = false;
/* last_vuse_ptr lets the walker report the vuse actually reached, so the
   insertion below can use the most specific memory state.  */
2654 last_vuse = gimple_vuse (stmt);
2655 last_vuse_ptr = &last_vuse;
2656 result = vn_reference_lookup (op, gimple_vuse (stmt),
2657 default_vn_walk_kind, NULL);
2658 last_vuse_ptr = NULL;
2660 /* If we have a VCE, try looking up its operand as it might be stored in
2661 a different type. */
2662 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2663 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2664 default_vn_walk_kind, NULL);
2666 /* We handle type-punning through unions by value-numbering based
2667 on offset and size of the access. Be prepared to handle a
2668 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2670 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2672 /* We will be setting the value number of lhs to the value number
2673 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2674 So first simplify and lookup this expression to see if it
2675 is already available. */
2676 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
/* Try folding a conversion-of-conversion through the operand's cached
   expression; fold_unary_ignore_overflow may collapse the pair.  */
2677 if ((CONVERT_EXPR_P (val)
2678 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2679 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2681 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2682 if ((CONVERT_EXPR_P (tem)
2683 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2684 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2685 TREE_TYPE (val), tem)))
2689 if (!is_gimple_min_invariant (val)
2690 && TREE_CODE (val) != SSA_NAME)
2691 result = vn_nary_op_lookup (val, NULL);
2692 /* If the expression is not yet available, value-number lhs to
2693 a new SSA_NAME we create. */
2696 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2697 /* Initialize value-number information properly. */
2698 VN_INFO_GET (result)->valnum = result;
2699 VN_INFO (result)->value_id = get_next_value_id ();
2700 VN_INFO (result)->expr = val;
2701 VN_INFO (result)->has_constants = expr_has_constants (val);
2702 VN_INFO (result)->needs_insertion = true;
2703 /* As all "inserted" statements are singleton SCCs, insert
2704 to the valid table. This is strictly needed to
2705 avoid re-generating new value SSA_NAMEs for the same
2706 expression during SCC iteration over and over (the
2707 optimistic table gets cleared after each iteration).
2708 We do not need to insert into the optimistic table, as
2709 lookups there will fall back to the valid table. */
2710 if (current_info == optimistic_info)
2712 current_info = valid_info;
2713 vn_nary_op_insert (val, result);
2714 current_info = optimistic_info;
2717 vn_nary_op_insert (val, result);
2718 if (dump_file && (dump_flags & TDF_DETAILS))
2720 fprintf (dump_file, "Inserting name ");
2721 print_generic_expr (dump_file, result, 0);
2722 fprintf (dump_file, " for expression ");
2723 print_generic_expr (dump_file, val, 0);
2724 fprintf (dump_file, "\n");
/* Lookup succeeded: LHS takes the found value and inherits its
   constant-ness/expression when the result is an SSA name.  */
2731 changed = set_ssa_val_to (lhs, result);
2732 if (TREE_CODE (result) == SSA_NAME
2733 && VN_INFO (result)->has_constants)
2735 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2736 VN_INFO (lhs)->has_constants = true;
/* Lookup failed: number LHS to itself and record the load keyed on the
   vuse the walker actually reached (last_vuse).  */
2741 changed = set_ssa_val_to (lhs, lhs);
2742 vn_reference_insert (op, lhs, last_vuse);
/* Value-number a store of OP into reference LHS.  If the same value was
   already stored at this location under the incoming memory state, the
   vdef is numbered to the vuse's value (store is redundant); otherwise
   the vdef becomes varying and the store is recorded.  */
2749 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2750 and return true if the value number of the LHS has changed as a result. */
2753 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2755 bool changed = false;
2757 bool resultsame = false;
2759 /* First we want to lookup using the *vuses* from the store and see
2760 if there the last store to this location with the same address
2763 The vuses represent the memory state before the store. If the
2764 memory state, address, and value of the store is the same as the
2765 last store to this location, then this store will produce the
2766 same memory state as that store.
2768 In this case the vdef versions for this store are value numbered to those
2769 vuse versions, since they represent the same memory state after
2772 Otherwise, the vdefs for the store are used when inserting into
2773 the table, since the store generates a new memory state. */
/* VN_NOWALK: compare against the immediately dominating memory state
   only; do not walk through intervening stores.  */
2775 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2779 if (TREE_CODE (result) == SSA_NAME)
2780 result = SSA_VAL (result);
2781 if (TREE_CODE (op) == SSA_NAME)
2783 resultsame = expressions_equal_p (result, op);
2786 if (!result || !resultsame)
2790 if (dump_file && (dump_flags & TDF_DETAILS))
2792 fprintf (dump_file, "No store match\n");
2793 fprintf (dump_file, "Value numbering store ");
2794 print_generic_expr (dump_file, lhs, 0);
2795 fprintf (dump_file, " to ");
2796 print_generic_expr (dump_file, op, 0);
2797 fprintf (dump_file, "\n");
2799 /* Have to set value numbers before insert, since insert is
2800 going to valueize the references in-place. */
2801 if ((vdef = gimple_vdef (stmt)))
2803 VN_INFO (vdef)->use_processed = true;
2804 changed |= set_ssa_val_to (vdef, vdef);
2807 /* Do not insert structure copies into the tables. */
2808 if (is_gimple_min_invariant (op)
2809 || is_gimple_reg (op))
2810 vn_reference_insert (lhs, op, vdef);
2814 /* We had a match, so value number the vdef to have the value
2815 number of the vuse it came from. */
2818 if (dump_file && (dump_flags & TDF_DETAILS))
2819 fprintf (dump_file, "Store matched earlier value,"
2820 "value numbering store vdefs to matching vuses.\n");
2822 def = gimple_vdef (stmt);
2823 use = gimple_vuse (stmt);
2825 VN_INFO (def)->use_processed = true;
2826 changed |= set_ssa_val_to (def, SSA_VAL (use));
/* Value-number a PHI node.  If all non-VN_TOP arguments agree, the PHI
   is numbered to that common value; otherwise it is matched against
   structurally equal PHIs in the same block via the phi hash table, and
   numbered to itself on a miss.  */
2832 /* Visit and value number PHI, return true if the value number
2836 visit_phi (gimple phi)
2838 bool changed = false;
2840 tree sameval = VN_TOP;
2841 bool allsame = true;
2844 /* TODO: We could check for this in init_sccvn, and replace this
2845 with a gcc_assert. */
2846 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2847 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2849 /* See if all non-TOP arguments have the same value. TOP is
2850 equivalent to everything, so we can ignore it. */
2851 for (i = 0; i < gimple_phi_num_args (phi); i++)
2853 tree def = PHI_ARG_DEF (phi, i);
2855 if (TREE_CODE (def) == SSA_NAME)
2856 def = SSA_VAL (def);
2859 if (sameval == VN_TOP)
2865 if (!expressions_equal_p (def, sameval))
2873 /* If all value numbered to the same value, the phi node has that
2877 if (is_gimple_min_invariant (sameval))
2879 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2880 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2884 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2885 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* SSA-name common value goes through visit_copy so copy chains are
   followed and cached expression state propagates.  */
2888 if (TREE_CODE (sameval) == SSA_NAME)
2889 return visit_copy (PHI_RESULT (phi), sameval);
2891 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2894 /* Otherwise, see if it is equivalent to a phi node in this block. */
2895 result = vn_phi_lookup (phi);
2898 if (TREE_CODE (result) == SSA_NAME)
2899 changed = visit_copy (PHI_RESULT (phi), result);
2901 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI found: record this one and number it to itself.  */
2905 vn_phi_insert (phi, PHI_RESULT (phi));
2906 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2907 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2908 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* Shallow check for constant operands in EXPR, dispatched on tree code
   class (unary: operand 0; binary: operand 0 or 1).  Deliberately does
   not recurse into reference operands -- see the comment below.  */
2914 /* Return true if EXPR contains constants. */
2917 expr_has_constants (tree expr)
2919 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2922 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2925 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2926 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2927 /* Constants inside reference ops are rarely interesting, but
2928 it can take a lot of looking to find them. */
2930 case tcc_declaration:
2933 return is_gimple_min_invariant (expr);
/* Statement-level analogue of expr_has_constants: true if any rhs
   operand of an assignment STMT is a gimple invariant.  Non-assignments
   report false (via the code check below).  */
2938 /* Return true if STMT contains constants. */
2941 stmt_has_constants (gimple stmt)
2943 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2946 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2948 case GIMPLE_UNARY_RHS:
2949 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2951 case GIMPLE_BINARY_RHS:
2952 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2953 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2954 case GIMPLE_TERNARY_RHS:
2955 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2956 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2957 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2958 case GIMPLE_SINGLE_RHS:
2959 /* Constants inside reference ops are rarely interesting, but
2960 it can take a lot of looking to find them. */
2961 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
/* Substitute value numbers for SSA operand(s) of EXPR IN PLACE via
   vn_valueize -- callers must pass an expression they own (e.g. the
   unshared cached expr), since the operands are overwritten.  */
2968 /* Replace SSA_NAMES in expr with their value numbers, and return the
2970 This is performed in place. */
2973 valueize_expr (tree expr)
2975 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2978 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2981 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
/* Try to fold STMT's binary rhs after substituting value numbers (and,
   for constant-bearing or comparison operands, the cached expression)
   for each operand.  Returns the folded tree only when it is a valid
   gimple rhs; otherwise NULL_TREE.  */
2988 /* Simplify the binary expression RHS, and return the result if
2992 simplify_binary_expression (gimple stmt)
2994 tree result = NULL_TREE;
2995 tree op0 = gimple_assign_rhs1 (stmt);
2996 tree op1 = gimple_assign_rhs2 (stmt);
2997 enum tree_code code = gimple_assign_rhs_code (stmt);
2999 /* This will not catch every single case we could combine, but will
3000 catch those with constants. The goal here is to simultaneously
3001 combine constants between expressions, but avoid infinite
3002 expansion of expressions during simplification. */
3003 if (TREE_CODE (op0) == SSA_NAME)
3005 if (VN_INFO (op0)->has_constants
3006 || TREE_CODE_CLASS (code) == tcc_comparison
3007 || code == COMPLEX_EXPR)
3008 op0 = valueize_expr (vn_get_expr_for (op0));
3010 op0 = vn_valueize (op0);
3013 if (TREE_CODE (op1) == SSA_NAME)
3015 if (VN_INFO (op1)->has_constants
3016 || code == COMPLEX_EXPR)
3017 op1 = valueize_expr (vn_get_expr_for (op1));
3019 op1 = vn_valueize (op1);
3022 /* Pointer plus constant can be represented as invariant address.
3023 Do so to allow further propatation, see also tree forwprop. */
3024 if (code == POINTER_PLUS_EXPR
3025 && host_integerp (op1, 1)
3026 && TREE_CODE (op0) == ADDR_EXPR
3027 && is_gimple_min_invariant (op0))
3028 return build_invariant_address (TREE_TYPE (op0),
3029 TREE_OPERAND (op0, 0),
3030 TREE_INT_CST_LOW (op1));
3032 /* Avoid folding if nothing changed. */
3033 if (op0 == gimple_assign_rhs1 (stmt)
3034 && op1 == gimple_assign_rhs2 (stmt))
/* Overflow warnings are deferred so fold() doesn't warn about code that
   may never be emitted; they are re-enabled keyed on whether the folded
   result is actually usable.  */
3037 fold_defer_overflow_warnings ();
3039 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3041 STRIP_USELESS_TYPE_CONVERSION (result);
3043 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3046 /* Make sure result is not a complex expression consisting
3047 of operators of operators (IE (a + b) + (a + c))
3048 Otherwise, we will end up with unbounded expressions if
3049 fold does anything at all. */
3050 if (result && valid_gimple_rhs_p (result))
/* Try to fold STMT's unary (or conversion-like reference) rhs, feeding
   fold the value-numbered / cached-expression operand.  Returns the
   folded tree when it is a valid gimple rhs, else NULL_TREE.  */
3056 /* Simplify the unary expression RHS, and return the result if
3060 simplify_unary_expression (gimple stmt)
3062 tree result = NULL_TREE;
3063 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3064 enum tree_code code = gimple_assign_rhs_code (stmt);
3066 /* We handle some tcc_reference codes here that are all
3067 GIMPLE_ASSIGN_SINGLE codes. */
3068 if (code == REALPART_EXPR
3069 || code == IMAGPART_EXPR
3070 || code == VIEW_CONVERT_EXPR
3071 || code == BIT_FIELD_REF)
3072 op0 = TREE_OPERAND (op0, 0);
3074 if (TREE_CODE (op0) != SSA_NAME)
3078 if (VN_INFO (op0)->has_constants)
3079 op0 = valueize_expr (vn_get_expr_for (op0));
3080 else if (CONVERT_EXPR_CODE_P (code)
3081 || code == REALPART_EXPR
3082 || code == IMAGPART_EXPR
3083 || code == VIEW_CONVERT_EXPR
3084 || code == BIT_FIELD_REF)
3086 /* We want to do tree-combining on conversion-like expressions.
3087 Make sure we feed only SSA_NAMEs or constants to fold though. */
3088 tree tem = valueize_expr (vn_get_expr_for (op0));
3089 if (UNARY_CLASS_P (tem)
3090 || BINARY_CLASS_P (tem)
3091 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3092 || TREE_CODE (tem) == SSA_NAME
3093 || TREE_CODE (tem) == CONSTRUCTOR
3094 || is_gimple_min_invariant (tem))
3098 /* Avoid folding if nothing changed, but remember the expression. */
3099 if (op0 == orig_op0)
/* BIT_FIELD_REF is ternary (operand, size, position), so it cannot go
   through fold_unary; rebuild it with fold_ternary instead.  */
3102 if (code == BIT_FIELD_REF)
3104 tree rhs = gimple_assign_rhs1 (stmt);
3105 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3106 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3109 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3112 STRIP_USELESS_TYPE_CONVERSION (result);
3113 if (valid_gimple_rhs_p (result))
/* Top-level simplification driver for an assignment STMT: first try
   pure constant folding through the current lattice, then the
   statement-combining helpers above, dispatched on the rhs code class.
   Returns a simplified tree or (presumably) NULL_TREE on failure --
   elided return paths not visible here.  */
3120 /* Try to simplify RHS using equivalences and constant folding. */
3123 try_to_simplify (gimple stmt)
3125 enum tree_code code = gimple_assign_rhs_code (stmt);
3128 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3129 in this case, there is no point in doing extra work. */
3130 if (code == SSA_NAME)
3133 /* First try constant folding based on our current lattice. */
3134 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3136 && (TREE_CODE (tem) == SSA_NAME
3137 || is_gimple_min_invariant (tem)))
3140 /* If that didn't work try combining multiple statements. */
3141 switch (TREE_CODE_CLASS (code))
3144 /* Fallthrough for some unary codes that can operate on registers. */
3145 if (!(code == REALPART_EXPR
3146 || code == IMAGPART_EXPR
3147 || code == VIEW_CONVERT_EXPR
3148 || code == BIT_FIELD_REF))
3150 /* We could do a little more with unary ops, if they expand
3151 into binary ops, but it's debatable whether it is worth it. */
3153 return simplify_unary_expression (stmt);
3155 case tcc_comparison:
3157 return simplify_binary_expression (stmt);
/* Central dispatcher of the pass: value-number the definition of SSA
   name USE.  Routes to visit_phi / visit_copy / visit_nary_op /
   visit_reference_op_{load,store,call} or defs_to_varying depending on
   the defining statement's shape, after attempting simplification.
   Returns true iff a value number changed (drives SCC iteration).
   NOTE(review): elided listing -- several branch conditions are only
   partially visible below.  */
3166 /* Visit and value number USE, return true if the value number
3170 visit_use (tree use)
3172 bool changed = false;
3173 gimple stmt = SSA_NAME_DEF_STMT (use);
3175 VN_INFO (use)->use_processed = true;
3177 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3178 if (dump_file && (dump_flags & TDF_DETAILS)
3179 && !SSA_NAME_IS_DEFAULT_DEF (use))
3181 fprintf (dump_file, "Value numbering ");
3182 print_generic_expr (dump_file, use, 0);
3183 fprintf (dump_file, " stmt = ");
3184 print_gimple_stmt (dump_file, stmt, 0, 0);
3187 /* Handle uninitialized uses. */
3188 if (SSA_NAME_IS_DEFAULT_DEF (use))
3189 changed = set_ssa_val_to (use, use);
3192 if (gimple_code (stmt) == GIMPLE_PHI)
3193 changed = visit_phi (stmt);
3194 else if (!gimple_has_lhs (stmt)
3195 || gimple_has_volatile_ops (stmt))
3196 changed = defs_to_varying (stmt);
3197 else if (is_gimple_assign (stmt))
3199 enum tree_code code = gimple_assign_rhs_code (stmt);
3200 tree lhs = gimple_assign_lhs (stmt);
3201 tree rhs1 = gimple_assign_rhs1 (stmt);
3204 /* Shortcut for copies. Simplifying copies is pointless,
3205 since we copy the expression and value they represent. */
3206 if (code == SSA_NAME
3207 && TREE_CODE (lhs) == SSA_NAME)
3209 changed = visit_copy (lhs, rhs1);
3212 simplified = try_to_simplify (stmt);
3215 if (dump_file && (dump_flags & TDF_DETAILS))
3217 fprintf (dump_file, "RHS ");
3218 print_gimple_expr (dump_file, stmt, 0, 0);
3219 fprintf (dump_file, " simplified to ");
3220 print_generic_expr (dump_file, simplified, 0);
3221 if (TREE_CODE (lhs) == SSA_NAME)
3222 fprintf (dump_file, " has constants %d\n",
3223 expr_has_constants (simplified));
3225 fprintf (dump_file, "\n");
3228 /* Setting value numbers to constants will occasionally
3229 screw up phi congruence because constants are not
3230 uniquely associated with a single ssa name that can be
3233 && is_gimple_min_invariant (simplified)
3234 && TREE_CODE (lhs) == SSA_NAME)
3236 VN_INFO (lhs)->expr = simplified;
3237 VN_INFO (lhs)->has_constants = true;
3238 changed = set_ssa_val_to (lhs, simplified);
3242 && TREE_CODE (simplified) == SSA_NAME
3243 && TREE_CODE (lhs) == SSA_NAME)
3245 changed = visit_copy (lhs, simplified);
3248 else if (simplified)
3250 if (TREE_CODE (lhs) == SSA_NAME)
3252 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3253 /* We have to unshare the expression or else
3254 valuizing may change the IL stream. */
3255 VN_INFO (lhs)->expr = unshare_expr (simplified);
3258 else if (stmt_has_constants (stmt)
3259 && TREE_CODE (lhs) == SSA_NAME)
3260 VN_INFO (lhs)->has_constants = true;
3261 else if (TREE_CODE (lhs) == SSA_NAME)
3263 /* We reset expr and constantness here because we may
3264 have been value numbering optimistically, and
3265 iterating. They may become non-constant in this case,
3266 even if they were optimistically constant. */
3268 VN_INFO (lhs)->has_constants = false;
3269 VN_INFO (lhs)->expr = NULL_TREE;
3272 if ((TREE_CODE (lhs) == SSA_NAME
3273 /* We can substitute SSA_NAMEs that are live over
3274 abnormal edges with their constant value. */
3275 && !(gimple_assign_copy_p (stmt)
3276 && is_gimple_min_invariant (rhs1))
3278 && is_gimple_min_invariant (simplified))
3279 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3280 /* Stores or copies from SSA_NAMEs that are live over
3281 abnormal edges are a problem. */
3282 || (code == SSA_NAME
3283 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3284 changed = defs_to_varying (stmt);
3285 else if (REFERENCE_CLASS_P (lhs)
3287 changed = visit_reference_op_store (lhs, rhs1, stmt);
3288 else if (TREE_CODE (lhs) == SSA_NAME)
3290 if ((gimple_assign_copy_p (stmt)
3291 && is_gimple_min_invariant (rhs1))
3293 && is_gimple_min_invariant (simplified)))
3295 VN_INFO (lhs)->has_constants = true;
3297 changed = set_ssa_val_to (lhs, simplified);
3299 changed = set_ssa_val_to (lhs, rhs1);
3303 switch (get_gimple_rhs_class (code))
3305 case GIMPLE_UNARY_RHS:
3306 case GIMPLE_BINARY_RHS:
3307 case GIMPLE_TERNARY_RHS:
3308 changed = visit_nary_op (lhs, stmt);
3310 case GIMPLE_SINGLE_RHS:
3311 switch (TREE_CODE_CLASS (code))
3314 /* VOP-less references can go through unary case. */
3315 if ((code == REALPART_EXPR
3316 || code == IMAGPART_EXPR
3317 || code == VIEW_CONVERT_EXPR
3318 || code == BIT_FIELD_REF)
3319 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3321 changed = visit_nary_op (lhs, stmt);
3325 case tcc_declaration:
3326 changed = visit_reference_op_load (lhs, rhs1, stmt);
3329 if (code == ADDR_EXPR)
3331 changed = visit_nary_op (lhs, stmt);
3334 else if (code == CONSTRUCTOR)
3336 changed = visit_nary_op (lhs, stmt);
3339 changed = defs_to_varying (stmt);
3343 changed = defs_to_varying (stmt);
3349 changed = defs_to_varying (stmt);
3351 else if (is_gimple_call (stmt))
3353 tree lhs = gimple_call_lhs (stmt);
3355 /* ??? We could try to simplify calls. */
3357 if (stmt_has_constants (stmt)
3358 && TREE_CODE (lhs) == SSA_NAME)
3359 VN_INFO (lhs)->has_constants = true;
3360 else if (TREE_CODE (lhs) == SSA_NAME)
3362 /* We reset expr and constantness here because we may
3363 have been value numbering optimistically, and
3364 iterating. They may become non-constant in this case,
3365 even if they were optimistically constant. */
3366 VN_INFO (lhs)->has_constants = false;
3367 VN_INFO (lhs)->expr = NULL_TREE;
3370 if (TREE_CODE (lhs) == SSA_NAME
3371 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3372 changed = defs_to_varying (stmt);
3373 /* ??? We should handle stores from calls. */
3374 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const calls are safe to value-number as expressions.  */
3376 if (!gimple_call_internal_p (stmt)
3377 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3378 changed = visit_reference_op_call (lhs, stmt)
3380 changed = defs_to_varying (stmt);
3383 changed = defs_to_varying (stmt);
/* qsort comparator ordering SSA names by the RPO position of their
   defining statements; ties broken by PHI-before-statement, statement
   uid, and finally SSA version for a stable total order.  */
3390 /* Compare two operands by reverse postorder index */
3393 compare_ops (const void *pa, const void *pb)
3395 const tree opa = *((const tree *)pa);
3396 const tree opb = *((const tree *)pb);
3397 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3398 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names with no real defining statement (default defs) sort by SSA
   version among themselves.  */
3402 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3403 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3404 else if (gimple_nop_p (opstmta))
3406 else if (gimple_nop_p (opstmtb))
3409 bba = gimple_bb (opstmta);
3410 bbb = gimple_bb (opstmtb);
3413 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Same basic block: PHIs precede ordinary statements; within the same
   kind fall back to uid then version.  */
3421 if (gimple_code (opstmta) == GIMPLE_PHI
3422 && gimple_code (opstmtb) == GIMPLE_PHI)
3423 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3424 else if (gimple_code (opstmta) == GIMPLE_PHI)
3426 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3428 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3429 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3431 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3433 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
/* Thin wrapper: sort SCC in place with compare_ops so the members come
   out in RPO order for iteration.  */
3436 /* Sort an array containing members of a strongly connected component
3437 SCC so that the members are ordered by RPO number.
3438 This means that when the sort is complete, iterating through the
3439 array will give you the members in RPO order. */
3442 sort_scc (VEC (tree, heap) *scc)
3444 VEC_qsort (tree, scc, compare_ops);
/* Move one nary entry from the (about to be discarded) optimistic table
   into INFO, allocating from INFO's obstack.  sizeof_vn_nary_op sizes
   the copy by the variable-length operand count.  */
3447 /* Insert the no longer used nary ONARY to the hash INFO. */
3450 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3452 size_t size = sizeof_vn_nary_op (onary->length);
3453 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3454 &info->nary_obstack);
3455 memcpy (nary, onary, size);
3456 vn_nary_op_insert_into (nary, info->nary, false);
/* Move one phi entry into INFO's table.  The phiargs vector ownership
   transfers with the struct copy, so the old entry's pointer is nulled
   to prevent a double free when the optimistic pool is emptied.  */
3459 /* Insert the no longer used phi OPHI to the hash INFO. */
3462 copy_phi (vn_phi_t ophi, vn_tables_t info)
3464 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3466 memcpy (phi, ophi, sizeof (*phi));
3467 ophi->phiargs = NULL;
3468 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3469 gcc_assert (!*slot);
/* Move one reference entry into INFO's table; operands vector ownership
   transfers (old pointer nulled).  An existing colliding entry is freed
   before being replaced -- elided line presumably stores REF into *slot.  */
3473 /* Insert the no longer used reference OREF to the hash INFO. */
3476 copy_reference (vn_reference_t oref, vn_tables_t info)
3480 ref = (vn_reference_t) pool_alloc (info->references_pool);
3481 memcpy (ref, oref, sizeof (*ref));
3482 oref->operands = NULL;
3483 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3486 free_reference (*slot);
/* Value-number one SCC.  Singletons (except PHIs, which can self-cycle)
   are visited once; real cycles are iterated to a fixed point using the
   optimistic table, which is wiped before each iteration.  On
   convergence the surviving optimistic entries are migrated into the
   valid table.  */
3490 /* Process a strongly connected component in the SSA graph. */
3493 process_scc (VEC (tree, heap) *scc)
3497 unsigned int iterations = 0;
3498 bool changed = true;
3504 /* If the SCC has a single member, just visit it. */
3505 if (VEC_length (tree, scc) == 1)
3507 tree use = VEC_index (tree, scc, 0);
3508 if (VN_INFO (use)->use_processed)
3510 /* We need to make sure it doesn't form a cycle itself, which can
3511 happen for self-referential PHI nodes. In that case we would
3512 end up inserting an expression with VN_TOP operands into the
3513 valid table which makes us derive bogus equivalences later.
3514 The cheapest way to check this is to assume it for all PHI nodes. */
3515 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3516 /* Fallthru to iteration. */ ;
3524 /* Iterate over the SCC with the optimistic table until it stops
3526 current_info = optimistic_info;
3531 if (dump_file && (dump_flags & TDF_DETAILS))
3532 fprintf (dump_file, "Starting iteration %d\n", iterations);
3533 /* As we are value-numbering optimistically we have to
3534 clear the expression tables and the simplified expressions
3535 in each iteration until we converge. */
3536 htab_empty (optimistic_info->nary);
3537 htab_empty (optimistic_info->phis);
3538 htab_empty (optimistic_info->references);
3539 obstack_free (&optimistic_info->nary_obstack, NULL);
3540 gcc_obstack_init (&optimistic_info->nary_obstack);
3541 empty_alloc_pool (optimistic_info->phis_pool);
3542 empty_alloc_pool (optimistic_info->references_pool);
3543 FOR_EACH_VEC_ELT (tree, scc, i, var)
3544 VN_INFO (var)->expr = NULL_TREE;
3545 FOR_EACH_VEC_ELT (tree, scc, i, var)
3546 changed |= visit_use (var);
3549 statistics_histogram_event (cfun, "SCC iterations", iterations);
3551 /* Finally, copy the contents of the no longer used optimistic
3552 table to the valid table. */
3553 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3554 copy_nary (nary, valid_info);
3555 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3556 copy_phi (phi, valid_info);
3557 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3558 copy_reference (ref, valid_info);
3560 current_info = valid_info;
/* Instantiate VEC support for ssa_op_iter (used by the iterative DFS
   below to save/restore use-walking state).  */
3563 DEF_VEC_O(ssa_op_iter);
3564 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the SCC rooted at NAME off sccstack and process it; returns false
   to abort the whole pass when the SCC exceeds PARAM_SCCVN_MAX_SCC_SIZE.
   Sorting (sort_scc) for multi-member SCCs is in an elided line near the
   dump call -- TODO confirm against full source.  */
3566 /* Pop the components of the found SCC for NAME off the SCC stack
3567 and process them. Returns true if all went well, false if
3568 we run into resource limits. */
3571 extract_and_process_scc_for_name (tree name)
3573 VEC (tree, heap) *scc = NULL;
3576 /* Found an SCC, pop the components off the SCC stack and
3580 x = VEC_pop (tree, sccstack);
3582 VN_INFO (x)->on_sccstack = false;
3583 VEC_safe_push (tree, heap, scc, x);
3584 } while (x != name);
3586 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3587 if (VEC_length (tree, scc)
3588 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3591 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3592 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3593 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3595 VEC_free (tree, heap, scc);
3599 if (VEC_length (tree, scc) > 1)
3602 if (dump_file && (dump_flags & TDF_DETAILS))
3603 print_scc (dump_file, scc);
3607 VEC_free (tree, heap, scc);
/* Iterative Tarjan-style DFS over SSA use-def edges starting at NAME
   (the function signature line is elided from this listing -- presumably
   `static bool DFS (tree name)`).  Maintains explicit namevec/itervec
   stacks instead of recursion; SCCs pop off in topological order and
   are handed to extract_and_process_scc_for_name.  */
3612 /* Depth first search on NAME to discover and process SCC's in the SSA
3614 Execution of this algorithm relies on the fact that the SCC's are
3615 popped off the stack in topological order.
3616 Returns true if successful, false if we stopped processing SCC's due
3617 to resource constraints. */
3622 VEC(ssa_op_iter, heap) *itervec = NULL;
3623 VEC(tree, heap) *namevec = NULL;
3624 use_operand_p usep = NULL;
/* Standard Tarjan bookkeeping: dfsnum is discovery order, low is the
   smallest dfsnum reachable, on_sccstack marks stack membership.  */
3631 VN_INFO (name)->dfsnum = next_dfs_num++;
3632 VN_INFO (name)->visited = true;
3633 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3635 VEC_safe_push (tree, heap, sccstack, name);
3636 VN_INFO (name)->on_sccstack = true;
3637 defstmt = SSA_NAME_DEF_STMT (name);
3639 /* Recursively DFS on our operands, looking for SCC's. */
3640 if (!gimple_nop_p (defstmt))
3642 /* Push a new iterator. */
3643 if (gimple_code (defstmt) == GIMPLE_PHI)
3644 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
3646 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3649 clear_and_done_ssa_iter (&iter);
3653 /* If we are done processing uses of a name, go up the stack
3654 of iterators and process SCCs as we found them. */
3655 if (op_iter_done (&iter))
3657 /* See if we found an SCC. */
3658 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3659 if (!extract_and_process_scc_for_name (name))
3661 VEC_free (tree, heap, namevec);
3662 VEC_free (ssa_op_iter, heap, itervec);
3666 /* Check if we are done. */
3667 if (VEC_empty (tree, namevec))
3669 VEC_free (tree, heap, namevec);
3670 VEC_free (ssa_op_iter, heap, itervec);
3674 /* Restore the last use walker and continue walking there. */
3676 name = VEC_pop (tree, namevec);
3677 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3678 sizeof (ssa_op_iter));
3679 VEC_pop (ssa_op_iter, itervec);
3680 goto continue_walking;
3683 use = USE_FROM_PTR (usep);
3685 /* Since we handle phi nodes, we will sometimes get
3686 invariants in the use expression. */
3687 if (TREE_CODE (use) == SSA_NAME)
3689 if (! (VN_INFO (use)->visited))
3691 /* Recurse by pushing the current use walking state on
3692 the stack and starting over. */
3693 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3694 VEC_safe_push(tree, heap, namevec, name);
3699 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3700 VN_INFO (use)->low);
/* Back edge to a node still on the SCC stack: lower our low-link.  */
3702 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3703 && VN_INFO (use)->on_sccstack)
3705 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3706 VN_INFO (name)->low);
3710 usep = op_iter_next_use (&iter);
/* Allocate all containers of one vn_tables_s: the three hash tables
   (phis own their entries via free_phi; nary entries live on the
   obstack) plus the obstack and the two alloc pools backing entries.  */
3714 /* Allocate a value number table. */
3717 allocate_vn_table (vn_tables_t table)
3719 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3720 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3721 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3724 gcc_obstack_init (&table->nary_obstack);
3725 table->phis_pool = create_alloc_pool ("VN phis",
3726 sizeof (struct vn_phi_s),
3728 table->references_pool = create_alloc_pool ("VN references",
3729 sizeof (struct vn_reference_s),
/* Release everything allocate_vn_table created, in the mirror order:
   hash tables first (their free functions run), then obstack and pools.  */
3733 /* Free a value number table. */
3736 free_vn_table (vn_tables_t table)
3738 htab_delete (table->phis);
3739 htab_delete (table->nary);
3740 htab_delete (table->references);
3741 obstack_free (&table->nary_obstack, NULL);
3742 free_alloc_pool (table->phis_pool);
3743 free_alloc_pool (table->references_pool);
/* Pass initialization (the function header is elided from this listing
   -- presumably `init_scc_vn (void)`).  Sets up dominance info, the
   constant-to-value-id map, the per-SSA-name aux table, the bb -> RPO
   index map, VN_TOP, and both value-numbering tables.  */
3751 int *rpo_numbers_temp;
3753 calculate_dominance_info (CDI_DOMINATORS);
3755 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3758 constant_value_ids = BITMAP_ALLOC (NULL);
3763 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3764 /* VEC_alloc doesn't actually grow it to the right size, it just
3765 preallocates the space to do so. */
3766 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3767 gcc_obstack_init (&vn_ssa_aux_obstack);
3769 shared_lookup_phiargs = NULL;
3770 shared_lookup_references = NULL;
3771 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3772 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3773 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3775 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3776 the i'th block in RPO order is bb. We want to map bb's to RPO
3777 numbers, so we need to rearrange this array. */
3778 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3779 rpo_numbers[rpo_numbers_temp[j]] = j;
3781 XDELETE (rpo_numbers_temp);
/* VN_TOP is a distinguished tree used as the lattice "unknown" value.  */
3783 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3785 /* Create the VN_INFO structures, and initialize value numbers to
3787 for (i = 0; i < num_ssa_names; i++)
3789 tree name = ssa_name (i);
3792 VN_INFO_GET (name)->valnum = VN_TOP;
3793 VN_INFO (name)->expr = NULL_TREE;
3794 VN_INFO (name)->value_id = 0;
/* Statement uids give compare_ops a within-block ordering.  */
3798 renumber_gimple_stmt_uids ();
3800 /* Create the valid and optimistic value numbering tables. */
3801 valid_info = XCNEW (struct vn_tables_s);
3802 allocate_vn_table (valid_info);
3803 optimistic_info = XCNEW (struct vn_tables_s);
3804 allocate_vn_table (optimistic_info);
/* NOTE(review): function header elided in this extract; from the body
   this appears to be the SCCVN teardown (free_scc_vn).  It undoes
   everything the initializer above set up, releasing SSA names that VN
   itself created along the way.  */
3812 htab_delete (constant_to_value_id);
3813 BITMAP_FREE (constant_value_ids);
3814 VEC_free (tree, heap, shared_lookup_phiargs);
3815 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3816 XDELETEVEC (rpo_numbers);
/* Release SSA names that value numbering inserted itself
   (needs_insertion) before freeing the aux table they live in.
   NOTE(review): the guard's first condition is elided here.  */
3818 for (i = 0; i < num_ssa_names; i++)
3820 tree name = ssa_name (i);
3822 && VN_INFO (name)->needs_insertion)
3823 release_ssa_name (name);
3825 obstack_free (&vn_ssa_aux_obstack, NULL);
3826 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3828 VEC_free (tree, heap, sccstack);
/* Free both VN tables: their internal structures first, then the
   containers themselves.  */
3829 free_vn_table (valid_info);
3830 XDELETE (valid_info);
3831 free_vn_table (optimistic_info);
3832 XDELETE (optimistic_info);
3835 /* Set *ID if we computed something useful in RESULT. */
/* Two useful cases: an SSA name carries its VN_INFO value id; a
   minimal invariant (constant) gets a shared constant value id.
   Anything else leaves *ID untouched.  */
3838 set_value_id_for_result (tree result, unsigned int *id)
3842 if (TREE_CODE (result) == SSA_NAME)
3843 *id = VN_INFO (result)->value_id;
3844 else if (is_gimple_min_invariant (result))
3845 *id = get_or_alloc_constant_value_id (result);
3849 /* Set the value ids in the valid hash tables. */
/* Walks all three valid_info hash tables (nary, phis, references) and
   stamps each entry's value_id from its result via
   set_value_id_for_result.  Run after value ids have been assigned to
   all SSA names.  NOTE(review): declarations and some macro arguments
   are elided in this extract.  */
3852 set_hashtable_value_ids (void)
3859 /* Now set the value ids of the things we had put in the hash
3862 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3863 vno, vn_nary_op_t, hi)
3864 set_value_id_for_result (vno->result, &vno->value_id);
3866 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3868 set_value_id_for_result (vp->result, &vp->value_id);
3870 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3871 vr, vn_reference_t, hi)
3872 set_value_id_for_result (vr->result, &vr->value_id);
3875 /* Do SCCVN. Returns true if it finished, false if we bailed out
3876 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3877 how we use the alias oracle walking during the VN process. */
/* Main SCCVN driver.  Phases visible here: seed parameter default
   defs with themselves, visit every used SSA name, assign value ids,
   propagate ids to a fixed point, stamp the hash tables, and optionally
   dump the result.  NOTE(review): this extract elides declarations,
   braces, and the DFS call inside the first loop.  */
3880 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3884 bool changed = true;
3886 default_vn_walk_kind = default_vn_walk_kind_;
3889 current_info = valid_info;
/* Incoming parameters value-number to themselves: their default
   definitions have no defining statement to derive a value from.  */
3891 for (param = DECL_ARGUMENTS (current_function_decl);
3893 param = DECL_CHAIN (param))
3895 if (gimple_default_def (cfun, param) != NULL)
3897 tree def = gimple_default_def (cfun, param);
3898 VN_INFO (def)->valnum = def;
/* Visit every not-yet-visited SSA name that actually has uses.
   NOTE(review): the per-name visiting call is elided here.  */
3902 for (i = 1; i < num_ssa_names; ++i)
3904 tree name = ssa_name (i);
3906 && VN_INFO (name)->visited == false
3907 && !has_zero_uses (name))
3915 /* Initialize the value ids. */
/* A name that values to itself or to VN_TOP heads its own value
   class and gets a fresh id; constants share per-constant ids.  */
3917 for (i = 1; i < num_ssa_names; ++i)
3919 tree name = ssa_name (i);
3923 info = VN_INFO (name);
3924 if (info->valnum == name
3925 || info->valnum == VN_TOP)
3926 info->value_id = get_next_value_id ();
3927 else if (is_gimple_min_invariant (info->valnum))
3928 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3931 /* Propagate until they stop changing. */
/* Chase valnum links so every name adopts the value id of its
   representative; iterate to a fixed point (the surrounding
   "changed" loop control is elided in this extract).  */
3935 for (i = 1; i < num_ssa_names; ++i)
3937 tree name = ssa_name (i);
3941 info = VN_INFO (name);
3942 if (TREE_CODE (info->valnum) == SSA_NAME
3943 && info->valnum != name
3944 && info->value_id != VN_INFO (info->valnum)->value_id)
3947 info->value_id = VN_INFO (info->valnum)->value_id;
3952 set_hashtable_value_ids ();
/* Debug dump: print "name = value" for each visited name whose value
   differs from itself.  */
3954 if (dump_file && (dump_flags & TDF_DETAILS))
3956 fprintf (dump_file, "Value numbers:\n");
3957 for (i = 0; i < num_ssa_names; i++)
3959 tree name = ssa_name (i);
3961 && VN_INFO (name)->visited
3962 && SSA_VAL (name) != name)
3964 print_generic_expr (dump_file, name, 0);
3965 fprintf (dump_file, " = ");
3966 print_generic_expr (dump_file, SSA_VAL (name), 0);
3967 fprintf (dump_file, "\n");
3975 /* Return the maximum value id we have ever seen. */
/* Read-only accessor for the value-id counter; does not advance it
   (contrast get_next_value_id below).  */
3978 get_max_value_id (void)
3980 return next_value_id;
3983 /* Return the next unique value id. */
/* Post-increment: returns the current counter value and advances it,
   so each call yields a distinct id.  */
3986 get_next_value_id (void)
3988 return next_value_id++;
3992 /* Compare two expressions E1 and E2 and return true if they are equal. */
/* NOTE(review): this extract elides the bodies of the first two cases
   and the final returns; only the structural comparison is visible.
   OEP_PURE_SAME treats calls to pure functions with equal arguments as
   equal operands.  */
3995 expressions_equal_p (tree e1, tree e2)
3997 /* The obvious case. */
4001 /* If only one of them is null, they cannot be equal. */
4005 /* Now perform the actual comparison. */
4006 if (TREE_CODE (e1) == TREE_CODE (e2)
4007 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4014 /* Return true if the nary operation NARY may trap. This is a copy
4015 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4018 vn_nary_may_trap (vn_nary_op_t nary)
4021 tree rhs2 = NULL_TREE;
4022 bool honor_nans = false;
4023 bool honor_snans = false;
4024 bool fp_operation = false;
4025 bool honor_trapv = false;
4029 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4030 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4031 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4034 fp_operation = FLOAT_TYPE_P (type);
4037 honor_nans = flag_trapping_math && !flag_finite_math_only;
4038 honor_snans = flag_signaling_nans != 0;
4040 else if (INTEGRAL_TYPE_P (type)
4041 && TYPE_OVERFLOW_TRAPS (type))
4044 if (nary->length >= 2)
4046 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4048 honor_nans, honor_snans, rhs2,
4054 for (i = 0; i < nary->length; ++i)
4055 if (tree_could_trap_p (nary->op[i]))