1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
/* NOTE(review): this file is a partial, line-numbered listing of GCC's
   tree-ssa-sccvn.c; many original lines are absent, so declarations and
   function bodies below are fragments.  Comments only are added here.  */
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
/* Map from constant tree to its value id, plus the bitmap of value ids
   known to denote constants (queried by value_id_constant_p below).  */
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
/* Shorthand for the current value number of SSA name X.  */
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
164 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
165 are allocated on an obstack for locality reasons, and to free them
166 without looping over the VEC. */
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
171 /* Return the value numbering information for a given SSA name. */
/* Indexed by SSA_NAME_VERSION; asserts the entry exists (VN_INFO_GET
   must have run for NAME first).  */
176 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 SSA_NAME_VERSION (name));
178 gcc_checking_assert (res);
182 /* Set the value numbering info for a given SSA name to a given
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
188 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 SSA_NAME_VERSION (name), value);
192 /* Initialize the value numbering info for a given SSA name.
193 This should be called just once for every SSA name. */
196 VN_INFO_GET (tree name)
198 vn_ssa_aux_t newinfo;
/* Obstack-allocate and zero the record, growing the table on demand so
   the SSA version is always a valid index.  */
200 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 SSA_NAME_VERSION (name) + 1);
205 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name), newinfo);
211 /* Get the representative expression for the SSA_NAME NAME. Returns
212 the representative SSA_NAME if there is no expression associated with it. */
215 vn_get_expr_for (tree name)
217 vn_ssa_aux_t vn = VN_INFO (name);
219 tree expr = NULL_TREE;
/* VN_TOP means the lattice has no information yet.  */
222 if (vn->valnum == VN_TOP)
225 /* If the value-number is a constant it is the representative
227 if (TREE_CODE (vn->valnum) != SSA_NAME)
230 /* Get to the information of the value of this SSA_NAME. */
231 vn = VN_INFO (vn->valnum);
233 /* If the value-number is a constant it is the representative
235 if (TREE_CODE (vn->valnum) != SSA_NAME)
238 /* Else if we have an expression, return it. */
239 if (vn->expr != NULL_TREE)
242 /* Otherwise use the defining statement to build the expression. */
243 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
245 /* If the value number is not an assignment use it directly. */
246 if (!is_gimple_assign (def_stmt))
249 /* FIXME tuples. This is incomplete and likely will miss some
251 code = gimple_assign_rhs_code (def_stmt);
/* Dispatch on the tree-code class of the RHS to rebuild a GENERIC
   expression for it.  NOTE(review): several case labels and breaks are
   missing from this listing; only the fold_build* actions are visible.  */
252 switch (TREE_CODE_CLASS (code))
255 if ((code == REALPART_EXPR
256 || code == IMAGPART_EXPR
257 || code == VIEW_CONVERT_EXPR)
258 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
260 expr = fold_build1 (code,
261 gimple_expr_type (def_stmt),
262 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
266 expr = fold_build1 (code,
267 gimple_expr_type (def_stmt),
268 gimple_assign_rhs1 (def_stmt));
272 expr = fold_build2 (code,
273 gimple_expr_type (def_stmt),
274 gimple_assign_rhs1 (def_stmt),
275 gimple_assign_rhs2 (def_stmt));
278 case tcc_exceptional:
279 if (code == CONSTRUCTOR
281 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
282 expr = gimple_assign_rhs1 (def_stmt);
287 if (expr == NULL_TREE)
290 /* Cache the expression. */
297 /* Free a phi operation structure VP. */
302 vn_phi_t phi = (vn_phi_t) vp;
303 VEC_free (tree, heap, phi->phiargs);
306 /* Free a reference operation structure VP. */
309 free_reference (void *vp)
311 vn_reference_t vr = (vn_reference_t) vp;
312 VEC_free (vn_reference_op_s, heap, vr->operands);
315 /* Hash table equality function for vn_constant_t. */
318 vn_constant_eq (const void *p1, const void *p2)
320 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
321 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
/* Cheap hash comparison first; fall back to a typed structural
   comparison of the constants only on hash equality.  */
323 if (vc1->hashcode != vc2->hashcode)
326 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
329 /* Hash table hash function for vn_constant_t. */
332 vn_constant_hash (const void *p1)
334 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
335 return vc1->hashcode;
338 /* Lookup a value id for CONSTANT and return it. If it does not
342 get_constant_value_id (tree constant)
345 struct vn_constant_s vc;
/* Probe constant_to_value_id with NO_INSERT: a pure lookup.  */
347 vc.hashcode = vn_hash_constant_with_type (constant);
348 vc.constant = constant;
349 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
350 vc.hashcode, NO_INSERT);
352 return ((vn_constant_t)*slot)->value_id;
356 /* Lookup a value id for CONSTANT, and if it does not exist, create a
357 new one and return it. If it does exist, return it. */
360 get_or_alloc_constant_value_id (tree constant)
363 struct vn_constant_s vc;
366 vc.hashcode = vn_hash_constant_with_type (constant);
367 vc.constant = constant;
368 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
369 vc.hashcode, INSERT);
371 return ((vn_constant_t)*slot)->value_id;
/* Not found: allocate a fresh entry, assign the next value id and
   record it as a constant value id.  */
373 vcp = XNEW (struct vn_constant_s);
374 vcp->hashcode = vc.hashcode;
375 vcp->constant = constant;
376 vcp->value_id = get_next_value_id ();
377 *slot = (void *) vcp;
378 bitmap_set_bit (constant_value_ids, vcp->value_id);
379 return vcp->value_id;
382 /* Return true if V is a value id for a constant. */
385 value_id_constant_p (unsigned int v)
387 return bitmap_bit_p (constant_value_ids, v);
390 /* Compare two reference operands P1 and P2 for equality. Return true if
391 they are equal, and false otherwise. */
394 vn_reference_op_eq (const void *p1, const void *p2)
396 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
397 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
399 return (vro1->opcode == vro2->opcode
400 /* We do not care for differences in type qualification. */
401 && (vro1->type == vro2->type
402 || (vro1->type && vro2->type
403 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
404 TYPE_MAIN_VARIANT (vro2->type))))
405 && expressions_equal_p (vro1->op0, vro2->op0)
406 && expressions_equal_p (vro1->op1, vro2->op1)
407 && expressions_equal_p (vro1->op2, vro2->op2));
410 /* Compute the hash for a reference operand VRO1. */
/* Folds opcode and the (possibly NULL) op0..op2 operands into RESULT
   using the iterative hash helpers.  */
413 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
415 result = iterative_hash_hashval_t (vro1->opcode, result);
417 result = iterative_hash_expr (vro1->op0, result);
419 result = iterative_hash_expr (vro1->op1, result);
421 result = iterative_hash_expr (vro1->op2, result);
425 /* Return the hashcode for a given reference operation P1. */
428 vn_reference_hash (const void *p1)
430 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
431 return vr1->hashcode;
434 /* Compute a hash for the reference operation VR1 and return it. */
437 vn_reference_compute_hash (const vn_reference_t vr1)
439 hashval_t result = 0;
441 vn_reference_op_t vro;
/* OFF accumulates a running constant byte offset across MEM_REF /
   ADDR_EXPR pairs; -1 means "no known constant offset".  */
442 HOST_WIDE_INT off = -1;
445 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
447 if (vro->opcode == MEM_REF)
449 else if (vro->opcode != ADDR_EXPR)
461 result = iterative_hash_hashval_t (off, result);
464 && vro->opcode == ADDR_EXPR)
/* For a dereferenced ADDR_EXPR hash the pointed-to object rather than
   the address expression itself.  */
468 tree op = TREE_OPERAND (vro->op0, 0);
469 result = iterative_hash_hashval_t (TREE_CODE (op), result);
470 result = iterative_hash_expr (op, result);
474 result = vn_reference_op_compute_hash (vro, result);
/* Mix in the VUSE so references with different memory states hash
   differently.  */
478 result += SSA_NAME_VERSION (vr1->vuse);
483 /* Return true if reference operations P1 and P2 are equivalent. This
484 means they have the same set of operands and vuses. */
487 vn_reference_eq (const void *p1, const void *p2)
491 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
492 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
/* NOTE(review): the hashcode test appears twice below; in the upstream
   source there is a single early-out, so one of these is likely an
   artifact of this partial listing — verify against the real file.  */
493 if (vr1->hashcode != vr2->hashcode)
496 /* Early out if this is not a hash collision. */
497 if (vr1->hashcode != vr2->hashcode)
500 /* The VOP needs to be the same. */
501 if (vr1->vuse != vr2->vuse)
504 /* If the operands are the same we are done. */
505 if (vr1->operands == vr2->operands)
/* Types must agree in size, and for integral types in precision;
   a type whose precision does not fill its size cannot be matched
   against a non-integral access.  */
508 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
511 if (INTEGRAL_TYPE_P (vr1->type)
512 && INTEGRAL_TYPE_P (vr2->type))
514 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
517 else if (INTEGRAL_TYPE_P (vr1->type)
518 && (TYPE_PRECISION (vr1->type)
519 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
521 else if (INTEGRAL_TYPE_P (vr2->type)
522 && (TYPE_PRECISION (vr2->type)
523 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
/* Walk both operand vectors in lockstep, treating MEM_REF/ADDR_EXPR
   pairs as dereferences: an ADDR_EXPR under a deref is rewritten into a
   temporary operand for the pointed-to object so the two chains compare
   structurally equal.  */
530 HOST_WIDE_INT off1 = 0, off2 = 0;
531 vn_reference_op_t vro1, vro2;
532 vn_reference_op_s tem1, tem2;
533 bool deref1 = false, deref2 = false;
534 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
536 if (vro1->opcode == MEM_REF)
542 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
544 if (vro2->opcode == MEM_REF)
552 if (deref1 && vro1->opcode == ADDR_EXPR)
554 memset (&tem1, 0, sizeof (tem1));
555 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
556 tem1.type = TREE_TYPE (tem1.op0);
557 tem1.opcode = TREE_CODE (tem1.op0);
560 if (deref2 && vro2->opcode == ADDR_EXPR)
562 memset (&tem2, 0, sizeof (tem2));
563 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
564 tem2.type = TREE_TYPE (tem2.op0);
565 tem2.opcode = TREE_CODE (tem2.op0);
568 if (!vn_reference_op_eq (vro1, vro2))
/* Equal only if both vectors are exhausted together.  */
573 while (VEC_length (vn_reference_op_s, vr1->operands) != i
574 || VEC_length (vn_reference_op_s, vr2->operands) != j);
579 /* Copy the operations present in load/store REF into RESULT, a vector of
580 vn_reference_op_s's. */
583 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* TARGET_MEM_REF is decomposed specially into three ops: the TMR
   itself (index/step/offset), the second index, and the base.  */
585 if (TREE_CODE (ref) == TARGET_MEM_REF)
587 vn_reference_op_s temp;
589 memset (&temp, 0, sizeof (temp));
590 temp.type = TREE_TYPE (ref);
591 temp.opcode = TREE_CODE (ref);
592 temp.op0 = TMR_INDEX (ref);
593 temp.op1 = TMR_STEP (ref);
594 temp.op2 = TMR_OFFSET (ref);
596 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
598 memset (&temp, 0, sizeof (temp));
599 temp.type = NULL_TREE;
600 temp.opcode = ERROR_MARK;
601 temp.op0 = TMR_INDEX2 (ref);
603 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
605 memset (&temp, 0, sizeof (temp));
606 temp.type = NULL_TREE;
607 temp.opcode = TREE_CODE (TMR_BASE (ref));
608 temp.op0 = TMR_BASE (ref);
610 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
614 /* For non-calls, store the information that makes up the address. */
/* NOTE(review): the switch on TREE_CODE (ref) and several case labels
   are missing from this listing; the per-code actions below are what
   remains visible.  */
618 vn_reference_op_s temp;
620 memset (&temp, 0, sizeof (temp));
621 temp.type = TREE_TYPE (ref);
622 temp.opcode = TREE_CODE (ref);
628 /* The base address gets its own vn_reference_op_s structure. */
629 temp.op0 = TREE_OPERAND (ref, 1);
630 if (host_integerp (TREE_OPERAND (ref, 1), 0))
631 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
634 /* Record bits and position. */
635 temp.op0 = TREE_OPERAND (ref, 1);
636 temp.op1 = TREE_OPERAND (ref, 2);
639 /* The field decl is enough to unambiguously specify the field,
640 a matching type is not necessary and a mismatching type
641 is always a spurious difference. */
642 temp.type = NULL_TREE;
643 temp.op0 = TREE_OPERAND (ref, 1);
644 temp.op1 = TREE_OPERAND (ref, 2);
/* For a COMPONENT_REF with constant field offset, precompute the
   byte offset when the bit offset is byte-aligned.  */
646 tree this_offset = component_ref_field_offset (ref);
648 && TREE_CODE (this_offset) == INTEGER_CST)
650 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
651 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
654 = double_int_add (tree_to_double_int (this_offset),
656 (tree_to_double_int (bit_offset),
658 ? 3 : exact_log2 (BITS_PER_UNIT),
659 HOST_BITS_PER_DOUBLE_INT, true));
660 if (double_int_fits_in_shwi_p (off))
666 case ARRAY_RANGE_REF:
668 /* Record index as operand. */
669 temp.op0 = TREE_OPERAND (ref, 1);
670 /* Always record lower bounds and element size. */
671 temp.op1 = array_ref_low_bound (ref);
672 temp.op2 = array_ref_element_size (ref);
/* Constant index, bound and size: fold to (index - low) * size.  */
673 if (TREE_CODE (temp.op0) == INTEGER_CST
674 && TREE_CODE (temp.op1) == INTEGER_CST
675 && TREE_CODE (temp.op2) == INTEGER_CST)
677 double_int off = tree_to_double_int (temp.op0);
678 off = double_int_add (off,
680 (tree_to_double_int (temp.op1)));
681 off = double_int_mul (off, tree_to_double_int (temp.op2));
682 if (double_int_fits_in_shwi_p (off))
687 if (DECL_HARD_REGISTER (ref))
696 /* Canonicalize decls to MEM[&decl] which is what we end up with
697 when valueizing MEM[ptr] with ptr = &decl. */
698 temp.opcode = MEM_REF;
699 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
701 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
702 temp.opcode = ADDR_EXPR;
703 temp.op0 = build_fold_addr_expr (ref);
704 temp.type = TREE_TYPE (temp.op0);
718 if (is_gimple_min_invariant (ref))
724 /* These are only interesting for their operands, their
725 existence, and their type. They will never be the last
726 ref in the chain of references (IE they require an
727 operand), so we don't have to put anything
728 for op* as it will be handled by the iteration */
730 case VIEW_CONVERT_EXPR:
734 /* This is only interesting for its constant offset. */
735 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
740 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the handled component / non-invariant address and
   loop (outer iteration not visible in this listing).  */
742 if (REFERENCE_CLASS_P (ref)
743 || (TREE_CODE (ref) == ADDR_EXPR
744 && !is_gimple_min_invariant (ref)))
745 ref = TREE_OPERAND (ref, 0);
751 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
752 operands in *OPS, the reference alias set SET and the reference type TYPE.
753 Return true if something useful was produced. */
756 ao_ref_init_from_vn_reference (ao_ref *ref,
757 alias_set_type set, tree type,
758 VEC (vn_reference_op_s, heap) *ops)
760 vn_reference_op_t op;
762 tree base = NULL_TREE;
764 HOST_WIDE_INT offset = 0;
765 HOST_WIDE_INT max_size;
766 HOST_WIDE_INT size = -1;
767 tree size_tree = NULL_TREE;
768 alias_set_type base_alias_set = -1;
770 /* First get the final access size from just the outermost expression. */
771 op = VEC_index (vn_reference_op_s, ops, 0);
772 if (op->opcode == COMPONENT_REF)
773 size_tree = DECL_SIZE (op->op0);
774 else if (op->opcode == BIT_FIELD_REF)
778 enum machine_mode mode = TYPE_MODE (type);
780 size_tree = TYPE_SIZE (type);
782 size = GET_MODE_BITSIZE (mode);
784 if (size_tree != NULL_TREE)
786 if (!host_integerp (size_tree, 1))
789 size = TREE_INT_CST_LOW (size_tree);
792 /* Initially, maxsize is the same as the accessed element size.
793 In the following it will only grow (or become -1). */
796 /* Compute cumulative bit-offset for nested component-refs and array-refs,
797 and find the ultimate containing object. */
/* NOTE(review): the switch over op->opcode and several case labels are
   missing from this listing; the visible statements are the per-opcode
   offset accumulation actions.  */
798 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
802 /* These may be in the reference ops, but we cannot do anything
803 sensible with them here. */
805 /* Apart from ADDR_EXPR arguments to MEM_REF. */
806 if (base != NULL_TREE
807 && TREE_CODE (base) == MEM_REF
809 && DECL_P (TREE_OPERAND (op->op0, 0)))
811 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
812 base = TREE_OPERAND (op->op0, 0);
819 offset += pop->off * BITS_PER_UNIT;
827 /* Record the base objects. */
829 base_alias_set = get_deref_alias_set (op->op0);
830 *op0_p = build2 (MEM_REF, op->type,
832 op0_p = &TREE_OPERAND (*op0_p, 0);
843 /* And now the usual component-reference style ops. */
845 offset += tree_low_cst (op->op1, 0);
850 tree field = op->op0;
851 /* We do not have a complete COMPONENT_REF tree here so we
852 cannot use component_ref_field_offset. Do the interesting
856 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
860 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
862 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
867 case ARRAY_RANGE_REF:
869 /* We recorded the lower bound and the element size. */
870 if (!host_integerp (op->op0, 0)
871 || !host_integerp (op->op1, 0)
872 || !host_integerp (op->op2, 0))
/* offset += (index - low-bound) * element-size, in bits.  */
876 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
877 hindex -= TREE_INT_CST_LOW (op->op1);
878 hindex *= TREE_INT_CST_LOW (op->op2);
879 hindex *= BITS_PER_UNIT;
891 case VIEW_CONVERT_EXPR:
908 if (base == NULL_TREE)
/* Fill in the ao_ref result fields.  */
911 ref->ref = NULL_TREE;
913 ref->offset = offset;
915 ref->max_size = max_size;
916 ref->ref_alias_set = set;
917 if (base_alias_set != -1)
918 ref->base_alias_set = base_alias_set;
920 ref->base_alias_set = get_alias_set (base);
921 /* We discount volatiles from value-numbering elsewhere. */
922 ref->volatile_p = false;
927 /* Copy the operations present in load/store/call REF into RESULT, a vector of
928 vn_reference_op_s's. */
931 copy_reference_ops_from_call (gimple call,
932 VEC(vn_reference_op_s, heap) **result)
934 vn_reference_op_s temp;
937 /* Copy the type, opcode, function being called and static chain. */
938 memset (&temp, 0, sizeof (temp));
939 temp.type = gimple_call_return_type (call);
940 temp.opcode = CALL_EXPR;
941 temp.op0 = gimple_call_fn (call);
942 temp.op1 = gimple_call_chain (call);
944 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
946 /* Copy the call arguments. As they can be references as well,
947 just chain them together. */
948 for (i = 0; i < gimple_call_num_args (call); ++i)
950 tree callarg = gimple_call_arg (call, i);
951 copy_reference_ops_from_ref (callarg, result);
955 /* Create a vector of vn_reference_op_s structures from REF, a
956 REFERENCE_CLASS_P tree. The vector is not shared. */
/* Caller owns the returned heap vector (see free_reference).  */
958 static VEC(vn_reference_op_s, heap) *
959 create_reference_ops_from_ref (tree ref)
961 VEC (vn_reference_op_s, heap) *result = NULL;
963 copy_reference_ops_from_ref (ref, &result);
967 /* Create a vector of vn_reference_op_s structures from CALL, a
968 call statement. The vector is not shared. */
970 static VEC(vn_reference_op_s, heap) *
971 create_reference_ops_from_call (gimple call)
973 VEC (vn_reference_op_s, heap) *result = NULL;
975 copy_reference_ops_from_call (call, &result);
979 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
980 *I_P to point to the last element of the replacement. */
982 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
985 unsigned int i = *i_p;
986 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
987 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
989 HOST_WIDE_INT addr_offset;
991 /* The only thing we have to do is from &OBJ.foo.bar add the offset
992 from .foo.bar to the preceding MEM_REF offset and replace the
993 address with &OBJ. */
994 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
996 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
997 if (addr_base != op->op0)
/* Sign-extend the MEM_REF offset to its type precision, add the unit
   offset of the stripped components, and rebuild the constant.  */
999 double_int off = tree_to_double_int (mem_op->op0);
1000 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1001 off = double_int_add (off, shwi_to_double_int (addr_offset));
1002 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1003 op->op0 = build_fold_addr_expr (addr_base);
1004 if (host_integerp (mem_op->op0, 0))
1005 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1011 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1012 *I_P to point to the last element of the replacement. */
1014 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1017 unsigned int i = *i_p;
1018 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1019 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1021 enum tree_code code;
/* Only ADDR_EXPR and POINTER_PLUS_EXPR definitions of the pointer
   SSA name can be forward-propagated into the MEM_REF offset.  */
1024 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1025 if (!is_gimple_assign (def_stmt))
1028 code = gimple_assign_rhs_code (def_stmt);
1029 if (code != ADDR_EXPR
1030 && code != POINTER_PLUS_EXPR)
1033 off = tree_to_double_int (mem_op->op0);
1034 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1036 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1037 from .foo.bar to the preceding MEM_REF offset and replace the
1038 address with &OBJ. */
1039 if (code == ADDR_EXPR)
1041 tree addr, addr_base;
1042 HOST_WIDE_INT addr_offset;
1044 addr = gimple_assign_rhs1 (def_stmt);
1045 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1048 || TREE_CODE (addr_base) != MEM_REF)
1051 off = double_int_add (off, shwi_to_double_int (addr_offset));
1052 off = double_int_add (off, mem_ref_offset (addr_base));
1053 op->op0 = TREE_OPERAND (addr_base, 0);
/* POINTER_PLUS_EXPR case: fold a constant pointer adjustment.  */
1058 ptr = gimple_assign_rhs1 (def_stmt);
1059 ptroff = gimple_assign_rhs2 (def_stmt);
1060 if (TREE_CODE (ptr) != SSA_NAME
1061 || TREE_CODE (ptroff) != INTEGER_CST)
1064 off = double_int_add (off, tree_to_double_int (ptroff));
1068 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1069 if (host_integerp (mem_op->op0, 0))
1070 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
/* Valueize the new address operand and recurse while it keeps
   simplifying.  */
1073 if (TREE_CODE (op->op0) == SSA_NAME)
1074 op->op0 = SSA_VAL (op->op0);
1075 if (TREE_CODE (op->op0) != SSA_NAME)
1076 op->opcode = TREE_CODE (op->op0);
1079 if (TREE_CODE (op->op0) == SSA_NAME)
1080 vn_reference_maybe_forwprop_address (ops, i_p);
1081 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1082 vn_reference_fold_indirect (ops, i_p);
1085 /* Optimize the reference REF to a constant if possible or return
1086 NULL_TREE if not. */
1089 fully_constant_vn_reference_p (vn_reference_t ref)
1091 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1092 vn_reference_op_t op;
1094 /* Try to simplify the translated expression if it is
1095 a call to a builtin function with at most two arguments. */
1096 op = VEC_index (vn_reference_op_s, operands, 0);
1097 if (op->opcode == CALL_EXPR
1098 && TREE_CODE (op->op0) == ADDR_EXPR
1099 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1100 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1101 && VEC_length (vn_reference_op_s, operands) >= 2
1102 && VEC_length (vn_reference_op_s, operands) <= 3)
1104 vn_reference_op_t arg0, arg1 = NULL;
1105 bool anyconst = false;
1106 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1107 if (VEC_length (vn_reference_op_s, operands) > 2)
1108 arg1 = VEC_index (vn_reference_op_s, operands, 2);
/* Only try folding when at least one argument is a constant or an
   invariant address.  */
1109 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1110 || (arg0->opcode == ADDR_EXPR
1111 && is_gimple_min_invariant (arg0->op0)))
1114 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1115 || (arg1->opcode == ADDR_EXPR
1116 && is_gimple_min_invariant (arg1->op0))))
1120 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1123 arg1 ? arg1->op0 : NULL);
/* Strip a wrapping NOP_EXPR and accept only a min-invariant fold.  */
1125 && TREE_CODE (folded) == NOP_EXPR)
1126 folded = TREE_OPERAND (folded, 0);
1128 && is_gimple_min_invariant (folded))
1133 /* Simplify reads from constant strings. */
1134 else if (op->opcode == ARRAY_REF
1135 && TREE_CODE (op->op0) == INTEGER_CST
1136 && integer_zerop (op->op1)
1137 && VEC_length (vn_reference_op_s, operands) == 2)
1139 vn_reference_op_t arg0;
1140 arg0 = VEC_index (vn_reference_op_s, operands, 1);
/* A single-byte in-bounds read from a STRING_CST folds to the
   character constant.  */
1141 if (arg0->opcode == STRING_CST
1142 && (TYPE_MODE (op->type)
1143 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1144 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1145 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1146 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1147 return build_int_cst_type (op->type,
1148 (TREE_STRING_POINTER (arg0->op0)
1149 [TREE_INT_CST_LOW (op->op0)]));
1155 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1156 structures into their value numbers. This is done in-place, and
1157 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1158 whether any operands were valueized. */
1160 static VEC (vn_reference_op_s, heap) *
1161 valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
1163 vn_reference_op_t vro;
1166 *valueized_anything = false;
1168 FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
/* Replace each SSA_NAME operand (op0..op2) by its current value
   number, noting whether anything changed.  */
1170 if (vro->opcode == SSA_NAME
1171 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1173 tree tem = SSA_VAL (vro->op0);
1174 if (tem != vro->op0)
1176 *valueized_anything = true;
1179 /* If it transforms from an SSA_NAME to a constant, update
1181 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1182 vro->opcode = TREE_CODE (vro->op0);
1184 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1186 tree tem = SSA_VAL (vro->op1);
1187 if (tem != vro->op1)
1189 *valueized_anything = true;
1193 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1195 tree tem = SSA_VAL (vro->op2);
1196 if (tem != vro->op2)
1198 *valueized_anything = true;
1202 /* If it transforms from an SSA_NAME to an address, fold with
1203 a preceding indirect reference. */
1206 && TREE_CODE (vro->op0) == ADDR_EXPR
1207 && VEC_index (vn_reference_op_s,
1208 orig, i - 1)->opcode == MEM_REF)
1209 vn_reference_fold_indirect (&orig, &i);
1211 && vro->opcode == SSA_NAME
1212 && VEC_index (vn_reference_op_s,
1213 orig, i - 1)->opcode == MEM_REF)
1214 vn_reference_maybe_forwprop_address (&orig, &i);
1215 /* If it transforms a non-constant ARRAY_REF into a constant
1216 one, adjust the constant offset. */
1217 else if (vro->opcode == ARRAY_REF
1219 && TREE_CODE (vro->op0) == INTEGER_CST
1220 && TREE_CODE (vro->op1) == INTEGER_CST
1221 && TREE_CODE (vro->op2) == INTEGER_CST)
/* off = (index - low-bound) * element-size, mirroring the folding in
   copy_reference_ops_from_ref.  */
1223 double_int off = tree_to_double_int (vro->op0);
1224 off = double_int_add (off,
1226 (tree_to_double_int (vro->op1)));
1227 off = double_int_mul (off, tree_to_double_int (vro->op2));
1228 if (double_int_fits_in_shwi_p (off))
/* Convenience wrapper that discards the valueized-anything flag.  */
1236 static VEC (vn_reference_op_s, heap) *
1237 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1240 return valueize_refs_1 (orig, &tem);
/* Scratch vector reused by the valueize_shared_* helpers below to
   avoid reallocating on every lookup.  */
1243 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1245 /* Create a vector of vn_reference_op_s structures from REF, a
1246 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1247 this function. *VALUEIZED_ANYTHING will specify whether any
1248 operands were valueized. */
1250 static VEC(vn_reference_op_s, heap) *
1251 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1255 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1256 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1257 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1258 valueized_anything);
1259 return shared_lookup_references;
1262 /* Create a vector of vn_reference_op_s structures from CALL, a
1263 call statement. The vector is shared among all callers of
1266 static VEC(vn_reference_op_s, heap) *
1267 valueize_shared_reference_ops_from_call (gimple call)
1271 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1272 copy_reference_ops_from_call (call, &shared_lookup_references);
1273 shared_lookup_references = valueize_refs (shared_lookup_references);
1274 return shared_lookup_references;
1277 /* Lookup a SCCVN reference operation VR in the current hash table.
1278 Returns the resulting value number if it exists in the hash table,
1279 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1280 vn_reference_t stored in the hashtable if something is found. */
1283 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
/* Probe the current table first; during optimistic iteration also fall
   back to the proven-valid table.  */
1288 hash = vr->hashcode;
1289 slot = htab_find_slot_with_hash (current_info->references, vr,
1291 if (!slot && current_info == optimistic_info)
1292 slot = htab_find_slot_with_hash (valid_info->references, vr,
1297 *vnresult = (vn_reference_t)*slot;
1298 return ((vn_reference_t)*slot)->result;
/* Communication state for the walk_non_aliased_vuses callbacks.  */
1304 static tree *last_vuse_ptr;
1305 static vn_lookup_kind vn_walk_kind;
1306 static vn_lookup_kind default_vn_walk_kind;
1308 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1309 with the current VUSE and performs the expression lookup. */
1312 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1314 vn_reference_t vr = (vn_reference_t)vr_;
1319 *last_vuse_ptr = vuse;
1321 /* Fixup vuse and hash. */
/* The vuse's SSA version was added into the hash additively (see
   vn_reference_compute_hash), so it can be swapped by subtracting the
   old version and adding the new one.  */
1323 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1324 vr->vuse = SSA_VAL (vuse);
1326 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1328 hash = vr->hashcode;
1329 slot = htab_find_slot_with_hash (current_info->references, vr,
1331 if (!slot && current_info == optimistic_info)
1332 slot = htab_find_slot_with_hash (valid_info->references, vr,
1340 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1341 from the statement defining VUSE and if not successful tries to
1342 translate *REFP and VR_ through an aggregate copy at the defintion
1346 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1348 vn_reference_t vr = (vn_reference_t)vr_;
1349 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1351 HOST_WIDE_INT offset, maxsize;
1352 static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1354 bool lhs_ref_ok = false;
1356 /* First try to disambiguate after value-replacing in the definitions LHS. */
1357 if (is_gimple_assign (def_stmt))
1359 VEC (vn_reference_op_s, heap) *tem;
1360 tree lhs = gimple_assign_lhs (def_stmt);
1361 bool valueized_anything = false;
1362 /* Avoid re-allocation overhead. */
1363 VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1364 copy_reference_ops_from_ref (lhs, &lhs_ops);
1366 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1367 gcc_assert (lhs_ops == tem);
1368 if (valueized_anything)
1370 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1371 get_alias_set (lhs),
1372 TREE_TYPE (lhs), lhs_ops);
1374 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1379 ao_ref_init (&lhs_ref, lhs);
1384 base = ao_ref_base (ref);
1385 offset = ref->offset;
1386 maxsize = ref->max_size;
1388 /* If we cannot constrain the size of the reference we cannot
1389 test if anything kills it. */
1393 /* We can't deduce anything useful from clobbers. */
1394 if (gimple_clobber_p (def_stmt))
1397 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1398 from that definition.
1400 if (is_gimple_reg_type (vr->type)
1401 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1402 && integer_zerop (gimple_call_arg (def_stmt, 1))
1403 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1404 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1406 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1408 HOST_WIDE_INT offset2, size2, maxsize2;
1409 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1410 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1411 if ((unsigned HOST_WIDE_INT)size2 / 8
1412 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1414 && operand_equal_p (base, base2, 0)
1415 && offset2 <= offset
1416 && offset2 + size2 >= offset + maxsize)
1418 tree val = build_zero_cst (vr->type);
1419 unsigned int value_id = get_or_alloc_constant_value_id (val);
1420 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1421 VEC_copy (vn_reference_op_s,
1422 heap, vr->operands),
1427 /* 2) Assignment from an empty CONSTRUCTOR. */
1428 else if (is_gimple_reg_type (vr->type)
1429 && gimple_assign_single_p (def_stmt)
1430 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1431 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1434 HOST_WIDE_INT offset2, size2, maxsize2;
1435 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1436 &offset2, &size2, &maxsize2);
1438 && operand_equal_p (base, base2, 0)
1439 && offset2 <= offset
1440 && offset2 + size2 >= offset + maxsize)
1442 tree val = build_zero_cst (vr->type);
1443 unsigned int value_id = get_or_alloc_constant_value_id (val);
1444 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1445 VEC_copy (vn_reference_op_s,
1446 heap, vr->operands),
1451 /* 3) Assignment from a constant. We can use folds native encode/interpret
1452 routines to extract the assigned bits. */
1453 else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
1454 && ref->size == maxsize
1455 && maxsize % BITS_PER_UNIT == 0
1456 && offset % BITS_PER_UNIT == 0
1457 && is_gimple_reg_type (vr->type)
1458 && gimple_assign_single_p (def_stmt)
1459 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1462 HOST_WIDE_INT offset2, size2, maxsize2;
1463 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1464 &offset2, &size2, &maxsize2);
1466 && maxsize2 == size2
1467 && size2 % BITS_PER_UNIT == 0
1468 && offset2 % BITS_PER_UNIT == 0
1469 && operand_equal_p (base, base2, 0)
1470 && offset2 <= offset
1471 && offset2 + size2 >= offset + maxsize)
1473 /* We support up to 512-bit values (for V8DFmode). */
1474 unsigned char buffer[64];
1477 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1478 buffer, sizeof (buffer));
1481 tree val = native_interpret_expr (vr->type,
1483 + ((offset - offset2)
1485 ref->size / BITS_PER_UNIT);
1488 unsigned int value_id = get_or_alloc_constant_value_id (val);
1489 return vn_reference_insert_pieces
1490 (vuse, vr->set, vr->type,
1491 VEC_copy (vn_reference_op_s, heap, vr->operands),
1498 /* 4) Assignment from an SSA name which definition we may be able
1499 to access pieces from. */
1500 else if (ref->size == maxsize
1501 && is_gimple_reg_type (vr->type)
1502 && gimple_assign_single_p (def_stmt)
1503 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1505 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1506 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1507 if (is_gimple_assign (def_stmt2)
1508 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1509 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1510 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1513 HOST_WIDE_INT offset2, size2, maxsize2, off;
1514 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1515 &offset2, &size2, &maxsize2);
1516 off = offset - offset2;
1518 && maxsize2 == size2
1519 && operand_equal_p (base, base2, 0)
1520 && offset2 <= offset
1521 && offset2 + size2 >= offset + maxsize)
1523 tree val = NULL_TREE;
1525 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1526 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1529 val = gimple_assign_rhs1 (def_stmt2);
1530 else if (off == elsz)
1531 val = gimple_assign_rhs2 (def_stmt2);
1533 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1536 tree ctor = gimple_assign_rhs1 (def_stmt2);
1537 unsigned i = off / elsz;
1538 if (i < CONSTRUCTOR_NELTS (ctor))
1540 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1541 if (compare_tree_int (elt->index, i) == 0)
1547 unsigned int value_id = get_or_alloc_constant_value_id (val);
1548 return vn_reference_insert_pieces
1549 (vuse, vr->set, vr->type,
1550 VEC_copy (vn_reference_op_s, heap, vr->operands),
1557 /* 5) For aggregate copies translate the reference through them if
1558 the copy kills ref. */
1559 else if (vn_walk_kind == VN_WALKREWRITE
1560 && gimple_assign_single_p (def_stmt)
1561 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1562 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1563 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1566 HOST_WIDE_INT offset2, size2, maxsize2;
1568 VEC (vn_reference_op_s, heap) *rhs = NULL;
1569 vn_reference_op_t vro;
1575 /* See if the assignment kills REF. */
1576 base2 = ao_ref_base (&lhs_ref);
1577 offset2 = lhs_ref.offset;
1578 size2 = lhs_ref.size;
1579 maxsize2 = lhs_ref.max_size;
1581 || (base != base2 && !operand_equal_p (base, base2, 0))
1583 || offset2 + size2 < offset + maxsize)
1586 /* Find the common base of ref and the lhs. lhs_ops already
1587 contains valueized operands for the lhs. */
1588 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1589 j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
1590 while (j >= 0 && i >= 0
1591 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1593 VEC_index (vn_reference_op_s, lhs_ops, j)))
1599 /* ??? The innermost op should always be a MEM_REF and we already
1600 checked that the assignment to the lhs kills vr. Thus for
1601 aggregate copies using char[] types the vn_reference_op_eq
1602 may fail when comparing types for compatibility. But we really
1603 don't care here - further lookups with the rewritten operands
1604 will simply fail if we messed up types too badly. */
1605 if (j == 0 && i >= 0
1606 && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1607 && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1608 && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1609 == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1612 /* i now points to the first additional op.
1613 ??? LHS may not be completely contained in VR, one or more
1614 VIEW_CONVERT_EXPRs could be in its way. We could at least
1615 try handling outermost VIEW_CONVERT_EXPRs. */
1619 /* Now re-write REF to be based on the rhs of the assignment. */
1620 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1621 /* We need to pre-pend vr->operands[0..i] to rhs. */
1622 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1623 > VEC_length (vn_reference_op_s, vr->operands))
1625 VEC (vn_reference_op_s, heap) *old = vr->operands;
1626 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1627 i + 1 + VEC_length (vn_reference_op_s, rhs));
1628 if (old == shared_lookup_references
1629 && vr->operands != old)
1630 shared_lookup_references = NULL;
1633 VEC_truncate (vn_reference_op_s, vr->operands,
1634 i + 1 + VEC_length (vn_reference_op_s, rhs));
1635 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1636 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1637 VEC_free (vn_reference_op_s, heap, rhs);
1638 vr->hashcode = vn_reference_compute_hash (vr);
1640 /* Adjust *ref from the new operands. */
1641 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1643 /* This can happen with bitfields. */
1644 if (ref->size != r.size)
1648 /* Do not update last seen VUSE after translating. */
1649 last_vuse_ptr = NULL;
1651 /* Keep looking for the adjusted *REF / VR pair. */
1655 /* 6) For memcpy copies translate the reference through them if
1656 the copy kills ref. */
1657 else if (vn_walk_kind == VN_WALKREWRITE
1658 && is_gimple_reg_type (vr->type)
1659 /* ??? Handle BCOPY as well. */
1660 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1661 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1662 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1663 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1664 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1665 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1666 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1667 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1671 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1672 vn_reference_op_s op;
1676 /* Only handle non-variable, addressable refs. */
1677 if (ref->size != maxsize
1678 || offset % BITS_PER_UNIT != 0
1679 || ref->size % BITS_PER_UNIT != 0)
1682 /* Extract a pointer base and an offset for the destination. */
1683 lhs = gimple_call_arg (def_stmt, 0);
1685 if (TREE_CODE (lhs) == SSA_NAME)
1686 lhs = SSA_VAL (lhs);
1687 if (TREE_CODE (lhs) == ADDR_EXPR)
1689 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1693 if (TREE_CODE (tem) == MEM_REF
1694 && host_integerp (TREE_OPERAND (tem, 1), 1))
1696 lhs = TREE_OPERAND (tem, 0);
1697 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1699 else if (DECL_P (tem))
1700 lhs = build_fold_addr_expr (tem);
1704 if (TREE_CODE (lhs) != SSA_NAME
1705 && TREE_CODE (lhs) != ADDR_EXPR)
1708 /* Extract a pointer base and an offset for the source. */
1709 rhs = gimple_call_arg (def_stmt, 1);
1711 if (TREE_CODE (rhs) == SSA_NAME)
1712 rhs = SSA_VAL (rhs);
1713 if (TREE_CODE (rhs) == ADDR_EXPR)
1715 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1719 if (TREE_CODE (tem) == MEM_REF
1720 && host_integerp (TREE_OPERAND (tem, 1), 1))
1722 rhs = TREE_OPERAND (tem, 0);
1723 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1725 else if (DECL_P (tem))
1726 rhs = build_fold_addr_expr (tem);
1730 if (TREE_CODE (rhs) != SSA_NAME
1731 && TREE_CODE (rhs) != ADDR_EXPR)
1734 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1736 /* The bases of the destination and the references have to agree. */
1737 if ((TREE_CODE (base) != MEM_REF
1739 || (TREE_CODE (base) == MEM_REF
1740 && (TREE_OPERAND (base, 0) != lhs
1741 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1743 && (TREE_CODE (lhs) != ADDR_EXPR
1744 || TREE_OPERAND (lhs, 0) != base)))
1747 /* And the access has to be contained within the memcpy destination. */
1748 at = offset / BITS_PER_UNIT;
1749 if (TREE_CODE (base) == MEM_REF)
1750 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1752 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1755 /* Make room for 2 operands in the new reference. */
1756 if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1758 VEC (vn_reference_op_s, heap) *old = vr->operands;
1759 VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
1760 if (old == shared_lookup_references
1761 && vr->operands != old)
1762 shared_lookup_references = NULL;
1765 VEC_truncate (vn_reference_op_s, vr->operands, 2);
1767 /* The looked-through reference is a simple MEM_REF. */
1768 memset (&op, 0, sizeof (op));
1770 op.opcode = MEM_REF;
1771 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1772 op.off = at - lhs_offset + rhs_offset;
1773 VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1774 op.type = TREE_TYPE (rhs);
1775 op.opcode = TREE_CODE (rhs);
1778 VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1779 vr->hashcode = vn_reference_compute_hash (vr);
1781 /* Adjust *ref from the new operands. */
1782 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1784 /* This can happen with bitfields. */
1785 if (ref->size != r.size)
1789 /* Do not update last seen VUSE after translating. */
1790 last_vuse_ptr = NULL;
1792 /* Keep looking for the adjusted *REF / VR pair. */
1796 /* Bail out and stop walking. */
1800 /* Lookup a reference operation by it's parts, in the current hash table.
1801 Returns the resulting value number if it exists in the hash table,
1802 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1803 vn_reference_t stored in the hashtable if something is found. */
1806 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1807 VEC (vn_reference_op_s, heap) *operands,
1808 vn_reference_t *vnresult, vn_lookup_kind kind)
1810 struct vn_reference_s vr1;
1818 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Copy OPERANDS into the shared scratch vector so that valueization
   does not clobber the caller's vector.  */
1819 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1820 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1821 VEC_length (vn_reference_op_s, operands));
1822 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1823 VEC_address (vn_reference_op_s, operands),
1824 sizeof (vn_reference_op_s)
1825 * VEC_length (vn_reference_op_s, operands));
1826 vr1.operands = operands = shared_lookup_references
1827 = valueize_refs (shared_lookup_references);
1830 vr1.hashcode = vn_reference_compute_hash (&vr1);
/* A reference that folds to a constant needs no table lookup.  */
1831 if ((cst = fully_constant_vn_reference_p (&vr1)))
1834 vn_reference_lookup_1 (&vr1, vnresult);
1836 && kind != VN_NOWALK
1840 vn_walk_kind = kind;
1841 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1843 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1844 vn_reference_lookup_2,
1845 vn_reference_lookup_3, &vr1);
/* The walker may have replaced the operand vector; release the one it
   left behind to avoid a leak.  */
1846 if (vr1.operands != operands)
1847 VEC_free (vn_reference_op_s, heap, vr1.operands);
1851 return (*vnresult)->result;
1856 /* Lookup OP in the current hash table, and return the resulting value
1857 number if it exists in the hash table. Return NULL_TREE if it does
1858 not exist in the hash table or if the result field of the structure
1859 was NULL.. VNRESULT will be filled in with the vn_reference_t
1860 stored in the hashtable if one exists. */
1863 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1864 vn_reference_t *vnresult)
1866 VEC (vn_reference_op_s, heap) *operands;
1867 struct vn_reference_s vr1;
/* Note: identifier spelling "valuezied" is a historical typo, kept
   as-is.  */
1869 bool valuezied_anything;
1874 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1875 vr1.operands = operands
1876 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1877 vr1.type = TREE_TYPE (op);
1878 vr1.set = get_alias_set (op);
1879 vr1.hashcode = vn_reference_compute_hash (&vr1);
/* A reference that folds to a constant short-circuits the lookup.  */
1880 if ((cst = fully_constant_vn_reference_p (&vr1)))
1883 if (kind != VN_NOWALK
1886 vn_reference_t wvnresult;
1888 /* Make sure to use a valueized reference if we valueized anything.
1889 Otherwise preserve the full reference for advanced TBAA. */
1890 if (!valuezied_anything
1891 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1893 ao_ref_init (&r, op);
1894 vn_walk_kind = kind;
1896 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1897 vn_reference_lookup_2,
1898 vn_reference_lookup_3, &vr1);
/* Free the operand vector if the walk rewrote it.  */
1899 if (vr1.operands != operands)
1900 VEC_free (vn_reference_op_s, heap, vr1.operands);
1904 *vnresult = wvnresult;
1905 return wvnresult->result;
/* Fall back to a plain hash-table probe when not walking vuses.  */
1911 return vn_reference_lookup_1 (&vr1, vnresult);
1915 /* Insert OP into the current hash table with a value number of
1916 RESULT, and return the resulting reference structure we created. */
1919 vn_reference_insert (tree op, tree result, tree vuse)
1924 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
/* An SSA result carries its own value id; constants share one pool.  */
1925 if (TREE_CODE (result) == SSA_NAME)
1926 vr1->value_id = VN_INFO (result)->value_id;
1928 vr1->value_id = get_or_alloc_constant_value_id (result);
1929 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1930 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1931 vr1->type = TREE_TYPE (op);
1932 vr1->set = get_alias_set (op);
1933 vr1->hashcode = vn_reference_compute_hash (vr1);
1934 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1936 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1939 /* Because we lookup stores using vuses, and value number failures
1940 using the vdefs (see visit_reference_op_store for how and why),
1941 it's possible that on failure we may try to insert an already
1942 inserted store. This is not wrong, there is no ssa name for a
1943 store that we could use as a differentiator anyway. Thus, unlike
1944 the other lookup functions, you cannot gcc_assert (!*slot)
1947 /* But free the old slot in case of a collision. */
1949 free_reference (*slot);
1955 /* Insert a reference by it's pieces into the current hash table with
1956 a value number of RESULT. Return the resulting reference
1957 structure we created. */
1960 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1961 VEC (vn_reference_op_s, heap) *operands,
1962 tree result, unsigned int value_id)
1968 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1969 vr1->value_id = value_id;
1970 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Takes ownership of OPERANDS (valueized in place).  */
1971 vr1->operands = valueize_refs (operands);
1974 vr1->hashcode = vn_reference_compute_hash (vr1);
1975 if (result && TREE_CODE (result) == SSA_NAME)
1976 result = SSA_VAL (result);
1977 vr1->result = result;
1979 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1982 /* At this point we should have all the things inserted that we have
1983 seen before, and we should never try inserting something that
/* Defensive free below is unreachable while the assert holds.  */
1985 gcc_assert (!*slot);
1987 free_reference (*slot);
1993 /* Compute and return the hash value for nary operation VBO1. */
1996 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
/* Canonicalize in place: replace SSA operands by their current value
   numbers ...  */
2001 for (i = 0; i < vno1->length; ++i)
2002 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2003 vno1->op[i] = SSA_VAL (vno1->op[i]);
/* ... and order the operands of commutative binary operations so that
   equal operations hash (and compare) identically.  */
2005 if (vno1->length == 2
2006 && commutative_tree_code (vno1->opcode)
2007 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2009 tree temp = vno1->op[0];
2010 vno1->op[0] = vno1->op[1];
2014 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2015 for (i = 0; i < vno1->length; ++i)
2016 hash = iterative_hash_expr (vno1->op[i], hash);
2021 /* Return the computed hashcode for nary operation P1. */
2024 vn_nary_op_hash (const void *p1)
2026 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
/* Hashcode was precomputed by vn_nary_op_compute_hash.  */
2027 return vno1->hashcode;
2030 /* Compare nary operations P1 and P2 and return true if they are
/* htab equality callback; cheap checks (hash, length, opcode, type)
   come before the per-operand comparison.  */
2034 vn_nary_op_eq (const void *p1, const void *p2)
2036 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2037 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2040 if (vno1->hashcode != vno2->hashcode)
2043 if (vno1->length != vno2->length)
2046 if (vno1->opcode != vno2->opcode
2047 || !types_compatible_p (vno1->type, vno2->type))
2050 for (i = 0; i < vno1->length; ++i)
2051 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2057 /* Initialize VNO from the pieces provided. */
2060 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2061 enum tree_code code, tree type, tree *ops)
2064 vno->length = length;
/* OPS must provide LENGTH trees; VNO must have room for them.  */
2066 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2069 /* Initialize VNO from OP. */
2072 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2076 vno->opcode = TREE_CODE (op);
/* Operand count follows from the tree code.  */
2077 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2078 vno->type = TREE_TYPE (op);
2079 for (i = 0; i < vno->length; ++i)
2080 vno->op[i] = TREE_OPERAND (op, i);
2083 /* Return the number of operands for a vn_nary ops structure from STMT. */
2086 vn_nary_length_from_stmt (gimple stmt)
2088 switch (gimple_assign_rhs_code (stmt))
/* VIEW_CONVERT_EXPR (and similar wrappers, elided above) contribute a
   single operand.  */
2092 case VIEW_CONVERT_EXPR:
/* A CONSTRUCTOR contributes one operand per element.  */
2096 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
/* Default: all rhs operands of the assignment.  */
2099 return gimple_num_ops (stmt) - 1;
2103 /* Initialize VNO from STMT. */
2106 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2110 vno->opcode = gimple_assign_rhs_code (stmt);
2111 vno->type = gimple_expr_type (stmt);
/* Mirror the operand-count cases of vn_nary_length_from_stmt.  */
2112 switch (vno->opcode)
2116 case VIEW_CONVERT_EXPR:
2118 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
/* CONSTRUCTOR: one operand per element value.  */
2122 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2123 for (i = 0; i < vno->length; ++i)
2124 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
/* Default: copy the rhs operands of the assignment.  */
2128 vno->length = gimple_num_ops (stmt) - 1;
2129 for (i = 0; i < vno->length; ++i)
2130 vno->op[i] = gimple_op (stmt, i + 1);
2134 /* Compute the hashcode for VNO and look for it in the hash table;
2135 return the resulting value number if it exists in the hash table.
2136 Return NULL_TREE if it does not exist in the hash table or if the
2137 result field of the operation is NULL. VNRESULT will contain the
2138 vn_nary_op_t from the hashtable if it exists. */
2141 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2148 vno->hashcode = vn_nary_op_compute_hash (vno);
/* As elsewhere, fall back to the valid table during the optimistic
   iteration.  */
2149 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2151 if (!slot && current_info == optimistic_info)
2152 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2157 *vnresult = (vn_nary_op_t)*slot;
2158 return ((vn_nary_op_t)*slot)->result;
2161 /* Lookup a n-ary operation by its pieces and return the resulting value
2162 number if it exists in the hash table. Return NULL_TREE if it does
2163 not exist in the hash table or if the result field of the operation
2164 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2168 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2169 tree type, tree *ops, vn_nary_op_t *vnresult)
/* Use a stack temporary sized for LENGTH operands.  */
2171 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2172 sizeof_vn_nary_op (length));
2173 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2174 return vn_nary_op_lookup_1 (vno1, vnresult);
2177 /* Lookup OP in the current hash table, and return the resulting value
2178 number if it exists in the hash table. Return NULL_TREE if it does
2179 not exist in the hash table or if the result field of the operation
2180 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2184 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
/* Stack temporary sized by OP's tree-code operand count.  */
2187 = XALLOCAVAR (struct vn_nary_op_s,
2188 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2189 init_vn_nary_op_from_op (vno1, op);
2190 return vn_nary_op_lookup_1 (vno1, vnresult);
2193 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2194 value number if it exists in the hash table. Return NULL_TREE if
2195 it does not exist in the hash table. VNRESULT will contain the
2196 vn_nary_op_t from the hashtable if it exists. */
2199 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
/* Stack temporary sized by the statement's rhs operand count.  */
2202 = XALLOCAVAR (struct vn_nary_op_s,
2203 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)))
2204 init_vn_nary_op_from_stmt (vno1, stmt);
2205 return vn_nary_op_lookup_1 (vno1, vnresult);
2208 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2211 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
/* Caller is responsible for initializing all fields.  */
2213 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2216 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2220 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2222 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
/* FIX(review): "&current_info" had been mangled into the mojibake
   "¤t_info" ("&curren" misread as the HTML entity for the currency
   sign); restore the address-of expression required by
   alloc_vn_nary_op_noinit's "struct obstack *" parameter.  */
2223 &current_info->nary_obstack);
2225 vno1->value_id = value_id;
2226 vno1->length = length;
2227 vno1->result = result;
2232 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2233 VNO->HASHCODE first. */
2236 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2241 vno->hashcode = vn_nary_op_compute_hash (vno);
/* N-ary operations must not be inserted twice.  */
2243 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2244 gcc_assert (!*slot);
2250 /* Insert a n-ary operation into the current hash table using it's
2251 pieces. Return the vn_nary_op_t structure we created and put in
2255 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2256 tree type, tree *ops,
2257 tree result, unsigned int value_id)
/* Allocate on the current table's obstack, then hash and insert.  */
2259 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2260 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2261 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2264 /* Insert OP into the current hash table with a value number of
2265 RESULT. Return the vn_nary_op_t structure we created and put in
2269 vn_nary_op_insert (tree op, tree result)
2271 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
/* RESULT must be an SSA name here: its VN_INFO supplies the value id.  */
2274 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2275 init_vn_nary_op_from_op (vno1, op);
2276 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2279 /* Insert the rhs of STMT into the current hash table with a value number of
2283 vn_nary_op_insert_stmt (gimple stmt, tree result)
/* Size the allocation by the statement's rhs operand count.  */
2286 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2287 result, VN_INFO (result)->value_id);
2288 init_vn_nary_op_from_stmt (vno1, stmt);
2289 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2292 /* Compute a hashcode for PHI operation VP1 and return it. */
2294 static inline hashval_t
2295 vn_phi_compute_hash (vn_phi_t vp1)
/* Seed the hash with the defining basic block's index.  */
2302 result = vp1->block->index;
2304 /* If all PHI arguments are constants we need to distinguish
2305 the PHI node via its type. */
2306 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2307 result += (INTEGRAL_TYPE_P (type)
2308 + (INTEGRAL_TYPE_P (type)
2309 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
/* Skip VN_TOP arguments so phis differing only in VN_TOP positions
   hash the same, matching the equality in vn_phi_eq.  */
2311 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2313 if (phi1op == VN_TOP)
2315 result = iterative_hash_expr (phi1op, result);
2321 /* Return the computed hashcode for phi operation P1. */
2324 vn_phi_hash (const void *p1)
2326 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
/* Precomputed by vn_phi_compute_hash.  */
2327 return vp1->hashcode;
2330 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2333 vn_phi_eq (const void *p1, const void *p2)
2335 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2336 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2338 if (vp1->hashcode != vp2->hashcode)
/* Only phis in the same basic block can be equal.  */
2341 if (vp1->block == vp2->block)
2346 /* If the PHI nodes do not have compatible types
2347 they are not the same. */
2348 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2349 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2352 /* Any phi in the same block will have it's arguments in the
2353 same edge order, because of how we store phi nodes. */
2354 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2356 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP acts as a wildcard matching anything.  */
2357 if (phi1op == VN_TOP || phi2op == VN_TOP)
2359 if (!expressions_equal_p (phi1op, phi2op))
/* Shared scratch vector for the canonicalized phi arguments below.  */
2367 static VEC(tree, heap) *shared_lookup_phiargs;
2369 /* Lookup PHI in the current hash table, and return the resulting
2370 value number if it exists in the hash table. Return NULL_TREE if
2371 it does not exist in the hash table. */
2374 vn_phi_lookup (gimple phi)
2377 struct vn_phi_s vp1;
2380 VEC_truncate (tree, shared_lookup_phiargs, 0);
2382 /* Canonicalize the SSA_NAME's to their value number. */
2383 for (i = 0; i < gimple_phi_num_args (phi); i++)
2385 tree def = PHI_ARG_DEF (phi, i);
2386 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2387 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2389 vp1.phiargs = shared_lookup_phiargs;
2390 vp1.block = gimple_bb (phi);
2391 vp1.hashcode = vn_phi_compute_hash (&vp1);
/* Probe current table, falling back to the valid table during the
   optimistic iteration.  */
2392 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2394 if (!slot && current_info == optimistic_info)
2395 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2399 return ((vn_phi_t)*slot)->result;
2402 /* Insert PHI into the current hash table with a value number of
2406 vn_phi_insert (gimple phi, tree result)
2409 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
/* Unlike vn_phi_lookup this builds a fresh, owned argument vector.  */
2411 VEC (tree, heap) *args = NULL;
2413 /* Canonicalize the SSA_NAME's to their value number. */
2414 for (i = 0; i < gimple_phi_num_args (phi); i++)
2416 tree def = PHI_ARG_DEF (phi, i);
2417 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2418 VEC_safe_push (tree, heap, args, def);
2420 vp1->value_id = VN_INFO (result)->value_id;
2421 vp1->phiargs = args;
2422 vp1->block = gimple_bb (phi);
2423 vp1->result = result;
2424 vp1->hashcode = vn_phi_compute_hash (vp1);
2426 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2429 /* Because we iterate over phi operations more than once, it's
2430 possible the slot might already exist here, hence no assert.*/
2436 /* Print set of components in strongly connected component SCC to OUT. */
2439 print_scc (FILE *out, VEC (tree, heap) *scc)
2444 fprintf (out, "SCC consists of: ");
2445 FOR_EACH_VEC_ELT (tree, scc, i, var)
2447 print_generic_expr (out, var, 0);
2450 fprintf (out, "\n");
2453 /* Set the value number of FROM to TO, return true if it has changed
2457 set_ssa_val_to (tree from, tree to)
2459 tree currval = SSA_VAL (from);
/* FROM valued as itself means VARYING; never "improve" away from it
   within an iteration (would break monotonic lattice movement).  */
2463 if (currval == from)
2465 if (dump_file && (dump_flags & TDF_DETAILS))
2467 fprintf (dump_file, "Not changing value number of ");
2468 print_generic_expr (dump_file, from, 0);
2469 fprintf (dump_file, " from VARYING to ");
2470 print_generic_expr (dump_file, to, 0);
2471 fprintf (dump_file, "\n");
/* Do not value-number to SSA names occurring in abnormal phis.  */
2475 else if (TREE_CODE (to) == SSA_NAME
2476 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2480 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2481 and invariants. So assert that here. */
2482 gcc_assert (to != NULL_TREE
2484 || TREE_CODE (to) == SSA_NAME
2485 || is_gimple_min_invariant (to)));
2487 if (dump_file && (dump_flags & TDF_DETAILS))
2489 fprintf (dump_file, "Setting value number of ");
2490 print_generic_expr (dump_file, from, 0);
2491 fprintf (dump_file, " to ");
2492 print_generic_expr (dump_file, to, 0);
/* Only record a change when the new value differs structurally.  */
2495 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2497 VN_INFO (from)->valnum = to;
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2499 fprintf (dump_file, " (changed)\n");
2502 if (dump_file && (dump_flags & TDF_DETAILS))
2503 fprintf (dump_file, "\n");
2507 /* Set all definitions in STMT to value number to themselves.
2508 Return true if a value number changed. */
2511 defs_to_varying (gimple stmt)
2513 bool changed = false;
2517 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2519 tree def = DEF_FROM_PTR (defp);
2521 VN_INFO (def)->use_processed = true;
/* Valuing a def as itself marks it VARYING (see set_ssa_val_to).  */
2522 changed |= set_ssa_val_to (def, def);
2527 static bool expr_has_constants (tree expr);
2528 static tree valueize_expr (tree expr);
2530 /* Visit a copy between LHS and RHS, return true if the value number
/* ... of LHS has changed as a result.  */
2534 visit_copy (tree lhs, tree rhs)
2536 /* Follow chains of copies to their destination. */
/* SSA_VAL (rhs) != rhs guards both against VARYING (self-valued) names
   and terminates the chain walk.  */
2537 while (TREE_CODE (rhs) == SSA_NAME
2538 && SSA_VAL (rhs) != rhs)
2539 rhs = SSA_VAL (rhs);
2541 /* The copy may have a more interesting constant filled expression
2542 (we don't, since we know our RHS is just an SSA name). */
2543 if (TREE_CODE (rhs) == SSA_NAME)
2545 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2546 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2549 return set_ssa_val_to (lhs, rhs);
2552 /* Visit a nary operator RHS, value number it, and return true if the
2553 value number of LHS has changed as a result. */
2556 visit_nary_op (tree lhs, gimple stmt)
2558 bool changed = false;
/* Hash-table lookup first; on a hit LHS gets the recorded value, on a
   miss LHS becomes its own value and the stmt is inserted for later
   lookups.  */
2559 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2562 changed = set_ssa_val_to (lhs, result);
2565 changed = set_ssa_val_to (lhs, lhs);
2566 vn_nary_op_insert_stmt (stmt, lhs);
2572 /* Visit a call STMT storing into LHS. Return true if the value number
2573 of the LHS has changed as a result. */
/* Only reached for pure/const calls (see the caller visit_use); the
   call is value numbered as a reference keyed on its valueized vuse,
   operand list, type and hashcode.  */
2576 visit_reference_op_call (tree lhs, gimple stmt)
2578 bool changed = false;
2579 struct vn_reference_s vr1;
2581 tree vuse = gimple_vuse (stmt);
2583 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
/* Shared (non-allocating) operand vector: valid only until the next
   lookup, which is fine for this transient probe.  */
2584 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2585 vr1.type = gimple_expr_type (stmt);
2587 vr1.hashcode = vn_reference_compute_hash (&vr1);
2588 result = vn_reference_lookup_1 (&vr1, NULL);
2591 changed = set_ssa_val_to (lhs, result);
2592 if (TREE_CODE (result) == SSA_NAME
2593 && VN_INFO (result)->has_constants)
2594 VN_INFO (lhs)->has_constants = true;
/* Lookup miss: LHS values to itself and a heap-allocated copy of the
   reference is inserted into the current table.  */
2600 changed = set_ssa_val_to (lhs, lhs);
2601 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2602 vr2->vuse = vr1.vuse;
2603 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2604 vr2->type = vr1.type;
2606 vr2->hashcode = vr1.hashcode;
2608 slot = htab_find_slot_with_hash (current_info->references,
2609 vr2, vr2->hashcode, INSERT);
2611 free_reference (*slot);
2618 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2619 and return true if the value number of the LHS has changed as a result. */
2622 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2624 bool changed = false;
/* last_vuse_ptr lets vn_reference_lookup report the vuse of the store
   actually providing the value, so the insert below uses the most
   precise vuse rather than the statement's own.  */
2628 last_vuse = gimple_vuse (stmt);
2629 last_vuse_ptr = &last_vuse;
2630 result = vn_reference_lookup (op, gimple_vuse (stmt),
2631 default_vn_walk_kind, NULL);
2632 last_vuse_ptr = NULL;
2634 /* If we have a VCE, try looking up its operand as it might be stored in
2635 a different type. */
2636 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2637 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2638 default_vn_walk_kind, NULL);
2640 /* We handle type-punning through unions by value-numbering based
2641 on offset and size of the access. Be prepared to handle a
2642 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2644 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2646 /* We will be setting the value number of lhs to the value number
2647 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2648 So first simplify and lookup this expression to see if it
2649 is already available. */
2650 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2651 if ((CONVERT_EXPR_P (val)
2652 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2653 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
/* Try combining the conversion with the defining expression of its
   SSA operand, ignoring overflow so the fold is valid for VN.  */
2655 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2656 if ((CONVERT_EXPR_P (tem)
2657 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2658 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2659 TREE_TYPE (val), tem)))
2663 if (!is_gimple_min_invariant (val)
2664 && TREE_CODE (val) != SSA_NAME)
2665 result = vn_nary_op_lookup (val, NULL);
2666 /* If the expression is not yet available, value-number lhs to
2667 a new SSA_NAME we create. */
2670 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2671 /* Initialize value-number information properly. */
2672 VN_INFO_GET (result)->valnum = result;
2673 VN_INFO (result)->value_id = get_next_value_id ();
2674 VN_INFO (result)->expr = val;
2675 VN_INFO (result)->has_constants = expr_has_constants (val);
2676 VN_INFO (result)->needs_insertion = true;
2677 /* As all "inserted" statements are singleton SCCs, insert
2678 to the valid table. This is strictly needed to
2679 avoid re-generating new value SSA_NAMEs for the same
2680 expression during SCC iteration over and over (the
2681 optimistic table gets cleared after each iteration).
2682 We do not need to insert into the optimistic table, as
2683 lookups there will fall back to the valid table. */
2684 if (current_info == optimistic_info)
2686 current_info = valid_info;
2687 vn_nary_op_insert (val, result);
2688 current_info = optimistic_info;
2691 vn_nary_op_insert (val, result);
2692 if (dump_file && (dump_flags & TDF_DETAILS))
2694 fprintf (dump_file, "Inserting name ");
2695 print_generic_expr (dump_file, result, 0);
2696 fprintf (dump_file, " for expression ");
2697 print_generic_expr (dump_file, val, 0);
2698 fprintf (dump_file, "\n");
/* Lookup succeeded: propagate value (and constant-ness) to LHS.  */
2705 changed = set_ssa_val_to (lhs, result);
2706 if (TREE_CODE (result) == SSA_NAME
2707 && VN_INFO (result)->has_constants)
2709 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2710 VN_INFO (lhs)->has_constants = true;
/* Lookup failed: LHS is its own value; record the load under the
   most precise vuse discovered above.  */
2715 changed = set_ssa_val_to (lhs, lhs);
2716 vn_reference_insert (op, lhs, last_vuse);
2723 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2724 and return true if the value number of the LHS has changed as a result. */
2727 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2729 bool changed = false;
2731 bool resultsame = false;
2733 /* First we want to lookup using the *vuses* from the store and see
2734 if there the last store to this location with the same address
2737 The vuses represent the memory state before the store. If the
2738 memory state, address, and value of the store is the same as the
2739 last store to this location, then this store will produce the
2740 same memory state as that store.
2742 In this case the vdef versions for this store are value numbered to those
2743 vuse versions, since they represent the same memory state after
2746 Otherwise, the vdefs for the store are used when inserting into
2747 the table, since the store generates a new memory state. */
/* VN_NOWALK: compare only against the literally-last store, do not
   walk the vuse chain.  */
2749 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2753 if (TREE_CODE (result) == SSA_NAME)
2754 result = SSA_VAL (result);
2755 if (TREE_CODE (op) == SSA_NAME)
2757 resultsame = expressions_equal_p (result, op);
2760 if (!result || !resultsame)
2764 if (dump_file && (dump_flags & TDF_DETAILS))
2766 fprintf (dump_file, "No store match\n");
2767 fprintf (dump_file, "Value numbering store ");
2768 print_generic_expr (dump_file, lhs, 0);
2769 fprintf (dump_file, " to ");
2770 print_generic_expr (dump_file, op, 0);
2771 fprintf (dump_file, "\n");
2773 /* Have to set value numbers before insert, since insert is
2774 going to valueize the references in-place. */
2775 if ((vdef = gimple_vdef (stmt)))
2777 VN_INFO (vdef)->use_processed = true;
2778 changed |= set_ssa_val_to (vdef, vdef);
2781 /* Do not insert structure copies into the tables. */
2782 if (is_gimple_min_invariant (op)
2783 || is_gimple_reg (op))
2784 vn_reference_insert (lhs, op, vdef);
2788 /* We had a match, so value number the vdef to have the value
2789 number of the vuse it came from. */
/* The store is redundant: memory state after it equals the state
   before it, so the vdef inherits the vuse's value.  */
2792 if (dump_file && (dump_flags & TDF_DETAILS))
2793 fprintf (dump_file, "Store matched earlier value,"
2794 "value numbering store vdefs to matching vuses.\n");
2796 def = gimple_vdef (stmt);
2797 use = gimple_vuse (stmt);
2799 VN_INFO (def)->use_processed = true;
2800 changed |= set_ssa_val_to (def, SSA_VAL (use));
2806 /* Visit and value number PHI, return true if the value number
/* ... of the PHI result changed.  Strategy: (1) abnormal PHIs are
   VARYING; (2) if all non-VN_TOP arguments agree, the PHI is that
   value; (3) otherwise look for a congruent PHI in this block.  */
2810 visit_phi (gimple phi)
2812 bool changed = false;
2814 tree sameval = VN_TOP;
2815 bool allsame = true;
2818 /* TODO: We could check for this in init_sccvn, and replace this
2819 with a gcc_assert. */
2820 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2821 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2823 /* See if all non-TOP arguments have the same value. TOP is
2824 equivalent to everything, so we can ignore it. */
2825 for (i = 0; i < gimple_phi_num_args (phi); i++)
2827 tree def = PHI_ARG_DEF (phi, i);
2829 if (TREE_CODE (def) == SSA_NAME)
2830 def = SSA_VAL (def);
2833 if (sameval == VN_TOP)
2839 if (!expressions_equal_p (def, sameval))
2847 /* If all value numbered to the same value, the phi node has that
2851 if (is_gimple_min_invariant (sameval))
2853 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2854 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2858 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2859 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2862 if (TREE_CODE (sameval) == SSA_NAME)
2863 return visit_copy (PHI_RESULT (phi), sameval);
2865 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2868 /* Otherwise, see if it is equivalent to a phi node in this block. */
2869 result = vn_phi_lookup (phi);
2872 if (TREE_CODE (result) == SSA_NAME)
2873 changed = visit_copy (PHI_RESULT (phi), result);
2875 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No congruent PHI found: the result values to itself and this PHI is
   registered for later congruence lookups.  */
2879 vn_phi_insert (phi, PHI_RESULT (phi));
2880 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2881 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2882 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2888 /* Return true if EXPR contains constants. */
/* Shallow check only: inspects the immediate operands by tree-code
   class, it does not recurse.  */
2891 expr_has_constants (tree expr)
2893 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2896 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2899 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2900 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2901 /* Constants inside reference ops are rarely interesting, but
2902 it can take a lot of looking to find them. */
2904 case tcc_declaration:
2907 return is_gimple_min_invariant (expr);
2912 /* Return true if STMT contains constants. */
/* gimple-level counterpart of expr_has_constants: only GIMPLE_ASSIGNs
   can report constants, dispatched on the RHS class.  */
2915 stmt_has_constants (gimple stmt)
2917 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2920 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2922 case GIMPLE_UNARY_RHS:
2923 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2925 case GIMPLE_BINARY_RHS:
2926 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2927 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2928 case GIMPLE_TERNARY_RHS:
2929 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2930 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2931 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2932 case GIMPLE_SINGLE_RHS:
2933 /* Constants inside reference ops are rarely interesting, but
2934 it can take a lot of looking to find them. */
2935 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2942 /* Replace SSA_NAMES in expr with their value numbers, and return the
2944 This is performed in place. */
/* NOTE(review): binary case valueizes operand 1 then falls through to
   operand 0 in the full source — the fallthrough/brace lines are elided
   in this copy.  Mutates EXPR, so callers must own the tree.  */
2947 valueize_expr (tree expr)
2949 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2952 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2955 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2962 /* Simplify the binary expression RHS, and return the result if
/* ... simplified to a gimple-safe value; NULL_TREE otherwise.  */
2966 simplify_binary_expression (gimple stmt)
2968 tree result = NULL_TREE;
2969 tree op0 = gimple_assign_rhs1 (stmt);
2970 tree op1 = gimple_assign_rhs2 (stmt);
2971 enum tree_code code = gimple_assign_rhs_code (stmt);
2973 /* This will not catch every single case we could combine, but will
2974 catch those with constants. The goal here is to simultaneously
2975 combine constants between expressions, but avoid infinite
2976 expansion of expressions during simplification. */
/* Substitute each SSA operand: use its full defining expression only
   when it carries constants (or for comparisons/COMPLEX_EXPR), else
   just its value number — this bounds expression growth.  */
2977 if (TREE_CODE (op0) == SSA_NAME)
2979 if (VN_INFO (op0)->has_constants
2980 || TREE_CODE_CLASS (code) == tcc_comparison
2981 || code == COMPLEX_EXPR)
2982 op0 = valueize_expr (vn_get_expr_for (op0));
2984 op0 = vn_valueize (op0);
2987 if (TREE_CODE (op1) == SSA_NAME)
2989 if (VN_INFO (op1)->has_constants
2990 || code == COMPLEX_EXPR)
2991 op1 = valueize_expr (vn_get_expr_for (op1));
2993 op1 = vn_valueize (op1);
2996 /* Pointer plus constant can be represented as invariant address.
2997 Do so to allow further propatation, see also tree forwprop. */
2998 if (code == POINTER_PLUS_EXPR
2999 && host_integerp (op1, 1)
3000 && TREE_CODE (op0) == ADDR_EXPR
3001 && is_gimple_min_invariant (op0))
3002 return build_invariant_address (TREE_TYPE (op0),
3003 TREE_OPERAND (op0, 0),
3004 TREE_INT_CST_LOW (op1));
3006 /* Avoid folding if nothing changed. */
3007 if (op0 == gimple_assign_rhs1 (stmt)
3008 && op1 == gimple_assign_rhs2 (stmt))
/* Defer overflow warnings across the fold so VN-internal folding does
   not emit user-visible diagnostics for code that may never execute.  */
3011 fold_defer_overflow_warnings ();
3013 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3015 STRIP_USELESS_TYPE_CONVERSION (result);
3017 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3020 /* Make sure result is not a complex expression consisting
3021 of operators of operators (IE (a + b) + (a + c))
3022 Otherwise, we will end up with unbounded expressions if
3023 fold does anything at all. */
3024 if (result && valid_gimple_rhs_p (result))
3030 /* Simplify the unary expression RHS, and return the result if
/* ... simplified to a gimple-safe value; NULL_TREE otherwise.  */
3034 simplify_unary_expression (gimple stmt)
3036 tree result = NULL_TREE;
3037 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3038 enum tree_code code = gimple_assign_rhs_code (stmt);
3040 /* We handle some tcc_reference codes here that are all
3041 GIMPLE_ASSIGN_SINGLE codes. */
3042 if (code == REALPART_EXPR
3043 || code == IMAGPART_EXPR
3044 || code == VIEW_CONVERT_EXPR
3045 || code == BIT_FIELD_REF)
3046 op0 = TREE_OPERAND (op0, 0);
3048 if (TREE_CODE (op0) != SSA_NAME)
3052 if (VN_INFO (op0)->has_constants)
3053 op0 = valueize_expr (vn_get_expr_for (op0));
3054 else if (CONVERT_EXPR_CODE_P (code)
3055 || code == REALPART_EXPR
3056 || code == IMAGPART_EXPR
3057 || code == VIEW_CONVERT_EXPR
3058 || code == BIT_FIELD_REF)
3060 /* We want to do tree-combining on conversion-like expressions.
3061 Make sure we feed only SSA_NAMEs or constants to fold though. */
3062 tree tem = valueize_expr (vn_get_expr_for (op0));
3063 if (UNARY_CLASS_P (tem)
3064 || BINARY_CLASS_P (tem)
3065 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3066 || TREE_CODE (tem) == SSA_NAME
3067 || TREE_CODE (tem) == CONSTRUCTOR
3068 || is_gimple_min_invariant (tem))
3072 /* Avoid folding if nothing changed, but remember the expression. */
3073 if (op0 == orig_op0)
/* BIT_FIELD_REF is ternary (value, size, position) despite being
   handled by this "unary" simplifier.  */
3076 if (code == BIT_FIELD_REF)
3078 tree rhs = gimple_assign_rhs1 (stmt);
3079 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3080 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3083 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3086 STRIP_USELESS_TYPE_CONVERSION (result);
3087 if (valid_gimple_rhs_p (result))
3094 /* Try to simplify RHS using equivalences and constant folding. */
/* Returns a simplified replacement tree or NULL_TREE.  Order: skip
   plain SSA copies, try lattice-based constant folding, then fall back
   to the unary/binary statement combiners above.  */
3097 try_to_simplify (gimple stmt)
3099 enum tree_code code = gimple_assign_rhs_code (stmt);
3102 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3103 in this case, there is no point in doing extra work. */
3104 if (code == SSA_NAME)
3107 /* First try constant folding based on our current lattice. */
3108 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3110 && (TREE_CODE (tem) == SSA_NAME
3111 || is_gimple_min_invariant (tem)))
3114 /* If that didn't work try combining multiple statements. */
3115 switch (TREE_CODE_CLASS (code))
3118 /* Fallthrough for some unary codes that can operate on registers. */
3119 if (!(code == REALPART_EXPR
3120 || code == IMAGPART_EXPR
3121 || code == VIEW_CONVERT_EXPR
3122 || code == BIT_FIELD_REF))
3124 /* We could do a little more with unary ops, if they expand
3125 into binary ops, but it's debatable whether it is worth it. */
3127 return simplify_unary_expression (stmt);
3129 case tcc_comparison:
3131 return simplify_binary_expression (stmt);
3140 /* Visit and value number USE, return true if the value number
/* ... changed.  Central dispatcher of the pass: classifies the
   defining statement of USE and routes it to the visit_* helpers or
   to defs_to_varying.  */
3144 visit_use (tree use)
3146 bool changed = false;
3147 gimple stmt = SSA_NAME_DEF_STMT (use);
3149 VN_INFO (use)->use_processed = true;
3151 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3152 if (dump_file && (dump_flags & TDF_DETAILS)
3153 && !SSA_NAME_IS_DEFAULT_DEF (use))
3155 fprintf (dump_file, "Value numbering ");
3156 print_generic_expr (dump_file, use, 0);
3157 fprintf (dump_file, " stmt = ");
3158 print_gimple_stmt (dump_file, stmt, 0, 0);
3161 /* Handle uninitialized uses. */
3162 if (SSA_NAME_IS_DEFAULT_DEF (use))
3163 changed = set_ssa_val_to (use, use);
3166 if (gimple_code (stmt) == GIMPLE_PHI)
3167 changed = visit_phi (stmt);
/* Volatile or throwing statements cannot be value numbered safely:
   everything they define goes VARYING.  */
3168 else if (!gimple_has_lhs (stmt)
3169 || gimple_has_volatile_ops (stmt)
3170 || stmt_could_throw_p (stmt))
3171 changed = defs_to_varying (stmt);
3172 else if (is_gimple_assign (stmt))
3174 enum tree_code code = gimple_assign_rhs_code (stmt);
3175 tree lhs = gimple_assign_lhs (stmt);
3176 tree rhs1 = gimple_assign_rhs1 (stmt);
3179 /* Shortcut for copies. Simplifying copies is pointless,
3180 since we copy the expression and value they represent. */
3181 if (code == SSA_NAME
3182 && TREE_CODE (lhs) == SSA_NAME)
3184 changed = visit_copy (lhs, rhs1);
3187 simplified = try_to_simplify (stmt);
3190 if (dump_file && (dump_flags & TDF_DETAILS))
3192 fprintf (dump_file, "RHS ");
3193 print_gimple_expr (dump_file, stmt, 0, 0);
3194 fprintf (dump_file, " simplified to ");
3195 print_generic_expr (dump_file, simplified, 0);
3196 if (TREE_CODE (lhs) == SSA_NAME)
3197 fprintf (dump_file, " has constants %d\n",
3198 expr_has_constants (simplified));
3200 fprintf (dump_file, "\n");
3203 /* Setting value numbers to constants will occasionally
3204 screw up phi congruence because constants are not
3205 uniquely associated with a single ssa name that can be
3208 && is_gimple_min_invariant (simplified)
3209 && TREE_CODE (lhs) == SSA_NAME)
3211 VN_INFO (lhs)->expr = simplified;
3212 VN_INFO (lhs)->has_constants = true;
3213 changed = set_ssa_val_to (lhs, simplified);
3217 && TREE_CODE (simplified) == SSA_NAME
3218 && TREE_CODE (lhs) == SSA_NAME)
3220 changed = visit_copy (lhs, simplified);
3223 else if (simplified)
3225 if (TREE_CODE (lhs) == SSA_NAME)
3227 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3228 /* We have to unshare the expression or else
3229 valuizing may change the IL stream. */
3230 VN_INFO (lhs)->expr = unshare_expr (simplified);
3233 else if (stmt_has_constants (stmt)
3234 && TREE_CODE (lhs) == SSA_NAME)
3235 VN_INFO (lhs)->has_constants = true;
3236 else if (TREE_CODE (lhs) == SSA_NAME)
3238 /* We reset expr and constantness here because we may
3239 have been value numbering optimistically, and
3240 iterating. They may become non-constant in this case,
3241 even if they were optimistically constant. */
3243 VN_INFO (lhs)->has_constants = false;
3244 VN_INFO (lhs)->expr = NULL_TREE;
3247 if ((TREE_CODE (lhs) == SSA_NAME
3248 /* We can substitute SSA_NAMEs that are live over
3249 abnormal edges with their constant value. */
3250 && !(gimple_assign_copy_p (stmt)
3251 && is_gimple_min_invariant (rhs1))
3253 && is_gimple_min_invariant (simplified))
3254 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3255 /* Stores or copies from SSA_NAMEs that are live over
3256 abnormal edges are a problem. */
3257 || (code == SSA_NAME
3258 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3259 changed = defs_to_varying (stmt);
3260 else if (REFERENCE_CLASS_P (lhs)
3262 changed = visit_reference_op_store (lhs, rhs1, stmt);
3263 else if (TREE_CODE (lhs) == SSA_NAME)
3265 if ((gimple_assign_copy_p (stmt)
3266 && is_gimple_min_invariant (rhs1))
3268 && is_gimple_min_invariant (simplified)))
3270 VN_INFO (lhs)->has_constants = true;
3272 changed = set_ssa_val_to (lhs, simplified);
3274 changed = set_ssa_val_to (lhs, rhs1);
/* Non-copy, non-constant RHS: dispatch on the gimple RHS class.  */
3278 switch (get_gimple_rhs_class (code))
3280 case GIMPLE_UNARY_RHS:
3281 case GIMPLE_BINARY_RHS:
3282 case GIMPLE_TERNARY_RHS:
3283 changed = visit_nary_op (lhs, stmt);
3285 case GIMPLE_SINGLE_RHS:
3286 switch (TREE_CODE_CLASS (code))
3289 /* VOP-less references can go through unary case. */
3290 if ((code == REALPART_EXPR
3291 || code == IMAGPART_EXPR
3292 || code == VIEW_CONVERT_EXPR
3293 || code == BIT_FIELD_REF)
3294 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3296 changed = visit_nary_op (lhs, stmt);
3300 case tcc_declaration:
3301 changed = visit_reference_op_load (lhs, rhs1, stmt);
3304 if (code == ADDR_EXPR)
3306 changed = visit_nary_op (lhs, stmt);
3309 else if (code == CONSTRUCTOR)
3311 changed = visit_nary_op (lhs, stmt);
3314 changed = defs_to_varying (stmt);
3318 changed = defs_to_varying (stmt);
3324 changed = defs_to_varying (stmt);
3326 else if (is_gimple_call (stmt))
3328 tree lhs = gimple_call_lhs (stmt);
3330 /* ??? We could try to simplify calls. */
3332 if (stmt_has_constants (stmt)
3333 && TREE_CODE (lhs) == SSA_NAME)
3334 VN_INFO (lhs)->has_constants = true;
3335 else if (TREE_CODE (lhs) == SSA_NAME)
3337 /* We reset expr and constantness here because we may
3338 have been value numbering optimistically, and
3339 iterating. They may become non-constant in this case,
3340 even if they were optimistically constant. */
3341 VN_INFO (lhs)->has_constants = false;
3342 VN_INFO (lhs)->expr = NULL_TREE;
3345 if (TREE_CODE (lhs) == SSA_NAME
3346 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3347 changed = defs_to_varying (stmt);
3348 /* ??? We should handle stores from calls. */
3349 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const non-internal calls are value-numberable; everything
   else goes VARYING.  */
3351 if (!gimple_call_internal_p (stmt)
3352 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3353 changed = visit_reference_op_call (lhs, stmt);
3355 changed = defs_to_varying (stmt);
3358 changed = defs_to_varying (stmt);
3365 /* Compare two operands by reverse postorder index */
/* qsort comparator over SSA names.  Ordering: nops (default defs)
   first by version; same block: PHIs before other stmts, then stmt
   UIDs, then versions; different blocks: RPO number.  */
3368 compare_ops (const void *pa, const void *pb)
3370 const tree opa = *((const tree *)pa);
3371 const tree opb = *((const tree *)pb);
3372 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3373 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3377 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3378 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3379 else if (gimple_nop_p (opstmta))
3381 else if (gimple_nop_p (opstmtb))
3384 bba = gimple_bb (opstmta);
3385 bbb = gimple_bb (opstmtb);
3388 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Same basic block from here on.  */
3396 if (gimple_code (opstmta) == GIMPLE_PHI
3397 && gimple_code (opstmtb) == GIMPLE_PHI)
3398 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3399 else if (gimple_code (opstmta) == GIMPLE_PHI)
3401 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3403 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3404 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3406 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3408 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3411 /* Sort an array containing members of a strongly connected component
3412 SCC so that the members are ordered by RPO number.
3413 This means that when the sort is complete, iterating through the
3414 array will give you the members in RPO order. */
/* Thin wrapper: delegates to VEC_qsort with compare_ops above.  */
3417 sort_scc (VEC (tree, heap) *scc)
3419 VEC_qsort (tree, scc, compare_ops);
3422 /* Insert the no longer used nary ONARY to the hash INFO. */
/* Deep-copies ONARY into INFO's obstack; used to migrate entries from
   the optimistic table into the valid table after SCC convergence.  */
3425 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3427 size_t size = sizeof_vn_nary_op (onary->length);
3428 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3429 &info->nary_obstack);
3430 memcpy (nary, onary, size);
3431 vn_nary_op_insert_into (nary, info->nary, false);
3434 /* Insert the no longer used phi OPHI to the hash INFO. */
/* Shallow memcpy then steal OPHI's phiargs vector (set to NULL so the
   donor's teardown cannot free it); the valid table must not already
   contain an equal entry (assert).  */
3437 copy_phi (vn_phi_t ophi, vn_tables_t info)
3439 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3441 memcpy (phi, ophi, sizeof (*phi));
3442 ophi->phiargs = NULL;
3443 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3444 gcc_assert (!*slot);
3448 /* Insert the no longer used reference OREF to the hash INFO. */
/* Like copy_phi but for references: ownership of the operand vector
   moves to the copy, and an existing equal entry is freed rather than
   asserted absent.  */
3451 copy_reference (vn_reference_t oref, vn_tables_t info)
3455 ref = (vn_reference_t) pool_alloc (info->references_pool);
3456 memcpy (ref, oref, sizeof (*ref));
3457 oref->operands = NULL;
3458 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3461 free_reference (*slot);
3465 /* Process a strongly connected component in the SSA graph. */
/* Singletons (except self-referential PHIs) are visited once against
   the valid table; real cycles are iterated to a fixpoint against the
   optimistic table, whose surviving entries are then copied into the
   valid table.  */
3468 process_scc (VEC (tree, heap) *scc)
3472 unsigned int iterations = 0;
3473 bool changed = true;
3479 /* If the SCC has a single member, just visit it. */
3480 if (VEC_length (tree, scc) == 1)
3482 tree use = VEC_index (tree, scc, 0);
3483 if (VN_INFO (use)->use_processed)
3485 /* We need to make sure it doesn't form a cycle itself, which can
3486 happen for self-referential PHI nodes. In that case we would
3487 end up inserting an expression with VN_TOP operands into the
3488 valid table which makes us derive bogus equivalences later.
3489 The cheapest way to check this is to assume it for all PHI nodes. */
3490 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3491 /* Fallthru to iteration. */ ;
3499 /* Iterate over the SCC with the optimistic table until it stops
3501 current_info = optimistic_info;
3506 if (dump_file && (dump_flags & TDF_DETAILS))
3507 fprintf (dump_file, "Starting iteration %d\n", iterations);
3508 /* As we are value-numbering optimistically we have to
3509 clear the expression tables and the simplified expressions
3510 in each iteration until we converge. */
3511 htab_empty (optimistic_info->nary);
3512 htab_empty (optimistic_info->phis);
3513 htab_empty (optimistic_info->references);
3514 obstack_free (&optimistic_info->nary_obstack, NULL);
3515 gcc_obstack_init (&optimistic_info->nary_obstack);
3516 empty_alloc_pool (optimistic_info->phis_pool);
3517 empty_alloc_pool (optimistic_info->references_pool);
3518 FOR_EACH_VEC_ELT (tree, scc, i, var)
3519 VN_INFO (var)->expr = NULL_TREE;
3520 FOR_EACH_VEC_ELT (tree, scc, i, var)
3521 changed |= visit_use (var);
3524 statistics_histogram_event (cfun, "SCC iterations", iterations);
3526 /* Finally, copy the contents of the no longer used optimistic
3527 table to the valid table. */
3528 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3529 copy_nary (nary, valid_info);
3530 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3531 copy_phi (phi, valid_info);
3532 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3533 copy_reference (ref, valid_info);
3535 current_info = valid_info;
3538 DEF_VEC_O(ssa_op_iter);
3539 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3541 /* Pop the components of the found SCC for NAME off the SCC stack
3542 and process them. Returns true if all went well, false if
3543 we run into resource limits. */
3546 extract_and_process_scc_for_name (tree name)
3548 VEC (tree, heap) *scc = NULL;
3551 /* Found an SCC, pop the components off the SCC stack and
/* NAME is the SCC root, so popping until NAME collects exactly this
   component (Tarjan invariant).  */
3555 x = VEC_pop (tree, sccstack);
3557 VN_INFO (x)->on_sccstack = false;
3558 VEC_safe_push (tree, heap, scc, x);
3559 } while (x != name);
3561 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3562 if (VEC_length (tree, scc)
3563 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3566 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3567 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3568 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
/* Multi-member SCCs must be sorted into RPO before iterating.  */
3572 if (VEC_length (tree, scc) > 1)
3575 if (dump_file && (dump_flags & TDF_DETAILS))
3576 print_scc (dump_file, scc);
3580 VEC_free (tree, heap, scc);
3585 /* Depth first search on NAME to discover and process SCC's in the SSA
3587 Execution of this algorithm relies on the fact that the SCC's are
3588 popped off the stack in topological order.
3589 Returns true if successful, false if we stopped processing SCC's due
3590 to resource constraints. */
/* Iterative Tarjan SCC walk over the SSA use-def graph.  The explicit
   namevec/itervec stacks replace recursion to avoid blowing the call
   stack on deep SSA chains.  (The function signature line is elided
   in this excerpt.)  */
3595 VEC(ssa_op_iter, heap) *itervec = NULL;
3596 VEC(tree, heap) *namevec = NULL;
3597 use_operand_p usep = NULL;
3604 VN_INFO (name)->dfsnum = next_dfs_num++;
3605 VN_INFO (name)->visited = true;
3606 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3608 VEC_safe_push (tree, heap, sccstack, name);
3609 VN_INFO (name)->on_sccstack = true;
3610 defstmt = SSA_NAME_DEF_STMT (name);
3612 /* Recursively DFS on our operands, looking for SCC's. */
3613 if (!gimple_nop_p (defstmt))
3615 /* Push a new iterator. */
3616 if (gimple_code (defstmt) == GIMPLE_PHI)
3617 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
3619 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES)
3622 clear_and_done_ssa_iter (&iter);
3626 /* If we are done processing uses of a name, go up the stack
3627 of iterators and process SCCs as we found them. */
3628 if (op_iter_done (&iter))
3630 /* See if we found an SCC. */
3631 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3632 if (!extract_and_process_scc_for_name (name))
3634 VEC_free (tree, heap, namevec);
3635 VEC_free (ssa_op_iter, heap, itervec);
3639 /* Check if we are done. */
3640 if (VEC_empty (tree, namevec))
3642 VEC_free (tree, heap, namevec);
3643 VEC_free (ssa_op_iter, heap, itervec);
3647 /* Restore the last use walker and continue walking there. */
3649 name = VEC_pop (tree, namevec);
3650 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3651 sizeof (ssa_op_iter));
3652 VEC_pop (ssa_op_iter, itervec);
3653 goto continue_walking;
3656 use = USE_FROM_PTR (usep);
3658 /* Since we handle phi nodes, we will sometimes get
3659 invariants in the use expression. */
3660 if (TREE_CODE (use) == SSA_NAME)
3662 if (! (VN_INFO (use)->visited))
3664 /* Recurse by pushing the current use walking state on
3665 the stack and starting over. */
3666 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3667 VEC_safe_push(tree, heap, namevec, name);
/* Back from "recursion": fold the child's lowlink into ours.  */
3672 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3673 VN_INFO (use)->low);
3675 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3676 && VN_INFO (use)->on_sccstack)
3678 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3679 VN_INFO (name)->low);
3683 usep = op_iter_next_use (&iter);
3687 /* Allocate a value number table. */
/* Creates the three hashtables (phis, nary, references), the nary
   obstack, and the phi/reference alloc pools of TABLE.  */
3690 allocate_vn_table (vn_tables_t table)
3692 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3693 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3694 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3697 gcc_obstack_init (&table->nary_obstack);
3698 table->phis_pool = create_alloc_pool ("VN phis",
3699 sizeof (struct vn_phi_s),
3701 table->references_pool = create_alloc_pool ("VN references",
3702 sizeof (struct vn_reference_s),
3706 /* Free a value number table. */
/* Exact inverse of allocate_vn_table; htab_delete runs the per-entry
   free callbacks registered there.  */
3709 free_vn_table (vn_tables_t table)
3711 htab_delete (table->phis);
3712 htab_delete (table->nary);
3713 htab_delete (table->references);
3714 obstack_free (&table->nary_obstack, NULL);
3715 free_alloc_pool (table->phis_pool);
3716 free_alloc_pool (table->references_pool);
/* Pass initialization (the signature line of init_scc_vn is elided in
   this excerpt): computes RPO numbers per basic block, allocates
   per-SSA-name VN info with all values set to VN_TOP, and creates the
   valid and optimistic tables.  */
3724 int *rpo_numbers_temp;
3726 calculate_dominance_info (CDI_DOMINATORS);
3728 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3731 constant_value_ids = BITMAP_ALLOC (NULL);
3736 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3737 /* VEC_alloc doesn't actually grow it to the right size, it just
3738 preallocates the space to do so. */
3739 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3740 gcc_obstack_init (&vn_ssa_aux_obstack);
3742 shared_lookup_phiargs = NULL;
3743 shared_lookup_references = NULL;
3744 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3745 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3746 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3748 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3749 the i'th block in RPO order is bb. We want to map bb's to RPO
3750 numbers, so we need to rearrange this array. */
3751 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3752 rpo_numbers[rpo_numbers_temp[j]] = j;
3754 XDELETE (rpo_numbers_temp);
/* VN_TOP is a sentinel tree, never a real value.  */
3756 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3758 /* Create the VN_INFO structures, and initialize value numbers to
3760 for (i = 0; i < num_ssa_names; i++)
3762 tree name = ssa_name (i);
3765 VN_INFO_GET (name)->valnum = VN_TOP;
3766 VN_INFO (name)->expr = NULL_TREE;
3767 VN_INFO (name)->value_id = 0;
/* Stmt UIDs are consumed by compare_ops for intra-block ordering.  */
3771 renumber_gimple_stmt_uids ();
3773 /* Create the valid and optimistic value numbering tables. */
3774 valid_info = XCNEW (struct vn_tables_s);
3775 allocate_vn_table (valid_info);
3776 optimistic_info = XCNEW (struct vn_tables_s);
3777 allocate_vn_table (optimistic_info);
3785 htab_delete (constant_to_value_id);
3786 BITMAP_FREE (constant_value_ids);
3787 VEC_free (tree, heap, shared_lookup_phiargs);
3788 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3789 XDELETEVEC (rpo_numbers);
3791 for (i = 0; i < num_ssa_names; i++)
3793 tree name = ssa_name (i);
3795 && VN_INFO (name)->needs_insertion)
3796 release_ssa_name (name);
3798 obstack_free (&vn_ssa_aux_obstack, NULL);
3799 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3801 VEC_free (tree, heap, sccstack);
3802 free_vn_table (valid_info);
3803 XDELETE (valid_info);
3804 free_vn_table (optimistic_info);
3805 XDELETE (optimistic_info);
3808 /* Set *ID if we computed something useful in RESULT. */
3811 set_value_id_for_result (tree result, unsigned int *id)
3815 if (TREE_CODE (result) == SSA_NAME)
3816 *id = VN_INFO (result)->value_id;
3817 else if (is_gimple_min_invariant (result))
3818 *id = get_or_alloc_constant_value_id (result);
3822 /* Set the value ids in the valid hash tables. */
3825 set_hashtable_value_ids (void)
3832 /* Now set the value ids of the things we had put in the hash
3835 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3836 vno, vn_nary_op_t, hi)
3837 set_value_id_for_result (vno->result, &vno->value_id);
3839 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3841 set_value_id_for_result (vp->result, &vp->value_id);
3843 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3844 vr, vn_reference_t, hi)
3845 set_value_id_for_result (vr->result, &vr->value_id);
3848 /* Do SCCVN. Returns true if it finished, false if we bailed out
3849 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3850 how we use the alias oracle walking during the VN process. */
3853 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3857 bool changed = true;
3859 default_vn_walk_kind = default_vn_walk_kind_;
3862 current_info = valid_info;
3864 for (param = DECL_ARGUMENTS (current_function_decl);
3866 param = DECL_CHAIN (param))
3868 if (gimple_default_def (cfun, param) != NULL)
3870 tree def = gimple_default_def (cfun, param);
3871 VN_INFO (def)->valnum = def;
3875 for (i = 1; i < num_ssa_names; ++i)
3877 tree name = ssa_name (i);
3879 && VN_INFO (name)->visited == false
3880 && !has_zero_uses (name))
3888 /* Initialize the value ids. */
3890 for (i = 1; i < num_ssa_names; ++i)
3892 tree name = ssa_name (i);
3896 info = VN_INFO (name);
3897 if (info->valnum == name
3898 || info->valnum == VN_TOP)
3899 info->value_id = get_next_value_id ();
3900 else if (is_gimple_min_invariant (info->valnum))
3901 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3904 /* Propagate until they stop changing. */
3908 for (i = 1; i < num_ssa_names; ++i)
3910 tree name = ssa_name (i);
3914 info = VN_INFO (name);
3915 if (TREE_CODE (info->valnum) == SSA_NAME
3916 && info->valnum != name
3917 && info->value_id != VN_INFO (info->valnum)->value_id)
3920 info->value_id = VN_INFO (info->valnum)->value_id;
3925 set_hashtable_value_ids ();
3927 if (dump_file && (dump_flags & TDF_DETAILS))
3929 fprintf (dump_file, "Value numbers:\n");
3930 for (i = 0; i < num_ssa_names; i++)
3932 tree name = ssa_name (i);
3934 && VN_INFO (name)->visited
3935 && SSA_VAL (name) != name)
3937 print_generic_expr (dump_file, name, 0);
3938 fprintf (dump_file, " = ");
3939 print_generic_expr (dump_file, SSA_VAL (name), 0);
3940 fprintf (dump_file, "\n");
3948 /* Return the maximum value id we have ever seen. */
3951 get_max_value_id (void)
3953 return next_value_id;
3956 /* Return the next unique value id. */
3959 get_next_value_id (void)
3961 return next_value_id++;
3965 /* Compare two expressions E1 and E2 and return true if they are equal. */
3968 expressions_equal_p (tree e1, tree e2)
3970 /* The obvious case. */
3974 /* If only one of them is null, they cannot be equal. */
3978 /* Now perform the actual comparison. */
3979 if (TREE_CODE (e1) == TREE_CODE (e2)
3980 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3987 /* Return true if the nary operation NARY may trap. This is a copy
3988 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3991 vn_nary_may_trap (vn_nary_op_t nary)
3994 tree rhs2 = NULL_TREE;
3995 bool honor_nans = false;
3996 bool honor_snans = false;
3997 bool fp_operation = false;
3998 bool honor_trapv = false;
4002 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4003 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4004 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4007 fp_operation = FLOAT_TYPE_P (type);
4010 honor_nans = flag_trapping_math && !flag_finite_math_only;
4011 honor_snans = flag_signaling_nans != 0;
4013 else if (INTEGRAL_TYPE_P (type)
4014 && TYPE_OVERFLOW_TRAPS (type))
4017 if (nary->length >= 2)
4019 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4021 honor_nans, honor_snans, rhs2,
4027 for (i = 0; i < nary->length; ++i)
4028 if (tree_could_trap_p (nary->op[i]))