1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 members).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies. */
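/* The following is a deliberately tiny, self-contained sketch (not part
   of GCC, and using only hypothetical names and data) of the Tarjan-style
   DFS alluded to in the comment above: it walks a small hard-coded
   "use graph" and pops each strongly connected component off a stack as
   soon as its root is found.  In the real pass the nodes are SSA names,
   and popping an SCC is the point where its members are (re-)value
   numbered until they stabilize.  */

#define SKETCH_NNODES 4

/* Hypothetical use edges: 0->1, 1->2, 2->1 (a cycle) and 2->3.  */
static const int sketch_succs[SKETCH_NNODES][SKETCH_NNODES] = {
  { 0, 1, 0, 0 },
  { 0, 0, 1, 0 },
  { 0, 1, 0, 1 },
  { 0, 0, 0, 0 }
};

static int sketch_dfs_num[SKETCH_NNODES];
static int sketch_low[SKETCH_NNODES];
static int sketch_on_stack[SKETCH_NNODES];
static int sketch_stack[SKETCH_NNODES];
static int sketch_sp;
static int sketch_next_dfs;

/* Visit node N.  Returns the number of members popped when N turns out
   to be the root of an SCC, and 0 otherwise.  */

static int
sketch_dfs (int n)
{
  int i, members = 0;

  sketch_dfs_num[n] = sketch_low[n] = ++sketch_next_dfs;
  sketch_stack[sketch_sp++] = n;
  sketch_on_stack[n] = 1;

  for (i = 0; i < SKETCH_NNODES; i++)
    if (sketch_succs[n][i])
      {
        if (!sketch_dfs_num[i])
          {
            /* Tree edge: recurse, then propagate the lowlink upwards.  */
            sketch_dfs (i);
            if (sketch_low[i] < sketch_low[n])
              sketch_low[n] = sketch_low[i];
          }
        else if (sketch_on_stack[i] && sketch_dfs_num[i] < sketch_low[n])
          /* Edge back into the component still on the stack.  */
          sketch_low[n] = sketch_dfs_num[i];
      }

  /* N is the root of an SCC: pop its members.  */
  if (sketch_low[n] == sketch_dfs_num[n])
    {
      int m;
      do
        {
          m = sketch_stack[--sketch_sp];
          sketch_on_stack[m] = 0;
          members++;
        }
      while (m != n);
    }

  return members;
}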
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
146 value. */

148 tree VN_TOP;
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
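/* Illustrative sketch (not part of GCC, all names and sizes hypothetical)
   of the layout chosen by the two declarations above: the records
   themselves live in one arena so that everything can be released in a
   single step, while a table indexed by SSA version gives constant-time
   access to the record of a given name.  */

struct sketch_aux { int valnum; int visited; };

#define SKETCH_MAX_VERSION 128

static struct sketch_aux sketch_aux_arena[SKETCH_MAX_VERSION];
static unsigned sketch_aux_used;
static struct sketch_aux *sketch_aux_table[SKETCH_MAX_VERSION];

/* Allocate and register the record for VERSION; returns 0 when the toy
   arena is exhausted.  */

static struct sketch_aux *
sketch_aux_get (unsigned version)
{
  struct sketch_aux *res;

  if (version >= SKETCH_MAX_VERSION || sketch_aux_used >= SKETCH_MAX_VERSION)
    return 0;
  res = &sketch_aux_arena[sketch_aux_used++];
  res->valnum = 0;
  res->visited = 0;
  sketch_aux_table[version] = res;
  return res;
}

/* Freeing is a single reset; no walk over the table is needed.  */

static void
sketch_aux_free_all (void)
{
  sketch_aux_used = 0;
}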
173 /* Return the value numbering information for a given SSA name. */
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
184 /* Set the value numbering info for a given SSA name to a given
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
226 /* If the value-number is a constant it is the representative
227 expression. */
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
235 expression. */
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
252 if (!is_gimple_assign (def_stmt))
255 /* FIXME tuples. This is incomplete and likely will miss some
256 cases. */
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
260 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
264 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
265 gimple_expr_type (def_stmt),
266 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
270 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
271 gimple_expr_type (def_stmt),
272 gimple_assign_rhs1 (def_stmt));
276 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
277 gimple_expr_type (def_stmt),
278 gimple_assign_rhs1 (def_stmt),
279 gimple_assign_rhs2 (def_stmt));
284 if (expr == NULL_TREE)
287 /* Cache the expression. */
294 /* Free a phi operation structure VP. */
299 vn_phi_t phi = (vn_phi_t) vp;
300 VEC_free (tree, heap, phi->phiargs);
303 /* Free a reference operation structure VP. */
306 free_reference (void *vp)
308 vn_reference_t vr = (vn_reference_t) vp;
309 VEC_free (vn_reference_op_s, heap, vr->operands);
312 /* Hash table equality function for vn_constant_t. */
315 vn_constant_eq (const void *p1, const void *p2)
317 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
318 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
320 if (vc1->hashcode != vc2->hashcode)
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
326 /* Hash table hash function for vn_constant_t. */
329 vn_constant_hash (const void *p1)
331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332 return vc1->hashcode;
335 /* Lookup a value id for CONSTANT and return it. If it does not
339 get_constant_value_id (tree constant)
342 struct vn_constant_s vc;
344 vc.hashcode = vn_hash_constant_with_type (constant);
345 vc.constant = constant;
346 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
347 vc.hashcode, NO_INSERT);
349 return ((vn_constant_t)*slot)->value_id;
353 /* Lookup a value id for CONSTANT, and if it does not exist, create a
354 new one and return it. If it does exist, return it. */
357 get_or_alloc_constant_value_id (tree constant)
360 vn_constant_t vc = XNEW (struct vn_constant_s);
362 vc->hashcode = vn_hash_constant_with_type (constant);
363 vc->constant = constant;
364 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
365 vc->hashcode, INSERT);
369 return ((vn_constant_t)*slot)->value_id;
371 vc->value_id = get_next_value_id ();
373 bitmap_set_bit (constant_value_ids, vc->value_id);
377 /* Return true if V is a value id for a constant. */
380 value_id_constant_p (unsigned int v)
382 return bitmap_bit_p (constant_value_ids, v);
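/* Illustrative sketch (not part of GCC) of the scheme implemented by
   get_constant_value_id, get_or_alloc_constant_value_id and
   value_id_constant_p above: constants are interned into value ids and a
   side bitmap answers "is this id a constant?" in constant time.  Plain
   integers stand in for constant trees, a linear scan stands in for the
   hash table, and all names and sizes are hypothetical.  */

#define SKETCH_MAX_IDS 256

static long long sketch_const_of_id[SKETCH_MAX_IDS];
static unsigned char sketch_id_is_constant[SKETCH_MAX_IDS / 8];
static unsigned sketch_next_id = 1;	/* Leave id 0 unused.  */

static unsigned
sketch_get_or_alloc_constant_value_id (long long cst)
{
  unsigned i;

  for (i = 1; i < sketch_next_id; i++)
    if (sketch_const_of_id[i] == cst)
      return i;

  if (sketch_next_id >= SKETCH_MAX_IDS)
    return 0;

  i = sketch_next_id++;
  sketch_const_of_id[i] = cst;
  sketch_id_is_constant[i / 8] |= (unsigned char) (1u << (i % 8));
  return i;
}

static int
sketch_value_id_constant_p (unsigned id)
{
  return id < SKETCH_MAX_IDS
         && ((sketch_id_is_constant[id / 8] >> (id % 8)) & 1);
}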
385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */
389 vn_reference_op_eq (const void *p1, const void *p2)
391 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
392 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
394 return vro1->opcode == vro2->opcode
395 && types_compatible_p (vro1->type, vro2->type)
396 && expressions_equal_p (vro1->op0, vro2->op0)
397 && expressions_equal_p (vro1->op1, vro2->op1)
398 && expressions_equal_p (vro1->op2, vro2->op2);
401 /* Compute the hash for a reference operand VRO1. */
404 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
406 hashval_t result = 0;
408 result += iterative_hash_expr (vro1->op0, vro1->opcode);
410 result += iterative_hash_expr (vro1->op1, vro1->opcode);
412 result += iterative_hash_expr (vro1->op2, vro1->opcode);
416 /* Return the hashcode for a given reference operation P1. */
419 vn_reference_hash (const void *p1)
421 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
422 return vr1->hashcode;
425 /* Compute a hash for the reference operation VR1 and return it. */
428 vn_reference_compute_hash (const vn_reference_t vr1)
432 vn_reference_op_t vro;
434 result = iterative_hash_expr (vr1->vuse, 0);
435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
436 result += vn_reference_op_compute_hash (vro);
441 /* Return true if reference operations P1 and P2 are equivalent. This
442 means they have the same set of operands and vuses. */
445 vn_reference_eq (const void *p1, const void *p2)
448 vn_reference_op_t vro;
450 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
451 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
452 if (vr1->hashcode != vr2->hashcode)
455 /* Early out if this is not a hash collision. */
456 if (vr1->hashcode != vr2->hashcode)
459 /* The VOP needs to be the same. */
460 if (vr1->vuse != vr2->vuse)
463 /* If the operands are the same we are done. */
464 if (vr1->operands == vr2->operands)
467 /* We require that address operands be canonicalized in a way that
468 two memory references will have the same operands if they are
469 equivalent. */
470 if (VEC_length (vn_reference_op_s, vr1->operands)
471 != VEC_length (vn_reference_op_s, vr2->operands))
474 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
475 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
482 /* Copy the operations present in load/store REF into RESULT, a vector of
483 vn_reference_op_s's. */
486 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
488 if (TREE_CODE (ref) == TARGET_MEM_REF)
490 vn_reference_op_s temp;
493 base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
495 base = build_int_cst (ptr_type_node, 0);
497 memset (&temp, 0, sizeof (temp));
498 /* We do not care for spurious type qualifications. */
499 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
500 temp.opcode = TREE_CODE (ref);
501 temp.op0 = TMR_INDEX (ref);
502 temp.op1 = TMR_STEP (ref);
503 temp.op2 = TMR_OFFSET (ref);
504 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
506 memset (&temp, 0, sizeof (temp));
507 temp.type = NULL_TREE;
508 temp.opcode = TREE_CODE (base);
510 temp.op1 = TMR_ORIGINAL (ref);
511 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
515 /* For non-calls, store the information that makes up the address. */
519 vn_reference_op_s temp;
521 memset (&temp, 0, sizeof (temp));
522 /* We do not care for spurious type qualifications. */
523 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
524 temp.opcode = TREE_CODE (ref);
528 case ALIGN_INDIRECT_REF:
530 /* The only operand is the address, which gets its own
531 vn_reference_op_s structure. */
533 case MISALIGNED_INDIRECT_REF:
534 temp.op0 = TREE_OPERAND (ref, 1);
537 /* Record bits and position. */
538 temp.op0 = TREE_OPERAND (ref, 1);
539 temp.op1 = TREE_OPERAND (ref, 2);
542 /* The field decl is enough to unambiguously specify the field,
543 a matching type is not necessary and a mismatching type
544 is always a spurious difference. */
545 temp.type = NULL_TREE;
546 temp.op0 = TREE_OPERAND (ref, 1);
547 temp.op1 = TREE_OPERAND (ref, 2);
548 /* If this is a reference to a union member, record the union
549 member size as operand. Do so only if we are doing
550 expression insertion (during FRE), as PRE currently gets
551 confused with this. */
552 if (may_insert
553 && temp.op1 == NULL_TREE
554 && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
555 && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
556 && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
557 && host_integerp (DECL_SIZE (temp.op0), 0))
558 temp.op0 = DECL_SIZE (temp.op0);
560 case ARRAY_RANGE_REF:
562 /* Record index as operand. */
563 temp.op0 = TREE_OPERAND (ref, 1);
564 /* Always record lower bounds and element size. */
565 temp.op1 = array_ref_low_bound (ref);
566 temp.op2 = array_ref_element_size (ref);
584 if (is_gimple_min_invariant (ref))
590 /* These are only interesting for their operands, their
591 existence, and their type. They will never be the last
592 ref in the chain of references (IE they require an
593 operand), so we don't have to put anything
594 for op* as it will be handled by the iteration */
597 case VIEW_CONVERT_EXPR:
602 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
604 if (REFERENCE_CLASS_P (ref)
605 || (TREE_CODE (ref) == ADDR_EXPR
606 && !is_gimple_min_invariant (ref)))
607 ref = TREE_OPERAND (ref, 0);
613 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
614 operands in *OPS, the reference alias set SET and the reference type TYPE.
615 Return true if something useful was produced. */
618 ao_ref_init_from_vn_reference (ao_ref *ref,
619 alias_set_type set, tree type,
620 VEC (vn_reference_op_s, heap) *ops)
622 vn_reference_op_t op;
624 tree base = NULL_TREE;
626 HOST_WIDE_INT offset = 0;
627 HOST_WIDE_INT max_size;
628 HOST_WIDE_INT size = -1;
629 tree size_tree = NULL_TREE;
631 /* First get the final access size from just the outermost expression. */
632 op = VEC_index (vn_reference_op_s, ops, 0);
633 if (op->opcode == COMPONENT_REF)
635 if (TREE_CODE (op->op0) == INTEGER_CST)
638 size_tree = DECL_SIZE (op->op0);
640 else if (op->opcode == BIT_FIELD_REF)
644 enum machine_mode mode = TYPE_MODE (type);
646 size_tree = TYPE_SIZE (type);
648 size = GET_MODE_BITSIZE (mode);
650 if (size_tree != NULL_TREE)
652 if (!host_integerp (size_tree, 1))
655 size = TREE_INT_CST_LOW (size_tree);
658 /* Initially, maxsize is the same as the accessed element size.
659 In the following it will only grow (or become -1). */
662 /* Compute cumulative bit-offset for nested component-refs and array-refs,
663 and find the ultimate containing object. */
664 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
668 /* These may be in the reference ops, but we cannot do anything
669 sensible with them here. */
674 /* Record the base objects. */
675 case ALIGN_INDIRECT_REF:
677 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
678 op0_p = &TREE_OPERAND (*op0_p, 0);
681 case MISALIGNED_INDIRECT_REF:
682 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
684 op0_p = &TREE_OPERAND (*op0_p, 0);
696 /* And now the usual component-reference style ops. */
698 offset += tree_low_cst (op->op1, 0);
703 tree field = op->op0;
704 /* We do not have a complete COMPONENT_REF tree here so we
705 cannot use component_ref_field_offset. Do the interesting
706 parts manually. */
708 /* Our union trick, done for offset zero only. */
709 if (TREE_CODE (field) == INTEGER_CST)
712 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
716 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
718 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
723 case ARRAY_RANGE_REF:
725 /* We recorded the lower bound and the element size. */
726 if (!host_integerp (op->op0, 0)
727 || !host_integerp (op->op1, 0)
728 || !host_integerp (op->op2, 0))
732 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
733 hindex -= TREE_INT_CST_LOW (op->op1);
734 hindex *= TREE_INT_CST_LOW (op->op2);
735 hindex *= BITS_PER_UNIT;
747 case VIEW_CONVERT_EXPR:
764 if (base == NULL_TREE)
767 ref->ref = NULL_TREE;
769 ref->offset = offset;
771 ref->max_size = max_size;
772 ref->ref_alias_set = set;
773 ref->base_alias_set = -1;
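/* Illustrative sketch (not part of GCC) of the offset arithmetic performed
   by ao_ref_init_from_vn_reference above for one ARRAY_REF level, using
   the recorded index, lower bound and element size in bytes.  The names
   and the constant 8 for bits-per-unit are assumptions of the sketch.  */

static long long
sketch_array_ref_bit_offset (long long index, long long low_bound,
                             long long elt_size_in_bytes)
{
  /* E.g. a[7] with lower bound 0 and 4-byte elements starts at bit
     (7 - 0) * 4 * 8 == 224 from the start of the array.  */
  return (index - low_bound) * elt_size_in_bytes * 8;
}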
778 /* Copy the operations present in load/store/call REF into RESULT, a vector of
779 vn_reference_op_s's. */
782 copy_reference_ops_from_call (gimple call,
783 VEC(vn_reference_op_s, heap) **result)
785 vn_reference_op_s temp;
788 /* Copy the type, opcode, function being called and static chain. */
789 memset (&temp, 0, sizeof (temp));
790 temp.type = gimple_call_return_type (call);
791 temp.opcode = CALL_EXPR;
792 temp.op0 = gimple_call_fn (call);
793 temp.op1 = gimple_call_chain (call);
794 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
796 /* Copy the call arguments. As they can be references as well,
797 just chain them together. */
798 for (i = 0; i < gimple_call_num_args (call); ++i)
800 tree callarg = gimple_call_arg (call, i);
801 copy_reference_ops_from_ref (callarg, result);
805 /* Create a vector of vn_reference_op_s structures from REF, a
806 REFERENCE_CLASS_P tree. The vector is not shared. */
808 static VEC(vn_reference_op_s, heap) *
809 create_reference_ops_from_ref (tree ref)
811 VEC (vn_reference_op_s, heap) *result = NULL;
813 copy_reference_ops_from_ref (ref, &result);
817 /* Create a vector of vn_reference_op_s structures from CALL, a
818 call statement. The vector is not shared. */
820 static VEC(vn_reference_op_s, heap) *
821 create_reference_ops_from_call (gimple call)
823 VEC (vn_reference_op_s, heap) *result = NULL;
825 copy_reference_ops_from_call (call, &result);
829 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
830 *I_P to point to the last element of the replacement. */
832 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
835 VEC(vn_reference_op_s, heap) *mem = NULL;
836 vn_reference_op_t op;
837 unsigned int i = *i_p;
840 /* Get ops for the addressed object. */
841 op = VEC_index (vn_reference_op_s, *ops, i);
842 /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
843 around it to avoid later ICEs. */
844 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
845 && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
847 vn_reference_op_s aref;
849 aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
850 aref.opcode = ARRAY_REF;
851 aref.op0 = integer_zero_node;
852 if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
853 && TYPE_MIN_VALUE (dom))
854 aref.op0 = TYPE_MIN_VALUE (dom);
856 aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
857 VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
859 copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
861 /* Do the replacement - we should have at least one op in mem now. */
862 if (VEC_length (vn_reference_op_s, mem) == 1)
864 VEC_replace (vn_reference_op_s, *ops, i - 1,
865 VEC_index (vn_reference_op_s, mem, 0));
866 VEC_ordered_remove (vn_reference_op_s, *ops, i);
869 else if (VEC_length (vn_reference_op_s, mem) == 2)
871 VEC_replace (vn_reference_op_s, *ops, i - 1,
872 VEC_index (vn_reference_op_s, mem, 0));
873 VEC_replace (vn_reference_op_s, *ops, i,
874 VEC_index (vn_reference_op_s, mem, 1));
876 else if (VEC_length (vn_reference_op_s, mem) > 2)
878 VEC_replace (vn_reference_op_s, *ops, i - 1,
879 VEC_index (vn_reference_op_s, mem, 0));
880 VEC_replace (vn_reference_op_s, *ops, i,
881 VEC_index (vn_reference_op_s, mem, 1));
882 /* ??? There is no VEC_splice. */
883 for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
884 VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
889 VEC_free (vn_reference_op_s, heap, mem);
893 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
894 structures into their value numbers. This is done in-place, and
895 the vector passed in is returned. */
897 static VEC (vn_reference_op_s, heap) *
898 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
900 vn_reference_op_t vro;
903 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
905 if (vro->opcode == SSA_NAME
906 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
908 vro->op0 = SSA_VAL (vro->op0);
909 /* If it transforms from an SSA_NAME to a constant, update
910 the opcode. */
911 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
912 vro->opcode = TREE_CODE (vro->op0);
913 /* If it transforms from an SSA_NAME to an address, fold with
914 a preceding indirect reference. */
915 if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
916 && VEC_index (vn_reference_op_s,
917 orig, i - 1)->opcode == INDIRECT_REF)
919 vn_reference_fold_indirect (&orig, &i);
923 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
924 vro->op1 = SSA_VAL (vro->op1);
925 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
926 vro->op2 = SSA_VAL (vro->op2);
932 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
934 /* Create a vector of vn_reference_op_s structures from REF, a
935 REFERENCE_CLASS_P tree. The vector is shared among all callers of
936 this function. */
938 static VEC(vn_reference_op_s, heap) *
939 valueize_shared_reference_ops_from_ref (tree ref)
943 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
944 copy_reference_ops_from_ref (ref, &shared_lookup_references);
945 shared_lookup_references = valueize_refs (shared_lookup_references);
946 return shared_lookup_references;
949 /* Create a vector of vn_reference_op_s structures from CALL, a
950 call statement. The vector is shared among all callers of
951 this function. */
953 static VEC(vn_reference_op_s, heap) *
954 valueize_shared_reference_ops_from_call (gimple call)
958 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
959 copy_reference_ops_from_call (call, &shared_lookup_references);
960 shared_lookup_references = valueize_refs (shared_lookup_references);
961 return shared_lookup_references;
964 /* Lookup a SCCVN reference operation VR in the current hash table.
965 Returns the resulting value number if it exists in the hash table,
966 NULL_TREE otherwise. VNRESULT will be filled in with the actual
967 vn_reference_t stored in the hashtable if something is found. */
970 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
976 slot = htab_find_slot_with_hash (current_info->references, vr,
978 if (!slot && current_info == optimistic_info)
979 slot = htab_find_slot_with_hash (valid_info->references, vr,
984 *vnresult = (vn_reference_t)*slot;
985 return ((vn_reference_t)*slot)->result;
991 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
992 with the current VUSE and performs the expression lookup. */
995 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
997 vn_reference_t vr = (vn_reference_t)vr_;
1001 /* Fixup vuse and hash. */
1002 vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
1003 vr->vuse = SSA_VAL (vuse);
1004 vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
1006 hash = vr->hashcode;
1007 slot = htab_find_slot_with_hash (current_info->references, vr,
1008 hash, NO_INSERT);
1009 if (!slot && current_info == optimistic_info)
1010 slot = htab_find_slot_with_hash (valid_info->references, vr,
1011 hash, NO_INSERT);
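/* Illustrative sketch (not part of GCC) of the hash fixup done at the top
   of vn_reference_lookup_2 above: because vn_reference_compute_hash
   combines the vuse contribution by plain addition, replacing the vuse
   only needs the old contribution subtracted and the new one added,
   instead of rehashing every operand.  sketch_hash_of is a hypothetical
   stand-in for iterative_hash_expr.  */

static unsigned
sketch_hash_of (unsigned x)
{
  x ^= x >> 16;
  x *= 0x45d9f3bu;
  x ^= x >> 16;
  return x;
}

static unsigned
sketch_swap_vuse_in_hash (unsigned hashcode, unsigned old_vuse,
                          unsigned new_vuse)
{
  hashcode -= sketch_hash_of (old_vuse);
  hashcode += sketch_hash_of (new_vuse);
  return hashcode;
}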
1018 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1019 from the statement defining VUSE and if not successful tries to
1020 translate *REFP and VR_ through an aggregate copy at the definition
1021 of VUSE. */

1023 static void *
1024 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1026 vn_reference_t vr = (vn_reference_t)vr_;
1027 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1030 HOST_WIDE_INT offset, size, maxsize;
1032 base = ao_ref_base (ref);
1033 offset = ref->offset;
1035 maxsize = ref->max_size;
1037 /* If we cannot constrain the size of the reference we cannot
1038 test if anything kills it. */
1042 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1043 from that definition.
1044 1) Memset. */
1045 if (is_gimple_reg_type (vr->type)
1046 && is_gimple_call (def_stmt)
1047 && (fndecl = gimple_call_fndecl (def_stmt))
1048 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1049 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1050 && integer_zerop (gimple_call_arg (def_stmt, 1))
1051 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1052 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1054 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1056 HOST_WIDE_INT offset2, size2, maxsize2;
1057 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1058 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1059 if ((unsigned HOST_WIDE_INT)size2 / 8
1060 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1061 && operand_equal_p (base, base2, 0)
1062 && offset2 <= offset
1063 && offset2 + size2 >= offset + maxsize)
1065 tree val = fold_convert (vr->type, integer_zero_node);
1066 unsigned int value_id = get_or_alloc_constant_value_id (val);
1067 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1068 VEC_copy (vn_reference_op_s,
1069 heap, vr->operands),
1074 /* 2) Assignment from an empty CONSTRUCTOR. */
1075 else if (is_gimple_reg_type (vr->type)
1076 && gimple_assign_single_p (def_stmt)
1077 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1078 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1081 HOST_WIDE_INT offset2, size2, maxsize2;
1082 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1083 &offset2, &size2, &maxsize2);
1084 if (operand_equal_p (base, base2, 0)
1085 && offset2 <= offset
1086 && offset2 + size2 >= offset + maxsize)
1088 tree val = fold_convert (vr->type, integer_zero_node);
1089 unsigned int value_id = get_or_alloc_constant_value_id (val);
1090 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1091 VEC_copy (vn_reference_op_s,
1092 heap, vr->operands),
1097 /* For aggregate copies translate the reference through them if
1098 the copy kills ref. */
1099 else if (gimple_assign_single_p (def_stmt)
1100 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1101 || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
1102 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1105 HOST_WIDE_INT offset2, size2, maxsize2;
1107 VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
1108 vn_reference_op_t vro;
1111 /* See if the assignment kills REF. */
1112 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1113 &offset2, &size2, &maxsize2);
1114 if (!operand_equal_p (base, base2, 0)
1116 || offset2 + size2 < offset + maxsize)
1119 /* Find the common base of ref and the lhs. */
1120 copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
1121 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1122 j = VEC_length (vn_reference_op_s, lhs) - 1;
1123 while (j >= 0 && i >= 0
1124 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1126 VEC_index (vn_reference_op_s, lhs, j)))
1132 VEC_free (vn_reference_op_s, heap, lhs);
1133 /* i now points to the first additional op.
1134 ??? LHS may not be completely contained in VR, one or more
1135 VIEW_CONVERT_EXPRs could be in its way. We could at least
1136 try handling outermost VIEW_CONVERT_EXPRs. */
1140 /* Now re-write REF to be based on the rhs of the assignment. */
1141 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1142 /* We need to pre-pend vr->operands[0..i] to rhs. */
1143 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1144 > VEC_length (vn_reference_op_s, vr->operands))
1146 VEC (vn_reference_op_s, heap) *old = vr->operands;
1147 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1148 i + 1 + VEC_length (vn_reference_op_s, rhs));
1149 if (old == shared_lookup_references
1150 && vr->operands != old)
1151 shared_lookup_references = NULL;
1154 VEC_truncate (vn_reference_op_s, vr->operands,
1155 i + 1 + VEC_length (vn_reference_op_s, rhs));
1156 for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
1157 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1158 VEC_free (vn_reference_op_s, heap, rhs);
1159 vr->hashcode = vn_reference_compute_hash (vr);
1161 /* Adjust *ref from the new operands. */
1162 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1164 gcc_assert (ref->size == r.size);
1167 /* Keep looking for the adjusted *REF / VR pair. */
1171 /* Bail out and stop walking. */
1172 return (void *)-1;
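/* Illustrative sketch (not part of GCC) of the containment test that
   vn_reference_lookup_3 above applies in all of its cases (memset, empty
   CONSTRUCTOR, aggregate copy): the defining store, covering the bit
   range [OFFSET2, OFFSET2 + SIZE2), must fully cover the bits read,
   [OFFSET, OFFSET + MAXSIZE).  The bases are assumed to have been proven
   equal already.  */

static int
sketch_store_covers_read (long long offset2, long long size2,
                          long long offset, long long maxsize)
{
  return offset2 <= offset && offset2 + size2 >= offset + maxsize;
}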
1175 /* Lookup a reference operation by its parts in the current hash table.
1176 Returns the resulting value number if it exists in the hash table,
1177 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1178 vn_reference_t stored in the hashtable if something is found. */
1181 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1182 VEC (vn_reference_op_s, heap) *operands,
1183 vn_reference_t *vnresult, bool maywalk)
1185 struct vn_reference_s vr1;
1192 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1193 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1194 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1195 VEC_length (vn_reference_op_s, operands));
1196 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1197 VEC_address (vn_reference_op_s, operands),
1198 sizeof (vn_reference_op_s)
1199 * VEC_length (vn_reference_op_s, operands));
1200 vr1.operands = operands = shared_lookup_references
1201 = valueize_refs (shared_lookup_references);
1204 vr1.hashcode = vn_reference_compute_hash (&vr1);
1205 vn_reference_lookup_1 (&vr1, vnresult);
1212 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1214 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1215 vn_reference_lookup_2,
1216 vn_reference_lookup_3, &vr1);
1217 if (vr1.operands != operands)
1218 VEC_free (vn_reference_op_s, heap, vr1.operands);
1222 return (*vnresult)->result;
1227 /* Lookup OP in the current hash table, and return the resulting value
1228 number if it exists in the hash table. Return NULL_TREE if it does
1229 not exist in the hash table or if the result field of the structure
1230 was NULL. VNRESULT will be filled in with the vn_reference_t
1231 stored in the hashtable if one exists. */
1234 vn_reference_lookup (tree op, tree vuse, bool maywalk,
1235 vn_reference_t *vnresult)
1237 VEC (vn_reference_op_s, heap) *operands;
1238 struct vn_reference_s vr1;
1243 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1244 vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1245 vr1.type = TREE_TYPE (op);
1246 vr1.set = get_alias_set (op);
1247 vr1.hashcode = vn_reference_compute_hash (&vr1);
1252 vn_reference_t wvnresult;
1254 ao_ref_init (&r, op);
1256 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1257 vn_reference_lookup_2,
1258 vn_reference_lookup_3, &vr1);
1259 if (vr1.operands != operands)
1260 VEC_free (vn_reference_op_s, heap, vr1.operands);
1264 *vnresult = wvnresult;
1265 return wvnresult->result;
1271 return vn_reference_lookup_1 (&vr1, vnresult);
1275 /* Insert OP into the current hash table with a value number of
1276 RESULT, and return the resulting reference structure we created. */
1279 vn_reference_insert (tree op, tree result, tree vuse)
1284 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1285 if (TREE_CODE (result) == SSA_NAME)
1286 vr1->value_id = VN_INFO (result)->value_id;
1288 vr1->value_id = get_or_alloc_constant_value_id (result);
1289 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1290 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1291 vr1->type = TREE_TYPE (op);
1292 vr1->set = get_alias_set (op);
1293 vr1->hashcode = vn_reference_compute_hash (vr1);
1294 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1296 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1299 /* Because we lookup stores using vuses, and value number failures
1300 using the vdefs (see visit_reference_op_store for how and why),
1301 it's possible that on failure we may try to insert an already
1302 inserted store. This is not wrong, there is no ssa name for a
1303 store that we could use as a differentiator anyway. Thus, unlike
1304 the other lookup functions, you cannot gcc_assert (!*slot)
1307 /* But free the old slot in case of a collision. */
1309 free_reference (*slot);
1315 /* Insert a reference by its pieces into the current hash table with
1316 a value number of RESULT. Return the resulting reference
1317 structure we created. */
1320 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1321 VEC (vn_reference_op_s, heap) *operands,
1322 tree result, unsigned int value_id)
1328 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1329 vr1->value_id = value_id;
1330 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1331 vr1->operands = valueize_refs (operands);
1334 vr1->hashcode = vn_reference_compute_hash (vr1);
1335 if (result && TREE_CODE (result) == SSA_NAME)
1336 result = SSA_VAL (result);
1337 vr1->result = result;
1339 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1342 /* At this point we should have all the things inserted that we have
1343 seen before, and we should never try inserting something that
1345 gcc_assert (!*slot);
1347 free_reference (*slot);
1353 /* Compute and return the hash value for nary operation VBO1. */
1356 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1361 for (i = 0; i < vno1->length; ++i)
1362 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1363 vno1->op[i] = SSA_VAL (vno1->op[i]);
1365 if (vno1->length == 2
1366 && commutative_tree_code (vno1->opcode)
1367 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1369 tree temp = vno1->op[0];
1370 vno1->op[0] = vno1->op[1];
1374 for (i = 0; i < vno1->length; ++i)
1375 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

1377 return hash;
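/* Illustrative sketch (not part of GCC) of the canonicalization done by
   vn_nary_op_compute_hash above: for a commutative opcode the operands
   are put into a fixed order before hashing, so a + b and b + a get the
   same hash and later compare equal.  Plain integers stand in for
   operand trees and the hash combination is hypothetical.  */

static unsigned
sketch_hash_commutative_pair (unsigned opcode, unsigned op0, unsigned op1)
{
  if (op1 < op0)
    {
      unsigned tmp = op0;
      op0 = op1;
      op1 = tmp;
    }
  return opcode + op0 * 2654435761u + op1 * 40503u;
}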
1380 /* Return the computed hashcode for nary operation P1. */
1383 vn_nary_op_hash (const void *p1)
1385 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1386 return vno1->hashcode;
1389 /* Compare nary operations P1 and P2 and return true if they are
1393 vn_nary_op_eq (const void *p1, const void *p2)
1395 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1396 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1399 if (vno1->hashcode != vno2->hashcode)
1402 if (vno1->opcode != vno2->opcode
1403 || !types_compatible_p (vno1->type, vno2->type))
1406 for (i = 0; i < vno1->length; ++i)
1407 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1413 /* Lookup an n-ary operation by its pieces and return the resulting value
1414 number if it exists in the hash table. Return NULL_TREE if it does
1415 not exist in the hash table or if the result field of the operation
1416 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1420 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1421 tree type, tree op0, tree op1, tree op2,
1422 tree op3, vn_nary_op_t *vnresult)
1425 struct vn_nary_op_s vno1;
1429 vno1.length = length;
1435 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1436 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1438 if (!slot && current_info == optimistic_info)
1439 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1444 *vnresult = (vn_nary_op_t)*slot;
1445 return ((vn_nary_op_t)*slot)->result;
1448 /* Lookup OP in the current hash table, and return the resulting value
1449 number if it exists in the hash table. Return NULL_TREE if it does
1450 not exist in the hash table or if the result field of the operation
1451 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1455 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1458 struct vn_nary_op_s vno1;
1463 vno1.opcode = TREE_CODE (op);
1464 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1465 vno1.type = TREE_TYPE (op);
1466 for (i = 0; i < vno1.length; ++i)
1467 vno1.op[i] = TREE_OPERAND (op, i);
1468 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1469 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1471 if (!slot && current_info == optimistic_info)
1472 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1477 *vnresult = (vn_nary_op_t)*slot;
1478 return ((vn_nary_op_t)*slot)->result;
1481 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1482 value number if it exists in the hash table. Return NULL_TREE if
1483 it does not exist in the hash table. VNRESULT will contain the
1484 vn_nary_op_t from the hashtable if it exists. */
1487 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1490 struct vn_nary_op_s vno1;
1495 vno1.opcode = gimple_assign_rhs_code (stmt);
1496 vno1.length = gimple_num_ops (stmt) - 1;
1497 vno1.type = gimple_expr_type (stmt);
1498 for (i = 0; i < vno1.length; ++i)
1499 vno1.op[i] = gimple_op (stmt, i + 1);
1500 if (vno1.opcode == REALPART_EXPR
1501 || vno1.opcode == IMAGPART_EXPR
1502 || vno1.opcode == VIEW_CONVERT_EXPR)
1503 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1504 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1505 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1507 if (!slot && current_info == optimistic_info)
1508 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1513 *vnresult = (vn_nary_op_t)*slot;
1514 return ((vn_nary_op_t)*slot)->result;
1517 /* Insert an n-ary operation into the current hash table using its
1518 pieces. Return the vn_nary_op_t structure we created and put in
1522 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1523 tree type, tree op0,
1524 tree op1, tree op2, tree op3,
1526 unsigned int value_id)
1531 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1532 (sizeof (struct vn_nary_op_s)
1533 - sizeof (tree) * (4 - length)));
1534 vno1->value_id = value_id;
1535 vno1->opcode = code;
1536 vno1->length = length;
1546 vno1->result = result;
1547 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1548 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1550 gcc_assert (!*slot);
1557 /* Insert OP into the current hash table with a value number of
1558 RESULT. Return the vn_nary_op_t structure we created and put in
1562 vn_nary_op_insert (tree op, tree result)
1564 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1569 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1570 (sizeof (struct vn_nary_op_s)
1571 - sizeof (tree) * (4 - length)));
1572 vno1->value_id = VN_INFO (result)->value_id;
1573 vno1->opcode = TREE_CODE (op);
1574 vno1->length = length;
1575 vno1->type = TREE_TYPE (op);
1576 for (i = 0; i < vno1->length; ++i)
1577 vno1->op[i] = TREE_OPERAND (op, i);
1578 vno1->result = result;
1579 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1580 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1582 gcc_assert (!*slot);
1588 /* Insert the rhs of STMT into the current hash table with a value number of
1592 vn_nary_op_insert_stmt (gimple stmt, tree result)
1594 unsigned length = gimple_num_ops (stmt) - 1;
1599 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1600 (sizeof (struct vn_nary_op_s)
1601 - sizeof (tree) * (4 - length)));
1602 vno1->value_id = VN_INFO (result)->value_id;
1603 vno1->opcode = gimple_assign_rhs_code (stmt);
1604 vno1->length = length;
1605 vno1->type = gimple_expr_type (stmt);
1606 for (i = 0; i < vno1->length; ++i)
1607 vno1->op[i] = gimple_op (stmt, i + 1);
1608 if (vno1->opcode == REALPART_EXPR
1609 || vno1->opcode == IMAGPART_EXPR
1610 || vno1->opcode == VIEW_CONVERT_EXPR)
1611 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1612 vno1->result = result;
1613 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1614 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1616 gcc_assert (!*slot);
1622 /* Compute a hashcode for PHI operation VP1 and return it. */
1624 static inline hashval_t
1625 vn_phi_compute_hash (vn_phi_t vp1)
1627 hashval_t result = 0;
1632 result = vp1->block->index;
1634 /* If all PHI arguments are constants we need to distinguish
1635 the PHI node via its type. */
1636 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1637 result += (INTEGRAL_TYPE_P (type)
1638 + (INTEGRAL_TYPE_P (type)
1639 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1641 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1643 if (phi1op == VN_TOP)
1645 result += iterative_hash_expr (phi1op, result);
1651 /* Return the computed hashcode for phi operation P1. */
1654 vn_phi_hash (const void *p1)
1656 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1657 return vp1->hashcode;
1660 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1663 vn_phi_eq (const void *p1, const void *p2)
1665 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1666 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1668 if (vp1->hashcode != vp2->hashcode)
1671 if (vp1->block == vp2->block)
1676 /* If the PHI nodes do not have compatible types
1677 they are not the same. */
1678 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1679 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1682 /* Any phi in the same block will have its arguments in the
1683 same edge order, because of how we store phi nodes. */
1684 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1686 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1687 if (phi1op == VN_TOP || phi2op == VN_TOP)
1689 if (!expressions_equal_p (phi1op, phi2op))
1697 static VEC(tree, heap) *shared_lookup_phiargs;
1699 /* Lookup PHI in the current hash table, and return the resulting
1700 value number if it exists in the hash table. Return NULL_TREE if
1701 it does not exist in the hash table. */
1704 vn_phi_lookup (gimple phi)
1707 struct vn_phi_s vp1;
1710 VEC_truncate (tree, shared_lookup_phiargs, 0);
1712 /* Canonicalize the SSA_NAME's to their value number. */
1713 for (i = 0; i < gimple_phi_num_args (phi); i++)
1715 tree def = PHI_ARG_DEF (phi, i);
1716 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1717 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1719 vp1.phiargs = shared_lookup_phiargs;
1720 vp1.block = gimple_bb (phi);
1721 vp1.hashcode = vn_phi_compute_hash (&vp1);
1722 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1724 if (!slot && current_info == optimistic_info)
1725 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1729 return ((vn_phi_t)*slot)->result;
1732 /* Insert PHI into the current hash table with a value number of
1736 vn_phi_insert (gimple phi, tree result)
1739 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1741 VEC (tree, heap) *args = NULL;
1743 /* Canonicalize the SSA_NAME's to their value number. */
1744 for (i = 0; i < gimple_phi_num_args (phi); i++)
1746 tree def = PHI_ARG_DEF (phi, i);
1747 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1748 VEC_safe_push (tree, heap, args, def);
1750 vp1->value_id = VN_INFO (result)->value_id;
1751 vp1->phiargs = args;
1752 vp1->block = gimple_bb (phi);
1753 vp1->result = result;
1754 vp1->hashcode = vn_phi_compute_hash (vp1);
1756 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1759 /* Because we iterate over phi operations more than once, it's
1760 possible the slot might already exist here, hence no assert.*/
1766 /* Print set of components in strongly connected component SCC to OUT. */
1769 print_scc (FILE *out, VEC (tree, heap) *scc)
1774 fprintf (out, "SCC consists of: ");
1775 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1777 print_generic_expr (out, var, 0);
1780 fprintf (out, "\n");
1783 /* Set the value number of FROM to TO, return true if it has changed
1787 set_ssa_val_to (tree from, tree to)
1792 && TREE_CODE (to) == SSA_NAME
1793 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1796 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1797 and invariants. So assert that here. */
1798 gcc_assert (to != NULL_TREE
1800 || TREE_CODE (to) == SSA_NAME
1801 || is_gimple_min_invariant (to)));
1803 if (dump_file && (dump_flags & TDF_DETAILS))
1805 fprintf (dump_file, "Setting value number of ");
1806 print_generic_expr (dump_file, from, 0);
1807 fprintf (dump_file, " to ");
1808 print_generic_expr (dump_file, to, 0);
1811 currval = SSA_VAL (from);
1813 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1815 VN_INFO (from)->valnum = to;
1816 if (dump_file && (dump_flags & TDF_DETAILS))
1817 fprintf (dump_file, " (changed)\n");
1820 if (dump_file && (dump_flags & TDF_DETAILS))
1821 fprintf (dump_file, "\n");
1825 /* Set all definitions in STMT to value number to themselves.
1826 Return true if a value number changed. */
1829 defs_to_varying (gimple stmt)
1831 bool changed = false;
1835 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1837 tree def = DEF_FROM_PTR (defp);
1839 VN_INFO (def)->use_processed = true;
1840 changed |= set_ssa_val_to (def, def);
1845 static bool expr_has_constants (tree expr);
1846 static tree valueize_expr (tree expr);
1848 /* Visit a copy between LHS and RHS, return true if the value number
1852 visit_copy (tree lhs, tree rhs)
1854 /* Follow chains of copies to their destination. */
1855 while (TREE_CODE (rhs) == SSA_NAME
1856 && SSA_VAL (rhs) != rhs)
1857 rhs = SSA_VAL (rhs);
1859 /* The copy may have a more interesting constant filled expression
1860 (we don't, since we know our RHS is just an SSA name). */
1861 if (TREE_CODE (rhs) == SSA_NAME)
1863 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1864 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1867 return set_ssa_val_to (lhs, rhs);
1870 /* Visit a unary operator RHS, value number it, and return true if the
1871 value number of LHS has changed as a result. */
1874 visit_unary_op (tree lhs, gimple stmt)
1876 bool changed = false;
1877 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1881 changed = set_ssa_val_to (lhs, result);
1885 changed = set_ssa_val_to (lhs, lhs);
1886 vn_nary_op_insert_stmt (stmt, lhs);
1892 /* Visit a binary operator RHS, value number it, and return true if the
1893 value number of LHS has changed as a result. */
1896 visit_binary_op (tree lhs, gimple stmt)
1898 bool changed = false;
1899 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1903 changed = set_ssa_val_to (lhs, result);
1907 changed = set_ssa_val_to (lhs, lhs);
1908 vn_nary_op_insert_stmt (stmt, lhs);
1914 /* Visit a call STMT storing into LHS. Return true if the value number
1915 of the LHS has changed as a result. */
1918 visit_reference_op_call (tree lhs, gimple stmt)
1920 bool changed = false;
1921 struct vn_reference_s vr1;
1923 tree vuse = gimple_vuse (stmt);
1925 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1926 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1927 vr1.type = gimple_expr_type (stmt);
1929 vr1.hashcode = vn_reference_compute_hash (&vr1);
1930 result = vn_reference_lookup_1 (&vr1, NULL);
1933 changed = set_ssa_val_to (lhs, result);
1934 if (TREE_CODE (result) == SSA_NAME
1935 && VN_INFO (result)->has_constants)
1936 VN_INFO (lhs)->has_constants = true;
1942 changed = set_ssa_val_to (lhs, lhs);
1943 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1944 vr2->vuse = vr1.vuse;
1945 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1946 vr2->type = vr1.type;
1948 vr2->hashcode = vr1.hashcode;
1950 slot = htab_find_slot_with_hash (current_info->references,
1951 vr2, vr2->hashcode, INSERT);
1953 free_reference (*slot);
1960 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1961 and return true if the value number of the LHS has changed as a result. */
1964 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1966 bool changed = false;
1967 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1969 /* If we have a VCE, try looking up its operand as it might be stored in
1970 a different type. */
1971 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
1972 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
1975 /* We handle type-punning through unions by value-numbering based
1976 on offset and size of the access. Be prepared to handle a
1977 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1979 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1981 /* We will be setting the value number of lhs to the value number
1982 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1983 So first simplify and lookup this expression to see if it
1984 is already available. */
1985 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1986 if ((CONVERT_EXPR_P (val)
1987 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1988 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1990 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1991 if ((CONVERT_EXPR_P (tem)
1992 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1993 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1994 TREE_TYPE (val), tem)))
1998 if (!is_gimple_min_invariant (val)
1999 && TREE_CODE (val) != SSA_NAME)
2000 result = vn_nary_op_lookup (val, NULL);
2001 /* If the expression is not yet available, value-number lhs to
2002 a new SSA_NAME we create. */
2003 if (!result && may_insert)
2005 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2006 /* Initialize value-number information properly. */
2007 VN_INFO_GET (result)->valnum = result;
2008 VN_INFO (result)->value_id = get_next_value_id ();
2009 VN_INFO (result)->expr = val;
2010 VN_INFO (result)->has_constants = expr_has_constants (val);
2011 VN_INFO (result)->needs_insertion = true;
2012 /* As all "inserted" statements are singleton SCCs, insert
2013 to the valid table. This is strictly needed to
2014 avoid re-generating new value SSA_NAMEs for the same
2015 expression during SCC iteration over and over (the
2016 optimistic table gets cleared after each iteration).
2017 We do not need to insert into the optimistic table, as
2018 lookups there will fall back to the valid table. */
2019 if (current_info == optimistic_info)
2021 current_info = valid_info;
2022 vn_nary_op_insert (val, result);
2023 current_info = optimistic_info;
2026 vn_nary_op_insert (val, result);
2027 if (dump_file && (dump_flags & TDF_DETAILS))
2029 fprintf (dump_file, "Inserting name ");
2030 print_generic_expr (dump_file, result, 0);
2031 fprintf (dump_file, " for expression ");
2032 print_generic_expr (dump_file, val, 0);
2033 fprintf (dump_file, "\n");
2040 changed = set_ssa_val_to (lhs, result);
2041 if (TREE_CODE (result) == SSA_NAME
2042 && VN_INFO (result)->has_constants)
2044 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2045 VN_INFO (lhs)->has_constants = true;
2050 changed = set_ssa_val_to (lhs, lhs);
2051 vn_reference_insert (op, lhs, gimple_vuse (stmt));
2058 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2059 and return true if the value number of the LHS has changed as a result. */
2062 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2064 bool changed = false;
2066 bool resultsame = false;
2068 /* First we want to lookup using the *vuses* from the store and see
2069 whether the last store to this location with the same address
2070 had the same value.
2072 The vuses represent the memory state before the store. If the
2073 memory state, address, and value of the store is the same as the
2074 last store to this location, then this store will produce the
2075 same memory state as that store.
2077 In this case the vdef versions for this store are value numbered to those
2078 vuse versions, since they represent the same memory state after
2081 Otherwise, the vdefs for the store are used when inserting into
2082 the table, since the store generates a new memory state. */
2084 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2088 if (TREE_CODE (result) == SSA_NAME)
2089 result = SSA_VAL (result);
2090 if (TREE_CODE (op) == SSA_NAME)
2092 resultsame = expressions_equal_p (result, op);
2095 if (!result || !resultsame)
2099 if (dump_file && (dump_flags & TDF_DETAILS))
2101 fprintf (dump_file, "No store match\n");
2102 fprintf (dump_file, "Value numbering store ");
2103 print_generic_expr (dump_file, lhs, 0);
2104 fprintf (dump_file, " to ");
2105 print_generic_expr (dump_file, op, 0);
2106 fprintf (dump_file, "\n");
2108 /* Have to set value numbers before insert, since insert is
2109 going to valueize the references in-place. */
2110 if ((vdef = gimple_vdef (stmt)))
2112 VN_INFO (vdef)->use_processed = true;
2113 changed |= set_ssa_val_to (vdef, vdef);
2116 /* Do not insert structure copies into the tables. */
2117 if (is_gimple_min_invariant (op)
2118 || is_gimple_reg (op))
2119 vn_reference_insert (lhs, op, vdef);
2123 /* We had a match, so value number the vdef to have the value
2124 number of the vuse it came from. */
2127 if (dump_file && (dump_flags & TDF_DETAILS))
2128 fprintf (dump_file, "Store matched earlier value, "
2129 "value numbering store vdefs to matching vuses.\n");
2131 def = gimple_vdef (stmt);
2132 use = gimple_vuse (stmt);
2134 VN_INFO (def)->use_processed = true;
2135 changed |= set_ssa_val_to (def, SSA_VAL (use));
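/* Illustrative sketch (not part of GCC) of the decision made by
   visit_reference_op_store above: when the value being stored is the one
   the location already holds in the incoming memory state, the outgoing
   state (the vdef) is value numbered to the incoming one (the vuse);
   otherwise it becomes a new state of its own.  Plain integers stand in
   for value numbers.  */

static long long
sketch_value_number_vdef (long long vuse_valnum, long long stored_valnum,
                          long long prev_stored_valnum, long long vdef_name)
{
  if (stored_valnum == prev_stored_valnum)
    return vuse_valnum;		/* Redundant store: reuse the old state.  */
  return vdef_name;		/* The store creates a new memory state.  */
}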
2141 /* Visit and value number PHI, return true if the value number
2145 visit_phi (gimple phi)
2147 bool changed = false;
2149 tree sameval = VN_TOP;
2150 bool allsame = true;
2153 /* TODO: We could check for this in init_sccvn, and replace this
2154 with a gcc_assert. */
2155 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2156 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2158 /* See if all non-TOP arguments have the same value. TOP is
2159 equivalent to everything, so we can ignore it. */
2160 for (i = 0; i < gimple_phi_num_args (phi); i++)
2162 tree def = PHI_ARG_DEF (phi, i);
2164 if (TREE_CODE (def) == SSA_NAME)
2165 def = SSA_VAL (def);
2168 if (sameval == VN_TOP)
2174 if (!expressions_equal_p (def, sameval))
2182 /* If all value numbered to the same value, the phi node has that
2186 if (is_gimple_min_invariant (sameval))
2188 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2189 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2193 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2194 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2197 if (TREE_CODE (sameval) == SSA_NAME)
2198 return visit_copy (PHI_RESULT (phi), sameval);
2200 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2203 /* Otherwise, see if it is equivalent to a phi node in this block. */
2204 result = vn_phi_lookup (phi);
2207 if (TREE_CODE (result) == SSA_NAME)
2208 changed = visit_copy (PHI_RESULT (phi), result);
2210 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2214 vn_phi_insert (phi, PHI_RESULT (phi));
2215 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2216 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2217 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
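/* Illustrative sketch (not part of GCC) of the argument meet performed by
   visit_phi above.  0 stands in for VN_TOP, which is equivalent to
   everything and therefore ignored.  The function returns the common
   value number of the non-TOP arguments, or -1 when two of them disagree
   (the "allsame = false" case, after which the real code falls back to
   vn_phi_lookup/vn_phi_insert).  */

static long long
sketch_phi_meet (const long long *args, int nargs)
{
  long long sameval = 0;	/* VN_TOP.  */
  int i;

  for (i = 0; i < nargs; i++)
    {
      if (args[i] == 0)
	continue;
      if (sameval == 0)
	sameval = args[i];
      else if (args[i] != sameval)
	return -1;
    }
  return sameval;
}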
2223 /* Return true if EXPR contains constants. */
2226 expr_has_constants (tree expr)
2228 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2231 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2234 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2235 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2236 /* Constants inside reference ops are rarely interesting, but
2237 it can take a lot of looking to find them. */
2239 case tcc_declaration:
2242 return is_gimple_min_invariant (expr);
2247 /* Return true if STMT contains constants. */
2250 stmt_has_constants (gimple stmt)
2252 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2255 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2257 case GIMPLE_UNARY_RHS:
2258 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2260 case GIMPLE_BINARY_RHS:
2261 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2262 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2263 case GIMPLE_SINGLE_RHS:
2264 /* Constants inside reference ops are rarely interesting, but
2265 it can take a lot of looking to find them. */
2266 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2273 /* Replace SSA_NAMEs in expr with their value numbers, and return the resulting expression.
2275 This is performed in place. */
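/* For instance (hypothetical names), if SSA_VAL (b_2) is the constant 5
   and SSA_VAL (c_3) is c_3 itself, valueizing the expression b_2 + c_3
   rewrites it to 5 + c_3 in place.  */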
2278 valueize_expr (tree expr)
2280 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2283 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2284 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2285 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2288 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2289 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2290 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2291 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2292 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2293 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2301 /* Simplify the binary expression RHS, and return the result if simplified.  */
2305 simplify_binary_expression (gimple stmt)
2307 tree result = NULL_TREE;
2308 tree op0 = gimple_assign_rhs1 (stmt);
2309 tree op1 = gimple_assign_rhs2 (stmt);
2311 /* This will not catch every single case we could combine, but will
2312 catch those with constants. The goal here is to simultaneously
2313 combine constants between expressions, but avoid infinite
2314 expansion of expressions during simplification. */
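      /* For example (hypothetical names), if a_1 already has the
	 expression b_2 + 3 recorded for it, the statement c_4 = a_1 + 5
	 feeds (b_2 + 3) + 5 to fold_binary below, which can combine the
	 constants into b_2 + 8.  */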
2315 if (TREE_CODE (op0) == SSA_NAME)
2317 if (VN_INFO (op0)->has_constants
2318 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2319 op0 = valueize_expr (vn_get_expr_for (op0));
2320 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2321 op0 = SSA_VAL (op0);
2324 if (TREE_CODE (op1) == SSA_NAME)
2326 if (VN_INFO (op1)->has_constants)
2327 op1 = valueize_expr (vn_get_expr_for (op1));
2328 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2329 op1 = SSA_VAL (op1);
2332 /* Avoid folding if nothing changed. */
2333 if (op0 == gimple_assign_rhs1 (stmt)
2334 && op1 == gimple_assign_rhs2 (stmt))
2337 fold_defer_overflow_warnings ();
2339 result = fold_binary (gimple_assign_rhs_code (stmt),
2340 gimple_expr_type (stmt), op0, op1);
2342 STRIP_USELESS_TYPE_CONVERSION (result);
2344 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2347 /* Make sure result is not a complex expression consisting
2348 of operators of operators (i.e. (a + b) + (a + c)).
2349 Otherwise, we will end up with unbounded expressions if
2350 fold does anything at all. */
2351 if (result && valid_gimple_rhs_p (result))
2357 /* Simplify the unary expression RHS, and return the result if simplified.  */
2361 simplify_unary_expression (gimple stmt)
2363 tree result = NULL_TREE;
2364 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2366 /* We handle some tcc_reference codes here that are all
2367 GIMPLE_ASSIGN_SINGLE codes. */
2368 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2369 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2370 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2371 op0 = TREE_OPERAND (op0, 0);
2373 if (TREE_CODE (op0) != SSA_NAME)
2377 if (VN_INFO (op0)->has_constants)
2378 op0 = valueize_expr (vn_get_expr_for (op0));
2379 else if (gimple_assign_cast_p (stmt)
2380 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2381 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2382 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2384 /* We want to do tree-combining on conversion-like expressions.
2385 Make sure we feed only SSA_NAMEs or constants to fold though. */
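	  /* E.g. (hypothetical names) if s_2 was defined as (short) i_1,
	     then for t_3 = (int) s_2 we hand the nested conversion
	     (int) (short) i_1 to fold, which may collapse the pair of
	     casts when the outer conversion makes the inner one
	     redundant.  */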
2386 tree tem = valueize_expr (vn_get_expr_for (op0));
2387 if (UNARY_CLASS_P (tem)
2388 || BINARY_CLASS_P (tem)
2389 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2390 || TREE_CODE (tem) == SSA_NAME
2391 || is_gimple_min_invariant (tem))
2395 /* Avoid folding if nothing changed, but remember the expression. */
2396 if (op0 == orig_op0)
2399 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2400 gimple_expr_type (stmt), op0);
2403 STRIP_USELESS_TYPE_CONVERSION (result);
2404 if (valid_gimple_rhs_p (result))
2411 /* Try to simplify RHS using equivalences and constant folding. */
2414 try_to_simplify (gimple stmt)
2418 /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
2419 in this case; there is no point in doing extra work. */
2420 if (gimple_assign_copy_p (stmt)
2421 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2424 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2426 case tcc_declaration:
2427 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2433 /* Do not do full-blown reference lookup here, but simplify
2434 reads from constant aggregates. */
2435 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2439 /* Fallthrough for some codes that can operate on registers. */
2440 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2441 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2442 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2444 /* We could do a little more with unary ops, if they expand
2445 into binary ops, but it's debatable whether it is worth it. */
2447 return simplify_unary_expression (stmt);
2449 case tcc_comparison:
2451 return simplify_binary_expression (stmt);
2460 /* Visit and value number USE, return true if the value number changed.  */
2464 visit_use (tree use)
2466 bool changed = false;
2467 gimple stmt = SSA_NAME_DEF_STMT (use);
2469 VN_INFO (use)->use_processed = true;
2471 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2472 if (dump_file && (dump_flags & TDF_DETAILS)
2473 && !SSA_NAME_IS_DEFAULT_DEF (use))
2475 fprintf (dump_file, "Value numbering ");
2476 print_generic_expr (dump_file, use, 0);
2477 fprintf (dump_file, " stmt = ");
2478 print_gimple_stmt (dump_file, stmt, 0, 0);
2481 /* Handle uninitialized uses. */
2482 if (SSA_NAME_IS_DEFAULT_DEF (use))
2483 changed = set_ssa_val_to (use, use);
2486 if (gimple_code (stmt) == GIMPLE_PHI)
2487 changed = visit_phi (stmt);
2488 else if (!gimple_has_lhs (stmt)
2489 || gimple_has_volatile_ops (stmt)
2490 || stmt_could_throw_p (stmt))
2491 changed = defs_to_varying (stmt);
2492 else if (is_gimple_assign (stmt))
2494 tree lhs = gimple_assign_lhs (stmt);
2497 /* Shortcut for copies. Simplifying copies is pointless,
2498 since we copy the expression and value they represent. */
2499 if (gimple_assign_copy_p (stmt)
2500 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2501 && TREE_CODE (lhs) == SSA_NAME)
2503 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2506 simplified = try_to_simplify (stmt);
2509 if (dump_file && (dump_flags & TDF_DETAILS))
2511 fprintf (dump_file, "RHS ");
2512 print_gimple_expr (dump_file, stmt, 0, 0);
2513 fprintf (dump_file, " simplified to ");
2514 print_generic_expr (dump_file, simplified, 0);
2515 if (TREE_CODE (lhs) == SSA_NAME)
2516 fprintf (dump_file, " has constants %d\n",
2517 expr_has_constants (simplified));
2519 fprintf (dump_file, "\n");
2522 /* Setting value numbers to constants will occasionally
2523 screw up phi congruence because constants are not
2524 uniquely associated with a single ssa name that can be looked up.  */
2527 && is_gimple_min_invariant (simplified)
2528 && TREE_CODE (lhs) == SSA_NAME)
2530 VN_INFO (lhs)->expr = simplified;
2531 VN_INFO (lhs)->has_constants = true;
2532 changed = set_ssa_val_to (lhs, simplified);
2536 && TREE_CODE (simplified) == SSA_NAME
2537 && TREE_CODE (lhs) == SSA_NAME)
2539 changed = visit_copy (lhs, simplified);
2542 else if (simplified)
2544 if (TREE_CODE (lhs) == SSA_NAME)
2546 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2547 /* We have to unshare the expression or else
2548 valueizing may change the IL stream. */
2549 VN_INFO (lhs)->expr = unshare_expr (simplified);
2552 else if (stmt_has_constants (stmt)
2553 && TREE_CODE (lhs) == SSA_NAME)
2554 VN_INFO (lhs)->has_constants = true;
2555 else if (TREE_CODE (lhs) == SSA_NAME)
2557 /* We reset expr and constantness here because we may
2558 have been value numbering optimistically, and
2559 iterating. They may become non-constant in this case,
2560 even if they were optimistically constant. */
2562 VN_INFO (lhs)->has_constants = false;
2563 VN_INFO (lhs)->expr = NULL_TREE;
2566 if ((TREE_CODE (lhs) == SSA_NAME
2567 /* We can substitute SSA_NAMEs that are live over
2568 abnormal edges with their constant value. */
2569 && !(gimple_assign_copy_p (stmt)
2570 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2572 && is_gimple_min_invariant (simplified))
2573 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2574 /* Stores or copies from SSA_NAMEs that are live over
2575 abnormal edges are a problem. */
2576 || (gimple_assign_single_p (stmt)
2577 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2578 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2579 changed = defs_to_varying (stmt);
2580 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2582 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2584 else if (TREE_CODE (lhs) == SSA_NAME)
2586 if ((gimple_assign_copy_p (stmt)
2587 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2589 && is_gimple_min_invariant (simplified)))
2591 VN_INFO (lhs)->has_constants = true;
2593 changed = set_ssa_val_to (lhs, simplified);
2595 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2599 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2601 case GIMPLE_UNARY_RHS:
2602 changed = visit_unary_op (lhs, stmt);
2604 case GIMPLE_BINARY_RHS:
2605 changed = visit_binary_op (lhs, stmt);
2607 case GIMPLE_SINGLE_RHS:
2608 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2611 /* VOP-less references can go through unary case. */
2612 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2613 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2614 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2615 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2617 changed = visit_unary_op (lhs, stmt);
2621 case tcc_declaration:
2622 changed = visit_reference_op_load
2623 (lhs, gimple_assign_rhs1 (stmt), stmt);
2625 case tcc_expression:
2626 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2628 changed = visit_unary_op (lhs, stmt);
2633 changed = defs_to_varying (stmt);
2637 changed = defs_to_varying (stmt);
2643 changed = defs_to_varying (stmt);
2645 else if (is_gimple_call (stmt))
2647 tree lhs = gimple_call_lhs (stmt);
2649 /* ??? We could try to simplify calls. */
2651 if (stmt_has_constants (stmt)
2652 && TREE_CODE (lhs) == SSA_NAME)
2653 VN_INFO (lhs)->has_constants = true;
2654 else if (TREE_CODE (lhs) == SSA_NAME)
2656 /* We reset expr and constantness here because we may
2657 have been value numbering optimistically, and
2658 iterating. They may become non-constant in this case,
2659 even if they were optimistically constant. */
2660 VN_INFO (lhs)->has_constants = false;
2661 VN_INFO (lhs)->expr = NULL_TREE;
2664 if (TREE_CODE (lhs) == SSA_NAME
2665 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2666 changed = defs_to_varying (stmt);
2667 /* ??? We should handle stores from calls. */
2668 else if (TREE_CODE (lhs) == SSA_NAME)
2670 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2671 changed = visit_reference_op_call (lhs, stmt);
2673 changed = defs_to_varying (stmt);
2676 changed = defs_to_varying (stmt);
2683 /* Compare two operands by reverse postorder index.  */
2686 compare_ops (const void *pa, const void *pb)
2688 const tree opa = *((const tree *)pa);
2689 const tree opb = *((const tree *)pb);
2690 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2691 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2695 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2696 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2697 else if (gimple_nop_p (opstmta))
2699 else if (gimple_nop_p (opstmtb))
2702 bba = gimple_bb (opstmta);
2703 bbb = gimple_bb (opstmtb);
2706 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2714 if (gimple_code (opstmta) == GIMPLE_PHI
2715 && gimple_code (opstmtb) == GIMPLE_PHI)
2716 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2717 else if (gimple_code (opstmta) == GIMPLE_PHI)
2719 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2721 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2722 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2724 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2726 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2729 /* Sort an array containing members of a strongly connected component
2730 SCC so that the members are ordered by RPO number.
2731 This means that when the sort is complete, iterating through the
2732 array will give you the members in RPO order. */
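/* For example (hypothetical names), an SCC containing {x_7, x_3, x_9}
   whose defining statements live in blocks with RPO numbers 5, 2 and 8
   respectively is reordered to {x_3, x_7, x_9}.  */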
2735 sort_scc (VEC (tree, heap) *scc)
2737 qsort (VEC_address (tree, scc),
2738 VEC_length (tree, scc),
2743 /* Process a strongly connected component in the SSA graph. */
2746 process_scc (VEC (tree, heap) *scc)
2748 /* If the SCC has a single member, just visit it. */
2750 if (VEC_length (tree, scc) == 1)
2752 tree use = VEC_index (tree, scc, 0);
2753 if (!VN_INFO (use)->use_processed)
2760 unsigned int iterations = 0;
2761 bool changed = true;
2763 /* Iterate over the SCC with the optimistic table until it stops changing.  */
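     /* A small illustration (hypothetical names) of why iterating
	optimistically helps: for the cycle

	  i_1 = PHI <0(entry), i_2(latch)>
	  i_2 = i_1 + 0;

	the first pass treats the not-yet-visited back edge value as
	VN_TOP, so i_1 gets value 0 and then i_2 gets value 0 as well;
	the next pass sees no change, and both names converge to the
	constant 0.  */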
2765 current_info = optimistic_info;
2770 /* As we are value-numbering optimistically we have to
2771 clear the expression tables and the simplified expressions
2772 in each iteration until we converge. */
2773 htab_empty (optimistic_info->nary);
2774 htab_empty (optimistic_info->phis);
2775 htab_empty (optimistic_info->references);
2776 obstack_free (&optimistic_info->nary_obstack, NULL);
2777 gcc_obstack_init (&optimistic_info->nary_obstack);
2778 empty_alloc_pool (optimistic_info->phis_pool);
2779 empty_alloc_pool (optimistic_info->references_pool);
2780 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2781 VN_INFO (var)->expr = NULL_TREE;
2782 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2783 changed |= visit_use (var);
2786 statistics_histogram_event (cfun, "SCC iterations", iterations);
2788 /* Finally, visit the SCC once using the valid table. */
2789 current_info = valid_info;
2790 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2795 DEF_VEC_O(ssa_op_iter);
2796 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2798 /* Pop the components of the found SCC for NAME off the SCC stack
2799 and process them. Returns true if all went well, false if
2800 we run into resource limits. */
2803 extract_and_process_scc_for_name (tree name)
2805 VEC (tree, heap) *scc = NULL;
2808 /* Found an SCC, pop the components off the SCC stack and process them.  */
2812 x = VEC_pop (tree, sccstack);
2814 VN_INFO (x)->on_sccstack = false;
2815 VEC_safe_push (tree, heap, scc, x);
2816 } while (x != name);
2818 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2819 if (VEC_length (tree, scc)
2820 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2823 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2824 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2825 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2829 if (VEC_length (tree, scc) > 1)
2832 if (dump_file && (dump_flags & TDF_DETAILS))
2833 print_scc (dump_file, scc);
2837 VEC_free (tree, heap, scc);
2842 /* Depth first search on NAME to discover and process SCC's in the SSA graph.
2844 Execution of this algorithm relies on the fact that the SCC's are
2845 popped off the stack in topological order.
2846 Returns true if successful, false if we stopped processing SCC's due
2847 to resource constraints. */
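/* For instance (hypothetical names), the names of a simple counted loop

     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 1;

   form a single SCC {i_1, i_2}: the DFS reaches i_2 from i_1's phi
   argument, finds the back reference to i_1 still on the stack, and
   pops both names together for processing by process_scc.  */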
2852 VEC(ssa_op_iter, heap) *itervec = NULL;
2853 VEC(tree, heap) *namevec = NULL;
2854 use_operand_p usep = NULL;
2861 VN_INFO (name)->dfsnum = next_dfs_num++;
2862 VN_INFO (name)->visited = true;
2863 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2865 VEC_safe_push (tree, heap, sccstack, name);
2866 VN_INFO (name)->on_sccstack = true;
2867 defstmt = SSA_NAME_DEF_STMT (name);
2869 /* Recursively DFS on our operands, looking for SCC's. */
2870 if (!gimple_nop_p (defstmt))
2872 /* Push a new iterator. */
2873 if (gimple_code (defstmt) == GIMPLE_PHI)
2874 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2876 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2879 clear_and_done_ssa_iter (&iter);
2883 /* If we are done processing uses of a name, go up the stack
2884 of iterators and process SCCs as we found them. */
2885 if (op_iter_done (&iter))
2887 /* See if we found an SCC. */
2888 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2889 if (!extract_and_process_scc_for_name (name))
2891 VEC_free (tree, heap, namevec);
2892 VEC_free (ssa_op_iter, heap, itervec);
2896 /* Check if we are done. */
2897 if (VEC_empty (tree, namevec))
2899 VEC_free (tree, heap, namevec);
2900 VEC_free (ssa_op_iter, heap, itervec);
2904 /* Restore the last use walker and continue walking there. */
2906 name = VEC_pop (tree, namevec);
2907 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2908 sizeof (ssa_op_iter));
2909 VEC_pop (ssa_op_iter, itervec);
2910 goto continue_walking;
2913 use = USE_FROM_PTR (usep);
2915 /* Since we handle phi nodes, we will sometimes get
2916 invariants in the use expression. */
2917 if (TREE_CODE (use) == SSA_NAME)
2919 if (! (VN_INFO (use)->visited))
2921 /* Recurse by pushing the current use walking state on
2922 the stack and starting over. */
2923 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2924 VEC_safe_push(tree, heap, namevec, name);
2929 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2930 VN_INFO (use)->low);
2932 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2933 && VN_INFO (use)->on_sccstack)
2935 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2936 VN_INFO (name)->low);
2940 usep = op_iter_next_use (&iter);
2944 /* Allocate a value number table. */
2947 allocate_vn_table (vn_tables_t table)
2949 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2950 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2951 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2954 gcc_obstack_init (&table->nary_obstack);
2955 table->phis_pool = create_alloc_pool ("VN phis",
2956 sizeof (struct vn_phi_s),
2958 table->references_pool = create_alloc_pool ("VN references",
2959 sizeof (struct vn_reference_s),
2963 /* Free a value number table. */
2966 free_vn_table (vn_tables_t table)
2968 htab_delete (table->phis);
2969 htab_delete (table->nary);
2970 htab_delete (table->references);
2971 obstack_free (&table->nary_obstack, NULL);
2972 free_alloc_pool (table->phis_pool);
2973 free_alloc_pool (table->references_pool);
2981 int *rpo_numbers_temp;
2983 calculate_dominance_info (CDI_DOMINATORS);
2985 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2988 constant_value_ids = BITMAP_ALLOC (NULL);
2993 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2994 /* VEC_alloc doesn't actually grow it to the right size; it just
2995 preallocates the space to do so. */
2996 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2997 gcc_obstack_init (&vn_ssa_aux_obstack);
2999 shared_lookup_phiargs = NULL;
3000 shared_lookup_references = NULL;
3001 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3002 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3003 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3005 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
3006 the i'th block in RPO order is bb.  We want to map bb indices to RPO
3007 numbers, so we need to invert this array. */
3008 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3009 rpo_numbers[rpo_numbers_temp[j]] = j;
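  /* For instance, if rpo_numbers_temp were {4, 3, 5} (hypothetical
     basic block indices), this loop would set rpo_numbers[4] = 0,
     rpo_numbers[3] = 1 and rpo_numbers[5] = 2.  */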
3011 XDELETE (rpo_numbers_temp);
3013 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3015 /* Create the VN_INFO structures, and initialize value numbers to VN_TOP.  */
3017 for (i = 0; i < num_ssa_names; i++)
3019 tree name = ssa_name (i);
3022 VN_INFO_GET (name)->valnum = VN_TOP;
3023 VN_INFO (name)->expr = NULL_TREE;
3024 VN_INFO (name)->value_id = 0;
3028 renumber_gimple_stmt_uids ();
3030 /* Create the valid and optimistic value numbering tables. */
3031 valid_info = XCNEW (struct vn_tables_s);
3032 allocate_vn_table (valid_info);
3033 optimistic_info = XCNEW (struct vn_tables_s);
3034 allocate_vn_table (optimistic_info);
3042 htab_delete (constant_to_value_id);
3043 BITMAP_FREE (constant_value_ids);
3044 VEC_free (tree, heap, shared_lookup_phiargs);
3045 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3046 XDELETEVEC (rpo_numbers);
3048 for (i = 0; i < num_ssa_names; i++)
3050 tree name = ssa_name (i);
3052 && VN_INFO (name)->needs_insertion)
3053 release_ssa_name (name);
3055 obstack_free (&vn_ssa_aux_obstack, NULL);
3056 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3058 VEC_free (tree, heap, sccstack);
3059 free_vn_table (valid_info);
3060 XDELETE (valid_info);
3061 free_vn_table (optimistic_info);
3062 XDELETE (optimistic_info);
3065 /* Set the value ids in the valid hash tables. */
3068 set_hashtable_value_ids (void)
3075 /* Now set the value ids of the things we had put in the hash tables.  */
3078 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3079 vno, vn_nary_op_t, hi)
3083 if (TREE_CODE (vno->result) == SSA_NAME)
3084 vno->value_id = VN_INFO (vno->result)->value_id;
3085 else if (is_gimple_min_invariant (vno->result))
3086 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3090 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3095 if (TREE_CODE (vp->result) == SSA_NAME)
3096 vp->value_id = VN_INFO (vp->result)->value_id;
3097 else if (is_gimple_min_invariant (vp->result))
3098 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3102 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3103 vr, vn_reference_t, hi)
3107 if (TREE_CODE (vr->result) == SSA_NAME)
3108 vr->value_id = VN_INFO (vr->result)->value_id;
3109 else if (is_gimple_min_invariant (vr->result))
3110 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3115 /* Do SCCVN. Returns true if it finished, false if we bailed out
3116 due to resource constraints. */
3119 run_scc_vn (bool may_insert_arg)
3123 bool changed = true;
3125 may_insert = may_insert_arg;
3128 current_info = valid_info;
3130 for (param = DECL_ARGUMENTS (current_function_decl);
3132 param = TREE_CHAIN (param))
3134 if (gimple_default_def (cfun, param) != NULL)
3136 tree def = gimple_default_def (cfun, param);
3137 VN_INFO (def)->valnum = def;
3141 for (i = 1; i < num_ssa_names; ++i)
3143 tree name = ssa_name (i);
3145 && VN_INFO (name)->visited == false
3146 && !has_zero_uses (name))
3155 /* Initialize the value ids. */
3157 for (i = 1; i < num_ssa_names; ++i)
3159 tree name = ssa_name (i);
3163 info = VN_INFO (name);
3164 if (info->valnum == name
3165 || info->valnum == VN_TOP)
3166 info->value_id = get_next_value_id ();
3167 else if (is_gimple_min_invariant (info->valnum))
3168 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3171 /* Propagate the value ids until they stop changing. */
3175 for (i = 1; i < num_ssa_names; ++i)
3177 tree name = ssa_name (i);
3181 info = VN_INFO (name);
3182 if (TREE_CODE (info->valnum) == SSA_NAME
3183 && info->valnum != name
3184 && info->value_id != VN_INFO (info->valnum)->value_id)
3187 info->value_id = VN_INFO (info->valnum)->value_id;
3192 set_hashtable_value_ids ();
3194 if (dump_file && (dump_flags & TDF_DETAILS))
3196 fprintf (dump_file, "Value numbers:\n");
3197 for (i = 0; i < num_ssa_names; i++)
3199 tree name = ssa_name (i);
3201 && VN_INFO (name)->visited
3202 && SSA_VAL (name) != name)
3204 print_generic_expr (dump_file, name, 0);
3205 fprintf (dump_file, " = ");
3206 print_generic_expr (dump_file, SSA_VAL (name), 0);
3207 fprintf (dump_file, "\n");
3216 /* Return the maximum value id we have ever seen. */
3219 get_max_value_id (void)
3221 return next_value_id;
3224 /* Return the next unique value id. */
3227 get_next_value_id (void)
3229 return next_value_id++;
3233 /* Compare two expressions E1 and E2 and return true if they are equal. */
3236 expressions_equal_p (tree e1, tree e2)
3238 /* The obvious case. */
3242 /* If only one of them is null, they cannot be equal. */
3246 /* Recurse on elements of lists. */
3247 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3251 for (lop1 = e1, lop2 = e2;
3253 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3257 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3263 /* Now perform the actual comparison. */
3264 if (TREE_CODE (e1) == TREE_CODE (e2)
3265 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3272 /* Return true if the nary operation NARY may trap. This is a copy
3273 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
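/* For example, an integer division whose divisor is not a known nonzero
   constant may trap at run time, and an ordered floating point
   comparison may trap on NaN operands when trapping math is honored,
   so such operations must be reported as possibly trapping here.  */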
3276 vn_nary_may_trap (vn_nary_op_t nary)
3280 bool honor_nans = false;
3281 bool honor_snans = false;
3282 bool fp_operation = false;
3283 bool honor_trapv = false;
3287 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3288 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3289 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3292 fp_operation = FLOAT_TYPE_P (type);
3295 honor_nans = flag_trapping_math && !flag_finite_math_only;
3296 honor_snans = flag_signaling_nans != 0;
3298 else if (INTEGRAL_TYPE_P (type)
3299 && TYPE_OVERFLOW_TRAPS (type))
3303 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3305 honor_nans, honor_snans, rhs2,
3311 for (i = 0; i < nary->length; ++i)
3312 if (tree_could_trap_p (nary->op[i]))