/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
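/* As an illustrative sketch of the second scheme (the SSA names below
   are invented for the example), consider a simple counted loop:

       i_1 = PHI <0 (preheader), i_2 (latch)>
       i_2 = i_1 + 1;

   i_1 and i_2 reference each other, so they form a cycle in the SSA
   graph and end up in a single SCC.  The members of that SCC are value
   numbered together using the optimistic table, iterating until none
   of their value numbers change anymore; only the stabilized results
   are then recorded in the valid table.  */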
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;

  result += iterative_hash_expr (vro1->op0, vro1->opcode);
  result += iterative_hash_expr (vro1->op1, vro1->opcode);
  result += iterative_hash_expr (vro1->op2, vro1->opcode);

  return result;
}

/* Return the hashcode for a given reference operation P1.  */

hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
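/* As a sketch with invented names: after valueization, a load *p_1 where
   SSA_VAL (p_1) is &a.b appears as the op pair { INDIRECT_REF,
   ADDR_EXPR <&a.b> }; folding replaces that pair with the ops for a.b
   itself ({ COMPONENT_REF <b>, VAR_DECL <a> }), so the load gets value
   numbered the same as a direct read of a.b.  */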
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ???  If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = NULL_TREE;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ???  There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
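/* For example (an invented case): if SSA_VAL (i_2) == i_1, an ARRAY_REF
   operand recorded for a[i_2] is rewritten in-place to record i_1, so
   the reference ops for a[i_2] and a[i_1] become identical and thus
   hash and compare equal.  */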
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, size, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  size = ref->size;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ???  LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      gcc_assert (ref->size == r.size);
      *ref = r;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
/* Return the computed hashcode for nary operation P1.  */

hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
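  /* A sketch of such a union access (invented example):

         union { int i; float f; } u;
         u.i = 0;
         x_1 = u.f;

     The load of u.f finds the stored int value 0 at the same offset
     and size, and the result is wrapped in VIEW_CONVERT_EXPR <float>
     to paper over the type mismatch.  */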
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result && may_insert)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, gimple_vuse (stmt));
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
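  /* A sketch of the matched case (invented names):

         # .MEM_3 = VDEF <.MEM_2>
         a.x = x_1;

     If looking up a.x with vuse .MEM_2 already yields x_1, the store
     changes nothing and .MEM_3 can be given the value number of .MEM_2;
     otherwise .MEM_3 is value numbered to itself and the store is
     entered into the table keyed on the new memory state.  */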
  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
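  /* E.g. for the loop header phi (invented names)

         i_1 = PHI <0 (preheader), i_2 (latch)>

     the back edge argument i_2 may still be VN_TOP during the first
     optimistic iteration; ignoring it lets us optimistically value
     number i_1 to 0, revising that only if a later iteration shows
     the arguments actually differ.  */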
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        {
          sameval = def;
        }
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }

  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMES in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
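
/* For instance, if SSA_VAL (b_2) is the constant 7 and SSA_VAL (c_3)
   is d_4, valueizing the expression b_2 + c_3 rewrites it in place to
   7 + d_4, which is a form the folders below can actually simplify.
   (Hypothetical SSA names, purely illustrative.)  */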

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
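
/* A hypothetical example of the constant combining this enables:

     a_1 = 4;
     b_2 = a_1 + 5;

   When value numbering b_2, op0 is replaced by the expression 4 that
   a_1 is known to carry, and fold_binary reduces 4 + 5 to 9, so b_2
   receives the constant value number 9 without any code motion.  */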

/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
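
/* E.g. for the hypothetical conversion pair

     u_1 = (unsigned int) i_2;
     j_3 = (int) u_1;

   op0 of j_3's cast is expanded to the expression (unsigned int) i_2,
   and fold can recognize the two conversions as useless back-to-back
   casts, simplifying j_3 to i_2 (assuming int and unsigned int have
   the same precision).  */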

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;

      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }

          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }

          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                      changed = visit_unary_op (lhs, stmt);
                      break;
                    case GIMPLE_BINARY_RHS:
                      changed = visit_binary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ??? We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ??? We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
    }
  else
    {
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
         changing.  */
      current_info = optimistic_info;
      while (changed)
        {
          changed = false;
          iterations++;
          /* As we are value-numbering optimistically we have to
             clear the expression tables and the simplified expressions
             in each iteration until we converge.  */
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
          obstack_free (&optimistic_info->nary_obstack, NULL);
          gcc_obstack_init (&optimistic_info->nary_obstack);
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            VN_INFO (var)->expr = NULL_TREE;
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        visit_use (var);
    }
}
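
/* To see why the iteration is needed, consider the hypothetical cycle

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   Optimistically, i_2 starts at TOP, so the phi sees only the value 0
   and i_1 is valued 0; then i_2 = 0 + 0 simplifies to 0 as well.  The
   next iteration changes nothing, so the SCC has converged and the
   final walk records the result in the valid table.  */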

DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    }
  while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push (tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
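
/* This is Tarjan's SCC algorithm, made iterative with explicit
   namevec/itervec stacks so that deep SSA use-def chains cannot
   overflow the call stack.  The low-link updates mean that a name's
   low field ends up holding the smallest DFS number reachable within
   its component; when low == dfsnum the name is the root of an SCC
   and everything pushed above it on sccstack belongs to that SCC.  */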

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                         num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
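
/* A small worked example of the rpo_numbers inversion above: if
   pre_and_rev_post_order_compute fills rpo_numbers_temp with the
   block order {2, 5, 3, 4}, then afterwards rpo_numbers[2] == 0,
   rpo_numbers[5] == 1, rpo_numbers[3] == 2 and rpo_numbers[4] == 3,
   so indexing by a block's index directly yields its position in the
   reverse postorder.  (Block numbers are made up.)  */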

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            may_insert = false;
            return false;
          }
    }

  /* Initialize the value ids.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  may_insert = false;
  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1, lop2;
      for (lop1 = e1, lop2 = e2;
           lop1 || lop2;
           lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
        {
          if (!lop1 || !lop2)
            return false;
          if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
            return false;
        }
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
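
/* Note the fast paths: two NULL trees compare equal through the
   e1 == e2 check, a single NULL never matches anything, and
   structurally distinct but equivalent trees, e.g. two separate
   PLUS_EXPR nodes both computing a_1 + 4, are caught by
   operand_equal_p.  (Hypothetical SSA name for illustration.)  */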

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
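
/* For instance, a hypothetical nary operation {PLUS_EXPR, int, a_1,
   b_2} compiled with -ftrapv has honor_trapv set and is reported as
   possibly trapping, while the same addition on an unsigned type
   wraps and is not.  A floating point comparison, likewise, can only
   be reported as trapping when NaNs must be honored, e.g. when
   signaling NaNs are enabled.  */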