diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 69945a5c3c7..f965c5134e5 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -1,5 +1,5 @@
 /* SCC value numbering for trees
-   Copyright (C) 2006, 2007, 2008
+   Copyright (C) 2006, 2007, 2008, 2009, 2010
    Free Software Foundation, Inc.
    Contributed by Daniel Berlin
 
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "timevar.h"
 #include "fibheap.h"
@@ -210,6 +210,87 @@ VN_INFO_GET (tree name)
 }
 
 
+/* Get the representative expression for the SSA_NAME NAME.  Returns
+   the representative SSA_NAME if there is no expression associated
+   with it.  */
+
+tree
+vn_get_expr_for (tree name)
+{
+  vn_ssa_aux_t vn = VN_INFO (name);
+  gimple def_stmt;
+  tree expr = NULL_TREE;
+
+  if (vn->valnum == VN_TOP)
+    return name;
+
+  /* If the value-number is a constant it is the representative
+     expression.  */
+  if (TREE_CODE (vn->valnum) != SSA_NAME)
+    return vn->valnum;
+
+  /* Get to the information of the value of this SSA_NAME.  */
+  vn = VN_INFO (vn->valnum);
+
+  /* If the value-number is a constant it is the representative
+     expression.  */
+  if (TREE_CODE (vn->valnum) != SSA_NAME)
+    return vn->valnum;
+
+  /* Else if we have an expression, return it.  */
+  if (vn->expr != NULL_TREE)
+    return vn->expr;
+
+  /* Otherwise use the defining statement to build the expression.  */
+  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
+
+  /* If the value number is a default-definition or a PHI result
+     use it directly.  */
+  if (gimple_nop_p (def_stmt)
+      || gimple_code (def_stmt) == GIMPLE_PHI)
+    return vn->valnum;
+
+  if (!is_gimple_assign (def_stmt))
+    return vn->valnum;
+
+  /* FIXME tuples.  This is incomplete and likely will miss some
+     simplifications.  */
+  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
+    {
+    case tcc_reference:
+      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
+           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
+           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
+        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+                            gimple_expr_type (def_stmt),
+                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
+      break;
+
+    case tcc_unary:
+      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+                          gimple_expr_type (def_stmt),
+                          gimple_assign_rhs1 (def_stmt));
+      break;
+
+    case tcc_binary:
+      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
+                          gimple_expr_type (def_stmt),
+                          gimple_assign_rhs1 (def_stmt),
+                          gimple_assign_rhs2 (def_stmt));
+      break;
+
+    default:;
+    }
+  if (expr == NULL_TREE)
+    return vn->valnum;
+
+  /* Cache the expression.  */
+  vn->expr = expr;
+
+  return expr;
+}
+
+
 /* Free a phi operation structure VP.  */
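For illustration, here is a minimal standalone C model of the lookup order vn_get_expr_for implements: constant value number first, then a cached representative expression, then the name itself. Nothing below is GCC code; the table, names, and the "negative valnum means constant" encoding are invented.

    #include <stdio.h>

    enum { N_NAMES = 4 };

    struct toy_vn_info
    {
      int valnum;           /* index of the leader name, or < 0 for a constant */
      const char *expr;     /* cached representative expression, or NULL */
    };

    static struct toy_vn_info info[N_NAMES] = {
      /* name 0 */ { 1, NULL },        /* value-numbered to name 1 */
      /* name 1 */ { 1, "a_2 + 1" },   /* leader with a cached expression */
      /* name 2 */ { -7, NULL },       /* value-numbered to the constant 7 */
      /* name 3 */ { 3, NULL },        /* leader, no expression known */
    };

    /* Mirror the constant -> cached expr -> name fallback order.  */
    static const char *
    toy_get_expr_for (int name, char *buf, int len)
    {
      struct toy_vn_info *vn = &info[name];
      if (vn->valnum < 0)                   /* constant value number */
        { snprintf (buf, len, "%d", -vn->valnum); return buf; }
      vn = &info[vn->valnum];               /* hop to the leader's info */
      if (vn->valnum < 0)
        { snprintf (buf, len, "%d", -vn->valnum); return buf; }
      if (vn->expr)                         /* cached expression */
        return vn->expr;
      snprintf (buf, len, "name_%d", name); /* fall back to the name */
      return buf;
    }

    int
    main (void)
    {
      char buf[32];
      for (int i = 0; i < N_NAMES; i++)
        printf ("name_%d -> %s\n", i, toy_get_expr_for (i, buf, sizeof buf));
      return 0;
    }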
 
 static void
@@ -236,11 +317,14 @@ vn_constant_eq (const void *p1, const void *p2)
   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
   const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
 
-  return expressions_equal_p (vc1->constant, vc2->constant);
+  if (vc1->hashcode != vc2->hashcode)
+    return false;
+
+  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
 }
 
 /* Hash table hash function for vn_constant_t.  */
-   
+
 static hashval_t
 vn_constant_hash (const void *p1)
 {
@@ -248,6 +332,24 @@ vn_constant_hash (const void *p1)
   return vc1->hashcode;
 }
 
+/* Lookup a value id for CONSTANT and return it.  If it does not
+   exist returns 0.  */
+
+unsigned int
+get_constant_value_id (tree constant)
+{
+  void **slot;
+  struct vn_constant_s vc;
+
+  vc.hashcode = vn_hash_constant_with_type (constant);
+  vc.constant = constant;
+  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
+                                   vc.hashcode, NO_INSERT);
+  if (slot)
+    return ((vn_constant_t)*slot)->value_id;
+  return 0;
+}
+
 /* Lookup a value id for CONSTANT, and if it does not exist, create a
    new one and return it.  If it does exist, return it.  */
 
@@ -255,21 +357,23 @@ unsigned int
 get_or_alloc_constant_value_id (tree constant)
 {
   void **slot;
-  vn_constant_t vc = XNEW (struct vn_constant_s);
-
-  vc->hashcode = iterative_hash_expr (constant, 0);
-  vc->constant = constant;
-  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
-                                   vc->hashcode, INSERT);
+  struct vn_constant_s vc;
+  vn_constant_t vcp;
+
+  vc.hashcode = vn_hash_constant_with_type (constant);
+  vc.constant = constant;
+  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
   if (*slot)
-    {
-      free (vc);
-      return ((vn_constant_t)*slot)->value_id;
-    }
-  vc->value_id = get_next_value_id ();
-  *slot = vc;
-  bitmap_set_bit (constant_value_ids, vc->value_id);
-  return vc->value_id;
+    return ((vn_constant_t)*slot)->value_id;
+
+  vcp = XNEW (struct vn_constant_s);
+  vcp->hashcode = vc.hashcode;
+  vcp->constant = constant;
+  vcp->value_id = get_next_value_id ();
+  *slot = (void *) vcp;
+  bitmap_set_bit (constant_value_ids, vcp->value_id);
+  return vcp->value_id;
 }
 
 /* Return true if V is a value id for a constant.  */
@@ -277,7 +381,7 @@ get_or_alloc_constant_value_id (tree constant)
 bool
 value_id_constant_p (unsigned int v)
 {
-  return bitmap_bit_p (constant_value_ids, v);  
+  return bitmap_bit_p (constant_value_ids, v);
 }
 
 /* Compare two reference operands P1 and P2 for equality.  Return true if
@@ -288,8 +392,9 @@ vn_reference_op_eq (const void *p1, const void *p2)
 {
   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
+
   return vro1->opcode == vro2->opcode
-    && vro1->type == vro2->type
+    && types_compatible_p (vro1->type, vro2->type)
     && expressions_equal_p (vro1->op0, vro2->op0)
     && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
 }
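The "compare cached hashcodes before comparing contents" early-out in vn_constant_eq is a standard hash-consing trick: most non-equal entries are rejected on one integer compare. A self-contained sketch with a fixed-size open-addressing table (table size, hash function and types are illustrative, not GCC's):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define TABLE_SIZE 64

    struct entry { uint32_t hash; const char *cst; unsigned value_id; };
    static struct entry table[TABLE_SIZE];
    static unsigned next_value_id = 1;

    /* Stand-in for vn_hash_constant_with_type: FNV-1a over the spelling.  */
    static uint32_t
    hash_constant (const char *cst)
    {
      uint32_t h = 2166136261u;
      for (; *cst; ++cst)
        h = (h ^ (uint8_t)*cst) * 16777619u;
      return h;
    }

    static unsigned
    get_or_alloc_value_id (const char *cst)
    {
      uint32_t h = hash_constant (cst);
      for (unsigned i = h % TABLE_SIZE; ; i = (i + 1) % TABLE_SIZE)
        {
          struct entry *e = &table[i];
          if (!e->cst)                       /* free slot: insert */
            {
              *e = (struct entry){ h, cst, next_value_id++ };
              return e->value_id;
            }
          /* Cheap reject on hashcode, full compare only on a match.  */
          if (e->hash == h && strcmp (e->cst, cst) == 0)
            return e->value_id;
        }
    }

    int
    main (void)
    {
      printf ("%u %u %u\n",
              get_or_alloc_value_id ("42"),
              get_or_alloc_value_id ("7"),
              get_or_alloc_value_id ("42"));  /* prints 1 2 1 */
      return 0;
    }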
@@ -298,10 +403,16 @@ vn_reference_op_eq (const void *p1, const void *p2)
 
 /* Compute the hash for a reference operand VRO1.  */
 
 static hashval_t
-vn_reference_op_compute_hash (const vn_reference_op_t vro1)
-{
-  return iterative_hash_expr (vro1->op0, vro1->opcode)
-    + iterative_hash_expr (vro1->op1, vro1->opcode);
+vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
+{
+  result = iterative_hash_hashval_t (vro1->opcode, result);
+  if (vro1->op0)
+    result = iterative_hash_expr (vro1->op0, result);
+  if (vro1->op1)
+    result = iterative_hash_expr (vro1->op1, result);
+  if (vro1->op2)
+    result = iterative_hash_expr (vro1->op2, result);
+  return result;
 }
 
 /* Return the hashcode for a given reference operation P1.  */
@@ -319,14 +430,13 @@ hashval_t
 vn_reference_compute_hash (const vn_reference_t vr1)
 {
   hashval_t result = 0;
-  tree v;
   int i;
   vn_reference_op_t vro;
 
-  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
-    result += iterative_hash_expr (v, 0);
   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
-    result += vn_reference_op_compute_hash (vro);
+    result = vn_reference_op_compute_hash (vro, result);
+  if (vr1->vuse)
+    result += SSA_NAME_VERSION (vr1->vuse);
 
   return result;
 }
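The iterative_hash_hashval_t/iterative_hash_expr combinators thread one running hash value through the opcode and every present operand of every reference op, and the VUSE version is then added on top. A minimal standalone analogue (the mixer below is a simple substitute, not GCC's):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t hashval_t;

    static hashval_t
    mix (hashval_t val, hashval_t seed)
    {
      seed ^= val + 0x9e3779b9u + (seed << 6) + (seed >> 2);
      return seed;
    }

    struct ref_op { int opcode; int op0, op1, op2; };  /* 0 means "absent" */

    /* Thread RESULT through one operand, like vn_reference_op_compute_hash.  */
    static hashval_t
    ref_op_hash (const struct ref_op *op, hashval_t result)
    {
      result = mix (op->opcode, result);
      if (op->op0) result = mix (op->op0, result);
      if (op->op1) result = mix (op->op1, result);
      if (op->op2) result = mix (op->op2, result);
      return result;
    }

    int
    main (void)
    {
      struct ref_op ops[2] = { { 40 /* component-ref-ish */, 7, 0, 0 },
                               { 1  /* var-decl-ish */,      3, 0, 0 } };
      hashval_t h = 0;
      for (int i = 0; i < 2; i++)
        h = ref_op_hash (&ops[i], h);  /* accumulate over the whole path */
      h += 5;                          /* then add the VUSE version, as above */
      printf ("hash = %u\n", h);
      return 0;
    }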
@@ -337,22 +447,26 @@ vn_reference_compute_hash (const vn_reference_t vr1)
 int
 vn_reference_eq (const void *p1, const void *p2)
 {
-  tree v;
   int i;
   vn_reference_op_t vro;
 
   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
   const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
 
-  if (vr1->vuses == vr2->vuses
-      && vr1->operands == vr2->operands)
-    return true;
+  /* Early out if this is not a hash collision.  */
+  if (vr1->hashcode != vr2->hashcode)
+    return false;
 
-  /* Impossible for them to be equivalent if they have different
-     number of vuses.  */
-  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
+  /* The VOP needs to be the same.  */
+  if (vr1->vuse != vr2->vuse)
     return false;
 
+  /* If the operands are the same we are done.  */
+  if (vr1->operands == vr2->operands)
+    return true;
+
   /* We require that address operands be canonicalized in a way that
      two memory references will have the same operands if they are
      equivalent.  */
@@ -360,166 +474,43 @@ vn_reference_eq (const void *p1, const void *p2)
       != VEC_length (vn_reference_op_s, vr2->operands))
     return false;
 
-  /* The memory state is more often different than the address of the
-     store/load, so check it first.  */
-  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
-    {
-      if (VEC_index (tree, vr2->vuses, i) != v)
-        return false;
-    }
-
   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
-    {
-      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
-                               vro))
-        return false;
-    }
-  return true;
-}
-
-/* Place the vuses from STMT into *result.  */
-
-static inline void
-vuses_to_vec (tree stmt, VEC (tree, gc) **result)
-{
-  ssa_op_iter iter;
-  tree vuse;
-
-  if (!stmt)
-    return;
-
-  VEC_reserve_exact (tree, gc, *result,
-                     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
-
-  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
-    VEC_quick_push (tree, *result, vuse);
-}
-
-
-/* Copy the VUSE names in STMT into a vector, and return
-   the vector.  */
-
-VEC (tree, gc) *
-copy_vuses_from_stmt (tree stmt)
-{
-  VEC (tree, gc) *vuses = NULL;
-
-  vuses_to_vec (stmt, &vuses);
-
-  return vuses;
-}
-
-/* Place the vdefs from STMT into *result.  */
-
-static inline void
-vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
-{
-  ssa_op_iter iter;
-  tree vdef;
-
-  if (!stmt)
-    return;
-
-  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
-
-  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
-    VEC_quick_push (tree, *result, vdef);
-}
-
-/* Copy the names of vdef results in STMT into a vector, and return
-   the vector.  */
-
-static VEC (tree, gc) *
-copy_vdefs_from_stmt (tree stmt)
-{
-  VEC (tree, gc) *vdefs = NULL;
-
-  vdefs_to_vec (stmt, &vdefs);
-
-  return vdefs;
-}
-
-/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
-static VEC (tree, gc) *shared_lookup_vops;
-
-/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
-   This function will overwrite the current SHARED_LOOKUP_VOPS
-   variable.  */
-
-VEC (tree, gc) *
-shared_vuses_from_stmt (tree stmt)
-{
-  VEC_truncate (tree, shared_lookup_vops, 0);
-  vuses_to_vec (stmt, &shared_lookup_vops);
+    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
+                             vro))
+      return false;
 
-  return shared_lookup_vops;
+  return true;
 }
 
-/* Copy the operations present in load/store/call REF into RESULT, a vector of
+/* Copy the operations present in load/store REF into RESULT, a vector of
    vn_reference_op_s's.  */
 
-static void
+void
 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
 {
-  /* Calls are different from all other reference operations.  */
-  if (TREE_CODE (ref) == CALL_EXPR)
-    {
-      vn_reference_op_s temp;
-      tree callfn;
-      call_expr_arg_iterator iter;
-      tree callarg;
-
-      /* Copy the call_expr opcode, type, function being called, and
-         arguments.  */
-      memset (&temp, 0, sizeof (temp));
-      temp.type = TREE_TYPE (ref);
-      temp.opcode = CALL_EXPR;
-      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-
-      /* We make no attempt to simplify the called function because
-         the typical &FUNCTION_DECL form is also used in function pointer
-         cases that become constant.  If we simplify the original to
-         FUNCTION_DECL but not the function pointer case (which can
-         happen because we have no fold functions that operate on
-         vn_reference_t), we will claim they are not equivalent.
-
-         An example of this behavior can be see if CALL_EXPR_FN below is
-         replaced with get_callee_fndecl and gcc.dg/tree-ssa/ssa-pre-13.c
-         is compiled.  */
-      callfn = CALL_EXPR_FN (ref);
-      temp.type = TREE_TYPE (callfn);
-      temp.opcode = TREE_CODE (callfn);
-      temp.op0 = callfn;
-      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-
-      FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
-        {
-          memset (&temp, 0, sizeof (temp));
-          temp.type = TREE_TYPE (callarg);
-          temp.opcode = TREE_CODE (callarg);
-          temp.op0 = callarg;
-          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-        }
-      return;
-    }
-
   if (TREE_CODE (ref) == TARGET_MEM_REF)
     {
       vn_reference_op_s temp;
+      tree base;
+
+      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
+      if (!base)
+        base = build_int_cst (ptr_type_node, 0);
 
       memset (&temp, 0, sizeof (temp));
       /* We do not care for spurious type qualifications.  */
       temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
       temp.opcode = TREE_CODE (ref);
-      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
-      temp.op1 = TMR_INDEX (ref);
+      temp.op0 = TMR_INDEX (ref);
+      temp.op1 = TMR_STEP (ref);
+      temp.op2 = TMR_OFFSET (ref);
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
 
       memset (&temp, 0, sizeof (temp));
       temp.type = NULL_TREE;
-      temp.opcode = TREE_CODE (ref);
-      temp.op0 = TMR_STEP (ref);
-      temp.op1 = TMR_OFFSET (ref);
+      temp.opcode = TREE_CODE (base);
+      temp.op0 = base;
+      temp.op1 = TMR_ORIGINAL (ref);
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
       return;
     }
@@ -538,11 +529,13 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
       switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
-       case MISALIGNED_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
+       case MISALIGNED_INDIRECT_REF:
+         temp.op0 = TREE_OPERAND (ref, 1);
+         break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
@@ -553,28 +546,27 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
-#if FIXME
+         temp.op0 = TREE_OPERAND (ref, 1);
+         temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
            member size as operand.  Do so only if we are doing
            expression insertion (during FRE), as PRE currently gets
            confused with this.  */
          if (may_insert
-             && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
-             && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
-             && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
-           temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
-         else
-#endif
-           /* Record field as operand.  */
-           temp.op0 = TREE_OPERAND (ref, 1);
-         temp.op1 = TREE_OPERAND (ref, 2);
+             && temp.op1 == NULL_TREE
+             && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
+             && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
+             && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
+             && host_integerp (DECL_SIZE (temp.op0), 0))
+           temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
-         temp.op1 = TREE_OPERAND (ref, 2);
-         temp.op2 = TREE_OPERAND (ref, 3);
+         /* Always record lower bounds and element size.  */
+         temp.op1 = array_ref_low_bound (ref);
+         temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
@@ -589,6 +581,13 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
        case SSA_NAME:
          temp.op0 = ref;
          break;
+       case ADDR_EXPR:
+         if (is_gimple_min_invariant (ref))
+           {
+             temp.op0 = ref;
+             break;
+           }
+         /* Fallthrough.  */
          /* These are only interesting for their operands, their
            existence, and their type.  They will never be the last
            ref in the chain of references (IE they require an
@@ -597,21 +596,211 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
-       case ADDR_EXPR:
          break;
        default:
          gcc_unreachable ();
-
        }
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
 
-      if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
+      if (REFERENCE_CLASS_P (ref)
+         || (TREE_CODE (ref) == ADDR_EXPR
+             && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
       else
        ref = NULL_TREE;
     }
 }
 
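copy_reference_ops_from_ref walks a reference from its outermost component down to the base, pushing one operand record per level. A standalone sketch of the same flattening over a toy expression tree (codes, fields and the struct layout are all invented):

    #include <stdio.h>

    enum toy_code { VAR, INDIRECT, FIELD, INDEX };

    struct toy_ref
    {
      enum toy_code code;
      const char *op0;           /* field name, index, or variable */
      struct toy_ref *inner;     /* next inner reference, NULL at the base */
    };

    /* Emit one operand record per level, outermost first, the way the
       function above follows TREE_OPERAND (ref, 0) until the base.  */
    static void
    flatten (struct toy_ref *ref)
    {
      static const char *const names[] = { "VAR", "INDIRECT", "FIELD", "INDEX" };
      for (; ref; ref = ref->inner)
        printf ("  { %s, %s }\n", names[ref->code], ref->op0 ? ref->op0 : "-");
    }

    int
    main (void)
    {
      /* p->a.b[i]: INDEX(i) around FIELD(b) around FIELD(a) around
         INDIRECT around VAR(p).  */
      struct toy_ref p   = { VAR, "p", NULL };
      struct toy_ref ind = { INDIRECT, NULL, &p };
      struct toy_ref a   = { FIELD, "a", &ind };
      struct toy_ref b   = { FIELD, "b", &a };
      struct toy_ref idx = { INDEX, "i", &b };
      flatten (&idx);
      return 0;
    }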
+/* Build an alias-oracle reference abstraction in *REF from the vn_reference
+   operands in *OPS, the reference alias set SET and the reference type TYPE.
+   Return true if something useful was produced.  */
+
+bool
+ao_ref_init_from_vn_reference (ao_ref *ref,
+                               alias_set_type set, tree type,
+                               VEC (vn_reference_op_s, heap) *ops)
+{
+  vn_reference_op_t op;
+  unsigned i;
+  tree base = NULL_TREE;
+  tree *op0_p = &base;
+  HOST_WIDE_INT offset = 0;
+  HOST_WIDE_INT max_size;
+  HOST_WIDE_INT size = -1;
+  tree size_tree = NULL_TREE;
+
+  /* First get the final access size from just the outermost expression.  */
+  op = VEC_index (vn_reference_op_s, ops, 0);
+  if (op->opcode == COMPONENT_REF)
+    {
+      if (TREE_CODE (op->op0) == INTEGER_CST)
+        size_tree = op->op0;
+      else
+        size_tree = DECL_SIZE (op->op0);
+    }
+  else if (op->opcode == BIT_FIELD_REF)
+    size_tree = op->op0;
+  else
+    {
+      enum machine_mode mode = TYPE_MODE (type);
+      if (mode == BLKmode)
+        size_tree = TYPE_SIZE (type);
+      else
+        size = GET_MODE_BITSIZE (mode);
+    }
+  if (size_tree != NULL_TREE)
+    {
+      if (!host_integerp (size_tree, 1))
+        size = -1;
+      else
+        size = TREE_INT_CST_LOW (size_tree);
+    }
+
+  /* Initially, maxsize is the same as the accessed element size.
+     In the following it will only grow (or become -1).  */
+  max_size = size;
+
+  /* Compute cumulative bit-offset for nested component-refs and array-refs,
+     and find the ultimate containing object.  */
+  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
+    {
+      switch (op->opcode)
+        {
+        /* These may be in the reference ops, but we cannot do anything
+           sensible with them here.  */
+        case CALL_EXPR:
+        case ADDR_EXPR:
+          return false;
+
+        /* Record the base objects.  */
+        case ALIGN_INDIRECT_REF:
+        case INDIRECT_REF:
+          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
+          op0_p = &TREE_OPERAND (*op0_p, 0);
+          break;
+
+        case MISALIGNED_INDIRECT_REF:
+          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
+                           NULL_TREE, op->op0);
+          op0_p = &TREE_OPERAND (*op0_p, 0);
+          break;
+
+        case VAR_DECL:
+        case PARM_DECL:
+        case RESULT_DECL:
+        case SSA_NAME:
+          *op0_p = op->op0;
+          break;
+
+        /* And now the usual component-reference style ops.  */
+        case BIT_FIELD_REF:
+          offset += tree_low_cst (op->op1, 0);
+          break;
+
+        case COMPONENT_REF:
+          {
+            tree field = op->op0;
+            /* We do not have a complete COMPONENT_REF tree here so we
+               cannot use component_ref_field_offset.  Do the interesting
+               parts manually.  */
+
+            /* Our union trick, done for offset zero only.  */
+            if (TREE_CODE (field) == INTEGER_CST)
+              ;
+            else if (op->op1
+                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+              max_size = -1;
+            else
+              {
+                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+                           * BITS_PER_UNIT);
+                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
+              }
+            break;
+          }
+
+        case ARRAY_RANGE_REF:
+        case ARRAY_REF:
+          /* We recorded the lower bound and the element size.  */
+          if (!host_integerp (op->op0, 0)
+              || !host_integerp (op->op1, 0)
+              || !host_integerp (op->op2, 0))
+            max_size = -1;
+          else
+            {
+              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
+              hindex -= TREE_INT_CST_LOW (op->op1);
+              hindex *= TREE_INT_CST_LOW (op->op2);
+              hindex *= BITS_PER_UNIT;
+              offset += hindex;
+            }
+          break;
+
+        case REALPART_EXPR:
+          break;
+
+        case IMAGPART_EXPR:
+          offset += size;
+          break;
+
+        case VIEW_CONVERT_EXPR:
+          break;
+
+        case STRING_CST:
+        case INTEGER_CST:
+        case COMPLEX_CST:
+        case VECTOR_CST:
+        case REAL_CST:
+        case CONSTRUCTOR:
+        case CONST_DECL:
+          return false;
+
+        default:
+          return false;
+        }
+    }
+
+  if (base == NULL_TREE)
+    return false;
+
+  ref->ref = NULL_TREE;
+  ref->base = base;
+  ref->offset = offset;
+  ref->size = size;
+  ref->max_size = max_size;
+  ref->ref_alias_set = set;
+  ref->base_alias_set = -1;
+
+  return true;
+}
+
+/* Copy the operations present in load/store/call REF into RESULT, a vector of
+   vn_reference_op_s's.  */
+
+void
+copy_reference_ops_from_call (gimple call,
+                              VEC(vn_reference_op_s, heap) **result)
+{
+  vn_reference_op_s temp;
+  unsigned i;
+
+  /* Copy the type, opcode, function being called and static chain.  */
+  memset (&temp, 0, sizeof (temp));
+  temp.type = gimple_call_return_type (call);
+  temp.opcode = CALL_EXPR;
+  temp.op0 = gimple_call_fn (call);
+  temp.op1 = gimple_call_chain (call);
+  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+  /* Copy the call arguments.  As they can be references as well,
+     just chain them together.  */
+  for (i = 0; i < gimple_call_num_args (call); ++i)
+    {
+      tree callarg = gimple_call_arg (call, i);
+      copy_reference_ops_from_ref (callarg, result);
+    }
+}
+
 /* Create a vector of vn_reference_op_s structures from REF, a
    REFERENCE_CLASS_P tree.  The vector is not shared.  */
 
@@ -624,22 +813,151 @@ create_reference_ops_from_ref (tree ref)
   return result;
 }
 
-static VEC(vn_reference_op_s, heap) *shared_lookup_references;
-
-/* Create a vector of vn_reference_op_s structures from REF, a
-   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
-   this function.  */
+/* Create a vector of vn_reference_op_s structures from CALL, a
+   call statement.  The vector is not shared.  */
 
 static VEC(vn_reference_op_s, heap) *
-shared_reference_ops_from_ref (tree ref)
+create_reference_ops_from_call (gimple call)
 {
-  if (!ref)
-    return NULL;
-  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
-  copy_reference_ops_from_ref (ref, &shared_lookup_references);
-  return shared_lookup_references;
+  VEC (vn_reference_op_s, heap) *result = NULL;
+
+  copy_reference_ops_from_call (call, &result);
+  return result;
+}
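The ARRAY_REF case above contributes (index - low_bound) * element_size * BITS_PER_UNIT to the running bit offset, and COMPONENT_REF contributes the field's byte offset times BITS_PER_UNIT plus its bit offset. The same arithmetic in isolation, with invented toy values:

    #include <stdio.h>

    #define BITS_PER_UNIT 8

    struct array_step { long index, low_bound, elt_size_units; };
    struct field_step { long byte_offset, bit_offset; };

    int
    main (void)
    {
      long offset = 0;

      /* a[3] with lower bound 1 and 16-byte elements.  */
      struct array_step a = { 3, 1, 16 };
      offset += (a.index - a.low_bound) * a.elt_size_units * BITS_PER_UNIT;

      /* field f at byte 4 within the element.  */
      struct field_step f = { 4, 0 };
      offset += f.byte_offset * BITS_PER_UNIT + f.bit_offset;

      printf ("bit offset = %ld\n", offset);  /* 2*16*8 + 4*8 = 288 */
      return 0;
    }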
+/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
+   *I_P to point to the last element of the replacement.  */
+
+void
+vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
+                            unsigned int *i_p)
+{
+  VEC(vn_reference_op_s, heap) *mem = NULL;
+  vn_reference_op_t op;
+  unsigned int i = *i_p;
+  unsigned int j;
+
+  /* Get ops for the addressed object.  */
+  op = VEC_index (vn_reference_op_s, *ops, i);
+  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
+     around it to avoid later ICEs.  */
+  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
+      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
+    {
+      vn_reference_op_s aref;
+      tree dom;
+      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
+      aref.opcode = ARRAY_REF;
+      aref.op0 = integer_zero_node;
+      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
+          && TYPE_MIN_VALUE (dom))
+        aref.op0 = TYPE_MIN_VALUE (dom);
+      aref.op1 = aref.op0;
+      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
+      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
+    }
+  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
+
+  /* Do the replacement - we should have at least one op in mem now.  */
+  if (VEC_length (vn_reference_op_s, mem) == 1)
+    {
+      VEC_replace (vn_reference_op_s, *ops, i - 1,
+                   VEC_index (vn_reference_op_s, mem, 0));
+      VEC_ordered_remove (vn_reference_op_s, *ops, i);
+      i--;
+    }
+  else if (VEC_length (vn_reference_op_s, mem) == 2)
+    {
+      VEC_replace (vn_reference_op_s, *ops, i - 1,
+                   VEC_index (vn_reference_op_s, mem, 0));
+      VEC_replace (vn_reference_op_s, *ops, i,
+                   VEC_index (vn_reference_op_s, mem, 1));
+    }
+  else if (VEC_length (vn_reference_op_s, mem) > 2)
+    {
+      VEC_replace (vn_reference_op_s, *ops, i - 1,
+                   VEC_index (vn_reference_op_s, mem, 0));
+      VEC_replace (vn_reference_op_s, *ops, i,
+                   VEC_index (vn_reference_op_s, mem, 1));
+      /* ??? There is no VEC_splice.  */
+      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
+        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
+    }
+  else
+    gcc_unreachable ();
+
+  VEC_free (vn_reference_op_s, heap, mem);
+  *i_p = i;
+}
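vn_reference_fold_indirect rewrites the (INDIRECT_REF, ADDR_EXPR) operand pair for *&obj into the operands of obj itself, overwriting the two slots and splicing in any extras. A minimal array-splice sketch of that rewrite (toy string "opcodes", fixed buffers, and it assumes at least two replacement ops):

    #include <stdio.h>
    #include <string.h>

    enum { MAXOPS = 16 };

    /* Overwrite ops[i-1] and ops[i] with the first two replacement ops,
       shifting the tail to make room for the rest, like the
       VEC_replace/VEC_safe_insert sequence above.  */
    static void
    splice (const char *ops[], int *n, int i, const char *mem[], int m)
    {
      int extra = m - 2;  /* assumes m >= 2 */
      memmove (&ops[i + 1 + extra], &ops[i + 1], (*n - i - 1) * sizeof *ops);
      for (int k = 0; k < m; k++)
        ops[i - 1 + k] = mem[k];
      *n += extra;
    }

    int
    main (void)
    {
      const char *ops[MAXOPS] = { "FIELD f", "INDIRECT", "ADDR &s", "end" };
      int n = 4;
      const char *mem[] = { "FIELD b", "FIELD a", "VAR s" };  /* ops of s.a.b */

      splice (ops, &n, 2, mem, 3);   /* fold *&s.a.b at positions 1..2 */
      for (int k = 0; k < n; k++)
        printf ("%s\n", ops[k]);
      return 0;
    }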
+/* Optimize the reference REF to a constant if possible or return
+   NULL_TREE if not.  */
+
+tree
+fully_constant_vn_reference_p (vn_reference_t ref)
+{
+  VEC (vn_reference_op_s, heap) *operands = ref->operands;
+  vn_reference_op_t op;
+
+  /* Try to simplify the translated expression if it is
+     a call to a builtin function with at most two arguments.  */
+  op = VEC_index (vn_reference_op_s, operands, 0);
+  if (op->opcode == CALL_EXPR
+      && TREE_CODE (op->op0) == ADDR_EXPR
+      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
+      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
+      && VEC_length (vn_reference_op_s, operands) >= 2
+      && VEC_length (vn_reference_op_s, operands) <= 3)
+    {
+      vn_reference_op_t arg0, arg1 = NULL;
+      bool anyconst = false;
+      arg0 = VEC_index (vn_reference_op_s, operands, 1);
+      if (VEC_length (vn_reference_op_s, operands) > 2)
+        arg1 = VEC_index (vn_reference_op_s, operands, 2);
+      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
+          || (arg0->opcode == ADDR_EXPR
+              && is_gimple_min_invariant (arg0->op0)))
+        anyconst = true;
+      if (arg1
+          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
+              || (arg1->opcode == ADDR_EXPR
+                  && is_gimple_min_invariant (arg1->op0))))
+        anyconst = true;
+      if (anyconst)
+        {
+          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
+                                         arg1 ? 2 : 1,
+                                         arg0->op0,
+                                         arg1 ? arg1->op0 : NULL);
+          if (folded
+              && TREE_CODE (folded) == NOP_EXPR)
+            folded = TREE_OPERAND (folded, 0);
+          if (folded
+              && is_gimple_min_invariant (folded))
+            return folded;
+        }
+    }
+
+  /* Simplify reads from constant strings.  */
+  else if (op->opcode == ARRAY_REF
+           && TREE_CODE (op->op0) == INTEGER_CST
+           && integer_zerop (op->op1)
+           && VEC_length (vn_reference_op_s, operands) == 2)
+    {
+      vn_reference_op_t arg0;
+      arg0 = VEC_index (vn_reference_op_s, operands, 1);
+      if (arg0->opcode == STRING_CST
+          && (TYPE_MODE (op->type)
+              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
+          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
+          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
+          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
+        return build_int_cst_type (op->type,
+                                   (TREE_STRING_POINTER (arg0->op0)
+                                    [TREE_INT_CST_LOW (op->op0)]));
+    }
+
+  return NULL_TREE;
+}
 
 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
    structures into their value numbers.  This is done in-place, and
@@ -649,7 +967,7 @@ static VEC (vn_reference_op_s, heap) *
 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
 {
   vn_reference_op_t vro;
-  int i;
+  unsigned int i;
 
   for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
     {
@@ -661,85 +979,55 @@ valueize_refs (VEC (vn_reference_op_s, heap) *orig)
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
+         /* If it transforms from an SSA_NAME to an address, fold with
+            a preceding indirect reference.  */
+         if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
+             && VEC_index (vn_reference_op_s,
+                           orig, i - 1)->opcode == INDIRECT_REF)
+           {
+             vn_reference_fold_indirect (&orig, &i);
+             continue;
+           }
        }
-      /* TODO: Do we want to valueize op2 and op1 of
-        ARRAY_REF/COMPONENT_REF for Ada */
-
+      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+       vro->op1 = SSA_VAL (vro->op1);
+      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
+       vro->op2 = SSA_VAL (vro->op2);
     }
 
   return orig;
 }
 
-/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
-   their value numbers.  This is done in-place, and the vector passed
-   in is returned.  */
-
-static VEC (tree, gc) *
-valueize_vuses (VEC (tree, gc) *orig)
-{
-  bool made_replacement = false;
-  tree vuse;
-  int i;
-
-  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
-    {
-      if (vuse != SSA_VAL (vuse))
-        {
-          made_replacement = true;
-          VEC_replace (tree, orig, i, SSA_VAL (vuse));
-        }
-    }
+static VEC(vn_reference_op_s, heap) *shared_lookup_references;
 
-  if (made_replacement && VEC_length (tree, orig) > 1)
-    sort_vuses (orig);
+/* Create a vector of vn_reference_op_s structures from REF, a
+   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
+   this function.  */
 
-  return orig;
+static VEC(vn_reference_op_s, heap) *
+valueize_shared_reference_ops_from_ref (tree ref)
+{
+  if (!ref)
+    return NULL;
+  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+  copy_reference_ops_from_ref (ref, &shared_lookup_references);
+  shared_lookup_references = valueize_refs (shared_lookup_references);
+  return shared_lookup_references;
 }
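The string case in fully_constant_vn_reference_p above folds an in-bounds, byte-sized read such as "abc"[1] into the character constant. The same fold in plain C:

    #include <stdio.h>
    #include <string.h>

    /* Fold a read from a constant string at a constant index: only
       in-bounds reads are replaced by the character value.  */
    static int
    fold_string_read (const char *str, size_t len, long index, char *out)
    {
      if (index < 0 || (size_t) index >= len)  /* like the compare_tree_int check */
        return 0;                              /* not foldable */
      *out = str[index];
      return 1;
    }

    int
    main (void)
    {
      const char *s = "value-numbering";
      char c;
      if (fold_string_read (s, strlen (s) + 1, 6, &c))
        printf ("folded to '%c'\n", c);        /* prints 'n' */
      return 0;
    }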
-/* Return the single reference statement defining all virtual uses
-   in VUSES or NULL_TREE, if there are multiple defining statements.
-   Take into account only definitions that alias REF if following
-   back-edges.  */
+/* Create a vector of vn_reference_op_s structures from CALL, a
+   call statement.  The vector is shared among all callers of
+   this function.  */
 
-static tree
-get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
+static VEC(vn_reference_op_s, heap) *
+valueize_shared_reference_ops_from_call (gimple call)
 {
-  tree def_stmt, vuse;
-  unsigned int i;
-
-  gcc_assert (VEC_length (tree, vuses) >= 1);
-
-  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
-  if (TREE_CODE (def_stmt) == PHI_NODE)
-    {
-      /* We can only handle lookups over PHI nodes for a single
-         virtual operand.  */
-      if (VEC_length (tree, vuses) == 1)
-        {
-          def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
-          goto cont;
-        }
-      else
-        return NULL_TREE;
-    }
-
-  /* Verify each VUSE reaches the same defining stmt.  */
-  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
-    {
-      tree tmp = SSA_NAME_DEF_STMT (vuse);
-      if (tmp != def_stmt)
-        return NULL_TREE;
-    }
-
-  /* Now see if the definition aliases ref, and loop until it does.  */
-cont:
-  while (def_stmt
-         && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-         && !get_call_expr_in (def_stmt)
-         && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
-    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
-
-  return def_stmt;
+  if (!call)
+    return NULL;
+  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+  copy_reference_ops_from_call (call, &shared_lookup_references);
+  shared_lookup_references = valueize_refs (shared_lookup_references);
+  return shared_lookup_references;
 }
 
 /* Lookup a SCCVN reference operation VR in the current hash table.
@@ -765,10 +1053,204 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
       *vnresult = (vn_reference_t)*slot;
       return ((vn_reference_t)*slot)->result;
     }
- 
-  return NULL_TREE;
-}
+  return NULL_TREE;
+}
+
+static tree *last_vuse_ptr;
+
+/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
+   with the current VUSE and performs the expression lookup.  */
+
+static void *
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+{
+  vn_reference_t vr = (vn_reference_t)vr_;
+  void **slot;
+  hashval_t hash;
+
+  if (last_vuse_ptr)
+    *last_vuse_ptr = vuse;
+
+  /* Fixup vuse and hash.  */
+  if (vr->vuse)
+    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
+  vr->vuse = SSA_VAL (vuse);
+  if (vr->vuse)
+    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
+
+  hash = vr->hashcode;
+  slot = htab_find_slot_with_hash (current_info->references, vr,
+                                   hash, NO_INSERT);
+  if (!slot && current_info == optimistic_info)
+    slot = htab_find_slot_with_hash (valid_info->references, vr,
+                                     hash, NO_INSERT);
+  if (slot)
+    return *slot;
+
+  return NULL;
+}
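vn_reference_lookup_2 re-keys the same vn_reference_s for each VUSE on the walk. Because the VUSE version was combined into the hash by plain addition, it can be subtracted out and the new version added, with no rehash of the operands. The fixup in isolation:

    #include <stdio.h>

    struct key { unsigned hash_ops; unsigned vuse_version; unsigned hash; };

    /* O(1) re-keying: hash - old_version + new_version.  */
    static void
    set_vuse (struct key *k, unsigned new_version)
    {
      k->hash = k->hash - k->vuse_version + new_version;
      k->vuse_version = new_version;
    }

    int
    main (void)
    {
      struct key k = { 0xdeadbeefu, 17, 0xdeadbeefu + 17 };
      set_vuse (&k, 42);
      printf ("%s\n", k.hash == k.hash_ops + 42 ? "rehash ok" : "bug");
      return 0;
    }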
+/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
+   from the statement defining VUSE and if not successful tries to
+   translate *REFP and VR_ through an aggregate copy at the definition
+   of VUSE.  */
+
+static void *
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+{
+  vn_reference_t vr = (vn_reference_t)vr_;
+  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+  tree fndecl;
+  tree base;
+  HOST_WIDE_INT offset, maxsize;
+
+  base = ao_ref_base (ref);
+  offset = ref->offset;
+  maxsize = ref->max_size;
+
+  /* If we cannot constrain the size of the reference we cannot
+     test if anything kills it.  */
+  if (maxsize == -1)
+    return (void *)-1;
+
+  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
+     from that definition.
+     1) Memset.  */
+  if (is_gimple_reg_type (vr->type)
+      && is_gimple_call (def_stmt)
+      && (fndecl = gimple_call_fndecl (def_stmt))
+      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+      && integer_zerop (gimple_call_arg (def_stmt, 1))
+      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
+    {
+      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
+      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+      if ((unsigned HOST_WIDE_INT)size2 / 8
+          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+          && operand_equal_p (base, base2, 0)
+          && offset2 <= offset
+          && offset2 + size2 >= offset + maxsize)
+        {
+          tree val = fold_convert (vr->type, integer_zero_node);
+          unsigned int value_id = get_or_alloc_constant_value_id (val);
+          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
+                                             val, value_id);
+        }
+    }
+
+  /* 2) Assignment from an empty CONSTRUCTOR.  */
+  else if (is_gimple_reg_type (vr->type)
+           && gimple_assign_single_p (def_stmt)
+           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
+           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
+    {
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+                                       &offset2, &size2, &maxsize2);
+      if (operand_equal_p (base, base2, 0)
+          && offset2 <= offset
+          && offset2 + size2 >= offset + maxsize)
+        {
+          tree val = fold_convert (vr->type, integer_zero_node);
+          unsigned int value_id = get_or_alloc_constant_value_id (val);
+          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
+                                             val, value_id);
+        }
+    }
+
+  /* For aggregate copies translate the reference through them if
+     the copy kills ref.  */
+  else if (gimple_assign_single_p (def_stmt)
+           && (DECL_P (gimple_assign_rhs1 (def_stmt))
+               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
+               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
+    {
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      int i, j;
+      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
+      vn_reference_op_t vro;
+      ao_ref r;
+
+      /* See if the assignment kills REF.  */
+      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+                                       &offset2, &size2, &maxsize2);
+      if (!operand_equal_p (base, base2, 0)
+          || offset2 > offset
+          || offset2 + size2 < offset + maxsize)
+        return (void *)-1;
+
+      /* Find the common base of ref and the lhs.  */
+      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
+      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
+      j = VEC_length (vn_reference_op_s, lhs) - 1;
+      while (j >= 0 && i >= 0
+             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
+                                    VEC_index (vn_reference_op_s, lhs, j)))
+        {
+          i--;
+          j--;
+        }
+
+      VEC_free (vn_reference_op_s, heap, lhs);
+      /* i now points to the first additional op.
+         ??? LHS may not be completely contained in VR, one or more
+         VIEW_CONVERT_EXPRs could be in its way.  We could at least
+         try handling outermost VIEW_CONVERT_EXPRs.  */
+      if (j != -1)
+        return (void *)-1;
+
+      /* Now re-write REF to be based on the rhs of the assignment.  */
+      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
+      /* We need to pre-pend vr->operands[0..i] to rhs.  */
+      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
+          > VEC_length (vn_reference_op_s, vr->operands))
+        {
+          VEC (vn_reference_op_s, heap) *old = vr->operands;
+          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
+                         i + 1 + VEC_length (vn_reference_op_s, rhs));
+          if (old == shared_lookup_references
+              && vr->operands != old)
+            shared_lookup_references = NULL;
+        }
+      else
+        VEC_truncate (vn_reference_op_s, vr->operands,
+                      i + 1 + VEC_length (vn_reference_op_s, rhs));
+      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
+        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
+      VEC_free (vn_reference_op_s, heap, rhs);
+      vr->hashcode = vn_reference_compute_hash (vr);
+
+      /* Adjust *ref from the new operands.  */
+      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+        return (void *)-1;
+      /* This can happen with bitfields.  */
+      if (ref->size != r.size)
+        return (void *)-1;
+      *ref = r;
+
+      /* Do not update last seen VUSE after translating.  */
+      last_vuse_ptr = NULL;
+
+      /* Keep looking for the adjusted *REF / VR pair.  */
+      return NULL;
+    }
+
+  /* Bail out and stop walking.  */
+  return (void *)-1;
+}
 
 /* Lookup a reference operation by its parts, in the current hash table.
    Returns the resulting value number if it exists in the hash table,
@@ -776,21 +1258,53 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
    vn_reference_t stored in the hashtable if something is found.  */
 
 tree
-vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
+vn_reference_lookup_pieces (tree vuse,
+                            alias_set_type set, tree type,
                             VEC (vn_reference_op_s, heap) *operands,
-                            vn_reference_t *vnresult)
+                            vn_reference_t *vnresult, bool maywalk)
 {
   struct vn_reference_s vr1;
-  tree result;
-  if (vnresult)
-    *vnresult = NULL;
-
-  vr1.vuses = valueize_vuses (vuses);
-  vr1.operands = valueize_refs (operands);
+  vn_reference_t tmp;
+  tree cst;
+
+  if (!vnresult)
+    vnresult = &tmp;
+  *vnresult = NULL;
+
+  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
+                 VEC_length (vn_reference_op_s, operands));
+  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
+          VEC_address (vn_reference_op_s, operands),
+          sizeof (vn_reference_op_s)
+          * VEC_length (vn_reference_op_s, operands));
+  vr1.operands = operands = shared_lookup_references
+    = valueize_refs (shared_lookup_references);
+  vr1.type = type;
+  vr1.set = set;
   vr1.hashcode = vn_reference_compute_hash (&vr1);
-  result = vn_reference_lookup_1 (&vr1, vnresult);
+  if ((cst = fully_constant_vn_reference_p (&vr1)))
+    return cst;
 
-  return result;
+  vn_reference_lookup_1 (&vr1, vnresult);
+  if (!*vnresult
+      && maywalk
+      && vr1.vuse)
+    {
+      ao_ref r;
+      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
+        *vnresult =
+          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+                                                  vn_reference_lookup_2,
+                                                  vn_reference_lookup_3, &vr1);
+      if (vr1.operands != operands)
+        VEC_free (vn_reference_op_s, heap, vr1.operands);
+    }
+
+  if (*vnresult)
+    return (*vnresult)->result;
+
+  return NULL_TREE;
 }
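Cases 1) and 2) in vn_reference_lookup_3 above derive zero for a load only when the memset or empty-constructor store covers the whole read: same base, offset2 <= offset, and offset2 + size2 >= offset + maxsize. The check in isolation (bit units, toy values):

    #include <stdio.h>

    struct extent { long offset, size; };   /* same base assumed */

    static int
    store_covers_load (struct extent st, struct extent ld)
    {
      return st.offset <= ld.offset
             && st.offset + st.size >= ld.offset + ld.size;
    }

    int
    main (void)
    {
      struct extent store = { 0, 512 };   /* memset of 64 bytes at base */
      struct extent load1 = { 128, 32 };  /* word inside the memset */
      struct extent load2 = { 480, 64 };  /* runs past the memset */
      printf ("%d %d\n", store_covers_load (store, load1),
              store_covers_load (store, load2));   /* 1 0 */
      return 0;
    }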
 /* Lookup OP in the current hash table, and return the resulting value
@@ -800,41 +1314,47 @@ vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
    stored in the hashtable if one exists.  */
 
 tree
-vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
+vn_reference_lookup (tree op, tree vuse, bool maywalk,
                      vn_reference_t *vnresult)
 {
+  VEC (vn_reference_op_s, heap) *operands;
   struct vn_reference_s vr1;
-  tree result, def_stmt;
+  tree cst;
+
   if (vnresult)
     *vnresult = NULL;
 
-  vr1.vuses = valueize_vuses (vuses);
-  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
+  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
+  vr1.type = TREE_TYPE (op);
+  vr1.set = get_alias_set (op);
   vr1.hashcode = vn_reference_compute_hash (&vr1);
-  result = vn_reference_lookup_1 (&vr1, vnresult);
+  if ((cst = fully_constant_vn_reference_p (&vr1)))
+    return cst;
 
-  /* If there is a single defining statement for all virtual uses, we can
-     use that, following virtual use-def chains.  */
-  if (!result
-      && maywalk
-      && vr1.vuses
-      && VEC_length (tree, vr1.vuses) >= 1
-      && !get_call_expr_in (op)
-      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
-      && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-      /* If there is a call involved, op must be assumed to
-         be clobbered.  */
-      && !get_call_expr_in (def_stmt))
-    {
-      /* We are now at an aliasing definition for the vuses we want to
-         look up.  Re-do the lookup with the vdefs for this stmt.  */
-      vdefs_to_vec (def_stmt, &vuses);
-      vr1.vuses = valueize_vuses (vuses);
-      vr1.hashcode = vn_reference_compute_hash (&vr1);
-      result = vn_reference_lookup_1 (&vr1, vnresult);
+  if (maywalk
+      && vr1.vuse)
+    {
+      vn_reference_t wvnresult;
+      ao_ref r;
+      ao_ref_init (&r, op);
+      wvnresult =
+        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
+      if (vr1.operands != operands)
+        VEC_free (vn_reference_op_s, heap, vr1.operands);
+      if (wvnresult)
+        {
+          if (vnresult)
+            *vnresult = wvnresult;
+          return wvnresult->result;
+        }
+
+      return NULL_TREE;
     }
 
-  return result;
+  return vn_reference_lookup_1 (&vr1, vnresult);
 }
 
 
@@ -842,7 +1362,7 @@ vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
    RESULT, and return the resulting reference structure we created.  */
 
 vn_reference_t
-vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
+vn_reference_insert (tree op, tree result, tree vuse)
 {
   void **slot;
   vn_reference_t vr1;
@@ -852,8 +1372,10 @@ vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
     vr1->value_id = VN_INFO (result)->value_id;
   else
     vr1->value_id = get_or_alloc_constant_value_id (result);
-  vr1->vuses = valueize_vuses (vuses);
+  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
+  vr1->type = TREE_TYPE (op);
+  vr1->set = get_alias_set (op);
   vr1->hashcode = vn_reference_compute_hash (vr1);
   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
 
@@ -881,7 +1403,7 @@ vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
    structure we created.  */
 
 vn_reference_t
-vn_reference_insert_pieces (VEC (tree, gc) *vuses,
+vn_reference_insert_pieces (tree vuse,
+                            alias_set_type set, tree type,
                             VEC (vn_reference_op_s, heap) *operands,
                             tree result, unsigned int value_id)
 
@@ -890,9 +1412,11 @@ vn_reference_insert_pieces (VEC (tree, gc) *vuses,
   vn_reference_t vr1;
 
   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
-  vr1->value_id =  value_id;
-  vr1->vuses = valueize_vuses (vuses);
+  vr1->value_id = value_id;
+  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
   vr1->operands = valueize_refs (operands);
+  vr1->type = type;
+  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
   if (result && TREE_CODE (result) == SSA_NAME)
     result = SSA_VAL (result);
@@ -900,10 +1424,10 @@ vn_reference_insert_pieces (VEC (tree, gc) *vuses,
 
   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                    INSERT);
-  
+
   /* At this point we should have all the things inserted that we have
-     seen before, and we should never try inserting something that
-     already exists.  */
+     seen before, and we should never try inserting something that
+     already exists.  */
   gcc_assert (!*slot);
   if (*slot)
     free_reference (*slot);
@@ -914,10 +1438,10 @@ vn_reference_insert_pieces (VEC (tree, gc) *vuses,
 
 /* Compute and return the hash value for nary operation VBO1.  */
 
-inline hashval_t
+hashval_t
 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
 {
-  hashval_t hash = 0;
+  hashval_t hash;
   unsigned i;
 
   for (i = 0; i < vno1->length; ++i)
@@ -933,8 +1457,9 @@ vn_nary_op_compute_hash (const vn_nary_op_t vno1)
       vno1->op[1] = temp;
     }
 
+  hash = iterative_hash_hashval_t (vno1->opcode, 0);
   for (i = 0; i < vno1->length; ++i)
-    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
+    hash = iterative_hash_expr (vno1->op[i], hash);
 
   return hash;
 }
@@ -958,8 +1483,11 @@ vn_nary_op_eq (const void *p1, const void *p2)
   const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
   unsigned i;
 
+  if (vno1->hashcode != vno2->hashcode)
+    return false;
+
   if (vno1->opcode != vno2->opcode
-      || vno1->type != vno2->type)
+      || !types_compatible_p (vno1->type, vno2->type))
     return false;
 
   for (i = 0; i < vno1->length; ++i)
@@ -978,7 +1506,7 @@ vn_nary_op_eq (const void *p1, const void *p2)
 tree
 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                           tree type, tree op0, tree op1, tree op2,
-                          tree op3, vn_nary_op_t *vnresult) 
+                          tree op3, vn_nary_op_t *vnresult)
 {
   void **slot;
   struct vn_nary_op_s vno1;
@@ -1037,6 +1565,42 @@ vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
   return ((vn_nary_op_t)*slot)->result;
 }
 
+/* Lookup the rhs of STMT in the current hash table, and return the resulting
+   value number if it exists in the hash table.  Return NULL_TREE if
+   it does not exist in the hash table.  VNRESULT will contain the
+   vn_nary_op_t from the hashtable if it exists.  */
+
+tree
+vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
+{
+  void **slot;
+  struct vn_nary_op_s vno1;
+  unsigned i;
+
+  if (vnresult)
+    *vnresult = NULL;
+
+  vno1.opcode = gimple_assign_rhs_code (stmt);
+  vno1.length = gimple_num_ops (stmt) - 1;
+  vno1.type = gimple_expr_type (stmt);
+  for (i = 0; i < vno1.length; ++i)
+    vno1.op[i] = gimple_op (stmt, i + 1);
+  if (vno1.opcode == REALPART_EXPR
+      || vno1.opcode == IMAGPART_EXPR
+      || vno1.opcode == VIEW_CONVERT_EXPR)
+    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
+  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
+  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
+                                   NO_INSERT);
+  if (!slot && current_info == optimistic_info)
+    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
+                                     NO_INSERT);
+  if (!slot)
+    return NULL_TREE;
+  if (vnresult)
+    *vnresult = (vn_nary_op_t)*slot;
+  return ((vn_nary_op_t)*slot)->result;
+}
+
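vn_nary_op_compute_hash canonicalizes commutative operations by swapping the operands before hashing, so a+b and b+a receive one value number. A standalone sketch of that canonicalization (integer "operands" stand in for trees, and the mixer is a simple substitute):

    #include <stdio.h>
    #include <stdint.h>

    enum op { ADD, SUB };                 /* ADD commutes, SUB does not */

    static uint32_t
    mix (uint32_t val, uint32_t seed)
    {
      seed ^= val + 0x9e3779b9u + (seed << 6) + (seed >> 2);
      return seed;
    }

    static uint32_t
    nary_hash (enum op code, uint32_t op0, uint32_t op1)
    {
      if (code == ADD && op0 > op1)       /* canonicalize commutative ops */
        {
          uint32_t tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      uint32_t h = mix (code, 0);         /* opcode first, then operands */
      h = mix (op0, h);
      h = mix (op1, h);
      return h;
    }

    int
    main (void)
    {
      printf ("a+b vs b+a: %s\n",
              nary_hash (ADD, 3, 9) == nary_hash (ADD, 9, 3) ? "same" : "differ");
      printf ("a-b vs b-a: %s\n",
              nary_hash (SUB, 3, 9) == nary_hash (SUB, 9, 3) ? "same" : "differ");
      return 0;
    }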
 /* Insert a n-ary operation into the current hash table using its
    pieces.  Return the vn_nary_op_t structure we created and put in
    the hashtable.  */
 
@@ -1046,7 +1610,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                           tree type, tree op0, tree op1, tree op2,
                           tree op3, tree result,
-                          unsigned int value_id) 
+                          unsigned int value_id)
 {
   void **slot;
   vn_nary_op_t vno1;
@@ -1074,7 +1638,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
 
   *slot = vno1;
   return vno1;
-  
+
 }
 
 /* Insert OP into the current hash table with a value number of
@@ -1108,22 +1672,64 @@ vn_nary_op_insert (tree op, tree result)
   return vno1;
 }
 
+/* Insert the rhs of STMT into the current hash table with a value number of
+   RESULT.  */
+
+vn_nary_op_t
+vn_nary_op_insert_stmt (gimple stmt, tree result)
+{
+  unsigned length = gimple_num_ops (stmt) - 1;
+  void **slot;
+  vn_nary_op_t vno1;
+  unsigned i;
+
+  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
+                                       (sizeof (struct vn_nary_op_s)
+                                        - sizeof (tree) * (4 - length)));
+  vno1->value_id = VN_INFO (result)->value_id;
+  vno1->opcode = gimple_assign_rhs_code (stmt);
+  vno1->length = length;
+  vno1->type = gimple_expr_type (stmt);
+  for (i = 0; i < vno1->length; ++i)
+    vno1->op[i] = gimple_op (stmt, i + 1);
+  if (vno1->opcode == REALPART_EXPR
+      || vno1->opcode == IMAGPART_EXPR
+      || vno1->opcode == VIEW_CONVERT_EXPR)
+    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
+  vno1->result = result;
+  vno1->hashcode = vn_nary_op_compute_hash (vno1);
+  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
+                                   INSERT);
+  gcc_assert (!*slot);
+
+  *slot = vno1;
+  return vno1;
+}
+
 /* Compute a hashcode for PHI operation VP1 and return it.  */
 
 static inline hashval_t
 vn_phi_compute_hash (vn_phi_t vp1)
 {
-  hashval_t result = 0;
+  hashval_t result;
   int i;
   tree phi1op;
+  tree type;
 
   result = vp1->block->index;
 
+  /* If all PHI arguments are constants we need to distinguish
+     the PHI node via its type.  */
+  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
+  result += (INTEGRAL_TYPE_P (type)
+             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
+
   for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
     {
       if (phi1op == VN_TOP)
        continue;
-      result += iterative_hash_expr (phi1op, result);
+      result = iterative_hash_expr (phi1op, result);
     }
 
   return result;
@@ -1146,11 +1752,20 @@ vn_phi_eq (const void *p1, const void *p2)
   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
   const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
 
+  if (vp1->hashcode != vp2->hashcode)
+    return false;
+
   if (vp1->block == vp2->block)
     {
       int i;
       tree phi1op;
 
+      /* If the PHI nodes do not have compatible types
+         they are not the same.  */
+      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
+                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
+        return false;
+
       /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
       for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
@@ -1173,23 +1788,23 @@ static VEC(tree, heap) *shared_lookup_phiargs;
    it does not exist in the hash table.  */
 
 static tree
-vn_phi_lookup (tree phi)
+vn_phi_lookup (gimple phi)
 {
   void **slot;
   struct vn_phi_s vp1;
-  int i;
+  unsigned i;
 
   VEC_truncate (tree, shared_lookup_phiargs, 0);
 
   /* Canonicalize the SSA_NAME's to their value number.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       tree def = PHI_ARG_DEF (phi, i);
       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
       VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
     }
   vp1.phiargs = shared_lookup_phiargs;
-  vp1.block = bb_for_stmt (phi);
+  vp1.block = gimple_bb (phi);
   vp1.hashcode = vn_phi_compute_hash (&vp1);
   slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                    NO_INSERT);
@@ -1205,15 +1820,15 @@ vn_phi_lookup (tree phi)
    RESULT.  */
 
 static vn_phi_t
-vn_phi_insert (tree phi, tree result)
+vn_phi_insert (gimple phi, tree result)
 {
   void **slot;
   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
-  int i;
+  unsigned i;
   VEC (tree, heap) *args = NULL;
 
   /* Canonicalize the SSA_NAME's to their value number.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       tree def = PHI_ARG_DEF (phi, i);
       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
@@ -1221,7 +1836,7 @@ vn_phi_insert (tree phi, tree result)
     }
   vp1->value_id = VN_INFO (result)->value_id;
   vp1->phiargs = args;
-  vp1->block = bb_for_stmt (phi);
+  vp1->block = gimple_bb (phi);
   vp1->result = result;
   vp1->hashcode = vn_phi_compute_hash (vp1);
 
@@ -1278,16 +1893,19 @@ set_ssa_val_to (tree from, tree to)
       print_generic_expr (dump_file, from, 0);
       fprintf (dump_file, " to ");
       print_generic_expr (dump_file, to, 0);
-      fprintf (dump_file, "\n");
     }
 
   currval = SSA_VAL (from);
 
   if (currval != to  && !operand_equal_p (currval, to, OEP_PURE_SAME))
     {
-      SSA_VAL (from) = to;
+      VN_INFO (from)->valnum = to;
+      if (dump_file && (dump_flags & TDF_DETAILS))
+        fprintf (dump_file, " (changed)\n");
      return true;
     }
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    fprintf (dump_file, "\n");
   return false;
 }
 
@@ -1295,7 +1913,7 @@ set_ssa_val_to (tree from, tree to)
    Return true if a value number changed.  */
 
 static bool
-defs_to_varying (tree stmt)
+defs_to_varying (gimple stmt)
 {
   bool changed = false;
   ssa_op_iter iter;
@@ -1312,7 +1930,7 @@ defs_to_varying (tree stmt)
 }
 
 static bool expr_has_constants (tree expr);
-static tree try_to_simplify (tree stmt, tree rhs);
+static tree valueize_expr (tree expr);
 
 /* Visit a copy between LHS and RHS, return true if the value number
    changed.  */
@@ -1320,15 +1938,18 @@ static tree try_to_simplify (tree stmt, tree rhs);
 static bool
 visit_copy (tree lhs, tree rhs)
 {
-  /* Follow chains of copies to their destination.  */
-  while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
+  while (TREE_CODE (rhs) == SSA_NAME
+         && SSA_VAL (rhs) != rhs)
     rhs = SSA_VAL (rhs);
 
   /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
-  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
-  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
+  if (TREE_CODE (rhs) == SSA_NAME)
+    {
+      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
+      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
+    }
 
   return set_ssa_val_to (lhs, rhs);
 }
@@ -1337,10 +1958,10 @@ visit_copy (tree lhs, tree rhs)
    value number of LHS has changed as a result.  */
 
 static bool
-visit_unary_op (tree lhs, tree op)
+visit_unary_op (tree lhs, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_nary_op_lookup (op, NULL);
+  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
 
   if (result)
     {
@@ -1349,7 +1970,7 @@ visit_unary_op (tree lhs, tree op)
   else
     {
      changed = set_ssa_val_to (lhs, lhs);
-      vn_nary_op_insert (op, lhs);
+      vn_nary_op_insert_stmt (stmt, lhs);
     }
 
   return changed;
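set_ssa_val_to drives the SCC iteration: it reports a change only when the stored value actually moves, which is what bounds the fixpoint loop. A toy lattice-update loop in the same spirit (plain ints, with -1 playing the role of VN_TOP):

    #include <stdio.h>

    enum { N = 3 };
    static int val[N];

    /* Update the value of a name; report whether anything changed.  */
    static int
    set_val (int name, int to)
    {
      if (val[name] != to)       /* only a real lattice move counts */
        {
          val[name] = to;
          return 1;
        }
      return 0;
    }

    int
    main (void)
    {
      for (int i = 0; i < N; i++)
        val[i] = -1;                      /* start optimistically at TOP */

      int changed = 1, iterations = 0;
      while (changed)                     /* iterate until no value moves */
        {
          changed = 0;
          changed |= set_val (0, 5);      /* x_0 = 5 */
          changed |= set_val (1, val[0]); /* x_1 = copy of x_0 */
          changed |= set_val (2, val[1]); /* x_2 = copy of x_1 */
          iterations++;
        }
      printf ("stable after %d rounds: %d %d %d\n",
              iterations, val[0], val[1], val[2]);
      return 0;
    }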
@@ -1359,19 +1980,65 @@ visit_unary_op (tree lhs, tree op)
    value number of LHS has changed as a result.  */
 
 static bool
-visit_binary_op (tree lhs, tree op)
+visit_binary_op (tree lhs, gimple stmt)
+{
+  bool changed = false;
+  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
+
+  if (result)
+    {
+      changed = set_ssa_val_to (lhs, result);
+    }
+  else
+    {
+      changed = set_ssa_val_to (lhs, lhs);
+      vn_nary_op_insert_stmt (stmt, lhs);
+    }
+
+  return changed;
+}
+
+/* Visit a call STMT storing into LHS.  Return true if the value number
+   of the LHS has changed as a result.  */
+
+static bool
+visit_reference_op_call (tree lhs, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_nary_op_lookup (op, NULL);
+  struct vn_reference_s vr1;
+  tree result;
+  tree vuse = gimple_vuse (stmt);
 
+  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
+  vr1.type = gimple_expr_type (stmt);
+  vr1.set = 0;
+  vr1.hashcode = vn_reference_compute_hash (&vr1);
+  result = vn_reference_lookup_1 (&vr1, NULL);
   if (result)
     {
       changed = set_ssa_val_to (lhs, result);
+      if (TREE_CODE (result) == SSA_NAME
+          && VN_INFO (result)->has_constants)
+        VN_INFO (lhs)->has_constants = true;
     }
   else
    {
+      void **slot;
+      vn_reference_t vr2;
       changed = set_ssa_val_to (lhs, lhs);
-      vn_nary_op_insert (op, lhs);
+      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
+      vr2->vuse = vr1.vuse;
+      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
+      vr2->type = vr1.type;
+      vr2->set = vr1.set;
+      vr2->hashcode = vr1.hashcode;
+      vr2->result = lhs;
+      slot = htab_find_slot_with_hash (current_info->references,
+                                       vr2, vr2->hashcode, INSERT);
+      if (*slot)
+        free_reference (*slot);
+      *slot = vr2;
     }
 
   return changed;
@@ -1381,11 +2048,22 @@ visit_binary_op (tree lhs, tree op)
    and return true if the value number of the LHS has changed as a result.  */
 
 static bool
-visit_reference_op_load (tree lhs, tree op, tree stmt)
+visit_reference_op_load (tree lhs, tree op, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
-                                     NULL);
+  tree last_vuse;
+  tree result;
+
+  last_vuse = gimple_vuse (stmt);
+  last_vuse_ptr = &last_vuse;
+  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+  last_vuse_ptr = NULL;
+
+  /* If we have a VCE, try looking up its operand as it might be stored in
+     a different type.  */
+  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
+    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
+                                  true, NULL);
 
   /* We handle type-punning through unions by value-numbering based
      on offset and size of the access.  Be prepared to handle a
@@ -1398,12 +2076,15 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
         So first simplify and lookup this expression to see if it
         is already available.  */
       tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
-      if (stmt
-          && !is_gimple_min_invariant (val)
-          && TREE_CODE (val) != SSA_NAME)
+      if ((CONVERT_EXPR_P (val)
+           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
+          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
-          tree tem = try_to_simplify (stmt, val);
-          if (tem)
+          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
+          if ((CONVERT_EXPR_P (tem)
+               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
+              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
       result = val;
@@ -1414,9 +2095,10 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
      a new SSA_NAME we create.  */
   if (!result && may_insert)
     {
-      result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
+      result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
       /* Initialize value-number information properly.  */
       VN_INFO_GET (result)->valnum = result;
+      VN_INFO (result)->value_id = get_next_value_id ();
       VN_INFO (result)->expr = val;
       VN_INFO (result)->has_constants = expr_has_constants (val);
       VN_INFO (result)->needs_insertion = true;
@@ -1459,7 +2141,7 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
   else
     {
       changed = set_ssa_val_to (lhs, lhs);
-      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
+      vn_reference_insert (op, lhs, last_vuse);
     }
 
   return changed;
@@ -1470,7 +2152,7 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
    and return true if the value number of the LHS has changed as a result.  */
 
 static bool
-visit_reference_op_store (tree lhs, tree op, tree stmt)
+visit_reference_op_store (tree lhs, tree op, gimple stmt)
 {
   bool changed = false;
   tree result;
@@ -1492,8 +2174,7 @@ visit_reference_op_store (tree lhs, tree op, tree stmt)
      Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
 
-  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
-                                NULL);
+  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
 
   if (result)
     {
@@ -1506,8 +2187,6 @@ visit_reference_op_store (tree lhs, tree op, tree stmt)
 
   if (!result || !resultsame)
     {
-      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
-      int i;
      tree vdef;
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1521,7 +2200,7 @@ visit_reference_op_store (tree lhs, tree op, tree stmt)
        }
       /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
-      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
+      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
@@ -1530,36 +2209,23 @@ visit_reference_op_store (tree lhs, tree op, tree stmt)
 
       /* Do not insert structure copies into the tables.  */
       if (is_gimple_min_invariant (op)
         || is_gimple_reg (op))
-       vn_reference_insert (lhs, op, vdefs);
+       vn_reference_insert (lhs, op, vdef);
     }
   else
     {
-      /* We had a match, so value number the vdefs to have the value
-         number of the vuses they came from.  */
-      ssa_op_iter op_iter;
-      def_operand_p var;
-      vuse_vec_p vv;
+      /* We had a match, so value number the vdef to have the value
+         number of the vuse it came from.  */
+      tree def, use;
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value,"
                "value numbering store vdefs to matching vuses.\n");
 
-      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
-        {
-          tree def = DEF_FROM_PTR (var);
-          tree use;
-
-          /* Uh, if the vuse is a multiuse, we can't really do much
-             here, sadly, since we don't know which value number of
-             which vuse to use.  */
-          if (VUSE_VECT_NUM_ELEM (*vv) != 1)
-            use = def;
-          else
-            use = VUSE_ELEMENT_VAR (*vv, 0);
+      def = gimple_vdef (stmt);
+      use = gimple_vuse (stmt);
 
-          VN_INFO (def)->use_processed = true;
-          changed |= set_ssa_val_to (def, SSA_VAL (use));
-        }
+      VN_INFO (def)->use_processed = true;
+      changed |= set_ssa_val_to (def, SSA_VAL (use));
     }
 
   return changed;
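The union trick referenced above value-numbers a load by offset and size rather than by field, so u.f and u.g occupying the same bytes can share one value. A toy map keyed by (base, offset, size) — all names and the linear-scan table are invented for illustration:

    #include <stdio.h>
    #include <string.h>

    struct load_key { const char *base; long bit_offset, bit_size; };
    struct entry { struct load_key key; int value_number; int used; };

    static struct entry table[8];

    /* Two loads of the same bytes get the same value number, which is
       how a type-punning read can reuse an earlier store's value.  */
    static int
    lookup_or_insert (struct load_key k, int next_vn)
    {
      for (int i = 0; i < 8; i++)
        if (table[i].used
            && strcmp (table[i].key.base, k.base) == 0
            && table[i].key.bit_offset == k.bit_offset
            && table[i].key.bit_size == k.bit_size)
          return table[i].value_number;
      for (int i = 0; i < 8; i++)
        if (!table[i].used)
          {
            table[i] = (struct entry){ k, next_vn, 1 };
            return next_vn;
          }
      return -1;
    }

    int
    main (void)
    {
      /* union { int f; unsigned g; } u;  -- both at offset 0, 32 bits.  */
      struct load_key f = { "u", 0, 32 }, g = { "u", 0, 32 };
      int vn_f = lookup_or_insert (f, 1);
      int vn_g = lookup_or_insert (g, 2);
      printf ("u.f -> %d, u.g -> %d\n", vn_f, vn_g);   /* both 1 */
      return 0;
    }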
*/ static bool -visit_phi (tree phi) +visit_phi (gimple phi) { bool changed = false; tree result; tree sameval = VN_TOP; bool allsame = true; - int i; + unsigned i; /* TODO: We could check for this in init_sccvn, and replace this with a gcc_assert. */ @@ -1584,7 +2250,7 @@ visit_phi (tree phi) /* See if all non-TOP arguments have the same value. TOP is equivalent to everything, so we can ignore it. */ - for (i = 0; i < PHI_NUM_ARGS (phi); i++) + for (i = 0; i < gimple_phi_num_args (phi); i++) { tree def = PHI_ARG_DEF (phi, i); @@ -1671,6 +2337,32 @@ expr_has_constants (tree expr) return false; } +/* Return true if STMT contains constants. */ + +static bool +stmt_has_constants (gimple stmt) +{ + if (gimple_code (stmt) != GIMPLE_ASSIGN) + return false; + + switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) + { + case GIMPLE_UNARY_RHS: + return is_gimple_min_invariant (gimple_assign_rhs1 (stmt)); + + case GIMPLE_BINARY_RHS: + return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt)) + || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))); + case GIMPLE_SINGLE_RHS: + /* Constants inside reference ops are rarely interesting, but + it can take a lot of looking to find them. */ + return is_gimple_min_invariant (gimple_assign_rhs1 (stmt)); + default: + gcc_unreachable (); + } + return false; +} + /* Replace SSA_NAMES in expr with their value numbers, and return the result. This is performed in place. */ @@ -1703,11 +2395,11 @@ valueize_expr (tree expr) simplified. */ static tree -simplify_binary_expression (tree stmt, tree rhs) +simplify_binary_expression (gimple stmt) { tree result = NULL_TREE; - tree op0 = TREE_OPERAND (rhs, 0); - tree op1 = TREE_OPERAND (rhs, 1); + tree op0 = gimple_assign_rhs1 (stmt); + tree op1 = gimple_assign_rhs2 (stmt); /* This will not catch every single case we could combine, but will catch those with constants. The goal here is to simultaneously @@ -1715,8 +2407,9 @@ simplify_binary_expression (tree stmt, tree rhs) expansion of expressions during simplification. */ if (TREE_CODE (op0) == SSA_NAME) { - if (VN_INFO (op0)->has_constants) - op0 = valueize_expr (VN_INFO (op0)->expr); + if (VN_INFO (op0)->has_constants + || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison) + op0 = valueize_expr (vn_get_expr_for (op0)); else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0) op0 = SSA_VAL (op0); } @@ -1724,28 +2417,31 @@ simplify_binary_expression (tree stmt, tree rhs) if (TREE_CODE (op1) == SSA_NAME) { if (VN_INFO (op1)->has_constants) - op1 = valueize_expr (VN_INFO (op1)->expr); + op1 = valueize_expr (vn_get_expr_for (op1)); else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1) op1 = SSA_VAL (op1); } /* Avoid folding if nothing changed. */ - if (op0 == TREE_OPERAND (rhs, 0) - && op1 == TREE_OPERAND (rhs, 1)) + if (op0 == gimple_assign_rhs1 (stmt) + && op1 == gimple_assign_rhs2 (stmt)) return NULL_TREE; fold_defer_overflow_warnings (); - result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1); + result = fold_binary (gimple_assign_rhs_code (stmt), + gimple_expr_type (stmt), op0, op1); + if (result) + STRIP_USELESS_TYPE_CONVERSION (result); - fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result), + fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result), stmt, 0); /* Make sure result is not a complex expression consisting of operators of operators (IE (a + b) + (a + c)) Otherwise, we will end up with unbounded expressions if fold does anything at all. 
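
/* visit_phi above assigns a PHI node a known value only when every
   non-VN_TOP argument agrees.  A hypothetical input where both
   incoming arguments carry the same value number, so the PHI result
   is valued like them rather than as VARYING:  */

int
phi_example (int c, int a)
{
  int t;
  if (c)
    t = a + 1;	/* Same value number as ...  */
  else
    t = a + 1;	/* ... this arm.  */
  return t;	/* PHI <t_1, t_2>: all arguments agree, so the result
		   shares the value number of a + 1.  */
}
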
*/ - if (result && valid_gimple_expression_p (result)) + if (result && valid_gimple_rhs_p (result)) return result; return NULL_TREE; @@ -1755,24 +2451,32 @@ simplify_binary_expression (tree stmt, tree rhs) simplified. */ static tree -simplify_unary_expression (tree rhs) +simplify_unary_expression (gimple stmt) { tree result = NULL_TREE; - tree op0 = TREE_OPERAND (rhs, 0); + tree orig_op0, op0 = gimple_assign_rhs1 (stmt); + + /* We handle some tcc_reference codes here that are all + GIMPLE_ASSIGN_SINGLE codes. */ + if (gimple_assign_rhs_code (stmt) == REALPART_EXPR + || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR + || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR) + op0 = TREE_OPERAND (op0, 0); if (TREE_CODE (op0) != SSA_NAME) return NULL_TREE; + orig_op0 = op0; if (VN_INFO (op0)->has_constants) - op0 = valueize_expr (VN_INFO (op0)->expr); - else if (CONVERT_EXPR_P (rhs) - || TREE_CODE (rhs) == REALPART_EXPR - || TREE_CODE (rhs) == IMAGPART_EXPR - || TREE_CODE (rhs) == VIEW_CONVERT_EXPR) + op0 = valueize_expr (vn_get_expr_for (op0)); + else if (gimple_assign_cast_p (stmt) + || gimple_assign_rhs_code (stmt) == REALPART_EXPR + || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR + || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR) { /* We want to do tree-combining on conversion-like expressions. Make sure we feed only SSA_NAMEs or constants to fold though. */ - tree tem = valueize_expr (VN_INFO (op0)->expr); + tree tem = valueize_expr (vn_get_expr_for (op0)); if (UNARY_CLASS_P (tem) || BINARY_CLASS_P (tem) || TREE_CODE (tem) == VIEW_CONVERT_EXPR @@ -1782,36 +2486,38 @@ simplify_unary_expression (tree rhs) } /* Avoid folding if nothing changed, but remember the expression. */ - if (op0 == TREE_OPERAND (rhs, 0)) - return rhs; + if (op0 == orig_op0) + return NULL_TREE; - result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0); + result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt), + gimple_expr_type (stmt), op0); if (result) { STRIP_USELESS_TYPE_CONVERSION (result); - if (valid_gimple_expression_p (result)) + if (valid_gimple_rhs_p (result)) return result; } - return rhs; + return NULL_TREE; } /* Try to simplify RHS using equivalences and constant folding. */ static tree -try_to_simplify (tree stmt, tree rhs) +try_to_simplify (gimple stmt) { tree tem; /* For stores we can end up simplifying a SSA_NAME rhs. Just return in this case, there is no point in doing extra work. */ - if (TREE_CODE (rhs) == SSA_NAME) - return rhs; + if (gimple_assign_copy_p (stmt) + && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME) + return NULL_TREE; - switch (TREE_CODE_CLASS (TREE_CODE (rhs))) + switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) { case tcc_declaration: - tem = get_symbol_constant_value (rhs); + tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt)); if (tem) return tem; break; @@ -1819,29 +2525,29 @@ try_to_simplify (tree stmt, tree rhs) case tcc_reference: /* Do not do full-blown reference lookup here, but simplify reads from constant aggregates. */ - tem = fold_const_aggregate_ref (rhs); + tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt)); if (tem) return tem; /* Fallthrough for some codes that can operate on registers. 
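
/* simplify_unary_expression above feeds the value-numbered operand of
   a conversion back into fold (via vn_get_expr_for), so chains of
   useless conversions collapse.  A hypothetical input it can clean
   up; the widening to int and the truncation back cancel:  */

short
conversion_example (short s)
{
  int widened = s;	   /* t_1 = (int) s_0  */
  return (short) widened;  /* t_2 = (short) t_1 folds back to s_0,
			      so t_2 and s_0 share a value number.  */
}
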
*/ - if (!(TREE_CODE (rhs) == REALPART_EXPR - || TREE_CODE (rhs) == IMAGPART_EXPR - || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)) + if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR + || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR + || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR)) break; /* We could do a little more with unary ops, if they expand into binary ops, but it's debatable whether it is worth it. */ case tcc_unary: - return simplify_unary_expression (rhs); + return simplify_unary_expression (stmt); break; case tcc_comparison: case tcc_binary: - return simplify_binary_expression (stmt, rhs); + return simplify_binary_expression (stmt); break; default: break; } - return rhs; + return NULL_TREE; } /* Visit and value number USE, return true if the value number @@ -1851,67 +2557,52 @@ static bool visit_use (tree use) { bool changed = false; - tree stmt = SSA_NAME_DEF_STMT (use); - stmt_ann_t ann; + gimple stmt = SSA_NAME_DEF_STMT (use); VN_INFO (use)->use_processed = true; gcc_assert (!SSA_NAME_IN_FREE_LIST (use)); if (dump_file && (dump_flags & TDF_DETAILS) - && !IS_EMPTY_STMT (stmt)) + && !SSA_NAME_IS_DEFAULT_DEF (use)) { fprintf (dump_file, "Value numbering "); print_generic_expr (dump_file, use, 0); fprintf (dump_file, " stmt = "); - print_generic_stmt (dump_file, stmt, 0); + print_gimple_stmt (dump_file, stmt, 0, 0); } - /* RETURN_EXPR may have an embedded MODIFY_STMT. */ - if (TREE_CODE (stmt) == RETURN_EXPR - && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT) - stmt = TREE_OPERAND (stmt, 0); - - ann = stmt_ann (stmt); - /* Handle uninitialized uses. */ - if (IS_EMPTY_STMT (stmt)) - { - changed = set_ssa_val_to (use, use); - } + if (SSA_NAME_IS_DEFAULT_DEF (use)) + changed = set_ssa_val_to (use, use); else { - if (TREE_CODE (stmt) == PHI_NODE) - { - changed = visit_phi (stmt); - } - else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT - || (ann && ann->has_volatile_ops) - || tree_could_throw_p (stmt)) - { - changed = defs_to_varying (stmt); - } - else + if (gimple_code (stmt) == GIMPLE_PHI) + changed = visit_phi (stmt); + else if (!gimple_has_lhs (stmt) + || gimple_has_volatile_ops (stmt) + || stmt_could_throw_p (stmt)) + changed = defs_to_varying (stmt); + else if (is_gimple_assign (stmt)) { - tree lhs = GIMPLE_STMT_OPERAND (stmt, 0); - tree rhs = GIMPLE_STMT_OPERAND (stmt, 1); + tree lhs = gimple_assign_lhs (stmt); tree simplified; - STRIP_USELESS_TYPE_CONVERSION (rhs); - /* Shortcut for copies. Simplifying copies is pointless, since we copy the expression and value they represent. */ - if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME) + if (gimple_assign_copy_p (stmt) + && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME + && TREE_CODE (lhs) == SSA_NAME) { - changed = visit_copy (lhs, rhs); + changed = visit_copy (lhs, gimple_assign_rhs1 (stmt)); goto done; } - simplified = try_to_simplify (stmt, rhs); - if (simplified && simplified != rhs) + simplified = try_to_simplify (stmt); + if (simplified) { if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "RHS "); - print_generic_expr (dump_file, rhs, 0); + print_gimple_expr (dump_file, stmt, 0, 0); fprintf (dump_file, " simplified to "); print_generic_expr (dump_file, simplified, 0); if (TREE_CODE (lhs) == SSA_NAME) @@ -1925,16 +2616,17 @@ visit_use (tree use) screw up phi congruence because constants are not uniquely associated with a single ssa name that can be looked up. 
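
/* When try_to_simplify above folds an RHS down to a constant,
   visit_use records the constant as the value number of the LHS, and
   later uses of that name can be replaced outright.  A hypothetical
   input:  */

int
constant_fold_example (void)
{
  int x = 3;
  int y = x + 4;	/* RHS "x_1 + 4" simplifies to 7.  */
  return y;		/* y_2 carries value number 7.  */
}
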
*/ - if (simplified && is_gimple_min_invariant (simplified) - && TREE_CODE (lhs) == SSA_NAME - && simplified != rhs) + if (simplified + && is_gimple_min_invariant (simplified) + && TREE_CODE (lhs) == SSA_NAME) { VN_INFO (lhs)->expr = simplified; VN_INFO (lhs)->has_constants = true; changed = set_ssa_val_to (lhs, simplified); goto done; } - else if (simplified && TREE_CODE (simplified) == SSA_NAME + else if (simplified + && TREE_CODE (simplified) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME) { changed = visit_copy (lhs, simplified); @@ -1949,13 +2641,10 @@ visit_use (tree use) valuizing may change the IL stream. */ VN_INFO (lhs)->expr = unshare_expr (simplified); } - rhs = simplified; - } - else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME) - { - VN_INFO (lhs)->has_constants = true; - VN_INFO (lhs)->expr = unshare_expr (rhs); } + else if (stmt_has_constants (stmt) + && TREE_CODE (lhs) == SSA_NAME) + VN_INFO (lhs)->has_constants = true; else if (TREE_CODE (lhs) == SSA_NAME) { /* We reset expr and constantness here because we may @@ -1964,56 +2653,79 @@ visit_use (tree use) even if they were optimistically constant. */ VN_INFO (lhs)->has_constants = false; - VN_INFO (lhs)->expr = lhs; + VN_INFO (lhs)->expr = NULL_TREE; } - if (TREE_CODE (lhs) == SSA_NAME - /* We can substitute SSA_NAMEs that are live over - abnormal edges with their constant value. */ - && !is_gimple_min_invariant (rhs) - && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) + if ((TREE_CODE (lhs) == SSA_NAME + /* We can substitute SSA_NAMEs that are live over + abnormal edges with their constant value. */ + && !(gimple_assign_copy_p (stmt) + && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))) + && !(simplified + && is_gimple_min_invariant (simplified)) + && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) + /* Stores or copies from SSA_NAMEs that are live over + abnormal edges are a problem. */ + || (gimple_assign_single_p (stmt) + && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME + && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))) changed = defs_to_varying (stmt); else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs)) { - changed = visit_reference_op_store (lhs, rhs, stmt); + changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt); } else if (TREE_CODE (lhs) == SSA_NAME) { - if (is_gimple_min_invariant (rhs)) + if ((gimple_assign_copy_p (stmt) + && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))) + || (simplified + && is_gimple_min_invariant (simplified))) { VN_INFO (lhs)->has_constants = true; - VN_INFO (lhs)->expr = rhs; - changed = set_ssa_val_to (lhs, rhs); + if (simplified) + changed = set_ssa_val_to (lhs, simplified); + else + changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt)); } else { - switch (TREE_CODE_CLASS (TREE_CODE (rhs))) + switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) { - case tcc_unary: - changed = visit_unary_op (lhs, rhs); + case GIMPLE_UNARY_RHS: + changed = visit_unary_op (lhs, stmt); break; - case tcc_binary: - changed = visit_binary_op (lhs, rhs); + case GIMPLE_BINARY_RHS: + changed = visit_binary_op (lhs, stmt); break; - /* If tcc_vl_expr ever encompasses more than - CALL_EXPR, this will need to be changed. 
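
/* The checks above refuse to substitute SSA names that occur in
   abnormal PHI nodes, because values live across abnormal edges (for
   example around setjmp) cannot be re-materialized on those edges.
   A hypothetical illustration -- GCC's -Wclobbered warning flags the
   same situation:  */

#include <setjmp.h>

int
abnormal_edge_example (jmp_buf *env, int a)
{
  int x = a + 1;	/* x is live across the abnormal edge ...  */
  if (setjmp (*env))
    return x;		/* ... so it must stay VARYING and may not be
			   replaced by an equivalent expression.  */
  return 0;
}
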
*/ - case tcc_vl_exp: - if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST)) - changed = visit_reference_op_load (lhs, rhs, stmt); - else - changed = defs_to_varying (stmt); - break; - case tcc_declaration: - case tcc_reference: - changed = visit_reference_op_load (lhs, rhs, stmt); - break; - case tcc_expression: - if (TREE_CODE (rhs) == ADDR_EXPR) + case GIMPLE_SINGLE_RHS: + switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) { - changed = visit_unary_op (lhs, rhs); - goto done; + case tcc_reference: + /* VOP-less references can go through unary case. */ + if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR + || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR + || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR ) + && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME) + { + changed = visit_unary_op (lhs, stmt); + break; + } + /* Fallthrough. */ + case tcc_declaration: + changed = visit_reference_op_load + (lhs, gimple_assign_rhs1 (stmt), stmt); + break; + case tcc_expression: + if (gimple_assign_rhs_code (stmt) == ADDR_EXPR) + { + changed = visit_unary_op (lhs, stmt); + break; + } + /* Fallthrough. */ + default: + changed = defs_to_varying (stmt); } - /* Fallthrough. */ + break; default: changed = defs_to_varying (stmt); break; @@ -2023,6 +2735,39 @@ visit_use (tree use) else changed = defs_to_varying (stmt); } + else if (is_gimple_call (stmt)) + { + tree lhs = gimple_call_lhs (stmt); + + /* ??? We could try to simplify calls. */ + + if (stmt_has_constants (stmt) + && TREE_CODE (lhs) == SSA_NAME) + VN_INFO (lhs)->has_constants = true; + else if (TREE_CODE (lhs) == SSA_NAME) + { + /* We reset expr and constantness here because we may + have been value numbering optimistically, and + iterating. They may become non-constant in this case, + even if they were optimistically constant. */ + VN_INFO (lhs)->has_constants = false; + VN_INFO (lhs)->expr = NULL_TREE; + } + + if (TREE_CODE (lhs) == SSA_NAME + && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) + changed = defs_to_varying (stmt); + /* ??? We should handle stores from calls. 
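
/* Both the removed tcc_vl_exp code above and the new call handling it
   replaces value-number calls to pure and const functions through the
   references table, so a repeated call with unchanged arguments is
   recognized as redundant.  A hypothetical input -- square is const,
   so the second call can reuse the first call's value:  */

__attribute__ ((const)) int
square (int x)
{
  return x * x;
}

int
call_example (int a)
{
  int r1 = square (a);	/* Inserted into the references table.  */
  int r2 = square (a);	/* Lookup hits: r2 is valued like r1.  */
  return r1 + r2;
}
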
*/
+	  else if (TREE_CODE (lhs) == SSA_NAME)
+	    {
+	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
+		changed = visit_reference_op_call (lhs, stmt);
+	      else
+		changed = defs_to_varying (stmt);
+	    }
+	  else
+	    changed = defs_to_varying (stmt);
+	}
     }
  done:
   return changed;
@@ -2035,23 +2780,23 @@ compare_ops (const void *pa, const void *pb)
 {
   const tree opa = *((const tree *)pa);
   const tree opb = *((const tree *)pb);
-  tree opstmta = SSA_NAME_DEF_STMT (opa);
-  tree opstmtb = SSA_NAME_DEF_STMT (opb);
+  gimple opstmta = SSA_NAME_DEF_STMT (opa);
+  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
   basic_block bba;
   basic_block bbb;
 
-  if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
-    return 0;
-  else if (IS_EMPTY_STMT (opstmta))
+  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
+    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
+  else if (gimple_nop_p (opstmta))
     return -1;
-  else if (IS_EMPTY_STMT (opstmtb))
+  else if (gimple_nop_p (opstmtb))
     return 1;
 
-  bba = bb_for_stmt (opstmta);
-  bbb = bb_for_stmt (opstmtb);
+  bba = gimple_bb (opstmta);
+  bbb = gimple_bb (opstmtb);
 
   if (!bba && !bbb)
-    return 0;
+    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
   else if (!bba)
     return -1;
   else if (!bbb)
@@ -2059,13 +2804,17 @@ compare_ops (const void *pa, const void *pb)
 
   if (bba == bbb)
     {
-      if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
-	return 0;
-      else if (TREE_CODE (opstmta) == PHI_NODE)
+      if (gimple_code (opstmta) == GIMPLE_PHI
+	  && gimple_code (opstmtb) == GIMPLE_PHI)
+	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
+      else if (gimple_code (opstmta) == GIMPLE_PHI)
 	return -1;
-      else if (TREE_CODE (opstmtb) == PHI_NODE)
+      else if (gimple_code (opstmtb) == GIMPLE_PHI)
 	return 1;
-      return gimple_stmt_uid (opstmta) - gimple_stmt_uid (opstmtb);
+      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
+	return gimple_uid (opstmta) - gimple_uid (opstmtb);
+      else
+	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
     }
   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
 }
@@ -2084,6 +2833,60 @@ sort_scc (VEC (tree, heap) *scc)
 	 compare_ops);
 }
 
+/* Insert the no longer used nary *ENTRY to the current hash. */
+
+static int
+copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_nary_op_t onary = (vn_nary_op_t) *entry;
+  size_t size = (sizeof (struct vn_nary_op_s)
+		 - sizeof (tree) * (4 - onary->length));
+  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
+						    size);
+  void **slot;
+  memcpy (nary, onary, size);
+  slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
+				   INSERT);
+  gcc_assert (!*slot);
+  *slot = nary;
+  return 1;
+}
+
+/* Insert the no longer used phi *ENTRY to the current hash. */
+
+static int
+copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_phi_t ophi = (vn_phi_t) *entry;
+  vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
+  void **slot;
+  memcpy (phi, ophi, sizeof (*phi));
+  ophi->phiargs = NULL;
+  slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
+				   INSERT);
+  *slot = phi;
+  return 1;
+}
+
+/* Insert the no longer used reference *ENTRY to the current hash. 
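
/* copy_nary, copy_phis and copy_references above are htab_traverse
   callbacks: each is applied to every entry of the optimistic table,
   copies the entry into the valid table, and returns nonzero so the
   traversal continues.  A self-contained toy model of that callback
   protocol -- toy_traverse and copy_entry are hypothetical, not the
   libiberty API:  */

#include <stdio.h>

typedef int (*toy_callback) (void **entry, void *data);

/* Apply CB to every entry; stop early if it returns zero.  */
static int
toy_traverse (void **entries, int n, toy_callback cb, void *data)
{
  int i;
  for (i = 0; i < n; i++)
    if (!cb (&entries[i], data))
      return 0;
  return 1;
}

/* Move one entry into the destination table, like copy_nary.  */
static int
copy_entry (void **entry, void *data)
{
  void **dest = (void **) data;
  *dest = *entry;
  return 1;	/* Keep traversing.  */
}

int
main (void)
{
  int a = 42;
  void *optimistic[1] = { &a };
  void *valid[1] = { 0 };
  toy_traverse (optimistic, 1, copy_entry, &valid[0]);
  printf ("%d\n", *(int *) valid[0]);	/* Prints 42.  */
  return 0;
}
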
*/ + +static int +copy_references (void **entry, void *data ATTRIBUTE_UNUSED) +{ + vn_reference_t oref = (vn_reference_t) *entry; + vn_reference_t ref; + void **slot; + ref = (vn_reference_t) pool_alloc (current_info->references_pool); + memcpy (ref, oref, sizeof (*ref)); + oref->operands = NULL; + slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode, + INSERT); + if (*slot) + free_reference (*slot); + *slot = ref; + return 1; +} + /* Process a strongly connected component in the SSA graph. */ static void @@ -2111,6 +2914,9 @@ process_scc (VEC (tree, heap) *scc) { changed = false; iterations++; + /* As we are value-numbering optimistically we have to + clear the expression tables and the simplified expressions + in each iteration until we converge. */ htab_empty (optimistic_info->nary); htab_empty (optimistic_info->phis); htab_empty (optimistic_info->references); @@ -2119,15 +2925,19 @@ process_scc (VEC (tree, heap) *scc) empty_alloc_pool (optimistic_info->phis_pool); empty_alloc_pool (optimistic_info->references_pool); for (i = 0; VEC_iterate (tree, scc, i, var); i++) + VN_INFO (var)->expr = NULL_TREE; + for (i = 0; VEC_iterate (tree, scc, i, var); i++) changed |= visit_use (var); } statistics_histogram_event (cfun, "SCC iterations", iterations); - /* Finally, visit the SCC once using the valid table. */ + /* Finally, copy the contents of the no longer used optimistic + table to the valid table. */ current_info = valid_info; - for (i = 0; VEC_iterate (tree, scc, i, var); i++) - visit_use (var); + htab_traverse (optimistic_info->nary, copy_nary, NULL); + htab_traverse (optimistic_info->phis, copy_phis, NULL); + htab_traverse (optimistic_info->references, copy_references, NULL); } } @@ -2191,7 +3001,8 @@ DFS (tree name) VEC(ssa_op_iter, heap) *itervec = NULL; VEC(tree, heap) *namevec = NULL; use_operand_p usep = NULL; - tree defstmt, use; + gimple defstmt; + tree use; ssa_op_iter iter; start_over: @@ -2205,16 +3016,16 @@ start_over: defstmt = SSA_NAME_DEF_STMT (name); /* Recursively DFS on our operands, looking for SCC's. */ - if (!IS_EMPTY_STMT (defstmt)) + if (!gimple_nop_p (defstmt)) { /* Push a new iterator. */ - if (TREE_CODE (defstmt) == PHI_NODE) + if (gimple_code (defstmt) == GIMPLE_PHI) usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES); else usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES); } else - iter.done = true; + clear_and_done_ssa_iter (&iter); while (1) { @@ -2322,12 +3133,12 @@ init_scc_vn (void) sccstack = NULL; constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq, free); - + constant_value_ids = BITMAP_ALLOC (NULL); - + next_dfs_num = 1; next_value_id = 1; - + vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1); /* VEC_alloc doesn't actually grow it to the right size, it just preallocates the space to do so. 
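
/* process_scc above iterates the whole component optimistically: each
   pass clears the optimistic tables and the cached expressions, then
   re-visits every member, and the loop stops once a full pass changes
   no value.  A self-contained toy fixed-point loop with the same
   shape (all names hypothetical); it terminates because values only
   ever move one way:  */

#include <stdio.h>

int
main (void)
{
  int val[3] = { 0, 0, 0 };
  int changed = 1, iterations = 0;

  while (changed)
    {
      int i;
      changed = 0;
      iterations++;
      for (i = 0; i < 3; i++)
	{
	  /* Stand-in for visit_use: recompute from the current
	     state and note whether anything moved.  */
	  int next = (i == 0) ? 1 : val[i - 1];
	  if (next != val[i])
	    {
	      val[i] = next;
	      changed = 1;
	    }
	}
    }
  printf ("converged after %d iterations\n", iterations);
  return 0;
}
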
*/ @@ -2335,7 +3146,6 @@ init_scc_vn (void) gcc_obstack_init (&vn_ssa_aux_obstack); shared_lookup_phiargs = NULL; - shared_lookup_vops = NULL; shared_lookup_references = NULL; rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); @@ -2359,7 +3169,7 @@ init_scc_vn (void) if (name) { VN_INFO_GET (name)->valnum = VN_TOP; - VN_INFO (name)->expr = name; + VN_INFO (name)->expr = NULL_TREE; VN_INFO (name)->value_id = 0; } } @@ -2381,7 +3191,6 @@ free_scc_vn (void) htab_delete (constant_to_value_id); BITMAP_FREE (constant_value_ids); VEC_free (tree, heap, shared_lookup_phiargs); - VEC_free (tree, gc, shared_lookup_vops); VEC_free (vn_reference_op_s, heap, shared_lookup_references); XDELETEVEC (rpo_numbers); @@ -2416,7 +3225,7 @@ set_hashtable_value_ids (void) table. */ FOR_EACH_HTAB_ELEMENT (valid_info->nary, - vno, vn_nary_op_t, hi) + vno, vn_nary_op_t, hi) { if (vno->result) { @@ -2428,7 +3237,7 @@ set_hashtable_value_ids (void) } FOR_EACH_HTAB_ELEMENT (valid_info->phis, - vp, vn_phi_t, hi) + vp, vn_phi_t, hi) { if (vp->result) { @@ -2440,7 +3249,7 @@ set_hashtable_value_ids (void) } FOR_EACH_HTAB_ELEMENT (valid_info->references, - vr, vn_reference_t, hi) + vr, vn_reference_t, hi) { if (vr->result) { @@ -2461,7 +3270,7 @@ run_scc_vn (bool may_insert_arg) size_t i; tree param; bool changed = true; - + may_insert = may_insert_arg; init_scc_vn (); @@ -2474,7 +3283,7 @@ run_scc_vn (bool may_insert_arg) if (gimple_default_def (cfun, param) != NULL) { tree def = gimple_default_def (cfun, param); - SSA_VAL (def) = def; + VN_INFO (def)->valnum = def; } } @@ -2493,7 +3302,7 @@ run_scc_vn (bool may_insert_arg) } /* Initialize the value ids. */ - + for (i = 1; i < num_ssa_names; ++i) { tree name = ssa_name (i); @@ -2501,12 +3310,13 @@ run_scc_vn (bool may_insert_arg) if (!name) continue; info = VN_INFO (name); - if (info->valnum == name) + if (info->valnum == name + || info->valnum == VN_TOP) info->value_id = get_next_value_id (); else if (is_gimple_min_invariant (info->valnum)) info->value_id = get_or_alloc_constant_value_id (info->valnum); } - + /* Propagate until they stop changing. */ while (changed) { @@ -2527,9 +3337,9 @@ run_scc_vn (bool may_insert_arg) } } } - + set_hashtable_value_ids (); - + if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "Value numbers:\n"); @@ -2555,7 +3365,7 @@ run_scc_vn (bool may_insert_arg) /* Return the maximum value id we have ever seen. */ unsigned int -get_max_value_id (void) +get_max_value_id (void) { return next_value_id; } @@ -2569,67 +3379,70 @@ get_next_value_id (void) } -/* Compare two expressions E1 and E2 and return true if they are - equal. */ +/* Compare two expressions E1 and E2 and return true if they are equal. */ bool expressions_equal_p (tree e1, tree e2) { - tree te1, te2; - + /* The obvious case. */ if (e1 == e2) return true; - te1 = TREE_TYPE (e1); - te2 = TREE_TYPE (e2); - if (te1 != te2) + /* If only one of them is null, they cannot be equal. 
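
/* The value-id loop in run_scc_vn above follows a simple rule: a name
   that is its own value (or still VN_TOP) receives a fresh id, a name
   valued to a constant shares the constant's id, and everything else
   inherits its id from its leader later.  A toy restatement of that
   decision (value_id_for is hypothetical, not GCC API):  */

#include <stdio.h>

static int next_value_id = 1;

static int
value_id_for (int valnum_is_self, int is_constant, int constant_id)
{
  if (valnum_is_self)
    return next_value_id++;	/* Like get_next_value_id.  */
  if (is_constant)
    return constant_id;		/* Like get_or_alloc_constant_value_id.  */
  return 0;			/* Filled in from the leader later.  */
}

int
main (void)
{
  printf ("%d %d %d\n",
	  value_id_for (1, 0, 0),	/* 1: own value.  */
	  value_id_for (0, 1, 99),	/* 99: the constant's id.  */
	  value_id_for (0, 0, 0));	/* 0: valued to a leader.  */
  return 0;
}
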
*/ + if (!e1 || !e2) return false; - if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST) - { - tree lop1 = e1; - tree lop2 = e2; - for (lop1 = e1, lop2 = e2; - lop1 || lop2; - lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2)) - { - if (!lop1 || !lop2) - return false; - if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2))) - return false; - } - return true; - - } - else if (TREE_CODE (e1) == TREE_CODE (e2) - && operand_equal_p (e1, e2, OEP_PURE_SAME)) + /* Now perform the actual comparison. */ + if (TREE_CODE (e1) == TREE_CODE (e2) + && operand_equal_p (e1, e2, OEP_PURE_SAME)) return true; return false; } -/* Sort the VUSE array so that we can do equality comparisons - quicker on two vuse vecs. */ -void -sort_vuses (VEC (tree,gc) *vuses) -{ - if (VEC_length (tree, vuses) > 1) - qsort (VEC_address (tree, vuses), - VEC_length (tree, vuses), - sizeof (tree), - operand_build_cmp); -} +/* Return true if the nary operation NARY may trap. This is a copy + of stmt_could_throw_1_p adjusted to the SCCVN IL. */ -/* Sort the VUSE array so that we can do equality comparisons - quicker on two vuse vecs. */ +bool +vn_nary_may_trap (vn_nary_op_t nary) +{ + tree type; + tree rhs2; + bool honor_nans = false; + bool honor_snans = false; + bool fp_operation = false; + bool honor_trapv = false; + bool handled, ret; + unsigned i; -void -sort_vuses_heap (VEC (tree,heap) *vuses) -{ - if (VEC_length (tree, vuses) > 1) - qsort (VEC_address (tree, vuses), - VEC_length (tree, vuses), - sizeof (tree), - operand_build_cmp); + if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison + || TREE_CODE_CLASS (nary->opcode) == tcc_unary + || TREE_CODE_CLASS (nary->opcode) == tcc_binary) + { + type = nary->type; + fp_operation = FLOAT_TYPE_P (type); + if (fp_operation) + { + honor_nans = flag_trapping_math && !flag_finite_math_only; + honor_snans = flag_signaling_nans != 0; + } + else if (INTEGRAL_TYPE_P (type) + && TYPE_OVERFLOW_TRAPS (type)) + honor_trapv = true; + } + rhs2 = nary->op[1]; + ret = operation_could_trap_helper_p (nary->opcode, fp_operation, + honor_trapv, + honor_nans, honor_snans, rhs2, + &handled); + if (handled + && ret) + return true; + + for (i = 0; i < nary->length; ++i) + if (tree_could_trap_p (nary->op[i])) + return true; + + return false; }
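
/* vn_nary_may_trap above conservatively answers "may this operation
   trap?"; for integer division, for instance, the answer depends on
   whether the divisor is a known nonzero constant.  A much-simplified
   stand-in for that one case (div_may_trap is hypothetical and
   ignores the INT_MIN / -1 overflow case a full check also needs):  */

#include <stdio.h>

static int
div_may_trap (int divisor_is_constant, long divisor)
{
  if (!divisor_is_constant)
    return 1;			/* Unknown divisor: could be zero.  */
  return divisor == 0;		/* A constant zero certainly traps.  */
}

int
main (void)
{
  printf ("%d %d %d\n",
	  div_may_trap (0, 0),	/* 1 */
	  div_may_trap (1, 4),	/* 0 */
	  div_may_trap (1, 0));	/* 1 */
  return 0;
}
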