X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ftree-ssa-sccvn.c;h=79ce3c20a25af10a62a585ac02e67d8070284a41;hp=729787be3f7078479aa8e3c95799c2fdd6b1bc2c;hb=2bd888dc62ee6298b759613c47a05300bd23e8ba;hpb=d12dee9c52edfea940d74d84291ae0fe591db827

diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 729787be3f7..79ce3c20a25 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -257,9 +257,10 @@ vn_get_expr_for (tree name)
     switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
       {
       case tcc_reference:
-	if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
-	    || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
-	    || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+	if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
+	     || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
+	     || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+	    && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
 	  expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
 			      gimple_expr_type (def_stmt),
 			      TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
@@ -323,7 +324,7 @@ vn_constant_eq (const void *p1, const void *p2)
 }
 
 /* Hash table hash function for vn_constant_t.  */
-   
+
 static hashval_t
 vn_constant_hash (const void *p1)
 {
@@ -356,21 +357,23 @@ unsigned int
 get_or_alloc_constant_value_id (tree constant)
 {
   void **slot;
-  vn_constant_t vc = XNEW (struct vn_constant_s);
-  
-  vc->hashcode = vn_hash_constant_with_type (constant);
-  vc->constant = constant;
-  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
-				   vc->hashcode, INSERT);
+  struct vn_constant_s vc;
+  vn_constant_t vcp;
+
+  vc.hashcode = vn_hash_constant_with_type (constant);
+  vc.constant = constant;
+  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
+				   vc.hashcode, INSERT);
   if (*slot)
-    {
-      free (vc);
-      return ((vn_constant_t)*slot)->value_id;
-    }
-  vc->value_id = get_next_value_id ();
-  *slot = vc;
-  bitmap_set_bit (constant_value_ids, vc->value_id);
-  return vc->value_id;
+    return ((vn_constant_t)*slot)->value_id;
+
+  vcp = XNEW (struct vn_constant_s);
+  vcp->hashcode = vc.hashcode;
+  vcp->constant = constant;
+  vcp->value_id = get_next_value_id ();
+  *slot = (void *) vcp;
+  bitmap_set_bit (constant_value_ids, vcp->value_id);
+  return vcp->value_id;
 }
 
 /* Return true if V is a value id for a constant.  */
@@ -378,7 +381,7 @@ get_or_alloc_constant_value_id (tree constant)
 bool
 value_id_constant_p (unsigned int v)
 {
-  return bitmap_bit_p (constant_value_ids, v);  
+  return bitmap_bit_p (constant_value_ids, v);
 }
 
 /* Compare two reference operands P1 and P2 for equality.  Return true if
@@ -400,15 +403,15 @@ vn_reference_op_eq (const void *p1, const void *p2)
 /* Compute the hash for a reference operand VRO1.  */
 
 static hashval_t
-vn_reference_op_compute_hash (const vn_reference_op_t vro1)
+vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
 {
-  hashval_t result = 0;
+  result = iterative_hash_hashval_t (vro1->opcode, result);
   if (vro1->op0)
-    result += iterative_hash_expr (vro1->op0, vro1->opcode);
+    result = iterative_hash_expr (vro1->op0, result);
   if (vro1->op1)
-    result += iterative_hash_expr (vro1->op1, vro1->opcode);
+    result = iterative_hash_expr (vro1->op1, result);
   if (vro1->op2)
-    result += iterative_hash_expr (vro1->op2, vro1->opcode);
+    result = iterative_hash_expr (vro1->op2, result);
   return result;
 }
 
@@ -426,13 +429,14 @@ vn_reference_hash (const void *p1)
 hashval_t
 vn_reference_compute_hash (const vn_reference_t vr1)
 {
-  hashval_t result;
+  hashval_t result = 0;
   int i;
   vn_reference_op_t vro;
 
-  result = iterative_hash_expr (vr1->vuse, 0);
   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
-    result += vn_reference_op_compute_hash (vro);
+    result = vn_reference_op_compute_hash (vro, result);
+  if (vr1->vuse)
+    result += SSA_NAME_VERSION (vr1->vuse);
 
   return result;
 }
@@ -487,20 +491,26 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
   if (TREE_CODE (ref) == TARGET_MEM_REF)
     {
       vn_reference_op_s temp;
+      tree base;
+
+      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
+      if (!base)
+	base = build_int_cst (ptr_type_node, 0);
 
       memset (&temp, 0, sizeof (temp));
       /* We do not care for spurious type qualifications.  */
       temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
       temp.opcode = TREE_CODE (ref);
-      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
-      temp.op1 = TMR_INDEX (ref);
+      temp.op0 = TMR_INDEX (ref);
+      temp.op1 = TMR_STEP (ref);
+      temp.op2 = TMR_OFFSET (ref);
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
 
       memset (&temp, 0, sizeof (temp));
       temp.type = NULL_TREE;
-      temp.opcode = TREE_CODE (ref);
-      temp.op0 = TMR_STEP (ref);
-      temp.op1 = TMR_OFFSET (ref);
+      temp.opcode = TREE_CODE (base);
+      temp.op0 = base;
+      temp.op1 = TMR_ORIGINAL (ref);
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
       return;
     }
@@ -536,29 +546,27 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
+	  temp.op0 = TREE_OPERAND (ref, 1);
+	  temp.op1 = TREE_OPERAND (ref, 2);
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
-	      && TREE_OPERAND (ref, 2) == NULL_TREE
-	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
-	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
-	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
-	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
-	  else
-	    {
-	      /* Record field as operand.  */
-	      temp.op0 = TREE_OPERAND (ref, 1);
-	      temp.op1 = TREE_OPERAND (ref, 2);
-	    }
+	      && temp.op1 == NULL_TREE
+	      && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
+	      && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
+	      && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
+	      && host_integerp (DECL_SIZE (temp.op0), 0))
+	    temp.op0 = DECL_SIZE (temp.op0);
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
-	  temp.op1 = TREE_OPERAND (ref, 2);
-	  temp.op2 = TREE_OPERAND (ref, 3);
+	  /* Always record lower bounds and element size.  */
+	  temp.op1 = array_ref_low_bound (ref);
+	  temp.op2 = array_ref_element_size (ref);
	  break;
	case STRING_CST:
	case INTEGER_CST:
@@ -571,8 +579,6 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
-	case EXC_PTR_EXPR:
-	case FILTER_EXPR:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
@@ -605,24 +611,68 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
     }
 }
 
-/* Re-create a reference tree from the reference ops OPS.
-   Returns NULL_TREE if the ops were not handled.
-   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */
+/* Build a alias-oracle reference abstraction in *REF from the vn_reference
+   operands in *OPS, the reference alias set SET and the reference type TYPE.
+   Return true if something useful was produced.  */
 
-tree
-get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
+bool
+ao_ref_init_from_vn_reference (ao_ref *ref,
+			       alias_set_type set, tree type,
+			       VEC (vn_reference_op_s, heap) *ops)
 {
   vn_reference_op_t op;
   unsigned i;
-  tree ref, *op0_p = &ref;
+  tree base = NULL_TREE;
+  tree *op0_p = &base;
+  HOST_WIDE_INT offset = 0;
+  HOST_WIDE_INT max_size;
+  HOST_WIDE_INT size = -1;
+  tree size_tree = NULL_TREE;
+
+  /* First get the final access size from just the outermost expression.  */
+  op = VEC_index (vn_reference_op_s, ops, 0);
+  if (op->opcode == COMPONENT_REF)
+    {
+      if (TREE_CODE (op->op0) == INTEGER_CST)
+	size_tree = op->op0;
+      else
+	size_tree = DECL_SIZE (op->op0);
+    }
+  else if (op->opcode == BIT_FIELD_REF)
+    size_tree = op->op0;
+  else
+    {
+      enum machine_mode mode = TYPE_MODE (type);
+      if (mode == BLKmode)
+	size_tree = TYPE_SIZE (type);
+      else
+	size = GET_MODE_BITSIZE (mode);
+    }
+  if (size_tree != NULL_TREE)
+    {
+      if (!host_integerp (size_tree, 1))
+	size = -1;
+      else
+	size = TREE_INT_CST_LOW (size_tree);
+    }
+  /* Initially, maxsize is the same as the accessed element size.
+     In the following it will only grow (or become -1).  */
+  max_size = size;
+
+  /* Compute cumulative bit-offset for nested component-refs and array-refs,
+     and find the ultimate containing object.  */
   for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
     {
       switch (op->opcode)
	{
+	/* These may be in the reference ops, but we cannot do anything
+	   sensible with them here.  */
	case CALL_EXPR:
-	  return NULL_TREE;
+	case ADDR_EXPR:
+	  return false;
+
+	/* Record the base objects.  */
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
@@ -635,23 +685,65 @@ get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;
 
+	case VAR_DECL:
+	case PARM_DECL:
+	case RESULT_DECL:
+	case SSA_NAME:
+	  *op0_p = op->op0;
+	  break;
+
+	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
-	  *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
-			   op->op0, op->op1);
-	  op0_p = &TREE_OPERAND (*op0_p, 0);
+	  offset += tree_low_cst (op->op1, 0);
	  break;
 
	case COMPONENT_REF:
-	  *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
-			   op->op0, op->op1);
-	  op0_p = &TREE_OPERAND (*op0_p, 0);
-	  break;
+	  {
+	    tree field = op->op0;
+	    /* We do not have a complete COMPONENT_REF tree here so we
+	       cannot use component_ref_field_offset.  Do the interesting
+	       parts manually.  */
+
+	    /* Our union trick, done for offset zero only.  */
+	    if (TREE_CODE (field) == INTEGER_CST)
+	      ;
+	    else if (op->op1
+		     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+	      max_size = -1;
+	    else
+	      {
+		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+			   * BITS_PER_UNIT);
+		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
+	      }
+	    break;
+	  }
 
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
-	  *op0_p = build4 (op->opcode, op->type, NULL_TREE,
-			   op->op0, op->op1, op->op2);
-	  op0_p = &TREE_OPERAND (*op0_p, 0);
+	  /* We recorded the lower bound and the element size.  */
+	  if (!host_integerp (op->op0, 0)
+	      || !host_integerp (op->op1, 0)
+	      || !host_integerp (op->op2, 0))
+	    max_size = -1;
+	  else
+	    {
+	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
+	      hindex -= TREE_INT_CST_LOW (op->op1);
+	      hindex *= TREE_INT_CST_LOW (op->op2);
+	      hindex *= BITS_PER_UNIT;
+	      offset += hindex;
+	    }
+	  break;
+
+	case REALPART_EXPR:
+	  break;
+
+	case IMAGPART_EXPR:
+	  offset += size;
+	  break;
+
+	case VIEW_CONVERT_EXPR:
	  break;
 
	case STRING_CST:
@@ -660,37 +752,26 @@ get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
-	case VAR_DECL:
-	case PARM_DECL:
	case CONST_DECL:
-	case RESULT_DECL:
-	case SSA_NAME:
-	case FILTER_EXPR:
-	case EXC_PTR_EXPR:
-	  *op0_p = op->op0;
-	  break;
-
-	case ADDR_EXPR:
-	  if (op->op0 != NULL_TREE)
-	    {
-	      gcc_assert (is_gimple_min_invariant (op->op0));
-	      *op0_p = op->op0;
-	      break;
-	    }
-	  /* Fallthrough.  */
-	case IMAGPART_EXPR:
-	case REALPART_EXPR:
-	case VIEW_CONVERT_EXPR:
-	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
-	  op0_p = &TREE_OPERAND (*op0_p, 0);
-	  break;
+	  return false;
 
	default:
-	  return NULL_TREE;
+	  return false;
	}
     }
 
-  return ref;
+  if (base == NULL_TREE)
+    return false;
+
+  ref->ref = NULL_TREE;
+  ref->base = base;
+  ref->offset = offset;
+  ref->size = size;
+  ref->max_size = max_size;
+  ref->ref_alias_set = set;
+  ref->base_alias_set = -1;
+
+  return true;
 }
 
 /* Copy the operations present in load/store/call REF into RESULT, a vector of
@@ -757,6 +838,23 @@ vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
 
   /* Get ops for the addressed object.  */
   op = VEC_index (vn_reference_op_s, *ops, i);
+  /* ???  If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
+     around it to avoid later ICEs.  */
+  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
+      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
+    {
+      vn_reference_op_s aref;
+      tree dom;
+      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
+      aref.opcode = ARRAY_REF;
+      aref.op0 = integer_zero_node;
+      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
+	  && TYPE_MIN_VALUE (dom))
+	aref.op0 = TYPE_MIN_VALUE (dom);
+      aref.op1 = aref.op0;
+      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
+      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
+    }
   copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
 
   /* Do the replacement - we should have at least one op in mem now.  */
@@ -816,7 +914,10 @@ valueize_refs (VEC (vn_reference_op_s, heap) *orig)
	  if (i > 0
	      && TREE_CODE (vro->op0) == ADDR_EXPR
	      && VEC_index (vn_reference_op_s, orig, i - 1)->opcode == INDIRECT_REF)
-	    vn_reference_fold_indirect (&orig, &i);
+	    {
+	      vn_reference_fold_indirect (&orig, &i);
+	      continue;
+	    }
	}
       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	vro->op1 = SSA_VAL (vro->op1);
@@ -882,24 +983,31 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
	*vnresult = (vn_reference_t)*slot;
       return ((vn_reference_t)*slot)->result;
     }
-  
+
   return NULL_TREE;
 }
 
+static tree *last_vuse_ptr;
+
 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
    with the current VUSE and performs the expression lookup.  */
 
 static void *
-vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
 {
   vn_reference_t vr = (vn_reference_t)vr_;
   void **slot;
   hashval_t hash;
 
+  if (last_vuse_ptr)
+    *last_vuse_ptr = vuse;
+
   /* Fixup vuse and hash.  */
-  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
+  if (vr->vuse)
+    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
   vr->vuse = SSA_VAL (vuse);
-  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
+  if (vr->vuse)
+    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
 
   hash = vr->hashcode;
   slot = htab_find_slot_with_hash (current_info->references, vr,
@@ -909,17 +1017,178 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
				     hash, NO_INSERT);
   if (slot)
     return *slot;
-  
+
   return NULL;
 }
 
+/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
+   from the statement defining VUSE and if not successful tries to
+   translate *REFP and VR_ through an aggregate copy at the defintion
+   of VUSE.  */
+
+static void *
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+{
+  vn_reference_t vr = (vn_reference_t)vr_;
+  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+  tree fndecl;
+  tree base;
+  HOST_WIDE_INT offset, maxsize;
+
+  base = ao_ref_base (ref);
+  offset = ref->offset;
+  maxsize = ref->max_size;
+
+  /* If we cannot constrain the size of the reference we cannot
+     test if anything kills it.  */
+  if (maxsize == -1)
+    return (void *)-1;
+
+  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
+     from that defintion.
+     1) Memset.  */
+  if (is_gimple_reg_type (vr->type)
+      && is_gimple_call (def_stmt)
+      && (fndecl = gimple_call_fndecl (def_stmt))
+      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+      && integer_zerop (gimple_call_arg (def_stmt, 1))
+      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
+    {
+      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
+      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+      if ((unsigned HOST_WIDE_INT)size2 / 8
+	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+	  && operand_equal_p (base, base2, 0)
+	  && offset2 <= offset
+	  && offset2 + size2 >= offset + maxsize)
+	{
+	  tree val = fold_convert (vr->type, integer_zero_node);
+	  unsigned int value_id = get_or_alloc_constant_value_id (val);
+	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
+					     val, value_id);
+	}
+    }
+
+  /* 2) Assignment from an empty CONSTRUCTOR.  */
+  else if (is_gimple_reg_type (vr->type)
+	   && gimple_assign_single_p (def_stmt)
+	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
+	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
+    {
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+				       &offset2, &size2, &maxsize2);
+      if (operand_equal_p (base, base2, 0)
+	  && offset2 <= offset
+	  && offset2 + size2 >= offset + maxsize)
+	{
+	  tree val = fold_convert (vr->type, integer_zero_node);
+	  unsigned int value_id = get_or_alloc_constant_value_id (val);
+	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
+					     val, value_id);
+	}
+    }
+
+  /* For aggregate copies translate the reference through them if
+     the copy kills ref.  */
+  else if (gimple_assign_single_p (def_stmt)
+	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
+	       || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
+	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
+    {
+      tree base2;
+      HOST_WIDE_INT offset2, size2, maxsize2;
+      int i, j;
+      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
+      vn_reference_op_t vro;
+      ao_ref r;
+
+      /* See if the assignment kills REF.  */
+      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+				       &offset2, &size2, &maxsize2);
+      if (!operand_equal_p (base, base2, 0)
+	  || offset2 > offset
+	  || offset2 + size2 < offset + maxsize)
+	return (void *)-1;
+
+      /* Find the common base of ref and the lhs.  */
+      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
+      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
+      j = VEC_length (vn_reference_op_s, lhs) - 1;
+      while (j >= 0 && i >= 0
+	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
					       vr->operands, i),
				    VEC_index (vn_reference_op_s, lhs, j)))
+	{
+	  i--;
+	  j--;
+	}
+
+      VEC_free (vn_reference_op_s, heap, lhs);
+      /* i now points to the first additional op.
+	 ???  LHS may not be completely contained in VR, one or more
+	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
+	 try handling outermost VIEW_CONVERT_EXPRs.  */
+      if (j != -1)
+	return (void *)-1;
+
+      /* Now re-write REF to be based on the rhs of the assignment.  */
+      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
+      /* We need to pre-pend vr->operands[0..i] to rhs.  */
+      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
+	  > VEC_length (vn_reference_op_s, vr->operands))
+	{
+	  VEC (vn_reference_op_s, heap) *old = vr->operands;
+	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
+	  if (old == shared_lookup_references
+	      && vr->operands != old)
+	    shared_lookup_references = NULL;
+	}
+      else
+	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
+      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
+	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
+      VEC_free (vn_reference_op_s, heap, rhs);
+      vr->hashcode = vn_reference_compute_hash (vr);
+
+      /* Adjust *ref from the new operands.  */
+      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+	return (void *)-1;
+      /* This can happen with bitfields.  */
+      if (ref->size != r.size)
+	return (void *)-1;
+      *ref = r;
+
+      /* Do not update last seen VUSE after translating.  */
+      last_vuse_ptr = NULL;
+
+      /* Keep looking for the adjusted *REF / VR pair.  */
+      return NULL;
+    }
+
+  /* Bail out and stop walking.  */
+  return (void *)-1;
+}
+
 /* Lookup a reference operation by it's parts, in the current hash table.
    Returns the resulting value number if it exists in the hash table,
    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
    vn_reference_t stored in the hashtable if something is found.  */
 
 tree
-vn_reference_lookup_pieces (tree vuse,
+vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
 {
@@ -929,9 +1198,19 @@ vn_reference_lookup_pieces (tree vuse,
   if (!vnresult)
     vnresult = &tmp;
   *vnresult = NULL;
-  
+
   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
-  vr1.operands = valueize_refs (operands);
+  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
+  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
+  vr1.operands = operands = shared_lookup_references
+    = valueize_refs (shared_lookup_references);
+  vr1.type = type;
+  vr1.set = set;
   vr1.hashcode = vn_reference_compute_hash (&vr1);
   vn_reference_lookup_1 (&vr1, vnresult);
 
@@ -939,12 +1218,14 @@ vn_reference_lookup_pieces (tree vuse,
       && maywalk
       && vr1.vuse)
     {
-      tree ref = get_ref_from_reference_ops (operands);
-      if (!ref)
-	return NULL_TREE;
-      *vnresult =
-	(vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
						vn_reference_lookup_2, &vr1);
+      ao_ref r;
+      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
+	*vnresult =
+	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
+      if (vr1.operands != operands)
+	VEC_free (vn_reference_op_s, heap, vr1.operands);
     }
 
   if (*vnresult)
@@ -963,22 +1244,30 @@ tree
 vn_reference_lookup (tree op, tree vuse, bool maywalk,
		     vn_reference_t *vnresult)
 {
+  VEC (vn_reference_op_s, heap) *operands;
   struct vn_reference_s vr1;
 
   if (vnresult)
     *vnresult = NULL;
 
   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
-  vr1.operands = valueize_shared_reference_ops_from_ref (op);
+  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
+  vr1.type = TREE_TYPE (op);
+  vr1.set = get_alias_set (op);
   vr1.hashcode = vn_reference_compute_hash (&vr1);
 
   if (maywalk
       && vr1.vuse)
     {
       vn_reference_t wvnresult;
+      ao_ref r;
+      ao_ref_init (&r, op);
       wvnresult =
-	(vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
						vn_reference_lookup_2, &vr1);
+	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
+      if (vr1.operands != operands)
+	VEC_free (vn_reference_op_s, heap, vr1.operands);
       if (wvnresult)
	{
	  if (vnresult)
@@ -1009,6 +1298,8 @@ vn_reference_insert (tree op, tree result, tree vuse)
   vr1->value_id = get_or_alloc_constant_value_id (result);
   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
+  vr1->type = TREE_TYPE (op);
+  vr1->set = get_alias_set (op);
   vr1->hashcode = vn_reference_compute_hash (vr1);
   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
 
@@ -1036,7 +1327,7 @@ vn_reference_insert (tree op, tree result, tree vuse)
    structure we created.  */
 
 vn_reference_t
-vn_reference_insert_pieces (tree vuse,
+vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
 {
@@ -1048,6 +1339,8 @@ vn_reference_insert_pieces (tree vuse,
   vr1->value_id = value_id;
   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
   vr1->operands = valueize_refs (operands);
+  vr1->type = type;
+  vr1->set = set;
   vr1->hashcode = vn_reference_compute_hash (vr1);
   if (result && TREE_CODE (result) == SSA_NAME)
     result = SSA_VAL (result);
@@ -1055,7 +1348,7 @@ vn_reference_insert_pieces (tree vuse,
 
   slot = htab_find_slot_with_hash (current_info->references, vr1,
				   vr1->hashcode, INSERT);
-  
+
   /* At this point we should have all the things inserted that we have
      seen before, and we should never try inserting something that
     already exists.  */
@@ -1069,10 +1362,10 @@ vn_reference_insert_pieces (tree vuse,
 
 /* Compute and return the hash value for nary operation VBO1.  */
 
-inline hashval_t
+hashval_t
 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
 {
-  hashval_t hash = 0;
+  hashval_t hash;
   unsigned i;
 
   for (i = 0; i < vno1->length; ++i)
@@ -1088,8 +1381,9 @@ vn_nary_op_compute_hash (const vn_nary_op_t vno1)
       vno1->op[1] = temp;
     }
 
+  hash = iterative_hash_hashval_t (vno1->opcode, 0);
   for (i = 0; i < vno1->length; ++i)
-    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
+    hash = iterative_hash_expr (vno1->op[i], hash);
 
   return hash;
 }
@@ -1136,7 +1430,7 @@ vn_nary_op_eq (const void *p1, const void *p2)
 tree
 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
-			  tree op3, vn_nary_op_t *vnresult) 
+			  tree op3, vn_nary_op_t *vnresult)
 {
   void **slot;
   struct vn_nary_op_s vno1;
@@ -1211,7 +1505,7 @@ vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
     *vnresult = NULL;
   vno1.opcode = gimple_assign_rhs_code (stmt);
   vno1.length = gimple_num_ops (stmt) - 1;
-  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
+  vno1.type = gimple_expr_type (stmt);
   for (i = 0; i < vno1.length; ++i)
     vno1.op[i] = gimple_op (stmt, i + 1);
   if (vno1.opcode == REALPART_EXPR
@@ -1240,7 +1534,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
-			  unsigned int value_id) 
+			  unsigned int value_id)
 {
   void **slot;
   vn_nary_op_t vno1;
@@ -1268,7 +1562,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
   *slot = vno1;
 
   return vno1;
-  
+
 }
 
 /* Insert OP into the current hash table with a value number of
@@ -1319,7 +1613,7 @@ vn_nary_op_insert_stmt (gimple stmt, tree result)
   vno1->value_id = VN_INFO (result)->value_id;
   vno1->opcode = gimple_assign_rhs_code (stmt);
   vno1->length = length;
-  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
+  vno1->type = gimple_expr_type (stmt);
   for (i = 0; i < vno1->length; ++i)
     vno1->op[i] = gimple_op (stmt, i + 1);
   if (vno1->opcode == REALPART_EXPR
@@ -1341,7 +1635,7 @@ vn_nary_op_insert_stmt (gimple stmt, tree result)
 static inline hashval_t
 vn_phi_compute_hash (vn_phi_t vp1)
 {
-  hashval_t result = 0;
+  hashval_t result;
   int i;
   tree phi1op;
   tree type;
@@ -1359,7 +1653,7 @@ vn_phi_compute_hash (vn_phi_t vp1)
     {
       if (phi1op == VN_TOP)
	continue;
-      result += iterative_hash_expr (phi1op, result);
+      result = iterative_hash_expr (phi1op, result);
     }
 
   return result;
@@ -1641,6 +1935,8 @@ visit_reference_op_call (tree lhs, gimple stmt)
 
   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
   vr1.operands = valueize_shared_reference_ops_from_call (stmt);
+  vr1.type = gimple_expr_type (stmt);
+  vr1.set = 0;
   vr1.hashcode = vn_reference_compute_hash (&vr1);
   result = vn_reference_lookup_1 (&vr1, NULL);
   if (result)
@@ -1658,6 +1954,8 @@ visit_reference_op_call (tree lhs, gimple stmt)
       vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
       vr2->vuse = vr1.vuse;
       vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
+      vr2->type = vr1.type;
+      vr2->set = vr1.set;
       vr2->hashcode = vr1.hashcode;
       vr2->result = lhs;
       slot = htab_find_slot_with_hash (current_info->references,
@@ -1677,7 +1975,19 @@ static bool
 visit_reference_op_load (tree lhs, tree op, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+  tree last_vuse;
+  tree result;
+
+  last_vuse = gimple_vuse (stmt);
+  last_vuse_ptr = &last_vuse;
+  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+  last_vuse_ptr = NULL;
+
+  /* If we have a VCE, try looking up its operand as it might be stored in
+     a different type.  */
+  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
+    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  true, NULL);
 
   /* We handle type-punning through unions by value-numbering based
      on offset and size of the access.  Be prepared to handle a
@@ -1755,7 +2065,7 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
   else
     {
       changed = set_ssa_val_to (lhs, lhs);
-      vn_reference_insert (op, lhs, gimple_vuse (stmt));
+      vn_reference_insert (op, lhs, last_vuse);
     }
 
   return changed;
@@ -2044,7 +2354,7 @@ simplify_binary_expression (gimple stmt)
   fold_defer_overflow_warnings ();
 
   result = fold_binary (gimple_assign_rhs_code (stmt),
-			TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
+			gimple_expr_type (stmt), op0, op1);
   if (result)
     STRIP_USELESS_TYPE_CONVERSION (result);
 
@@ -2400,7 +2710,7 @@ compare_ops (const void *pa, const void *pb)
   basic_block bbb;
 
   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
-    return 0;
+    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
   else if (gimple_nop_p (opstmta))
     return -1;
   else if (gimple_nop_p (opstmtb))
@@ -2410,7 +2720,7 @@ compare_ops (const void *pa, const void *pb)
   bbb = gimple_bb (opstmtb);
 
   if (!bba && !bbb)
-    return 0;
+    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
   else if (!bba)
     return -1;
   else if (!bbb)
@@ -2420,12 +2730,15 @@ compare_ops (const void *pa, const void *pb)
     {
       if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
-	return 0;
+	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
       else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
       else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
-      return gimple_uid (opstmta) - gimple_uid (opstmtb);
+      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
+	return gimple_uid (opstmta) - gimple_uid (opstmtb);
+      else
+	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
     }
   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
 }
@@ -2444,6 +2757,60 @@ sort_scc (VEC (tree, heap) *scc)
	 compare_ops);
 }
 
+/* Insert the no longer used nary *ENTRY to the current hash.  */
+
+static int
+copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_nary_op_t onary = (vn_nary_op_t) *entry;
+  size_t size = (sizeof (struct vn_nary_op_s)
		 - sizeof (tree) * (4 - onary->length));
+  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
						    size);
+  void **slot;
+  memcpy (nary, onary, size);
+  slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
				   INSERT);
+  gcc_assert (!*slot);
+  *slot = nary;
+  return 1;
+}
+
+/* Insert the no longer used phi *ENTRY to the current hash.  */
+
+static int
+copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_phi_t ophi = (vn_phi_t) *entry;
+  vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
+  void **slot;
+  memcpy (phi, ophi, sizeof (*phi));
+  ophi->phiargs = NULL;
+  slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
				   INSERT);
+  *slot = phi;
+  return 1;
+}
+
+/* Insert the no longer used reference *ENTRY to the current hash.  */
+
+static int
+copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_reference_t oref = (vn_reference_t) *entry;
+  vn_reference_t ref;
+  void **slot;
+  ref = (vn_reference_t) pool_alloc (current_info->references_pool);
+  memcpy (ref, oref, sizeof (*ref));
+  oref->operands = NULL;
+  slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
				   INSERT);
+  if (*slot)
+    free_reference (*slot);
+  *slot = ref;
+  return 1;
+}
+
 /* Process a strongly connected component in the SSA graph.  */
 
 static void
@@ -2489,10 +2856,12 @@ process_scc (VEC (tree, heap) *scc)
 
       statistics_histogram_event (cfun, "SCC iterations", iterations);
 
-      /* Finally, visit the SCC once using the valid table.  */
+      /* Finally, copy the contents of the no longer used optimistic
	 table to the valid table.  */
       current_info = valid_info;
-      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
-	visit_use (var);
+      htab_traverse (optimistic_info->nary, copy_nary, NULL);
+      htab_traverse (optimistic_info->phis, copy_phis, NULL);
+      htab_traverse (optimistic_info->references, copy_references, NULL);
     }
 }
 
@@ -2688,12 +3057,12 @@ init_scc_vn (void)
   sccstack = NULL;
   constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);
-  
+
   constant_value_ids = BITMAP_ALLOC (NULL);
-  
+
   next_dfs_num = 1;
   next_value_id = 1;
-  
+
   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
   /* VEC_alloc doesn't actually grow it to the right size, it just
      preallocates the space to do so.  */
@@ -2780,7 +3149,7 @@ set_hashtable_value_ids (void)
     table.  */
 
   FOR_EACH_HTAB_ELEMENT (valid_info->nary,
-			 vno, vn_nary_op_t, hi) 
+			 vno, vn_nary_op_t, hi)
     {
       if (vno->result)
	{
@@ -2792,7 +3161,7 @@ set_hashtable_value_ids (void)
     }
 
   FOR_EACH_HTAB_ELEMENT (valid_info->phis,
-			 vp, vn_phi_t, hi) 
+			 vp, vn_phi_t, hi)
     {
       if (vp->result)
	{
@@ -2804,7 +3173,7 @@ set_hashtable_value_ids (void)
     }
 
   FOR_EACH_HTAB_ELEMENT (valid_info->references,
-			 vr, vn_reference_t, hi) 
+			 vr, vn_reference_t, hi)
     {
       if (vr->result)
	{
@@ -2825,7 +3194,7 @@ run_scc_vn (bool may_insert_arg)
   size_t i;
   tree param;
   bool changed = true;
-  
+
   may_insert = may_insert_arg;
 
   init_scc_vn ();
@@ -2857,7 +3226,7 @@ run_scc_vn (bool may_insert_arg)
     }
 
   /* Initialize the value ids.  */
-      
+
   for (i = 1; i < num_ssa_names; ++i)
     {
       tree name = ssa_name (i);
@@ -2865,12 +3234,13 @@ run_scc_vn (bool may_insert_arg)
       if (!name)
	continue;
       info = VN_INFO (name);
-      if (info->valnum == name)
+      if (info->valnum == name
+	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
       else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
     }
-  
+
   /* Propagate until they stop changing.  */
   while (changed)
     {
@@ -2891,9 +3261,9 @@ run_scc_vn (bool may_insert_arg)
	    }
	}
     }
-  
+
   set_hashtable_value_ids ();
-  
+
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Value numbers:\n");
@@ -2919,7 +3289,7 @@ run_scc_vn (bool may_insert_arg)
 /* Return the maximum value id we have ever seen.  */
 
 unsigned int
-get_max_value_id (void) 
+get_max_value_id (void)
 {
   return next_value_id;
 }
@@ -2946,23 +3316,6 @@ expressions_equal_p (tree e1, tree e2)
   if (!e1 || !e2)
     return false;
 
-  /* Recurse on elements of lists.  */
-  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
-    {
-      tree lop1 = e1;
-      tree lop2 = e2;
-      for (lop1 = e1, lop2 = e2;
-	   lop1 || lop2;
-	   lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
-	{
-	  if (!lop1 || !lop2)
-	    return false;
-	  if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
-	    return false;
-	}
-      return true;
-    }
-
   /* Now perform the actual comparison.  */
   if (TREE_CODE (e1) == TREE_CODE (e2)
       && operand_equal_p (e1, e2, OEP_PURE_SAME))