X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-sccvn.c;h=79ce3c20a25af10a62a585ac02e67d8070284a41;hb=738928bee1b9d374e8d3db6508a3975867771734;hp=3d814fc2e0bb023d62c6fa06b526841738094953;hpb=9fa672189b407b95f332b4701d6679f9babca98c;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c index 3d814fc2e0b..79ce3c20a25 100644 --- a/gcc/tree-ssa-sccvn.c +++ b/gcc/tree-ssa-sccvn.c @@ -324,7 +324,7 @@ vn_constant_eq (const void *p1, const void *p2) } /* Hash table hash function for vn_constant_t. */ - + static hashval_t vn_constant_hash (const void *p1) { @@ -357,21 +357,23 @@ unsigned int get_or_alloc_constant_value_id (tree constant) { void **slot; - vn_constant_t vc = XNEW (struct vn_constant_s); - - vc->hashcode = vn_hash_constant_with_type (constant); - vc->constant = constant; - slot = htab_find_slot_with_hash (constant_to_value_id, vc, - vc->hashcode, INSERT); + struct vn_constant_s vc; + vn_constant_t vcp; + + vc.hashcode = vn_hash_constant_with_type (constant); + vc.constant = constant; + slot = htab_find_slot_with_hash (constant_to_value_id, &vc, + vc.hashcode, INSERT); if (*slot) - { - free (vc); - return ((vn_constant_t)*slot)->value_id; - } - vc->value_id = get_next_value_id (); - *slot = vc; - bitmap_set_bit (constant_value_ids, vc->value_id); - return vc->value_id; + return ((vn_constant_t)*slot)->value_id; + + vcp = XNEW (struct vn_constant_s); + vcp->hashcode = vc.hashcode; + vcp->constant = constant; + vcp->value_id = get_next_value_id (); + *slot = (void *) vcp; + bitmap_set_bit (constant_value_ids, vcp->value_id); + return vcp->value_id; } /* Return true if V is a value id for a constant. */ @@ -379,7 +381,7 @@ get_or_alloc_constant_value_id (tree constant) bool value_id_constant_p (unsigned int v) { - return bitmap_bit_p (constant_value_ids, v); + return bitmap_bit_p (constant_value_ids, v); } /* Compare two reference operands P1 and P2 for equality. 
Return true if @@ -401,15 +403,15 @@ vn_reference_op_eq (const void *p1, const void *p2) /* Compute the hash for a reference operand VRO1. */ static hashval_t -vn_reference_op_compute_hash (const vn_reference_op_t vro1) +vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result) { - hashval_t result = 0; + result = iterative_hash_hashval_t (vro1->opcode, result); if (vro1->op0) - result += iterative_hash_expr (vro1->op0, vro1->opcode); + result = iterative_hash_expr (vro1->op0, result); if (vro1->op1) - result += iterative_hash_expr (vro1->op1, vro1->opcode); + result = iterative_hash_expr (vro1->op1, result); if (vro1->op2) - result += iterative_hash_expr (vro1->op2, vro1->opcode); + result = iterative_hash_expr (vro1->op2, result); return result; } @@ -427,13 +429,14 @@ vn_reference_hash (const void *p1) hashval_t vn_reference_compute_hash (const vn_reference_t vr1) { - hashval_t result; + hashval_t result = 0; int i; vn_reference_op_t vro; - result = iterative_hash_expr (vr1->vuse, 0); for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++) - result += vn_reference_op_compute_hash (vro); + result = vn_reference_op_compute_hash (vro, result); + if (vr1->vuse) + result += SSA_NAME_VERSION (vr1->vuse); return result; } @@ -576,8 +579,6 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result) case CONST_DECL: case RESULT_DECL: case SSA_NAME: - case EXC_PTR_EXPR: - case FILTER_EXPR: temp.op0 = ref; break; case ADDR_EXPR: @@ -688,8 +689,6 @@ ao_ref_init_from_vn_reference (ao_ref *ref, case PARM_DECL: case RESULT_DECL: case SSA_NAME: - case FILTER_EXPR: - case EXC_PTR_EXPR: *op0_p = op->op0; break; @@ -984,10 +983,12 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult) *vnresult = (vn_reference_t)*slot; return ((vn_reference_t)*slot)->result; } - + return NULL_TREE; } +static tree *last_vuse_ptr; + /* Callback for walk_non_aliased_vuses. 
Adjusts the vn_reference_t VR_ with the current VUSE and performs the expression lookup. */ @@ -998,10 +999,15 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_) void **slot; hashval_t hash; + if (last_vuse_ptr) + *last_vuse_ptr = vuse; + /* Fixup vuse and hash. */ - vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0); + if (vr->vuse) + vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse); vr->vuse = SSA_VAL (vuse); - vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0); + if (vr->vuse) + vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse); hash = vr->hashcode; slot = htab_find_slot_with_hash (current_info->references, vr, @@ -1011,7 +1017,7 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_) hash, NO_INSERT); if (slot) return *slot; - + return NULL; } @@ -1027,11 +1033,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) gimple def_stmt = SSA_NAME_DEF_STMT (vuse); tree fndecl; tree base; - HOST_WIDE_INT offset, size, maxsize; + HOST_WIDE_INT offset, maxsize; base = ao_ref_base (ref); offset = ref->offset; - size = ref->size; maxsize = ref->max_size; /* If we cannot constrain the size of the reference we cannot @@ -1120,7 +1125,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs); i = VEC_length (vn_reference_op_s, vr->operands) - 1; j = VEC_length (vn_reference_op_s, lhs) - 1; - while (j >= 0 + while (j >= 0 && i >= 0 && vn_reference_op_eq (VEC_index (vn_reference_op_s, vr->operands, i), VEC_index (vn_reference_op_s, lhs, j))) @@ -1128,13 +1133,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) i--; j--; } + + VEC_free (vn_reference_op_s, heap, lhs); /* i now points to the first additional op. ??? LHS may not be completely contained in VR, one or more VIEW_CONVERT_EXPRs could be in its way. We could at least try handling outermost VIEW_CONVERT_EXPRs. 
*/ if (j != -1) return (void *)-1; - VEC_free (vn_reference_op_s, heap, lhs); /* Now re-write REF to be based on the rhs of the assignment. */ copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs); @@ -1160,9 +1166,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) /* Adjust *ref from the new operands. */ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)) return (void *)-1; - gcc_assert (ref->size == r.size); + /* This can happen with bitfields. */ + if (ref->size != r.size) + return (void *)-1; *ref = r; + /* Do not update last seen VUSE after translating. */ + last_vuse_ptr = NULL; + /* Keep looking for the adjusted *REF / VR pair. */ return NULL; } @@ -1337,7 +1348,7 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type, slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode, INSERT); - + /* At this point we should have all the things inserted that we have seen before, and we should never try inserting something that already exists. 
*/ @@ -1354,7 +1365,7 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type, hashval_t vn_nary_op_compute_hash (const vn_nary_op_t vno1) { - hashval_t hash = 0; + hashval_t hash; unsigned i; for (i = 0; i < vno1->length; ++i) @@ -1370,8 +1381,9 @@ vn_nary_op_compute_hash (const vn_nary_op_t vno1) vno1->op[1] = temp; } + hash = iterative_hash_hashval_t (vno1->opcode, 0); for (i = 0; i < vno1->length; ++i) - hash += iterative_hash_expr (vno1->op[i], vno1->opcode); + hash = iterative_hash_expr (vno1->op[i], hash); return hash; } @@ -1418,7 +1430,7 @@ vn_nary_op_eq (const void *p1, const void *p2) tree vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code, tree type, tree op0, tree op1, tree op2, - tree op3, vn_nary_op_t *vnresult) + tree op3, vn_nary_op_t *vnresult) { void **slot; struct vn_nary_op_s vno1; @@ -1522,7 +1534,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, tree type, tree op0, tree op1, tree op2, tree op3, tree result, - unsigned int value_id) + unsigned int value_id) { void **slot; vn_nary_op_t vno1; @@ -1550,7 +1562,7 @@ vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, *slot = vno1; return vno1; - + } /* Insert OP into the current hash table with a value number of @@ -1623,7 +1635,7 @@ vn_nary_op_insert_stmt (gimple stmt, tree result) static inline hashval_t vn_phi_compute_hash (vn_phi_t vp1) { - hashval_t result = 0; + hashval_t result; int i; tree phi1op; tree type; @@ -1641,7 +1653,7 @@ vn_phi_compute_hash (vn_phi_t vp1) { if (phi1op == VN_TOP) continue; - result += iterative_hash_expr (phi1op, result); + result = iterative_hash_expr (phi1op, result); } return result; @@ -1963,7 +1975,13 @@ static bool visit_reference_op_load (tree lhs, tree op, gimple stmt) { bool changed = false; - tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL); + tree last_vuse; + tree result; + + last_vuse = gimple_vuse (stmt); + last_vuse_ptr = &last_vuse; + result = 
vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+  last_vuse_ptr = NULL;
 
   /* If we have a VCE, try looking up its operand as it might be stored in
      a different type.  */
@@ -2047,7 +2065,7 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
   else
     {
       changed = set_ssa_val_to (lhs, lhs);
-      vn_reference_insert (op, lhs, gimple_vuse (stmt));
+      vn_reference_insert (op, lhs, last_vuse);
     }
 
   return changed;
@@ -2739,6 +2757,60 @@ sort_scc (VEC (tree, heap) *scc)
 	 compare_ops);
 }
 
+/* Insert the no longer used nary *ENTRY to the current hash.  */
+
+static int
+copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_nary_op_t onary = (vn_nary_op_t) *entry;
+  size_t size = (sizeof (struct vn_nary_op_s)
+		 - sizeof (tree) * (4 - onary->length));
+  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
+						    size);
+  void **slot;
+  memcpy (nary, onary, size);
+  slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
+				   INSERT);
+  gcc_assert (!*slot);
+  *slot = nary;
+  return 1;
+}
+
+/* Insert the no longer used phi *ENTRY to the current hash.  */
+
+static int
+copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+  vn_phi_t ophi = (vn_phi_t) *entry;
+  vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
+  void **slot;
+  memcpy (phi, ophi, sizeof (*phi));
+  ophi->phiargs = NULL;
+  slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
+				   INSERT);
+  *slot = phi;
+  return 1;
+}
+
+/* Insert the no longer used reference *ENTRY to the current hash. 
*/ + +static int +copy_references (void **entry, void *data ATTRIBUTE_UNUSED) +{ + vn_reference_t oref = (vn_reference_t) *entry; + vn_reference_t ref; + void **slot; + ref = (vn_reference_t) pool_alloc (current_info->references_pool); + memcpy (ref, oref, sizeof (*ref)); + oref->operands = NULL; + slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode, + INSERT); + if (*slot) + free_reference (*slot); + *slot = ref; + return 1; +} + /* Process a strongly connected component in the SSA graph. */ static void @@ -2784,10 +2856,12 @@ process_scc (VEC (tree, heap) *scc) statistics_histogram_event (cfun, "SCC iterations", iterations); - /* Finally, visit the SCC once using the valid table. */ + /* Finally, copy the contents of the no longer used optimistic + table to the valid table. */ current_info = valid_info; - for (i = 0; VEC_iterate (tree, scc, i, var); i++) - visit_use (var); + htab_traverse (optimistic_info->nary, copy_nary, NULL); + htab_traverse (optimistic_info->phis, copy_phis, NULL); + htab_traverse (optimistic_info->references, copy_references, NULL); } } @@ -2983,12 +3057,12 @@ init_scc_vn (void) sccstack = NULL; constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq, free); - + constant_value_ids = BITMAP_ALLOC (NULL); - + next_dfs_num = 1; next_value_id = 1; - + vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1); /* VEC_alloc doesn't actually grow it to the right size, it just preallocates the space to do so. */ @@ -3075,7 +3149,7 @@ set_hashtable_value_ids (void) table. 
*/ FOR_EACH_HTAB_ELEMENT (valid_info->nary, - vno, vn_nary_op_t, hi) + vno, vn_nary_op_t, hi) { if (vno->result) { @@ -3087,7 +3161,7 @@ set_hashtable_value_ids (void) } FOR_EACH_HTAB_ELEMENT (valid_info->phis, - vp, vn_phi_t, hi) + vp, vn_phi_t, hi) { if (vp->result) { @@ -3099,7 +3173,7 @@ set_hashtable_value_ids (void) } FOR_EACH_HTAB_ELEMENT (valid_info->references, - vr, vn_reference_t, hi) + vr, vn_reference_t, hi) { if (vr->result) { @@ -3120,7 +3194,7 @@ run_scc_vn (bool may_insert_arg) size_t i; tree param; bool changed = true; - + may_insert = may_insert_arg; init_scc_vn (); @@ -3152,7 +3226,7 @@ run_scc_vn (bool may_insert_arg) } /* Initialize the value ids. */ - + for (i = 1; i < num_ssa_names; ++i) { tree name = ssa_name (i); @@ -3166,7 +3240,7 @@ run_scc_vn (bool may_insert_arg) else if (is_gimple_min_invariant (info->valnum)) info->value_id = get_or_alloc_constant_value_id (info->valnum); } - + /* Propagate until they stop changing. */ while (changed) { @@ -3187,9 +3261,9 @@ run_scc_vn (bool may_insert_arg) } } } - + set_hashtable_value_ids (); - + if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "Value numbers:\n"); @@ -3215,7 +3289,7 @@ run_scc_vn (bool may_insert_arg) /* Return the maximum value id we have ever seen. */ unsigned int -get_max_value_id (void) +get_max_value_id (void) { return next_value_id; } @@ -3242,23 +3316,6 @@ expressions_equal_p (tree e1, tree e2) if (!e1 || !e2) return false; - /* Recurse on elements of lists. */ - if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST) - { - tree lop1 = e1; - tree lop2 = e2; - for (lop1 = e1, lop2 = e2; - lop1 || lop2; - lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2)) - { - if (!lop1 || !lop2) - return false; - if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2))) - return false; - } - return true; - } - /* Now perform the actual comparison. */ if (TREE_CODE (e1) == TREE_CODE (e2) && operand_equal_p (e1, e2, OEP_PURE_SAME))