X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ftree-ssa-ccp.c;h=ec70c36d6d37d743674a7db5c791790f6fde84c9;hp=58b8c2c01bef1658c9f45ec4f96c96b1c5f048c5;hb=8b0e883501b45c7240394bae2cc694482291a2ea;hpb=8782adcf1c5f6047fa912890d7b74d7636484f30 diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c index 58b8c2c01be..ec70c36d6d3 100644 --- a/gcc/tree-ssa-ccp.c +++ b/gcc/tree-ssa-ccp.c @@ -18,8 +18,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ /* Conditional constant propagation (CCP) is based on the SSA propagation engine (tree-ssa-propagate.c). Constant assignments of @@ -199,7 +199,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "ggc.h" #include "basic-block.h" #include "output.h" -#include "errors.h" #include "expr.h" #include "function.h" #include "diagnostic.h" @@ -228,7 +227,7 @@ typedef enum (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will contain the actual memory reference used to store (i.e., the LHS of the assignment doing the store). */ -prop_value_t *const_val; +static prop_value_t *const_val; /* True if we are also propagating constants in stores and loads. */ static bool do_store_ccp; @@ -274,6 +273,32 @@ debug_lattice_value (prop_value_t val) } +/* The regular is_gimple_min_invariant does a shallow test of the object. + It assumes that full gimplification has happened, or will happen on the + object. For a value coming from DECL_INITIAL, this is not true, so we + have to be more strict ourselves. */ + +static bool +ccp_decl_initial_min_invariant (tree t) +{ + if (!is_gimple_min_invariant (t)) + return false; + if (TREE_CODE (t) == ADDR_EXPR) + { + /* Inline and unroll is_gimple_addressable. */ + while (1) + { + t = TREE_OPERAND (t, 0); + if (is_gimple_id (t)) + return true; + if (!handled_component_p (t)) + return false; + } + } + return true; +} + + /* Compute a default value for variable VAR and store it in the CONST_VAL array. The following rules are used to get default values: @@ -317,8 +342,9 @@ get_default_value (tree var) } else if (TREE_STATIC (sym) && TREE_READONLY (sym) + && !MTAG_P (sym) && DECL_INITIAL (sym) - && is_gimple_min_invariant (DECL_INITIAL (sym))) + && ccp_decl_initial_min_invariant (DECL_INITIAL (sym))) { /* Globals and static variables declared 'const' take their initial value. */ @@ -456,9 +482,7 @@ likely_value (tree stmt) /* If we are not doing store-ccp, statements with loads and/or stores will never fold into a constant. */ if (!do_store_ccp - && (ann->makes_aliased_stores - || ann->makes_aliased_loads - || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))) + && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)) return VARYING; @@ -474,6 +498,9 @@ likely_value (tree stmt) && TREE_CODE (stmt) != SWITCH_EXPR) return VARYING; + if (is_gimple_min_invariant (get_rhs (stmt))) + return CONSTANT; + found_constant = false; FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE|SSA_OP_VUSE) { @@ -509,7 +536,7 @@ ccp_initialize (void) { basic_block bb; - const_val = xmalloc (num_ssa_names * sizeof (*const_val)); + const_val = XNEWVEC (prop_value_t, num_ssa_names); memset (const_val, 0, num_ssa_names * sizeof (*const_val)); /* Initialize simulation flags for PHI nodes and statements. 
*/ @@ -581,7 +608,7 @@ static void ccp_finalize (void) { /* Perform substitutions based on the known constant values. */ - substitute_and_fold (const_val); + substitute_and_fold (const_val, false); free (const_val); } @@ -659,7 +686,8 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2) && val2->lattice_val == CONSTANT && simple_cst_equal (val1->value, val2->value) == 1 && (!do_store_ccp - || simple_cst_equal (val1->mem_ref, val2->mem_ref) == 1)) + || (val1->mem_ref && val2->mem_ref + && operand_equal_p (val1->mem_ref, val2->mem_ref, 0)))) { /* Ci M Cj = Ci if (i == j) Ci M Cj = VARYING if (i != j) @@ -827,10 +855,19 @@ ccp_fold (tree stmt) /* If the RHS is a memory load, see if the VUSEs associated with it are a valid constant for that memory load. */ prop_value_t *val = get_value_loaded_by (stmt, const_val); - if (val && simple_cst_equal (val->mem_ref, rhs) == 1) - return val->value; - else - return NULL_TREE; + if (val && val->mem_ref) + { + if (operand_equal_p (val->mem_ref, rhs, 0)) + return val->value; + + /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a + complex type with a known constant value, return it. */ + if ((TREE_CODE (rhs) == REALPART_EXPR + || TREE_CODE (rhs) == IMAGPART_EXPR) + && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0)) + return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value); + } + return NULL_TREE; } /* Unary operators. Note that we know the single operand must @@ -849,27 +886,11 @@ ccp_fold (tree stmt) op0 = get_value (op0, true)->value; } - retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0); - - /* If we folded, but did not create an invariant, then we can not - use this expression. */ - if (retval && ! is_gimple_min_invariant (retval)) - return NULL; - - /* If we could not fold the expression, but the arguments are all - constants and gimple values, then build and return the new - expression. - - In some cases the new expression is still something we can - use as a replacement for an argument. This happens with - NOP conversions of types for example. - - In other cases the new expression can not be used as a - replacement for an argument (as it would create non-gimple - code). But the new expression can still be used to derive - other constants. */ - if (! retval && is_gimple_min_invariant (op0)) - return build1 (code, TREE_TYPE (rhs), op0); + if ((code == NOP_EXPR || code == CONVERT_EXPR) + && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs), + TREE_TYPE (op0))) + return op0; + return fold_unary (code, TREE_TYPE (rhs), op0); } /* Binary and comparison operators. We know one or both of the @@ -900,29 +921,7 @@ ccp_fold (tree stmt) op1 = val->value; } - retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1); - - /* If we folded, but did not create an invariant, then we can not - use this expression. */ - if (retval && ! is_gimple_min_invariant (retval)) - return NULL; - - /* If we could not fold the expression, but the arguments are all - constants and gimple values, then build and return the new - expression. - - In some cases the new expression is still something we can - use as a replacement for an argument. This happens with - NOP conversions of types for example. - - In other cases the new expression can not be used as a - replacement for an argument (as it would create non-gimple - code). But the new expression can still be used to derive - other constants. */ - if (! 
retval - && is_gimple_min_invariant (op0) - && is_gimple_min_invariant (op1)) - return build (code, TREE_TYPE (rhs), op0, op1); + return fold_binary (code, TREE_TYPE (rhs), op0, op1); } /* We may be able to fold away calls to builtin functions if their @@ -942,7 +941,7 @@ ccp_fold (tree stmt) use_operand_p var_p; /* Preserve the original values of every operand. */ - orig = xmalloc (sizeof (tree) * NUM_SSA_OPERANDS (stmt, SSA_OP_USE)); + orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE)); FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE) orig[i++] = var; @@ -979,7 +978,9 @@ static tree fold_const_aggregate_ref (tree t) { prop_value_t *value; - tree base, ctor, idx, field, elt; + tree base, ctor, idx, field; + unsigned HOST_WIDE_INT cnt; + tree cfield, cval; switch (TREE_CODE (t)) { @@ -1036,13 +1037,9 @@ fold_const_aggregate_ref (tree t) } /* Whoo-hoo! I'll fold ya baby. Yeah! */ - for (elt = CONSTRUCTOR_ELTS (ctor); - (elt && !tree_int_cst_equal (TREE_PURPOSE (elt), idx)); - elt = TREE_CHAIN (elt)) - ; - - if (elt) - return TREE_VALUE (elt); + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) + if (tree_int_cst_equal (cfield, idx)) + return cval; break; case COMPONENT_REF: @@ -1078,13 +1075,22 @@ fold_const_aggregate_ref (tree t) field = TREE_OPERAND (t, 1); - for (elt = CONSTRUCTOR_ELTS (ctor); elt; elt = TREE_CHAIN (elt)) - if (TREE_PURPOSE (elt) == field + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) + if (cfield == field /* FIXME: Handle bit-fields. */ - && ! DECL_BIT_FIELD (TREE_PURPOSE (elt))) - return TREE_VALUE (elt); + && ! DECL_BIT_FIELD (cfield)) + return cval; break; + case REALPART_EXPR: + case IMAGPART_EXPR: + { + tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); + if (c && TREE_CODE (c) == COMPLEX_CST) + return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c); + break; + } + default: break; } @@ -1130,7 +1136,11 @@ evaluate_stmt (tree stmt) /* The statement produced a nonconstant value. If the statement had UNDEFINED operands, then the result of the statement should be UNDEFINED. Otherwise, the statement is VARYING. */ - val.lattice_val = (likelyvalue == UNDEFINED) ? UNDEFINED : VARYING; + if (likelyvalue == UNDEFINED || likelyvalue == UNKNOWN_VAL) + val.lattice_val = likelyvalue; + else + val.lattice_val = VARYING; + val.value = NULL_TREE; } @@ -1167,7 +1177,8 @@ visit_assignment (tree stmt, tree *output_p) we can propagate the value on the RHS. 
*/ prop_value_t *nval = get_value_loaded_by (stmt, const_val); - if (nval && simple_cst_equal (nval->mem_ref, rhs) == 1) + if (nval && nval->mem_ref + && operand_equal_p (nval->mem_ref, rhs, 0)) val = *nval; else val = evaluate_stmt (stmt); @@ -1188,9 +1199,9 @@ visit_assignment (tree stmt, tree *output_p) if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR && val.lattice_val == CONSTANT) { - tree w = fold (build1 (VIEW_CONVERT_EXPR, - TREE_TYPE (TREE_OPERAND (orig_lhs, 0)), - val.value)); + tree w = fold_unary (VIEW_CONVERT_EXPR, + TREE_TYPE (TREE_OPERAND (orig_lhs, 0)), + val.value); orig_lhs = TREE_OPERAND (orig_lhs, 0); if (w && is_gimple_min_invariant (w)) @@ -1477,8 +1488,8 @@ widen_bitfield (tree val, tree field, tree var) for (i = 0, mask = 0; i < field_size; i++) mask |= ((HOST_WIDE_INT) 1) << i; - wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val, - build_int_cst (TREE_TYPE (var), mask)); + wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val, + build_int_cst (TREE_TYPE (var), mask)); } else { @@ -1488,11 +1499,11 @@ widen_bitfield (tree val, tree field, tree var) for (i = 0, mask = 0; i < (var_size - field_size); i++) mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1); - wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val, - build_int_cst (TREE_TYPE (var), mask)); + wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val, + build_int_cst (TREE_TYPE (var), mask)); } - return fold (wide_val); + return wide_val; } @@ -1585,9 +1596,9 @@ maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type) if (!integer_zerop (elt_offset)) idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0); - return build (ARRAY_REF, orig_type, base, idx, min_idx, - size_int (tree_low_cst (elt_size, 1) - / (TYPE_ALIGN_UNIT (elt_type)))); + return build4 (ARRAY_REF, orig_type, base, idx, min_idx, + size_int (tree_low_cst (elt_size, 1) + / (TYPE_ALIGN_UNIT (elt_type)))); } @@ -1648,7 +1659,7 @@ maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, { if (base_is_ptr) base = build1 (INDIRECT_REF, record_type, base); - t = build (COMPONENT_REF, field_type, base, f, NULL_TREE); + t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); return t; } @@ -1688,7 +1699,7 @@ maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, nonzero offset into them. Recurse and hope for a valid match. */ if (base_is_ptr) base = build1 (INDIRECT_REF, record_type, base); - base = build (COMPONENT_REF, field_type, base, f, NULL_TREE); + base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); t = maybe_fold_offset_to_array_ref (base, offset, orig_type); if (t) @@ -1738,7 +1749,7 @@ maybe_fold_stmt_indirect (tree expr, tree base, tree offset) /* Fold away CONST_DECL to its value, if the type is scalar. */ if (TREE_CODE (base) == CONST_DECL - && is_gimple_min_invariant (DECL_INITIAL (base))) + && ccp_decl_initial_min_invariant (DECL_INITIAL (base))) return DECL_INITIAL (base); /* Try folding *(&B+O) to B[X]. */ @@ -1898,9 +1909,9 @@ maybe_fold_stmt_addition (tree expr) { if (TYPE_UNSIGNED (TREE_TYPE (op1))) return NULL; - op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1)); + op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1); /* ??? In theory fold should always produce another integer. */ - if (TREE_CODE (op1) != INTEGER_CST) + if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST) return NULL; } @@ -1917,6 +1928,14 @@ maybe_fold_stmt_addition (tree expr) return t; } +/* For passing state through walk_tree into fold_stmt_r and its + children. 
*/ + +struct fold_stmt_r_data +{ + bool *changed_p; + bool *inside_addr_expr_p; +}; /* Subroutine of fold_stmt called via walk_tree. We perform several simplifications of EXPR_P, mostly having to do with pointer arithmetic. */ @@ -1924,7 +1943,9 @@ maybe_fold_stmt_addition (tree expr) static tree fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) { - bool *changed_p = data; + struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data; + bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p; + bool *changed_p = fold_stmt_r_data->changed_p; tree expr = *expr_p, t; /* ??? It'd be nice if walk_tree had a pre-order option. */ @@ -1940,13 +1961,23 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) integer_zero_node); break; - /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF. + /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF. We'd only want to bother decomposing an existing ARRAY_REF if the base array is found to have another offset contained within. Otherwise we'd be wasting time. */ + case ARRAY_REF: + /* If we are not processing expressions found within an + ADDR_EXPR, then we can fold constant array references. */ + if (!*inside_addr_expr_p) + t = fold_read_from_constant_string (expr); + else + t = NULL; + break; case ADDR_EXPR: + *inside_addr_expr_p = true; t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); + *inside_addr_expr_p = false; if (t) return t; *walk_subtrees = 0; @@ -1954,7 +1985,7 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) /* Set TREE_INVARIANT properly so that the value is properly considered constant, and so gets propagated as expected. */ if (*changed_p) - recompute_tree_invarant_for_addr_expr (expr); + recompute_tree_invariant_for_addr_expr (expr); return NULL_TREE; case PLUS_EXPR: @@ -1991,6 +2022,10 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) } break; + case TARGET_MEM_REF: + t = maybe_fold_tmr (expr); + break; + default: return NULL_TREE; } @@ -2005,24 +2040,49 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) } -/* Return the string length of ARG in LENGTH. If ARG is an SSA name variable, - follow its use-def chains. If LENGTH is not NULL and its value is not - equal to the length we determine, or if we are unable to determine the - length, return false. VISITED is a bitmap of visited variables. */ +/* Return the string length, maximum string length or maximum value of + ARG in LENGTH. + If ARG is an SSA name variable, follow its use-def chains. If LENGTH + is not NULL and, for TYPE == 0, its value is not equal to the length + we determine or if we are unable to determine the length or value, + return false. VISITED is a bitmap of visited variables. + TYPE is 0 if string length should be returned, 1 for maximum string + length and 2 for maximum value ARG can have. 
*/ static bool -get_strlen (tree arg, tree *length, bitmap visited) +get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) { tree var, def_stmt, val; if (TREE_CODE (arg) != SSA_NAME) { - val = c_strlen (arg, 1); + if (type == 2) + { + val = arg; + if (TREE_CODE (val) != INTEGER_CST + || tree_int_cst_sgn (val) < 0) + return false; + } + else + val = c_strlen (arg, 1); if (!val) return false; - if (*length && simple_cst_equal (val, *length) != 1) - return false; + if (*length) + { + if (type > 0) + { + if (TREE_CODE (*length) != INTEGER_CST + || TREE_CODE (val) != INTEGER_CST) + return false; + + if (tree_int_cst_lt (*length, val)) + *length = val; + return true; + } + else if (simple_cst_equal (val, *length) != 1) + return false; + } *length = val; return true; @@ -2040,28 +2100,14 @@ get_strlen (tree arg, tree *length, bitmap visited) { case MODIFY_EXPR: { - tree len, rhs; - + tree rhs; + /* The RHS of the statement defining VAR must either have a constant length or come from another SSA_NAME with a constant length. */ rhs = TREE_OPERAND (def_stmt, 1); STRIP_NOPS (rhs); - if (TREE_CODE (rhs) == SSA_NAME) - return get_strlen (rhs, length, visited); - - /* See if the RHS is a constant length. */ - len = c_strlen (rhs, 1); - if (len) - { - if (*length && simple_cst_equal (len, *length) != 1) - return false; - - *length = len; - return true; - } - - break; + return get_maxval_strlen (rhs, length, visited, type); } case PHI_NODE: @@ -2083,7 +2129,7 @@ get_strlen (tree arg, tree *length, bitmap visited) if (arg == PHI_RESULT (def_stmt)) continue; - if (!get_strlen (arg, length, visited)) + if (!get_maxval_strlen (arg, length, visited, type)) return false; } @@ -2105,9 +2151,9 @@ get_strlen (tree arg, tree *length, bitmap visited) static tree ccp_fold_builtin (tree stmt, tree fn) { - tree result, strlen_val[2]; + tree result, val[3]; tree callee, arglist, a; - int strlen_arg, i; + int arg_mask, i, type; bitmap visited; bool ignore; @@ -2119,11 +2165,11 @@ ccp_fold_builtin (tree stmt, tree fn) arglist = TREE_OPERAND (fn, 1); result = fold_builtin (callee, arglist, ignore); if (result) - { - if (ignore) - STRIP_NOPS (result); - return result; - } + { + if (ignore) + STRIP_NOPS (result); + return result; + } /* Ignore MD builtins. */ if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) @@ -2140,11 +2186,31 @@ ccp_fold_builtin (tree stmt, tree fn) case BUILT_IN_STRLEN: case BUILT_IN_FPUTS: case BUILT_IN_FPUTS_UNLOCKED: - strlen_arg = 1; + arg_mask = 1; + type = 0; break; case BUILT_IN_STRCPY: case BUILT_IN_STRNCPY: - strlen_arg = 2; + arg_mask = 2; + type = 0; + break; + case BUILT_IN_MEMCPY_CHK: + case BUILT_IN_MEMPCPY_CHK: + case BUILT_IN_MEMMOVE_CHK: + case BUILT_IN_MEMSET_CHK: + case BUILT_IN_STRNCPY_CHK: + arg_mask = 4; + type = 2; + break; + case BUILT_IN_STRCPY_CHK: + case BUILT_IN_STPCPY_CHK: + arg_mask = 2; + type = 1; + break; + case BUILT_IN_SNPRINTF_CHK: + case BUILT_IN_VSNPRINTF_CHK: + arg_mask = 2; + type = 2; break; default: return NULL_TREE; @@ -2153,15 +2219,15 @@ ccp_fold_builtin (tree stmt, tree fn) /* Try to use the dataflow information gathered by the CCP process. 
*/ visited = BITMAP_ALLOC (NULL); - memset (strlen_val, 0, sizeof (strlen_val)); + memset (val, 0, sizeof (val)); for (i = 0, a = arglist; - strlen_arg; - i++, strlen_arg >>= 1, a = TREE_CHAIN (a)) - if (strlen_arg & 1) + arg_mask; + i++, arg_mask >>= 1, a = TREE_CHAIN (a)) + if (arg_mask & 1) { bitmap_clear (visited); - if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited)) - strlen_val[i] = NULL_TREE; + if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type)) + val[i] = NULL_TREE; } BITMAP_FREE (visited); @@ -2170,9 +2236,9 @@ ccp_fold_builtin (tree stmt, tree fn) switch (DECL_FUNCTION_CODE (callee)) { case BUILT_IN_STRLEN: - if (strlen_val[0]) + if (val[0]) { - tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]); + tree new = fold_convert (TREE_TYPE (fn), val[0]); /* If the result is not a valid gimple value, or not a cast of a valid gimple value, then we can not use the result. */ @@ -2184,33 +2250,53 @@ ccp_fold_builtin (tree stmt, tree fn) break; case BUILT_IN_STRCPY: - if (strlen_val[1] && is_gimple_val (strlen_val[1])) - { - tree fndecl = get_callee_fndecl (fn); - tree arglist = TREE_OPERAND (fn, 1); - result = fold_builtin_strcpy (fndecl, arglist, strlen_val[1]); - } + if (val[1] && is_gimple_val (val[1])) + result = fold_builtin_strcpy (callee, arglist, val[1]); break; case BUILT_IN_STRNCPY: - if (strlen_val[1] && is_gimple_val (strlen_val[1])) - { - tree fndecl = get_callee_fndecl (fn); - tree arglist = TREE_OPERAND (fn, 1); - result = fold_builtin_strncpy (fndecl, arglist, strlen_val[1]); - } + if (val[1] && is_gimple_val (val[1])) + result = fold_builtin_strncpy (callee, arglist, val[1]); break; case BUILT_IN_FPUTS: result = fold_builtin_fputs (arglist, TREE_CODE (stmt) != MODIFY_EXPR, 0, - strlen_val[0]); + val[0]); break; case BUILT_IN_FPUTS_UNLOCKED: result = fold_builtin_fputs (arglist, TREE_CODE (stmt) != MODIFY_EXPR, 1, - strlen_val[0]); + val[0]); + break; + + case BUILT_IN_MEMCPY_CHK: + case BUILT_IN_MEMPCPY_CHK: + case BUILT_IN_MEMMOVE_CHK: + case BUILT_IN_MEMSET_CHK: + if (val[2] && is_gimple_val (val[2])) + result = fold_builtin_memory_chk (callee, arglist, val[2], ignore, + DECL_FUNCTION_CODE (callee)); + break; + + case BUILT_IN_STRCPY_CHK: + case BUILT_IN_STPCPY_CHK: + if (val[1] && is_gimple_val (val[1])) + result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore, + DECL_FUNCTION_CODE (callee)); + break; + + case BUILT_IN_STRNCPY_CHK: + if (val[2] && is_gimple_val (val[2])) + result = fold_builtin_strncpy_chk (arglist, val[2]); + break; + + case BUILT_IN_SNPRINTF_CHK: + case BUILT_IN_VSNPRINTF_CHK: + if (val[1] && is_gimple_val (val[1])) + result = fold_builtin_snprintf_chk (arglist, val[1], + DECL_FUNCTION_CODE (callee)); break; default: @@ -2223,7 +2309,7 @@ ccp_fold_builtin (tree stmt, tree fn) } -/* Fold the statement pointed by STMT_P. In some cases, this function may +/* Fold the statement pointed to by STMT_P. In some cases, this function may replace the whole statement with a new one. Returns true iff folding makes any changes. */ @@ -2231,13 +2317,18 @@ bool fold_stmt (tree *stmt_p) { tree rhs, result, stmt; + struct fold_stmt_r_data fold_stmt_r_data; bool changed = false; + bool inside_addr_expr = false; + + fold_stmt_r_data.changed_p = &changed; + fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; stmt = *stmt_p; /* If we replaced constants and the statement makes pointer dereferences, then we may need to fold instances of *&VAR into VAR, etc. 
*/ - if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL)) + if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL)) { *stmt_p = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], @@ -2259,38 +2350,6 @@ fold_stmt (tree *stmt_p) callee = get_callee_fndecl (rhs); if (callee && DECL_BUILT_IN (callee)) result = ccp_fold_builtin (stmt, rhs); - else - { - /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve - here are when we've propagated the address of a decl into the - object slot. */ - /* ??? Should perhaps do this in fold proper. However, doing it - there requires that we create a new CALL_EXPR, and that requires - copying EH region info to the new node. Easier to just do it - here where we can just smash the call operand. */ - callee = TREE_OPERAND (rhs, 0); - if (TREE_CODE (callee) == OBJ_TYPE_REF - && lang_hooks.fold_obj_type_ref - && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR - && DECL_P (TREE_OPERAND - (OBJ_TYPE_REF_OBJECT (callee), 0))) - { - tree t; - - /* ??? Caution: Broken ADDR_EXPR semantics means that - looking at the type of the operand of the addr_expr - can yield an array type. See silly exception in - check_pointer_types_r. */ - - t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee))); - t = lang_hooks.fold_obj_type_ref (callee, t); - if (t) - { - TREE_OPERAND (rhs, 0) = t; - changed = true; - } - } - } } /* If we couldn't fold the RHS, hand over to the generic fold routines. */ @@ -2308,6 +2367,38 @@ fold_stmt (tree *stmt_p) return changed; } +/* Perform the minimal folding on statement STMT. Only operations like + *&x created by constant propagation are handled. The statement cannot + be replaced with a new one. */ + +bool +fold_stmt_inplace (tree stmt) +{ + tree old_stmt = stmt, rhs, new_rhs; + struct fold_stmt_r_data fold_stmt_r_data; + bool changed = false; + bool inside_addr_expr = false; + + fold_stmt_r_data.changed_p = &changed; + fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; + + walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL); + gcc_assert (stmt == old_stmt); + + rhs = get_rhs (stmt); + if (!rhs || rhs == stmt) + return changed; + + new_rhs = fold (rhs); + STRIP_USELESS_TYPE_CONVERSION (new_rhs); + if (new_rhs == rhs) + return changed; + + changed |= set_rhs (&stmt, new_rhs); + gcc_assert (stmt == old_stmt); + + return changed; +} /* Convert EXPR into a GIMPLE value suitable for substitution on the RHS of an assignment. 
Insert the necessary statements before @@ -2352,17 +2443,26 @@ execute_fold_all_builtins (void) FOR_EACH_BB (bb) { block_stmt_iterator i; - for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) + for (i = bsi_start (bb); !bsi_end_p (i); ) { tree *stmtp = bsi_stmt_ptr (i); + tree old_stmt = *stmtp; tree call = get_rhs (*stmtp); tree callee, result; + enum built_in_function fcode; if (!call || TREE_CODE (call) != CALL_EXPR) - continue; + { + bsi_next (&i); + continue; + } callee = get_callee_fndecl (call); if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) - continue; + { + bsi_next (&i); + continue; + } + fcode = DECL_FUNCTION_CODE (callee); result = ccp_fold_builtin (*stmtp, call); if (!result) @@ -2376,6 +2476,7 @@ execute_fold_all_builtins (void) break; default: + bsi_next (&i); continue; } @@ -2395,8 +2496,8 @@ execute_fold_all_builtins (void) gcc_assert (ok); } } - update_stmt (*stmtp); - if (maybe_clean_eh_stmt (*stmtp) + mark_new_vars_to_rename (*stmtp); + if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp) && tree_purge_dead_eh_edges (bb)) cfg_changed = true; @@ -2406,6 +2507,20 @@ execute_fold_all_builtins (void) print_generic_stmt (dump_file, *stmtp, dump_flags); fprintf (dump_file, "\n"); } + + /* Retry the same statement if it changed into another + builtin, there might be new opportunities now. */ + call = get_rhs (*stmtp); + if (!call || TREE_CODE (call) != CALL_EXPR) + { + bsi_next (&i); + continue; + } + callee = get_callee_fndecl (call); + if (!callee + || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL + || DECL_FUNCTION_CODE (callee) == fcode) + bsi_next (&i); } }
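
A minimal sketch of the kind of statement the new ARRAY_REF case in fold_stmt_r can now simplify: once CCP has propagated a constant index, fold_read_from_constant_string folds the read from a string literal into a character constant. This is illustrative example code only, not part of tree-ssa-ccp.c or of the patch above; the function name and constants are made up.

/* Hypothetical example, assuming a GCC of this era compiling at -O1 or
   higher so that the CCP pass runs.  */

int
nth_digit (void)
{
  int i = 3;                  /* CCP propagates the constant 3 into the use below.  */
  return "0123456789"[i];     /* The ARRAY_REF case in fold_stmt_r (outside any
                                 ADDR_EXPR) then folds this read from a constant
                                 string to the character constant '3'.  */
}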