/* Pass computing data for optimizing stdarg functions.
- Copyright (C) 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Jakub Jelinek <jakub@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
+#include "gimple-pretty-print.h"
+#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
+#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
that need to save register arguments to stack on entry to stdarg functions.
If the function doesn't use any va_arg macros, no registers need to
be saved.  If va_arg macros are used and the va_list variables don't
escape the function, it is only necessary to save the registers that
will be used in va_arg macros.  E.g. if va_arg is only used with integral
types in the function, floating point registers don't need to be saved, etc. */
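+
+/* Illustrative example (not part of the pass source): in
+
+     int
+     sum_ints (int count, ...)
+     {
+       va_list ap;
+       int i, sum = 0;
+
+       va_start (ap, count);
+       for (i = 0; i < count; i++)
+         sum += va_arg (ap, int);
+       va_end (ap);
+       return sum;
+     }
+
+   va_arg is only used with an integral type, so on targets with a
+   separate floating point register save area none of the FP argument
+   registers need to be spilled at function entry.  */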
-struct stdarg_info
-{
- bitmap va_list_vars;
- basic_block va_start_bb, bb;
- int compute_sizes, va_start_count;
-};
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
is executed at most as many times as VA_START_BB. */
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
- edge *stack, e;
+ VEC (edge, heap) *stack = NULL;
+ edge e;
edge_iterator ei;
- int sp;
sbitmap visited;
bool ret;
if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
return false;
- stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
- sp = 0;
-
visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (visited);
ret = true;
FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
- stack[sp++] = e;
+ VEC_safe_push (edge, heap, stack, e);
- while (sp)
+ while (! VEC_empty (edge, stack))
{
basic_block src;
- --sp;
- e = stack[sp];
+ e = VEC_pop (edge, stack);
src = e->src;
if (e->flags & EDGE_COMPLEX)
{
ret = false;
break;
}

if (src == va_start_bb)
continue;

/* va_arg_bb can be executed more times than va_start_bb.  */
if (src == va_arg_bb)
{
ret = false;
break;
}

gcc_assert (src != ENTRY_BLOCK_PTR);

if (! TEST_BIT (visited, src->index))
{
SET_BIT (visited, src->index);
FOR_EACH_EDGE (e, ei, src->preds)
- stack[sp++] = e;
+ VEC_safe_push (edge, heap, stack, e);
}
}
- free (stack);
+ VEC_free (edge, heap, stack);
sbitmap_free (visited);
return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
- return constant, otherwise return 0. */
+ return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
+ GPR_P is true if this is the GPR counter. */
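+
+/* Illustrative example (hypothetical GIMPLE, x86-64-style field names):
+     tmp1 = ap.gp_offset;
+     tmp2 = tmp1 + 8;
+     ap.gp_offset = tmp2;
+   with COUNTER == ap.gp_offset and RHS == tmp2, the walk below follows
+   tmp2 -> tmp1 -> ap.gp_offset and returns a bump of 8.  */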
static unsigned HOST_WIDE_INT
-va_list_counter_bump (tree counter, tree rhs)
+va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
+ bool gpr_p)
{
- tree plus_stmt = SSA_NAME_DEF_STMT (rhs);
- tree rhs1, addend, load_stmt, counter1;
+ tree lhs, orig_lhs;
+ gimple stmt;
+ unsigned HOST_WIDE_INT ret = 0, val, counter_val;
+ unsigned int max_size;
- if (TREE_CODE (plus_stmt) != MODIFY_EXPR
- || TREE_OPERAND (plus_stmt, 0) != rhs)
- return 0;
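+  /* si->offsets caches, per SSA name version, the counter offset already
+     known at that name; -1 marks entries not computed yet.  */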
+ if (si->offsets == NULL)
+ {
+ unsigned int i;
- rhs1 = TREE_OPERAND (plus_stmt, 1);
+ si->offsets = XNEWVEC (int, num_ssa_names);
+ for (i = 0; i < num_ssa_names; ++i)
+ si->offsets[i] = -1;
+ }
- if (TREE_CODE (rhs1) != PLUS_EXPR
- || TREE_CODE (TREE_OPERAND (rhs1, 0)) != SSA_NAME
- || TREE_CODE (TREE_OPERAND (rhs1, 1)) != INTEGER_CST
- || !host_integerp (TREE_OPERAND (rhs1, 1), 1))
- return 0;
+ counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
+ max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
+ orig_lhs = lhs = rhs;
+ while (lhs)
+ {
+ enum tree_code rhs_code;
- addend = TREE_OPERAND (rhs1, 0);
- load_stmt = SSA_NAME_DEF_STMT (addend);
+ if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
+ {
+ if (counter_val >= max_size)
+ {
+ ret = max_size;
+ break;
+ }
+
+ ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
+ break;
+ }
- if (TREE_CODE (load_stmt) != MODIFY_EXPR
- || TREE_OPERAND (load_stmt, 0) != addend)
- return 0;
+ stmt = SSA_NAME_DEF_STMT (lhs);
- counter1 = TREE_OPERAND (load_stmt, 1);
- if (TREE_CODE (counter) != TREE_CODE (counter1))
- return 0;
+ if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
+ return (unsigned HOST_WIDE_INT) -1;
- if (TREE_CODE (counter) == COMPONENT_REF)
+ rhs_code = gimple_assign_rhs_code (stmt);
+ if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
+ || gimple_assign_cast_p (stmt))
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+ {
+ lhs = gimple_assign_rhs1 (stmt);
+ continue;
+ }
+
+ if ((rhs_code == POINTER_PLUS_EXPR
+ || rhs_code == PLUS_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+ && host_integerp (gimple_assign_rhs2 (stmt), 1))
+ {
+ ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+ lhs = gimple_assign_rhs1 (stmt);
+ continue;
+ }
+
+ if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
+ return (unsigned HOST_WIDE_INT) -1;
+
+ rhs = gimple_assign_rhs1 (stmt);
+ if (TREE_CODE (counter) != TREE_CODE (rhs))
+ return (unsigned HOST_WIDE_INT) -1;
+
+ if (TREE_CODE (counter) == COMPONENT_REF)
+ {
+ if (get_base_address (counter) != get_base_address (rhs)
+ || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
+ || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
+ return (unsigned HOST_WIDE_INT) -1;
+ }
+ else if (counter != rhs)
+ return (unsigned HOST_WIDE_INT) -1;
+
+ lhs = NULL;
+ }
+
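+  /* Second walk over the same definition chain: record in si->offsets
+     the counter value live at each SSA name, so that a later call can
+     stop as soon as it reaches a name that has already been seen.  */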
+ lhs = orig_lhs;
+ val = ret + counter_val;
+ while (lhs)
{
- if (get_base_address (counter) != get_base_address (counter1)
- || TREE_CODE (TREE_OPERAND (counter1, 1)) != FIELD_DECL
- || TREE_OPERAND (counter, 1) != TREE_OPERAND (counter1, 1))
- return 0;
+ enum tree_code rhs_code;
+
+ if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
+ break;
+
+ if (val >= max_size)
+ si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
+ else
+ si->offsets[SSA_NAME_VERSION (lhs)] = val;
+
+ stmt = SSA_NAME_DEF_STMT (lhs);
+
+ rhs_code = gimple_assign_rhs_code (stmt);
+ if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
+ || gimple_assign_cast_p (stmt))
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+ {
+ lhs = gimple_assign_rhs1 (stmt);
+ continue;
+ }
+
+ if ((rhs_code == POINTER_PLUS_EXPR
+ || rhs_code == PLUS_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+ && host_integerp (gimple_assign_rhs2 (stmt), 1))
+ {
+ val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+ lhs = gimple_assign_rhs1 (stmt);
+ continue;
+ }
+
+ lhs = NULL;
}
- else
- return 0;
- return tree_low_cst (TREE_OPERAND (rhs1, 1), 1);
+ return ret;
}
static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
void *data)
{
- bitmap va_list_vars = (bitmap) data;
+ bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
tree var = *tp;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (TREE_CODE (var) == VAR_DECL
- && bitmap_bit_p (va_list_vars, var_ann (var)->uid))
+ && bitmap_bit_p (va_list_vars, DECL_UID (var)))
return var;
return NULL_TREE;
/* Helper function of va_list_counter_struct_op. Compute
- cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
- if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
- statement. GPR_P is true if AP is a GPR counter, false if it is
- a FPR counter. */
+ cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
+ if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
+ statement. GPR_P is true if AP is a GPR counter, false if it is
+ a FPR counter. */
static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
if (write_p
&& si->compute_sizes
- && (increment = va_list_counter_bump (ap, var)) != 0)
+ && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
{
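+      /* The "+ 1 > 1" test above rejects both a zero bump and the
+         (unsigned HOST_WIDE_INT) -1 "unknown" return in a single
+         unsigned comparison.  */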
if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
{
return false;
if (TREE_CODE (var) != SSA_NAME
- || bitmap_bit_p (si->va_list_vars, var_ann (SSA_NAME_VAR (var))->uid))
+ || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
return false;
base = get_base_address (ap);
if (TREE_CODE (base) != VAR_DECL
- || !bitmap_bit_p (si->va_list_vars, var_ann (base)->uid))
+ || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
return false;
if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
}
+/* Check for TEM = AP. Return true if found and the caller shouldn't
+ search for va_list references in the statement. */
+
+static bool
+va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
+{
+ if (TREE_CODE (ap) != VAR_DECL
+ || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
+ return false;
+
+ if (TREE_CODE (tem) != SSA_NAME
+ || bitmap_bit_p (si->va_list_vars,
+ DECL_UID (SSA_NAME_VAR (tem)))
+ || is_global_var (SSA_NAME_VAR (tem)))
+ return false;
+
+ if (si->compute_sizes < 0)
+ {
+ si->compute_sizes = 0;
+ if (si->va_start_count == 1
+ && reachable_at_most_once (si->bb, si->va_start_bb))
+ si->compute_sizes = 1;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file,
+ "bb%d will %sbe executed at most once for each va_start "
+ "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
+ si->va_start_bb->index);
+ }
+
+ /* For void * or char * va_list types, there is just one counter.
+ If va_arg is used in a loop, we don't know how many registers need
+ saving. */
+ if (! si->compute_sizes)
+ return false;
+
+ if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
+ return false;
+
+ /* Note the temporary, as we need to track whether it doesn't escape
+ the current function. */
+ bitmap_set_bit (si->va_list_escape_vars,
+ DECL_UID (SSA_NAME_VAR (tem)));
+ return true;
+}
+
+
+/* Check for:
+ tem1 = AP;
+ TEM2 = tem1 + CST;
+ AP = TEM2;
+ sequence and update cfun->va_list_gpr_size. Return true if found. */
+
+static bool
+va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
+{
+ unsigned HOST_WIDE_INT increment;
+
+ if (TREE_CODE (ap) != VAR_DECL
+ || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
+ return false;
+
+ if (TREE_CODE (tem2) != SSA_NAME
+ || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
+ return false;
+
+ if (si->compute_sizes <= 0)
+ return false;
+
+ increment = va_list_counter_bump (si, ap, tem2, true);
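+  /* increment + 1 <= 1 is true both for 0 (no bump) and for
+     (unsigned HOST_WIDE_INT) -1 (unknown), so give up in either case.  */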
+ if (increment + 1 <= 1)
+ return false;
+
+ if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
+ cfun->va_list_gpr_size += increment;
+ else
+ cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
+
+ return true;
+}
+
+
+/* If RHS is X, (some type *) X or X + CST for X a temporary variable
+ containing value of some va_list variable plus optionally some constant,
+ either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
+ depending on whether LHS is a function-local temporary. */
+
+static void
+check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
+{
+ if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
+ return;
+
+ if (TREE_CODE (rhs) != SSA_NAME
+ || ! bitmap_bit_p (si->va_list_escape_vars,
+ DECL_UID (SSA_NAME_VAR (rhs))))
+ return;
+
+ if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
+ {
+ si->va_list_escapes = true;
+ return;
+ }
+
+ if (si->compute_sizes < 0)
+ {
+ si->compute_sizes = 0;
+ if (si->va_start_count == 1
+ && reachable_at_most_once (si->bb, si->va_start_bb))
+ si->compute_sizes = 1;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file,
+ "bb%d will %sbe executed at most once for each va_start "
+ "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
+ si->va_start_bb->index);
+ }
+
+ /* For void * or char * va_list types, there is just one counter.
+ If va_arg is used in a loop, we don't know how many registers need
+ saving. */
+ if (! si->compute_sizes)
+ {
+ si->va_list_escapes = true;
+ return;
+ }
+
+ if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
+ == (unsigned HOST_WIDE_INT) -1)
+ {
+ si->va_list_escapes = true;
+ return;
+ }
+
+ bitmap_set_bit (si->va_list_escape_vars,
+ DECL_UID (SSA_NAME_VAR (lhs)));
+}
+
+
+/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
+ Return true if va_list might be escaping. */
+
+static bool
+check_all_va_list_escapes (struct stdarg_info *si)
+{
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
+ {
+ gimple_stmt_iterator i;
+
+ for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+ {
+ gimple stmt = gsi_stmt (i);
+ tree use;
+ ssa_op_iter iter;
+
+ if (is_gimple_debug (stmt))
+ continue;
+
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
+ {
+ if (! bitmap_bit_p (si->va_list_escape_vars,
+ DECL_UID (SSA_NAME_VAR (use))))
+ continue;
+
+ if (is_gimple_assign (stmt))
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+ enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
+
+ /* x = *ap_temp; */
+ if (rhs_code == INDIRECT_REF
+ && TREE_OPERAND (rhs, 0) == use
+ && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
+ && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
+ && si->offsets[SSA_NAME_VERSION (use)] != -1)
+ {
+ unsigned HOST_WIDE_INT gpr_size;
+ tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
+
+ gpr_size = si->offsets[SSA_NAME_VERSION (use)]
+ + tree_low_cst (access_size, 1);
+ if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
+ cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
+ else if (gpr_size > cfun->va_list_gpr_size)
+ cfun->va_list_gpr_size = gpr_size;
+ continue;
+ }
+
+ /* va_arg sequences may contain
+ other_ap_temp = ap_temp;
+ other_ap_temp = ap_temp + constant;
+ other_ap_temp = (some_type *) ap_temp;
+ ap = ap_temp;
+ statements. */
+ if (rhs == use
+ && ((rhs_code == POINTER_PLUS_EXPR
+ && (TREE_CODE (gimple_assign_rhs2 (stmt))
+ == INTEGER_CST))
+ || gimple_assign_cast_p (stmt)
+ || (get_gimple_rhs_class (rhs_code)
+ == GIMPLE_SINGLE_RHS)))
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+
+ if (TREE_CODE (lhs) == SSA_NAME
+ && bitmap_bit_p (si->va_list_escape_vars,
+ DECL_UID (SSA_NAME_VAR (lhs))))
+ continue;
+
+ if (TREE_CODE (lhs) == VAR_DECL
+ && bitmap_bit_p (si->va_list_vars,
+ DECL_UID (lhs)))
+ continue;
+ }
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fputs ("va_list escapes in ", dump_file);
+ print_gimple_stmt (dump_file, stmt, 0, dump_flags);
+ fputc ('\n', dump_file);
+ }
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+
/* Return true if this optimization pass should be done.
It only makes sense for stdarg functions. */
static bool
gate_optimize_stdarg (void)
{
/* This optimization is only for stdarg functions. */
- return current_function_stdarg != 0;
-}
+ return cfun->stdarg != 0;
+}
/* Entry point to the stdarg optimization pass. */
-static void
+static unsigned int
execute_optimize_stdarg (void)
{
basic_block bb;
bool va_list_escapes = false;
+ bool va_list_simple_ptr;
struct stdarg_info si;
+ struct walk_stmt_info wi;
const char *funcname = NULL;
+ tree cfun_va_list;
cfun->va_list_gpr_size = 0;
cfun->va_list_fpr_size = 0;
memset (&si, 0, sizeof (si));
- si.va_list_vars = BITMAP_XMALLOC ();
+ si.va_list_vars = BITMAP_ALLOC (NULL);
+ si.va_list_escape_vars = BITMAP_ALLOC (NULL);
if (dump_file)
funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
+ cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
+ va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
+ && (TREE_TYPE (cfun_va_list) == void_type_node
+ || TREE_TYPE (cfun_va_list) == char_type_node);
+ gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
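+
+  /* When va_list is a plain pointer type (e.g. "void *" or "char *"),
+     the pointer itself is the only counter; the code below tracks it
+     with the GPR machinery and pessimizes the FPR size (see the
+     VA_LIST_MAX_FPR_SIZE assignment further down).  */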
+
FOR_EACH_BB (bb)
{
- block_stmt_iterator i;
+ gimple_stmt_iterator i;
- for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+ for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- tree stmt = bsi_stmt (i);
- tree call = get_call_expr_in (stmt), callee;
- tree ap;
+ gimple stmt = gsi_stmt (i);
+ tree callee, ap;
- if (!call)
+ if (!is_gimple_call (stmt))
continue;
- callee = get_callee_fndecl (call);
+ callee = gimple_call_fndecl (stmt);
if (!callee
- || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
- || DECL_FUNCTION_CODE (callee) != BUILT_IN_VA_START)
+ || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
continue;
+ switch (DECL_FUNCTION_CODE (callee))
+ {
+ case BUILT_IN_VA_START:
+ break;
+ /* If old-style builtins are used, don't optimize anything. */
+ case BUILT_IN_SAVEREGS:
+ case BUILT_IN_ARGS_INFO:
+ case BUILT_IN_NEXT_ARG:
+ va_list_escapes = true;
+ continue;
+ default:
+ continue;
+ }
+
si.va_start_count++;
- ap = TREE_VALUE (TREE_OPERAND (call, 1));
- if (TREE_CODE (ap) != ADDR_EXPR
- || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (ap, 0)))
- != TYPE_MAIN_VARIANT (va_list_type_node)
- || TREE_CODE (TREE_OPERAND (ap, 0)) != VAR_DECL)
+ ap = gimple_call_arg (stmt, 0);
+
+ if (TREE_CODE (ap) != ADDR_EXPR)
+ {
+ va_list_escapes = true;
+ break;
+ }
+ ap = TREE_OPERAND (ap, 0);
+ if (TREE_CODE (ap) == ARRAY_REF)
+ {
+ if (! integer_zerop (TREE_OPERAND (ap, 1)))
+ {
+ va_list_escapes = true;
+ break;
+ }
+ ap = TREE_OPERAND (ap, 0);
+ }
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
+ != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
+ || TREE_CODE (ap) != VAR_DECL)
{
va_list_escapes = true;
break;
}
- ap = TREE_OPERAND (ap, 0);
if (is_global_var (ap))
{
va_list_escapes = true;
break;
}
- bitmap_set_bit (si.va_list_vars, var_ann (ap)->uid);
+ bitmap_set_bit (si.va_list_vars, DECL_UID (ap));
- /* VA_START_BB will be only used if there is just one
- va_start in the function. */
+ /* VA_START_BB and VA_START_AP will be only used if there is just
+ one va_start in the function. */
si.va_start_bb = bb;
+ si.va_start_ap = ap;
}
if (va_list_escapes)
break;
}

if (va_list_escapes)
goto finish;
- /* If the backend didn't tell us what the counter fields are, there is
- nothing more we can do. */
- if (va_list_gpr_counter_field == NULL_TREE
+ /* For void * or char * va_list, something useful can be done only
+ if there is just one va_start. */
+ if (va_list_simple_ptr && si.va_start_count > 1)
+ {
+ va_list_escapes = true;
+ goto finish;
+ }
+
+ /* For struct * va_list, if the backend didn't tell us what the counter fields
+ are, there is nothing more we can do. */
+ if (!va_list_simple_ptr
+ && va_list_gpr_counter_field == NULL_TREE
&& va_list_fpr_counter_field == NULL_TREE)
{
va_list_escapes = true;
goto finish;
}
+ /* For void * or char * va_list there is just one counter
+ (va_list itself). Use VA_LIST_GPR_SIZE for it. */
+ if (va_list_simple_ptr)
+ cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
+
+ calculate_dominance_info (CDI_DOMINATORS);
+ memset (&wi, 0, sizeof (wi));
+ wi.info = si.va_list_vars;
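+  /* The va_list_vars bitmap travels through wi.info; find_va_list_reference
+     fetches it back from there when invoked via walk_gimple_op below.  */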
+
FOR_EACH_BB (bb)
{
- block_stmt_iterator i;
+ gimple_stmt_iterator i;
si.compute_sizes = -1;
si.bb = bb;
- for (i = bsi_start (bb);
- !bsi_end_p (i) && !va_list_escapes;
- bsi_next (&i))
+
+ /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
+ them as assignments for the purpose of escape analysis. This is
+ not needed for non-simple va_list because virtual phis don't perform
+ any real data movement. */
+ if (va_list_simple_ptr)
+ {
+ tree lhs, rhs;
+ use_operand_p uop;
+ ssa_op_iter soi;
+
+ for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+ {
+ gimple phi = gsi_stmt (i);
+ lhs = PHI_RESULT (phi);
+
+ if (!is_gimple_reg (lhs))
+ continue;
+
+ FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
+ {
+ rhs = USE_FROM_PTR (uop);
+ if (va_list_ptr_read (&si, rhs, lhs))
+ continue;
+ else if (va_list_ptr_write (&si, lhs, rhs))
+ continue;
+ else
+ check_va_list_escapes (&si, lhs, rhs);
+
+ if (si.va_list_escapes)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fputs ("va_list escapes in ", dump_file);
+ print_gimple_stmt (dump_file, phi, 0, dump_flags);
+ fputc ('\n', dump_file);
+ }
+ va_list_escapes = true;
+ }
+ }
+ }
+ }
+
+ for (i = gsi_start_bb (bb);
+ !gsi_end_p (i) && !va_list_escapes;
+ gsi_next (&i))
{
- tree stmt = bsi_stmt (i);
- tree call;
+ gimple stmt = gsi_stmt (i);
/* Don't look at __builtin_va_{start,end}, they are ok. */
- call = get_call_expr_in (stmt);
- if (call)
+ if (is_gimple_call (stmt))
{
- tree callee = get_callee_fndecl (call);
+ tree callee = gimple_call_fndecl (stmt);
if (callee
&& DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
&& (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
continue;
}
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ if (is_gimple_assign (stmt))
{
- tree lhs = TREE_OPERAND (stmt, 0);
- tree rhs = TREE_OPERAND (stmt, 1);
-
- if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
- rhs = TREE_OPERAND (rhs, 0);
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs = gimple_assign_rhs1 (stmt);
- /* Check for ap[0].field = temp. */
- if (va_list_counter_struct_op (&si, lhs, rhs, true))
- continue;
-
- /* Check for temp = ap[0].field. */
- else if (va_list_counter_struct_op (&si, rhs, lhs, false))
- continue;
+ if (va_list_simple_ptr)
+ {
+ if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+ == GIMPLE_SINGLE_RHS)
+ {
+ /* Check for tem = ap. */
+ if (va_list_ptr_read (&si, rhs, lhs))
+ continue;
+
+ /* Check for the last insn in:
+ tem1 = ap;
+ tem2 = tem1 + CST;
+ ap = tem2;
+ sequence. */
+ else if (va_list_ptr_write (&si, lhs, rhs))
+ continue;
+ }
+
+ if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
+ || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
+ || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+ == GIMPLE_SINGLE_RHS))
+ check_va_list_escapes (&si, lhs, rhs);
+ }
+ else
+ {
+ if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+ == GIMPLE_SINGLE_RHS)
+ {
+ /* Check for ap[0].field = temp. */
+ if (va_list_counter_struct_op (&si, lhs, rhs, true))
+ continue;
+
+ /* Check for temp = ap[0].field. */
+ else if (va_list_counter_struct_op (&si, rhs, lhs,
+ false))
+ continue;
+ }
+
+ /* Do any architecture specific checking. */
+ if (targetm.stdarg_optimize_hook
+ && targetm.stdarg_optimize_hook (&si, stmt))
+ continue;
+ }
}
+ else if (is_gimple_debug (stmt))
+ continue;
/* All other uses of va_list are either va_copy (that is not handled
in this optimization), taking address of va_list variable or
passing va_list to other functions (in that case va_list might
escape the function and therefore va_start needs to set it up
fully), or some unexpected use of va_list. None of these should
happen in a gimplified VA_ARG_EXPR. */
- if (walk_tree (&stmt, find_va_list_reference, si.va_list_vars, NULL))
+ if (si.va_list_escapes
+ || walk_gimple_op (stmt, find_va_list_reference, &wi))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fputs ("va_list escapes in ", dump_file);
- print_generic_expr (dump_file, stmt, dump_flags);
+ print_gimple_stmt (dump_file, stmt, 0, dump_flags);
fputc ('\n', dump_file);
}
va_list_escapes = true;
break;
}
+ if (! va_list_escapes
+ && va_list_simple_ptr
+ && ! bitmap_empty_p (si.va_list_escape_vars)
+ && check_all_va_list_escapes (&si))
+ va_list_escapes = true;
+
finish:
if (va_list_escapes)
{
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
}
- BITMAP_XFREE (si.va_list_vars);
+ BITMAP_FREE (si.va_list_vars);
+ BITMAP_FREE (si.va_list_escape_vars);
+ free (si.offsets);
if (dump_file)
{
fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
fprintf (dump_file, "%d", cfun->va_list_fpr_size);
fputs (" FPR units.\n", dump_file);
}
+ return 0;
}
-struct tree_opt_pass pass_stdarg =
+struct gimple_opt_pass pass_stdarg =
{
+ {
+ GIMPLE_PASS,
"stdarg", /* name */
gate_optimize_stdarg, /* gate */
execute_optimize_stdarg, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ TV_NONE, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func /* todo_flags_finish */
+ }
};