X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ftree-mudflap.c;h=5f4f5ac0b2958c194931dd403a54661b85d1b243;hp=732ef5e4106ba7a89182cd4d7f4e957bc42c3538;hb=6a352190025d95e9307ca42790f7e6aea147238d;hpb=0ec80471fe5e25ecd3dcb82e2147a7c24c9816f5 diff --git a/gcc/tree-mudflap.c b/gcc/tree-mudflap.c index 732ef5e4106..5f4f5ac0b29 100644 --- a/gcc/tree-mudflap.c +++ b/gcc/tree-mudflap.c @@ -1,5 +1,6 @@ /* Mudflap: narrow-pointer bounds-checking by tree rewriting. - Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc. + Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 + Free Software Foundation, Inc. Contributed by Frank Ch. Eigler and Graydon Hoare @@ -7,7 +8,7 @@ This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free -Software Foundation; either version 2, or (at your option) any later +Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY @@ -16,13 +17,11 @@ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +along with GCC; see the file COPYING3. If not see +. */ #include "config.h" -#include "errors.h" #include "system.h" #include "coretypes.h" #include "tm.h" @@ -34,7 +33,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "flags.h" #include "function.h" #include "tree-inline.h" -#include "tree-gimple.h" +#include "gimple.h" +#include "tree-iterator.h" #include "tree-flow.h" #include "tree-mudflap.h" #include "tree-dump.h" @@ -44,28 +44,33 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include #include "langhooks.h" #include "ggc.h" +#include "cgraph.h" +#include "toplev.h" +#include "gimple.h" /* Internal function decls */ + +/* Options. */ +#define flag_mudflap_threads (flag_mudflap == 2) + /* Helpers. */ static tree mf_build_string (const char *string); static tree mf_varname_tree (tree); -static tree mf_file_function_line_tree (location_t *); - -/* Initialization of all the mf-runtime.h extern decls. */ -static void mf_init_extern_trees (void); +static tree mf_file_function_line_tree (location_t); /* Indirection-related instrumentation. */ static void mf_decl_cache_locals (void); static void mf_decl_clear_locals (void); static void mf_xform_derefs (void); -static void execute_mudflap_function_ops (void); +static unsigned int execute_mudflap_function_ops (void); /* Addressable variables instrumentation. 
*/ -static void mf_xform_decls (tree, tree); -static tree mx_xfn_xform_decls (tree *, int *, void *); -static void mx_register_decls (tree, tree *); -static void execute_mudflap_function_decls (void); +static void mf_xform_decls (gimple_seq, tree); +static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *, + struct walk_stmt_info *); +static gimple_seq mx_register_decls (tree, gimple_seq, location_t); +static unsigned int execute_mudflap_function_decls (void); /* ------------------------------------------------------------------------ */ @@ -78,11 +83,9 @@ mf_build_string (const char *string) size_t len = strlen (string); tree result = mf_mark (build_string (len + 1, string)); - TREE_TYPE (result) - = build_array_type (char_type_node, - build_index_type (build_int_2 (len, 0))); + TREE_TYPE (result) = build_array_type + (char_type_node, build_index_type (build_int_cst (NULL_TREE, len))); TREE_CONSTANT (result) = 1; - TREE_INVARIANT (result) = 1; TREE_READONLY (result) = 1; TREE_STATIC (result) = 1; @@ -105,8 +108,7 @@ mf_varname_tree (tree decl) const char *buf_contents; tree result; - if (decl == NULL_TREE) - abort (); + gcc_assert (decl); if (!initialized) { @@ -115,12 +117,14 @@ mf_varname_tree (tree decl) } pp_clear_output_area (buf); - /* Add FILENAME[:LINENUMBER]. */ + /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */ { + expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl)); const char *sourcefile; - unsigned sourceline; - - sourcefile = DECL_SOURCE_FILE (decl); + unsigned sourceline = xloc.line; + unsigned sourcecolumn = 0; + sourcecolumn = xloc.column; + sourcefile = xloc.file; if (sourcefile == NULL && current_function_decl != NULL_TREE) sourcefile = DECL_SOURCE_FILE (current_function_decl); if (sourcefile == NULL) @@ -128,17 +132,22 @@ mf_varname_tree (tree decl) pp_string (buf, sourcefile); - sourceline = DECL_SOURCE_LINE (decl); if (sourceline != 0) { pp_string (buf, ":"); pp_decimal_int (buf, sourceline); + + if (sourcecolumn != 0) + { + pp_string (buf, ":"); + pp_decimal_int (buf, sourcecolumn); + } } } if (current_function_decl != NULL_TREE) { - /* Add (FUNCTION): */ + /* Add (FUNCTION) */ pp_string (buf, " ("); { const char *funcname = NULL; @@ -158,18 +167,18 @@ mf_varname_tree (tree decl) { const char *declname = NULL; - if (strcmp ("GNU C++", lang_hooks.name) == 0 && - DECL_NAME (decl) != NULL) + if (DECL_NAME (decl) != NULL) { - /* The gcc/cp decl_printable_name hook doesn't do as good a job as - the libiberty demangler. */ - declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)), - DMGL_AUTO | DMGL_VERBOSE); + if (strcmp ("GNU C++", lang_hooks.name) == 0) + { + /* The gcc/cp decl_printable_name hook doesn't do as good a job as + the libiberty demangler. */ + declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)), + DMGL_AUTO | DMGL_VERBOSE); + } + if (declname == NULL) + declname = lang_hooks.decl_printable_name (decl, 3); } - - if (declname == NULL) - declname = lang_hooks.decl_printable_name (decl, 3); - if (declname == NULL) declname = ""; @@ -188,27 +197,29 @@ mf_varname_tree (tree decl) /* And another friend, for producing a simpler message. */ static tree -mf_file_function_line_tree (location_t *locus) +mf_file_function_line_tree (location_t location) { + expanded_location xloc = expand_location (location); const char *file = NULL, *colon, *line, *op, *name, *cp; - char linebuf[18]; + char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */ char *string; tree result; - /* Add FILENAME. 
*/ - if (locus != NULL) - file = locus->file; + /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */ + file = xloc.file; if (file == NULL && current_function_decl != NULL_TREE) file = DECL_SOURCE_FILE (current_function_decl); if (file == NULL) file = ""; - /* Add :LINENUMBER. */ - if (locus != NULL && locus->line > 0) + if (xloc.line > 0) { - sprintf (linebuf, "%d", locus->line); + if (xloc.column > 0) + sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column); + else + sprintf (linecolbuf, "%d", xloc.line); colon = ":"; - line = linebuf; + line = linecolbuf; } else colon = line = ""; @@ -249,13 +260,13 @@ static GTY (()) tree mf_cache_structptr_type; /* extern struct __mf_cache __mf_lookup_cache []; */ static GTY (()) tree mf_cache_array_decl; -/* extern const unsigned char __mf_lc_shift; */ +/* extern unsigned char __mf_lc_shift; */ static GTY (()) tree mf_cache_shift_decl; -/* extern const uintptr_t __mf_lc_mask; */ +/* extern uintptr_t __mf_lc_mask; */ static GTY (()) tree mf_cache_mask_decl; -/* Their function-scope local shadows, used in single-threaded mode only. */ +/* Their function-scope local shadows, used in single-threaded mode only. */ /* auto const unsigned char __mf_lc_shift_l; */ static GTY (()) tree mf_cache_shift_decl_l; @@ -269,31 +280,136 @@ static GTY (()) tree mf_check_fndecl; /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */ static GTY (()) tree mf_register_fndecl; -/* extern void __mf_unregister (void *ptr, size_t sz); */ +/* extern void __mf_unregister (void *ptr, size_t sz, int type); */ static GTY (()) tree mf_unregister_fndecl; +/* extern void __mf_init (); */ +static GTY (()) tree mf_init_fndecl; + +/* extern int __mf_set_options (const char*); */ +static GTY (()) tree mf_set_options_fndecl; + + +/* Helper for mudflap_init: construct a decl with the given category, + name, and type, mark it an external reference, and pushdecl it. */ +static inline tree +mf_make_builtin (enum tree_code category, const char *name, tree type) +{ + tree decl = mf_mark (build_decl (category, get_identifier (name), type)); + TREE_PUBLIC (decl) = 1; + DECL_EXTERNAL (decl) = 1; + lang_hooks.decls.pushdecl (decl); + /* The decl was declared by the compiler. */ + DECL_ARTIFICIAL (decl) = 1; + /* And we don't want debug info for it. */ + DECL_IGNORED_P (decl) = 1; + return decl; +} + +/* Helper for mudflap_init: construct a tree corresponding to the type + struct __mf_cache { uintptr_t low; uintptr_t high; }; + where uintptr_t is the FIELD_TYPE argument. */ +static inline tree +mf_make_mf_cache_struct_type (tree field_type) +{ + /* There is, abominably, no language-independent way to construct a + RECORD_TYPE. So we have to call the basic type construction + primitives by hand. 
*/ + tree fieldlo = build_decl (FIELD_DECL, get_identifier ("low"), field_type); + tree fieldhi = build_decl (FIELD_DECL, get_identifier ("high"), field_type); + + tree struct_type = make_node (RECORD_TYPE); + DECL_CONTEXT (fieldlo) = struct_type; + DECL_CONTEXT (fieldhi) = struct_type; + TREE_CHAIN (fieldlo) = fieldhi; + TYPE_FIELDS (struct_type) = fieldlo; + TYPE_NAME (struct_type) = get_identifier ("__mf_cache"); + layout_type (struct_type); + + return struct_type; +} + +#define build_function_type_0(rtype) \ + build_function_type (rtype, void_list_node) +#define build_function_type_1(rtype, arg1) \ + build_function_type (rtype, tree_cons (0, arg1, void_list_node)) +#define build_function_type_3(rtype, arg1, arg2, arg3) \ + build_function_type (rtype, \ + tree_cons (0, arg1, \ + tree_cons (0, arg2, \ + tree_cons (0, arg3, \ + void_list_node)))) +#define build_function_type_4(rtype, arg1, arg2, arg3, arg4) \ + build_function_type (rtype, \ + tree_cons (0, arg1, \ + tree_cons (0, arg2, \ + tree_cons (0, arg3, \ + tree_cons (0, arg4, \ + void_list_node))))) /* Initialize the global tree nodes that correspond to mf-runtime.h declarations. */ -static void -mf_init_extern_trees (void) +void +mudflap_init (void) { static bool done = false; + tree mf_const_string_type; + tree mf_cache_array_type; + tree mf_check_register_fntype; + tree mf_unregister_fntype; + tree mf_init_fntype; + tree mf_set_options_fntype; if (done) return; done = true; - mf_uintptr_type = TREE_TYPE (mflang_lookup_decl ("uintptr_t")); - mf_cache_array_decl = mf_mark (mflang_lookup_decl ("__mf_lookup_cache")); - mf_cache_struct_type = TREE_TYPE (TREE_TYPE (mf_cache_array_decl)); + mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, + /*unsignedp=*/true); + mf_const_string_type + = build_pointer_type (build_qualified_type + (char_type_node, TYPE_QUAL_CONST)); + + mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type); mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type); - mf_cache_shift_decl = mf_mark (mflang_lookup_decl ("__mf_lc_shift")); - mf_cache_mask_decl = mf_mark (mflang_lookup_decl ("__mf_lc_mask")); - mf_check_fndecl = mflang_lookup_decl ("__mf_check"); - mf_register_fndecl = mflang_lookup_decl ("__mf_register"); - mf_unregister_fndecl = mflang_lookup_decl ("__mf_unregister"); + mf_cache_array_type = build_array_type (mf_cache_struct_type, 0); + mf_check_register_fntype = + build_function_type_4 (void_type_node, ptr_type_node, size_type_node, + integer_type_node, mf_const_string_type); + mf_unregister_fntype = + build_function_type_3 (void_type_node, ptr_type_node, size_type_node, + integer_type_node); + mf_init_fntype = + build_function_type_0 (void_type_node); + mf_set_options_fntype = + build_function_type_1 (integer_type_node, mf_const_string_type); + + mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache", + mf_cache_array_type); + mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift", + unsigned_char_type_node); + mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask", + mf_uintptr_type); + /* Don't process these in mudflap_enqueue_decl, should they come by + there for some reason. 
*/ + mf_mark (mf_cache_array_decl); + mf_mark (mf_cache_shift_decl); + mf_mark (mf_cache_mask_decl); + mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check", + mf_check_register_fntype); + mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register", + mf_check_register_fntype); + mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister", + mf_unregister_fntype); + mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init", + mf_init_fntype); + mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options", + mf_set_options_fntype); } +#undef build_function_type_4 +#undef build_function_type_3 +#undef build_function_type_1 +#undef build_function_type_0 /* ------------------------------------------------------------------------ */ @@ -305,13 +421,18 @@ mf_init_extern_trees (void) tree optimizations have been performed, but we have to preserve the CFG for expansion from trees to RTL. */ -static void +static unsigned int execute_mudflap_function_ops (void) { - if (mf_marked_p (current_function_decl)) - return; + struct gimplify_ctx gctx; + + /* Don't instrument functions such as the synthetic constructor + built during mudflap_finish_file. */ + if (mf_marked_p (current_function_decl) || + DECL_ARTIFICIAL (current_function_decl)) + return 0; - push_gimplify_context (); + push_gimplify_context (&gctx); /* In multithreaded mode, don't cache the lookup cache parameters. */ if (! flag_mudflap_threads) @@ -323,223 +444,234 @@ execute_mudflap_function_ops (void) mf_decl_clear_locals (); pop_gimplify_context (NULL); + return 0; +} + +/* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that + if BB has more than one edge, STMT will be replicated for each edge. + Also, abnormal edges will be ignored. */ + +static void +insert_edge_copies_seq (gimple_seq seq, basic_block bb) +{ + edge e; + edge_iterator ei; + unsigned n_copies = -1; + + FOR_EACH_EDGE (e, ei, bb->succs) + if (!(e->flags & EDGE_ABNORMAL)) + n_copies++; + + FOR_EACH_EDGE (e, ei, bb->succs) + if (!(e->flags & EDGE_ABNORMAL)) + gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq); } /* Create and initialize local shadow variables for the lookup cache globals. Put their decls in the *_l globals for use by - mf_build_check_statement_for. */ + mf_build_check_statement_for. */ static void mf_decl_cache_locals (void) { - tree t, shift_init_stmts, mask_init_stmts; - tree_stmt_iterator tsi; + gimple g; + gimple_seq seq = gimple_seq_alloc (); /* Build the cache vars. */ mf_cache_shift_decl_l - = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_shift_decl), + = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl), "__mf_lookup_shift_l")); mf_cache_mask_decl_l - = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_mask_decl), + = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl), "__mf_lookup_mask_l")); /* Build initialization nodes for the cache vars. We just load the globals into the cache variables. 
*/ - t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_shift_decl_l), - mf_cache_shift_decl_l, mf_cache_shift_decl); - annotate_with_locus (t, DECL_SOURCE_LOCATION (current_function_decl)); - gimplify_to_stmt_list (&t); - shift_init_stmts = t; - - t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_mask_decl_l), - mf_cache_mask_decl_l, mf_cache_mask_decl); - annotate_with_locus (t, DECL_SOURCE_LOCATION (current_function_decl)); - gimplify_to_stmt_list (&t); - mask_init_stmts = t; - - /* Anticipating multiple entry points, we insert the cache vars - initializers in each successor of the ENTRY_BLOCK_PTR. */ - for (tsi = tsi_start (shift_init_stmts); - ! tsi_end_p (tsi); - tsi_next (&tsi)) - insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR); - - for (tsi = tsi_start (mask_init_stmts); - ! tsi_end_p (tsi); - tsi_next (&tsi)) - insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR); - bsi_commit_edge_inserts (NULL); + g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl); + gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl)); + gimple_seq_add_stmt (&seq, g); + + g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl); + gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl)); + gimple_seq_add_stmt (&seq, g); + + insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR); + + gsi_commit_edge_inserts (); } static void mf_decl_clear_locals (void) { - /* Unset local shadows. */ + /* Unset local shadows. */ mf_cache_shift_decl_l = NULL_TREE; mf_cache_mask_decl_l = NULL_TREE; } static void -mf_build_check_statement_for (tree addr, tree size, - block_stmt_iterator *instr_bsi, - location_t *locus, tree dirflag) +mf_build_check_statement_for (tree base, tree limit, + gimple_stmt_iterator *instr_gsi, + location_t location, tree dirflag) { - tree_stmt_iterator head, tsi; - tree ptrtype = TREE_TYPE (addr); - block_stmt_iterator bsi; + gimple_stmt_iterator gsi; basic_block cond_bb, then_bb, join_bb; edge e; - tree cond, t, u, v, l1, l2; - tree mf_value; + tree cond, t, u, v; tree mf_base; tree mf_elem; + tree mf_limit; + gimple g; + gimple_seq seq, stmts; /* We first need to split the current basic block, and start altering the CFG. This allows us to insert the statements we're about to - construct into the right basic blocks. The label l1 is the label - of the block for the THEN clause of the conditional jump we're - about to construct, and l2 is the ELSE clause, which is just the - continuation of the old statement stream. */ - l1 = create_artificial_label (); - l2 = create_artificial_label (); - cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi)); - bsi = *instr_bsi; - bsi_prev (&bsi); - if (! bsi_end_p (bsi)) - { - e = split_block (cond_bb, bsi_stmt (bsi)); - cond_bb = e->src; - join_bb = e->dest; - } + construct into the right basic blocks. */ + + cond_bb = gimple_bb (gsi_stmt (*instr_gsi)); + gsi = *instr_gsi; + gsi_prev (&gsi); + if (! gsi_end_p (gsi)) + e = split_block (cond_bb, gsi_stmt (gsi)); else - { - join_bb = cond_bb; - cond_bb = create_empty_bb (join_bb->prev_bb); - e = make_edge (cond_bb, join_bb, 0); - } - e->flags = EDGE_FALSE_VALUE; + e = split_block_after_labels (cond_bb); + cond_bb = e->src; + join_bb = e->dest; + + /* A recap at this point: join_bb is the basic block at whose head + is the gimple statement for which this check expression is being + built. cond_bb is the (possibly new, synthetic) basic block the + end of which will contain the cache-lookup code, and a + conditional that jumps to the cache-miss code or, much more + likely, over to join_bb. 
*/ + + /* Create the bb that contains the cache-miss fallback block (mf_check). */ then_bb = create_empty_bb (cond_bb); make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE); - make_edge (then_bb, join_bb, EDGE_FALLTHRU); + make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU); - /* We expect that the conditional jump we will construct will not - be taken very often as it basically is an exception condition. */ - predict_edge_def (then_bb->pred, PRED_MUDFLAP, NOT_TAKEN); + /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */ + e = find_edge (cond_bb, join_bb); + e->flags = EDGE_FALSE_VALUE; + e->count = cond_bb->count; + e->probability = REG_BR_PROB_BASE; /* Update dominance info. Note that bb_join's data was updated by split_block. */ - if (dom_computed[CDI_DOMINATORS] >= DOM_CONS_OK) + if (dom_info_available_p (CDI_DOMINATORS)) { set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb); set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb); } /* Build our local variables. */ - mf_value = create_tmp_var (ptrtype, "__mf_value"); - mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem"); - mf_base = create_tmp_var (mf_uintptr_type, "__mf_base"); - - /* Build: __mf_value =
. */ - t = build (MODIFY_EXPR, void_type_node, mf_value, unshare_expr (addr)); - SET_EXPR_LOCUS (t, locus); - gimplify_to_stmt_list (&t); - head = tsi_start (t); - tsi = tsi_last (t); - - /* Build: __mf_base = (uintptr_t)__mf_value. */ - t = build (MODIFY_EXPR, void_type_node, mf_base, - build1 (NOP_EXPR, mf_uintptr_type, mf_value)); - SET_EXPR_LOCUS (t, locus); - gimplify_to_stmt_list (&t); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem"); + mf_base = make_rename_temp (mf_uintptr_type, "__mf_base"); + mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit"); + + /* Build: __mf_base = (uintptr_t) . */ + seq = gimple_seq_alloc (); + t = fold_convert (mf_uintptr_type, unshare_expr (base)); + t = force_gimple_operand (t, &stmts, false, NULL_TREE); + gimple_seq_add_seq (&seq, stmts); + g = gimple_build_assign (mf_base, t); + gimple_set_location (g, location); + gimple_seq_add_stmt (&seq, g); + + /* Build: __mf_limit = (uintptr_t) . */ + t = fold_convert (mf_uintptr_type, unshare_expr (limit)); + t = force_gimple_operand (t, &stmts, false, NULL_TREE); + gimple_seq_add_seq (&seq, stmts); + g = gimple_build_assign (mf_limit, t); + gimple_set_location (g, location); + gimple_seq_add_stmt (&seq, g); /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift) & __mf_mask]. */ - t = build (RSHIFT_EXPR, mf_uintptr_type, mf_base, - (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l)); - t = build (BIT_AND_EXPR, mf_uintptr_type, t, - (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l)); - t = build (ARRAY_REF, - TREE_TYPE (TREE_TYPE (mf_cache_array_decl)), - mf_cache_array_decl, t); + t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base, + flag_mudflap_threads ? mf_cache_shift_decl + : mf_cache_shift_decl_l); + t = build2 (BIT_AND_EXPR, mf_uintptr_type, t, + flag_mudflap_threads ? mf_cache_mask_decl + : mf_cache_mask_decl_l); + t = build4 (ARRAY_REF, + TREE_TYPE (TREE_TYPE (mf_cache_array_decl)), + mf_cache_array_decl, t, NULL_TREE, NULL_TREE); t = build1 (ADDR_EXPR, mf_cache_structptr_type, t); - t = build (MODIFY_EXPR, void_type_node, mf_elem, t); - SET_EXPR_LOCUS (t, locus); - gimplify_to_stmt_list (&t); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + t = force_gimple_operand (t, &stmts, false, NULL_TREE); + gimple_seq_add_seq (&seq, stmts); + g = gimple_build_assign (mf_elem, t); + gimple_set_location (g, location); + gimple_seq_add_stmt (&seq, g); /* Quick validity check. if (__mf_elem->low > __mf_base - || (__mf_elem_high < __mf_base + sizeof(T) - 1)) - { - __mf_check (); - ... and only if single-threaded: - __mf_lookup_shift_1 = f...; - __mf_lookup_mask_l = ...; - } + || (__mf_elem_high < __mf_limit)) + { + __mf_check (); + ... and only if single-threaded: + __mf_lookup_shift_1 = f...; + __mf_lookup_mask_l = ...; + } It is expected that this body of code is rarely executed so we mark the edge to the THEN clause of the conditional jump as unlikely. */ /* Construct t <-- '__mf_elem->low > __mf_base'. */ - t = build (COMPONENT_REF, mf_uintptr_type, - build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), - TYPE_FIELDS (mf_cache_struct_type)); - t = build (GT_EXPR, boolean_type_node, t, mf_base); + t = build3 (COMPONENT_REF, mf_uintptr_type, + build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), + TYPE_FIELDS (mf_cache_struct_type), NULL_TREE); + t = build2 (GT_EXPR, boolean_type_node, t, mf_base); - /* Construct '__mf_elem->high < __mf_base + sizeof(T) - 1'. 
+ /* Construct '__mf_elem->high < __mf_limit'. First build: - 1) u <-- '__mf_elem->high' - 2) v <-- '__mf_base + sizeof (T) - 1'. + 1) u <-- '__mf_elem->high' + 2) v <-- '__mf_limit'. Then build 'u <-- (u < v). */ + u = build3 (COMPONENT_REF, mf_uintptr_type, + build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), + TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE); - u = build (COMPONENT_REF, mf_uintptr_type, - build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), - TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type))); - - v = convert (mf_uintptr_type, - size_binop (MINUS_EXPR, size, size_one_node)); - v = fold (build (PLUS_EXPR, mf_uintptr_type, mf_base, v)); + v = mf_limit; - u = build (LT_EXPR, boolean_type_node, u, v); + u = build2 (LT_EXPR, boolean_type_node, u, v); /* Build the composed conditional: t <-- 't || u'. Then store the result of the evaluation of 't' in a temporary variable which we can use as the condition for the conditional jump. */ - t = build (TRUTH_OR_EXPR, boolean_type_node, t, u); - cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond"); - t = build (MODIFY_EXPR, boolean_type_node, cond, t); - gimplify_to_stmt_list (&t); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u); + t = force_gimple_operand (t, &stmts, false, NULL_TREE); + gimple_seq_add_seq (&seq, stmts); + cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond"); + g = gimple_build_assign (cond, t); + gimple_set_location (g, location); + gimple_seq_add_stmt (&seq, g); /* Build the conditional jump. 'cond' is just a temporary so we can simply build a void COND_EXPR. We do need labels in both arms though. */ - t = build (COND_EXPR, void_type_node, cond, - build (GOTO_EXPR, void_type_node, tree_block_label (then_bb)), - build (GOTO_EXPR, void_type_node, tree_block_label (join_bb))); - SET_EXPR_LOCUS (t, locus); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE, + NULL_TREE); + gimple_set_location (g, location); + gimple_seq_add_stmt (&seq, g); /* At this point, after so much hard work, we have only constructed the conditional jump, if (__mf_elem->low > __mf_base - || (__mf_elem_high < __mf_base + sizeof(T) - 1)) + || (__mf_elem_high < __mf_limit)) The lowered GIMPLE tree representing this code is in the statement list starting at 'head'. - We can insert this now in the current basic block, ie. the one that + We can insert this now in the current basic block, i.e. the one that the statement we're instrumenting was originally in. */ - bsi = bsi_last (cond_bb); - for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi)) - bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING); + gsi = gsi_last_bb (cond_bb); + gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING); /* Now build up the body of the cache-miss handling: @@ -547,137 +679,222 @@ mf_build_check_statement_for (tree addr, tree size, refresh *_l vars. This is the body of the conditional. */ - - u = tree_cons (NULL_TREE, mf_file_function_line_tree (locus), NULL_TREE); - u = tree_cons (NULL_TREE, dirflag, u); - u = tree_cons (NULL_TREE, size, u); - u = tree_cons (NULL_TREE, mf_value, u); - t = build_function_call_expr (mf_check_fndecl, u); - gimplify_to_stmt_list (&t); - head = tsi_start (t); - tsi = tsi_last (t); + + seq = gimple_seq_alloc (); + /* u is a string, so it is already a gimple value. */ + u = mf_file_function_line_tree (location); + /* NB: we pass the overall [base..limit] range to mf_check. 
*/ + v = fold_build2 (PLUS_EXPR, mf_uintptr_type, + fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base), + build_int_cst (mf_uintptr_type, 1)); + v = force_gimple_operand (v, &stmts, true, NULL_TREE); + gimple_seq_add_seq (&seq, stmts); + g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u); + gimple_seq_add_stmt (&seq, g); if (! flag_mudflap_threads) { - t = build (MODIFY_EXPR, void_type_node, - mf_cache_shift_decl_l, mf_cache_shift_decl); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + if (stmt_ends_bb_p (g)) + { + gsi = gsi_start_bb (then_bb); + gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING); + e = split_block (then_bb, g); + then_bb = e->dest; + seq = gimple_seq_alloc (); + } - t = build (MODIFY_EXPR, void_type_node, - mf_cache_mask_decl_l, mf_cache_mask_decl); - tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); + g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl); + gimple_seq_add_stmt (&seq, g); + + g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl); + gimple_seq_add_stmt (&seq, g); } /* Insert the check code in the THEN block. */ - bsi = bsi_start (then_bb); - for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi)) - bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING); + gsi = gsi_start_bb (then_bb); + gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING); + + *instr_gsi = gsi_start_bb (join_bb); +} + - *instr_bsi = bsi_start (join_bb); - bsi_next (instr_bsi); +/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is + eligible for instrumentation. For the mudflap1 pass, this implies + that it should be registered with the libmudflap runtime. For the + mudflap2 pass this means instrumenting an indirection operation with + respect to the object. +*/ +static int +mf_decl_eligible_p (tree decl) +{ + return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL) + /* The decl must have its address taken. In the case of + arrays, this flag is also set if the indexes are not + compile-time known valid constants. */ + /* XXX: not sufficient: return-by-value structs! */ + && TREE_ADDRESSABLE (decl) + /* The type of the variable must be complete. */ + && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl)) + /* The decl hasn't been decomposed somehow. */ + && !DECL_HAS_VALUE_EXPR_P (decl)); } + static void -mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp, - location_t *locus, tree dirflag) +mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp, + location_t location, tree dirflag) { - tree type, ptr_type, addr, size, t; + tree type, base, limit, addr, size, t; /* Don't instrument read operations. */ if (dirflag == integer_zero_node && flag_mudflap_ignore_reads) return; + /* Don't instrument marked nodes. */ + if (mf_marked_p (*tp)) + return; + t = *tp; type = TREE_TYPE (t); + + if (type == error_mark_node) + return; + size = TYPE_SIZE_UNIT (type); switch (TREE_CODE (t)) { case ARRAY_REF: + case COMPONENT_REF: { - /* Omit checking if we can statically determine that the access is - valid. For non-addressable local arrays this is not optional, - since we won't have called __mf_register for the object. */ - tree op0, op1; - - op0 = TREE_OPERAND (t, 0); - op1 = TREE_OPERAND (t, 1); - while (TREE_CODE (op1) == INTEGER_CST) + /* This is trickier than it may first appear. The reason is + that we are looking at expressions from the "inside out" at + this point. We may have a complex nested aggregate/array + expression (e.g. 
"a.b[i].c"), maybe with an indirection as + the leftmost operator ("p->a.b.d"), where instrumentation + is necessary. Or we may have an innocent "a.b.c" + expression that must not be instrumented. We need to + recurse all the way down the nesting structure to figure it + out: looking just at the outer node is not enough. */ + tree var; + int component_ref_only = (TREE_CODE (t) == COMPONENT_REF); + /* If we have a bitfield component reference, we must note the + innermost addressable object in ELT, from which we will + construct the byte-addressable bounds of the bitfield. */ + tree elt = NULL_TREE; + int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF + && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1))); + + /* Iterate to the top of the ARRAY_REF/COMPONENT_REF + containment hierarchy to find the outermost VAR_DECL. */ + var = TREE_OPERAND (t, 0); + while (1) { - tree dom = TYPE_DOMAIN (TREE_TYPE (op0)); - - /* Test for index in range. Break if not. */ - if (!dom - || (! TYPE_MIN_VALUE (dom) - || ! really_constant_p (TYPE_MIN_VALUE (dom))) - || (! TYPE_MAX_VALUE (dom) - || ! really_constant_p (TYPE_MAX_VALUE (dom))) - || (tree_int_cst_lt (op1, TYPE_MIN_VALUE (dom)) - || tree_int_cst_lt (TYPE_MAX_VALUE (dom), op1))) - break; - - /* If we're looking at a non-external VAR_DECL, then the - access must be ok. */ - if (TREE_CODE (op0) == VAR_DECL && !DECL_EXTERNAL (op0)) - return; + if (bitfield_ref_p && elt == NULL_TREE + && (TREE_CODE (var) == ARRAY_REF + || TREE_CODE (var) == COMPONENT_REF)) + elt = var; + + if (TREE_CODE (var) == ARRAY_REF) + { + component_ref_only = 0; + var = TREE_OPERAND (var, 0); + } + else if (TREE_CODE (var) == COMPONENT_REF) + var = TREE_OPERAND (var, 0); + else if (INDIRECT_REF_P (var)) + { + base = TREE_OPERAND (var, 0); + break; + } + else if (TREE_CODE (var) == VIEW_CONVERT_EXPR) + { + var = TREE_OPERAND (var, 0); + if (CONSTANT_CLASS_P (var) + && TREE_CODE (var) != STRING_CST) + return; + } + else + { + gcc_assert (TREE_CODE (var) == VAR_DECL + || TREE_CODE (var) == PARM_DECL + || TREE_CODE (var) == RESULT_DECL + || TREE_CODE (var) == STRING_CST); + /* Don't instrument this access if the underlying + variable is not "eligible". This test matches + those arrays that have only known-valid indexes, + and thus are not labeled TREE_ADDRESSABLE. */ + if (! mf_decl_eligible_p (var) || component_ref_only) + return; + else + { + base = build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (var)), var); + break; + } + } + } - /* Only continue if we're still looking at an array. */ - if (TREE_CODE (op0) != ARRAY_REF) - break; + /* Handle the case of ordinary non-indirection structure + accesses. These have only nested COMPONENT_REF nodes (no + INDIRECT_REF), but pass through the above filter loop. + Note that it's possible for such a struct variable to match + the eligible_p test because someone else might take its + address sometime. */ - op1 = TREE_OPERAND (op0, 1); - op0 = TREE_OPERAND (op0, 0); + /* We need special processing for bitfield components, because + their addresses cannot be taken. */ + if (bitfield_ref_p) + { + tree field = TREE_OPERAND (t, 1); + + if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) + size = DECL_SIZE_UNIT (field); + + if (elt) + elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), + elt); + addr = fold_convert (ptr_type_node, elt ? elt : base); + addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, + addr, fold_convert (sizetype, + byte_position (field))); } - - /* If we got here, we couldn't statically the check. 
*/ - ptr_type = build_pointer_type (type); - addr = build1 (ADDR_EXPR, ptr_type, t); + else + addr = build1 (ADDR_EXPR, build_pointer_type (type), t); + + limit = fold_build2 (MINUS_EXPR, mf_uintptr_type, + fold_build2 (PLUS_EXPR, mf_uintptr_type, + convert (mf_uintptr_type, addr), + size), + integer_one_node); } break; case INDIRECT_REF: addr = TREE_OPERAND (t, 0); - ptr_type = TREE_TYPE (addr); + base = addr; + limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, + fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base, + size), + size_int (-1)); + break; + + case TARGET_MEM_REF: + addr = tree_mem_ref_addr (ptr_type_node, t); + base = addr; + limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, + fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base, + size), + size_int (-1)); break; case ARRAY_RANGE_REF: - warning ("mudflap checking not yet implemented for ARRAY_RANGE_REF"); + warning (OPT_Wmudflap, + "mudflap checking not yet implemented for ARRAY_RANGE_REF"); return; - case COMPONENT_REF: - { - tree field; - - /* If we're not dereferencing something, then the access - must be ok. */ - if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF) - return; - - field = TREE_OPERAND (t, 1); - - /* If we're looking at a bit field, then we can't take its address - with ADDR_EXPR -- lang_hooks.mark_addressable will error. Do - things the hard way with PLUS. */ - if (DECL_BIT_FIELD_TYPE (field)) - { - size = bitsize_int (BITS_PER_UNIT); - size = size_binop (CEIL_DIV_EXPR, DECL_SIZE (field), size); - size = convert (sizetype, size); - - addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0); - addr = convert (ptr_type_node, addr); - addr = fold (build (PLUS_EXPR, ptr_type_node, - addr, byte_position (field))); - } - else - { - ptr_type = build_pointer_type (type); - addr = build1 (ADDR_EXPR, ptr_type, t); - } - } - break; - case BIT_FIELD_REF: + /* ??? merge with COMPONENT_REF code above? */ { tree ofs, rem, bpu; @@ -689,7 +906,7 @@ mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp, bpu = bitsize_int (BITS_PER_UNIT); ofs = convert (bitsizetype, TREE_OPERAND (t, 2)); rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu); - ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu); + ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu)); size = convert (bitsizetype, TREE_OPERAND (t, 1)); size = size_binop (PLUS_EXPR, size, rem); @@ -698,7 +915,13 @@ mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp, addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0); addr = convert (ptr_type_node, addr); - addr = fold (build (PLUS_EXPR, ptr_type_node, addr, ofs)); + addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs); + + base = addr; + limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, + fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, + base, size), + size_int (-1)); } break; @@ -706,50 +929,52 @@ mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp, return; } - mf_build_check_statement_for (addr, size, iter, locus, dirflag); + mf_build_check_statement_for (base, limit, iter, location, dirflag); } static void mf_xform_derefs (void) { basic_block bb, next; - block_stmt_iterator i; + gimple_stmt_iterator i; int saved_last_basic_block = last_basic_block; + enum gimple_rhs_class grhs_class; bb = ENTRY_BLOCK_PTR ->next_bb; do { next = bb->next_bb; - for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) - { - tree s = bsi_stmt (i); + for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) + { + gimple s = gsi_stmt (i); - /* Only a few GIMPLE statements can reference memory. 
*/ - switch (TREE_CODE (s)) - { - case MODIFY_EXPR: - mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s), - integer_one_node); - mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 1), EXPR_LOCUS (s), - integer_zero_node); - break; - - case RETURN_EXPR: - if (TREE_OPERAND (s, 0) != NULL_TREE) - { - if (TREE_CODE (TREE_OPERAND (s, 0)) == MODIFY_EXPR) - mf_xform_derefs_1 (&i, &TREE_OPERAND (TREE_OPERAND (s, 0), 1), - EXPR_LOCUS (s), integer_zero_node); - else - mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s), - integer_zero_node); - } - break; - - default: - ; - } - } + /* Only a few GIMPLE statements can reference memory. */ + switch (gimple_code (s)) + { + case GIMPLE_ASSIGN: + mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s), + gimple_location (s), integer_one_node); + mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s), + gimple_location (s), integer_zero_node); + grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s)); + if (grhs_class == GIMPLE_BINARY_RHS) + mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s), + gimple_location (s), integer_zero_node); + break; + + case GIMPLE_RETURN: + if (gimple_return_retval (s) != NULL_TREE) + { + mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s), + gimple_location (s), + integer_zero_node); + } + break; + + default: + ; + } + } bb = next; } while (bb && bb->index <= saved_last_basic_block); @@ -764,19 +989,24 @@ mf_xform_derefs (void) of their BIND_EXPR binding context, and we lose liveness information for the declarations we wish to instrument. */ -static void +static unsigned int execute_mudflap_function_decls (void) { - if (mf_marked_p (current_function_decl)) - return; + struct gimplify_ctx gctx; + + /* Don't instrument functions such as the synthetic constructor + built during mudflap_finish_file. */ + if (mf_marked_p (current_function_decl) || + DECL_ARTIFICIAL (current_function_decl)) + return 0; - push_gimplify_context (); + push_gimplify_context (&gctx); - mf_init_extern_trees (); - mf_xform_decls (DECL_SAVED_TREE (current_function_decl), + mf_xform_decls (gimple_body (current_function_decl), DECL_ARGUMENTS (current_function_decl)); pop_gimplify_context (NULL); + return 0; } /* This struct is passed between mf_xform_decls to store state needed @@ -790,144 +1020,78 @@ struct mf_xform_decls_data /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of _DECLs if appropriate. Arrange to call the __mf_register function - now, and the __mf_unregister function later for each. */ -static void -mx_register_decls (tree decl, tree *stmt_list) + now, and the __mf_unregister function later for each. Return the + gimple sequence after synthesis. */ +gimple_seq +mx_register_decls (tree decl, gimple_seq seq, location_t location) { - tree finally_stmts = NULL_TREE; - tree_stmt_iterator initially_stmts = tsi_start (*stmt_list); + gimple_seq finally_stmts = NULL; + gimple_stmt_iterator initially_stmts = gsi_start (seq); while (decl != NULL_TREE) { - /* Eligible decl? */ - if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL) - /* It must be a non-external, automatic variable. */ - && ! DECL_EXTERNAL (decl) - && ! TREE_STATIC (decl) - /* The decl must have its address taken. */ - && TREE_ADDRESSABLE (decl) - /* The type of the variable must be complete. */ - && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl)) - /* Don't process the same decl twice. */ - && ! mf_marked_p (decl)) + if (mf_decl_eligible_p (decl) + /* Not already processed. */ + && ! mf_marked_p (decl) + /* Automatic variable. */ + && ! 
DECL_EXTERNAL (decl) + && ! TREE_STATIC (decl)) { tree size = NULL_TREE, variable_name; - tree unregister_fncall, unregister_fncall_params; - tree register_fncall, register_fncall_params; - - if (DECL_DEFER_OUTPUT (decl)) - { - /* Oh no ... it's probably a variable-length array (VLA). - The size and address cannot be computed by merely - looking at the DECL. See gimplfiy_decl_stmt for the - method by which VLA declarations turn into calls to - BUILT_IN_STACK_ALLOC. We assume that multiple - VLAs declared later in the same block get allocation - code later than the others. */ - tree stack_alloc_call = NULL_TREE; - - while(! tsi_end_p (initially_stmts)) - { - tree t = tsi_stmt (initially_stmts); - - tree call = NULL_TREE; - if (TREE_CODE (t) == CALL_EXPR) - call = t; - else if (TREE_CODE (t) == MODIFY_EXPR && - TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR) - call = TREE_OPERAND (t, 1); - else if (TREE_CODE (t) == TRY_FINALLY_EXPR) - { - /* We hope that this is the try/finally block sometimes - constructed by gimplify_bind_expr() for a BIND_EXPR - that contains VLAs. This very naive recursion - appears to be sufficient. */ - initially_stmts = tsi_start (TREE_OPERAND (t, 0)); - } - - if (call != NULL_TREE) - { - if (TREE_CODE (TREE_OPERAND(call, 0)) == ADDR_EXPR && - TREE_OPERAND (TREE_OPERAND (call, 0), 0) == - implicit_built_in_decls [BUILT_IN_STACK_ALLOC]) - { - tree stack_alloc_args = TREE_OPERAND (call, 1); - tree stack_alloc_op1 = TREE_VALUE (stack_alloc_args); - tree stack_alloc_op2 = TREE_VALUE (TREE_CHAIN (stack_alloc_args)); - - if (TREE_CODE (stack_alloc_op1) == ADDR_EXPR && - TREE_OPERAND (stack_alloc_op1, 0) == decl) - { - /* Got it! */ - size = stack_alloc_op2; - stack_alloc_call = call; - /* Advance iterator to point past this allocation call. */ - tsi_next (&initially_stmts); - break; - } - } - } - - tsi_next (&initially_stmts); - } + gimple unregister_fncall, register_fncall; + tree unregister_fncall_param, register_fncall_param; + + /* Variable-sized objects should have sizes already been + gimplified when we got here. */ + size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl))); + gcc_assert (is_gimple_val (size)); + + + unregister_fncall_param = + mf_mark (build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (decl)), + decl)); + /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */ + unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3, + unregister_fncall_param, + size, + build_int_cst (NULL_TREE, 3)); - if (stack_alloc_call == NULL_TREE) - { - warning ("mudflap cannot handle variable-sized declaration `%s'", - IDENTIFIER_POINTER (DECL_NAME (decl))); - break; - } - } - else - { - size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl))); - } - /* (& VARIABLE, sizeof (VARIABLE)) */ - unregister_fncall_params = - tree_cons (NULL_TREE, - convert (ptr_type_node, - mf_mark (build1 (ADDR_EXPR, - build_pointer_type (TREE_TYPE (decl)), - decl))), - tree_cons (NULL_TREE, size, NULL_TREE)); - /* __mf_unregister (...) 
*/ - unregister_fncall = build_function_call_expr (mf_unregister_fndecl, - unregister_fncall_params); - - /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */ variable_name = mf_varname_tree (decl); - register_fncall_params = - tree_cons (NULL_TREE, - convert (ptr_type_node, - mf_mark (build1 (ADDR_EXPR, - build_pointer_type (TREE_TYPE (decl)), - decl))), - tree_cons (NULL_TREE, - size, - tree_cons (NULL_TREE, - build_int_2 (3, 0), /* __MF_TYPE_STACK */ - tree_cons (NULL_TREE, - variable_name, - NULL_TREE)))); - - /* __mf_register (...) */ - register_fncall = build_function_call_expr (mf_register_fndecl, - register_fncall_params); + register_fncall_param = + mf_mark (build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (decl)), + decl)); + /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, + "name") */ + register_fncall = gimple_build_call (mf_register_fndecl, 4, + register_fncall_param, + size, + build_int_cst (NULL_TREE, 3), + variable_name); + /* Accumulate the two calls. */ - /* ??? Set EXPR_LOCUS. */ - gimplify_stmt (®ister_fncall); - gimplify_stmt (&unregister_fncall); + gimple_set_location (register_fncall, location); + gimple_set_location (unregister_fncall, location); /* Add the __mf_register call at the current appending point. */ - if (tsi_end_p (initially_stmts)) - internal_error ("mudflap ran off end of BIND_EXPR body"); - tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT); - - /* Accumulate the FINALLY piece. */ - append_to_statement_list (unregister_fncall, &finally_stmts); + if (gsi_end_p (initially_stmts)) + { + if (!DECL_ARTIFICIAL (decl)) + warning (OPT_Wmudflap, + "mudflap cannot track %qE in stub function", + DECL_NAME (decl)); + } + else + { + gsi_insert_before (&initially_stmts, register_fncall, + GSI_SAME_STMT); + /* Accumulate the FINALLY piece. */ + gimple_seq_add_stmt (&finally_stmts, unregister_fncall); + } mf_mark (decl); } @@ -935,39 +1099,46 @@ mx_register_decls (tree decl, tree *stmt_list) } /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */ - if (finally_stmts != NULL_TREE) + if (finally_stmts != NULL) { - tree t = build (TRY_FINALLY_EXPR, void_type_node, - *stmt_list, finally_stmts); - *stmt_list = NULL; - append_to_statement_list (t, stmt_list); + gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY); + gimple_seq new_seq = gimple_seq_alloc (); + + gimple_seq_add_stmt (&new_seq, stmt); + return new_seq; } + else + return seq; } /* Process every variable mentioned in BIND_EXPRs. */ static tree -mx_xfn_xform_decls (tree *t, int *continue_p, void *data) +mx_xfn_xform_decls (gimple_stmt_iterator *gsi, + bool *handled_operands_p ATTRIBUTE_UNUSED, + struct walk_stmt_info *wi) { - struct mf_xform_decls_data* d = (struct mf_xform_decls_data*) data; + struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info; + gimple stmt = gsi_stmt (*gsi); - if (*t == NULL_TREE || *t == error_mark_node) + switch (gimple_code (stmt)) { - *continue_p = 0; - return NULL_TREE; - } - - *continue_p = 1; - - switch (TREE_CODE (*t)) - { - case BIND_EXPR: + case GIMPLE_BIND: { /* Process function parameters now (but only once). 
*/ - mx_register_decls (d->param_decls, &BIND_EXPR_BODY (*t)); - d->param_decls = NULL_TREE; - - mx_register_decls (BIND_EXPR_VARS (*t), &BIND_EXPR_BODY (*t)); + if (d->param_decls) + { + gimple_bind_set_body (stmt, + mx_register_decls (d->param_decls, + gimple_bind_body (stmt), + gimple_location (stmt))); + d->param_decls = NULL_TREE; + } + + gimple_bind_set_body (stmt, + mx_register_decls (gimple_bind_vars (stmt), + gimple_bind_body (stmt), + gimple_location (stmt))); } break; @@ -975,7 +1146,7 @@ mx_xfn_xform_decls (tree *t, int *continue_p, void *data) break; } - return NULL; + return NULL_TREE; } /* Perform the object lifetime tracking mudflap transform on the given function @@ -987,11 +1158,18 @@ mx_xfn_xform_decls (tree *t, int *continue_p, void *data) */ static void -mf_xform_decls (tree fnbody, tree fnparams) +mf_xform_decls (gimple_seq fnbody, tree fnparams) { struct mf_xform_decls_data d; + struct walk_stmt_info wi; + struct pointer_set_t *pset = pointer_set_create (); + d.param_decls = fnparams; - walk_tree_without_duplicates (&fnbody, mx_xfn_xform_decls, &d); + memset (&wi, 0, sizeof (wi)); + wi.info = (void*) &d; + wi.pset = pset; + walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi); + pointer_set_destroy (pset); } @@ -1009,7 +1187,8 @@ mf_mark (tree t) void **slot; if (marked_trees == NULL) - marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer, NULL); + marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer, + NULL); slot = htab_find_slot (marked_trees, t, INSERT); *slot = t; @@ -1037,7 +1216,7 @@ mf_marked_p (tree t) delayed until program finish time. If they're still incomplete by then, warnings are emitted. */ -static GTY (()) varray_type deferred_static_decls; +static GTY (()) VEC(tree,gc) *deferred_static_decls; /* A list of statements for calling __mf_register() at startup time. */ static GTY (()) tree enqueued_call_stmt_chain; @@ -1045,22 +1224,17 @@ static GTY (()) tree enqueued_call_stmt_chain; static void mudflap_register_call (tree obj, tree object_size, tree varname) { - tree arg, args, call_stmt; - - args = tree_cons (NULL_TREE, varname, NULL_TREE); - - arg = build_int_2 (4, 0); /* __MF_TYPE_STATIC */ - args = tree_cons (NULL_TREE, arg, args); - - arg = convert (size_type_node, object_size); - args = tree_cons (NULL_TREE, arg, args); + tree arg, call_stmt; arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj); arg = convert (ptr_type_node, arg); - args = tree_cons (NULL_TREE, arg, args); - mf_init_extern_trees (); - call_stmt = build_function_call_expr (mf_register_fndecl, args); + call_stmt = build_call_expr (mf_register_fndecl, 4, + arg, + convert (size_type_node, object_size), + /* __MF_TYPE_STATIC */ + build_int_cst (NULL_TREE, 4), + varname); append_to_statement_list (call_stmt, &enqueued_call_stmt_chain); } @@ -1079,50 +1253,10 @@ mudflap_enqueue_decl (tree obj) if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj)) return; - if (COMPLETE_TYPE_P (TREE_TYPE (obj))) - { - tree object_size; - - mf_mark (obj); - - object_size = size_in_bytes (TREE_TYPE (obj)); - - if (dump_file) - { - fprintf (dump_file, "enqueue_decl obj=`"); - print_generic_expr (dump_file, obj, dump_flags); - fprintf (dump_file, "' size="); - print_generic_expr (dump_file, object_size, dump_flags); - fprintf (dump_file, "\n"); - } - - /* NB: the above condition doesn't require TREE_USED or - TREE_ADDRESSABLE. That's because this object may be a global - only used from other compilation units. 
XXX: Maybe static - objects could require those attributes being set. */ - - mudflap_register_call (obj, object_size, mf_varname_tree (obj)); - } - else - { - size_t i; - - if (! deferred_static_decls) - VARRAY_TREE_INIT (deferred_static_decls, 10, "deferred static list"); - - /* Ugh, linear search... */ - for (i = 0; i < VARRAY_ACTIVE_SIZE (deferred_static_decls); i++) - if (VARRAY_TREE (deferred_static_decls, i) == obj) - { - warning ("mudflap cannot track lifetime of `%s'", - IDENTIFIER_POINTER (DECL_NAME (obj))); - return; - } - - VARRAY_PUSH_TREE (deferred_static_decls, obj); - } + VEC_safe_push (tree, gc, deferred_static_decls, obj); } + void mudflap_enqueue_constant (tree obj) { @@ -1132,19 +1266,10 @@ mudflap_enqueue_constant (tree obj) return; if (TREE_CODE (obj) == STRING_CST) - object_size = build_int_2 (TREE_STRING_LENGTH (obj), 0); + object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj)); else object_size = size_in_bytes (TREE_TYPE (obj)); - if (dump_file) - { - fprintf (dump_file, "enqueue_constant obj=`"); - print_generic_expr (dump_file, obj, dump_flags); - fprintf (dump_file, "' size="); - print_generic_expr (dump_file, object_size, dump_flags); - fprintf (dump_file, "\n"); - } - if (TREE_CODE (obj) == STRING_CST) varname = mf_build_string ("string literal"); else @@ -1158,25 +1283,70 @@ mudflap_enqueue_constant (tree obj) void mudflap_finish_file (void) { - /* Try to give the deferred objects one final try. */ + tree ctor_statements = NULL_TREE; + + /* No need to continue when there were errors. */ + if (errorcount != 0 || sorrycount != 0) + return; + + /* Insert a call to __mf_init. */ + { + tree call2_stmt = build_call_expr (mf_init_fndecl, 0); + append_to_statement_list (call2_stmt, &ctor_statements); + } + + /* If appropriate, call __mf_set_options to pass along read-ignore mode. */ + if (flag_mudflap_ignore_reads) + { + tree arg = mf_build_string ("-ignore-reads"); + tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg); + append_to_statement_list (call_stmt, &ctor_statements); + } + + /* Process all enqueued object decls. */ if (deferred_static_decls) { size_t i; - - for (i = 0; i < VARRAY_ACTIVE_SIZE (deferred_static_decls); i++) + tree obj; + for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++) { - tree obj = VARRAY_TREE (deferred_static_decls, i); + gcc_assert (DECL_P (obj)); + + if (mf_marked_p (obj)) + continue; + + /* Omit registration for static unaddressed objects. NB: + Perform registration for non-static objects regardless of + TREE_USED or TREE_ADDRESSABLE, because they may be used + from other compilation units. */ + if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj)) + continue; - /* Call enqueue_decl again on the same object it has previously - put into the table. (It won't modify the table this time, so - infinite iteration is not a problem.) */ - mudflap_enqueue_decl (obj); + if (! COMPLETE_TYPE_P (TREE_TYPE (obj))) + { + warning (OPT_Wmudflap, + "mudflap cannot track unknown size extern %qE", + DECL_NAME (obj)); + continue; + } + + mudflap_register_call (obj, + size_in_bytes (TREE_TYPE (obj)), + mf_varname_tree (obj)); } - VARRAY_CLEAR (deferred_static_decls); + VEC_truncate (tree, deferred_static_decls, 0); + } + + /* Append all the enqueued registration calls. 
*/ + if (enqueued_call_stmt_chain) + { + append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements); + enqueued_call_stmt_chain = NULL_TREE; } - mflang_flush_calls (enqueued_call_stmt_chain); + cgraph_build_static_cdtor ('I', ctor_statements, + MAX_RESERVED_INIT_PRIORITY-1); } @@ -1186,37 +1356,43 @@ gate_mudflap (void) return flag_mudflap != 0; } -struct tree_opt_pass pass_mudflap_1 = +struct gimple_opt_pass pass_mudflap_1 = { + { + GIMPLE_PASS, "mudflap1", /* name */ gate_mudflap, /* gate */ execute_mudflap_function_decls, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ - 0, /* tv_id */ + TV_NONE, /* tv_id */ PROP_gimple_any, /* properties_required */ 0, /* properties_provided */ 0, /* properties_destroyed */ 0, /* todo_flags_start */ TODO_dump_func /* todo_flags_finish */ + } }; -struct tree_opt_pass pass_mudflap_2 = +struct gimple_opt_pass pass_mudflap_2 = { + { + GIMPLE_PASS, "mudflap2", /* name */ gate_mudflap, /* gate */ execute_mudflap_function_ops, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ - 0, /* tv_id */ - PROP_gimple_leh, /* properties_required */ + TV_NONE, /* tv_id */ + PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */ 0, /* properties_provided */ 0, /* properties_destroyed */ 0, /* todo_flags_start */ TODO_verify_flow | TODO_verify_stmts - | TODO_dump_func /* todo_flags_finish */ + | TODO_dump_func | TODO_update_ssa /* todo_flags_finish */ + } }; #include "gt-tree-mudflap.h"
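
For readers following the patch, here is a rough C-level sketch of the check sequence that mf_build_check_statement_for now emits for each instrumented memory access (the fast lookup-cache test followed by the __mf_check slow path). It is only an illustration under stated assumptions: the pass actually builds GIMPLE statements and splits basic blocks as described in the comments above, and the helper name mf_check_sketch, the literal location string, and the standalone-function framing are invented for this sketch. The runtime names (struct __mf_cache, __mf_lookup_cache, __mf_lc_shift, __mf_lc_mask, __mf_check) are the ones declared earlier in the patch.

#include <stdint.h>
#include <stddef.h>

/* Runtime interface, mirroring the extern decls built by mudflap_init.  */
struct __mf_cache { uintptr_t low; uintptr_t high; };
extern struct __mf_cache __mf_lookup_cache[];
extern unsigned char __mf_lc_shift;
extern uintptr_t __mf_lc_mask;
extern void __mf_check (void *ptr, size_t sz, int type, const char *location);

/* Hypothetical helper, not part of the patch: roughly what the instrumented
   code does before an access of SZ bytes starting at PTR.  DIRFLAG is 0 for
   reads and 1 for writes, matching the integer_zero_node/integer_one_node
   arguments passed by mf_xform_derefs_1.  */
static void
mf_check_sketch (void *ptr, size_t sz, int dirflag)
{
  uintptr_t base  = (uintptr_t) ptr;           /* __mf_base  */
  uintptr_t limit = base + sz - 1;             /* __mf_limit */

  /* __mf_elem = &__mf_lookup_cache[(__mf_base >> __mf_lc_shift) & __mf_lc_mask] */
  struct __mf_cache *elem =
    &__mf_lookup_cache[(base >> __mf_lc_shift) & __mf_lc_mask];

  /* Quick validity check; the rarely taken slow path calls the runtime,
     which (in single-threaded mode) also refreshes the local shadows of
     the shift and mask.  */
  if (elem->low > base || elem->high < limit)
    __mf_check ((void *) base, sz, dirflag, "file:line:col (function)");
}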