/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
- Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Frank Ch. Eigler <fche@redhat.com>
and Graydon Hoare <graydon@redhat.com>
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
-#include "errors.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "langhooks.h"
#include "ggc.h"
#include "cgraph.h"
+#include "toplev.h"
/* Internal function decls */
+
+/* Options. */
+#define flag_mudflap_threads (flag_mudflap == 2)
+
/* Helpers. */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_derefs (void);
-static void execute_mudflap_function_ops (void);
+static unsigned int execute_mudflap_function_ops (void);
/* Addressable variables instrumentation. */
static void mf_xform_decls (tree, tree);
static tree mx_xfn_xform_decls (tree *, int *, void *);
static void mx_register_decls (tree, tree *);
-static void execute_mudflap_function_decls (void);
+static unsigned int execute_mudflap_function_decls (void);
/* ------------------------------------------------------------------------ */
tree result = mf_mark (build_string (len + 1, string));
TREE_TYPE (result) = build_array_type
- (char_type_node, build_index_type (build_int_cst (NULL_TREE, len, 0)));
+ (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
TREE_CONSTANT (result) = 1;
TREE_INVARIANT (result) = 1;
TREE_READONLY (result) = 1;
const char *buf_contents;
tree result;
- if (decl == NULL_TREE)
- abort ();
+ gcc_assert (decl);
if (!initialized)
{
}
pp_clear_output_area (buf);
- /* Add FILENAME[:LINENUMBER]. */
+ /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
{
expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
const char *sourcefile;
unsigned sourceline = xloc.line;
-
+ unsigned sourcecolumn = 0;
+#ifdef USE_MAPPED_LOCATION
+ sourcecolumn = xloc.column;
+#endif
sourcefile = xloc.file;
if (sourcefile == NULL && current_function_decl != NULL_TREE)
sourcefile = DECL_SOURCE_FILE (current_function_decl);
{
pp_string (buf, ":");
pp_decimal_int (buf, sourceline);
+
+ if (sourcecolumn != 0)
+ {
+ pp_string (buf, ":");
+ pp_decimal_int (buf, sourcecolumn);
+ }
}
}
if (current_function_decl != NULL_TREE)
{
- /* Add (FUNCTION): */
+ /* Add (FUNCTION) */
pp_string (buf, " (");
{
const char *funcname = NULL;
{
const char *declname = NULL;
- if (strcmp ("GNU C++", lang_hooks.name) == 0 &&
- DECL_NAME (decl) != NULL)
+ if (DECL_NAME (decl) != NULL)
{
- /* The gcc/cp decl_printable_name hook doesn't do as good a job as
- the libiberty demangler. */
- declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
- DMGL_AUTO | DMGL_VERBOSE);
+ if (strcmp ("GNU C++", lang_hooks.name) == 0)
+ {
+ /* The gcc/cp decl_printable_name hook doesn't do as good a job as
+ the libiberty demangler. */
+ declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
+ DMGL_AUTO | DMGL_VERBOSE);
+ }
+ if (declname == NULL)
+ declname = lang_hooks.decl_printable_name (decl, 3);
}
-
- if (declname == NULL)
- declname = lang_hooks.decl_printable_name (decl, 3);
-
if (declname == NULL)
declname = "<unnamed variable>";
{
expanded_location xloc = expand_location (location);
const char *file = NULL, *colon, *line, *op, *name, *cp;
- char linebuf[18];
+ char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
char *string;
tree result;
- /* Add FILENAME[:LINENUMBER]. */
+ /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
file = xloc.file;
if (file == NULL && current_function_decl != NULL_TREE)
file = DECL_SOURCE_FILE (current_function_decl);
if (xloc.line > 0)
{
- sprintf (linebuf, "%d", xloc.line);
+#ifdef USE_MAPPED_LOCATION
+ if (xloc.column > 0)
+ sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
+ else
+#endif
+ sprintf (linecolbuf, "%d", xloc.line);
colon = ":";
- line = linebuf;
+ line = linecolbuf;
}
else
colon = line = "";
/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;
-/* Their function-scope local shadows, used in single-threaded mode only. */
+/* Their function-scope local shadows, used in single-threaded mode only. */
/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;
unsigned_char_type_node);
mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
mf_uintptr_type);
+ /* Don't process these in mudflap_enqueue_decl, should they come by
+ there for some reason. */
+ mf_mark (mf_cache_array_decl);
+ mf_mark (mf_cache_shift_decl);
+ mf_mark (mf_cache_mask_decl);
mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
mf_check_register_fntype);
mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
tree optimizations have been performed, but we have to preserve the CFG
for expansion from trees to RTL. */
-static void
+static unsigned int
execute_mudflap_function_ops (void)
{
- if (mf_marked_p (current_function_decl))
- return;
+ /* Don't instrument functions such as the synthetic constructor
+ built during mudflap_finish_file. */
+ if (mf_marked_p (current_function_decl) ||
+ DECL_ARTIFICIAL (current_function_decl))
+ return 0;
push_gimplify_context ();
mf_decl_clear_locals ();
pop_gimplify_context (NULL);
+ return 0;
}
/* Create and initialize local shadow variables for the lookup cache
globals. Put their decls in the *_l globals for use by
- mf_build_check_statement_for. */
+ mf_build_check_statement_for. */
static void
mf_decl_cache_locals (void)
/* Build initialization nodes for the cache vars. We just load the
globals into the cache variables. */
- t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_shift_decl_l),
- mf_cache_shift_decl_l, mf_cache_shift_decl);
+ t = build2 (MODIFY_EXPR, TREE_TYPE (mf_cache_shift_decl_l),
+ mf_cache_shift_decl_l, mf_cache_shift_decl);
SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
gimplify_to_stmt_list (&t);
shift_init_stmts = t;
- t = build (MODIFY_EXPR, TREE_TYPE (mf_cache_mask_decl_l),
- mf_cache_mask_decl_l, mf_cache_mask_decl);
+ t = build2 (MODIFY_EXPR, TREE_TYPE (mf_cache_mask_decl_l),
+ mf_cache_mask_decl_l, mf_cache_mask_decl);
SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
gimplify_to_stmt_list (&t);
mask_init_stmts = t;
! tsi_end_p (tsi);
tsi_next (&tsi))
insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
- bsi_commit_edge_inserts (NULL);
+ bsi_commit_edge_inserts ();
}
static void
mf_decl_clear_locals (void)
{
- /* Unset local shadows. */
+ /* Unset local shadows. */
mf_cache_shift_decl_l = NULL_TREE;
mf_cache_mask_decl_l = NULL_TREE;
}
static void
-mf_build_check_statement_for (tree addr, tree size,
+mf_build_check_statement_for (tree base, tree limit,
block_stmt_iterator *instr_bsi,
location_t *locus, tree dirflag)
{
tree_stmt_iterator head, tsi;
- tree ptrtype = TREE_TYPE (addr);
block_stmt_iterator bsi;
basic_block cond_bb, then_bb, join_bb;
edge e;
- tree cond, t, u, v, l1, l2;
- tree mf_value;
+ tree cond, t, u, v;
tree mf_base;
tree mf_elem;
+ tree mf_limit;
/* We first need to split the current basic block, and start altering
the CFG. This allows us to insert the statements we're about to
- construct into the right basic blocks. The label l1 is the label
- of the block for the THEN clause of the conditional jump we're
- about to construct, and l2 is the ELSE clause, which is just the
- continuation of the old statement stream. */
- l1 = create_artificial_label ();
- l2 = create_artificial_label ();
+ construct into the right basic blocks. */
+
cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
bsi = *instr_bsi;
bsi_prev (&bsi);
if (! bsi_end_p (bsi))
- {
- e = split_block (cond_bb, bsi_stmt (bsi));
- cond_bb = e->src;
- join_bb = e->dest;
- }
+ e = split_block (cond_bb, bsi_stmt (bsi));
else
- {
- join_bb = cond_bb;
- cond_bb = create_empty_bb (join_bb->prev_bb);
- e = make_edge (cond_bb, join_bb, 0);
- }
- e->flags = EDGE_FALSE_VALUE;
+ e = split_block_after_labels (cond_bb);
+ cond_bb = e->src;
+ join_bb = e->dest;
+
+ /* A recap at this point: join_bb is the basic block at whose head
+ is the gimple statement for which this check expression is being
+ built. cond_bb is the (possibly new, synthetic) basic block the
+ end of which will contain the cache-lookup code, and a
+ conditional that jumps to the cache-miss code or, much more
+ likely, over to join_bb. */
+
+ /* Create the bb that contains the cache-miss fallback block (mf_check). */
then_bb = create_empty_bb (cond_bb);
make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
- make_edge (then_bb, join_bb, EDGE_FALLTHRU);
+ make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
- /* We expect that the conditional jump we will construct will not
- be taken very often as it basically is an exception condition. */
- predict_edge_def (then_bb->pred, PRED_MUDFLAP, NOT_TAKEN);
+ /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
+ e = find_edge (cond_bb, join_bb);
+ e->flags = EDGE_FALSE_VALUE;
+ e->count = cond_bb->count;
+ e->probability = REG_BR_PROB_BASE;
/* Update dominance info. Note that bb_join's data was
updated by split_block. */
- if (dom_computed[CDI_DOMINATORS] >= DOM_CONS_OK)
+ if (dom_info_available_p (CDI_DOMINATORS))
{
set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
}
/* Build our local variables. */
- mf_value = create_tmp_var (ptrtype, "__mf_value");
mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
+ mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");
- /* Build: __mf_value = <address expression>. */
- t = build (MODIFY_EXPR, void_type_node, mf_value, unshare_expr (addr));
+ /* Build: __mf_base = (uintptr_t) <base address expression>. */
+ t = build2 (MODIFY_EXPR, void_type_node, mf_base,
+ convert (mf_uintptr_type, unshare_expr (base)));
SET_EXPR_LOCUS (t, locus);
gimplify_to_stmt_list (&t);
head = tsi_start (t);
tsi = tsi_last (t);
- /* Build: __mf_base = (uintptr_t)__mf_value. */
- t = build (MODIFY_EXPR, void_type_node, mf_base,
- build1 (NOP_EXPR, mf_uintptr_type, mf_value));
+ /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
+ t = build2 (MODIFY_EXPR, void_type_node, mf_limit,
+ convert (mf_uintptr_type, unshare_expr (limit)));
SET_EXPR_LOCUS (t, locus);
gimplify_to_stmt_list (&t);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
/* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
& __mf_mask]. */
- t = build (RSHIFT_EXPR, mf_uintptr_type, mf_base,
- (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
- t = build (BIT_AND_EXPR, mf_uintptr_type, t,
- (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
- t = build (ARRAY_REF,
- TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
- mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
+ t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
+ (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
+ t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
+ (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
+ t = build4 (ARRAY_REF,
+ TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
+ mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
- t = build (MODIFY_EXPR, void_type_node, mf_elem, t);
+ t = build2 (MODIFY_EXPR, void_type_node, mf_elem, t);
SET_EXPR_LOCUS (t, locus);
gimplify_to_stmt_list (&t);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
/* Quick validity check.
if (__mf_elem->low > __mf_base
- || (__mf_elem_high < __mf_base + sizeof(T) - 1))
+ || (__mf_elem_high < __mf_limit))
{
__mf_check ();
... and only if single-threaded:
the edge to the THEN clause of the conditional jump as unlikely. */
/* Construct t <-- '__mf_elem->low > __mf_base'. */
- t = build (COMPONENT_REF, mf_uintptr_type,
- build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
- TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
- t = build (GT_EXPR, boolean_type_node, t, mf_base);
+ t = build3 (COMPONENT_REF, mf_uintptr_type,
+ build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
+ TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
+ t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
- /* Construct '__mf_elem->high < __mf_base + sizeof(T) - 1'.
+ /* Construct '__mf_elem->high < __mf_limit'.
First build:
1) u <-- '__mf_elem->high'
- 2) v <-- '__mf_base + sizeof (T) - 1'.
+ 2) v <-- '__mf_limit'.
Then build 'u <-- (u < v)'. */
+ u = build3 (COMPONENT_REF, mf_uintptr_type,
+ build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
+ TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
- u = build (COMPONENT_REF, mf_uintptr_type,
- build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
- TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
+ v = mf_limit;
- v = convert (mf_uintptr_type,
- size_binop (MINUS_EXPR, size, size_one_node));
- v = fold (build (PLUS_EXPR, mf_uintptr_type, mf_base, v));
-
- u = build (LT_EXPR, boolean_type_node, u, v);
+ u = build2 (LT_EXPR, boolean_type_node, u, v);
/* Build the composed conditional: t <-- 't || u'. Then store the
result of the evaluation of 't' in a temporary variable which we
can use as the condition for the conditional jump. */
- t = build (TRUTH_OR_EXPR, boolean_type_node, t, u);
+ t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
- t = build (MODIFY_EXPR, boolean_type_node, cond, t);
+ t = build2 (MODIFY_EXPR, boolean_type_node, cond, t);
gimplify_to_stmt_list (&t);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
/* Build the conditional jump. 'cond' is just a temporary so we can
simply build a void COND_EXPR. We do need labels in both arms though. */
- t = build (COND_EXPR, void_type_node, cond,
- build (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
- build (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
+ t = build3 (COND_EXPR, void_type_node, cond,
+ build1 (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
+ build1 (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
SET_EXPR_LOCUS (t, locus);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
the conditional jump,
if (__mf_elem->low > __mf_base
- || (__mf_elem_high < __mf_base + sizeof(T) - 1))
+ || (__mf_elem_high < __mf_limit))
The lowered GIMPLE tree representing this code is in the statement
list starting at 'head'.
- We can insert this now in the current basic block, ie. the one that
+ We can insert this now in the current basic block, i.e. the one that
the statement we're instrumenting was originally in. */
bsi = bsi_last (cond_bb);
for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
: *locus),
NULL_TREE);
u = tree_cons (NULL_TREE, dirflag, u);
- u = tree_cons (NULL_TREE, size, u);
- u = tree_cons (NULL_TREE, mf_value, u);
+ /* NB: we pass the overall [base..limit] range to mf_check. */
+ u = tree_cons (NULL_TREE,
+ fold_build2 (PLUS_EXPR, integer_type_node,
+ fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
+ integer_one_node),
+ u);
+ u = tree_cons (NULL_TREE, mf_base, u);
t = build_function_call_expr (mf_check_fndecl, u);
gimplify_to_stmt_list (&t);
head = tsi_start (t);
if (! flag_mudflap_threads)
{
- t = build (MODIFY_EXPR, void_type_node,
- mf_cache_shift_decl_l, mf_cache_shift_decl);
+ t = build2 (MODIFY_EXPR, void_type_node,
+ mf_cache_shift_decl_l, mf_cache_shift_decl);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
- t = build (MODIFY_EXPR, void_type_node,
- mf_cache_mask_decl_l, mf_cache_mask_decl);
+ t = build2 (MODIFY_EXPR, void_type_node,
+ mf_cache_mask_decl_l, mf_cache_mask_decl);
tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
}
bsi_next (instr_bsi);
}
+
+/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
+ eligible for instrumentation. For the mudflap1 pass, this implies
+ that it should be registered with the libmudflap runtime. For the
+ mudflap2 pass this means instrumenting an indirection operation with
+ respect to the object.
+*/
+static int
+mf_decl_eligible_p (tree decl)
+{
+ return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
+ /* The decl must have its address taken. In the case of
+ arrays, this flag is also set if the indexes are not
+ compile-time known valid constants. */
+ && TREE_ADDRESSABLE (decl) /* XXX: not sufficient: return-by-value structs! */
+ /* The type of the variable must be complete. */
+ && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
+ /* The decl hasn't been decomposed somehow. */
+ && !DECL_HAS_VALUE_EXPR_P (decl));
+}
+
+
static void
mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
location_t *locus, tree dirflag)
{
- tree type, ptr_type, addr, size, t;
+ tree type, base, limit, addr, size, t;
/* Don't instrument read operations. */
if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
return;
+ /* Don't instrument marked nodes. */
+ if (mf_marked_p (*tp))
+ return;
+
t = *tp;
type = TREE_TYPE (t);
+
+ if (type == error_mark_node)
+ return;
+
size = TYPE_SIZE_UNIT (type);
switch (TREE_CODE (t))
{
case ARRAY_REF:
+ case COMPONENT_REF:
{
- /* Omit checking if we can statically determine that the access is
- valid. For non-addressable local arrays this is not optional,
- since we won't have called __mf_register for the object. */
- tree op0, op1;
-
- op0 = TREE_OPERAND (t, 0);
- op1 = TREE_OPERAND (t, 1);
- while (TREE_CODE (op1) == INTEGER_CST)
+ /* This is trickier than it may first appear. The reason is
+ that we are looking at expressions from the "inside out" at
+ this point. We may have a complex nested aggregate/array
+ expression (e.g. "a.b[i].c"), maybe with an indirection as
+ the leftmost operator ("p->a.b.d"), where instrumentation
+ is necessary. Or we may have an innocent "a.b.c"
+ expression that must not be instrumented. We need to
+ recurse all the way down the nesting structure to figure it
+ out: looking just at the outer node is not enough. */
+ tree var;
+ int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
+ /* If we have a bitfield component reference, we must note the
+ innermost addressable object in ELT, from which we will
+ construct the byte-addressable bounds of the bitfield. */
+ tree elt = NULL_TREE;
+ int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
+
+ /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
+ containment hierarchy to find the outermost VAR_DECL. */
+ var = TREE_OPERAND (t, 0);
+ while (1)
{
- tree dom = TYPE_DOMAIN (TREE_TYPE (op0));
-
- /* Test for index in range. Break if not. */
- if (!dom
- || (! TYPE_MIN_VALUE (dom)
- || ! really_constant_p (TYPE_MIN_VALUE (dom)))
- || (! TYPE_MAX_VALUE (dom)
- || ! really_constant_p (TYPE_MAX_VALUE (dom)))
- || (tree_int_cst_lt (op1, TYPE_MIN_VALUE (dom))
- || tree_int_cst_lt (TYPE_MAX_VALUE (dom), op1)))
- break;
+ if (bitfield_ref_p && elt == NULL_TREE
+ && (TREE_CODE (var) == ARRAY_REF || TREE_CODE (var) == COMPONENT_REF))
+ elt = var;
+
+ if (TREE_CODE (var) == ARRAY_REF)
+ {
+ component_ref_only = 0;
+ var = TREE_OPERAND (var, 0);
+ }
+ else if (TREE_CODE (var) == COMPONENT_REF)
+ var = TREE_OPERAND (var, 0);
+ else if (INDIRECT_REF_P (var))
+ {
+ base = TREE_OPERAND (var, 0);
+ break;
+ }
+ else
+ {
+ gcc_assert (TREE_CODE (var) == VAR_DECL
+ || TREE_CODE (var) == PARM_DECL
+ || TREE_CODE (var) == RESULT_DECL
+ || TREE_CODE (var) == STRING_CST);
+ /* Don't instrument this access if the underlying
+ variable is not "eligible". This test matches
+ those arrays that have only known-valid indexes,
+ and thus are not labeled TREE_ADDRESSABLE. */
+ if (! mf_decl_eligible_p (var) || component_ref_only)
+ return;
+ else
+ {
+ base = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);
+ break;
+ }
+ }
+ }
- /* If we're looking at a non-external VAR_DECL, then the
- access must be ok. */
- if (TREE_CODE (op0) == VAR_DECL && !DECL_EXTERNAL (op0))
- return;
+ /* Handle the case of ordinary non-indirection structure
+ accesses. These have only nested COMPONENT_REF nodes (no
+ INDIRECT_REF), but pass through the above filter loop.
+ Note that it's possible for such a struct variable to match
+ the eligible_p test because someone else might take its
+ address sometime. */
- /* Only continue if we're still looking at an array. */
- if (TREE_CODE (op0) != ARRAY_REF)
- break;
+ /* We need special processing for bitfield components, because
+ their addresses cannot be taken. */
+ if (bitfield_ref_p)
+ {
+ tree field = TREE_OPERAND (t, 1);
- op1 = TREE_OPERAND (op0, 1);
- op0 = TREE_OPERAND (op0, 0);
+ if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
+ size = DECL_SIZE_UNIT (field);
+
+ if (elt)
+ elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), elt);
+ addr = fold_convert (ptr_type_node, elt ? elt : base);
+ addr = fold_build2 (PLUS_EXPR, ptr_type_node,
+ addr, fold_convert (ptr_type_node,
+ byte_position (field)));
}
-
- /* If we got here, we couldn't statically the check. */
- ptr_type = build_pointer_type (type);
- addr = build1 (ADDR_EXPR, ptr_type, t);
+ else
+ addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
+
+ limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
+ fold_build2 (PLUS_EXPR, mf_uintptr_type,
+ convert (mf_uintptr_type, addr),
+ size),
+ integer_one_node);
}
break;
case INDIRECT_REF:
addr = TREE_OPERAND (t, 0);
- ptr_type = TREE_TYPE (addr);
+ base = addr;
+ limit = fold_build2 (MINUS_EXPR, ptr_type_node,
+ fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
+ integer_one_node);
+ break;
+
+ case TARGET_MEM_REF:
+ addr = tree_mem_ref_addr (ptr_type_node, t);
+ base = addr;
+ limit = fold_build2 (MINUS_EXPR, ptr_type_node,
+ fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
+ build_int_cst (ptr_type_node, 1));
break;
case ARRAY_RANGE_REF:
- warning ("mudflap checking not yet implemented for ARRAY_RANGE_REF");
+ warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
return;
- case COMPONENT_REF:
- {
- tree field;
-
- /* If we're not dereferencing something, then the access
- must be ok. */
- if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
- return;
-
- field = TREE_OPERAND (t, 1);
-
- /* If we're looking at a bit field, then we can't take its address
- with ADDR_EXPR -- lang_hooks.mark_addressable will error. Do
- things the hard way with PLUS. */
- if (DECL_BIT_FIELD_TYPE (field))
- {
- if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
- size = DECL_SIZE_UNIT (field);
-
- addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
- addr = fold_convert (ptr_type_node, addr);
- addr = fold (build (PLUS_EXPR, ptr_type_node,
- addr, fold_convert (ptr_type_node,
- byte_position (field))));
- }
- else
- {
- ptr_type = build_pointer_type (type);
- addr = build1 (ADDR_EXPR, ptr_type, t);
- }
- }
- break;
-
case BIT_FIELD_REF:
+ /* ??? merge with COMPONENT_REF code above? */
{
tree ofs, rem, bpu;
addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
addr = convert (ptr_type_node, addr);
- addr = fold (build (PLUS_EXPR, ptr_type_node, addr, ofs));
+ addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);
+
+ base = addr;
+ limit = fold_build2 (MINUS_EXPR, ptr_type_node,
+ fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
+ integer_one_node);
}
break;
return;
}
- mf_build_check_statement_for (addr, size, iter, locus, dirflag);
+ mf_build_check_statement_for (base, limit, iter, locus, dirflag);
}
static void
of their BIND_EXPR binding context, and we lose liveness information
for the declarations we wish to instrument. */
-static void
+static unsigned int
execute_mudflap_function_decls (void)
{
- if (mf_marked_p (current_function_decl))
- return;
+ /* Don't instrument functions such as the synthetic constructor
+ built during mudflap_finish_file. */
+ if (mf_marked_p (current_function_decl) ||
+ DECL_ARTIFICIAL (current_function_decl))
+ return 0;
push_gimplify_context ();
DECL_ARGUMENTS (current_function_decl));
pop_gimplify_context (NULL);
+ return 0;
}
/* This struct is passed between mf_xform_decls to store state needed
while (decl != NULL_TREE)
{
- /* Eligible decl? */
- if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
- /* It must be a non-external, automatic variable. */
+ if (mf_decl_eligible_p (decl)
+ /* Not already processed. */
+ && ! mf_marked_p (decl)
+ /* Automatic variable. */
&& ! DECL_EXTERNAL (decl)
- && ! TREE_STATIC (decl)
- /* The decl must have its address taken. */
- && TREE_ADDRESSABLE (decl)
- /* The type of the variable must be complete. */
- && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
- /* The decl hasn't been decomposed somehow. */
- && DECL_VALUE_EXPR (decl) == NULL
- /* Don't process the same decl twice. */
- && ! mf_marked_p (decl))
+ && ! TREE_STATIC (decl))
{
tree size = NULL_TREE, variable_name;
tree unregister_fncall, unregister_fncall_params;
size,
tree_cons (NULL_TREE,
/* __MF_TYPE_STACK */
- build_int_cst (NULL_TREE, 3, 0),
+ build_int_cst (NULL_TREE, 3),
NULL_TREE)));
/* __mf_unregister (...) */
unregister_fncall = build_function_call_expr (mf_unregister_fndecl,
size,
tree_cons (NULL_TREE,
/* __MF_TYPE_STACK */
- build_int_cst (NULL_TREE, 3, 0),
+ build_int_cst (NULL_TREE, 3),
tree_cons (NULL_TREE,
variable_name,
NULL_TREE))));
/* Add the __mf_register call at the current appending point. */
if (tsi_end_p (initially_stmts))
- internal_error ("mudflap ran off end of BIND_EXPR body");
- tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);
-
- /* Accumulate the FINALLY piece. */
- append_to_statement_list (unregister_fncall, &finally_stmts);
-
+ warning (0, "mudflap cannot track %qs in stub function",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ else
+ {
+ tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);
+
+ /* Accumulate the FINALLY piece. */
+ append_to_statement_list (unregister_fncall, &finally_stmts);
+ }
mf_mark (decl);
}
/* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
if (finally_stmts != NULL_TREE)
{
- tree t = build (TRY_FINALLY_EXPR, void_type_node,
- *stmt_list, finally_stmts);
+ tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
+ *stmt_list, finally_stmts);
*stmt_list = NULL;
append_to_statement_list (t, stmt_list);
}
break;
}
- return NULL;
+ return NULL_TREE;
}
/* Perform the object lifetime tracking mudflap transform on the given function
delayed until program finish time. If they're still incomplete by
then, warnings are emitted. */
-static GTY (()) varray_type deferred_static_decls;
+static GTY (()) VEC(tree,gc) *deferred_static_decls;
/* A list of statements for calling __mf_register() at startup time. */
static GTY (()) tree enqueued_call_stmt_chain;
args = tree_cons (NULL_TREE, varname, NULL_TREE);
- arg = build_int_cst (NULL_TREE, 4, 0); /* __MF_TYPE_STATIC */
+ arg = build_int_cst (NULL_TREE, 4); /* __MF_TYPE_STATIC */
args = tree_cons (NULL_TREE, arg, args);
arg = convert (size_type_node, object_size);
if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
return;
- if (COMPLETE_TYPE_P (TREE_TYPE (obj)))
- {
- tree object_size;
-
- mf_mark (obj);
-
- object_size = size_in_bytes (TREE_TYPE (obj));
-
- if (dump_file)
- {
- fprintf (dump_file, "enqueue_decl obj=`");
- print_generic_expr (dump_file, obj, dump_flags);
- fprintf (dump_file, "' size=");
- print_generic_expr (dump_file, object_size, dump_flags);
- fprintf (dump_file, "\n");
- }
-
- /* NB: the above condition doesn't require TREE_USED or
- TREE_ADDRESSABLE. That's because this object may be a global
- only used from other compilation units. XXX: Maybe static
- objects could require those attributes being set. */
-
- mudflap_register_call (obj, object_size, mf_varname_tree (obj));
- }
- else
- {
- size_t i;
-
- if (! deferred_static_decls)
- VARRAY_TREE_INIT (deferred_static_decls, 10, "deferred static list");
-
- /* Ugh, linear search... */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (deferred_static_decls); i++)
- if (VARRAY_TREE (deferred_static_decls, i) == obj)
- {
- warning ("mudflap cannot track lifetime of `%s'",
- IDENTIFIER_POINTER (DECL_NAME (obj)));
- return;
- }
-
- VARRAY_PUSH_TREE (deferred_static_decls, obj);
- }
+ VEC_safe_push (tree, gc, deferred_static_decls, obj);
}
+
void
mudflap_enqueue_constant (tree obj)
{
return;
if (TREE_CODE (obj) == STRING_CST)
- object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj), 0);
+ object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
else
object_size = size_in_bytes (TREE_TYPE (obj));
- if (dump_file)
- {
- fprintf (dump_file, "enqueue_constant obj=`");
- print_generic_expr (dump_file, obj, dump_flags);
- fprintf (dump_file, "' size=");
- print_generic_expr (dump_file, object_size, dump_flags);
- fprintf (dump_file, "\n");
- }
-
if (TREE_CODE (obj) == STRING_CST)
varname = mf_build_string ("string literal");
else
{
tree ctor_statements = NULL_TREE;
- /* Try to give the deferred objects one final try. */
- if (deferred_static_decls)
- {
- size_t i;
-
- for (i = 0; i < VARRAY_ACTIVE_SIZE (deferred_static_decls); i++)
- {
- tree obj = VARRAY_TREE (deferred_static_decls, i);
-
- /* Call enqueue_decl again on the same object it has previously
- put into the table. (It won't modify the table this time, so
- infinite iteration is not a problem.) */
- mudflap_enqueue_decl (obj);
- }
-
- VARRAY_CLEAR (deferred_static_decls);
- }
+ /* No need to continue when there were errors. */
+ if (errorcount != 0 || sorrycount != 0)
+ return;
/* Insert a call to __mf_init. */
{
append_to_statement_list (call_stmt, &ctor_statements);
}
+ /* Process all enqueued object decls. */
+ if (deferred_static_decls)
+ {
+ size_t i;
+ tree obj;
+ for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
+ {
+ gcc_assert (DECL_P (obj));
+
+ if (mf_marked_p (obj))
+ continue;
+
+ /* Omit registration for static unaddressed objects. NB:
+ Perform registration for non-static objects regardless of
+ TREE_USED or TREE_ADDRESSABLE, because they may be used
+ from other compilation units. */
+ if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
+ continue;
+
+ if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
+ {
+ warning (0, "mudflap cannot track unknown size extern %qs",
+ IDENTIFIER_POINTER (DECL_NAME (obj)));
+ continue;
+ }
+
+ mudflap_register_call (obj,
+ size_in_bytes (TREE_TYPE (obj)),
+ mf_varname_tree (obj));
+ }
+
+ VEC_truncate (tree, deferred_static_decls, 0);
+ }
+
/* Append all the enqueued registration calls. */
if (enqueued_call_stmt_chain)
{
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
struct tree_opt_pass pass_mudflap_2 =
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_verify_flow | TODO_verify_stmts
- | TODO_dump_func /* todo_flags_finish */
+ | TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
#include "gt-tree-mudflap.h"