1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
30 #include "basic-block.h"
33 #include "tree-inline.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
41 #include "diagnostic.h"
43 #include "langhooks.h"
48 /* Internal function decls */
/* flag_mudflap == 2 selects the thread-safe variant of instrumentation
   (no per-function caching of the lookup-cache globals).  */
52 #define flag_mudflap_threads (flag_mudflap == 2)
/* Helpers that build STRING_CST descriptors for diagnostics.  */
55 static tree mf_build_string (const char *string);
56 static tree mf_varname_tree (tree);
57 static tree mf_file_function_line_tree (location_t);
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq, tree);
67 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
68 struct walk_stmt_info *);
69 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
70 static unsigned int execute_mudflap_function_decls (void);
72 /* Return true if DECL is artificial stub that shouldn't be instrumented by
73 mf. We should instrument clones of non-artificial functions. */
/* NOTE(review): this numbered listing omits original lines 74/76/78
   (return-type line and braces); the code below is the visible body.  */
75 mf_artificial (const_tree decl)
/* DECL_ORIGIN maps a clone back to the decl it was cloned from, so clones
   of user-written functions are still considered non-artificial.  */
77 return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
80 /* ------------------------------------------------------------------------ */
81 /* Some generally helpful functions for mudflap instrumentation. */
83 /* Build a reference to a literal string. */
/* Returns a marked ADDR_EXPR of type (char *) pointing at a constant,
   static, NUL-terminated STRING_CST copy of STRING.
   NOTE(review): return-type line and braces are elided in this listing.  */
85 mf_build_string (const char *string)
87 size_t len = strlen (string);
/* len + 1 so the copied string includes the trailing NUL.  */
88 tree result = mf_mark (build_string (len + 1, string));
/* Give the STRING_CST an array-of-char type with index range [0, len].  */
90 TREE_TYPE (result) = build_array_type
91 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
92 TREE_CONSTANT (result) = 1;
93 TREE_READONLY (result) = 1;
94 TREE_STATIC (result) = 1;
96 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
/* Mark both the string and the address so later passes skip them.  */
98 return mf_mark (result);
101 /* Create a properly typed STRING_CST node that describes the given
102 declaration. It will be used as an argument for __mf_register().
103 Try to construct a helpful string, including file/function/variable
/* Format produced (from the code below):
   FILE[:LINE[:COLUMN]] [(FUNCTION)] NAME
   NOTE(review): several original lines are elided in this listing
   (e.g. 104-106, 113-118, 120-121), including the signature's return
   type, braces, the one-time pretty-printer initialization guard, and
   the final return; only the visible statements are documented here.  */
107 mf_varname_tree (tree decl)
/* A single static pretty-printer is reused across calls; its output
   area is cleared before and after each use.  */
109 static pretty_printer buf_rec;
110 static int initialized = 0;
111 pretty_printer *buf = & buf_rec;
112 const char *buf_contents;
119 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
122 pp_clear_output_area (buf);
124 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
126 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
127 const char *sourcefile;
128 unsigned sourceline = xloc.line;
129 unsigned sourcecolumn = 0;
130 sourcecolumn = xloc.column;
131 sourcefile = xloc.file;
/* Fall back to the enclosing function's file, then a placeholder.  */
132 if (sourcefile == NULL && current_function_decl != NULL_TREE)
133 sourcefile = DECL_SOURCE_FILE (current_function_decl);
134 if (sourcefile == NULL)
135 sourcefile = "<unknown file>";
137 pp_string (buf, sourcefile);
141 pp_string (buf, ":");
142 pp_decimal_int (buf, sourceline);
/* Column 0 means "unknown column"; omit it in that case.  */
144 if (sourcecolumn != 0)
146 pp_string (buf, ":");
147 pp_decimal_int (buf, sourcecolumn);
/* Add the enclosing function name in parentheses, when known.  */
152 if (current_function_decl != NULL_TREE)
155 pp_string (buf, " (");
157 const char *funcname = NULL;
158 if (DECL_NAME (current_function_decl))
159 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
160 if (funcname == NULL)
161 funcname = "anonymous fn";
163 pp_string (buf, funcname);
165 pp_string (buf, ") ");
168 pp_string (buf, " ");
170 /* Add <variable-declaration>, possibly demangled. */
172 const char *declname = NULL;
174 if (DECL_NAME (decl) != NULL)
176 if (strcmp ("GNU C++", lang_hooks.name) == 0)
178 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
179 the libiberty demangler. */
180 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
181 DMGL_AUTO | DMGL_VERBOSE);
/* Demangling may fail (or the language isn't C++): fall back to the
   language hook, then to a placeholder.  */
183 if (declname == NULL)
184 declname = lang_hooks.decl_printable_name (decl, 3);
186 if (declname == NULL)
187 declname = "<unnamed variable>";
189 pp_string (buf, declname);
192 /* Return the lot as a new STRING_CST. */
193 buf_contents = pp_base_formatted_text (buf);
194 result = mf_build_string (buf_contents);
195 pp_clear_output_area (buf);
201 /* And another friend, for producing a simpler message. */
/* Builds a "FILE:LINE[:COL] (FUNCTION)" STRING_CST for a source LOCATION.
   NOTE(review): this listing elides several lines of the function
   (e.g. the signature's return type, braces, `file = xloc.file;`, the
   colon/op/cp setup between lines 216-240, and the free/return tail);
   the statements below are only the visible subset.  */
204 mf_file_function_line_tree (location_t location)
206 expanded_location xloc = expand_location (location);
207 const char *file = NULL, *colon, *line, *op, *name, *cp;
208 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
212 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
/* Same file-name fallback chain as mf_varname_tree.  */
214 if (file == NULL && current_function_decl != NULL_TREE)
215 file = DECL_SOURCE_FILE (current_function_decl);
217 file = "<unknown file>";
/* Include the column only when available (branch condition elided).  */
222 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
224 sprintf (linecolbuf, "%d", xloc.line);
231 /* Add (FUNCTION). */
232 name = lang_hooks.decl_printable_name (current_function_decl, 1);
/* concat() heap-allocates the assembled message; mf_build_string copies
   it into a STRING_CST.  */
241 string = concat (file, colon, line, op, name, cp, NULL);
242 result = mf_build_string (string);
249 /* global tree nodes */
251 /* Global tree objects for global variables and functions exported by
252 mudflap runtime library. mf_init_extern_trees must be called
253 before using these. */
/* All of these are GC-rooted (GTY) and filled in by mudflap_init below.  */
255 /* uintptr_t (usually "unsigned long") */
256 static GTY (()) tree mf_uintptr_type;
258 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
259 static GTY (()) tree mf_cache_struct_type;
261 /* struct __mf_cache * const */
262 static GTY (()) tree mf_cache_structptr_type;
264 /* extern struct __mf_cache __mf_lookup_cache []; */
265 static GTY (()) tree mf_cache_array_decl;
267 /* extern unsigned char __mf_lc_shift; */
268 static GTY (()) tree mf_cache_shift_decl;
270 /* extern uintptr_t __mf_lc_mask; */
271 static GTY (()) tree mf_cache_mask_decl;
273 /* Their function-scope local shadows, used in single-threaded mode only. */
275 /* auto const unsigned char __mf_lc_shift_l; */
276 static GTY (()) tree mf_cache_shift_decl_l;
278 /* auto const uintptr_t __mf_lc_mask_l; */
279 static GTY (()) tree mf_cache_mask_decl_l;
281 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
282 static GTY (()) tree mf_check_fndecl;
284 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
285 static GTY (()) tree mf_register_fndecl;
287 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
288 static GTY (()) tree mf_unregister_fndecl;
290 /* extern void __mf_init (); */
291 static GTY (()) tree mf_init_fndecl;
293 /* extern int __mf_set_options (const char*); */
294 static GTY (()) tree mf_set_options_fndecl;
297 /* Helper for mudflap_init: construct a decl with the given category,
298 name, and type, mark it an external reference, and pushdecl it. */
/* NOTE(review): the return-type line, braces, and the final
   `return decl;` (original lines 299/301/311-312) are elided here.  */
300 mf_make_builtin (enum tree_code category, const char *name, tree type)
302 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
303 category, get_identifier (name), type));
/* Externally-defined, publicly-visible: the definitions live in the
   libmudflap runtime, not in this translation unit.  */
304 TREE_PUBLIC (decl) = 1;
305 DECL_EXTERNAL (decl) = 1;
306 lang_hooks.decls.pushdecl (decl);
307 /* The decl was declared by the compiler. */
308 DECL_ARTIFICIAL (decl) = 1;
309 /* And we don't want debug info for it. */
310 DECL_IGNORED_P (decl) = 1;
314 /* Helper for mudflap_init: construct a tree corresponding to the type
315 struct __mf_cache { uintptr_t low; uintptr_t high; };
316 where uintptr_t is the FIELD_TYPE argument. */
/* NOTE(review): signature return type, braces, and the trailing
   `return struct_type;` are elided from this listing.  */
318 mf_make_mf_cache_struct_type (tree field_type)
320 /* There is, abominably, no language-independent way to construct a
321 RECORD_TYPE. So we have to call the basic type construction
322 primitives by hand. */
323 tree fieldlo = build_decl (UNKNOWN_LOCATION,
324 FIELD_DECL, get_identifier ("low"), field_type);
325 tree fieldhi = build_decl (UNKNOWN_LOCATION,
326 FIELD_DECL, get_identifier ("high"), field_type);
328 tree struct_type = make_node (RECORD_TYPE);
329 DECL_CONTEXT (fieldlo) = struct_type;
330 DECL_CONTEXT (fieldhi) = struct_type;
/* Chain the fields: low, then high, attached to the record.  */
331 DECL_CHAIN (fieldlo) = fieldhi;
332 TYPE_FIELDS (struct_type) = fieldlo;
333 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
/* Compute the record's size/alignment now that fields are in place.  */
334 layout_type (struct_type);
339 /* Initialize the global tree nodes that correspond to mf-runtime.h
/* Builds the types and extern decls for the libmudflap runtime interface
   (lookup cache array/shift/mask, __mf_check/__mf_register/
   __mf_unregister/__mf_init/__mf_set_options).  Guarded by a `done`
   flag so it runs once.
   NOTE(review): this listing elides the function header (original
   lines 340-343), the `if (done) return; done = true;` guard
   (351-355), and parts of several statements (e.g. the target of the
   assignment at 358-360 — presumably mf_const_string_type — and the
   second argument of type_for_mode at 356-357); treat the sequence
   below as a visible subset.  */
344 static bool done = false;
345 tree mf_const_string_type;
346 tree mf_cache_array_type;
347 tree mf_check_register_fntype;
348 tree mf_unregister_fntype;
350 tree mf_set_options_fntype;
356 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
/* const char * — used for the name/location string parameters.  */
359 = build_pointer_type (build_qualified_type
360 (char_type_node, TYPE_QUAL_CONST));
362 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
363 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
364 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
/* __mf_check and __mf_register share a signature:
   (void *, size_t, int, const char *) -> void.  */
365 mf_check_register_fntype =
366 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
367 integer_type_node, mf_const_string_type, NULL_TREE);
368 mf_unregister_fntype =
369 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
370 integer_type_node, NULL_TREE);
372 build_function_type_list (void_type_node, NULL_TREE);
373 mf_set_options_fntype =
374 build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);
376 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
377 mf_cache_array_type);
378 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
379 unsigned_char_type_node);
380 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
382 /* Don't process these in mudflap_enqueue_decl, should they come by
383 there for some reason. */
384 mf_mark (mf_cache_array_decl);
385 mf_mark (mf_cache_shift_decl);
386 mf_mark (mf_cache_mask_decl);
387 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
388 mf_check_register_fntype);
389 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
390 mf_check_register_fntype);
391 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
392 mf_unregister_fntype);
393 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
395 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
396 mf_set_options_fntype);
400 /* ------------------------------------------------------------------------ */
401 /* This is the second part of the mudflap instrumentation. It works on
402 low-level GIMPLE using the CFG, because we want to run this pass after
403 tree optimizations have been performed, but we have to preserve the CFG
404 for expansion from trees to RTL.
405 Below is the list of transformations performed on statements in the
408 1) Memory reference transforms: Perform the mudflap indirection-related
409 tree transforms on memory references.
411 2) Mark BUILTIN_ALLOCA calls not inlineable.
/* NOTE(review): the `static unsigned int` line, braces, and the early
   `return 0;` in the skip branch (and final return) are elided here.  */
416 execute_mudflap_function_ops (void)
418 struct gimplify_ctx gctx;
420 /* Don't instrument functions such as the synthetic constructor
421 built during mudflap_finish_file. */
422 if (mf_marked_p (current_function_decl)
423 || mf_artificial (current_function_decl))
426 push_gimplify_context (&gctx);
428 /* In multithreaded mode, don't cache the lookup cache parameters. */
429 if (! flag_mudflap_threads)
430 mf_decl_cache_locals ();
/* The main per-statement memory-reference transform.  */
432 mf_xform_statements ();
434 if (! flag_mudflap_threads)
435 mf_decl_clear_locals ();
437 pop_gimplify_context (NULL);
441 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
442 if BB has more than one edge, STMT will be replicated for each edge.
443 Also, abnormal edges will be ignored. */
/* NOTE(review): this listing elides the signature line, braces, the
   edge/edge_iterator declarations, and the body of the first loop
   (presumably `n_copies++;`, which is why n_copies starts at -1: the
   LAST non-abnormal edge gets SEQ itself, earlier ones get copies).
   Confirm against the full source before editing.  */
446 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
450 unsigned n_copies = -1;
/* First pass: count non-abnormal successor edges (body elided).  */
452 FOR_EACH_EDGE (e, ei, bb->succs)
453 if (!(e->flags & EDGE_ABNORMAL))
/* Second pass: insert a fresh copy on each edge except the last,
   which receives the original sequence.  */
456 FOR_EACH_EDGE (e, ei, bb->succs)
457 if (!(e->flags & EDGE_ABNORMAL))
458 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
461 /* Create and initialize local shadow variables for the lookup cache
462 globals. Put their decls in the *_l globals for use by
463 mf_build_check_statement_for. */
/* Single-threaded mode only: loading __mf_lc_shift/__mf_lc_mask once per
   function is safe because no other thread can change them mid-function.
   NOTE(review): the `static void` line, braces, the `gimple g;`
   declaration, and the assignment target at original line 476
   (presumably mf_cache_mask_decl_l) are elided in this listing.  */
466 mf_decl_cache_locals (void)
469 gimple_seq seq = gimple_seq_alloc ();
471 /* Build the cache vars. */
472 mf_cache_shift_decl_l
473 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
474 "__mf_lookup_shift_l"));
477 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
478 "__mf_lookup_mask_l"));
480 /* Build initialization nodes for the cache vars. We just load the
481 globals into the cache variables. */
482 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
483 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
484 gimple_seq_add_stmt (&seq, g);
486 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
487 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
488 gimple_seq_add_stmt (&seq, g);
/* Place the loads on the function's entry edge(s).  */
490 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
492 gsi_commit_edge_inserts ();
/* Forget the per-function shadow decls after instrumentation so the
   next function starts clean.  NOTE(review): `static void` line and
   braces are elided in this listing.  */
497 mf_decl_clear_locals (void)
499 /* Unset local shadows. */
500 mf_cache_shift_decl_l = NULL_TREE;
501 mf_cache_mask_decl_l = NULL_TREE;
/* Emit the inline lookup-cache check for the address range
   [BASE, LIMIT] before the statement at *INSTR_GSI:

     __mf_base  = (uintptr_t) BASE;
     __mf_limit = (uintptr_t) LIMIT;
     __mf_elem  = &__mf_lookup_cache[(__mf_base >> shift) & mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check (__mf_base, __mf_limit - __mf_base + 1, DIRFLAG, "...");

   This splits the current basic block: COND_BB ends with the
   conditional, THEN_BB holds the rare __mf_check call, JOIN_BB resumes
   at the instrumented statement.  *INSTR_GSI is repointed to the start
   of JOIN_BB on exit.

   NOTE(review): this listing elides many original lines (signature
   return type, braces, several local declarations such as
   t/u/v/cond/g/e, the join_bb assignment after the split, label
   creation for the cond jump, and the then_bb edge bookkeeping in the
   flag_mudflap_threads branch).  The code below is byte-identical to
   the visible subset; do not restructure without the full source.  */
505 mf_build_check_statement_for (tree base, tree limit,
506 gimple_stmt_iterator *instr_gsi,
507 location_t location, tree dirflag)
509 gimple_stmt_iterator gsi;
510 basic_block cond_bb, then_bb, join_bb;
517 gimple_seq seq, stmts;
519 /* We first need to split the current basic block, and start altering
520 the CFG. This allows us to insert the statements we're about to
521 construct into the right basic blocks. */
523 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
/* Split either after the previous statement or after the labels
   (the gsi_prev step between lines 523 and 526 is elided).  */
526 if (! gsi_end_p (gsi))
527 e = split_block (cond_bb, gsi_stmt (gsi));
529 e = split_block_after_labels (cond_bb);
533 /* A recap at this point: join_bb is the basic block at whose head
534 is the gimple statement for which this check expression is being
535 built. cond_bb is the (possibly new, synthetic) basic block the
536 end of which will contain the cache-lookup code, and a
537 conditional that jumps to the cache-miss code or, much more
538 likely, over to join_bb. */
540 /* Create the bb that contains the cache-miss fallback block (mf_check). */
541 then_bb = create_empty_bb (cond_bb);
542 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
543 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
545 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
546 e = find_edge (cond_bb, join_bb);
547 e->flags = EDGE_FALSE_VALUE;
548 e->count = cond_bb->count;
/* The false edge (cache hit) is taken essentially always.  */
549 e->probability = REG_BR_PROB_BASE;
551 /* Update dominance info. Note that bb_join's data was
552 updated by split_block. */
553 if (dom_info_available_p (CDI_DOMINATORS))
555 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
556 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
559 /* Build our local variables. */
560 mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
561 mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
562 mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");
564 /* Build: __mf_base = (uintptr_t) <base address expression>. */
565 seq = gimple_seq_alloc ();
566 t = fold_convert_loc (location, mf_uintptr_type,
567 unshare_expr (base));
568 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
569 gimple_seq_add_seq (&seq, stmts);
570 g = gimple_build_assign (mf_base, t);
571 gimple_set_location (g, location);
572 gimple_seq_add_stmt (&seq, g);
574 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
575 t = fold_convert_loc (location, mf_uintptr_type,
576 unshare_expr (limit));
577 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
578 gimple_seq_add_seq (&seq, stmts);
579 g = gimple_build_assign (mf_limit, t);
580 gimple_set_location (g, location);
581 gimple_seq_add_stmt (&seq, g);
583 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
/* In threaded mode read the globals directly; otherwise use the
   per-function shadow copies set up by mf_decl_cache_locals.  */
585 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
586 flag_mudflap_threads ? mf_cache_shift_decl
587 : mf_cache_shift_decl_l);
588 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
589 flag_mudflap_threads ? mf_cache_mask_decl
590 : mf_cache_mask_decl_l);
591 t = build4 (ARRAY_REF,
592 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
593 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
594 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
595 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
596 gimple_seq_add_seq (&seq, stmts);
597 g = gimple_build_assign (mf_elem, t);
598 gimple_set_location (g, location);
599 gimple_seq_add_stmt (&seq, g);
601 /* Quick validity check.
603 if (__mf_elem->low > __mf_base
604 || (__mf_elem_high < __mf_limit))
607 ... and only if single-threaded:
608 __mf_lookup_shift_1 = f...;
609 __mf_lookup_mask_l = ...;
612 It is expected that this body of code is rarely executed so we mark
613 the edge to the THEN clause of the conditional jump as unlikely. */
615 /* Construct t <-- '__mf_elem->low > __mf_base'. */
/* TYPE_FIELDS(__mf_cache) is the "low" field; DECL_CHAIN of it (below)
   is "high" — see mf_make_mf_cache_struct_type.  */
616 t = build3 (COMPONENT_REF, mf_uintptr_type,
617 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
618 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
619 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
621 /* Construct '__mf_elem->high < __mf_limit'.
624 1) u <-- '__mf_elem->high'
625 2) v <-- '__mf_limit'.
627 Then build 'u <-- (u < v). */
629 u = build3 (COMPONENT_REF, mf_uintptr_type,
630 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
631 DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
635 u = build2 (LT_EXPR, boolean_type_node, u, v);
637 /* Build the composed conditional: t <-- 't || u'. Then store the
638 result of the evaluation of 't' in a temporary variable which we
639 can use as the condition for the conditional jump. */
640 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
641 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
642 gimple_seq_add_seq (&seq, stmts);
643 cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
644 g = gimple_build_assign (cond, t);
645 gimple_set_location (g, location);
646 gimple_seq_add_stmt (&seq, g);
648 /* Build the conditional jump. 'cond' is just a temporary so we can
649 simply build a void COND_EXPR. We do need labels in both arms though. */
650 g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
652 gimple_set_location (g, location);
653 gimple_seq_add_stmt (&seq, g);
655 /* At this point, after so much hard work, we have only constructed
656 the conditional jump,
658 if (__mf_elem->low > __mf_base
659 || (__mf_elem_high < __mf_limit))
661 The lowered GIMPLE tree representing this code is in the statement
662 list starting at 'head'.
664 We can insert this now in the current basic block, i.e. the one that
665 the statement we're instrumenting was originally in. */
666 gsi = gsi_last_bb (cond_bb);
667 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
669 /* Now build up the body of the cache-miss handling:
674 This is the body of the conditional. */
676 seq = gimple_seq_alloc ();
677 /* u is a string, so it is already a gimple value. */
678 u = mf_file_function_line_tree (location);
679 /* NB: we pass the overall [base..limit] range to mf_check. */
680 v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
681 fold_build2_loc (location,
682 MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
683 build_int_cst (mf_uintptr_type, 1));
684 v = force_gimple_operand (v, &stmts, true, NULL_TREE);
685 gimple_seq_add_seq (&seq, stmts);
686 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
687 gimple_seq_add_stmt (&seq, g);
/* Single-threaded: refresh the shadow copies after __mf_check, since
   the runtime may have resized/reshaped the lookup cache.  */
689 if (! flag_mudflap_threads)
691 if (stmt_ends_bb_p (g))
693 gsi = gsi_start_bb (then_bb);
694 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
695 e = split_block (then_bb, g);
697 seq = gimple_seq_alloc ();
700 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
701 gimple_seq_add_stmt (&seq, g);
703 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
704 gimple_seq_add_stmt (&seq, g);
707 /* Insert the check code in the THEN block. */
708 gsi = gsi_start_bb (then_bb);
709 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
/* Hand the caller an iterator at the head of the join block, where the
   original (now-checked) statement lives.  */
711 *instr_gsi = gsi_start_bb (join_bb);
715 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
716 eligible for instrumentation. For the mudflap1 pass, this implies
717 that it should be registered with the libmudflap runtime. For the
718 mudflap2 pass this means instrumenting an indirection operation with
719 respect to the object.
/* NOTE(review): the `static bool` line and braces (original lines
   720-721, 723, 734) are elided from this listing.  */
722 mf_decl_eligible_p (tree decl)
724 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
725 /* The decl must have its address taken. In the case of
726 arrays, this flag is also set if the indexes are not
727 compile-time known valid constants. */
728 /* XXX: not sufficient: return-by-value structs! */
729 && TREE_ADDRESSABLE (decl)
730 /* The type of the variable must be complete. */
731 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
732 /* The decl hasn't been decomposed somehow. */
733 && !DECL_HAS_VALUE_EXPR_P (decl));
/* Instrument one memory reference *TP found in the statement at *ITER.
   Computes a BASE and LIMIT address for the access and hands them to
   mf_build_check_statement_for.  DIRFLAG is integer_zero_node for reads
   and integer_one_node for writes (see mf_xform_statements).

   NOTE(review): this listing is heavily elided — the `static void`
   line, braces, `t = *tp;`, several case labels of the switch
   (ARRAY_REF/COMPONENT_REF, INDIRECT_REF, MEM_REF, TARGET_MEM_REF,
   BIT_FIELD_REF, default), various `return;` statements, and closing
   arguments of several fold_build2_loc calls are not visible.  The
   code below is byte-identical to the visible subset.  */
738 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
739 location_t location, tree dirflag)
741 tree type, base, limit, addr, size, t;
743 /* Don't instrument read operations. */
744 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
747 /* Don't instrument marked nodes. */
748 if (mf_marked_p (*tp))
752 type = TREE_TYPE (t);
754 if (type == error_mark_node)
/* Default access size; may be overridden for bitfields below.  */
757 size = TYPE_SIZE_UNIT (type);
759 switch (TREE_CODE (t))
764 /* This is trickier than it may first appear. The reason is
765 that we are looking at expressions from the "inside out" at
766 this point. We may have a complex nested aggregate/array
767 expression (e.g. "a.b[i].c"), maybe with an indirection as
768 the leftmost operator ("p->a.b.d"), where instrumentation
769 is necessary. Or we may have an innocent "a.b.c"
770 expression that must not be instrumented. We need to
771 recurse all the way down the nesting structure to figure it
772 out: looking just at the outer node is not enough. */
774 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
775 /* If we have a bitfield component reference, we must note the
776 innermost addressable object in ELT, from which we will
777 construct the byte-addressable bounds of the bitfield. */
778 tree elt = NULL_TREE;
779 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
780 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
782 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
783 containment hierarchy to find the outermost VAR_DECL. */
784 var = TREE_OPERAND (t, 0);
/* Record the innermost addressable object for a bitfield access
   (the assignment to elt between lines 788 and 792 is elided).  */
787 if (bitfield_ref_p && elt == NULL_TREE
788 && (TREE_CODE (var) == ARRAY_REF
789 || TREE_CODE (var) == COMPONENT_REF))
792 if (TREE_CODE (var) == ARRAY_REF)
/* Any array indexing means this is not a plain a.b.c access.  */
794 component_ref_only = 0;
795 var = TREE_OPERAND (var, 0);
797 else if (TREE_CODE (var) == COMPONENT_REF)
798 var = TREE_OPERAND (var, 0);
799 else if (INDIRECT_REF_P (var)
800 || TREE_CODE (var) == MEM_REF)
/* Reached a pointer dereference: its operand is the base.  */
802 base = TREE_OPERAND (var, 0);
805 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
807 var = TREE_OPERAND (var, 0);
/* Constants other than string literals need no checking
   (the early-exit between lines 809 and 814 is elided).  */
808 if (CONSTANT_CLASS_P (var)
809 && TREE_CODE (var) != STRING_CST)
814 gcc_assert (TREE_CODE (var) == VAR_DECL
815 || TREE_CODE (var) == PARM_DECL
816 || TREE_CODE (var) == RESULT_DECL
817 || TREE_CODE (var) == STRING_CST);
818 /* Don't instrument this access if the underlying
819 variable is not "eligible". This test matches
820 those arrays that have only known-valid indexes,
821 and thus are not labeled TREE_ADDRESSABLE. */
822 if (! mf_decl_eligible_p (var) || component_ref_only)
826 base = build1 (ADDR_EXPR,
827 build_pointer_type (TREE_TYPE (var)), var);
833 /* Handle the case of ordinary non-indirection structure
834 accesses. These have only nested COMPONENT_REF nodes (no
835 INDIRECT_REF), but pass through the above filter loop.
836 Note that it's possible for such a struct variable to match
837 the eligible_p test because someone else might take its
840 /* We need special processing for bitfield components, because
841 their addresses cannot be taken. */
844 tree field = TREE_OPERAND (t, 1);
/* For a bitfield, use the byte size of its declared field when
   it is a compile-time constant.  */
846 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
847 size = DECL_SIZE_UNIT (field);
850 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
852 addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
853 addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
854 addr, fold_convert_loc (location, sizetype,
855 byte_position (field)));
858 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
/* limit = addr + size - 1 (inclusive upper bound); trailing
   arguments of this fold_build2_loc are elided.  */
860 limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
861 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
862 convert (mf_uintptr_type, addr),
/* INDIRECT_REF-style case: the pointer operand is the address.  */
869 addr = TREE_OPERAND (t, 0);
871 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
872 fold_build2_loc (location,
873 POINTER_PLUS_EXPR, ptr_type_node, base,
/* MEM_REF-style case: address is pointer + constant offset.  */
879 addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)),
881 fold_convert (sizetype, TREE_OPERAND (t, 1)));
883 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
884 fold_build2_loc (location,
885 POINTER_PLUS_EXPR, ptr_type_node, base,
/* TARGET_MEM_REF-style case: let the helper compose the address.  */
891 addr = tree_mem_ref_addr (ptr_type_node, t);
893 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
894 fold_build2_loc (location,
895 POINTER_PLUS_EXPR, ptr_type_node, base,
900 case ARRAY_RANGE_REF:
901 warning (OPT_Wmudflap,
902 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
906 /* ??? merge with COMPONENT_REF code above? */
910 /* If we're not dereferencing something, then the access
912 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
/* BIT_FIELD_REF: convert the bit offset/size into a byte range.  */
915 bpu = bitsize_int (BITS_PER_UNIT);
916 ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
917 rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
918 ofs = fold_convert_loc (location,
920 size_binop_loc (location,
921 TRUNC_DIV_EXPR, ofs, bpu));
923 size = convert (bitsizetype, TREE_OPERAND (t, 1));
924 size = size_binop_loc (location, PLUS_EXPR, size, rem);
925 size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
926 size = convert (sizetype, size);
928 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
929 addr = convert (ptr_type_node, addr);
930 addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
931 ptr_type_node, addr, ofs);
934 limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
935 fold_build2_loc (location,
936 POINTER_PLUS_EXPR, ptr_type_node,
/* All cases funnel into the common check emitter.  */
946 mf_build_check_statement_for (base, limit, iter, location, dirflag);
949 1) Memory references.
950 2) BUILTIN_ALLOCA calls.
/* Walk every statement of every basic block that existed before
   instrumentation started (new blocks created by the checks themselves
   are skipped via saved_last_basic_block) and instrument assignments,
   returns, and alloca calls.
   NOTE(review): the `static void` line, braces, the do/while loop
   opener, the GIMPLE_ASSIGN/GIMPLE_RETURN/GIMPLE_CALL case labels, the
   `next = bb->next_bb` bookkeeping, and the loop closer are elided.  */
953 mf_xform_statements (void)
955 basic_block bb, next;
956 gimple_stmt_iterator i;
957 int saved_last_basic_block = last_basic_block;
958 enum gimple_rhs_class grhs_class;
960 bb = ENTRY_BLOCK_PTR ->next_bb;
964 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
966 gimple s = gsi_stmt (i);
968 /* Only a few GIMPLE statements can reference memory. */
969 switch (gimple_code (s))
/* Assignments: LHS is a write (dirflag 1), RHS operands are reads
   (dirflag 0).  */
972 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
973 gimple_location (s), integer_one_node);
974 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
975 gimple_location (s), integer_zero_node);
976 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
977 if (grhs_class == GIMPLE_BINARY_RHS)
978 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
979 gimple_location (s), integer_zero_node);
/* Returns: the returned value is a read.  */
983 if (gimple_return_retval (s) != NULL_TREE)
985 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
/* Calls: forbid inlining of alloca so the runtime sees the frame.  */
993 tree fndecl = gimple_call_fndecl (s);
994 if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
995 gimple_call_set_cannot_inline (s, true);
1005 while (bb && bb->index <= saved_last_basic_block);
1008 /* ------------------------------------------------------------------------ */
1009 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
1010 transforms on the current function.
1012 This is the first part of the mudflap instrumentation. It works on
1013 high-level GIMPLE because after lowering, all variables are moved out
1014 of their BIND_EXPR binding context, and we lose liveness information
1015 for the declarations we wish to instrument. */
/* NOTE(review): the `static unsigned int` line, braces, the skip-branch
   `return 0;`, and the final return are elided from this listing.  */
1018 execute_mudflap_function_decls (void)
1020 struct gimplify_ctx gctx;
1022 /* Don't instrument functions such as the synthetic constructor
1023 built during mudflap_finish_file. */
1024 if (mf_marked_p (current_function_decl)
1025 || mf_artificial (current_function_decl))
1028 push_gimplify_context (&gctx);
/* Register every eligible local and parameter with the runtime.  */
1030 mf_xform_decls (gimple_body (current_function_decl),
1031 DECL_ARGUMENTS (current_function_decl));
1033 pop_gimplify_context (NULL);
1037 /* This struct is passed between mf_xform_decls to store state needed
1038 during the traversal searching for objects that have their
/* NOTE(review): the struct members (including `tree param_decls`, used
   by mx_xfn_xform_decls below) are elided from this listing.  */
1040 struct mf_xform_decls_data
1046 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
1047 _DECLs if appropriate. Arrange to call the __mf_register function
1048 now, and the __mf_unregister function later for each. Return the
1049 gimple sequence after synthesis. */
/* NOTE(review): the `static gimple_seq` line, braces, parts of the
   ADDR_EXPR/call argument lists (e.g. the decl operand and size/name
   arguments at original lines 1078, 1082, 1090, 1095-1099), the
   warning's decl argument, and the two return statements are elided.  */
1051 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1053 gimple_seq finally_stmts = NULL;
1054 gimple_stmt_iterator initially_stmts = gsi_start (seq);
/* Walk the DECL_CHAIN of locals/params.  */
1056 while (decl != NULL_TREE)
1058 if (mf_decl_eligible_p (decl)
1059 /* Not already processed. */
1060 && ! mf_marked_p (decl)
1061 /* Automatic variable. */
1062 && ! DECL_EXTERNAL (decl)
1063 && ! TREE_STATIC (decl))
1065 tree size = NULL_TREE, variable_name;
1066 gimple unregister_fncall, register_fncall;
1067 tree unregister_fncall_param, register_fncall_param;
1069 /* Variable-sized objects should have sizes already been
1070 gimplified when we got here. */
1071 size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1072 gcc_assert (is_gimple_val (size));
1075 unregister_fncall_param =
1076 mf_mark (build1 (ADDR_EXPR,
1077 build_pointer_type (TREE_TYPE (decl)),
1079 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
/* integer_three_node is __MF_TYPE_STACK per the comment above.  */
1080 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1081 unregister_fncall_param,
1083 integer_three_node);
1086 variable_name = mf_varname_tree (decl);
1087 register_fncall_param =
1088 mf_mark (build1 (ADDR_EXPR,
1089 build_pointer_type (TREE_TYPE (decl)),
1091 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1093 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1094 register_fncall_param,
1100 /* Accumulate the two calls. */
1101 gimple_set_location (register_fncall, location);
1102 gimple_set_location (unregister_fncall, location);
1104 /* Add the __mf_register call at the current appending point. */
1105 if (gsi_end_p (initially_stmts))
/* Empty body: nothing to attach the registration to; warn
   only for user-written (non-artificial) decls.  */
1107 if (!mf_artificial (decl))
1108 warning (OPT_Wmudflap,
1109 "mudflap cannot track %qE in stub function",
1114 gsi_insert_before (&initially_stmts, register_fncall,
1117 /* Accumulate the FINALLY piece. */
1118 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1123 decl = DECL_CHAIN (decl);
1126 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1127 if (finally_stmts != NULL)
/* Wrap the body so unregistration runs on every exit path.  */
1129 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1130 gimple_seq new_seq = gimple_seq_alloc ();
1132 gimple_seq_add_stmt (&new_seq, stmt);
1140 /* Process every variable mentioned in BIND_EXPRs. */
/* walk_gimple_seq callback: for each GIMPLE_BIND, register its bound
   variables (and, on the first/outermost bind, the function parameters)
   via mx_register_decls.
   NOTE(review): the `static tree` line, braces, the GIMPLE_BIND case
   label, the first/outermost-bind condition guarding the parameter
   registration, and the NULL_TREE return are elided from this listing.  */
1142 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1143 bool *handled_operands_p ATTRIBUTE_UNUSED,
1144 struct walk_stmt_info *wi)
1146 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1147 gimple stmt = gsi_stmt (*gsi);
1149 switch (gimple_code (stmt))
1153 /* Process function parameters now (but only once). */
1156 gimple_bind_set_body (stmt,
1157 mx_register_decls (d->param_decls,
1158 gimple_bind_body (stmt),
1159 gimple_location (stmt)));
/* Clear so parameters are registered only on the first bind.  */
1160 d->param_decls = NULL_TREE;
1163 gimple_bind_set_body (stmt,
1164 mx_register_decls (gimple_bind_vars (stmt),
1165 gimple_bind_body (stmt),
1166 gimple_location (stmt)));
1177 /* Perform the object lifetime tracking mudflap transform on the given function
1178 tree. The tree is mutated in place, with possibly copied subtree nodes.
1180 For every auto variable declared, if its address is ever taken
1181 within the function, then supply its lifetime to the mudflap
1182 runtime with the __mf_register and __mf_unregister calls.
/* FNBODY is the function's GIMPLE body; FNPARAMS its parameter chain.
   NOTE(review): some original lines are missing from this chunk (e.g. a
   probable wi.pset assignment between lines 1194 and 1196) — the pset
   created below otherwise appears unused before being destroyed; confirm
   against the full source.  */
1186 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1188 struct mf_xform_decls_data d;
1189 struct walk_stmt_info wi;
1190 struct pointer_set_t *pset = pointer_set_create ();
/* Hand the parameter chain to the walk callback via wi.info.  */
1192 d.param_decls = fnparams;
1193 memset (&wi, 0, sizeof (wi));
1194 wi.info = (void*) &d;
/* Walk every statement, instrumenting GIMPLE_BINDs (mx_xfn_xform_decls).  */
1196 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1197 pointer_set_destroy (pset);
1201 /* ------------------------------------------------------------------------ */
1202 /* Externally visible mudflap functions. */
1205 /* Mark and return the given tree node to prevent further mudflap
/* Identity hash table of trees already marked, kept alive across GC via
   GTY.  Lazily created on first use.  */
1207 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
/* NOTE(review): the function header for mf_mark (around original line
   1211) is missing from this chunk; the lines below are its body.  */
1214 if (marked_trees == NULL)
/* 31 initial buckets; pointer identity is the hash/equality criterion.  */
1215 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1218 slot = htab_find_slot (marked_trees, t, INSERT);
/* Return nonzero iff T was previously marked by mf_mark.
   NOTE(review): the return-type line and braces are missing from this
   chunk.  */
1224 mf_marked_p (tree t)
/* No table yet means nothing has been marked.  */
1228 if (marked_trees == NULL)
1231 entry = htab_find (marked_trees, t);
1232 return (entry != NULL);
1235 /* Remember given node as a static of some kind: global data,
1236 function-scope static, or an anonymous constant. Its assembler
1239 /* A list of globals whose incomplete declarations we encountered.
1240 Instead of emitting the __mf_register call for them here, it's
1241 delayed until program finish time. If they're still incomplete by
1242 then, warnings are emitted. */
/* GC-protected vector; drained and truncated by mudflap_finish_file.  */
1244 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1246 /* A list of statements for calling __mf_register() at startup time. */
/* Statement chain built by mudflap_register_call; spliced into the static
   constructor by mudflap_finish_file, then reset to NULL_TREE.  */
1247 static GTY (()) tree enqueued_call_stmt_chain;
/* Build a __mf_register (&OBJ, OBJECT_SIZE, __MF_TYPE_STATIC, VARNAME)
   call and append it to enqueued_call_stmt_chain for emission at program
   startup.  NOTE(review): the storage-class/return-type line and some call
   arguments are missing from this chunk.  */
1250 mudflap_register_call (tree obj, tree object_size, tree varname)
1252 tree arg, call_stmt;
/* Take the object's address and normalize it to a plain pointer.  */
1254 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1255 arg = convert (ptr_type_node, arg);
1257 call_stmt = build_call_expr (mf_register_fndecl, 4,
1259 convert (size_type_node, object_size),
1260 /* __MF_TYPE_STATIC */
1261 build_int_cst (NULL_TREE, 4),
/* Defer the call until mudflap_finish_file emits the constructor.  */
1264 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
/* Queue the static/global declaration OBJ for registration at program
   finish time (see mudflap_finish_file), unless it is already marked or is
   an artificial extern.  NOTE(review): the return-type line and early
   returns' braces are missing from this chunk.  */
1268 mudflap_enqueue_decl (tree obj)
/* Already instrumented or deliberately excluded — nothing to do.  */
1270 if (mf_marked_p (obj))
1273 /* We don't need to process variable decls that are internally
1274 generated extern. If we did, we'd end up with warnings for them
1275 during mudflap_finish_file (). That would confuse the user,
1276 since the text would refer to variables that don't show up in the
1277 user's source code. */
1278 if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
/* Defer registration; the decl's type may still be incomplete here.  */
1281 VEC_safe_push (tree, gc, deferred_static_decls, obj);
/* Queue the constant OBJ (e.g. a string literal) for startup-time
   registration with a human-readable name.  NOTE(review): the return-type
   line and early-return braces are missing from this chunk.  */
1286 mudflap_enqueue_constant (tree obj)
1288 tree object_size, varname;
1290 if (mf_marked_p (obj))
/* String literals know their length directly; other constants get the
   size of their type.  */
1293 if (TREE_CODE (obj) == STRING_CST)
1294 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1296 object_size = size_in_bytes (TREE_TYPE (obj));
1298 if (TREE_CODE (obj) == STRING_CST)
1299 varname = mf_build_string ("string literal");
1301 varname = mf_build_string ("constant");
1303 mudflap_register_call (obj, object_size, varname);
1307 /* Emit any file-wide instrumentation. */
/* Builds a static constructor ('I' priority just below the reserved
   maximum) that calls __mf_init, optionally __mf_set_options, and then
   every deferred/enqueued __mf_register call.  NOTE(review): several
   original lines are missing from this chunk (return type, braces, an
   early-return on errors after line 1313, and some loop bodies); only the
   visible statements are documented.  */
1309 mudflap_finish_file (void)
1311 tree ctor_statements = NULL_TREE;
1313 /* No need to continue when there were errors. */
1317 /* Insert a call to __mf_init. */
1319 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1320 append_to_statement_list (call2_stmt, &ctor_statements);
1323 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1324 if (flag_mudflap_ignore_reads)
1326 tree arg = mf_build_string ("-ignore-reads");
1327 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1328 append_to_statement_list (call_stmt, &ctor_statements);
1331 /* Process all enqueued object decls. */
1332 if (deferred_static_decls)
1336 FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
1338 gcc_assert (DECL_P (obj));
/* Skip decls that were instrumented after they were enqueued.  */
1340 if (mf_marked_p (obj))
1343 /* Omit registration for static unaddressed objects. NB:
1344 Perform registration for non-static objects regardless of
1345 TREE_USED or TREE_ADDRESSABLE, because they may be used
1346 from other compilation units. */
1347 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
/* Types still incomplete at end of compilation cannot be sized, so
   only warn.  */
1350 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1352 warning (OPT_Wmudflap,
1353 "mudflap cannot track unknown size extern %qE",
1358 mudflap_register_call (obj,
1359 size_in_bytes (TREE_TYPE (obj)),
1360 mf_varname_tree (obj));
/* All deferred decls handled; empty the queue.  */
1363 VEC_truncate (tree, deferred_static_decls, 0);
1366 /* Append all the enqueued registration calls. */
1367 if (enqueued_call_stmt_chain)
1369 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1370 enqueued_call_stmt_chain = NULL_TREE;
/* Emit everything as a static initializer run before user code.  */
1373 cgraph_build_static_cdtor ('I', ctor_statements,
1374 MAX_RESERVED_INIT_PRIORITY-1);
1381 return flag_mudflap != 0;
/* Early mudflap pass: runs execute_mudflap_function_decls to instrument
   declaration lifetimes.  Requires only generic GIMPLE (PROP_gimple_any).
   NOTE(review): some descriptor fields (e.g. the pass.type line and
   sub/next pointers) are missing from this chunk.  */
1384 struct gimple_opt_pass pass_mudflap_1 =
1388 "mudflap1", /* name */
1389 gate_mudflap, /* gate */
1390 execute_mudflap_function_decls, /* execute */
1393 0, /* static_pass_number */
1394 TV_NONE, /* tv_id */
1395 PROP_gimple_any, /* properties_required */
1396 0, /* properties_provided */
1397 0, /* properties_destroyed */
1398 0, /* todo_flags_start */
1399 TODO_dump_func /* todo_flags_finish */
/* Late mudflap pass: runs execute_mudflap_function_ops to instrument
   pointer dereferences.  Requires SSA form, a CFG, and lowered EH, and
   re-verifies flow/statements plus updates SSA afterwards.
   NOTE(review): some descriptor fields are missing from this chunk.  */
1403 struct gimple_opt_pass pass_mudflap_2 =
1407 "mudflap2", /* name */
1408 gate_mudflap, /* gate */
1409 execute_mudflap_function_ops, /* execute */
1412 0, /* static_pass_number */
1413 TV_NONE, /* tv_id */
1414 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1415 0, /* properties_provided */
1416 0, /* properties_destroyed */
1417 0, /* todo_flags_start */
1418 TODO_verify_flow | TODO_verify_stmts
1419 | TODO_dump_func | TODO_update_ssa /* todo_flags_finish */
/* GTY-generated garbage-collection roots for this file's GTY(()) data.  */
1423 #include "gt-tree-mudflap.h"