/* Variable tracking routines for the GNU compiler.
- Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2, or (at your option)
+ the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT
License for more details.
You should have received a copy of the GNU General Public License
- along with GCC; see the file COPYING. If not, write to the Free
- Software Foundation, 59 Temple Place - Suite 330, Boston, MA
- 02111-1307, USA. */
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
/* This file contains the variable tracking pass. It computes where
variables are located (which registers or where in memory) at each position
#include "alloc-pool.h"
#include "fibheap.h"
#include "hashtab.h"
+#include "regs.h"
+#include "expr.h"
+#include "timevar.h"
+#include "tree-pass.h"
/* Type of micro operation. */
enum micro_operation_type
MO_USE_NO_VAR,/* Use location which is not associated with a variable
or the variable is not trackable. */
MO_SET, /* Set location. */
+ MO_COPY, /* Copy the same portion of a variable from one
+ location to another. */
MO_CLOBBER, /* Clobber location. */
MO_CALL, /* Call insn. */
MO_ADJUST /* Adjust stack pointer. */
enum micro_operation_type type;
union {
- /* Location. */
+ /* Location. For MO_SET and MO_COPY, this is the SET that performs
+ the assignment, if known, otherwise it is the target of the
+ assignment. */
rtx loc;
/* Stack adjustment. */
HOST_WIDE_INT adjust;
} u;
- /* The instruction which the micro operation is in. */
+ /* The instruction which the micro operation is in, for MO_USE,
+ MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
+ instruction or note in the original flow (before any var-tracking
+ notes are inserted, to simplify emission of notes), for MO_SET
+ and MO_CLOBBER. */
rtx insn;
} micro_operation;
HOST_WIDE_INT offset;
} *attrs;
+/* Structure holding a refcounted hash table. If refcount > 1,
+ it must first be unshared before being modified. */
+typedef struct shared_hash_def
+{
+ /* Reference count. */
+ int refcount;
+
+ /* Actual hash table. */
+ htab_t htab;
+} *shared_hash;
+
/* Structure holding the IN or OUT set for a basic block. */
typedef struct dataflow_set_def
{
attrs regs[FIRST_PSEUDO_REGISTER];
/* Variable locations. */
- htab_t vars;
+ shared_hash vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
/* The location (REG or MEM). */
rtx loc;
+
+ /* The "value" stored in this location. */
+ rtx set_src;
+
+ /* Initialized? */
+ enum var_init_status init;
} *location_chain;
/* Structure describing one part of variable. */
/* The variable parts. */
variable_part var_part[MAX_VAR_PARTS];
} *variable;
+typedef const struct variable_def *const_variable;
/* Hash function for DECL for VARIABLE_HTAB. */
-#define VARIABLE_HASH_VAL(decl) ((size_t) (decl))
+#define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
/* Pointer to the BB's information specific to variable tracking pass. */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)
+/* Macro to access MEM_OFFSET as a HOST_WIDE_INT. Evaluates MEM twice. */
+#define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
+
/* Alloc pool for struct attrs_def. */
static alloc_pool attrs_pool;
/* Alloc pool for struct location_chain_def. */
static alloc_pool loc_chain_pool;
+/* Alloc pool for struct shared_hash_def. */
+static alloc_pool shared_hash_pool;
+
/* Changed variables, notes will be emitted for them. */
static htab_t changed_variables;
/* Shall notes be emitted? */
static bool emit_notes;
-/* Fake variable for stack pointer. */
-tree frame_base_decl;
+/* Empty shared hashtable. */
+static shared_hash empty_shared_hash;
/* Local function prototypes. */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
HOST_WIDE_INT *);
static void bb_stack_adjust_offset (basic_block);
-static HOST_WIDE_INT prologue_stack_adjust (void);
static bool vt_stack_adjustments (void);
static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);
-static void vars_clear (htab_t);
-static variable unshare_variable (dataflow_set *set, variable var);
+static variable unshare_variable (dataflow_set *set, variable var,
+ enum var_init_status);
static int vars_copy_1 (void **, void *);
static void vars_copy (htab_t, htab_t);
-static void var_reg_delete_and_set (dataflow_set *, rtx);
-static void var_reg_delete (dataflow_set *, rtx);
+static tree var_debug_decl (tree);
+static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
+static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
+ enum var_init_status, rtx);
+static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
-static void var_mem_delete_and_set (dataflow_set *, rtx);
-static void var_mem_delete (dataflow_set *, rtx);
+static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
+static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
+ enum var_init_status, rtx);
+static void var_mem_delete (dataflow_set *, rtx, bool);
-static void dataflow_set_init (dataflow_set *, int);
+static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static int variable_union (void **, void *);
+static int variable_canonicalize (void **, void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool variable_different_p (variable, variable, bool);
static int dataflow_set_different_1 (void **, void *);
-static int dataflow_set_different_2 (void **, void *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);
static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree);
+static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int count_uses (rtx *, void *);
static void count_uses_1 (rtx *, void *);
-static void count_stores (rtx, rtx, void *);
+static void count_stores (rtx, const_rtx, void *);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
-static void add_stores (rtx, rtx, void *);
+static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static void vt_find_locations (void);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);
-static void variable_was_changed (variable, htab_t);
-static void set_frame_base_location (dataflow_set *, rtx);
-static void set_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT);
+static void variable_was_changed (variable, dataflow_set *);
+static void set_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT,
+ enum var_init_status, rtx);
+static void clobber_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT,
+ rtx);
static void delete_variable_part (dataflow_set *, rtx, tree, HOST_WIDE_INT);
static int emit_note_insn_var_location (void **, void *);
static void emit_notes_for_changes (rtx, enum emit_note_where);
code = GET_CODE (src);
if (! (code == PLUS || code == MINUS)
|| XEXP (src, 0) != stack_pointer_rtx
- || GET_CODE (XEXP (src, 1)) != CONST_INT)
+ || !CONST_INT_P (XEXP (src, 1)))
return;
if (code == MINUS)
{
rtx val = XEXP (XEXP (src, 1), 1);
/* We handle only adjustments by constant amount. */
- if (GET_CODE (XEXP (src, 1)) != PLUS ||
- GET_CODE (val) != CONST_INT)
- abort ();
+ gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
+ CONST_INT_P (val));
+
if (code == PRE_MODIFY)
*pre -= INTVAL (val);
else
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
HOST_WIDE_INT *post)
{
+ rtx pattern;
+
*pre = 0;
*post = 0;
- if (GET_CODE (PATTERN (insn)) == SET)
- stack_adjust_offset_pre_post (PATTERN (insn), pre, post);
- else if (GET_CODE (PATTERN (insn)) == PARALLEL
- || GET_CODE (PATTERN (insn)) == SEQUENCE)
+ pattern = PATTERN (insn);
+ if (RTX_FRAME_RELATED_P (insn))
+ {
+ rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
+ if (expr)
+ pattern = XEXP (expr, 0);
+ }
+
+ if (GET_CODE (pattern) == SET)
+ stack_adjust_offset_pre_post (pattern, pre, post);
+ else if (GET_CODE (pattern) == PARALLEL
+ || GET_CODE (pattern) == SEQUENCE)
{
int i;
/* There may be stack adjustments inside compound insns. Search
for them. */
- for ( i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
- if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
- stack_adjust_offset_pre_post (XVECEXP (PATTERN (insn), 0, i),
- pre, post);
+ for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
+ stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
}
}
VTI (bb)->out.stack_adjust = offset;
}
-/* Compute stack adjustment caused by function prolog. */
-
-static HOST_WIDE_INT
-prologue_stack_adjust (void)
-{
- HOST_WIDE_INT offset = 0;
- basic_block bb = ENTRY_BLOCK_PTR->next_bb;
- rtx insn;
- rtx end;
-
- if (!BB_END (bb))
- return 0;
-
- end = NEXT_INSN (BB_END (bb));
- for (insn = BB_HEAD (bb); insn != end; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
- break;
-
- if (INSN_P (insn))
- {
- HOST_WIDE_INT tmp;
-
- insn_stack_adjust_offset_pre_post (insn, &tmp, &tmp);
- offset += tmp;
- }
- }
-
- return offset;
-}
-
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
- Heavily borrowed from flow_depth_first_order_compute. */
+ Heavily borrowed from pre_and_rev_post_order_compute. */
static bool
vt_stack_adjustments (void)
{
- edge *stack;
+ edge_iterator *stack;
int sp;
/* Initialize entry block. */
VTI (ENTRY_BLOCK_PTR)->visited = true;
- VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = 0;
+ VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
/* Allocate stack for back-tracking up CFG. */
- stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
+ stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
/* Push the first edge on to the stack. */
- stack[sp++] = ENTRY_BLOCK_PTR->succ;
+ stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
while (sp)
{
- edge e;
+ edge_iterator ei;
basic_block src;
basic_block dest;
/* Look at the edge on the top of the stack. */
- e = stack[sp - 1];
- src = e->src;
- dest = e->dest;
+ ei = stack[sp - 1];
+ src = ei_edge (ei)->src;
+ dest = ei_edge (ei)->dest;
/* Check if the edge destination has been visited yet. */
if (!VTI (dest)->visited)
VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
bb_stack_adjust_offset (dest);
- if (dest->succ)
+ if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
time, check its successors. */
- stack[sp++] = dest->succ;
+ stack[sp++] = ei_start (dest->succs);
}
else
{
return false;
}
- if (e->succ_next)
+ if (! ei_one_before_end_p (ei))
/* Go to the next edge. */
- stack[sp - 1] = e->succ_next;
+ ei_next (&stack[sp - 1]);
else
/* Return to previous level if there are no more edges. */
sp--;
return true;
}
-/* Adjust stack reference MEM by ADJUSTMENT bytes and return the new rtx. */
+/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
+ to the argument pointer. Return the new rtx. */
static rtx
adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
{
- rtx adjusted_mem;
- rtx tmp;
+ rtx addr, cfa, tmp;
- adjusted_mem = copy_rtx (mem);
- XEXP (adjusted_mem, 0) = replace_rtx (XEXP (adjusted_mem, 0),
- stack_pointer_rtx,
- gen_rtx_PLUS (Pmode, stack_pointer_rtx,
- GEN_INT (adjustment)));
- tmp = simplify_rtx (XEXP (adjusted_mem, 0));
+#ifdef FRAME_POINTER_CFA_OFFSET
+ adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
+ cfa = plus_constant (frame_pointer_rtx, adjustment);
+#else
+ adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
+ cfa = plus_constant (arg_pointer_rtx, adjustment);
+#endif
+
+ addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
+ tmp = simplify_rtx (addr);
if (tmp)
- XEXP (adjusted_mem, 0) = tmp;
+ addr = tmp;
- return adjusted_mem;
+ return replace_equiv_address_nv (mem, addr);
}
/* The hash function for variable_htab, computes the hash value
static hashval_t
variable_htab_hash (const void *x)
{
- const variable v = (const variable) x;
+ const_variable const v = (const_variable) x;
return (VARIABLE_HASH_VAL (v->decl));
}
static int
variable_htab_eq (const void *x, const void *y)
{
- const variable v = (const variable) x;
- const tree decl = (const tree) y;
+ const_variable const v = (const_variable) x;
+ const_tree const decl = (const_tree) y;
return (VARIABLE_HASH_VAL (v->decl) == VARIABLE_HASH_VAL (decl));
}
variable var = (variable) elem;
location_chain node, next;
-#ifdef ENABLE_CHECKING
- if (var->refcount <= 0)
- abort ();
-#endif
+ gcc_assert (var->refcount > 0);
var->refcount--;
if (var->refcount > 0)
{
attrs list;
- list = pool_alloc (attrs_pool);
+ list = (attrs) pool_alloc (attrs_pool);
list->loc = loc;
list->decl = decl;
list->offset = offset;
attrs_list_clear (dstp);
for (; src; src = src->next)
{
- n = pool_alloc (attrs_pool);
+ n = (attrs) pool_alloc (attrs_pool);
n->loc = src->loc;
n->decl = src->decl;
n->offset = src->offset;
}
}
-/* Delete all variables from hash table VARS. */
+/* Shared hashtable support. */
+
+/* Return true if VARS is shared. */
+
+static inline bool
+shared_hash_shared (shared_hash vars)
+{
+ return vars->refcount > 1;
+}
+
+/* Return the hash table for VARS. */
+
+static inline htab_t
+shared_hash_htab (shared_hash vars)
+{
+ return vars->htab;
+}
+
+/* Copy variables into a new hash table. */
+
+static shared_hash
+shared_hash_unshare (shared_hash vars)
+{
+ shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
+ gcc_assert (vars->refcount > 1);
+ new_vars->refcount = 1;
+ new_vars->htab
+ = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
+ variable_htab_eq, variable_htab_free);
+ vars_copy (new_vars->htab, vars->htab);
+ vars->refcount--;
+ return new_vars;
+}
+
+/* Increment reference counter on VARS and return it. */
+
+static inline shared_hash
+shared_hash_copy (shared_hash vars)
+{
+ vars->refcount++;
+ return vars;
+}
+
+/* Decrement reference counter and destroy hash table if not shared
+ anymore. */
static void
-vars_clear (htab_t vars)
+shared_hash_destroy (shared_hash vars)
+{
+ gcc_assert (vars->refcount > 0);
+ if (--vars->refcount == 0)
+ {
+ htab_delete (vars->htab);
+ pool_free (shared_hash_pool, vars);
+ }
+}
+
+/* Unshare *PVARS if shared and return slot for DECL. If INS is
+ INSERT, insert it if not already present. */
+
+static inline void **
+shared_hash_find_slot_unshare (shared_hash *pvars, tree decl,
+ enum insert_option ins)
+{
+ if (shared_hash_shared (*pvars))
+ *pvars = shared_hash_unshare (*pvars);
+ return htab_find_slot_with_hash (shared_hash_htab (*pvars), decl,
+ VARIABLE_HASH_VAL (decl), ins);
+}
+
+/* Return slot for DECL, if it is already present in the hash table.
+ If it is not present, insert it only if VARS is not shared, otherwise
+ return NULL. */
+
+static inline void **
+shared_hash_find_slot (shared_hash vars, tree decl)
+{
+ return htab_find_slot_with_hash (shared_hash_htab (vars), decl,
+ VARIABLE_HASH_VAL (decl),
+ shared_hash_shared (vars)
+ ? NO_INSERT : INSERT);
+}
+
+/* Return slot for DECL only if it is already present in the hash table. */
+
+static inline void **
+shared_hash_find_slot_noinsert (shared_hash vars, tree decl)
+{
+ return htab_find_slot_with_hash (shared_hash_htab (vars), decl,
+ VARIABLE_HASH_VAL (decl), NO_INSERT);
+}
+
+/* Return variable for DECL or NULL if not already present in the hash
+ table. */
+
+static inline variable
+shared_hash_find (shared_hash vars, tree decl)
{
- htab_empty (vars);
+ return (variable)
+ htab_find_with_hash (shared_hash_htab (vars), decl,
+ VARIABLE_HASH_VAL (decl));
}
/* Return a copy of a variable VAR and insert it to dataflow set SET. */
static variable
-unshare_variable (dataflow_set *set, variable var)
+unshare_variable (dataflow_set *set, variable var,
+ enum var_init_status initialized)
{
void **slot;
variable new_var;
int i;
- new_var = pool_alloc (var_pool);
+ new_var = (variable) pool_alloc (var_pool);
new_var->decl = var->decl;
new_var->refcount = 1;
var->refcount--;
new_var->n_var_parts = var->n_var_parts;
+ if (! flag_var_tracking_uninit)
+ initialized = VAR_INIT_STATUS_INITIALIZED;
+
for (i = 0; i < var->n_var_parts; i++)
{
location_chain node;
{
location_chain new_lc;
- new_lc = pool_alloc (loc_chain_pool);
+ new_lc = (location_chain) pool_alloc (loc_chain_pool);
new_lc->next = NULL;
+ if (node->init > initialized)
+ new_lc->init = node->init;
+ else
+ new_lc->init = initialized;
+ if (node->set_src && !(MEM_P (node->set_src)))
+ new_lc->set_src = node->set_src;
+ else
+ new_lc->set_src = NULL;
new_lc->loc = node->loc;
*nextp = new_lc;
new_var->var_part[i].cur_loc = NULL;
}
- slot = htab_find_slot_with_hash (set->vars, new_var->decl,
- VARIABLE_HASH_VAL (new_var->decl),
- INSERT);
+ slot = shared_hash_find_slot_unshare (&set->vars, new_var->decl, INSERT);
*slot = new_var;
return new_var;
}
static void
vars_copy (htab_t dst, htab_t src)
{
- vars_clear (dst);
- htab_traverse (src, vars_copy_1, dst);
+ htab_traverse_noresize (src, vars_copy_1, dst);
+}
+
+/* Map a decl to its main debug decl. */
+
+static inline tree
+var_debug_decl (tree decl)
+{
+ if (decl && DECL_P (decl)
+ && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
+ && DECL_P (DECL_DEBUG_EXPR (decl)))
+ decl = DECL_DEBUG_EXPR (decl);
+
+ return decl;
+}
+
+/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
+
+static void
+var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
+ rtx set_src)
+{
+ tree decl = REG_EXPR (loc);
+ HOST_WIDE_INT offset = REG_OFFSET (loc);
+ attrs node;
+
+ decl = var_debug_decl (decl);
+
+ for (node = set->regs[REGNO (loc)]; node; node = node->next)
+ if (node->decl == decl && node->offset == offset)
+ break;
+ if (!node)
+ attrs_list_insert (&set->regs[REGNO (loc)], decl, offset, loc);
+ set_variable_part (set, loc, decl, offset, initialized, set_src);
+}
+
+static enum var_init_status
+get_init_value (dataflow_set *set, rtx loc, tree decl)
+{
+ variable var;
+ int i;
+ enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
+
+ if (! flag_var_tracking_uninit)
+ return VAR_INIT_STATUS_INITIALIZED;
+
+ var = shared_hash_find (set->vars, decl);
+ if (var)
+ {
+ for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
+ {
+ location_chain nextp;
+ for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
+ if (rtx_equal_p (nextp->loc, loc))
+ {
+ ret_val = nextp->init;
+ break;
+ }
+ }
+ }
+
+ return ret_val;
}
-/* Delete current content of register LOC in dataflow set SET
- and set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
+/* Delete current content of register LOC in dataflow set SET and set
+ the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
+ MODIFY is true, any other live copies of the same variable part are
+ also deleted from the dataflow set, otherwise the variable part is
+ assumed to be copied from another location holding the same
+ part. */
static void
-var_reg_delete_and_set (dataflow_set *set, rtx loc)
+var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
+ enum var_init_status initialized, rtx set_src)
{
tree decl = REG_EXPR (loc);
HOST_WIDE_INT offset = REG_OFFSET (loc);
attrs node, next;
attrs *nextp;
+ decl = var_debug_decl (decl);
+
+ if (initialized == VAR_INIT_STATUS_UNKNOWN)
+ initialized = get_init_value (set, loc, decl);
+
nextp = &set->regs[REGNO (loc)];
for (node = *nextp; node; node = next)
{
nextp = &node->next;
}
}
- if (set->regs[REGNO (loc)] == NULL)
- attrs_list_insert (&set->regs[REGNO (loc)], decl, offset, loc);
- set_variable_part (set, loc, decl, offset);
+ if (modify)
+ clobber_variable_part (set, loc, decl, offset, set_src);
+ var_reg_set (set, loc, initialized, set_src);
}
-/* Delete current content of register LOC in dataflow set SET. */
+/* Delete current content of register LOC in dataflow set SET. If
+ CLOBBER is true, also delete any other live copies of the same
+ variable part. */
static void
-var_reg_delete (dataflow_set *set, rtx loc)
+var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
attrs *reg = &set->regs[REGNO (loc)];
attrs node, next;
+ if (clobber)
+ {
+ tree decl = REG_EXPR (loc);
+ HOST_WIDE_INT offset = REG_OFFSET (loc);
+
+ decl = var_debug_decl (decl);
+
+ clobber_variable_part (set, NULL, decl, offset, NULL);
+ }
+
for (node = *reg; node; node = next)
{
next = node->next;
*reg = NULL;
}
-/* Delete and set the location part of variable MEM_EXPR (LOC)
- in dataflow set SET to LOC.
+/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
+ SET to LOC.
+ Adjust the address first if it is stack pointer based. */
+
+static void
+var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
+ rtx set_src)
+{
+ tree decl = MEM_EXPR (loc);
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
+
+ decl = var_debug_decl (decl);
+
+ set_variable_part (set, loc, decl, offset, initialized, set_src);
+}
+
+/* Delete and set the location part of variable MEM_EXPR (LOC) in
+ dataflow set SET to LOC. If MODIFY is true, any other live copies
+ of the same variable part are also deleted from the dataflow set,
+ otherwise the variable part is assumed to be copied from another
+ location holding the same part.
Adjust the address first if it is stack pointer based. */
static void
-var_mem_delete_and_set (dataflow_set *set, rtx loc)
+var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
+ enum var_init_status initialized, rtx set_src)
{
tree decl = MEM_EXPR (loc);
- HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
+
+ decl = var_debug_decl (decl);
- set_variable_part (set, loc, decl, offset);
+ if (initialized == VAR_INIT_STATUS_UNKNOWN)
+ initialized = get_init_value (set, loc, decl);
+
+ if (modify)
+ clobber_variable_part (set, NULL, decl, offset, set_src);
+ var_mem_set (set, loc, initialized, set_src);
}
-/* Delete the location part LOC from dataflow set SET.
+/* Delete the location part LOC from dataflow set SET. If CLOBBER is
+ true, also delete any other live copies of the same variable part.
Adjust the address first if it is stack pointer based. */
static void
-var_mem_delete (dataflow_set *set, rtx loc)
+var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
{
tree decl = MEM_EXPR (loc);
- HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0;
+ HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
+ decl = var_debug_decl (decl);
+ if (clobber)
+ clobber_variable_part (set, NULL, decl, offset, NULL);
delete_variable_part (set, loc, decl, offset);
}
VARS_SIZE is the initial size of hash table VARS. */
static void
-dataflow_set_init (dataflow_set *set, int vars_size)
+dataflow_set_init (dataflow_set *set)
{
init_attrs_list_set (set->regs);
- set->vars = htab_create (vars_size, variable_htab_hash, variable_htab_eq,
- variable_htab_free);
+ set->vars = shared_hash_copy (empty_shared_hash);
set->stack_adjust = 0;
}
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
attrs_list_clear (&set->regs[i]);
- vars_clear (set->vars);
+ shared_hash_destroy (set->vars);
+ set->vars = shared_hash_copy (empty_shared_hash);
}
/* Copy the contents of dataflow set SRC to DST. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
attrs_list_copy (&dst->regs[i], src->regs[i]);
- vars_copy (dst->vars, src->vars);
+ shared_hash_destroy (dst->vars);
+ dst->vars = shared_hash_copy (src->vars);
dst->stack_adjust = src->stack_adjust;
}
/* The sum of positions in the input chains. */
int pos;
- /* The position in the chains of SRC and DST dataflow sets. */
- int pos_src;
+ /* The position in the chain of DST dataflow set. */
int pos_dst;
};
+/* Buffer for location list sorting and its allocated size. */
+static struct variable_union_info *vui_vec;
+static int vui_allocated;
+
/* Compare function for qsort, order the structures by POS element. */
static int
variable_union_info_cmp_pos (const void *n1, const void *n2)
{
- const struct variable_union_info *i1 = n1;
- const struct variable_union_info *i2 = n2;
+ const struct variable_union_info *const i1 =
+ (const struct variable_union_info *) n1;
+ const struct variable_union_info *const i2 =
+ ( const struct variable_union_info *) n2;
if (i1->pos != i2->pos)
return i1->pos - i2->pos;
static int
variable_union (void **slot, void *data)
{
- variable src, dst, *dstp;
+ variable src, dst;
+ void **dstp;
dataflow_set *set = (dataflow_set *) data;
int i, j, k;
src = *(variable *) slot;
- dstp = (variable *) htab_find_slot_with_hash (set->vars, src->decl,
- VARIABLE_HASH_VAL (src->decl),
- INSERT);
- if (!*dstp)
+ dstp = shared_hash_find_slot (set->vars, src->decl);
+ if (!dstp || !*dstp)
{
src->refcount++;
a copy of the variable. */
for (k = 0; k < src->n_var_parts; k++)
{
+ gcc_assert (!src->var_part[k].loc_chain
+ == !src->var_part[k].cur_loc);
if (src->var_part[k].loc_chain)
{
-#ifdef ENABLE_CHECKING
- if (src->var_part[k].cur_loc == NULL)
- abort ();
-#endif
+ gcc_assert (src->var_part[k].cur_loc);
if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
break;
}
-#ifdef ENABLE_CHECKING
- else
- {
- if (src->var_part[k].cur_loc != NULL)
- abort ();
- }
-#endif
}
if (k < src->n_var_parts)
- unshare_variable (set, src);
+ {
+ if (dstp)
+ *dstp = (void *) src;
+ unshare_variable (set, src, VAR_INIT_STATUS_UNKNOWN);
+ }
else
- *dstp = src;
+ {
+ if (!dstp)
+ dstp = shared_hash_find_slot_unshare (&set->vars, src->decl,
+ INSERT);
+ *dstp = (void *) src;
+ }
/* Continue traversing the hash table. */
return 1;
}
else
- dst = *dstp;
+ dst = (variable) *dstp;
-#ifdef ENABLE_CHECKING
- if (src->n_var_parts == 0)
- abort ();
-#endif
+ gcc_assert (src->n_var_parts);
/* Count the number of location parts, result is K. */
for (i = 0, j = 0, k = 0;
}
k += src->n_var_parts - i;
k += dst->n_var_parts - j;
-#ifdef ENABLE_CHECKING
+
/* We track only variables whose size is <= MAX_VAR_PARTS bytes
thus there are at most MAX_VAR_PARTS different offsets. */
- if (k > MAX_VAR_PARTS)
- abort ();
-#endif
+ gcc_assert (k <= MAX_VAR_PARTS);
- if (dst->refcount > 1 && dst->n_var_parts != k)
- dst = unshare_variable (set, dst);
+ if ((dst->refcount > 1 || shared_hash_shared (set->vars))
+ && dst->n_var_parts != k)
+ dst = unshare_variable (set, dst, VAR_INIT_STATUS_UNKNOWN);
i = src->n_var_parts - 1;
j = dst->n_var_parts - 1;
/* If DST is shared compare the location chains.
If they are different we will modify the chain in DST with
high probability so make a copy of DST. */
- if (dst->refcount > 1)
+ if (dst->refcount > 1 || shared_hash_shared (set->vars))
{
for (node = src->var_part[i].loc_chain,
node2 = dst->var_part[j].loc_chain; node && node2;
&& REG_P (node->loc)
&& REGNO (node2->loc) == REGNO (node->loc))
|| rtx_equal_p (node2->loc, node->loc)))
- break;
+ {
+ if (node2->init < node->init)
+ node2->init = node->init;
+ break;
+ }
}
if (node || node2)
- dst = unshare_variable (set, dst);
+ dst = unshare_variable (set, dst, VAR_INIT_STATUS_UNKNOWN);
}
src_l = 0;
dst_l = 0;
for (node = dst->var_part[j].loc_chain; node; node = node->next)
dst_l++;
- vui = xcalloc (src_l + dst_l, sizeof (struct variable_union_info));
- /* Fill in the locations from DST. */
- for (node = dst->var_part[j].loc_chain, jj = 0; node;
- node = node->next, jj++)
+ if (dst_l == 1)
{
- vui[jj].lc = node;
- vui[jj].pos_dst = jj;
-
- /* Value larger than a sum of 2 valid positions. */
- vui[jj].pos_src = src_l + dst_l;
+ /* The most common case, much simpler, no qsort is needed. */
+ location_chain dstnode = dst->var_part[j].loc_chain;
+ dst->var_part[k].loc_chain = dstnode;
+ dst->var_part[k].offset = dst->var_part[j].offset;
+ node2 = dstnode;
+ for (node = src->var_part[i].loc_chain; node; node = node->next)
+ if (!((REG_P (dstnode->loc)
+ && REG_P (node->loc)
+ && REGNO (dstnode->loc) == REGNO (node->loc))
+ || rtx_equal_p (dstnode->loc, node->loc)))
+ {
+ location_chain new_node;
+
+ /* Copy the location from SRC. */
+ new_node = (location_chain) pool_alloc (loc_chain_pool);
+ new_node->loc = node->loc;
+ new_node->init = node->init;
+ if (!node->set_src || MEM_P (node->set_src))
+ new_node->set_src = NULL;
+ else
+ new_node->set_src = node->set_src;
+ node2->next = new_node;
+ node2 = new_node;
+ }
+ node2->next = NULL;
}
-
- /* Fill in the locations from SRC. */
- n = dst_l;
- for (node = src->var_part[i].loc_chain, ii = 0; node;
- node = node->next, ii++)
+ else
{
- /* Find location from NODE. */
- for (jj = 0; jj < dst_l; jj++)
- {
- if ((REG_P (vui[jj].lc->loc)
- && REG_P (node->loc)
- && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
- || rtx_equal_p (vui[jj].lc->loc, node->loc))
- {
- vui[jj].pos_src = ii;
- break;
- }
- }
- if (jj >= dst_l) /* The location has not been found. */
+ if (src_l + dst_l > vui_allocated)
{
- location_chain new_node;
-
- /* Copy the location from SRC. */
- new_node = pool_alloc (loc_chain_pool);
- new_node->loc = node->loc;
- vui[n].lc = new_node;
- vui[n].pos_src = ii;
- vui[n].pos_dst = src_l + dst_l;
- n++;
+ vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
+ vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
+ vui_allocated);
}
- }
+ vui = vui_vec;
- for (ii = 0; ii < src_l + dst_l; ii++)
- vui[ii].pos = vui[ii].pos_src + vui[ii].pos_dst;
+ /* Fill in the locations from DST. */
+ for (node = dst->var_part[j].loc_chain, jj = 0; node;
+ node = node->next, jj++)
+ {
+ vui[jj].lc = node;
+ vui[jj].pos_dst = jj;
- qsort (vui, n, sizeof (struct variable_union_info),
- variable_union_info_cmp_pos);
+ /* Pos plus value larger than a sum of 2 valid positions. */
+ vui[jj].pos = jj + src_l + dst_l;
+ }
- /* Reconnect the nodes in sorted order. */
- for (ii = 1; ii < n; ii++)
- vui[ii - 1].lc->next = vui[ii].lc;
- vui[n - 1].lc->next = NULL;
+ /* Fill in the locations from SRC. */
+ n = dst_l;
+ for (node = src->var_part[i].loc_chain, ii = 0; node;
+ node = node->next, ii++)
+ {
+ /* Find location from NODE. */
+ for (jj = 0; jj < dst_l; jj++)
+ {
+ if ((REG_P (vui[jj].lc->loc)
+ && REG_P (node->loc)
+ && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
+ || rtx_equal_p (vui[jj].lc->loc, node->loc))
+ {
+ vui[jj].pos = jj + ii;
+ break;
+ }
+ }
+ if (jj >= dst_l) /* The location has not been found. */
+ {
+ location_chain new_node;
+
+ /* Copy the location from SRC. */
+ new_node = (location_chain) pool_alloc (loc_chain_pool);
+ new_node->loc = node->loc;
+ new_node->init = node->init;
+ if (!node->set_src || MEM_P (node->set_src))
+ new_node->set_src = NULL;
+ else
+ new_node->set_src = node->set_src;
+ vui[n].lc = new_node;
+ vui[n].pos_dst = src_l + dst_l;
+ vui[n].pos = ii + src_l + dst_l;
+ n++;
+ }
+ }
- dst->var_part[k].loc_chain = vui[0].lc;
- dst->var_part[k].offset = dst->var_part[j].offset;
+ if (dst_l == 2)
+ {
+ /* Special case still very common case. For dst_l == 2
+ all entries dst_l ... n-1 are sorted, with for i >= dst_l
+ vui[i].pos == i + src_l + dst_l. */
+ if (vui[0].pos > vui[1].pos)
+ {
+ /* Order should be 1, 0, 2... */
+ dst->var_part[k].loc_chain = vui[1].lc;
+ vui[1].lc->next = vui[0].lc;
+ if (n >= 3)
+ {
+ vui[0].lc->next = vui[2].lc;
+ vui[n - 1].lc->next = NULL;
+ }
+ else
+ vui[0].lc->next = NULL;
+ ii = 3;
+ }
+ else
+ {
+ dst->var_part[k].loc_chain = vui[0].lc;
+ if (n >= 3 && vui[2].pos < vui[1].pos)
+ {
+ /* Order should be 0, 2, 1, 3... */
+ vui[0].lc->next = vui[2].lc;
+ vui[2].lc->next = vui[1].lc;
+ if (n >= 4)
+ {
+ vui[1].lc->next = vui[3].lc;
+ vui[n - 1].lc->next = NULL;
+ }
+ else
+ vui[1].lc->next = NULL;
+ ii = 4;
+ }
+ else
+ {
+ /* Order should be 0, 1, 2... */
+ ii = 1;
+ vui[n - 1].lc->next = NULL;
+ }
+ }
+ for (; ii < n; ii++)
+ vui[ii - 1].lc->next = vui[ii].lc;
+ }
+ else
+ {
+ qsort (vui, n, sizeof (struct variable_union_info),
+ variable_union_info_cmp_pos);
+
+ /* Reconnect the nodes in sorted order. */
+ for (ii = 1; ii < n; ii++)
+ vui[ii - 1].lc->next = vui[ii].lc;
+ vui[n - 1].lc->next = NULL;
+ dst->var_part[k].loc_chain = vui[0].lc;
+ }
- free (vui);
+ dst->var_part[k].offset = dst->var_part[j].offset;
+ }
i--;
j--;
}
{
location_chain new_lc;
- new_lc = pool_alloc (loc_chain_pool);
+ new_lc = (location_chain) pool_alloc (loc_chain_pool);
new_lc->next = NULL;
+ new_lc->init = node->init;
+ if (!node->set_src || MEM_P (node->set_src))
+ new_lc->set_src = NULL;
+ else
+ new_lc->set_src = node->set_src;
new_lc->loc = node->loc;
*nextp = new_lc;
dst->var_part[k].cur_loc = NULL;
}
+ if (flag_var_tracking_uninit)
+ for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
+ {
+ location_chain node, node2;
+ for (node = src->var_part[i].loc_chain; node; node = node->next)
+ for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
+ if (rtx_equal_p (node->loc, node2->loc))
+ {
+ if (node->init > node2->init)
+ node2->init = node->init;
+ }
+ }
+
/* Continue traversing the hash table. */
return 1;
}
+/* Like variable_union, but only used when doing dataflow_set_union
+ into an empty hashtab. To allow sharing, dst is initially shared
+ with src (so all variables are "copied" from src to dst hashtab),
+ so only calls to unshare_variable are needed for the variables that
+ require canonicalization. */
+
+static int
+variable_canonicalize (void **slot, void *data)
+{
+ variable src;
+ dataflow_set *set = (dataflow_set *) data;
+ int k;
+
+ src = *(variable *) slot;
+
+ /* If CUR_LOC of some variable part is not the first element of
+ the location chain we are going to change it so we have to make
+ a copy of the variable. */
+ for (k = 0; k < src->n_var_parts; k++)
+ {
+ gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
+ if (src->var_part[k].loc_chain)
+ {
+ gcc_assert (src->var_part[k].cur_loc);
+ if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
+ break;
+ }
+ }
+ if (k < src->n_var_parts)
+ unshare_variable (set, src, VAR_INIT_STATUS_UNKNOWN);
+ return 1;
+}
+
/* Compute union of dataflow sets SRC and DST and store it to DST. */
static void
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
attrs_list_union (&dst->regs[i], src->regs[i]);
- htab_traverse (src->vars, variable_union, dst);
+ if (dst->vars == empty_shared_hash)
+ {
+ shared_hash_destroy (dst->vars);
+ dst->vars = shared_hash_copy (src->vars);
+ htab_traverse (shared_hash_htab (src->vars), variable_canonicalize, dst);
+ }
+ else
+ htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
}
/* Flag whether two dataflow sets being compared contain different data. */
variable var1, var2;
var1 = *(variable *) slot;
- var2 = htab_find_with_hash (htab, var1->decl,
+ var2 = (variable) htab_find_with_hash (htab, var1->decl,
VARIABLE_HASH_VAL (var1->decl));
if (!var2)
{
return 1;
}
-/* Compare variable *SLOT with the same variable in hash table DATA
- and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
-
-static int
-dataflow_set_different_2 (void **slot, void *data)
-{
- htab_t htab = (htab_t) data;
- variable var1, var2;
-
- var1 = *(variable *) slot;
- var2 = htab_find_with_hash (htab, var1->decl,
- VARIABLE_HASH_VAL (var1->decl));
- if (!var2)
- {
- dataflow_set_different_value = true;
-
- /* Stop traversing the hash table. */
- return 0;
- }
-
-#ifdef ENABLE_CHECKING
- /* If both variables are defined they have been already checked for
- equivalence. */
- if (variable_different_p (var1, var2, false))
- abort ();
-#endif
-
- /* Continue traversing the hash table. */
- return 1;
-}
-
/* Return true if dataflow sets OLD_SET and NEW_SET differ. */
static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
+ if (old_set->vars == new_set->vars)
+ return false;
+
+ if (htab_elements (shared_hash_htab (old_set->vars))
+ != htab_elements (shared_hash_htab (new_set->vars)))
+ return true;
+
dataflow_set_different_value = false;
- htab_traverse (old_set->vars, dataflow_set_different_1, new_set->vars);
- if (!dataflow_set_different_value)
- {
- /* We have compared the variables which are in both hash tables
- so now only check whether there are some variables in NEW_SET->VARS
- which are not in OLD_SET->VARS. */
- htab_traverse (new_set->vars, dataflow_set_different_2, old_set->vars);
- }
+ htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
+ shared_hash_htab (new_set->vars));
+ /* No need to traverse the second hashtab, if both have the same number
+ of elements and the second one had all entries found in the first one,
+ then it can't have any extra entries. */
return dataflow_set_different_value;
}
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
attrs_list_clear (&set->regs[i]);
- htab_delete (set->vars);
+ shared_hash_destroy (set->vars);
set->vars = NULL;
}
track_expr_p (tree expr)
{
rtx decl_rtl;
+ tree realdecl;
/* If EXPR is not a parameter or a variable do not track it. */
if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
decl_rtl = DECL_RTL_IF_SET (expr);
if (!decl_rtl)
return 0;
+
+ /* If this expression is really a debug alias of some other declaration, we
+ don't need to track this expression if the ultimate declaration is
+ ignored. */
+ realdecl = expr;
+ if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
+ {
+ realdecl = DECL_DEBUG_EXPR (realdecl);
+ /* ??? We don't yet know how to emit DW_OP_piece for a variable
+ that has been SRA'ed. */
+ if (!DECL_P (realdecl))
+ return 0;
+ }
- /* Do not track EXPR if it should be ignored for debugging purposes. */
- if (DECL_IGNORED_P (expr))
+ /* Do not track EXPR if REALDECL, the declaration it is a debug
+ alias of, should be ignored for debugging purposes. */
+ if (DECL_IGNORED_P (realdecl))
return 0;
/* Do not track global variables until we are able to emit correct location
list for them. */
- if (TREE_STATIC (expr))
+ if (TREE_STATIC (realdecl))
return 0;
/* When the EXPR is a DECL for alias of some variable (see example)
if (MEM_P (decl_rtl))
{
/* Do not track structures and arrays. */
- if (GET_MODE (decl_rtl) == BLKmode)
+ if (GET_MODE (decl_rtl) == BLKmode
+ || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
return 0;
if (MEM_SIZE (decl_rtl)
&& INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
return 1;
}
-/* Count uses (register and memory references) LOC which will be tracked.
- INSN is instruction which the LOC is part of. */
+/* Determine whether a given LOC refers to the same variable part as
+ EXPR+OFFSET. */
-static int
-count_uses (rtx *loc, void *insn)
+static bool
+same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
{
- basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
+ tree expr2;
+ HOST_WIDE_INT offset2;
- if (REG_P (*loc))
+ if (! DECL_P (expr))
+ return false;
+
+ if (REG_P (loc))
{
-#ifdef ENABLE_CHECKING
- if (REGNO (*loc) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
- VTI (bb)->n_mos++;
+ expr2 = REG_EXPR (loc);
+ offset2 = REG_OFFSET (loc);
}
- else if (MEM_P (*loc)
- && MEM_EXPR (*loc)
- && track_expr_p (MEM_EXPR (*loc)))
+ else if (MEM_P (loc))
{
- VTI (bb)->n_mos++;
+ expr2 = MEM_EXPR (loc);
+ offset2 = INT_MEM_OFFSET (loc);
}
+ else
+ return false;
- return 0;
-}
+ if (! expr2 || ! DECL_P (expr2))
+ return false;
-/* Helper function for finding all uses of REG/MEM in X in insn INSN. */
+ expr = var_debug_decl (expr);
+ expr2 = var_debug_decl (expr2);
-static void
-count_uses_1 (rtx *x, void *insn)
-{
- for_each_rtx (x, count_uses, insn);
+ return (expr == expr2 && offset == offset2);
}
-/* Count stores (register and memory references) LOC which will be tracked.
- INSN is instruction which the LOC is part of. */
+/* LOC is a REG or MEM that we would like to track if possible.
+ If EXPR is null, we don't know what expression LOC refers to,
+ otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
+ LOC is an lvalue register.
-static void
-count_stores (rtx loc, rtx expr ATTRIBUTE_UNUSED, void *insn)
-{
+ Return true if EXPR is nonnull and if LOC, or some lowpart of it,
+ is something we can track. When returning true, store the mode of
+ the lowpart we can track in *MODE_OUT (if nonnull) and its offset
+ from EXPR in *OFFSET_OUT (if nonnull). */
+
+static bool
+track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
+ enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
+{
+ enum machine_mode mode;
+
+ if (expr == NULL || !track_expr_p (expr))
+ return false;
+
+ /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
+ whole subreg, but only the old inner part is really relevant. */
+ mode = GET_MODE (loc);
+ if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
+ {
+ enum machine_mode pseudo_mode;
+
+ pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
+ if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
+ {
+ offset += byte_lowpart_offset (pseudo_mode, mode);
+ mode = pseudo_mode;
+ }
+ }
+
+ /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
+ Do the same if we are storing to a register and EXPR occupies
+ the whole of register LOC; in that case, the whole of EXPR is
+ being changed. We exclude complex modes from the second case
+ because the real and imaginary parts are represented as separate
+ pseudo registers, even if the whole complex value fits into one
+ hard register. */
+ if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
+ || (store_reg_p
+ && !COMPLEX_MODE_P (DECL_MODE (expr))
+ && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
+ && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
+ {
+ mode = DECL_MODE (expr);
+ offset = 0;
+ }
+
+ if (offset < 0 || offset >= MAX_VAR_PARTS)
+ return false;
+
+ if (mode_out)
+ *mode_out = mode;
+ if (offset_out)
+ *offset_out = offset;
+ return true;
+}
+
+/* Return the MODE lowpart of LOC, or null if LOC is not something we
+ want to track. When returning nonnull, make sure that the attributes
+ on the returned value are updated. */
+
+static rtx
+var_lowpart (enum machine_mode mode, rtx loc)
+{
+ unsigned int offset, reg_offset, regno;
+
+ if (!REG_P (loc) && !MEM_P (loc))
+ return NULL;
+
+ if (GET_MODE (loc) == mode)
+ return loc;
+
+ offset = byte_lowpart_offset (mode, GET_MODE (loc));
+
+ if (MEM_P (loc))
+ return adjust_address_nv (loc, mode, offset);
+
+ reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
+ regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
+ reg_offset, mode);
+ return gen_rtx_REG_offset (loc, mode, regno, offset);
+}
+
+/* Count uses (register and memory references) LOC which will be tracked.
+ INSN is instruction which the LOC is part of. */
+
+static int
+count_uses (rtx *loc, void *insn)
+{
+ basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
+
+ if (REG_P (*loc))
+ {
+ gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER);
+ VTI (bb)->n_mos++;
+ }
+ else if (MEM_P (*loc)
+ && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
+ false, NULL, NULL))
+ {
+ VTI (bb)->n_mos++;
+ }
+
+ return 0;
+}
+
+/* Helper function for finding all uses of REG/MEM in X in insn INSN. */
+
+static void
+count_uses_1 (rtx *x, void *insn)
+{
+ for_each_rtx (x, count_uses, insn);
+}
+
+/* Count stores (register and memory references) LOC which will be tracked.
+ INSN is instruction which the LOC is part of. */
+
+static void
+count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *insn)
+{
count_uses (&loc, insn);
}
static int
add_uses (rtx *loc, void *insn)
{
+ enum machine_mode mode;
+
if (REG_P (*loc))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
- mo->type = ((REG_EXPR (*loc) && track_expr_p (REG_EXPR (*loc)))
- ? MO_USE : MO_USE_NO_VAR);
- mo->u.loc = *loc;
+ if (track_loc_p (*loc, REG_EXPR (*loc), REG_OFFSET (*loc),
+ false, &mode, NULL))
+ {
+ mo->type = MO_USE;
+ mo->u.loc = var_lowpart (mode, *loc);
+ }
+ else
+ {
+ mo->type = MO_USE_NO_VAR;
+ mo->u.loc = *loc;
+ }
mo->insn = (rtx) insn;
}
else if (MEM_P (*loc)
- && MEM_EXPR (*loc)
- && track_expr_p (MEM_EXPR (*loc)))
+ && track_loc_p (*loc, MEM_EXPR (*loc), INT_MEM_OFFSET (*loc),
+ false, &mode, NULL))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
mo->type = MO_USE;
- mo->u.loc = *loc;
+ mo->u.loc = var_lowpart (mode, *loc);
mo->insn = (rtx) insn;
}
INSN is instruction which the LOC is part of. */
static void
-add_stores (rtx loc, rtx expr, void *insn)
+add_stores (rtx loc, const_rtx expr, void *insn)
{
+ enum machine_mode mode;
+
if (REG_P (loc))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
- mo->type = ((GET_CODE (expr) != CLOBBER && REG_EXPR (loc)
- && track_expr_p (REG_EXPR (loc)))
- ? MO_SET : MO_CLOBBER);
- mo->u.loc = loc;
+ if (GET_CODE (expr) == CLOBBER
+ || !track_loc_p (loc, REG_EXPR (loc), REG_OFFSET (loc),
+ true, &mode, NULL))
+ {
+ mo->type = MO_CLOBBER;
+ mo->u.loc = loc;
+ }
+ else
+ {
+ rtx src = NULL;
+
+ if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
+ src = var_lowpart (mode, SET_SRC (expr));
+ loc = var_lowpart (mode, loc);
+
+ if (src == NULL)
+ {
+ mo->type = MO_SET;
+ mo->u.loc = loc;
+ }
+ else
+ {
+ if (SET_SRC (expr) != src)
+ expr = gen_rtx_SET (VOIDmode, loc, src);
+ if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
+ mo->type = MO_COPY;
+ else
+ mo->type = MO_SET;
+ mo->u.loc = CONST_CAST_RTX (expr);
+ }
+ }
mo->insn = (rtx) insn;
}
else if (MEM_P (loc)
- && MEM_EXPR (loc)
- && track_expr_p (MEM_EXPR (loc)))
+ && track_loc_p (loc, MEM_EXPR (loc), INT_MEM_OFFSET (loc),
+ false, &mode, NULL))
{
basic_block bb = BLOCK_FOR_INSN ((rtx) insn);
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
- mo->type = GET_CODE (expr) == CLOBBER ? MO_CLOBBER : MO_SET;
- mo->u.loc = loc;
+ if (GET_CODE (expr) == CLOBBER)
+ {
+ mo->type = MO_CLOBBER;
+ mo->u.loc = var_lowpart (mode, loc);
+ }
+ else
+ {
+ rtx src = NULL;
+
+ if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
+ src = var_lowpart (mode, SET_SRC (expr));
+ loc = var_lowpart (mode, loc);
+
+ if (src == NULL)
+ {
+ mo->type = MO_SET;
+ mo->u.loc = loc;
+ }
+ else
+ {
+ if (SET_SRC (expr) != src)
+ expr = gen_rtx_SET (VOIDmode, loc, src);
+ if (same_variable_part_p (SET_SRC (expr),
+ MEM_EXPR (loc),
+ INT_MEM_OFFSET (loc)))
+ mo->type = MO_COPY;
+ else
+ mo->type = MO_SET;
+ mo->u.loc = CONST_CAST_RTX (expr);
+ }
+ }
mo->insn = (rtx) insn;
}
}
+static enum var_init_status
+find_src_status (dataflow_set *in, rtx src)
+{
+ tree decl = NULL_TREE;
+ enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
+
+ if (! flag_var_tracking_uninit)
+ status = VAR_INIT_STATUS_INITIALIZED;
+
+ if (src && REG_P (src))
+ decl = var_debug_decl (REG_EXPR (src));
+ else if (src && MEM_P (src))
+ decl = var_debug_decl (MEM_EXPR (src));
+
+ if (src && decl)
+ status = get_init_value (in, src, decl);
+
+ return status;
+}
+
+/* SRC is the source of an assignment. Use SET to try to find what
+ was ultimately assigned to SRC. Return that value if known,
+ otherwise return SRC itself. */
+
+static rtx
+find_src_set_src (dataflow_set *set, rtx src)
+{
+ tree decl = NULL_TREE; /* The variable being copied around. */
+ rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
+ variable var;
+ location_chain nextp;
+ int i;
+ bool found;
+
+ if (src && REG_P (src))
+ decl = var_debug_decl (REG_EXPR (src));
+ else if (src && MEM_P (src))
+ decl = var_debug_decl (MEM_EXPR (src));
+
+ if (src && decl)
+ {
+ var = shared_hash_find (set->vars, decl);
+ if (var)
+ {
+ found = false;
+ for (i = 0; i < var->n_var_parts && !found; i++)
+ for (nextp = var->var_part[i].loc_chain; nextp && !found;
+ nextp = nextp->next)
+ if (rtx_equal_p (nextp->loc, src))
+ {
+ set_src = nextp->set_src;
+ found = true;
+ }
+
+ }
+ }
+
+ return set_src;
+}
+
/* Compute the changes of variable locations in the basic block BB. */
static bool
dataflow_set *in = &VTI (bb)->in;
dataflow_set *out = &VTI (bb)->out;
- dataflow_set_init (&old_out, htab_elements (VTI (bb)->out.vars) + 3);
+ dataflow_set_init (&old_out);
dataflow_set_copy (&old_out, out);
dataflow_set_copy (out, in);
break;
case MO_USE:
+ {
+ rtx loc = VTI (bb)->mos[i].u.loc;
+
+ if (REG_P (loc))
+ var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
+ else if (MEM_P (loc))
+ var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
+ }
+ break;
+
case MO_SET:
{
rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx set_src = NULL;
+
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
+
+ if (REG_P (loc))
+ var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
+ set_src);
+ else if (MEM_P (loc))
+ var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
+ set_src);
+ }
+ break;
+
+ case MO_COPY:
+ {
+ rtx loc = VTI (bb)->mos[i].u.loc;
+ enum var_init_status src_status;
+ rtx set_src = NULL;
+
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
+
+ if (! flag_var_tracking_uninit)
+ src_status = VAR_INIT_STATUS_INITIALIZED;
+ else
+ {
+ src_status = find_src_status (in, set_src);
+
+ if (src_status == VAR_INIT_STATUS_UNKNOWN)
+ src_status = find_src_status (out, set_src);
+ }
+
+ set_src = find_src_set_src (in, set_src);
if (REG_P (loc))
- var_reg_delete_and_set (out, loc);
+ var_reg_delete_and_set (out, loc, false, src_status, set_src);
else if (MEM_P (loc))
- var_mem_delete_and_set (out, loc);
+ var_mem_delete_and_set (out, loc, false, src_status, set_src);
}
break;
case MO_USE_NO_VAR:
- case MO_CLOBBER:
{
rtx loc = VTI (bb)->mos[i].u.loc;
if (REG_P (loc))
- var_reg_delete (out, loc);
+ var_reg_delete (out, loc, false);
else if (MEM_P (loc))
- var_mem_delete (out, loc);
+ var_mem_delete (out, loc, false);
}
break;
- case MO_ADJUST:
+ case MO_CLOBBER:
{
- rtx base;
+ rtx loc = VTI (bb)->mos[i].u.loc;
- out->stack_adjust += VTI (bb)->mos[i].u.adjust;
- base = gen_rtx_MEM (Pmode,
- gen_rtx_PLUS (Pmode, stack_pointer_rtx,
- GEN_INT (out->stack_adjust)));
- set_frame_base_location (out, base);
+ if (REG_P (loc))
+ var_reg_delete (out, loc, true);
+ else if (MEM_P (loc))
+ var_mem_delete (out, loc, true);
}
break;
+
+ case MO_ADJUST:
+ out->stack_adjust += VTI (bb)->mos[i].u.adjust;
+ break;
}
}
/* Compute reverse completion order of depth first search of the CFG
so that the data-flow runs faster. */
- rc_order = xmalloc (n_basic_blocks * sizeof (int));
- bb_order = xmalloc (last_basic_block * sizeof (int));
- flow_depth_first_order_compute (NULL, rc_order);
- for (i = 0; i < n_basic_blocks; i++)
+ rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
+ bb_order = XNEWVEC (int, last_basic_block);
+ pre_and_rev_post_order_compute (NULL, rc_order, false);
+ for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
bb_order[rc_order[i]] = i;
free (rc_order);
in_worklist = sbitmap_alloc (last_basic_block);
in_pending = sbitmap_alloc (last_basic_block);
sbitmap_zero (in_worklist);
- sbitmap_zero (in_pending);
FOR_EACH_BB (bb)
- {
- fibheap_insert (pending, bb_order[bb->index], bb);
- SET_BIT (in_pending, bb->index);
- }
+ fibheap_insert (pending, bb_order[bb->index], bb);
+ sbitmap_ones (in_pending);
while (!fibheap_empty (pending))
{
while (!fibheap_empty (worklist))
{
- bb = fibheap_extract_min (worklist);
+ bb = (basic_block) fibheap_extract_min (worklist);
RESET_BIT (in_worklist, bb->index);
if (!TEST_BIT (visited, bb->index))
{
bool changed;
+ edge_iterator ei;
SET_BIT (visited, bb->index);
/* Calculate the IN set as union of predecessor OUT sets. */
dataflow_set_clear (&VTI (bb)->in);
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
{
dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
}
changed = compute_bb_dataflow (bb);
if (changed)
{
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
if (e->dest == EXIT_BLOCK_PTR)
continue;
for (; list; list = list->next)
{
print_mem_expr (dump_file, list->decl);
- fprintf (dump_file, "+");
- fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC, list->offset);
+ fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
}
fprintf (dump_file, "\n");
}
int i;
location_chain node;
- fprintf (dump_file, " name: %s\n",
+ fprintf (dump_file, " name: %s",
IDENTIFIER_POINTER (DECL_NAME (var->decl)));
+ if (dump_flags & TDF_UID)
+ fprintf (dump_file, " D.%u\n", DECL_UID (var->decl));
+ else
+ fprintf (dump_file, "\n");
+
for (i = 0; i < var->n_var_parts; i++)
{
fprintf (dump_file, " offset %ld\n",
for (node = var->var_part[i].loc_chain; node; node = node->next)
{
fprintf (dump_file, " ");
+ if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
+ fprintf (dump_file, "[uninit]");
print_rtl_single (dump_file, node->loc);
}
}
{
int i;
- fprintf (dump_file, "Stack adjustment: ");
- fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC, set->stack_adjust);
- fprintf (dump_file, "\n");
- for (i = 1; i < FIRST_PSEUDO_REGISTER; i++)
+ fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
+ set->stack_adjust);
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
if (set->regs[i])
{
dump_attrs_list (set->regs[i]);
}
}
- dump_vars (set->vars);
+ dump_vars (shared_hash_htab (set->vars));
fprintf (dump_file, "\n");
}
}
/* Add variable VAR to the hash table of changed variables and
- if it has no locations delete it from hash table HTAB. */
+ if it has no locations delete it from SET's hash table. */
static void
-variable_was_changed (variable var, htab_t htab)
+variable_was_changed (variable var, dataflow_set *set)
{
hashval_t hash = VARIABLE_HASH_VAL (var->decl);
slot = (variable *) htab_find_slot_with_hash (changed_variables,
var->decl, hash, INSERT);
- if (htab && var->n_var_parts == 0)
+ if (set && var->n_var_parts == 0)
{
variable empty_var;
- void **old;
- empty_var = pool_alloc (var_pool);
+ empty_var = (variable) pool_alloc (var_pool);
empty_var->decl = var->decl;
empty_var->refcount = 1;
empty_var->n_var_parts = 0;
*slot = empty_var;
-
- old = htab_find_slot_with_hash (htab, var->decl, hash,
- NO_INSERT);
- if (old)
- htab_clear_slot (htab, old);
+ goto drop_var;
}
else
{
+ var->refcount++;
*slot = var;
}
}
else
{
-#ifdef ENABLE_CHECKING
- if (!htab)
- abort ();
-#endif
+ gcc_assert (set);
if (var->n_var_parts == 0)
{
- void **slot = htab_find_slot_with_hash (htab, var->decl, hash,
- NO_INSERT);
+ void **slot;
+
+ drop_var:
+ slot = shared_hash_find_slot_noinsert (set->vars, var->decl);
if (slot)
- htab_clear_slot (htab, slot);
+ {
+ if (shared_hash_shared (set->vars))
+ slot = shared_hash_find_slot_unshare (&set->vars, var->decl,
+ NO_INSERT);
+ htab_clear_slot (shared_hash_htab (set->vars), slot);
+ }
}
}
}
-/* Set the location of frame_base_decl to LOC in dataflow set SET. This
- function expects that
- frame_base_decl has already one location for offset 0 in the variable table.
- */
+/* Look for the index in VAR->var_part corresponding to OFFSET.
+ Return -1 if not found. If INSERTION_POINT is non-NULL, the
+ referenced int will be set to the index that the part has or should
+ have, if it should be inserted. */
-static void
-set_frame_base_location (dataflow_set *set, rtx loc)
+static inline int
+find_variable_location_part (variable var, HOST_WIDE_INT offset,
+ int *insertion_point)
{
- variable var;
-
- var = htab_find_with_hash (set->vars, frame_base_decl,
- VARIABLE_HASH_VAL (frame_base_decl));
-#ifdef ENABLE_CHECKING
- if (!var)
- abort ();
- if (var->n_var_parts != 1)
- abort ();
- if (var->var_part[0].offset != 0)
- abort ();
- if (!var->var_part[0].loc_chain)
- abort ();
-#endif
+ int pos, low, high;
- /* If frame_base_decl is shared unshare it first. */
- if (var->refcount > 1)
- var = unshare_variable (set, var);
+ /* Find the location part. */
+ low = 0;
+ high = var->n_var_parts;
+ while (low != high)
+ {
+ pos = (low + high) / 2;
+ if (var->var_part[pos].offset < offset)
+ low = pos + 1;
+ else
+ high = pos;
+ }
+ pos = low;
+
+ if (insertion_point)
+ *insertion_point = pos;
+
+ if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
+ return pos;
- var->var_part[0].loc_chain->loc = loc;
- var->var_part[0].cur_loc = loc;
- variable_was_changed (var, set->vars);
+ return -1;
}
/* Set the part of variable's location in the dataflow set SET. The variable
part's location by LOC. */
static void
-set_variable_part (dataflow_set *set, rtx loc, tree decl, HOST_WIDE_INT offset)
+set_variable_part (dataflow_set *set, rtx loc, tree decl, HOST_WIDE_INT offset,
+ enum var_init_status initialized, rtx set_src)
{
- int pos, low, high;
+ int pos;
location_chain node, next;
location_chain *nextp;
variable var;
- void **slot;
-
- slot = htab_find_slot_with_hash (set->vars, decl,
- VARIABLE_HASH_VAL (decl), INSERT);
- if (!*slot)
+ void **slot = shared_hash_find_slot (set->vars, decl);
+
+ if (! flag_var_tracking_uninit)
+ initialized = VAR_INIT_STATUS_INITIALIZED;
+
+ if (!slot || !*slot)
{
+ if (!slot)
+ slot = shared_hash_find_slot_unshare (&set->vars, decl, INSERT);
/* Create new variable information. */
- var = pool_alloc (var_pool);
+ var = (variable) pool_alloc (var_pool);
var->decl = decl;
var->refcount = 1;
var->n_var_parts = 1;
}
else
{
+ int inspos = 0;
+
var = (variable) *slot;
- /* Find the location part. */
- low = 0;
- high = var->n_var_parts;
- while (low != high)
- {
- pos = (low + high) / 2;
- if (var->var_part[pos].offset < offset)
- low = pos + 1;
- else
- high = pos;
- }
- pos = low;
+ pos = find_variable_location_part (var, offset, &inspos);
- if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
+ if (pos >= 0)
{
node = var->var_part[pos].loc_chain;
{
/* LOC is in the beginning of the chain so we have nothing
to do. */
+ if (node->init < initialized)
+ node->init = initialized;
+ if (set_src != NULL)
+ node->set_src = set_src;
+
return;
}
else
{
/* We have to make a copy of a shared variable. */
- if (var->refcount > 1)
- var = unshare_variable (set, var);
+ if (var->refcount > 1 || shared_hash_shared (set->vars))
+ var = unshare_variable (set, var, initialized);
}
}
else
/* We have not found the location part, new one will be created. */
/* We have to make a copy of the shared variable. */
- if (var->refcount > 1)
- var = unshare_variable (set, var);
+ if (var->refcount > 1 || shared_hash_shared (set->vars))
+ var = unshare_variable (set, var, initialized);
-#ifdef ENABLE_CHECKING
/* We track only variables whose size is <= MAX_VAR_PARTS bytes
thus there are at most MAX_VAR_PARTS different offsets. */
- if (var->n_var_parts >= MAX_VAR_PARTS)
- abort ();
-#endif
+ gcc_assert (var->n_var_parts < MAX_VAR_PARTS);
- /* We have to move the elements of array starting at index low to the
- next position. */
- for (high = var->n_var_parts; high > low; high--)
- var->var_part[high] = var->var_part[high - 1];
+ /* We have to move the elements of array starting at index
+ inspos to the next position. */
+ for (pos = var->n_var_parts; pos > inspos; pos--)
+ var->var_part[pos] = var->var_part[pos - 1];
var->n_var_parts++;
var->var_part[pos].offset = offset;
&& REGNO (node->loc) == REGNO (loc))
|| rtx_equal_p (node->loc, loc))
{
+ /* Save these values, to assign to the new node, before
+ deleting this one. */
+ if (node->init > initialized)
+ initialized = node->init;
+ if (node->set_src != NULL && set_src == NULL)
+ set_src = node->set_src;
pool_free (loc_chain_pool, node);
*nextp = next;
break;
}
/* Add the location to the beginning. */
- node = pool_alloc (loc_chain_pool);
+ node = (location_chain) pool_alloc (loc_chain_pool);
node->loc = loc;
+ node->init = initialized;
+ node->set_src = set_src;
node->next = var->var_part[pos].loc_chain;
var->var_part[pos].loc_chain = node;
if (var->var_part[pos].cur_loc == NULL)
{
var->var_part[pos].cur_loc = loc;
- variable_was_changed (var, set->vars);
+ variable_was_changed (var, set);
+ }
+}
+
+/* Remove all recorded register locations for the given variable part
+ from dataflow set SET, except for those that are identical to LOC.
+ The variable part is specified by variable's declaration DECL and
+ offset OFFSET. */
+
+static void
+clobber_variable_part (dataflow_set *set, rtx loc, tree decl,
+ HOST_WIDE_INT offset, rtx set_src)
+{
+ variable var;
+
+ if (! decl || ! DECL_P (decl))
+ return;
+
+ var = shared_hash_find (set->vars, decl);
+ if (var)
+ {
+ int pos = find_variable_location_part (var, offset, NULL);
+
+ if (pos >= 0)
+ {
+ location_chain node, next;
+
+ /* Remove the register locations from the dataflow set. */
+ next = var->var_part[pos].loc_chain;
+ for (node = next; node; node = next)
+ {
+ next = node->next;
+ if (node->loc != loc
+ && (!flag_var_tracking_uninit
+ || !set_src
+ || MEM_P (set_src)
+ || !rtx_equal_p (set_src, node->set_src)))
+ {
+ if (REG_P (node->loc))
+ {
+ attrs anode, anext;
+ attrs *anextp;
+
+ /* Remove the variable part from the register's
+ list, but preserve any other variable parts
+ that might be regarded as live in that same
+ register. */
+ anextp = &set->regs[REGNO (node->loc)];
+ for (anode = *anextp; anode; anode = anext)
+ {
+ anext = anode->next;
+ if (anode->decl == decl
+ && anode->offset == offset)
+ {
+ pool_free (attrs_pool, anode);
+ *anextp = anext;
+ }
+ else
+ anextp = &anode->next;
+ }
+ }
+
+ delete_variable_part (set, node->loc, decl, offset);
+ }
+ }
+ }
}
}
delete_variable_part (dataflow_set *set, rtx loc, tree decl,
HOST_WIDE_INT offset)
{
- int pos, low, high;
- void **slot;
-
- slot = htab_find_slot_with_hash (set->vars, decl, VARIABLE_HASH_VAL (decl),
- NO_INSERT);
- if (slot)
+ variable var = shared_hash_find (set->vars, decl);
+ if (var)
{
- variable var = (variable) *slot;
+ int pos = find_variable_location_part (var, offset, NULL);
- /* Find the location part. */
- low = 0;
- high = var->n_var_parts;
- while (low != high)
- {
- pos = (low + high) / 2;
- if (var->var_part[pos].offset < offset)
- low = pos + 1;
- else
- high = pos;
- }
- pos = low;
-
- if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
+ if (pos >= 0)
{
location_chain node, next;
location_chain *nextp;
bool changed;
- if (var->refcount > 1)
+ if (var->refcount > 1 || shared_hash_shared (set->vars))
{
/* If the variable contains the location part we have to
make a copy of the variable. */
&& REGNO (node->loc) == REGNO (loc))
|| rtx_equal_p (node->loc, loc))
{
- var = unshare_variable (set, var);
+ var = unshare_variable (set, var,
+ VAR_INIT_STATUS_UNKNOWN);
break;
}
}
}
}
if (changed)
- variable_was_changed (var, set->vars);
+ variable_was_changed (var, set);
}
}
}
rtx insn = ((emit_note_data *)data)->insn;
enum emit_note_where where = ((emit_note_data *)data)->where;
rtx note;
- int i;
+ int i, j, n_var_parts;
bool complete;
+ enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
HOST_WIDE_INT last_limit;
tree type_size_unit;
+ HOST_WIDE_INT offsets[MAX_VAR_PARTS];
+ rtx loc[MAX_VAR_PARTS];
-#ifdef ENABLE_CHECKING
- if (!var->decl)
- abort ();
-#endif
+ gcc_assert (var->decl);
complete = true;
last_limit = 0;
+ n_var_parts = 0;
for (i = 0; i < var->n_var_parts; i++)
{
+ enum machine_mode mode, wider_mode;
+
if (last_limit < var->var_part[i].offset)
{
complete = false;
break;
}
- last_limit
- = (var->var_part[i].offset
- + GET_MODE_SIZE (GET_MODE (var->var_part[i].loc_chain->loc)));
+ else if (last_limit > var->var_part[i].offset)
+ continue;
+ offsets[n_var_parts] = var->var_part[i].offset;
+ loc[n_var_parts] = var->var_part[i].loc_chain->loc;
+ mode = GET_MODE (loc[n_var_parts]);
+ initialized = var->var_part[i].loc_chain->init;
+ last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
+
+ /* Attempt to merge adjacent registers or memory. */
+ wider_mode = GET_MODE_WIDER_MODE (mode);
+ for (j = i + 1; j < var->n_var_parts; j++)
+ if (last_limit <= var->var_part[j].offset)
+ break;
+ if (j < var->n_var_parts
+ && wider_mode != VOIDmode
+ && GET_CODE (loc[n_var_parts])
+ == GET_CODE (var->var_part[j].loc_chain->loc)
+ && mode == GET_MODE (var->var_part[j].loc_chain->loc)
+ && last_limit == var->var_part[j].offset)
+ {
+ rtx new_loc = NULL;
+ rtx loc2 = var->var_part[j].loc_chain->loc;
+
+ if (REG_P (loc[n_var_parts])
+ && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
+ == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
+ && end_hard_regno (mode, REGNO (loc[n_var_parts]))
+ == REGNO (loc2))
+ {
+ if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
+ new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
+ mode, 0);
+ else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
+ new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
+ if (new_loc)
+ {
+ if (!REG_P (new_loc)
+ || REGNO (new_loc) != REGNO (loc[n_var_parts]))
+ new_loc = NULL;
+ else
+ REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
+ }
+ }
+ else if (MEM_P (loc[n_var_parts])
+ && GET_CODE (XEXP (loc2, 0)) == PLUS
+ && REG_P (XEXP (XEXP (loc2, 0), 0))
+ && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
+ {
+ if ((REG_P (XEXP (loc[n_var_parts], 0))
+ && rtx_equal_p (XEXP (loc[n_var_parts], 0),
+ XEXP (XEXP (loc2, 0), 0))
+ && INTVAL (XEXP (XEXP (loc2, 0), 1))
+ == GET_MODE_SIZE (mode))
+ || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
+ && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
+ && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
+ XEXP (XEXP (loc2, 0), 0))
+ && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
+ + GET_MODE_SIZE (mode)
+ == INTVAL (XEXP (XEXP (loc2, 0), 1))))
+ new_loc = adjust_address_nv (loc[n_var_parts],
+ wider_mode, 0);
+ }
+
+ if (new_loc)
+ {
+ loc[n_var_parts] = new_loc;
+ mode = wider_mode;
+ last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
+ i = j;
+ }
+ }
+ ++n_var_parts;
}
type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (var->decl));
if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
else
note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
+ if (! flag_var_tracking_uninit)
+ initialized = VAR_INIT_STATUS_INITIALIZED;
+
if (!complete)
{
NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
- NULL_RTX);
+ NULL_RTX, (int) initialized);
}
- else if (var->n_var_parts == 1)
+ else if (n_var_parts == 1)
{
rtx expr_list
- = gen_rtx_EXPR_LIST (VOIDmode,
- var->var_part[0].loc_chain->loc,
- GEN_INT (var->var_part[0].offset));
+ = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
- expr_list);
+ expr_list,
+ (int) initialized);
}
- else if (var->n_var_parts)
+ else if (n_var_parts)
{
- rtx argp[MAX_VAR_PARTS];
rtx parallel;
- for (i = 0; i < var->n_var_parts; i++)
- argp[i] = gen_rtx_EXPR_LIST (VOIDmode, var->var_part[i].loc_chain->loc,
- GEN_INT (var->var_part[i].offset));
+ for (i = 0; i < n_var_parts; i++)
+ loc[i]
+ = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
+
parallel = gen_rtx_PARALLEL (VOIDmode,
- gen_rtvec_v (var->n_var_parts, argp));
+ gen_rtvec_v (n_var_parts, loc));
NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, var->decl,
- parallel);
+ parallel,
+ (int) initialized);
}
htab_clear_slot (changed_variables, varp);
- /* When there are no location parts the variable has been already
- removed from hash table and a new empty variable was created.
- Free the empty variable. */
- if (var->n_var_parts == 0)
- {
- pool_free (var_pool, var);
- }
-
/* Continue traversing the hash table. */
return 1;
}
variable old_var, new_var;
old_var = *(variable *) slot;
- new_var = htab_find_with_hash (new_vars, old_var->decl,
+ new_var = (variable) htab_find_with_hash (new_vars, old_var->decl,
VARIABLE_HASH_VAL (old_var->decl));
if (!new_var)
/* Variable has disappeared. */
variable empty_var;
- empty_var = pool_alloc (var_pool);
+ empty_var = (variable) pool_alloc (var_pool);
empty_var->decl = old_var->decl;
- empty_var->refcount = 1;
+ empty_var->refcount = 0;
empty_var->n_var_parts = 0;
variable_was_changed (empty_var, NULL);
}
variable old_var, new_var;
new_var = *(variable *) slot;
- old_var = htab_find_with_hash (old_vars, new_var->decl,
+ old_var = (variable) htab_find_with_hash (old_vars, new_var->decl,
VARIABLE_HASH_VAL (new_var->decl));
if (!old_var)
{
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
dataflow_set *new_set)
{
- htab_traverse (old_set->vars, emit_notes_for_differences_1, new_set->vars);
- htab_traverse (new_set->vars, emit_notes_for_differences_2, old_set->vars);
+ htab_traverse (shared_hash_htab (old_set->vars),
+ emit_notes_for_differences_1,
+ shared_hash_htab (new_set->vars));
+ htab_traverse (shared_hash_htab (new_set->vars),
+ emit_notes_for_differences_2,
+ shared_hash_htab (old_set->vars));
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN);
}
int i;
dataflow_set set;
- dataflow_set_init (&set, htab_elements (VTI (bb)->in.vars) + 3);
+ dataflow_set_init (&set);
dataflow_set_copy (&set, &VTI (bb)->in);
for (i = 0; i < VTI (bb)->n_mos; i++)
break;
case MO_USE:
+ {
+ rtx loc = VTI (bb)->mos[i].u.loc;
+
+ if (REG_P (loc))
+ var_reg_set (&set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
+ else
+ var_mem_set (&set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
+
+ emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+ }
+ break;
+
case MO_SET:
{
rtx loc = VTI (bb)->mos[i].u.loc;
+ rtx set_src = NULL;
+
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
if (REG_P (loc))
- var_reg_delete_and_set (&set, loc);
+ var_reg_delete_and_set (&set, loc, true, VAR_INIT_STATUS_INITIALIZED,
+ set_src);
else
- var_mem_delete_and_set (&set, loc);
+ var_mem_delete_and_set (&set, loc, true, VAR_INIT_STATUS_INITIALIZED,
+ set_src);
+
+ emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
+ }
+ break;
- if (VTI (bb)->mos[i].type == MO_USE)
- emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN);
+ case MO_COPY:
+ {
+ rtx loc = VTI (bb)->mos[i].u.loc;
+ enum var_init_status src_status;
+ rtx set_src = NULL;
+
+ if (GET_CODE (loc) == SET)
+ {
+ set_src = SET_SRC (loc);
+ loc = SET_DEST (loc);
+ }
+
+ src_status = find_src_status (&set, set_src);
+ set_src = find_src_set_src (&set, set_src);
+
+ if (REG_P (loc))
+ var_reg_delete_and_set (&set, loc, false, src_status, set_src);
else
- emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+ var_mem_delete_and_set (&set, loc, false, src_status, set_src);
+
+ emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
}
break;
case MO_USE_NO_VAR:
- case MO_CLOBBER:
{
rtx loc = VTI (bb)->mos[i].u.loc;
if (REG_P (loc))
- var_reg_delete (&set, loc);
+ var_reg_delete (&set, loc, false);
else
- var_mem_delete (&set, loc);
+ var_mem_delete (&set, loc, false);
- if (VTI (bb)->mos[i].type == MO_USE_NO_VAR)
- emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN);
- else
- emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+ emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
}
break;
- case MO_ADJUST:
+ case MO_CLOBBER:
{
- rtx base;
+ rtx loc = VTI (bb)->mos[i].u.loc;
- set.stack_adjust += VTI (bb)->mos[i].u.adjust;
- base = gen_rtx_MEM (Pmode,
- gen_rtx_PLUS (Pmode, stack_pointer_rtx,
- GEN_INT (set.stack_adjust)));
- set_frame_base_location (&set, base);
- emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN);
+ if (REG_P (loc))
+ var_reg_delete (&set, loc, true);
+ else
+ var_mem_delete (&set, loc, true);
+
+ emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN);
}
break;
+
+ case MO_ADJUST:
+ set.stack_adjust += VTI (bb)->mos[i].u.adjust;
+ break;
}
}
dataflow_set_destroy (&set);
dataflow_set *last_out;
dataflow_set empty;
-#ifdef ENABLE_CHECKING
- if (htab_elements (changed_variables))
- abort ();
-#endif
+ gcc_assert (!htab_elements (changed_variables));
/* Enable emitting notes by functions (mainly by set_variable_part and
delete_variable_part). */
emit_notes = true;
- dataflow_set_init (&empty, 7);
+ dataflow_set_init (&empty);
last_out = ∅
FOR_EACH_BB (bb)
if (MEM_ATTRS (rtl))
{
*declp = MEM_EXPR (rtl);
- *offsetp = MEM_OFFSET (rtl) ? INTVAL (MEM_OFFSET (rtl)) : 0;
+ *offsetp = INT_MEM_OFFSET (rtl);
return true;
}
}
vt_add_function_parameters (void)
{
tree parm;
- HOST_WIDE_INT stack_adjust = 0;
- if (!frame_pointer_needed)
- stack_adjust = prologue_stack_adjust ();
-
for (parm = DECL_ARGUMENTS (current_function_decl);
parm; parm = TREE_CHAIN (parm))
{
rtx decl_rtl = DECL_RTL_IF_SET (parm);
rtx incoming = DECL_INCOMING_RTL (parm);
tree decl;
+ enum machine_mode mode;
HOST_WIDE_INT offset;
dataflow_set *out;
continue;
if (!vt_get_decl_and_offset (incoming, &decl, &offset))
- if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
- continue;
+ {
+ if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
+ continue;
+ offset += byte_lowpart_offset (GET_MODE (incoming),
+ GET_MODE (decl_rtl));
+ }
if (!decl)
continue;
-#ifdef ENABLE_CHECKING
if (parm != decl)
- abort ();
-#endif
+ {
+ /* Assume that DECL_RTL was a pseudo that got spilled to
+ memory. The spill slot sharing code will force the
+ memory to reference spill_slot_decl (%sfp), so we don't
+ match above. That's ok, the pseudo must have referenced
+ the entire parameter, so just reset OFFSET. */
+ gcc_assert (decl == get_spill_slot_decl (false));
+ offset = 0;
+ }
+
+ if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
+ continue;
- incoming = eliminate_regs (incoming, 0, NULL_RTX);
- if (!frame_pointer_needed && MEM_P (incoming))
- incoming = adjust_stack_reference (incoming, -stack_adjust);
out = &VTI (ENTRY_BLOCK_PTR)->out;
if (REG_P (incoming))
{
-#ifdef ENABLE_CHECKING
- if (REGNO (incoming) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
+ incoming = var_lowpart (mode, incoming);
+ gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
attrs_list_insert (&out->regs[REGNO (incoming)],
parm, offset, incoming);
- set_variable_part (out, incoming, parm, offset);
+ set_variable_part (out, incoming, parm, offset, VAR_INIT_STATUS_INITIALIZED,
+ NULL);
}
else if (MEM_P (incoming))
{
- set_variable_part (out, incoming, parm, offset);
+ incoming = var_lowpart (mode, incoming);
+ set_variable_part (out, incoming, parm, offset,
+ VAR_INIT_STATUS_INITIALIZED, NULL);
}
}
}
FOR_EACH_BB (bb)
{
rtx insn;
- HOST_WIDE_INT pre, post;
+ HOST_WIDE_INT pre, post = 0;
/* Count the number of micro operations. */
VTI (bb)->n_mos = 0;
}
note_uses (&PATTERN (insn), count_uses_1, insn);
note_stores (PATTERN (insn), count_stores, insn);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
VTI (bb)->n_mos++;
}
}
/* Add the micro-operations to the array. */
- VTI (bb)->mos = xmalloc (VTI (bb)->n_mos
- * sizeof (struct micro_operation_def));
+ VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
VTI (bb)->n_mos = 0;
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
}
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
}
n1 = VTI (bb)->n_mos;
+ /* This will record NEXT_INSN (insn), such that we can
+ insert notes before it without worrying about any
+ notes that MO_USEs might emit after the insn. */
note_stores (PATTERN (insn), add_stores, insn);
n2 = VTI (bb)->n_mos - 1;
- /* Order the MO_SETs to be before MO_CLOBBERs. */
+ /* Order the MO_CLOBBERs to be before MO_SETs. */
while (n1 < n2)
{
- while (n1 < n2 && VTI (bb)->mos[n1].type == MO_SET)
+ while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
n1++;
- while (n1 < n2 && VTI (bb)->mos[n2].type == MO_CLOBBER)
+ while (n1 < n2 && (VTI (bb)->mos[n2].type == MO_SET
+ || VTI (bb)->mos[n2].type == MO_COPY))
n2--;
if (n1 < n2)
{
}
}
- /* Init the IN and OUT sets. */
- FOR_ALL_BB (bb)
- {
- VTI (bb)->visited = false;
- dataflow_set_init (&VTI (bb)->in, 7);
- dataflow_set_init (&VTI (bb)->out, 7);
- }
-
attrs_pool = create_alloc_pool ("attrs_def pool",
sizeof (struct attrs_def), 1024);
var_pool = create_alloc_pool ("variable_def pool",
loc_chain_pool = create_alloc_pool ("location_chain_def pool",
sizeof (struct location_chain_def),
1024);
+ shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
+ sizeof (struct shared_hash_def), 256);
+ empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
+ empty_shared_hash->refcount = 1;
+ empty_shared_hash->htab
+ = htab_create (1, variable_htab_hash, variable_htab_eq,
+ variable_htab_free);
changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
- NULL);
- vt_add_function_parameters ();
+ variable_htab_free);
- if (!frame_pointer_needed)
- {
- rtx base;
-
- /* Create fake variable for tracking stack pointer changes. */
- frame_base_decl = make_node (VAR_DECL);
- DECL_NAME (frame_base_decl) = get_identifier ("___frame_base_decl");
- TREE_TYPE (frame_base_decl) = char_type_node;
- DECL_ARTIFICIAL (frame_base_decl) = 1;
-
- /* Set its initial "location". */
- base = gen_rtx_MEM (Pmode, stack_pointer_rtx);
- set_variable_part (&VTI (ENTRY_BLOCK_PTR)->out, base, frame_base_decl, 0);
- }
- else
+ /* Init the IN and OUT sets. */
+ FOR_ALL_BB (bb)
{
- frame_base_decl = NULL;
+ VTI (bb)->visited = false;
+ dataflow_set_init (&VTI (bb)->in);
+ dataflow_set_init (&VTI (bb)->out);
}
+
+ vt_add_function_parameters ();
}
/* Free the data structures needed for variable tracking. */
dataflow_set_destroy (&VTI (bb)->out);
}
free_aux_for_blocks ();
+ htab_delete (empty_shared_hash->htab);
+ htab_delete (changed_variables);
free_alloc_pool (attrs_pool);
free_alloc_pool (var_pool);
free_alloc_pool (loc_chain_pool);
- htab_delete (changed_variables);
+ free_alloc_pool (shared_hash_pool);
+ if (vui_vec)
+ free (vui_vec);
+ vui_vec = NULL;
+ vui_allocated = 0;
}
/* The entry point to variable tracking pass. */
-void
+unsigned int
variable_tracking_main (void)
{
if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
- return;
+ return 0;
mark_dfs_back_edges ();
vt_initialize ();
if (!vt_stack_adjustments ())
{
vt_finalize ();
- return;
+ return 0;
}
}
vt_find_locations ();
vt_emit_notes ();
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
dump_dataflow_sets ();
- dump_flow_info (dump_file);
+ dump_flow_info (dump_file, dump_flags);
}
vt_finalize ();
+ return 0;
+}
+\f
+static bool
+gate_handle_var_tracking (void)
+{
+ return (flag_var_tracking);
}
+
+
+
+struct rtl_opt_pass pass_variable_tracking =
+{
+ {
+ RTL_PASS,
+ "vartrack", /* name */
+ gate_handle_var_tracking, /* gate */
+ variable_tracking_main, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_VAR_TRACKING, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */
+ }
+};