/* Output Dwarf2 format symbol table information from GCC.
Copyright (C) 1992, 1993, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
- 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+ 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Gary Funck (gary@intrepid.com).
Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
Extensively modified by Jason Merrill (jason@cygnus.com).
static void output_cfi_directive (dw_cfi_ref);
static void output_call_frame_info (int);
static void dwarf2out_note_section_used (void);
-static void dwarf2out_stack_adjust (rtx, bool);
-static void dwarf2out_args_size_adjust (HOST_WIDE_INT, const char *);
static void flush_queued_reg_saves (void);
static bool clobbers_queued_reg_save (const_rtx);
static void dwarf2out_frame_debug_expr (rtx, const char *);
/* Emit the state save. */
emit_cfa_remember = false;
- cfi_remember = new_cfi ();
+ cfi_remember = new_cfi ();
cfi_remember->dw_cfi_opc = DW_CFA_remember_state;
add_fde_cfi (label, cfi_remember);
}
if (loc.reg == old_cfa.reg && !loc.indirect)
{
/* Construct a "DW_CFA_def_cfa_offset <offset>" instruction, indicating
- the CFA register did not change but the offset did. The data
+ the CFA register did not change but the offset did. The data
factoring for DW_CFA_def_cfa_offset_sf happens in output_cfi, or
in the assembler via the .cfi_def_cfa_offset directive. */
if (loc.offset < 0)
add_fde_cfi (label, cfi);
}
-/* Add a CFI to update the running total of the size of arguments
- pushed onto the stack. */
-
-void
-dwarf2out_args_size (const char *label, HOST_WIDE_INT size)
-{
- dw_cfi_ref cfi;
-
- if (size == old_args_size)
- return;
-
- old_args_size = size;
-
- cfi = new_cfi ();
- cfi->dw_cfi_opc = DW_CFA_GNU_args_size;
- cfi->dw_cfi_oprnd1.dw_cfi_offset = size;
- add_fde_cfi (label, cfi);
-}
-
/* Entry point for saving a register to the stack. REG is the GCC register
number. LABEL and OFFSET are passed to reg_save. */
VEC_free (rtx, heap, next);
}
+/* Add a CFI to update the running total of the size of arguments
+ pushed onto the stack. */
+
+static void
+dwarf2out_args_size (const char *label, HOST_WIDE_INT size)
+{
+ dw_cfi_ref cfi;
+
+ if (size == old_args_size)
+ return;
+
+ old_args_size = size;
+
+ cfi = new_cfi ();
+ cfi->dw_cfi_opc = DW_CFA_GNU_args_size;
+ cfi->dw_cfi_oprnd1.dw_cfi_offset = size;
+ add_fde_cfi (label, cfi);
+}
+
+/* Record a stack adjustment of OFFSET bytes. */
+
+static void
+dwarf2out_stack_adjust (HOST_WIDE_INT offset, const char *label)
+{
+ if (cfa.reg == STACK_POINTER_REGNUM)
+ cfa.offset += offset;
+
+ if (cfa_store.reg == STACK_POINTER_REGNUM)
+ cfa_store.offset += offset;
+
+ if (ACCUMULATE_OUTGOING_ARGS)
+ return;
+
+#ifndef STACK_GROWS_DOWNWARD
+ offset = -offset;
+#endif
+
+ args_size += offset;
+ if (args_size < 0)
+ args_size = 0;
+
+ def_cfa_1 (label, &cfa);
+ if (flag_asynchronous_unwind_tables)
+ dwarf2out_args_size (label, args_size);
+}
/* Check INSN to see if it looks like a push or a stack adjustment, and
- make a note of it if it does. EH uses this information to find out how
- much extra space it needs to pop off the stack. */
+ make a note of it if it does. EH uses this information to find out
+ how much extra space it needs to pop off the stack. */
static void
-dwarf2out_stack_adjust (rtx insn, bool after_p)
+dwarf2out_notice_stack_adjust (rtx insn, bool after_p)
{
HOST_WIDE_INT offset;
const char *label;
return;
label = dwarf2out_cfi_label (false);
- dwarf2out_args_size_adjust (offset, label);
-}
-
-/* Adjust args_size based on stack adjustment OFFSET. */
-
-static void
-dwarf2out_args_size_adjust (HOST_WIDE_INT offset, const char *label)
-{
- if (cfa.reg == STACK_POINTER_REGNUM)
- cfa.offset += offset;
-
- if (cfa_store.reg == STACK_POINTER_REGNUM)
- cfa_store.offset += offset;
-
-#ifndef STACK_GROWS_DOWNWARD
- offset = -offset;
-#endif
-
- args_size += offset;
- if (args_size < 0)
- args_size = 0;
-
- def_cfa_1 (label, &cfa);
- if (flag_asynchronous_unwind_tables)
- dwarf2out_args_size (label, args_size);
+ dwarf2out_stack_adjust (offset, label);
}
#endif
addr = XEXP (set, 0);
gcc_assert (MEM_P (addr));
addr = XEXP (addr, 0);
-
+
/* As documented, only consider extremely simple addresses. */
switch (GET_CODE (addr))
{
HOST_WIDE_INT offset = stack_adjust_offset (elem, args_size, 0);
if (offset != 0)
- dwarf2out_args_size_adjust (offset, label);
+ dwarf2out_stack_adjust (offset, label);
}
}
return;
if (!NONJUMP_INSN_P (insn) || clobbers_queued_reg_save (insn))
flush_queued_reg_saves ();
- if (! RTX_FRAME_RELATED_P (insn))
+ if (!RTX_FRAME_RELATED_P (insn))
{
+ /* ??? This should be done unconditionally since stack adjustments
+ matter if the stack pointer is not the CFA register anymore but
+ is still used to save registers. */
if (!ACCUMULATE_OUTGOING_ARGS)
- dwarf2out_stack_adjust (insn, after_p);
+ dwarf2out_notice_stack_adjust (insn, after_p);
return;
}
void
dwarf2out_frame_debug_restore_state (void)
{
- dw_cfi_ref cfi = new_cfi ();
+ dw_cfi_ref cfi = new_cfi ();
const char *label = dwarf2out_cfi_label (false);
cfi->dw_cfi_opc = DW_CFA_restore_state;
static void dwarf2out_init (const char *);
static void dwarf2out_finish (const char *);
+static void dwarf2out_assembly_start (void);
static void dwarf2out_define (unsigned int, const char *);
static void dwarf2out_undef (unsigned int, const char *);
static void dwarf2out_start_source_file (unsigned, const char *);
static void dwarf2out_var_location (rtx);
static void dwarf2out_direct_call (tree);
static void dwarf2out_virtual_call_token (tree, int);
+static void dwarf2out_copy_call_info (rtx, rtx);
static void dwarf2out_virtual_call (int);
static void dwarf2out_begin_function (tree);
static void dwarf2out_set_name (tree, tree);
{
dwarf2out_init,
dwarf2out_finish,
+ dwarf2out_assembly_start,
dwarf2out_define,
dwarf2out_undef,
dwarf2out_start_source_file,
dwarf2out_switch_text_section,
dwarf2out_direct_call,
dwarf2out_virtual_call_token,
+ dwarf2out_copy_call_info,
dwarf2out_virtual_call,
dwarf2out_set_name,
1 /* start_end_main_source_file */
static int decl_loc_table_eq (const void *, const void *);
static var_loc_list *lookup_decl_loc (const_tree);
static void equate_decl_number_to_die (tree, dw_die_ref);
-static void add_var_loc_to_decl (tree, struct var_loc_node *);
+static struct var_loc_node *add_var_loc_to_decl (tree, rtx);
static void print_spaces (FILE *);
static void print_die (dw_die_ref, FILE *);
static void print_dwarf_line_table (FILE *);
static void output_aranges (void);
static unsigned int add_ranges_num (int);
static unsigned int add_ranges (const_tree);
-static unsigned int add_ranges_by_labels (const char *, const char *);
+static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
+ bool *);
static void output_ranges (void);
static void output_line_info (void);
static void output_file_names (void);
static void splice_child_die (dw_die_ref, dw_die_ref);
static int file_info_cmp (const void *, const void *);
static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
- const char *, const char *, unsigned);
-static void add_loc_descr_to_loc_list (dw_loc_list_ref *, dw_loc_descr_ref,
- const char *, const char *,
- const char *);
+ const char *, const char *);
static void output_loc_list (dw_loc_list_ref);
static char *gen_internal_sym (const char *);
return a->dw_attr_val.v.val_loc_list;
}
+static inline dw_loc_list_ref *
+AT_loc_list_ptr (dw_attr_ref a)
+{
+ gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
+ return &a->dw_attr_val.v.val_loc_list;
+}
+
/* Add an address constant attribute value to a DIE. */
static inline void
/* Add a variable location node to the linked list for DECL. */
-static void
-add_var_loc_to_decl (tree decl, struct var_loc_node *loc)
+static struct var_loc_node *
+add_var_loc_to_decl (tree decl, rtx loc_note)
{
unsigned int decl_id = DECL_UID (decl);
var_loc_list *temp;
void **slot;
+ struct var_loc_node *loc = NULL;
slot = htab_find_slot_with_hash (decl_loc_table, decl, decl_id, INSERT);
if (*slot == NULL)
and either both or neither of the locations is uninitialized,
we have nothing to do. */
if ((!rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->last->var_loc_note),
- NOTE_VAR_LOCATION_LOC (loc->var_loc_note)))
+ NOTE_VAR_LOCATION_LOC (loc_note)))
|| ((NOTE_VAR_LOCATION_STATUS (temp->last->var_loc_note)
- != NOTE_VAR_LOCATION_STATUS (loc->var_loc_note))
+ != NOTE_VAR_LOCATION_STATUS (loc_note))
&& ((NOTE_VAR_LOCATION_STATUS (temp->last->var_loc_note)
== VAR_INIT_STATUS_UNINITIALIZED)
- || (NOTE_VAR_LOCATION_STATUS (loc->var_loc_note)
+ || (NOTE_VAR_LOCATION_STATUS (loc_note)
== VAR_INIT_STATUS_UNINITIALIZED))))
{
/* Add LOC to the end of list and update LAST. */
+ loc = GGC_CNEW (struct var_loc_node);
temp->last->next = loc;
temp->last = loc;
}
}
- /* Do not add empty location to the beginning of the list. */
- else if (NOTE_VAR_LOCATION_LOC (loc->var_loc_note) != NULL_RTX)
+ else
{
+ loc = GGC_CNEW (struct var_loc_node);
temp->first = loc;
temp->last = loc;
}
+ return loc;
}
\f
/* Keep track of the number of spaces used to indent the
free (entry);
}
-/* Copy DIE and its ancestors, up to, but not including, the compile unit
+/* Copy DIE and its ancestors, up to, but not including, the compile unit
or type unit entry, to a new tree. Adds the new tree to UNIT and returns
a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
to check if the ancestor has already been copied into UNIT. */
}
/* Return a new location list, given the begin and end range, and the
- expression. gensym tells us whether to generate a new internal symbol for
- this location list node, which is done for the head of the list only. */
+ expression. */
static inline dw_loc_list_ref
new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
- const char *section, unsigned int gensym)
+ const char *section)
{
dw_loc_list_ref retlist = GGC_CNEW (dw_loc_list_node);
retlist->end = end;
retlist->expr = expr;
retlist->section = section;
- if (gensym)
- retlist->ll_symbol = gen_internal_sym ("LLST");
return retlist;
}
-/* Add a location description expression to a location list. */
+/* Generate a new internal symbol for this location list node, if it
+ hasn't got one yet. */
static inline void
-add_loc_descr_to_loc_list (dw_loc_list_ref *list_head, dw_loc_descr_ref descr,
- const char *begin, const char *end,
- const char *section)
+gen_llsym (dw_loc_list_ref list)
{
- dw_loc_list_ref *d;
-
- /* Find the end of the chain. */
- for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
- ;
-
- /* Add a new location list node to the list. */
- *d = new_loc_list (descr, begin, end, section, 0);
+ gcc_assert (!list->ll_symbol);
+ list->ll_symbol = gen_internal_sym ("LLST");
}
/* Output the location list given to us. */
add_pubname (tree decl, dw_die_ref die)
{
if (TREE_PUBLIC (decl))
- add_pubname_string (dwarf2_name (decl, 1), die);
+ {
+ const char *name = dwarf2_name (decl, 1);
+ if (name)
+ add_pubname_string (name, die);
+ }
}
/* Add a new entry to .debug_pubtypes if appropriate. */
}
}
else
- e.name = xstrdup (dwarf2_name (decl, 1));
+ {
+ e.name = dwarf2_name (decl, 1);
+ if (e.name)
+ e.name = xstrdup (e.name);
+ }
/* If we don't have a name for the type, there's no point in adding
it to the table. */
/* Add a new entry to .debug_ranges corresponding to a pair of
labels. */
-static unsigned int
-add_ranges_by_labels (const char *begin, const char *end)
+static void
+add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
+ bool *added)
{
unsigned int in_use = ranges_by_label_in_use;
+ unsigned int offset;
if (in_use == ranges_by_label_allocated)
{
ranges_by_label[in_use].end = end;
ranges_by_label_in_use = in_use + 1;
- return add_ranges_num (-(int)in_use - 1);
+ offset = add_ranges_num (-(int)in_use - 1);
+ if (!*added)
+ {
+ add_AT_range_list (die, DW_AT_ranges, offset);
+ *added = true;
+ }
}
static void
int ndirs;
int idx_offset;
int i;
- int idx;
if (!last_emitted_file)
{
}
/* Emit the directory name table. */
- idx = 1;
idx_offset = dirs[0].length > 0 ? 1 : 0;
for (i = 1 - idx_offset; i < ndirs; i++)
dw2_asm_output_nstring (dirs[i].path,
add_AT_unsigned (mod_type_die, DW_AT_byte_size,
simple_type_size_in_bits (type) / BITS_PER_UNIT);
item_type = TREE_TYPE (type);
+ if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (item_type)))
+ add_AT_unsigned (mod_type_die, DW_AT_address_class,
+ TYPE_ADDR_SPACE (item_type));
}
else if (code == REFERENCE_TYPE)
{
add_AT_unsigned (mod_type_die, DW_AT_byte_size,
simple_type_size_in_bits (type) / BITS_PER_UNIT);
item_type = TREE_TYPE (type);
+ if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (item_type)))
+ add_AT_unsigned (mod_type_die, DW_AT_address_class,
+ TYPE_ADDR_SPACE (item_type));
}
else if (code == INTEGER_TYPE
&& TREE_TYPE (type) != NULL_TREE
/* The DW_AT_GNU_template_name attribute of the DIE must be set
to the name of the argument. */
name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
- add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
+ if (name)
+ add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
}
if (TREE_CODE (parm) == PARM_DECL)
case POST_INC:
case POST_DEC:
case POST_MODIFY:
- /* POST_INC and POST_DEC can be handled just like a SUBREG. So we
- just fall into the SUBREG code. */
-
- /* ... fall through ... */
+ return mem_loc_descriptor (XEXP (rtl, 0), mode, initialized);
case SUBREG:
/* The case of a subreg may arise when we have a local (register)
up an entire register. For now, just assume that it is
legitimate to make the Dwarf info refer to the whole register which
contains the given subreg. */
- rtl = XEXP (rtl, 0);
+ if (!subreg_lowpart_p (rtl))
+ break;
+ rtl = SUBREG_REG (rtl);
if (GET_MODE_SIZE (GET_MODE (rtl)) > DWARF2_ADDR_SIZE)
break;
+ if (GET_MODE_CLASS (GET_MODE (rtl)) != MODE_INT)
+ break;
mem_loc_result = mem_loc_descriptor (rtl, mode, initialized);
break;
if (mem_loc_result == NULL)
mem_loc_result = tls_mem_loc_descriptor (rtl);
if (mem_loc_result != 0)
- add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
+ {
+ if (GET_MODE_SIZE (GET_MODE (rtl)) > DWARF2_ADDR_SIZE)
+ {
+ expansion_failed (NULL_TREE, rtl, "DWARF address size mismatch");
+ return 0;
+ }
+ else if (GET_MODE_SIZE (GET_MODE (rtl)) == DWARF2_ADDR_SIZE)
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
+ else
+ add_loc_descr (&mem_loc_result,
+ new_loc_descr (DW_OP_deref_size,
+ GET_MODE_SIZE (GET_MODE (rtl)), 0));
+ }
+ else
+ {
+ rtx new_rtl = avoid_constant_pool_reference (rtl);
+ if (new_rtl != rtl)
+ return mem_loc_descriptor (new_rtl, mode, initialized);
+ }
break;
case LO_SUM:
pool. */
case CONST:
case SYMBOL_REF:
- /* Alternatively, the symbol in the constant pool might be referenced
- by a different symbol. */
- if (GET_CODE (rtl) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (rtl))
- {
- bool marked;
- rtx tmp = get_pool_constant_mark (rtl, &marked);
-
- if (GET_CODE (tmp) == SYMBOL_REF)
- {
- rtl = tmp;
- if (CONSTANT_POOL_ADDRESS_P (tmp))
- get_pool_constant_mark (tmp, &marked);
- else
- marked = true;
- }
-
- /* If all references to this pool constant were optimized away,
- it was not output and thus we can't represent it.
- FIXME: might try to use DW_OP_const_value here, though
- DW_OP_piece complicates it. */
- if (!marked)
- {
- expansion_failed (NULL_TREE, rtl,
- "Constant was removed from constant pool.\n");
- return 0;
- }
- }
-
if (GET_CODE (rtl) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
{
op = DW_OP_div;
goto do_binop;
- case MOD:
+ case UMOD:
op = DW_OP_mod;
goto do_binop;
add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
break;
+ case MOD:
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ mem_loc_result = op0;
+ add_loc_descr (&mem_loc_result, op1);
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
+ break;
+
case NOT:
op = DW_OP_not;
goto do_unop;
goto do_scompare;
do_scompare:
- if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) != MODE_INT
- || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
- || GET_MODE (XEXP (rtl, 0)) != GET_MODE (XEXP (rtl, 1)))
+ if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
+ || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 1))) > DWARF2_ADDR_SIZE)
break;
+ else
+ {
+ enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
- op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
- VAR_INIT_STATUS_INITIALIZED);
- op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
- VAR_INIT_STATUS_INITIALIZED);
+ if (op_mode == VOIDmode)
+ op_mode = GET_MODE (XEXP (rtl, 1));
+ if (op_mode != VOIDmode && GET_MODE_CLASS (op_mode) != MODE_INT)
+ break;
- if (op0 == 0 || op1 == 0)
- break;
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
- if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) < DWARF2_ADDR_SIZE)
- {
- int shift = DWARF2_ADDR_SIZE
- - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
- shift *= BITS_PER_UNIT;
- add_loc_descr (&op0, int_loc_descriptor (shift));
- add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
- else
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ if (op_mode != VOIDmode
+ && GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
{
- add_loc_descr (&op1, int_loc_descriptor (shift));
- add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
+ int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode);
+ shift *= BITS_PER_UNIT;
+ /* For eq/ne, if the operands are known to be zero-extended,
+ there is no need to do the fancy shifting up. */
+ if (op == DW_OP_eq || op == DW_OP_ne)
+ {
+ dw_loc_descr_ref last0, last1;
+ for (last0 = op0;
+ last0->dw_loc_next != NULL;
+ last0 = last0->dw_loc_next)
+ ;
+ for (last1 = op1;
+ last1->dw_loc_next != NULL;
+ last1 = last1->dw_loc_next)
+ ;
+ /* deref_size zero extends, and for constants we can check
+ whether they are zero extended or not. */
+ if (((last0->dw_loc_opc == DW_OP_deref_size
+ && last0->dw_loc_oprnd1.v.val_int
+ <= GET_MODE_SIZE (op_mode))
+ || (CONST_INT_P (XEXP (rtl, 0))
+ && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
+ == (INTVAL (XEXP (rtl, 0))
+ & GET_MODE_MASK (op_mode))))
+ && ((last1->dw_loc_opc == DW_OP_deref_size
+ && last1->dw_loc_oprnd1.v.val_int
+ <= GET_MODE_SIZE (op_mode))
+ || (CONST_INT_P (XEXP (rtl, 1))
+ && (unsigned HOST_WIDE_INT)
+ INTVAL (XEXP (rtl, 1))
+ == (INTVAL (XEXP (rtl, 1))
+ & GET_MODE_MASK (op_mode)))))
+ goto do_compare;
+ }
+ add_loc_descr (&op0, int_loc_descriptor (shift));
+ add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
+ else
+ {
+ add_loc_descr (&op1, int_loc_descriptor (shift));
+ add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
+ }
}
}
goto do_ucompare;
do_ucompare:
- if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) != MODE_INT
- || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
- || GET_MODE (XEXP (rtl, 0)) != GET_MODE (XEXP (rtl, 1)))
+ if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
+ || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 1))) > DWARF2_ADDR_SIZE)
break;
+ else
+ {
+ enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
- op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
- VAR_INIT_STATUS_INITIALIZED);
- op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
- VAR_INIT_STATUS_INITIALIZED);
+ if (op_mode == VOIDmode)
+ op_mode = GET_MODE (XEXP (rtl, 1));
+ if (op_mode != VOIDmode && GET_MODE_CLASS (op_mode) != MODE_INT)
+ break;
- if (op0 == 0 || op1 == 0)
- break;
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
- if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) < DWARF2_ADDR_SIZE)
- {
- HOST_WIDE_INT mask = GET_MODE_MASK (GET_MODE (XEXP (rtl, 0)));
- add_loc_descr (&op0, int_loc_descriptor (mask));
- add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
- else
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ if (op_mode != VOIDmode
+ && GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
{
- add_loc_descr (&op1, int_loc_descriptor (mask));
- add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
+ HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
+ dw_loc_descr_ref last0, last1;
+ for (last0 = op0;
+ last0->dw_loc_next != NULL;
+ last0 = last0->dw_loc_next)
+ ;
+ for (last1 = op1;
+ last1->dw_loc_next != NULL;
+ last1 = last1->dw_loc_next)
+ ;
+ if (CONST_INT_P (XEXP (rtl, 0)))
+ op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
+ /* deref_size zero extends, so no need to mask it again. */
+ else if (last0->dw_loc_opc != DW_OP_deref_size
+ || last0->dw_loc_oprnd1.v.val_int
+ > GET_MODE_SIZE (op_mode))
+ {
+ add_loc_descr (&op0, int_loc_descriptor (mask));
+ add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
+ }
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
+ /* deref_size zero extends, so no need to mask it again. */
+ else if (last1->dw_loc_opc != DW_OP_deref_size
+ || last1->dw_loc_oprnd1.v.val_int
+ > GET_MODE_SIZE (op_mode))
+ {
+ add_loc_descr (&op1, int_loc_descriptor (mask));
+ add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
+ }
}
- }
- else
- {
- HOST_WIDE_INT bias = 1;
- bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
- add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
- + INTVAL (XEXP (rtl, 1)));
else
- add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
+ {
+ HOST_WIDE_INT bias = 1;
+ bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
+ add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
+ + INTVAL (XEXP (rtl, 1)));
+ else
+ add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
+ bias, 0));
+ }
}
goto do_compare;
if (BITS_BIG_ENDIAN)
shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
- shift - size;
- add_loc_descr (&mem_loc_result,
- int_loc_descriptor (DWARF2_ADDR_SIZE - shift - size));
- add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
- add_loc_descr (&mem_loc_result,
- int_loc_descriptor (DWARF2_ADDR_SIZE - size));
- add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
+ if (shift + size != (int) DWARF2_ADDR_SIZE)
+ {
+ add_loc_descr (&mem_loc_result,
+ int_loc_descriptor (DWARF2_ADDR_SIZE
+ - shift - size));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
+ }
+ if (size != (int) DWARF2_ADDR_SIZE)
+ {
+ add_loc_descr (&mem_loc_result,
+ int_loc_descriptor (DWARF2_ADDR_SIZE - size));
+ add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
+ }
}
break;
case US_MULT:
case SS_DIV:
case US_DIV:
+ case SS_PLUS:
+ case US_PLUS:
+ case SS_MINUS:
+ case US_MINUS:
+ case SS_NEG:
+ case US_NEG:
+ case SS_ABS:
+ case SS_ASHIFT:
+ case US_ASHIFT:
+ case SS_TRUNCATE:
+ case US_TRUNCATE:
case UDIV:
- case UMOD:
case UNORDERED:
case ORDERED:
case UNEQ:
initialized);
if (loc_result == NULL)
loc_result = tls_mem_loc_descriptor (rtl);
+ if (loc_result == NULL)
+ {
+ rtx new_rtl = avoid_constant_pool_reference (rtl);
+ if (new_rtl != rtl)
+ loc_result = loc_descriptor (new_rtl, mode, initialized);
+ }
break;
case CONCAT:
break;
case CONST_DOUBLE:
+ if (mode == VOIDmode)
+ mode = GET_MODE (rtl);
+
if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
{
+ gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
+
/* Note that a CONST_DOUBLE rtx could represent either an integer
or a floating-point constant. A CONST_DOUBLE is used whenever
the constant requires more than one word in order to be
adequately represented. We output CONST_DOUBLEs as blocks. */
- if (GET_MODE (rtl) != VOIDmode)
- mode = GET_MODE (rtl);
-
loc_result = new_loc_descr (DW_OP_implicit_value,
GET_MODE_SIZE (mode), 0);
if (SCALAR_FLOAT_MODE_P (mode))
break;
case CONST_VECTOR:
+ if (mode == VOIDmode)
+ mode = GET_MODE (rtl);
+
if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
{
unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
unsigned int i;
unsigned char *p;
- mode = GET_MODE (rtl);
+ gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
switch (GET_MODE_CLASS (mode))
{
case MODE_VECTOR_INT:
if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
&& (dwarf_version >= 4 || !dwarf_strict))
{
- loc_result = new_loc_descr (DW_OP_implicit_value,
- DWARF2_ADDR_SIZE, 0);
- loc_result->dw_loc_oprnd2.val_class = dw_val_class_addr;
- loc_result->dw_loc_oprnd2.v.val_addr = rtl;
+ loc_result = new_loc_descr (DW_OP_addr, 0, 0);
+ loc_result->dw_loc_oprnd1.val_class = dw_val_class_addr;
+ loc_result->dw_loc_oprnd1.v.val_addr = rtl;
+ add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
VEC_safe_push (rtx, gc, used_rtx_array, rtl);
}
break;
&& DECL_BY_REFERENCE (decl));
}
-/* Return single element location list containing loc descr REF. */
-
-static dw_loc_list_ref
-single_element_loc_list (dw_loc_descr_ref ref)
-{
- return new_loc_list (ref, NULL, NULL, NULL, 0);
-}
-
/* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
for VARLOC. */
mode = GET_MODE (varloc);
if (MEM_P (varloc))
{
- varloc = XEXP (varloc, 0);
- have_address = 1;
+ rtx addr = XEXP (varloc, 0);
+ descr = mem_loc_descriptor (addr, mode, initialized);
+ if (descr)
+ have_address = 1;
+ else
+ {
+ rtx x = avoid_constant_pool_reference (varloc);
+ if (x != varloc)
+ descr = mem_loc_descriptor (x, mode, initialized);
+ }
}
- descr = mem_loc_descriptor (varloc, mode, initialized);
+ else
+ descr = mem_loc_descriptor (varloc, mode, initialized);
}
else
return 0;
return descr;
}
-/* Return dwarf representation of location list representing for
- LOC_LIST of DECL. WANT_ADDRESS has the same meaning as in
- loc_list_from_tree function. */
+/* Return the dwarf representation of the location list LOC_LIST of
+ DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
+ function. */
static dw_loc_list_ref
-dw_loc_list (var_loc_list * loc_list, tree decl, int want_address)
+dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
{
const char *endname, *secname;
- dw_loc_list_ref list;
rtx varloc;
enum var_init_status initialized;
struct var_loc_node *node;
dw_loc_descr_ref descr;
char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
+ dw_loc_list_ref list = NULL;
+ dw_loc_list_ref *listp = &list;
/* Now that we know what section we are using for a base,
actually construct the list of locations.
This means we have to special case the last node, and generate
a range of [last location start, end of function label]. */
- node = loc_list->first;
secname = secname_for_decl (decl);
- if (NOTE_VAR_LOCATION_LOC (node->var_loc_note))
- initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- else
- initialized = VAR_INIT_STATUS_INITIALIZED;
- varloc = NOTE_VAR_LOCATION (node->var_loc_note);
- descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
-
- if (loc_list && loc_list->first != loc_list->last)
- list = new_loc_list (descr, node->label, node->next->label, secname, 1);
- else
- return single_element_loc_list (descr);
- node = node->next;
-
- if (!node)
- return NULL;
-
- for (; node->next; node = node->next)
+ for (node = loc_list->first; node->next; node = node->next)
if (NOTE_VAR_LOCATION_LOC (node->var_loc_note) != NULL_RTX)
{
/* The variable has a location between NODE->LABEL and
initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
varloc = NOTE_VAR_LOCATION (node->var_loc_note);
descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
- add_loc_descr_to_loc_list (&list, descr,
- node->label, node->next->label, secname);
+ if (descr)
+ {
+ *listp = new_loc_list (descr, node->label, node->next->label,
+ secname);
+ listp = &(*listp)->dw_loc_next;
+ }
}
/* If the variable has a location at the last label
it keeps its location until the end of function. */
if (NOTE_VAR_LOCATION_LOC (node->var_loc_note) != NULL_RTX)
{
- if (!current_function_decl)
- endname = text_end_label;
- else
- {
- ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
- current_function_funcdef_no);
- endname = ggc_strdup (label_id);
- }
-
initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
varloc = NOTE_VAR_LOCATION (node->var_loc_note);
descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
- add_loc_descr_to_loc_list (&list, descr, node->label, endname, secname);
+ if (descr)
+ {
+ if (!current_function_decl)
+ endname = text_end_label;
+ else
+ {
+ ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
+ current_function_funcdef_no);
+ endname = ggc_strdup (label_id);
+ }
+
+ *listp = new_loc_list (descr, node->label, endname, secname);
+ listp = &(*listp)->dw_loc_next;
+ }
}
+
+ /* Try to avoid the overhead of a location list emitting a location
+ expression instead, but only if we didn't have more than one
+ location entry in the first place. If some entries were not
+ representable, we don't want to pretend a single entry that was
+ applies to the entire scope in which the variable is
+ available. */
+ if (list && loc_list->first->next)
+ gen_llsym (list);
+
return list;
}
static bool
single_element_loc_list_p (dw_loc_list_ref list)
{
- return (!list->dw_loc_next && !list->begin && !list->end);
+ gcc_assert (!list->dw_loc_next || list->ll_symbol);
+ return !list->ll_symbol;
}
/* To each location in list LIST add loc descr REF. */
TODO: We handle only simple cases of RET or LIST having at most one
element. General case would inolve sorting the lists in program order
- and merging them that will need some additional work.
+ and merging them that will need some additional work.
Adding that will improve quality of debug info especially for SRA-ed
structures. */
case RESULT_DECL:
case FUNCTION_DECL:
{
- rtx rtl = rtl_for_decl_location (loc);
+ rtx rtl;
var_loc_list *loc_list = lookup_decl_loc (loc);
- if (loc_list && loc_list->first
- && (list_ret = dw_loc_list (loc_list, loc, want_address)))
- have_address = want_address != 0;
- else if (rtl == NULL_RTX)
+ if (loc_list && loc_list->first)
+ {
+ list_ret = dw_loc_list (loc_list, loc, want_address);
+ have_address = want_address != 0;
+ break;
+ }
+ rtl = rtl_for_decl_location (loc);
+ if (rtl == NULL_RTX)
{
expansion_failed (loc, NULL_RTX, "DECL has no RTL");
return 0;
if (bytepos > 0)
add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
else if (bytepos < 0)
- loc_list_plus_const (list_ret, bytepos);
+ loc_list_plus_const (list_ret, bytepos);
have_address = 1;
break;
case CEIL_DIV_EXPR:
case ROUND_DIV_EXPR:
case TRUNC_DIV_EXPR:
+ if (TYPE_UNSIGNED (TREE_TYPE (loc)))
+ return 0;
op = DW_OP_div;
goto do_binop;
case CEIL_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- op = DW_OP_mod;
- goto do_binop;
+ if (TYPE_UNSIGNED (TREE_TYPE (loc)))
+ {
+ op = DW_OP_mod;
+ goto do_binop;
+ }
+ list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0);
+ list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0);
+ if (list_ret == 0 || list_ret1 == 0)
+ return 0;
+
+ add_loc_list (&list_ret, list_ret1);
+ if (list_ret == 0)
+ return 0;
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
+ break;
case MULT_EXPR:
op = DW_OP_mul;
add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
}
if (ret)
- list_ret = single_element_loc_list (ret);
+ list_ret = new_loc_list (ret, NULL, NULL, NULL);
return list_ret;
}
else
{
enum dwarf_location_atom op;
-
+
/* The DWARF2 standard says that we should assume that the structure
address is already on the stack, so we can specify a structure
field address by using DW_OP_plus_uconst. */
-
+
#ifdef MIPS_DEBUGGING_INFO
/* ??? The SGI dwarf reader does not handle the DW_OP_plus_uconst
operator correctly. It works only if we leave the offset on the
#else
op = DW_OP_plus_uconst;
#endif
-
+
loc_descr = new_loc_descr (op, offset, 0);
}
}
return true;
case CONST_STRING:
- resolve_one_addr (&rtl, NULL);
- add_AT_addr (die, DW_AT_const_value, rtl);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
- return true;
+ if (dwarf_version >= 4 || !dwarf_strict)
+ {
+ dw_loc_descr_ref loc_result;
+ resolve_one_addr (&rtl, NULL);
+ rtl_addr:
+ loc_result = new_loc_descr (DW_OP_addr, 0, 0);
+ loc_result->dw_loc_oprnd1.val_class = dw_val_class_addr;
+ loc_result->dw_loc_oprnd1.v.val_addr = rtl;
+ add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
+ add_AT_loc (die, DW_AT_location, loc_result);
+ VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ return true;
+ }
+ return false;
case CONST:
if (CONSTANT_P (XEXP (rtl, 0)))
if (!const_ok_for_output (rtl))
return false;
case LABEL_REF:
- add_AT_addr (die, DW_AT_const_value, rtl);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
- return true;
+ if (dwarf_version >= 4 || !dwarf_strict)
+ goto rtl_addr;
+ return false;
case PLUS:
/* In cases where an inlined instance of an inline function is passed
if (rtl)
rtl = avoid_constant_pool_reference (rtl);
+ /* Try harder to get an rtl. If this symbol ends up not being emitted
+ in the current CU, resolve_addr will remove the expression referencing
+ it. */
+ if (rtl == NULL_RTX
+ && TREE_CODE (decl) == VAR_DECL
+ && !DECL_EXTERNAL (decl)
+ && TREE_STATIC (decl)
+ && DECL_NAME (decl)
+ && !DECL_HARD_REGISTER (decl)
+ && DECL_MODE (decl) != VOIDmode)
+ {
+ rtl = DECL_RTL (decl);
+ /* Reset DECL_RTL back, as various parts of the compiler expect
+ DECL_RTL set meaning it is actually going to be output. */
+ SET_DECL_RTL (decl, NULL);
+ if (!MEM_P (rtl)
+ || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
+ || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
+ rtl = NULL_RTX;
+ }
+
return rtl;
}
tree offset;
int volatilep = 0, unsignedp = 0;
- /* If the decl isn't a VAR_DECL, or if it isn't public or static, or if
+ /* If the decl isn't a VAR_DECL, or if it isn't static, or if
it does not have a value (the offset into the common area), or if it
is thread local (as opposed to global) then it isn't common, and shouldn't
be handled as such. */
if (TREE_CODE (decl) != VAR_DECL
- || !TREE_PUBLIC (decl)
|| !TREE_STATIC (decl)
|| !DECL_HAS_VALUE_EXPR_P (decl)
|| !is_fortran ())
a constant value. That way we are better to use add_const_value_attribute
rather than expanding constant value equivalent. */
loc_list = lookup_decl_loc (decl);
- if (loc_list && loc_list->first && loc_list->first == loc_list->last)
+ if (loc_list
+ && loc_list->first
+ && loc_list->first == loc_list->last
+ && NOTE_VAR_LOCATION (loc_list->first->var_loc_note)
+ && NOTE_VAR_LOCATION_LOC (loc_list->first->var_loc_note))
{
- enum var_init_status status;
struct var_loc_node *node;
node = loc_list->first;
- status = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- rtl = NOTE_VAR_LOCATION (node->var_loc_note);
- if (GET_CODE (rtl) == VAR_LOCATION
- && GET_CODE (XEXP (rtl, 1)) != PARALLEL)
- rtl = XEXP (XEXP (rtl, 1), 0);
+ rtl = NOTE_VAR_LOCATION_LOC (node->var_loc_note);
+ if (GET_CODE (rtl) != PARALLEL)
+ rtl = XEXP (rtl, 0);
if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
&& add_const_value_attribute (die, rtl))
return true;
if (!cfa_equal_p (&last_cfa, &next_cfa))
{
*list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
- start_label, last_label, section,
- list == NULL);
+ start_label, last_label, section);
list_tail = &(*list_tail)->dw_loc_next;
last_cfa = next_cfa;
if (!cfa_equal_p (&last_cfa, &next_cfa))
{
*list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
- start_label, last_label, section,
- list == NULL);
+ start_label, last_label, section);
list_tail = &(*list_tail)->dw_loc_next;
start_label = last_label;
}
+
*list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
- start_label, fde->dw_fde_end, section,
- list == NULL);
+ start_label, fde->dw_fde_end, section);
+
+ if (list && list->dw_loc_next)
+ gen_llsym (list);
return list;
}
0));
/* GNU extension: Record what type this method came from originally. */
- if (debug_info_level > DINFO_LEVEL_TERSE)
+ if (debug_info_level > DINFO_LEVEL_TERSE
+ && DECL_CONTEXT (func_decl))
add_AT_die_ref (die, DW_AT_containing_type,
lookup_type_die (DECL_CONTEXT (func_decl)));
}
decl_name = DECL_NAME (decl);
if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
{
- add_name_attribute (die, dwarf2_name (decl, 0));
+ const char *name = dwarf2_name (decl, 0);
+ if (name)
+ add_name_attribute (die, name);
if (! DECL_ARTIFICIAL (decl))
add_src_coords_attributes (die, decl);
add_subscript_info (array_die, type, collapse_nested_arrays);
/* Add representation of the type of the elements of this array type and
- emit the corresponding DIE if we haven't done it already. */
+ emit the corresponding DIE if we haven't done it already. */
element_type = TREE_TYPE (type);
if (collapse_nested_arrays)
while (TREE_CODE (element_type) == ARRAY_TYPE)
int i;
for (i = VEC_length (tree, incomplete_types) - 1; i >= 0; i--)
- gen_type_die (VEC_index (tree, incomplete_types, i), comp_unit_die);
+ if (should_emit_struct_debug (VEC_index (tree, incomplete_types, i),
+ DINFO_USAGE_DIR_USE))
+ gen_type_die (VEC_index (tree, incomplete_types, i), comp_unit_die);
}
/* Determine what tag to use for a record type. */
/* If we're trying to avoid duplicate debug info, we may not have
emitted the member decl for this function. Emit it now. */
- if (TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
+ if (TYPE_STUB_DECL (type)
+ && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
&& ! lookup_decl_die (member))
{
dw_die_ref type_die;
dw_die_ref old_die;
tree save_fn;
tree context;
- int was_abstract = DECL_ABSTRACT (decl);
+ int was_abstract;
htab_t old_decl_loc_table;
/* Make sure we have the actual abstract inline, not a clone. */
current_function_decl = decl;
push_cfun (DECL_STRUCT_FUNCTION (decl));
+ was_abstract = DECL_ABSTRACT (decl);
set_decl_abstract_flags (decl, 1);
dwarf2out_decl (decl);
if (! was_abstract)
of the pack. Note that the set of pack arguments can be empty.
In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
children DIE.
-
+
Otherwise, we just consider the parameters of DECL. */
while (generic_decl_parm || parm)
{
dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
dw_die_ref origin_die;
int declaration = (DECL_EXTERNAL (decl_or_origin)
- /* If DECL is COMDAT and has not actually been
- emitted, we cannot take its address; there
- might end up being no definition anywhere in
- the program. For example, consider the C++
- test case:
-
- template <class T>
- struct S { static const int i = 7; };
-
- template <class T>
- const int S<T>::i;
-
- int f() { return S<int>::i; }
-
- Here, S<int>::i is not DECL_EXTERNAL, but no
- definition is required, so the compiler will
- not emit a definition. */
- || (TREE_CODE (decl_or_origin) == VAR_DECL
- && DECL_COMDAT (decl_or_origin)
- && !TREE_ASM_WRITTEN (decl_or_origin))
|| class_or_namespace_scope_p (context_die));
if (!origin)
of a data member. */
if (com_decl)
{
- tree field;
dw_die_ref com_die;
dw_loc_list_ref loc;
die_node com_die_arg;
= htab_create_ggc (10, common_block_die_table_hash,
common_block_die_table_eq, NULL);
- field = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
com_die_arg.decl_id = DECL_UID (com_decl);
com_die_arg.die_parent = context_die;
com_die = (dw_die_ref) htab_find (common_block_die_table, &com_die_arg);
and if we already emitted a DIE for it, don't emit a second
DIE for it again. */
if (old_die
- && declaration
- && old_die->die_parent == context_die)
+ && declaration)
return;
/* For static data members, the declaration in the class is supposed
{
/* Find die that represents this context. */
if (TYPE_P (context))
- return force_type_die (context);
+ return force_type_die (TYPE_MAIN_VARIANT (context));
else
return force_decl_die (context);
}
context_die, decl);
/* For Fortran modules defined in different CU don't add src coords. */
if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
- add_name_attribute (namespace_die, dwarf2_name (decl, 0));
+ {
+ const char *name = dwarf2_name (decl, 0);
+ if (name)
+ add_name_attribute (namespace_die, name);
+ }
else
add_name_and_src_coords_attributes (namespace_die, decl);
if (DECL_EXTERNAL (decl))
/* Replace DW_AT_name for the decl with name. */
-
+
static void
dwarf2out_set_name (tree decl, tree name)
{
dw_die_ref die;
dw_attr_ref attr;
+ const char *dname;
die = TYPE_SYMTAB_DIE (decl);
if (!die)
return;
+ dname = dwarf2_name (name, 0);
+ if (!dname)
+ return;
+
attr = get_AT (die, DW_AT_name);
if (attr)
{
struct indirect_string_node *node;
- node = find_AT_string (dwarf2_name (name, 0));
+ node = find_AT_string (dname);
/* replace the string. */
attr->dw_attr_val.v.val_str = node;
}
else
- add_name_attribute (die, dwarf2_name (name, 0));
+ add_name_attribute (die, dname);
}
/* Called by the final INSN scan whenever we see a direct function call.
== ((const struct vcall_insn *) y)->insn_uid);
}
+/* Associate VTABLE_SLOT with INSN_UID in the VCALL_INSN_TABLE. */
+
+static void
+store_vcall_insn (unsigned int vtable_slot, int insn_uid)
+{
+ struct vcall_insn *item = GGC_NEW (struct vcall_insn);
+ struct vcall_insn **slot;
+
+ gcc_assert (item);
+ item->insn_uid = insn_uid;
+ item->vtable_slot = vtable_slot;
+ slot = (struct vcall_insn **)
+ htab_find_slot_with_hash (vcall_insn_table, &item,
+ (hashval_t) insn_uid, INSERT);
+ *slot = item;
+}
+
+/* Return the VTABLE_SLOT associated with INSN_UID. */
+
+static unsigned int
+lookup_vcall_insn (unsigned int insn_uid)
+{
+ struct vcall_insn item;
+ struct vcall_insn *p;
+
+ item.insn_uid = insn_uid;
+ item.vtable_slot = 0;
+ p = (struct vcall_insn *) htab_find_with_hash (vcall_insn_table,
+ (void *) &item,
+ (hashval_t) insn_uid);
+ if (p == NULL)
+ return (unsigned int) -1;
+ return p->vtable_slot;
+}
+
+
/* Called when lowering indirect calls to RTL. We make a note of INSN_UID
and the OBJ_TYPE_REF_TOKEN from ADDR. For C++ virtual calls, the token
is the vtable slot index that we will need to put in the virtual call
{
tree token = OBJ_TYPE_REF_TOKEN (addr);
if (TREE_CODE (token) == INTEGER_CST)
- {
- struct vcall_insn *item = GGC_NEW (struct vcall_insn);
- struct vcall_insn **slot;
-
- gcc_assert (item);
- item->insn_uid = insn_uid;
- item->vtable_slot = TREE_INT_CST_LOW (token);
- slot = (struct vcall_insn **)
- htab_find_slot_with_hash (vcall_insn_table, &item,
- (hashval_t) insn_uid, INSERT);
- *slot = item;
- }
+ store_vcall_insn (TREE_INT_CST_LOW (token), insn_uid);
}
}
+/* Called when scheduling RTL, when a CALL_INSN is split. Copies the
+ OBJ_TYPE_REF_TOKEN previously associated with OLD_INSN and associates it
+ with NEW_INSN. */
+
+static void
+dwarf2out_copy_call_info (rtx old_insn, rtx new_insn)
+{
+ unsigned int vtable_slot = lookup_vcall_insn (INSN_UID (old_insn));
+
+ if (vtable_slot != (unsigned int) -1)
+ store_vcall_insn (vtable_slot, INSN_UID (new_insn));
+}
+
/* Called by the final INSN scan whenever we see a virtual function call.
Make an entry into the virtual call table, recording the point of call
and the slot index of the vtable entry used to call the virtual member
static void
dwarf2out_virtual_call (int insn_uid)
{
+ unsigned int vtable_slot = lookup_vcall_insn (insn_uid);
vcall_entry e;
- struct vcall_insn item;
- struct vcall_insn *p;
- item.insn_uid = insn_uid;
- item.vtable_slot = 0;
- p = (struct vcall_insn *) htab_find_with_hash (vcall_insn_table,
- (void *) &item,
- (hashval_t) insn_uid);
- if (p == NULL)
+ if (vtable_slot == (unsigned int) -1)
return;
e.poc_label_num = poc_label_num++;
- e.vtable_slot = p->vtable_slot;
+ e.vtable_slot = vtable_slot;
VEC_safe_push (vcall_entry, gc, vcall_table, &e);
/* Drop a label at the return point to mark the point of call. */
if (next_real == NULL_RTX)
return;
- newloc = GGC_CNEW (struct var_loc_node);
+ decl = NOTE_VAR_LOCATION_DECL (loc_note);
+ newloc = add_var_loc_to_decl (decl, loc_note);
+ if (newloc == NULL)
+ return;
+
/* If there were no real insns between note we processed last time
and this note, use the label we emitted last time. */
if (last_var_location_insn == NULL_RTX
last_var_location_insn = next_real;
last_in_cold_section_p = in_cold_section_p;
- decl = NOTE_VAR_LOCATION_DECL (loc_note);
- add_var_loc_to_decl (decl, newloc);
}
/* We need to reset the locations at the beginning of each
ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
}
+}
+
+/* Called before cgraph_optimize starts outputting functions, variables
+ and toplevel asms into assembly. */
+
+static void
+dwarf2out_assembly_start (void)
+{
if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE && dwarf2out_do_cfi_asm ())
{
#ifndef TARGET_UNWIND_INFO
breaking out types into comdat sections, do this
for all type definitions. */
if (die->die_tag == DW_TAG_array_type
- || (dwarf_version >= 4
+ || (dwarf_version >= 4
&& is_type_die (die) && ! is_declaration_die (die)))
FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
else
{
dw_die_ref c;
dw_attr_ref a;
- dw_loc_list_ref curr;
+ dw_loc_list_ref *curr;
unsigned ix;
for (ix = 0; VEC_iterate (dw_attr_node, die->die_attr, ix, a); ix++)
switch (AT_class (a))
{
case dw_val_class_loc_list:
- for (curr = AT_loc_list (a); curr != NULL; curr = curr->dw_loc_next)
- if (!resolve_addr_in_expr (curr->expr))
- curr->expr = NULL;
+ curr = AT_loc_list_ptr (a);
+ while (*curr)
+ {
+ if (!resolve_addr_in_expr ((*curr)->expr))
+ {
+ dw_loc_list_ref next = (*curr)->dw_loc_next;
+ if (next && (*curr)->ll_symbol)
+ {
+ gcc_assert (!next->ll_symbol);
+ next->ll_symbol = (*curr)->ll_symbol;
+ }
+ *curr = next;
+ }
+ else
+ curr = &(*curr)->dw_loc_next;
+ }
+ if (!AT_loc_list (a))
+ {
+ remove_AT (die, a->dw_attr);
+ ix--;
+ }
break;
case dw_val_class_loc:
if (!resolve_addr_in_expr (AT_loc (a)))
- a->dw_attr_val.v.val_loc = NULL;
+ {
+ remove_AT (die, a->dw_attr);
+ ix--;
+ }
break;
case dw_val_class_addr:
if (a->dw_attr == DW_AT_const_value
&& resolve_one_addr (&a->dw_attr_val.v.val_addr, NULL))
{
- a->dw_attr = DW_AT_location;
- a->dw_attr_val.val_class = dw_val_class_loc;
- a->dw_attr_val.v.val_loc = NULL;
+ remove_AT (die, a->dw_attr);
+ ix--;
}
break;
default:
else
{
unsigned fde_idx = 0;
+ bool range_list_added = false;
/* We need to give .debug_loc and .debug_ranges an appropriate
"base address". Use zero so that these addresses become
add_AT_addr (comp_unit_die, DW_AT_low_pc, const0_rtx);
add_AT_addr (comp_unit_die, DW_AT_entry_pc, const0_rtx);
- add_AT_range_list (comp_unit_die, DW_AT_ranges,
- add_ranges_by_labels (text_section_label,
- text_end_label));
- if (flag_reorder_blocks_and_partition)
- add_ranges_by_labels (cold_text_section_label,
- cold_end_label);
+ if (text_section_used)
+ add_ranges_by_labels (comp_unit_die, text_section_label,
+ text_end_label, &range_list_added);
+ if (flag_reorder_blocks_and_partition && cold_text_section_used)
+ add_ranges_by_labels (comp_unit_die, cold_text_section_label,
+ cold_end_label, &range_list_added);
for (fde_idx = 0; fde_idx < fde_table_in_use; fde_idx++)
{
if (fde->dw_fde_switched_sections)
{
if (!fde->in_std_section)
- add_ranges_by_labels (fde->dw_fde_hot_section_label,
- fde->dw_fde_hot_section_end_label);
+ add_ranges_by_labels (comp_unit_die,
+ fde->dw_fde_hot_section_label,
+ fde->dw_fde_hot_section_end_label,
+ &range_list_added);
if (!fde->cold_in_std_section)
- add_ranges_by_labels (fde->dw_fde_unlikely_section_label,
- fde->dw_fde_unlikely_section_end_label);
+ add_ranges_by_labels (comp_unit_die,
+ fde->dw_fde_unlikely_section_label,
+ fde->dw_fde_unlikely_section_end_label,
+ &range_list_added);
}
else if (!fde->in_std_section)
- add_ranges_by_labels (fde->dw_fde_begin,
- fde->dw_fde_end);
+ add_ranges_by_labels (comp_unit_die, fde->dw_fde_begin,
+ fde->dw_fde_end, &range_list_added);
}
- add_ranges (NULL);
+ if (range_list_added)
+ add_ranges (NULL);
}
/* Output location list section if necessary. */
{
0, /* init */
0, /* finish */
+ 0, /* assembly_start */
0, /* define */
0, /* undef */
0, /* start_source_file */
0, /* switch_text_section */
0, /* direct_call */
0, /* virtual_call_token */
+ 0, /* copy_call_info */
0, /* virtual_call */
0, /* set_name */
0 /* start_end_main_source_file */