/* Output Dwarf2 format symbol table information from GCC.
Copyright (C) 1992, 1993, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
- 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+ 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Gary Funck (gary@intrepid.com).
Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
Extensively modified by Jason Merrill (jason@cygnus.com).
#include "dwarf2out.h"
#include "dwarf2asm.h"
#include "toplev.h"
-#include "varray.h"
#include "ggc.h"
#include "md5.h"
#include "tm_p.h"
#include "input.h"
#include "gimple.h"
#include "tree-pass.h"
+#include "tree-flow.h"
#ifdef DWARF2_DEBUGGING_INFO
static void dwarf2out_source_line (unsigned int, const char *, int, bool);
static void output_cfi_directive (dw_cfi_ref);
static void output_call_frame_info (int);
static void dwarf2out_note_section_used (void);
-static void dwarf2out_stack_adjust (rtx, bool);
-static void dwarf2out_args_size_adjust (HOST_WIDE_INT, const char *);
static void flush_queued_reg_saves (void);
static bool clobbers_queued_reg_save (const_rtx);
static void dwarf2out_frame_debug_expr (rtx, const char *);
/* Emit the state save. */
emit_cfa_remember = false;
- cfi_remember = new_cfi ();
+ cfi_remember = new_cfi ();
cfi_remember->dw_cfi_opc = DW_CFA_remember_state;
add_fde_cfi (label, cfi_remember);
}
cfi = new_cfi ();
- if (loc.reg == old_cfa.reg && !loc.indirect)
+ if (loc.reg == old_cfa.reg && !loc.indirect && !old_cfa.indirect)
{
/* Construct a "DW_CFA_def_cfa_offset <offset>" instruction, indicating
- the CFA register did not change but the offset did. The data
+ the CFA register did not change but the offset did. The data
factoring for DW_CFA_def_cfa_offset_sf happens in output_cfi, or
in the assembler via the .cfi_def_cfa_offset directive. */
if (loc.offset < 0)
#ifndef MIPS_DEBUGGING_INFO /* SGI dbx thinks this means no offset. */
else if (loc.offset == old_cfa.offset
&& old_cfa.reg != INVALID_REGNUM
- && !loc.indirect)
+ && !loc.indirect
+ && !old_cfa.indirect)
{
/* Construct a "DW_CFA_def_cfa_register <register>" instruction,
indicating the CFA register has changed to <register> but the
&& sreg == INVALID_REGNUM)
{
cfi->dw_cfi_opc = DW_CFA_expression;
- cfi->dw_cfi_oprnd2.dw_cfi_reg_num = reg;
- cfi->dw_cfi_oprnd1.dw_cfi_loc
+ cfi->dw_cfi_oprnd1.dw_cfi_reg_num = reg;
+ cfi->dw_cfi_oprnd2.dw_cfi_loc
= build_cfa_aligned_loc (offset, fde->stack_realignment);
}
else if (sreg == INVALID_REGNUM)
add_fde_cfi (label, cfi);
}
-/* Add a CFI to update the running total of the size of arguments
- pushed onto the stack. */
-
-void
-dwarf2out_args_size (const char *label, HOST_WIDE_INT size)
-{
- dw_cfi_ref cfi;
-
- if (size == old_args_size)
- return;
-
- old_args_size = size;
-
- cfi = new_cfi ();
- cfi->dw_cfi_opc = DW_CFA_GNU_args_size;
- cfi->dw_cfi_oprnd1.dw_cfi_offset = size;
- add_fde_cfi (label, cfi);
-}
-
/* Entry point for saving a register to the stack. REG is the GCC register
number. LABEL and OFFSET are passed to reg_save. */
VEC_free (rtx, heap, next);
}
+/* Add a CFI to update the running total of the size of arguments
+ pushed onto the stack. */
+
+static void
+dwarf2out_args_size (const char *label, HOST_WIDE_INT size)
+{
+ dw_cfi_ref cfi;
+
+ if (size == old_args_size)
+ return;
+
+ old_args_size = size;
+
+ cfi = new_cfi ();
+ cfi->dw_cfi_opc = DW_CFA_GNU_args_size;
+ cfi->dw_cfi_oprnd1.dw_cfi_offset = size;
+ add_fde_cfi (label, cfi);
+}
+
+/* Record a stack adjustment of OFFSET bytes. */
+
+static void
+dwarf2out_stack_adjust (HOST_WIDE_INT offset, const char *label)
+{
+ if (cfa.reg == STACK_POINTER_REGNUM)
+ cfa.offset += offset;
+
+ if (cfa_store.reg == STACK_POINTER_REGNUM)
+ cfa_store.offset += offset;
+
+ if (ACCUMULATE_OUTGOING_ARGS)
+ return;
+
+#ifndef STACK_GROWS_DOWNWARD
+ offset = -offset;
+#endif
+
+ args_size += offset;
+ if (args_size < 0)
+ args_size = 0;
+
+ def_cfa_1 (label, &cfa);
+ if (flag_asynchronous_unwind_tables)
+ dwarf2out_args_size (label, args_size);
+}
/* Check INSN to see if it looks like a push or a stack adjustment, and
- make a note of it if it does. EH uses this information to find out how
- much extra space it needs to pop off the stack. */
+ make a note of it if it does. EH uses this information to find out
+ how much extra space it needs to pop off the stack. */
static void
-dwarf2out_stack_adjust (rtx insn, bool after_p)
+dwarf2out_notice_stack_adjust (rtx insn, bool after_p)
{
HOST_WIDE_INT offset;
const char *label;
return;
label = dwarf2out_cfi_label (false);
- dwarf2out_args_size_adjust (offset, label);
-}
-
-/* Adjust args_size based on stack adjustment OFFSET. */
-
-static void
-dwarf2out_args_size_adjust (HOST_WIDE_INT offset, const char *label)
-{
- if (cfa.reg == STACK_POINTER_REGNUM)
- cfa.offset += offset;
-
- if (cfa_store.reg == STACK_POINTER_REGNUM)
- cfa_store.offset += offset;
-
-#ifndef STACK_GROWS_DOWNWARD
- offset = -offset;
-#endif
-
- args_size += offset;
- if (args_size < 0)
- args_size = 0;
-
- def_cfa_1 (label, &cfa);
- if (flag_asynchronous_unwind_tables)
- dwarf2out_args_size (label, args_size);
+ dwarf2out_stack_adjust (offset, label);
}
#endif
addr = XEXP (set, 0);
gcc_assert (MEM_P (addr));
addr = XEXP (addr, 0);
-
+
/* As documented, only consider extremely simple addresses. */
switch (GET_CODE (addr))
{
&& cfa.indirect == 0
&& cfa.reg != HARD_FRAME_POINTER_REGNUM
effects: Use DW_CFA_def_cfa_expression to define cfa
- cfa.reg == fde->drap_reg
-
- Rule 20:
- (set reg fde->drap_reg)
- constraints: fde->vdrap_reg == INVALID_REGNUM
- effects: fde->vdrap_reg = reg.
- (set mem fde->drap_reg)
- constraints: fde->drap_reg_saved == 1
- effects: none. */
+ cfa.reg == fde->drap_reg */
static void
dwarf2out_frame_debug_expr (rtx expr, const char *label)
HOST_WIDE_INT offset = stack_adjust_offset (elem, args_size, 0);
if (offset != 0)
- dwarf2out_args_size_adjust (offset, label);
+ dwarf2out_stack_adjust (offset, label);
}
}
return;
fde = current_fde ();
- if (REG_P (src)
- && fde
- && fde->drap_reg == REGNO (src)
- && (fde->drap_reg_saved
- || REG_P (dest)))
- {
- /* Rule 20 */
- /* If we are saving dynamic realign argument pointer to a
- register, the destination is virtual dynamic realign
- argument pointer. It may be used to access argument. */
- if (REG_P (dest))
- {
- gcc_assert (fde->vdrap_reg == INVALID_REGNUM);
- fde->vdrap_reg = REGNO (dest);
- }
- return;
- }
-
switch (GET_CODE (dest))
{
case REG:
if (!NONJUMP_INSN_P (insn) || clobbers_queued_reg_save (insn))
flush_queued_reg_saves ();
- if (! RTX_FRAME_RELATED_P (insn))
+ if (!RTX_FRAME_RELATED_P (insn))
{
+ /* ??? This should be done unconditionally since stack adjustments
+ matter if the stack pointer is not the CFA register anymore but
+ is still used to save registers. */
if (!ACCUMULATE_OUTGOING_ARGS)
- dwarf2out_stack_adjust (insn, after_p);
+ dwarf2out_notice_stack_adjust (insn, after_p);
return;
}
handled_one = true;
break;
+ case REG_CFA_SET_VDRAP:
+ n = XEXP (note, 0);
+ if (REG_P (n))
+ {
+ dw_fde_ref fde = current_fde ();
+ if (fde)
+ {
+ gcc_assert (fde->vdrap_reg == INVALID_REGNUM);
+ if (REG_P (n))
+ fde->vdrap_reg = REGNO (n);
+ }
+ }
+ handled_one = true;
+ break;
+
default:
break;
}
void
dwarf2out_frame_debug_restore_state (void)
{
- dw_cfi_ref cfi = new_cfi ();
+ dw_cfi_ref cfi = new_cfi ();
const char *label = dwarf2out_cfi_label (false);
cfi->dw_cfi_opc = DW_CFA_restore_state;
case DW_CFA_same_value:
case DW_CFA_def_cfa_register:
case DW_CFA_register:
+ case DW_CFA_expression:
return dw_cfi_oprnd_reg_num;
case DW_CFA_def_cfa_offset:
return dw_cfi_oprnd_offset;
case DW_CFA_def_cfa_expression:
- case DW_CFA_expression:
return dw_cfi_oprnd_loc;
default:
case DW_CFA_register:
return dw_cfi_oprnd_reg_num;
+ case DW_CFA_expression:
+ return dw_cfi_oprnd_loc;
+
default:
return dw_cfi_oprnd_unused;
}
{
r = DWARF2_FRAME_REG_OUT (cfi->dw_cfi_oprnd1.dw_cfi_reg_num, for_eh);
dw2_asm_output_data (1, (cfi->dw_cfi_opc | (r & 0x3f)),
- "DW_CFA_offset, column 0x%lx", r);
+ "DW_CFA_offset, column %#lx", r);
off = div_data_align (cfi->dw_cfi_oprnd2.dw_cfi_offset);
dw2_asm_output_data_uleb128 (off, NULL);
}
{
r = DWARF2_FRAME_REG_OUT (cfi->dw_cfi_oprnd1.dw_cfi_reg_num, for_eh);
dw2_asm_output_data (1, (cfi->dw_cfi_opc | (r & 0x3f)),
- "DW_CFA_restore, column 0x%lx", r);
+ "DW_CFA_restore, column %#lx", r);
}
else
{
break;
case DW_CFA_GNU_args_size:
- fprintf (asm_out_file, "\t.cfi_escape 0x%x,", DW_CFA_GNU_args_size);
+ fprintf (asm_out_file, "\t.cfi_escape %#x,", DW_CFA_GNU_args_size);
dw2_asm_output_data_uleb128_raw (cfi->dw_cfi_oprnd1.dw_cfi_offset);
if (flag_debug_asm)
fprintf (asm_out_file, "\t%s args_size "HOST_WIDE_INT_PRINT_DEC,
case DW_CFA_def_cfa_expression:
case DW_CFA_expression:
- fprintf (asm_out_file, "\t.cfi_escape 0x%x,", cfi->dw_cfi_opc);
+ fprintf (asm_out_file, "\t.cfi_escape %#x,", cfi->dw_cfi_opc);
output_cfa_loc_raw (cfi);
fputc ('\n', asm_out_file);
break;
}
dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
+ if (dw_cie_version >= 4)
+ {
+ dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
+ dw2_asm_output_data (1, 0, "CIE Segment Size");
+ }
dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
"CIE Data Alignment Factor");
if (enc & DW_EH_PE_indirect)
ref = dw2_force_const_mem (ref, true);
- fprintf (asm_out_file, "\t.cfi_personality 0x%x,", enc);
+ fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
output_addr_const (asm_out_file, ref);
fputc ('\n', asm_out_file);
}
if (enc & DW_EH_PE_indirect)
ref = dw2_force_const_mem (ref, true);
- fprintf (asm_out_file, "\t.cfi_lsda 0x%x,", enc);
+ fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
output_addr_const (asm_out_file, ref);
fputc ('\n', asm_out_file);
}
case DW_OP_piece:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
break;
+ case DW_OP_bit_piece:
+ size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
+ size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
+ break;
case DW_OP_deref_size:
case DW_OP_xderef_size:
size += 1;
case DW_OP_piece:
dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
break;
+ case DW_OP_bit_piece:
+ dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
+ dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
+ break;
case DW_OP_deref_size:
case DW_OP_xderef_size:
dw2_asm_output_data (1, val1->v.val_int, NULL);
dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
break;
+ case DW_OP_bit_piece:
+ fputc (',', asm_out_file);
+ dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
+ dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
+ break;
+
case DW_OP_consts:
case DW_OP_breg0:
case DW_OP_breg1:
while (1)
{
/* Output the opcode. */
- fprintf (asm_out_file, "0x%x", loc->dw_loc_opc);
+ fprintf (asm_out_file, "%#x", loc->dw_loc_opc);
output_loc_operands_raw (loc);
if (!loc->dw_loc_next)
unsigned long size;
if (cfi->dw_cfi_opc == DW_CFA_expression)
- dw2_asm_output_data (1, cfi->dw_cfi_oprnd2.dw_cfi_reg_num, NULL);
+ {
+ dw2_asm_output_data (1, cfi->dw_cfi_oprnd1.dw_cfi_reg_num, NULL);
+ loc = cfi->dw_cfi_oprnd2.dw_cfi_loc;
+ }
+ else
+ loc = cfi->dw_cfi_oprnd1.dw_cfi_loc;
/* Output the size of the block. */
- loc = cfi->dw_cfi_oprnd1.dw_cfi_loc;
size = size_of_locs (loc);
dw2_asm_output_data_uleb128 (size, NULL);
unsigned long size;
if (cfi->dw_cfi_opc == DW_CFA_expression)
- fprintf (asm_out_file, "0x%x,", cfi->dw_cfi_oprnd2.dw_cfi_reg_num);
+ {
+ fprintf (asm_out_file, "%#x,", cfi->dw_cfi_oprnd1.dw_cfi_reg_num);
+ loc = cfi->dw_cfi_oprnd2.dw_cfi_loc;
+ }
+ else
+ loc = cfi->dw_cfi_oprnd1.dw_cfi_loc;
/* Output the size of the block. */
- loc = cfi->dw_cfi_oprnd1.dw_cfi_loc;
size = size_of_locs (loc);
dw2_asm_output_data_uleb128_raw (size);
fputc (',', asm_out_file);
static void dwarf2out_undef (unsigned int, const char *);
static void dwarf2out_start_source_file (unsigned, const char *);
static void dwarf2out_end_source_file (unsigned);
+static void dwarf2out_function_decl (tree);
static void dwarf2out_begin_block (unsigned, unsigned);
static void dwarf2out_end_block (unsigned, unsigned);
static bool dwarf2out_ignore_block (const_tree);
static void dwarf2out_var_location (rtx);
static void dwarf2out_direct_call (tree);
static void dwarf2out_virtual_call_token (tree, int);
+static void dwarf2out_copy_call_info (rtx, rtx);
static void dwarf2out_virtual_call (int);
static void dwarf2out_begin_function (tree);
static void dwarf2out_set_name (tree, tree);
dwarf2out_end_epilogue,
dwarf2out_begin_function,
debug_nothing_int, /* end_function */
- dwarf2out_decl, /* function_decl */
+ dwarf2out_function_decl, /* function_decl */
dwarf2out_global_decl,
dwarf2out_type_decl, /* type_decl */
dwarf2out_imported_module_or_decl,
dwarf2out_switch_text_section,
dwarf2out_direct_call,
dwarf2out_virtual_call_token,
+ dwarf2out_copy_call_info,
dwarf2out_virtual_call,
dwarf2out_set_name,
1 /* start_end_main_source_file */
is not made available by the GCC front-end. */
#define DWARF_LINE_DEFAULT_IS_STMT_START 1
+/* Maximum number of operations per instruction bundle. */
+#ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
+#define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
+#endif
+
#ifdef DWARF2_DEBUGGING_INFO
/* This location is used by calc_die_sizes() to keep track
the offset of each DIE within the .debug_info section. */
static GTY(()) limbo_die_node *limbo_die_list;
/* A list of DIEs for which we may have to generate
- DW_AT_MIPS_linkage_name once their DECL_ASSEMBLER_NAMEs are
- set. */
+ DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
static GTY(()) limbo_die_node *deferred_asm_name;
/* Filenames referenced by this compilation unit. */
/* Node of the variable location list. */
struct GTY ((chain_next ("%h.next"))) var_loc_node {
- rtx GTY (()) var_loc_note;
+ /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
+ EXPR_LIST chain. For small bitsizes, bitsize is encoded
+ in mode of the EXPR_LIST node and first EXPR_LIST operand
+ is either NOTE_INSN_VAR_LOCATION for a piece with a known
+ location or NULL for padding. For larger bitsizes,
+ mode is 0 and first operand is a CONCAT with bitsize
+ as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
+ NULL as second operand. */
+ rtx GTY (()) loc;
const char * GTY (()) label;
- const char * GTY (()) section_label;
struct var_loc_node * GTY (()) next;
};
struct GTY (()) var_loc_list_def {
struct var_loc_node * GTY (()) first;
- /* Do not mark the last element of the chained list because
- it is marked through the chain. */
+ /* Pointer to the last but one or last element of the
+ chained list. If the list is empty, both first and
+ last are NULL, if the list contains just one node
+ or the last node certainly is not redundant, it points
+ to the last node, otherwise points to the last but one.
+ Do not mark it for GC because it is marked through the chain. */
struct var_loc_node * GTY ((skip ("%h"))) last;
/* DECL_UID of the variable decl. */
static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
-static bool is_c_family (void);
static bool is_cxx (void);
-static bool is_java (void);
static bool is_fortran (void);
static bool is_ada (void);
static void remove_AT (dw_die_ref, enum dwarf_attribute);
static int decl_loc_table_eq (const void *, const void *);
static var_loc_list *lookup_decl_loc (const_tree);
static void equate_decl_number_to_die (tree, dw_die_ref);
-static void add_var_loc_to_decl (tree, struct var_loc_node *);
+static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
static void print_spaces (FILE *);
static void print_die (dw_die_ref, FILE *);
static void print_dwarf_line_table (FILE *);
static void output_aranges (void);
static unsigned int add_ranges_num (int);
static unsigned int add_ranges (const_tree);
-static unsigned int add_ranges_by_labels (const char *, const char *);
+static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
+ bool *);
static void output_ranges (void);
static void output_line_info (void);
static void output_file_names (void);
static void add_data_member_location_attribute (dw_die_ref, tree);
static bool add_const_value_attribute (dw_die_ref, rtx);
static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
+static void insert_double (double_int, unsigned char *);
static void insert_float (const_rtx, unsigned char *);
static rtx rtl_for_decl_location (tree);
static bool add_location_or_const_value_attribute (dw_die_ref, tree,
static void splice_child_die (dw_die_ref, dw_die_ref);
static int file_info_cmp (const void *, const void *);
static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
- const char *, const char *, unsigned);
-static void add_loc_descr_to_loc_list (dw_loc_list_ref *, dw_loc_descr_ref,
- const char *, const char *,
- const char *);
+ const char *, const char *);
static void output_loc_list (dw_loc_list_ref);
static char *gen_internal_sym (const char *);
#define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
#endif
+/* Mangled name attribute to use. This used to be a vendor extension
+ until DWARF 4 standardized it. */
+#define AT_linkage_name \
+ (dwarf_version >= 4 ? DW_AT_linkage_name : DW_AT_MIPS_linkage_name)
+
+
/* Definitions of defaults for formats and names of various special
(artificial) labels which may be generated within this file (when the -g
options is used and DWARF2_DEBUGGING_INFO is in effect.
return a->dw_attr_val.v.val_loc_list;
}
+static inline dw_loc_list_ref *
+AT_loc_list_ptr (dw_attr_ref a)
+{
+ gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
+ return &a->dw_attr_val.v.val_loc_list;
+}
+
/* Add an address constant attribute value to a DIE. */
static inline void
return a ? AT_file (a) : NULL;
}
-/* Return TRUE if the language is C or C++. */
-
-static inline bool
-is_c_family (void)
-{
- unsigned int lang = get_AT_unsigned (comp_unit_die, DW_AT_language);
-
- return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_ObjC
- || lang == DW_LANG_C99
- || lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus);
-}
-
/* Return TRUE if the language is C++. */
static inline bool
|| lang == DW_LANG_Fortran95);
}
-/* Return TRUE if the language is Java. */
-
-static inline bool
-is_java (void)
-{
- unsigned int lang = get_AT_unsigned (comp_unit_die, DW_AT_language);
-
- return lang == DW_LANG_Java;
-}
-
/* Return TRUE if the language is Ada. */
static inline bool
decl_die->decl_id = decl_id;
}
-/* Add a variable location node to the linked list for DECL. */
+/* Return how many bits covers PIECE EXPR_LIST. */
+
+static int
+decl_piece_bitsize (rtx piece)
+{
+ int ret = (int) GET_MODE (piece);
+ if (ret)
+ return ret;
+ gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
+ && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
+ return INTVAL (XEXP (XEXP (piece, 0), 0));
+}
+
+/* Return pointer to the location of location note in PIECE EXPR_LIST. */
+
+static rtx *
+decl_piece_varloc_ptr (rtx piece)
+{
+ if ((int) GET_MODE (piece))
+ return &XEXP (piece, 0);
+ else
+ return &XEXP (XEXP (piece, 0), 1);
+}
+
+/* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
+ Next is the chain of following piece nodes. */
+
+static rtx
+decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
+{
+ if (bitsize <= (int) MAX_MACHINE_MODE)
+ return alloc_EXPR_LIST (bitsize, loc_note, next);
+ else
+ return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
+ GEN_INT (bitsize),
+ loc_note), next);
+}
+
+/* Return rtx that should be stored into loc field for
+ LOC_NOTE and BITPOS/BITSIZE. */
+
+static rtx
+construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
+ HOST_WIDE_INT bitsize)
+{
+ if (bitsize != -1)
+ {
+ loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
+ if (bitpos != 0)
+ loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
+ }
+ return loc_note;
+}
+
+/* This function either modifies location piece list *DEST in
+ place (if SRC and INNER is NULL), or copies location piece list
+ *SRC to *DEST while modifying it. Location BITPOS is modified
+ to contain LOC_NOTE, any pieces overlapping it are removed resp.
+ not copied and if needed some padding around it is added.
+ When modifying in place, DEST should point to EXPR_LIST where
+ earlier pieces cover PIECE_BITPOS bits, when copying SRC points
+ to the start of the whole list and INNER points to the EXPR_LIST
+ where earlier pieces cover PIECE_BITPOS bits. */
static void
-add_var_loc_to_decl (tree decl, struct var_loc_node *loc)
+adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
+ HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
+ HOST_WIDE_INT bitsize, rtx loc_note)
{
- unsigned int decl_id = DECL_UID (decl);
+ int diff;
+ bool copy = inner != NULL;
+
+ if (copy)
+ {
+ /* First copy all nodes preceding the current bitpos. */
+ while (src != inner)
+ {
+ *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
+ decl_piece_bitsize (*src), NULL_RTX);
+ dest = &XEXP (*dest, 1);
+ src = &XEXP (*src, 1);
+ }
+ }
+ /* Add padding if needed. */
+ if (bitpos != piece_bitpos)
+ {
+ *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
+ copy ? NULL_RTX : *dest);
+ dest = &XEXP (*dest, 1);
+ }
+ else if (*dest && decl_piece_bitsize (*dest) == bitsize)
+ {
+ gcc_assert (!copy);
+ /* A piece with correct bitpos and bitsize already exists,
+ just update the location for it and return. */
+ *decl_piece_varloc_ptr (*dest) = loc_note;
+ return;
+ }
+ /* Add the piece that changed. */
+ *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
+ dest = &XEXP (*dest, 1);
+ /* Skip over pieces that overlap it. */
+ diff = bitpos - piece_bitpos + bitsize;
+ if (!copy)
+ src = dest;
+ while (diff > 0 && *src)
+ {
+ rtx piece = *src;
+ diff -= decl_piece_bitsize (piece);
+ if (copy)
+ src = &XEXP (piece, 1);
+ else
+ {
+ *src = XEXP (piece, 1);
+ free_EXPR_LIST_node (piece);
+ }
+ }
+ /* Add padding if needed. */
+ if (diff < 0 && *src)
+ {
+ if (!copy)
+ dest = src;
+ *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
+ dest = &XEXP (*dest, 1);
+ }
+ if (!copy)
+ return;
+ /* Finally copy all nodes following it. */
+ while (*src)
+ {
+ *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
+ decl_piece_bitsize (*src), NULL_RTX);
+ dest = &XEXP (*dest, 1);
+ src = &XEXP (*src, 1);
+ }
+}
+
+/* Add a variable location node to the linked list for DECL. */
+
+static struct var_loc_node *
+add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
+{
+ unsigned int decl_id;
var_loc_list *temp;
void **slot;
+ struct var_loc_node *loc = NULL;
+ HOST_WIDE_INT bitsize = -1, bitpos = -1;
+
+ if (DECL_DEBUG_EXPR_IS_FROM (decl))
+ {
+ tree realdecl = DECL_DEBUG_EXPR (decl);
+ if (realdecl && handled_component_p (realdecl))
+ {
+ HOST_WIDE_INT maxsize;
+ tree innerdecl;
+ innerdecl
+ = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize);
+ if (!DECL_P (innerdecl)
+ || DECL_IGNORED_P (innerdecl)
+ || TREE_STATIC (innerdecl)
+ || bitsize <= 0
+ || bitpos + bitsize > 256
+ || bitsize != maxsize)
+ return NULL;
+ decl = innerdecl;
+ }
+ }
+ decl_id = DECL_UID (decl);
slot = htab_find_slot_with_hash (decl_loc_table, decl, decl_id, INSERT);
if (*slot == NULL)
{
if (temp->last)
{
+ struct var_loc_node *last = temp->last, *unused = NULL;
+ rtx *piece_loc = NULL, last_loc_note;
+ int piece_bitpos = 0;
+ if (last->next)
+ {
+ last = last->next;
+ gcc_assert (last->next == NULL);
+ }
+ if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
+ {
+ piece_loc = &last->loc;
+ do
+ {
+ int cur_bitsize = decl_piece_bitsize (*piece_loc);
+ if (piece_bitpos + cur_bitsize > bitpos)
+ break;
+ piece_bitpos += cur_bitsize;
+ piece_loc = &XEXP (*piece_loc, 1);
+ }
+ while (*piece_loc);
+ }
+ /* TEMP->LAST here is either pointer to the last but one or
+ last element in the chained list, LAST is pointer to the
+ last element. */
+ if (label && strcmp (last->label, label) == 0)
+ {
+ /* For SRA optimized variables if there weren't any real
+ insns since last note, just modify the last node. */
+ if (piece_loc != NULL)
+ {
+ adjust_piece_list (piece_loc, NULL, NULL,
+ bitpos, piece_bitpos, bitsize, loc_note);
+ return NULL;
+ }
+ /* If the last note doesn't cover any instructions, remove it. */
+ if (temp->last != last)
+ {
+ temp->last->next = NULL;
+ unused = last;
+ last = temp->last;
+ gcc_assert (strcmp (last->label, label) != 0);
+ }
+ else
+ {
+ gcc_assert (temp->first == temp->last);
+ memset (temp->last, '\0', sizeof (*temp->last));
+ temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
+ return temp->last;
+ }
+ }
+ if (bitsize == -1 && NOTE_P (last->loc))
+ last_loc_note = last->loc;
+ else if (piece_loc != NULL
+ && *piece_loc != NULL_RTX
+ && piece_bitpos == bitpos
+ && decl_piece_bitsize (*piece_loc) == bitsize)
+ last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
+ else
+ last_loc_note = NULL_RTX;
/* If the current location is the same as the end of the list,
and either both or neither of the locations is uninitialized,
we have nothing to do. */
- if ((!rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->last->var_loc_note),
- NOTE_VAR_LOCATION_LOC (loc->var_loc_note)))
- || ((NOTE_VAR_LOCATION_STATUS (temp->last->var_loc_note)
- != NOTE_VAR_LOCATION_STATUS (loc->var_loc_note))
- && ((NOTE_VAR_LOCATION_STATUS (temp->last->var_loc_note)
+ if (last_loc_note == NULL_RTX
+ || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
+ NOTE_VAR_LOCATION_LOC (loc_note)))
+ || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
+ != NOTE_VAR_LOCATION_STATUS (loc_note))
+ && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
== VAR_INIT_STATUS_UNINITIALIZED)
- || (NOTE_VAR_LOCATION_STATUS (loc->var_loc_note)
+ || (NOTE_VAR_LOCATION_STATUS (loc_note)
== VAR_INIT_STATUS_UNINITIALIZED))))
{
- /* Add LOC to the end of list and update LAST. */
- temp->last->next = loc;
- temp->last = loc;
+ /* Add LOC to the end of list and update LAST. If the last
+ element of the list has been removed above, reuse its
+ memory for the new node, otherwise allocate a new one. */
+ if (unused)
+ {
+ loc = unused;
+ memset (loc, '\0', sizeof (*loc));
+ }
+ else
+ loc = GGC_CNEW (struct var_loc_node);
+ if (bitsize == -1 || piece_loc == NULL)
+ loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
+ else
+ adjust_piece_list (&loc->loc, &last->loc, piece_loc,
+ bitpos, piece_bitpos, bitsize, loc_note);
+ last->next = loc;
+ /* Ensure TEMP->LAST will point either to the new last but one
+ element of the chain, or to the last element in it. */
+ if (last != temp->last)
+ temp->last = last;
}
+ else if (unused)
+ ggc_free (unused);
}
- /* Do not add empty location to the beginning of the list. */
- else if (NOTE_VAR_LOCATION_LOC (loc->var_loc_note) != NULL_RTX)
+ else
{
+ loc = GGC_CNEW (struct var_loc_node);
temp->first = loc;
temp->last = loc;
+ loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
}
+ return loc;
}
\f
/* Keep track of the number of spaces used to indent the
if ((at->dw_attr == DW_AT_type
&& (tag == DW_TAG_pointer_type
|| tag == DW_TAG_reference_type
+ || tag == DW_TAG_rvalue_reference_type
|| tag == DW_TAG_ptr_to_member_type))
|| (at->dw_attr == DW_AT_friend
&& tag == DW_TAG_friend))
case DW_TAG_enumeration_type:
case DW_TAG_pointer_type:
case DW_TAG_reference_type:
+ case DW_TAG_rvalue_reference_type:
case DW_TAG_string_type:
case DW_TAG_structure_type:
case DW_TAG_subroutine_type:
if (c->die_tag == DW_TAG_pointer_type
|| c->die_tag == DW_TAG_reference_type
+ || c->die_tag == DW_TAG_rvalue_reference_type
|| c->die_tag == DW_TAG_const_type
|| c->die_tag == DW_TAG_volatile_type)
{
case DW_TAG_interface_type:
case DW_TAG_pointer_type:
case DW_TAG_reference_type:
+ case DW_TAG_rvalue_reference_type:
case DW_TAG_string_type:
case DW_TAG_subroutine_type:
case DW_TAG_ptr_to_member_type:
case DW_AT_name:
case DW_AT_type:
case DW_AT_virtuality:
+ case DW_AT_linkage_name:
case DW_AT_MIPS_linkage_name:
add_dwarf_attr (clone, a);
break;
free (entry);
}
-/* Copy DIE and its ancestors, up to, but not including, the compile unit
+/* Copy DIE and its ancestors, up to, but not including, the compile unit
or type unit entry, to a new tree. Adds the new tree to UNIT and returns
a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
to check if the ancestor has already been copied into UNIT. */
unsigned long lsize = size_of_locs (AT_loc (a));
/* Block length. */
- size += constant_size (lsize);
+ if (dwarf_version >= 4)
+ size += size_of_uleb128 (lsize);
+ else
+ size += constant_size (lsize);
size += lsize;
}
break;
* a->dw_attr_val.v.val_vec.elt_size; /* block */
break;
case dw_val_class_flag:
- size += 1;
+ if (dwarf_version >= 4)
+ /* Currently all add_AT_flag calls pass in 1 as last argument,
+ so DW_FORM_flag_present can be used. If that ever changes,
+ we'll need to use DW_FORM_flag and have some optimization
+ in build_abbrev_table that will change those to
+ DW_FORM_flag_present if it is set to 1 in all DIEs using
+ the same abbrev entry. */
+ gcc_assert (a->dw_attr_val.v.val_flag == 1);
+ else
+ size += 1;
break;
case dw_val_class_die_ref:
if (AT_ref_external (a))
gcc_unreachable ();
}
case dw_val_class_range_list:
- case dw_val_class_offset:
case dw_val_class_loc_list:
+ if (dwarf_version >= 4)
+ return DW_FORM_sec_offset;
+ /* FALLTHRU */
+ case dw_val_class_offset:
switch (DWARF_OFFSET_SIZE)
{
case 4:
gcc_unreachable ();
}
case dw_val_class_loc:
+ if (dwarf_version >= 4)
+ return DW_FORM_exprloc;
switch (constant_size (size_of_locs (AT_loc (a))))
{
case 1:
gcc_unreachable ();
}
case dw_val_class_flag:
+ if (dwarf_version >= 4)
+ {
+ /* Currently all add_AT_flag calls pass in 1 as last argument,
+ so DW_FORM_flag_present can be used. If that ever changes,
+ we'll need to use DW_FORM_flag and have some optimization
+ in build_abbrev_table that will change those to
+ DW_FORM_flag_present if it is set to 1 in all DIEs using
+ the same abbrev entry. */
+ gcc_assert (a->dw_attr_val.v.val_flag == 1);
+ return DW_FORM_flag_present;
+ }
return DW_FORM_flag;
case dw_val_class_die_ref:
if (AT_ref_external (a))
return DW_FORM_addr;
case dw_val_class_lineptr:
case dw_val_class_macptr:
- return DW_FORM_data;
+ return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
case dw_val_class_str:
return AT_string_form (a);
case dw_val_class_file:
}
/* Return a new location list, given the begin and end range, and the
- expression. gensym tells us whether to generate a new internal symbol for
- this location list node, which is done for the head of the list only. */
+ expression. */
static inline dw_loc_list_ref
new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
- const char *section, unsigned int gensym)
+ const char *section)
{
dw_loc_list_ref retlist = GGC_CNEW (dw_loc_list_node);
retlist->end = end;
retlist->expr = expr;
retlist->section = section;
- if (gensym)
- retlist->ll_symbol = gen_internal_sym ("LLST");
return retlist;
}
-/* Add a location description expression to a location list. */
+/* Generate a new internal symbol for this location list node, if it
+ hasn't got one yet. */
static inline void
-add_loc_descr_to_loc_list (dw_loc_list_ref *list_head, dw_loc_descr_ref descr,
- const char *begin, const char *end,
- const char *section)
+gen_llsym (dw_loc_list_ref list)
{
- dw_loc_list_ref *d;
-
- /* Find the end of the chain. */
- for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
- ;
-
- /* Add a new location list node to the list. */
- *d = new_loc_list (descr, begin, end, section, 0);
+ gcc_assert (!list->ll_symbol);
+ list->ll_symbol = gen_internal_sym ("LLST");
}
/* Output the location list given to us. */
if (dwarf_version < 4 && die->die_id.die_symbol)
output_die_symbol (die);
- dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (0x%lx) %s)",
+ dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
(unsigned long)die->die_offset,
dwarf_tag_name (die->die_tag));
size = size_of_locs (AT_loc (a));
/* Output the block length for this list of location operations. */
- dw2_asm_output_data (constant_size (size), size, "%s", name);
+ if (dwarf_version >= 4)
+ dw2_asm_output_data_uleb128 (size, "%s", name);
+ else
+ dw2_asm_output_data (constant_size (size), size, "%s", name);
output_loc_sequence (AT_loc (a));
break;
}
case dw_val_class_flag:
+ if (dwarf_version >= 4)
+ {
+ /* Currently all add_AT_flag calls pass in 1 as last argument,
+ so DW_FORM_flag_present can be used. If that ever changes,
+ we'll need to use DW_FORM_flag and have some optimization
+ in build_abbrev_table that will change those to
+ DW_FORM_flag_present if it is set to 1 in all DIEs using
+ the same abbrev entry. */
+ gcc_assert (AT_flag (a) == 1);
+ if (flag_debug_asm)
+ fprintf (asm_out_file, "\t\t\t%s %s\n",
+ ASM_COMMENT_START, name);
+ break;
+ }
dw2_asm_output_data (1, AT_flag (a), "%s", name);
break;
/* Add null byte to terminate sibling list. */
if (die->die_child != NULL)
- dw2_asm_output_data (1, 0, "end of children of DIE 0x%lx",
+ dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
(unsigned long) die->die_offset);
}
{
int ver = dwarf_version;
- /* Don't mark the output as DWARF-4 until we make full use of the
- version 4 extensions, and gdb supports them. For now, -gdwarf-4
- selects only a few extensions from the DWARF-4 spec. */
- if (ver > 3)
- ver = 3;
if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
dw2_asm_output_data (4, 0xffffffff,
"Initial length escape value indicating 64-bit DWARF extension");
/* Add a new entry to .debug_ranges corresponding to a pair of
labels. */
-static unsigned int
-add_ranges_by_labels (const char *begin, const char *end)
+static void
+add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
+ bool *added)
{
unsigned int in_use = ranges_by_label_in_use;
+ unsigned int offset;
if (in_use == ranges_by_label_allocated)
{
ranges_by_label[in_use].end = end;
ranges_by_label_in_use = in_use + 1;
- return add_ranges_num (-(int)in_use - 1);
+ offset = add_ranges_num (-(int)in_use - 1);
+ if (!*added)
+ {
+ add_AT_range_list (die, DW_AT_ranges, offset);
+ *added = true;
+ }
}
static void
output_ranges (void)
{
unsigned i;
- static const char *const start_fmt = "Offset 0x%x";
+ static const char *const start_fmt = "Offset %#x";
const char *fmt = start_fmt;
for (i = 0; i < ranges_table_in_use; i++)
int ndirs;
int idx_offset;
int i;
- int idx;
if (!last_emitted_file)
{
}
/* Emit the directory name table. */
- idx = 1;
idx_offset = dirs[0].length > 0 ? 1 : 0;
for (i = 1 - idx_offset; i < ndirs; i++)
dw2_asm_output_nstring (dirs[i].path,
dirs[i].length
- !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
- "Directory Entry: 0x%x", i + idx_offset);
+ "Directory Entry: %#x", i + idx_offset);
dw2_asm_output_data (1, 0, "End directory table");
files[file_idx].path + dirs[dir_idx].length, ver);
dw2_asm_output_nstring
- (filebuf, -1, "File Entry: 0x%x", (unsigned) i + 1);
+ (filebuf, -1, "File Entry: %#x", (unsigned) i + 1);
/* Include directory index. */
dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
NULL);
#else
dw2_asm_output_nstring (files[file_idx].path + dirs[dir_idx].length, -1,
- "File Entry: 0x%x", (unsigned) i + 1);
+ "File Entry: %#x", (unsigned) i + 1);
/* Include directory index. */
dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
unsigned long function;
int ver = dwarf_version;
- /* Don't mark the output as DWARF-4 until we make full use of the
- version 4 extensions, and gdb supports them. For now, -gdwarf-4
- selects only a few extensions from the DWARF-4 spec. */
- if (ver > 3)
- ver = 3;
-
ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, 0);
ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, 0);
ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, 0);
dw2_asm_output_data (1, 1,
"Minimum Instruction Length");
+ if (ver >= 4)
+ dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
+ "Maximum Operations Per Instruction");
dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
"Default is_stmt_start flag");
dw2_asm_output_data (1, DWARF_LINE_BASE,
break;
}
- dw2_asm_output_data (1, n_op_args, "opcode: 0x%x has %d args",
+ dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
opc, n_op_args);
}
base_type_result = new_die (DW_TAG_base_type, comp_unit_die, type);
- /* This probably indicates a bug. */
- if (! TYPE_NAME (type))
- add_name_attribute (base_type_result, "__unknown__");
-
add_AT_unsigned (base_type_result, DW_AT_byte_size,
int_size_in_bytes (type));
add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
((is_const_type ? TYPE_QUAL_CONST : 0)
| (is_volatile_type ? TYPE_QUAL_VOLATILE : 0)));
- /* If we do, then we can just use its DIE, if it exists. */
- if (qualified_type)
+ if (qualified_type == sizetype
+ && TYPE_NAME (qualified_type)
+ && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
{
- mod_type_die = lookup_type_die (qualified_type);
+#ifdef ENABLE_CHECKING
+ gcc_assert (TREE_CODE (TREE_TYPE (TYPE_NAME (qualified_type)))
+ == INTEGER_TYPE
+ && TYPE_PRECISION (TREE_TYPE (TYPE_NAME (qualified_type)))
+ == TYPE_PRECISION (qualified_type)
+ && TYPE_UNSIGNED (TREE_TYPE (TYPE_NAME (qualified_type)))
+ == TYPE_UNSIGNED (qualified_type));
+#endif
+ qualified_type = TREE_TYPE (TYPE_NAME (qualified_type));
+ }
+
+ /* If we do, then we can just use its DIE, if it exists. */
+ if (qualified_type)
+ {
+ mod_type_die = lookup_type_die (qualified_type);
if (mod_type_die)
return mod_type_die;
}
name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
/* Handle C typedef types. */
- if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name))
+ if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name)
+ && !DECL_ARTIFICIAL (name))
{
tree dtype = TREE_TYPE (name);
}
else if (code == REFERENCE_TYPE)
{
- mod_type_die = new_die (DW_TAG_reference_type, comp_unit_die, type);
+ if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
+ mod_type_die = new_die (DW_TAG_rvalue_reference_type, comp_unit_die,
+ type);
+ else
+ mod_type_die = new_die (DW_TAG_reference_type, comp_unit_die, type);
add_AT_unsigned (mod_type_die, DW_AT_byte_size,
simple_type_size_in_bits (type) / BITS_PER_UNIT);
item_type = TREE_TYPE (type);
name = DECL_NAME (name);
add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
}
+ /* This probably indicates a bug. */
+ else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
+ add_name_attribute (mod_type_die, "__unknown__");
if (qualified_type)
equate_type_number_to_die (qualified_type, mod_type_die);
return new_loc_descr (DW_OP_fbreg, offset, 0);
}
}
- else if (fde
- && fde->drap_reg != INVALID_REGNUM
+ else if (!optimize
+ && fde
&& (fde->drap_reg == REGNO (reg)
|| fde->vdrap_reg == REGNO (reg)))
{
/* Use cfa+offset to represent the location of arguments passed
- on stack when drap is used to align stack. */
+ on the stack when drap is used to align the stack.
+ Only do this when not optimizing, because for optimized code
+ var-tracking is supposed to track where the arguments live and
+ the register used as vdrap or drap in some spot might be used
+ for something else in other parts of the routine. */
return new_loc_descr (DW_OP_fbreg, offset, 0);
}
{
rtx rtl = *rtlp;
+ if (GET_CODE (rtl) == UNSPEC)
+ {
+ /* If delegitimize_address couldn't do anything with the UNSPEC, assume
+ we can't express it in the debug info. */
+#ifdef ENABLE_CHECKING
+ inform (current_function_decl
+ ? DECL_SOURCE_LOCATION (current_function_decl)
+ : UNKNOWN_LOCATION,
+ "non-delegitimized UNSPEC %d found in variable location",
+ XINT (rtl, 1));
+#endif
+ expansion_failed (NULL_TREE, rtl,
+ "UNSPEC hasn't been delegitimized.\n");
+ return 1;
+ }
+
if (GET_CODE (rtl) != SYMBOL_REF)
return 0;
if (mem_loc_result == NULL)
mem_loc_result = tls_mem_loc_descriptor (rtl);
if (mem_loc_result != 0)
- add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
+ {
+ if (GET_MODE_SIZE (GET_MODE (rtl)) > DWARF2_ADDR_SIZE)
+ {
+ expansion_failed (NULL_TREE, rtl, "DWARF address size mismatch");
+ return 0;
+ }
+ else if (GET_MODE_SIZE (GET_MODE (rtl)) == DWARF2_ADDR_SIZE)
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
+ else
+ add_loc_descr (&mem_loc_result,
+ new_loc_descr (DW_OP_deref_size,
+ GET_MODE_SIZE (GET_MODE (rtl)), 0));
+ }
+ else
+ {
+ rtx new_rtl = avoid_constant_pool_reference (rtl);
+ if (new_rtl != rtl)
+ return mem_loc_descriptor (new_rtl, mode, initialized);
+ }
break;
case LO_SUM:
pool. */
case CONST:
case SYMBOL_REF:
- /* Alternatively, the symbol in the constant pool might be referenced
- by a different symbol. */
- if (GET_CODE (rtl) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (rtl))
- {
- bool marked;
- rtx tmp = get_pool_constant_mark (rtl, &marked);
-
- if (GET_CODE (tmp) == SYMBOL_REF)
- {
- rtl = tmp;
- if (CONSTANT_POOL_ADDRESS_P (tmp))
- get_pool_constant_mark (tmp, &marked);
- else
- marked = true;
- }
-
- /* If all references to this pool constant were optimized away,
- it was not output and thus we can't represent it.
- FIXME: might try to use DW_OP_const_value here, though
- DW_OP_piece complicates it. */
- if (!marked)
- {
- expansion_failed (NULL_TREE, rtl,
- "Constant was removed from constant pool.\n");
- return 0;
- }
- }
-
if (GET_CODE (rtl) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
{
op = DW_OP_div;
goto do_binop;
- case MOD:
+ case UMOD:
op = DW_OP_mod;
goto do_binop;
add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
break;
+ case MOD:
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ mem_loc_result = op0;
+ add_loc_descr (&mem_loc_result, op1);
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
+ add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
+ break;
+
case NOT:
op = DW_OP_not;
goto do_unop;
goto do_scompare;
do_scompare:
- if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) != MODE_INT
- || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
- || GET_MODE (XEXP (rtl, 0)) != GET_MODE (XEXP (rtl, 1)))
+ if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
+ || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 1))) > DWARF2_ADDR_SIZE)
break;
+ else
+ {
+ enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
- op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
- VAR_INIT_STATUS_INITIALIZED);
- op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
- VAR_INIT_STATUS_INITIALIZED);
+ if (op_mode == VOIDmode)
+ op_mode = GET_MODE (XEXP (rtl, 1));
+ if (op_mode != VOIDmode && GET_MODE_CLASS (op_mode) != MODE_INT)
+ break;
- if (op0 == 0 || op1 == 0)
- break;
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
- if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) < DWARF2_ADDR_SIZE)
- {
- int shift = DWARF2_ADDR_SIZE
- - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
- shift *= BITS_PER_UNIT;
- add_loc_descr (&op0, int_loc_descriptor (shift));
- add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
- else
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ if (op_mode != VOIDmode
+ && GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
{
- add_loc_descr (&op1, int_loc_descriptor (shift));
- add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
+ int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode);
+ shift *= BITS_PER_UNIT;
+ /* For eq/ne, if the operands are known to be zero-extended,
+ there is no need to do the fancy shifting up. */
+ if (op == DW_OP_eq || op == DW_OP_ne)
+ {
+ dw_loc_descr_ref last0, last1;
+ for (last0 = op0;
+ last0->dw_loc_next != NULL;
+ last0 = last0->dw_loc_next)
+ ;
+ for (last1 = op1;
+ last1->dw_loc_next != NULL;
+ last1 = last1->dw_loc_next)
+ ;
+ /* deref_size zero extends, and for constants we can check
+ whether they are zero extended or not. */
+ if (((last0->dw_loc_opc == DW_OP_deref_size
+ && last0->dw_loc_oprnd1.v.val_int
+ <= GET_MODE_SIZE (op_mode))
+ || (CONST_INT_P (XEXP (rtl, 0))
+ && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
+ == (INTVAL (XEXP (rtl, 0))
+ & GET_MODE_MASK (op_mode))))
+ && ((last1->dw_loc_opc == DW_OP_deref_size
+ && last1->dw_loc_oprnd1.v.val_int
+ <= GET_MODE_SIZE (op_mode))
+ || (CONST_INT_P (XEXP (rtl, 1))
+ && (unsigned HOST_WIDE_INT)
+ INTVAL (XEXP (rtl, 1))
+ == (INTVAL (XEXP (rtl, 1))
+ & GET_MODE_MASK (op_mode)))))
+ goto do_compare;
+ }
+ add_loc_descr (&op0, int_loc_descriptor (shift));
+ add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
+ else
+ {
+ add_loc_descr (&op1, int_loc_descriptor (shift));
+ add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
+ }
}
}
goto do_ucompare;
do_ucompare:
- if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) != MODE_INT
- || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
- || GET_MODE (XEXP (rtl, 0)) != GET_MODE (XEXP (rtl, 1)))
+ if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) > DWARF2_ADDR_SIZE
+ || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 1))) > DWARF2_ADDR_SIZE)
break;
+ else
+ {
+ enum machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
- op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
- VAR_INIT_STATUS_INITIALIZED);
- op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
- VAR_INIT_STATUS_INITIALIZED);
+ if (op_mode == VOIDmode)
+ op_mode = GET_MODE (XEXP (rtl, 1));
+ if (op_mode != VOIDmode && GET_MODE_CLASS (op_mode) != MODE_INT)
+ break;
- if (op0 == 0 || op1 == 0)
- break;
+ op0 = mem_loc_descriptor (XEXP (rtl, 0), mode,
+ VAR_INIT_STATUS_INITIALIZED);
+ op1 = mem_loc_descriptor (XEXP (rtl, 1), mode,
+ VAR_INIT_STATUS_INITIALIZED);
- if (GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) < DWARF2_ADDR_SIZE)
- {
- HOST_WIDE_INT mask = GET_MODE_MASK (GET_MODE (XEXP (rtl, 0)));
- add_loc_descr (&op0, int_loc_descriptor (mask));
- add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
- else
+ if (op0 == 0 || op1 == 0)
+ break;
+
+ if (op_mode != VOIDmode
+ && GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
{
- add_loc_descr (&op1, int_loc_descriptor (mask));
- add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
+ HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
+ dw_loc_descr_ref last0, last1;
+ for (last0 = op0;
+ last0->dw_loc_next != NULL;
+ last0 = last0->dw_loc_next)
+ ;
+ for (last1 = op1;
+ last1->dw_loc_next != NULL;
+ last1 = last1->dw_loc_next)
+ ;
+ if (CONST_INT_P (XEXP (rtl, 0)))
+ op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
+ /* deref_size zero extends, so no need to mask it again. */
+ else if (last0->dw_loc_opc != DW_OP_deref_size
+ || last0->dw_loc_oprnd1.v.val_int
+ > GET_MODE_SIZE (op_mode))
+ {
+ add_loc_descr (&op0, int_loc_descriptor (mask));
+ add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
+ }
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
+ /* deref_size zero extends, so no need to mask it again. */
+ else if (last1->dw_loc_opc != DW_OP_deref_size
+ || last1->dw_loc_oprnd1.v.val_int
+ > GET_MODE_SIZE (op_mode))
+ {
+ add_loc_descr (&op1, int_loc_descriptor (mask));
+ add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
+ }
}
- }
- else
- {
- HOST_WIDE_INT bias = 1;
- bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
- add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
- if (CONST_INT_P (XEXP (rtl, 1)))
- op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
- + INTVAL (XEXP (rtl, 1)));
else
- add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
+ {
+ HOST_WIDE_INT bias = 1;
+ bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
+ add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
+ if (CONST_INT_P (XEXP (rtl, 1)))
+ op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
+ + INTVAL (XEXP (rtl, 1)));
+ else
+ add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
+ bias, 0));
+ }
}
goto do_compare;
case US_MULT:
case SS_DIV:
case US_DIV:
+ case SS_PLUS:
+ case US_PLUS:
+ case SS_MINUS:
+ case US_MINUS:
+ case SS_NEG:
+ case US_NEG:
+ case SS_ABS:
+ case SS_ASHIFT:
+ case US_ASHIFT:
+ case SS_TRUNCATE:
+ case US_TRUNCATE:
case UDIV:
- case UMOD:
case UNORDERED:
case ORDERED:
case UNEQ:
case POPCOUNT:
case PARITY:
case ASM_OPERANDS:
+ case VEC_MERGE:
+ case VEC_SELECT:
+ case VEC_CONCAT:
+ case VEC_DUPLICATE:
case UNSPEC:
case HIGH:
/* If delegitimize_address couldn't do anything with the UNSPEC, we
initialized);
if (loc_result == NULL)
loc_result = tls_mem_loc_descriptor (rtl);
+ if (loc_result == NULL)
+ {
+ rtx new_rtl = avoid_constant_pool_reference (rtl);
+ if (new_rtl != rtl)
+ loc_result = loc_descriptor (new_rtl, mode, initialized);
+ }
break;
case CONCAT:
case VAR_LOCATION:
/* Single part. */
- if (GET_CODE (XEXP (rtl, 1)) != PARALLEL)
+ if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
{
- loc_result = loc_descriptor (XEXP (XEXP (rtl, 1), 0), mode,
- initialized);
+ rtx loc = PAT_VAR_LOCATION_LOC (rtl);
+ if (GET_CODE (loc) == EXPR_LIST)
+ loc = XEXP (loc, 0);
+ loc_result = loc_descriptor (loc, mode, initialized);
break;
}
break;
case CONST_DOUBLE:
+ if (mode == VOIDmode)
+ mode = GET_MODE (rtl);
+
if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
{
+ gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
+
/* Note that a CONST_DOUBLE rtx could represent either an integer
or a floating-point constant. A CONST_DOUBLE is used whenever
the constant requires more than one word in order to be
adequately represented. We output CONST_DOUBLEs as blocks. */
- if (GET_MODE (rtl) != VOIDmode)
- mode = GET_MODE (rtl);
-
loc_result = new_loc_descr (DW_OP_implicit_value,
GET_MODE_SIZE (mode), 0);
if (SCALAR_FLOAT_MODE_P (mode))
else
{
loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
- loc_result->dw_loc_oprnd2.v.val_double.high
- = CONST_DOUBLE_HIGH (rtl);
- loc_result->dw_loc_oprnd2.v.val_double.low
- = CONST_DOUBLE_LOW (rtl);
+ loc_result->dw_loc_oprnd2.v.val_double
+ = rtx_to_double_int (rtl);
}
}
break;
case CONST_VECTOR:
+ if (mode == VOIDmode)
+ mode = GET_MODE (rtl);
+
if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
{
unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
unsigned int i;
unsigned char *p;
- mode = GET_MODE (rtl);
+ gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
switch (GET_MODE_CLASS (mode))
{
case MODE_VECTOR_INT:
for (i = 0, p = array; i < length; i++, p += elt_size)
{
rtx elt = CONST_VECTOR_ELT (rtl, i);
- HOST_WIDE_INT lo, hi;
-
- switch (GET_CODE (elt))
- {
- case CONST_INT:
- lo = INTVAL (elt);
- hi = -(lo < 0);
- break;
-
- case CONST_DOUBLE:
- lo = CONST_DOUBLE_LOW (elt);
- hi = CONST_DOUBLE_HIGH (elt);
- break;
-
- default:
- gcc_unreachable ();
- }
+ double_int val = rtx_to_double_int (elt);
if (elt_size <= sizeof (HOST_WIDE_INT))
- insert_int (lo, elt_size, p);
+ insert_int (double_int_to_shwi (val), elt_size, p);
else
{
- unsigned char *p0 = p;
- unsigned char *p1 = p + sizeof (HOST_WIDE_INT);
-
gcc_assert (elt_size == 2 * sizeof (HOST_WIDE_INT));
- if (WORDS_BIG_ENDIAN)
- {
- p0 = p1;
- p1 = p;
- }
- insert_int (lo, sizeof (HOST_WIDE_INT), p0);
- insert_int (hi, sizeof (HOST_WIDE_INT), p1);
+ insert_double (val, p);
}
}
break;
if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
&& (dwarf_version >= 4 || !dwarf_strict))
{
- loc_result = new_loc_descr (DW_OP_implicit_value,
- DWARF2_ADDR_SIZE, 0);
- loc_result->dw_loc_oprnd2.val_class = dw_val_class_addr;
- loc_result->dw_loc_oprnd2.v.val_addr = rtl;
+ loc_result = new_loc_descr (DW_OP_addr, 0, 0);
+ loc_result->dw_loc_oprnd1.val_class = dw_val_class_addr;
+ loc_result->dw_loc_oprnd1.v.val_addr = rtl;
+ add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
VEC_safe_push (rtx, gc, used_rtx_array, rtl);
}
break;
&& DECL_BY_REFERENCE (decl));
}
-/* Return single element location list containing loc descr REF. */
-
-static dw_loc_list_ref
-single_element_loc_list (dw_loc_descr_ref ref)
-{
- return new_loc_list (ref, NULL, NULL, NULL, 0);
-}
-
/* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
for VARLOC. */
{
gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
/* Single part. */
- if (GET_CODE (XEXP (varloc, 1)) != PARALLEL)
+ if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
{
- varloc = XEXP (XEXP (varloc, 1), 0);
+ varloc = PAT_VAR_LOCATION_LOC (varloc);
+ if (GET_CODE (varloc) == EXPR_LIST)
+ varloc = XEXP (varloc, 0);
mode = GET_MODE (varloc);
if (MEM_P (varloc))
{
- varloc = XEXP (varloc, 0);
- have_address = 1;
+ rtx addr = XEXP (varloc, 0);
+ descr = mem_loc_descriptor (addr, mode, initialized);
+ if (descr)
+ have_address = 1;
+ else
+ {
+ rtx x = avoid_constant_pool_reference (varloc);
+ if (x != varloc)
+ descr = mem_loc_descriptor (x, mode, initialized);
+ }
}
- descr = mem_loc_descriptor (varloc, mode, initialized);
+ else
+ descr = mem_loc_descriptor (varloc, mode, initialized);
}
else
return 0;
}
else
{
- descr = loc_descriptor (varloc, DECL_MODE (loc), initialized);
+ if (GET_CODE (varloc) == VAR_LOCATION)
+ mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
+ else
+ mode = DECL_MODE (loc);
+ descr = loc_descriptor (varloc, mode, initialized);
have_address = 1;
}
return descr;
}
-/* Return dwarf representation of location list representing for
- LOC_LIST of DECL. WANT_ADDRESS has the same meaning as in
- loc_list_from_tree function. */
+/* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
+ if it is not possible. */
+
+static dw_loc_descr_ref
+new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize)
+{
+ if ((bitsize % BITS_PER_UNIT) == 0)
+ return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
+ else if (dwarf_version >= 3 || !dwarf_strict)
+ return new_loc_descr (DW_OP_bit_piece, bitsize, 0);
+ else
+ return NULL;
+}
+
+/* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
+ for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
+
+static dw_loc_descr_ref
+dw_sra_loc_expr (tree decl, rtx loc)
+{
+ rtx p;
+ unsigned int padsize = 0;
+ dw_loc_descr_ref descr, *descr_tail;
+ unsigned HOST_WIDE_INT decl_size;
+ rtx varloc;
+ enum var_init_status initialized;
+
+ if (DECL_SIZE (decl) == NULL
+ || !host_integerp (DECL_SIZE (decl), 1))
+ return NULL;
+
+ decl_size = tree_low_cst (DECL_SIZE (decl), 1);
+ descr = NULL;
+ descr_tail = &descr;
+
+ for (p = loc; p; p = XEXP (p, 1))
+ {
+ unsigned int bitsize = decl_piece_bitsize (p);
+ rtx loc_note = *decl_piece_varloc_ptr (p);
+ dw_loc_descr_ref cur_descr;
+ dw_loc_descr_ref *tail, last = NULL;
+ unsigned int opsize = 0;
+
+ if (loc_note == NULL_RTX
+ || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
+ {
+ padsize += bitsize;
+ continue;
+ }
+ initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
+ varloc = NOTE_VAR_LOCATION (loc_note);
+ cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
+ if (cur_descr == NULL)
+ {
+ padsize += bitsize;
+ continue;
+ }
+
+ /* Check that cur_descr either doesn't use
+ DW_OP_*piece operations, or their sum is equal
+ to bitsize. Otherwise we can't embed it. */
+ for (tail = &cur_descr; *tail != NULL;
+ tail = &(*tail)->dw_loc_next)
+ if ((*tail)->dw_loc_opc == DW_OP_piece)
+ {
+ opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
+ * BITS_PER_UNIT;
+ last = *tail;
+ }
+ else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
+ {
+ opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
+ last = *tail;
+ }
+
+ if (last != NULL && opsize != bitsize)
+ {
+ padsize += bitsize;
+ continue;
+ }
+
+ /* If there is a hole, add a DW_OP_*piece after an empty DWARF
+ expression, which denotes that those bits were optimized out. */
+ if (padsize)
+ {
+ if (padsize > decl_size)
+ return NULL;
+ decl_size -= padsize;
+ *descr_tail = new_loc_descr_op_bit_piece (padsize);
+ if (*descr_tail == NULL)
+ return NULL;
+ descr_tail = &(*descr_tail)->dw_loc_next;
+ padsize = 0;
+ }
+ *descr_tail = cur_descr;
+ descr_tail = tail;
+ if (bitsize > decl_size)
+ return NULL;
+ decl_size -= bitsize;
+ if (last == NULL)
+ {
+ *descr_tail = new_loc_descr_op_bit_piece (bitsize);
+ if (*descr_tail == NULL)
+ return NULL;
+ descr_tail = &(*descr_tail)->dw_loc_next;
+ }
+ }
+
+ /* If there were any non-empty expressions, add padding till the end of
+ the decl. */
+ if (descr != NULL && decl_size != 0)
+ {
+ *descr_tail = new_loc_descr_op_bit_piece (decl_size);
+ if (*descr_tail == NULL)
+ return NULL;
+ }
+ return descr;
+}
+
+/* Return the dwarf representation of the location list LOC_LIST of
+ DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
+ function. */
static dw_loc_list_ref
-dw_loc_list (var_loc_list * loc_list, tree decl, int want_address)
+dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
{
const char *endname, *secname;
- dw_loc_list_ref list;
rtx varloc;
enum var_init_status initialized;
struct var_loc_node *node;
dw_loc_descr_ref descr;
char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
+ dw_loc_list_ref list = NULL;
+ dw_loc_list_ref *listp = &list;
/* Now that we know what section we are using for a base,
actually construct the list of locations.
This means we have to special case the last node, and generate
a range of [last location start, end of function label]. */
- node = loc_list->first;
secname = secname_for_decl (decl);
- if (NOTE_VAR_LOCATION_LOC (node->var_loc_note))
- initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- else
- initialized = VAR_INIT_STATUS_INITIALIZED;
- varloc = NOTE_VAR_LOCATION (node->var_loc_note);
- descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
-
- if (loc_list && loc_list->first != loc_list->last)
- list = new_loc_list (descr, node->label, node->next->label, secname, 1);
- else
- return single_element_loc_list (descr);
- node = node->next;
-
- if (!node)
- return NULL;
-
- for (; node->next; node = node->next)
- if (NOTE_VAR_LOCATION_LOC (node->var_loc_note) != NULL_RTX)
+ for (node = loc_list->first; node; node = node->next)
+ if (GET_CODE (node->loc) == EXPR_LIST
+ || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
{
- /* The variable has a location between NODE->LABEL and
- NODE->NEXT->LABEL. */
- initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- varloc = NOTE_VAR_LOCATION (node->var_loc_note);
- descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
- add_loc_descr_to_loc_list (&list, descr,
- node->label, node->next->label, secname);
+ if (GET_CODE (node->loc) == EXPR_LIST)
+ {
+ /* This requires DW_OP_{,bit_}piece, which is not usable
+ inside DWARF expressions. */
+ if (want_address != 2)
+ continue;
+ descr = dw_sra_loc_expr (decl, node->loc);
+ if (descr == NULL)
+ continue;
+ }
+ else
+ {
+ initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
+ varloc = NOTE_VAR_LOCATION (node->loc);
+ descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
+ }
+ if (descr)
+ {
+ /* The variable has a location between NODE->LABEL and
+ NODE->NEXT->LABEL. */
+ if (node->next)
+ endname = node->next->label;
+ /* If the variable has a location at the last label
+ it keeps its location until the end of function. */
+ else if (!current_function_decl)
+ endname = text_end_label;
+ else
+ {
+ ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
+ current_function_funcdef_no);
+ endname = ggc_strdup (label_id);
+ }
+
+ *listp = new_loc_list (descr, node->label, endname, secname);
+ listp = &(*listp)->dw_loc_next;
+ }
}
- /* If the variable has a location at the last label
- it keeps its location until the end of function. */
- if (NOTE_VAR_LOCATION_LOC (node->var_loc_note) != NULL_RTX)
- {
- if (!current_function_decl)
- endname = text_end_label;
- else
- {
- ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
- current_function_funcdef_no);
- endname = ggc_strdup (label_id);
- }
+ /* Try to avoid the overhead of a location list by emitting a single
+ location expression instead, but only if we didn't have more than
+ one location entry in the first place. If some entries were not
+ representable, we don't want to pretend that the single entry that
+ was representable applies to the entire scope in which the variable
+ is available. */
+ if (list && loc_list->first->next)
+ gen_llsym (list);
- initialized = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- varloc = NOTE_VAR_LOCATION (node->var_loc_note);
- descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
- add_loc_descr_to_loc_list (&list, descr, node->label, endname, secname);
- }
return list;
}
static bool
single_element_loc_list_p (dw_loc_list_ref list)
{
- return (!list->dw_loc_next && !list->begin && !list->end);
+ gcc_assert (!list->dw_loc_next || list->ll_symbol);
+ return !list->ll_symbol;
}
/* To each location in list LIST add loc descr REF. */
TODO: We handle only simple cases of RET or LIST having at most one
element. General case would inolve sorting the lists in program order
- and merging them that will need some additional work.
+ and merging them that will need some additional work.
Adding that will improve quality of debug info especially for SRA-ed
structures. */
case RESULT_DECL:
case FUNCTION_DECL:
{
- rtx rtl = rtl_for_decl_location (loc);
+ rtx rtl;
var_loc_list *loc_list = lookup_decl_loc (loc);
- if (loc_list && loc_list->first
- && (list_ret = dw_loc_list (loc_list, loc, want_address)))
- have_address = want_address != 0;
- else if (rtl == NULL_RTX)
+ if (loc_list && loc_list->first)
+ {
+ list_ret = dw_loc_list (loc_list, loc, want_address);
+ have_address = want_address != 0;
+ break;
+ }
+ rtl = rtl_for_decl_location (loc);
+ if (rtl == NULL_RTX)
{
expansion_failed (loc, NULL_RTX, "DECL has no RTL");
return 0;
if (bytepos > 0)
add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
else if (bytepos < 0)
- loc_list_plus_const (list_ret, bytepos);
+ loc_list_plus_const (list_ret, bytepos);
have_address = 1;
break;
case CEIL_DIV_EXPR:
case ROUND_DIV_EXPR:
case TRUNC_DIV_EXPR:
+ if (TYPE_UNSIGNED (TREE_TYPE (loc)))
+ return 0;
op = DW_OP_div;
goto do_binop;
case CEIL_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- op = DW_OP_mod;
- goto do_binop;
+ if (TYPE_UNSIGNED (TREE_TYPE (loc)))
+ {
+ op = DW_OP_mod;
+ goto do_binop;
+ }
+ list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0);
+ list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0);
+ if (list_ret == 0 || list_ret1 == 0)
+ return 0;
+
+ add_loc_list (&list_ret, list_ret1);
+ if (list_ret == 0)
+ return 0;
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
+ add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
+ break;
case MULT_EXPR:
op = DW_OP_mul;
add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
}
if (ret)
- list_ret = single_element_loc_list (ret);
+ list_ret = new_loc_list (ret, NULL, NULL, NULL);
return list_ret;
}
else
{
enum dwarf_location_atom op;
-
+
/* The DWARF2 standard says that we should assume that the structure
address is already on the stack, so we can specify a structure
field address by using DW_OP_plus_uconst. */
-
+
#ifdef MIPS_DEBUGGING_INFO
/* ??? The SGI dwarf reader does not handle the DW_OP_plus_uconst
operator correctly. It works only if we leave the offset on the
#else
op = DW_OP_plus_uconst;
#endif
-
+
loc_descr = new_loc_descr (op, offset, 0);
}
}
return val;
}
+/* Writes double_int values to dw_vec_const array.  VAL is stored at
+   DEST as two HOST_WIDE_INT sized halves, ordered according to the
+   target's word endianness.  */
+
+static void
+insert_double (double_int val, unsigned char *dest)
+{
+  unsigned char *p0 = dest;
+  unsigned char *p1 = dest + sizeof (HOST_WIDE_INT);
+
+  if (WORDS_BIG_ENDIAN)
+    {
+      /* Big-endian words: the high half is emitted first.  */
+      p0 = p1;
+      p1 = dest;
+    }
+
+  insert_int ((HOST_WIDE_INT) val.low, sizeof (HOST_WIDE_INT), p0);
+  insert_int ((HOST_WIDE_INT) val.high, sizeof (HOST_WIDE_INT), p1);
+}
+
/* Writes floating point values to dw_vec_const array. */
static void
for (i = 0, p = array; i < length; i++, p += elt_size)
{
rtx elt = CONST_VECTOR_ELT (rtl, i);
- HOST_WIDE_INT lo, hi;
-
- switch (GET_CODE (elt))
- {
- case CONST_INT:
- lo = INTVAL (elt);
- hi = -(lo < 0);
- break;
-
- case CONST_DOUBLE:
- lo = CONST_DOUBLE_LOW (elt);
- hi = CONST_DOUBLE_HIGH (elt);
- break;
-
- default:
- gcc_unreachable ();
- }
+ double_int val = rtx_to_double_int (elt);
if (elt_size <= sizeof (HOST_WIDE_INT))
- insert_int (lo, elt_size, p);
+ insert_int (double_int_to_shwi (val), elt_size, p);
else
{
- unsigned char *p0 = p;
- unsigned char *p1 = p + sizeof (HOST_WIDE_INT);
-
gcc_assert (elt_size == 2 * sizeof (HOST_WIDE_INT));
- if (WORDS_BIG_ENDIAN)
- {
- p0 = p1;
- p1 = p;
- }
- insert_int (lo, sizeof (HOST_WIDE_INT), p0);
- insert_int (hi, sizeof (HOST_WIDE_INT), p1);
+ insert_double (val, p);
}
}
break;
return true;
case CONST_STRING:
- resolve_one_addr (&rtl, NULL);
- add_AT_addr (die, DW_AT_const_value, rtl);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
- return true;
+ if (dwarf_version >= 4 || !dwarf_strict)
+ {
+ dw_loc_descr_ref loc_result;
+ resolve_one_addr (&rtl, NULL);
+ rtl_addr:
+ loc_result = new_loc_descr (DW_OP_addr, 0, 0);
+ loc_result->dw_loc_oprnd1.val_class = dw_val_class_addr;
+ loc_result->dw_loc_oprnd1.v.val_addr = rtl;
+ add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
+ add_AT_loc (die, DW_AT_location, loc_result);
+ VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ return true;
+ }
+ return false;
case CONST:
if (CONSTANT_P (XEXP (rtl, 0)))
if (!const_ok_for_output (rtl))
return false;
case LABEL_REF:
- add_AT_addr (die, DW_AT_const_value, rtl);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
- return true;
+ if (dwarf_version >= 4 || !dwarf_strict)
+ goto rtl_addr;
+ return false;
case PLUS:
/* In cases where an inlined instance of an inline function is passed
if (rtl)
rtl = avoid_constant_pool_reference (rtl);
+ /* Try harder to get a rtl. If this symbol ends up not being emitted
+ in the current CU, resolve_addr will remove the expression referencing
+ it. */
+ if (rtl == NULL_RTX
+ && TREE_CODE (decl) == VAR_DECL
+ && !DECL_EXTERNAL (decl)
+ && TREE_STATIC (decl)
+ && DECL_NAME (decl)
+ && !DECL_HARD_REGISTER (decl)
+ && DECL_MODE (decl) != VOIDmode)
+ {
+ rtl = make_decl_rtl_for_debug (decl);
+ if (!MEM_P (rtl)
+ || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
+ || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
+ rtl = NULL_RTX;
+ }
+
return rtl;
}
tree offset;
int volatilep = 0, unsignedp = 0;
- /* If the decl isn't a VAR_DECL, or if it isn't public or static, or if
+ /* If the decl isn't a VAR_DECL, or if it isn't static, or if
it does not have a value (the offset into the common area), or if it
is thread local (as opposed to global) then it isn't common, and shouldn't
be handled as such. */
if (TREE_CODE (decl) != VAR_DECL
- || !TREE_PUBLIC (decl)
|| !TREE_STATIC (decl)
|| !DECL_HAS_VALUE_EXPR_P (decl)
|| !is_fortran ())
a constant value. That way we are better to use add_const_value_attribute
rather than expanding constant value equivalent. */
loc_list = lookup_decl_loc (decl);
- if (loc_list && loc_list->first && loc_list->first == loc_list->last)
+ if (loc_list
+ && loc_list->first
+ && loc_list->first->next == NULL
+ && NOTE_P (loc_list->first->loc)
+ && NOTE_VAR_LOCATION (loc_list->first->loc)
+ && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
{
- enum var_init_status status;
struct var_loc_node *node;
node = loc_list->first;
- status = NOTE_VAR_LOCATION_STATUS (node->var_loc_note);
- rtl = NOTE_VAR_LOCATION (node->var_loc_note);
- if (GET_CODE (rtl) == VAR_LOCATION
- && GET_CODE (XEXP (rtl, 1)) != PARALLEL)
- rtl = XEXP (XEXP (rtl, 1), 0);
+ rtl = NOTE_VAR_LOCATION_LOC (node->loc);
+ if (GET_CODE (rtl) == EXPR_LIST)
+ rtl = XEXP (rtl, 0);
if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
&& add_const_value_attribute (die, rtl))
return true;
if (!cfa_equal_p (&last_cfa, &next_cfa))
{
*list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
- start_label, last_label, section,
- list == NULL);
+ start_label, last_label, section);
list_tail = &(*list_tail)->dw_loc_next;
last_cfa = next_cfa;
if (!cfa_equal_p (&last_cfa, &next_cfa))
{
*list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
- start_label, last_label, section,
- list == NULL);
+ start_label, last_label, section);
list_tail = &(*list_tail)->dw_loc_next;
start_label = last_label;
}
+
*list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
- start_label, fde->dw_fde_end, section,
- list == NULL);
+ start_label, fde->dw_fde_end, section);
+
+ if (list && list->dw_loc_next)
+ gen_llsym (list);
return list;
}
add_AT_string (die, DW_AT_comp_dir, remap_debug_filename (wd));
}
+/* Return the default for DW_AT_lower_bound, or -1 if there is not any
+   default.  DW_AT_lower_bound may be omitted when an array's lower
+   bound equals the language's default; callers compare against the
+   value returned here before deciding to omit the attribute.  */
+
+static int
+lower_bound_default (void)
+{
+  switch (get_AT_unsigned (comp_unit_die, DW_AT_language))
+    {
+    /* Zero-based array languages.  */
+    case DW_LANG_C:
+    case DW_LANG_C89:
+    case DW_LANG_C99:
+    case DW_LANG_C_plus_plus:
+    case DW_LANG_ObjC:
+    case DW_LANG_ObjC_plus_plus:
+    case DW_LANG_Java:
+      return 0;
+    /* One-based array languages.  */
+    case DW_LANG_Fortran77:
+    case DW_LANG_Fortran90:
+    case DW_LANG_Fortran95:
+      return 1;
+    /* For these languages a default is only recognized when emitting
+       DWARF 4 or later.  */
+    case DW_LANG_UPC:
+    case DW_LANG_D:
+    case DW_LANG_Python:
+      return dwarf_version >= 4 ? 0 : -1;
+    case DW_LANG_Ada95:
+    case DW_LANG_Ada83:
+    case DW_LANG_Cobol74:
+    case DW_LANG_Cobol85:
+    case DW_LANG_Pascal83:
+    case DW_LANG_Modula2:
+    case DW_LANG_PLI:
+      return dwarf_version >= 4 ? 1 : -1;
+    default:
+      return -1;
+    }
+}
+
/* Given a tree node describing an array bound (either lower or upper) output
a representation for that bound. */
case INTEGER_CST:
{
unsigned int prec = simple_type_size_in_bits (TREE_TYPE (bound));
+ int dflt;
/* Use the default if possible. */
if (bound_attr == DW_AT_lower_bound
- && (((is_c_family () || is_java ()) && integer_zerop (bound))
- || (is_fortran () && integer_onep (bound))))
+ && host_integerp (bound, 0)
+ && (dflt = lower_bound_default ()) != -1
+ && tree_low_cst (bound, 0) == dflt)
;
/* Otherwise represent the bound as an unsigned value with the
case RESULT_DECL:
{
dw_die_ref decl_die = lookup_decl_die (bound);
- dw_loc_list_ref loc;
/* ??? Can this happen, or should the variable have been bound
first? Probably it can, since I imagine that we try to create
the list, and won't have created a forward reference to a
later parameter. */
if (decl_die != NULL)
- add_AT_die_ref (subrange_die, bound_attr, decl_die);
- else
{
- loc = loc_list_from_tree (bound, 0);
- add_AT_location_description (subrange_die, bound_attr, loc);
+ add_AT_die_ref (subrange_die, bound_attr, decl_die);
+ break;
}
- break;
}
+ /* FALLTHRU */
default:
{
dw_loc_list_ref list;
list = loc_list_from_tree (bound, 2);
+ if (list == NULL || single_element_loc_list_p (list))
+ {
+ /* If DW_AT_*bound is not a reference nor constant, it is
+ a DWARF expression rather than location description.
+ For that loc_list_from_tree (bound, 0) is needed.
+ If that fails to give a single element list,
+ fall back to outputting this as a reference anyway. */
+ dw_loc_list_ref list2 = loc_list_from_tree (bound, 0);
+ if (list2 && single_element_loc_list_p (list2))
+ {
+ add_AT_loc (subrange_die, bound_attr, list2->expr);
+ break;
+ }
+ }
if (list == NULL)
break;
decl_die = new_die (DW_TAG_variable, ctx, bound);
add_AT_flag (decl_die, DW_AT_artificial, 1);
add_type_attribute (decl_die, TREE_TYPE (bound), 1, 0, ctx);
- if (list->dw_loc_next)
- add_AT_loc_list (decl_die, DW_AT_location, list);
- else
- add_AT_loc (decl_die, DW_AT_location, list->expr);
-
+ add_AT_location_description (decl_die, DW_AT_location, list);
add_AT_die_ref (subrange_die, bound_attr, decl_die);
break;
}
0));
/* GNU extension: Record what type this method came from originally. */
- if (debug_info_level > DINFO_LEVEL_TERSE)
+ if (debug_info_level > DINFO_LEVEL_TERSE
+ && DECL_CONTEXT (func_decl))
add_AT_die_ref (die, DW_AT_containing_type,
lookup_type_die (DECL_CONTEXT (func_decl)));
}
if ((TREE_CODE (decl) == FUNCTION_DECL || TREE_CODE (decl) == VAR_DECL)
&& TREE_PUBLIC (decl)
&& !DECL_ABSTRACT (decl)
- && !(TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
- && !is_fortran ())
+ && !(TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl)))
{
/* Defer until we have an assembler name set. */
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
deferred_asm_name = asm_name;
}
else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
- add_AT_string (die, DW_AT_MIPS_linkage_name,
+ add_AT_string (die, AT_linkage_name,
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
}
}
add_subscript_info (array_die, type, collapse_nested_arrays);
/* Add representation of the type of the elements of this array type and
- emit the corresponding DIE if we haven't done it already. */
+ emit the corresponding DIE if we haven't done it already. */
element_type = TREE_TYPE (type);
if (collapse_nested_arrays)
while (TREE_CODE (element_type) == ARRAY_TYPE)
if (info->dimen[dim].lower_bound)
{
/* If it is the default value, omit it. */
- if ((is_c_family () || is_java ())
- && integer_zerop (info->dimen[dim].lower_bound))
- ;
- else if (is_fortran ()
- && integer_onep (info->dimen[dim].lower_bound))
+ int dflt;
+
+ if (host_integerp (info->dimen[dim].lower_bound, 0)
+ && (dflt = lower_bound_default ()) != -1
+ && tree_low_cst (info->dimen[dim].lower_bound, 0) == dflt)
;
else
add_descr_info_field (subrange_die, DW_AT_lower_bound,
int i;
for (i = VEC_length (tree, incomplete_types) - 1; i >= 0; i--)
- gen_type_die (VEC_index (tree, incomplete_types, i), comp_unit_die);
+ if (should_emit_struct_debug (VEC_index (tree, incomplete_types, i),
+ DINFO_USAGE_DIR_USE))
+ gen_type_die (VEC_index (tree, incomplete_types, i), comp_unit_die);
}
/* Determine what tag to use for a record type. */
scope_die_for (type, context_die), type);
equate_type_number_to_die (type, type_die);
add_name_attribute (type_die, type_tag (type));
+ if ((dwarf_version >= 4 || !dwarf_strict)
+ && ENUM_IS_SCOPED (type))
+ add_AT_flag (type_die, DW_AT_enum_class, 1);
}
else if (! TYPE_SIZE (type))
return type_die;
dw_die_ref context_die)
{
tree node_or_origin = node ? node : origin;
+ tree ultimate_origin;
dw_die_ref parm_die
= new_die (DW_TAG_formal_parameter, context_die, node);
switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
{
case tcc_declaration:
- if (!origin)
- origin = decl_ultimate_origin (node);
+ ultimate_origin = decl_ultimate_origin (node_or_origin);
+ if (node || ultimate_origin)
+ origin = ultimate_origin;
if (origin != NULL)
add_abstract_origin_attribute (parm_die, origin);
else
/* If we're trying to avoid duplicate debug info, we may not have
emitted the member decl for this function. Emit it now. */
- if (TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
+ if (TYPE_STUB_DECL (type)
+ && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
&& ! lookup_decl_die (member))
{
dw_die_ref type_die;
dw_die_ref old_die;
tree save_fn;
tree context;
- int was_abstract = DECL_ABSTRACT (decl);
+ int was_abstract;
htab_t old_decl_loc_table;
/* Make sure we have the actual abstract inline, not a clone. */
current_function_decl = decl;
push_cfun (DECL_STRUCT_FUNCTION (decl));
+ was_abstract = DECL_ABSTRACT (decl);
set_decl_abstract_flags (decl, 1);
dwarf2out_decl (decl);
if (! was_abstract)
of the pack. Note that the set of pack arguments can be empty.
In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
children DIE.
-
+
Otherwise, we just consider the parameters of DECL. */
while (generic_decl_parm || parm)
{
HOST_WIDE_INT off;
tree com_decl;
tree decl_or_origin = decl ? decl : origin;
+ tree ultimate_origin;
dw_die_ref var_die;
dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
dw_die_ref origin_die;
int declaration = (DECL_EXTERNAL (decl_or_origin)
- /* If DECL is COMDAT and has not actually been
- emitted, we cannot take its address; there
- might end up being no definition anywhere in
- the program. For example, consider the C++
- test case:
-
- template <class T>
- struct S { static const int i = 7; };
-
- template <class T>
- const int S<T>::i;
-
- int f() { return S<int>::i; }
-
- Here, S<int>::i is not DECL_EXTERNAL, but no
- definition is required, so the compiler will
- not emit a definition. */
- || (TREE_CODE (decl_or_origin) == VAR_DECL
- && DECL_COMDAT (decl_or_origin)
- && !TREE_ASM_WRITTEN (decl_or_origin))
|| class_or_namespace_scope_p (context_die));
- if (!origin)
- origin = decl_ultimate_origin (decl);
-
+ ultimate_origin = decl_ultimate_origin (decl_or_origin);
+ if (decl || ultimate_origin)
+ origin = ultimate_origin;
com_decl = fortran_common (decl_or_origin, &off);
/* Symbol in common gets emitted as a child of the common block, in the form
of a data member. */
if (com_decl)
{
- tree field;
dw_die_ref com_die;
dw_loc_list_ref loc;
die_node com_die_arg;
= htab_create_ggc (10, common_block_die_table_hash,
common_block_die_table_eq, NULL);
- field = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
com_die_arg.decl_id = DECL_UID (com_decl);
com_die_arg.die_parent = context_die;
com_die = (dw_die_ref) htab_find (common_block_die_table, &com_die_arg);
/* If the compiler emitted a definition for the DECL declaration
and if we already emitted a DIE for it, don't emit a second
- DIE for it again. */
- if (old_die
- && declaration
- && old_die->die_parent == context_die)
+ DIE for it again. Allow re-declarations of DECLs that are
+ inside functions, though. */
+ if (old_die && declaration && !local_scope_p (context_die))
return;
/* For static data members, the declaration in the class is supposed
static void
gen_reference_type_die (tree type, dw_die_ref context_die)
{
-  dw_die_ref ref_die
-    = new_die (DW_TAG_reference_type, scope_die_for (type, context_die), type);
+  dw_die_ref ref_die, scope_die = scope_die_for (type, context_die);
+
+  /* DWARF 4 introduced a distinct tag for rvalue references; use the
+     plain reference tag for lvalue references or strict older DWARF.  */
+  if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
+    ref_die = new_die (DW_TAG_rvalue_reference_type, scope_die, type);
+  else
+    ref_die = new_die (DW_TAG_reference_type, scope_die, type);
  equate_type_number_to_die (type, ref_die);
  add_type_attribute (ref_die, TREE_TYPE (type), 0, 0, context_die);
}
add_AT_unsigned (die, DW_AT_language, language);
+
+ switch (language)
+ {
+ case DW_LANG_Fortran77:
+ case DW_LANG_Fortran90:
+ case DW_LANG_Fortran95:
+ /* Fortran has case insensitive identifiers and the front-end
+ lowercases everything. */
+ add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
+ break;
+ default:
+ /* The default DW_ID_case_sensitive doesn't need to be specified. */
+ break;
+ }
return die;
}
break;
case LANG_TYPE:
- /* No Dwarf representation currently defined. */
+ /* Just use DW_TAG_unspecified_type. */
+ {
+ dw_die_ref type_die = lookup_type_die (type);
+ if (type_die == NULL)
+ {
+ tree name = TYPE_NAME (type);
+ if (TREE_CODE (name) == TYPE_DECL)
+ name = DECL_NAME (name);
+ type_die = new_die (DW_TAG_unspecified_type, comp_unit_die, type);
+ add_name_attribute (type_die, IDENTIFIER_POINTER (name));
+ equate_type_number_to_die (type, type_die);
+ }
+ }
break;
default:
{
dw_die_ref die;
tree decl_or_origin = decl ? decl : origin;
- tree ultimate_origin = origin ? decl_ultimate_origin (origin) : NULL;
-
- if (ultimate_origin)
- origin = ultimate_origin;
if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
die = lookup_decl_die (decl_or_origin);
{
/* Find die that represents this context. */
if (TYPE_P (context))
- return force_type_die (context);
+ return force_type_die (TYPE_MAIN_VARIANT (context));
else
return force_decl_die (context);
}
gen_decl_die (tree decl, tree origin, dw_die_ref context_die)
{
tree decl_or_origin = decl ? decl : origin;
- tree class_origin = NULL;
+ tree class_origin = NULL, ultimate_origin;
if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
return;
/* If we're emitting a clone, emit info for the abstract instance. */
if (origin || DECL_ORIGIN (decl) != decl)
- dwarf2out_abstract_function (origin ? origin : DECL_ABSTRACT_ORIGIN (decl));
+ dwarf2out_abstract_function (origin
+ ? DECL_ORIGIN (origin)
+ : DECL_ABSTRACT_ORIGIN (decl));
/* If we're emitting an out-of-line copy of an inline function,
emit info for the abstract instance and set up to refer to it. */
complicated because of the possibility that the VAR_DECL really
represents an inlined instance of a formal parameter for an inline
function. */
- if (!origin)
- origin = decl_ultimate_origin (decl);
- if (origin != NULL_TREE && TREE_CODE (origin) == PARM_DECL)
+ ultimate_origin = decl_ultimate_origin (decl_or_origin);
+ if (ultimate_origin != NULL_TREE
+ && TREE_CODE (ultimate_origin) == PARM_DECL)
gen_formal_parameter_die (decl, origin,
true /* Emit name attribute. */,
context_die);
if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
{
- if (is_base_type (TREE_TYPE (decl)))
- at_import_die = base_type_die (TREE_TYPE (decl));
- else
- at_import_die = force_type_die (TREE_TYPE (decl));
+ at_import_die = force_type_die (TREE_TYPE (decl));
/* For namespace N { typedef void T; } using N::T; base_type_die
returns NULL, but DW_TAG_imported_declaration requires
the DW_AT_import tag. Force creation of DW_TAG_typedef. */
gen_decl_die (decl, NULL, context_die);
}
+/* Write the debugging output for DECL.  */
+
+static void
+dwarf2out_function_decl (tree decl)
+{
+  dwarf2out_decl (decl);
+
+  /* Discard the variable location entries accumulated for this
+     function now that its DIEs have been generated.  */
+  htab_empty (decl_loc_table);
+}
+
/* Output a marker (i.e. a label) for the beginning of the generated code for
a lexical block. */
/* Replace DW_AT_name for the decl with name. */
-
+
static void
dwarf2out_set_name (tree decl, tree name)
{
== ((const struct vcall_insn *) y)->insn_uid);
}
+/* Associate VTABLE_SLOT with INSN_UID in the VCALL_INSN_TABLE.  */
+
+static void
+store_vcall_insn (unsigned int vtable_slot, int insn_uid)
+{
+  struct vcall_insn *item = GGC_NEW (struct vcall_insn);
+  struct vcall_insn **slot;
+
+  gcc_assert (item);
+  item->insn_uid = insn_uid;
+  item->vtable_slot = vtable_slot;
+  /* The insn uid itself is used as the hash value, matching the
+     lookup side.  */
+  slot = (struct vcall_insn **)
+      htab_find_slot_with_hash (vcall_insn_table, &item,
+				(hashval_t) insn_uid, INSERT);
+  *slot = item;
+}
+
+/* Return the VTABLE_SLOT associated with INSN_UID, or (unsigned int) -1
+   if no slot was recorded for INSN_UID.  */
+
+static unsigned int
+lookup_vcall_insn (unsigned int insn_uid)
+{
+  struct vcall_insn item;
+  struct vcall_insn *p;
+
+  /* Only insn_uid participates in hashing/equality; vtable_slot is
+     merely initialized for cleanliness.  */
+  item.insn_uid = insn_uid;
+  item.vtable_slot = 0;
+  p = (struct vcall_insn *) htab_find_with_hash (vcall_insn_table,
+						 (void *) &item,
+						 (hashval_t) insn_uid);
+  if (p == NULL)
+    return (unsigned int) -1;
+  return p->vtable_slot;
+}
+
+
/* Called when lowering indirect calls to RTL. We make a note of INSN_UID
and the OBJ_TYPE_REF_TOKEN from ADDR. For C++ virtual calls, the token
is the vtable slot index that we will need to put in the virtual call
{
tree token = OBJ_TYPE_REF_TOKEN (addr);
if (TREE_CODE (token) == INTEGER_CST)
- {
- struct vcall_insn *item = GGC_NEW (struct vcall_insn);
- struct vcall_insn **slot;
-
- gcc_assert (item);
- item->insn_uid = insn_uid;
- item->vtable_slot = TREE_INT_CST_LOW (token);
- slot = (struct vcall_insn **)
- htab_find_slot_with_hash (vcall_insn_table, &item,
- (hashval_t) insn_uid, INSERT);
- *slot = item;
- }
+ store_vcall_insn (TREE_INT_CST_LOW (token), insn_uid);
}
}
+/* Called when scheduling RTL, when a CALL_INSN is split.  Copies the
+   OBJ_TYPE_REF_TOKEN previously associated with OLD_INSN and associates it
+   with NEW_INSN.  */
+
+static void
+dwarf2out_copy_call_info (rtx old_insn, rtx new_insn)
+{
+  unsigned int vtable_slot = lookup_vcall_insn (INSN_UID (old_insn));
+
+  /* (unsigned int) -1 means OLD_INSN had no vtable slot recorded.  */
+  if (vtable_slot != (unsigned int) -1)
+    store_vcall_insn (vtable_slot, INSN_UID (new_insn));
+}
+
/* Called by the final INSN scan whenever we see a virtual function call.
Make an entry into the virtual call table, recording the point of call
and the slot index of the vtable entry used to call the virtual member
static void
dwarf2out_virtual_call (int insn_uid)
{
+ unsigned int vtable_slot = lookup_vcall_insn (insn_uid);
vcall_entry e;
- struct vcall_insn item;
- struct vcall_insn *p;
- item.insn_uid = insn_uid;
- item.vtable_slot = 0;
- p = (struct vcall_insn *) htab_find_with_hash (vcall_insn_table,
- (void *) &item,
- (hashval_t) insn_uid);
- if (p == NULL)
+ if (vtable_slot == (unsigned int) -1)
return;
e.poc_label_num = poc_label_num++;
- e.vtable_slot = p->vtable_slot;
+ e.vtable_slot = vtable_slot;
VEC_safe_push (vcall_entry, gc, vcall_table, &e);
/* Drop a label at the return point to mark the point of call. */
if (next_real == NULL_RTX)
return;
- newloc = GGC_CNEW (struct var_loc_node);
- /* If there were no real insns between note we processed last time
- and this note, use the label we emitted last time. */
+ /* If there were any real insns between note we processed last time
+ and this note (or if it is the first note), clear
+ last_{,postcall_}label so that they are not reused this time. */
if (last_var_location_insn == NULL_RTX
|| last_var_location_insn != next_real
|| last_in_cold_section_p != in_cold_section_p)
{
+ last_label = NULL;
+ last_postcall_label = NULL;
+ }
+
+ decl = NOTE_VAR_LOCATION_DECL (loc_note);
+ newloc = add_var_loc_to_decl (decl, loc_note,
+ NOTE_DURING_CALL_P (loc_note)
+ ? last_postcall_label : last_label);
+ if (newloc == NULL)
+ return;
+
+ /* If there were no real insns between note we processed last time
+ and this note, use the label we emitted last time. Otherwise
+ create a new label and emit it. */
+ if (last_label == NULL)
+ {
ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
loclabel_num++;
last_label = ggc_strdup (loclabel);
- last_postcall_label = NULL;
}
- newloc->var_loc_note = loc_note;
- newloc->next = NULL;
if (!NOTE_DURING_CALL_P (loc_note))
newloc->label = last_label;
newloc->label = last_postcall_label;
}
- if (cfun && in_cold_section_p)
- newloc->section_label = crtl->subsections.cold_section_label;
- else
- newloc->section_label = text_section_label;
-
last_var_location_insn = next_real;
last_in_cold_section_p = in_cold_section_p;
- decl = NOTE_VAR_LOCATION_DECL (loc_note);
- add_var_loc_to_decl (decl, newloc);
}
/* We need to reset the locations at the beginning of each
static void
dwarf2out_begin_function (tree fun)
{
- htab_empty (decl_loc_table);
-
if (function_section (fun) != text_section)
have_multiple_function_sections = true;
breaking out types into comdat sections, do this
for all type definitions. */
if (die->die_tag == DW_TAG_array_type
- || (dwarf_version >= 4
+ || (dwarf_version >= 4
&& is_type_die (die) && ! is_declaration_die (die)))
FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
else
case DW_TAG_packed_type:
case DW_TAG_pointer_type:
case DW_TAG_reference_type:
+ case DW_TAG_rvalue_reference_type:
case DW_TAG_volatile_type:
case DW_TAG_typedef:
case DW_TAG_array_type:
DWARF_TYPE_SIGNATURE_SIZE));
}
-/* Move a DW_AT_MIPS_linkage_name attribute just added to dw_die_ref
+/* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
to the location it would have been added, should we know its
DECL_ASSEMBLER_NAME when we added other attributes. This will
probably improve compactness of debug info, removing equivalent
unsigned ix = VEC_length (dw_attr_node, die->die_attr);
dw_attr_node linkage = *VEC_index (dw_attr_node, die->die_attr, ix - 1);
- gcc_assert (linkage.dw_attr == DW_AT_MIPS_linkage_name);
+ gcc_assert (linkage.dw_attr == AT_linkage_name);
while (--ix > 0)
{
if (GET_CODE (rtl) == SYMBOL_REF
&& SYMBOL_REF_DECL (rtl)
- && TREE_CODE (SYMBOL_REF_DECL (rtl)) == VAR_DECL
&& !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
return 1;
{
dw_die_ref c;
dw_attr_ref a;
- dw_loc_list_ref curr;
+ dw_loc_list_ref *curr;
unsigned ix;
for (ix = 0; VEC_iterate (dw_attr_node, die->die_attr, ix, a); ix++)
switch (AT_class (a))
{
case dw_val_class_loc_list:
- for (curr = AT_loc_list (a); curr != NULL; curr = curr->dw_loc_next)
- if (!resolve_addr_in_expr (curr->expr))
- curr->expr = NULL;
+ curr = AT_loc_list_ptr (a);
+ while (*curr)
+ {
+ if (!resolve_addr_in_expr ((*curr)->expr))
+ {
+ dw_loc_list_ref next = (*curr)->dw_loc_next;
+ if (next && (*curr)->ll_symbol)
+ {
+ gcc_assert (!next->ll_symbol);
+ next->ll_symbol = (*curr)->ll_symbol;
+ }
+ *curr = next;
+ }
+ else
+ curr = &(*curr)->dw_loc_next;
+ }
+ if (!AT_loc_list (a))
+ {
+ remove_AT (die, a->dw_attr);
+ ix--;
+ }
break;
case dw_val_class_loc:
if (!resolve_addr_in_expr (AT_loc (a)))
- a->dw_attr_val.v.val_loc = NULL;
+ {
+ remove_AT (die, a->dw_attr);
+ ix--;
+ }
break;
case dw_val_class_addr:
if (a->dw_attr == DW_AT_const_value
&& resolve_one_addr (&a->dw_attr_val.v.val_addr, NULL))
{
- a->dw_attr = DW_AT_location;
- a->dw_attr_val.val_class = dw_val_class_loc;
- a->dw_attr_val.v.val_loc = NULL;
+ remove_AT (die, a->dw_attr);
+ ix--;
}
break;
default:
tree decl = node->created_for;
if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
{
- add_AT_string (node->die, DW_AT_MIPS_linkage_name,
+ add_AT_string (node->die, AT_linkage_name,
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
move_linkage_attr (node->die);
}
else
{
unsigned fde_idx = 0;
+ bool range_list_added = false;
/* We need to give .debug_loc and .debug_ranges an appropriate
"base address". Use zero so that these addresses become
add_AT_addr (comp_unit_die, DW_AT_low_pc, const0_rtx);
add_AT_addr (comp_unit_die, DW_AT_entry_pc, const0_rtx);
- add_AT_range_list (comp_unit_die, DW_AT_ranges,
- add_ranges_by_labels (text_section_label,
- text_end_label));
- if (flag_reorder_blocks_and_partition)
- add_ranges_by_labels (cold_text_section_label,
- cold_end_label);
+ if (text_section_used)
+ add_ranges_by_labels (comp_unit_die, text_section_label,
+ text_end_label, &range_list_added);
+ if (flag_reorder_blocks_and_partition && cold_text_section_used)
+ add_ranges_by_labels (comp_unit_die, cold_text_section_label,
+ cold_end_label, &range_list_added);
for (fde_idx = 0; fde_idx < fde_table_in_use; fde_idx++)
{
if (fde->dw_fde_switched_sections)
{
if (!fde->in_std_section)
- add_ranges_by_labels (fde->dw_fde_hot_section_label,
- fde->dw_fde_hot_section_end_label);
+ add_ranges_by_labels (comp_unit_die,
+ fde->dw_fde_hot_section_label,
+ fde->dw_fde_hot_section_end_label,
+ &range_list_added);
if (!fde->cold_in_std_section)
- add_ranges_by_labels (fde->dw_fde_unlikely_section_label,
- fde->dw_fde_unlikely_section_end_label);
+ add_ranges_by_labels (comp_unit_die,
+ fde->dw_fde_unlikely_section_label,
+ fde->dw_fde_unlikely_section_end_label,
+ &range_list_added);
}
else if (!fde->in_std_section)
- add_ranges_by_labels (fde->dw_fde_begin,
- fde->dw_fde_end);
+ add_ranges_by_labels (comp_unit_die, fde->dw_fde_begin,
+ fde->dw_fde_end, &range_list_added);
}
- add_ranges (NULL);
+ if (range_list_added)
+ add_ranges (NULL);
}
/* Output location list section if necessary. */
0, /* switch_text_section */
0, /* direct_call */
0, /* virtual_call_token */
+ 0, /* copy_call_info */
0, /* virtual_call */
0, /* set_name */
0 /* start_end_main_source_file */