/* Output variables, constants and external declarations, for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* This file handles generation of all the assembler code
#include "output.h"
#include "toplev.h"
#include "hashtab.h"
-#include "c-pragma.h"
#include "ggc.h"
#include "langhooks.h"
#include "tm_p.h"
#include "debug.h"
#include "target.h"
+#include "targhooks.h"
#include "tree-mudflap.h"
#include "cgraph.h"
#include "cfglayout.h"
#include "basic-block.h"
+#include "tree-iterator.h"
#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h" /* Needed for external data
struct constant_descriptor_rtx;
struct rtx_constant_pool;
-struct varasm_status GTY(())
-{
- /* If we're using a per-function constant pool, this is it. */
- struct rtx_constant_pool *pool;
-
- /* Number of tree-constants deferred during the expansion of this
- function. */
- unsigned int deferred_constants;
-};
-
-#define n_deferred_constants (cfun->varasm->deferred_constants)
+#define n_deferred_constants (crtl->varasm.deferred_constants)
/* Number for making the label on the next
constant that is stored in memory. */
/* We give all constants their own alias set. Perhaps redundant with
MEM_READONLY_P, but pre-dates it. */
-static HOST_WIDE_INT const_alias_set;
+static alias_set_type const_alias_set;
static const char *strip_reg_name (const char *);
static int contains_pointers_p (tree);
static void output_addressed_constants (tree);
static unsigned HOST_WIDE_INT array_size_for_constructor (tree);
static unsigned min_align (unsigned, unsigned);
-static void output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int);
static void globalize_decl (tree);
-static void maybe_assemble_visibility (tree);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_BSS
static void asm_output_bss (FILE *, tree, const char *,
/* A pool of constants that can be shared between functions. */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
+/* TLS emulation. */
+
+static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+ htab_t emutls_htab;
+static GTY (()) tree emutls_object_type;
+/* Emulated TLS objects have the TLS model TLS_MODEL_EMULATED. This
+ macro can be used on them to distinguish the control variable from
+ the initialization template. */
+#define DECL_EMUTLS_VAR_P(D) (TREE_TYPE (D) == emutls_object_type)
+
+#if !defined (NO_DOT_IN_LABEL)
+# define EMUTLS_SEPARATOR "."
+#elif !defined (NO_DOLLAR_IN_LABEL)
+# define EMUTLS_SEPARATOR "$"
+#else
+# define EMUTLS_SEPARATOR "_"
+#endif
+
+/* Create an IDENTIFIER_NODE by prefixing PREFIX to the
+ IDENTIFIER_NODE NAME's name. */
+
+static tree
+prefix_name (const char *prefix, tree name)
+{
+ unsigned plen = strlen (prefix);
+ unsigned nlen = strlen (IDENTIFIER_POINTER (name));
+ char *toname = (char *) alloca (plen + nlen + 1);
+
+ memcpy (toname, prefix, plen);
+ /* Copy NAME's string including its NUL terminator. */
+ memcpy (toname + plen, IDENTIFIER_POINTER (name), nlen + 1);
+
+ /* get_identifier copies the string, so the alloca buffer can die. */
+ return get_identifier (toname);
+}
+
+/* Create an identifier for the struct __emutls_object, given an identifier
+ of the DECL_ASSEMBLY_NAME of the original object. */
+
+static tree
+get_emutls_object_name (tree name)
+{
+ /* Prefer the target-provided prefix; otherwise fall back to
+ "__emutls_v" plus the label separator. */
+ const char *prefix = (targetm.emutls.var_prefix
+ ? targetm.emutls.var_prefix
+ : "__emutls_v" EMUTLS_SEPARATOR);
+ return prefix_name (prefix, name);
+}
+
+/* Default implementation of targetm.emutls.var_fields: build the four
+ fields of struct __emutls_object for TYPE. Fields are chained in
+ reverse creation order, so the returned head of the chain is
+ __size -> __align -> __offset -> __templ. NAME is unused here. */
+
+tree
+default_emutls_var_fields (tree type, tree *name ATTRIBUTE_UNUSED)
+{
+ tree word_type_node, field, next_field;
+
+ field = build_decl (UNKNOWN_LOCATION,
+ FIELD_DECL, get_identifier ("__templ"), ptr_type_node);
+ DECL_CONTEXT (field) = type;
+ next_field = field;
+
+ field = build_decl (UNKNOWN_LOCATION,
+ FIELD_DECL, get_identifier ("__offset"),
+ ptr_type_node);
+ DECL_CONTEXT (field) = type;
+ TREE_CHAIN (field) = next_field;
+ next_field = field;
+
+ word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
+ field = build_decl (UNKNOWN_LOCATION,
+ FIELD_DECL, get_identifier ("__align"),
+ word_type_node);
+ DECL_CONTEXT (field) = type;
+ TREE_CHAIN (field) = next_field;
+ next_field = field;
+
+ field = build_decl (UNKNOWN_LOCATION,
+ FIELD_DECL, get_identifier ("__size"), word_type_node);
+ DECL_CONTEXT (field) = type;
+ TREE_CHAIN (field) = next_field;
+
+ return field;
+}
+
+/* Create the structure for struct __emutls_object. This should match the
+ structure at the top of emutls.c, modulo the union there. */
+
+static tree
+get_emutls_object_type (void)
+{
+ tree type, type_name, field;
+
+ /* The type is built once and cached in emutls_object_type. */
+ type = emutls_object_type;
+ if (type)
+ return type;
+
+ emutls_object_type = type = lang_hooks.types.make_type (RECORD_TYPE);
+ type_name = NULL;
+ /* Let the target lay out the fields; it may also name the type. */
+ field = targetm.emutls.var_fields (type, &type_name);
+ if (!type_name)
+ type_name = get_identifier ("__emutls_object");
+ type_name = build_decl (UNKNOWN_LOCATION,
+ TYPE_DECL, type_name, type);
+ TYPE_NAME (type) = type_name;
+ TYPE_FIELDS (type) = field;
+ layout_type (type);
+
+ return type;
+}
+
+/* Create a read-only variable like DECL, with the same DECL_INITIAL.
+ This will be used for initializing the emulated tls data area. */
+
+static tree
+get_emutls_init_templ_addr (tree decl)
+{
+ tree name, to;
+
+ /* An uninitialized common with no explicit section needs no
+ template when the target can register commons itself. */
+ if (targetm.emutls.register_common && !DECL_INITIAL (decl)
+ && !DECL_SECTION_NAME (decl))
+ return null_pointer_node;
+
+ name = DECL_ASSEMBLER_NAME (decl);
+ /* An empty tmpl_prefix means the target wants the unprefixed name. */
+ if (!targetm.emutls.tmpl_prefix || targetm.emutls.tmpl_prefix[0])
+ {
+ const char *prefix = (targetm.emutls.tmpl_prefix
+ ? targetm.emutls.tmpl_prefix
+ : "__emutls_t" EMUTLS_SEPARATOR);
+ name = prefix_name (prefix, name);
+ }
+
+ to = build_decl (DECL_SOURCE_LOCATION (decl),
+ VAR_DECL, name, TREE_TYPE (decl));
+ SET_DECL_ASSEMBLER_NAME (to, DECL_NAME (to));
+ DECL_TLS_MODEL (to) = TLS_MODEL_EMULATED;
+ DECL_ARTIFICIAL (to) = 1;
+ TREE_USED (to) = TREE_USED (decl);
+ TREE_READONLY (to) = 1;
+ DECL_IGNORED_P (to) = 1;
+ DECL_CONTEXT (to) = DECL_CONTEXT (decl);
+ DECL_SECTION_NAME (to) = DECL_SECTION_NAME (decl);
+
+ /* Mirror linkage so the template is emitted (or not) like DECL. */
+ DECL_WEAK (to) = DECL_WEAK (decl);
+ if (DECL_ONE_ONLY (decl))
+ {
+ make_decl_one_only (to, DECL_ASSEMBLER_NAME (to));
+ TREE_STATIC (to) = TREE_STATIC (decl);
+ TREE_PUBLIC (to) = TREE_PUBLIC (decl);
+ DECL_VISIBILITY (to) = DECL_VISIBILITY (decl);
+ }
+ else
+ TREE_STATIC (to) = 1;
+
+ /* Move the initializer onto the template; DECL itself no longer
+ carries it. */
+ DECL_INITIAL (to) = DECL_INITIAL (decl);
+ DECL_INITIAL (decl) = NULL;
+
+ varpool_finalize_decl (to);
+ return build_fold_addr_expr (to);
+}
+
+/* When emulating tls, we use a control structure for use by the runtime.
+ Create and return this structure. */
+
+tree
+emutls_decl (tree decl)
+{
+ tree name, to;
+ struct tree_map *h, in;
+ void **loc;
+
+ /* Pass through anything that is not a genuine TLS variable, and
+ everything when the target supports TLS natively. */
+ if (targetm.have_tls || decl == NULL || decl == error_mark_node
+ || TREE_CODE (decl) != VAR_DECL || ! DECL_THREAD_LOCAL_P (decl))
+ return decl;
+
+ /* Look up the object in the hash; return the control structure if
+ it has already been created. */
+ if (! emutls_htab)
+ emutls_htab = htab_create_ggc (512, tree_map_hash, tree_map_eq, 0);
+
+ name = DECL_ASSEMBLER_NAME (decl);
+
+ /* Note that we use the hash of the decl's name, rather than a hash
+ of the decl's pointer. In emutls_finish we iterate through the
+ hash table, and we want this traversal to be predictable. */
+ in.hash = htab_hash_string (IDENTIFIER_POINTER (name));
+ in.base.from = decl;
+ loc = htab_find_slot_with_hash (emutls_htab, &in, in.hash, INSERT);
+ h = (struct tree_map *) *loc;
+ if (h != NULL)
+ to = h->to;
+ else
+ {
+ /* First sight of DECL: build its control variable and cache it. */
+ to = build_decl (DECL_SOURCE_LOCATION (decl),
+ VAR_DECL, get_emutls_object_name (name),
+ get_emutls_object_type ());
+
+ h = GGC_NEW (struct tree_map);
+ h->hash = in.hash;
+ h->base.from = decl;
+ h->to = to;
+ *(struct tree_map **) loc = h;
+
+ DECL_TLS_MODEL (to) = TLS_MODEL_EMULATED;
+ DECL_ARTIFICIAL (to) = 1;
+ DECL_IGNORED_P (to) = 1;
+ TREE_READONLY (to) = 0;
+ SET_DECL_ASSEMBLER_NAME (to, DECL_NAME (to));
+ if (DECL_ONE_ONLY (decl))
+ make_decl_one_only (to, DECL_ASSEMBLER_NAME (to));
+ DECL_CONTEXT (to) = DECL_CONTEXT (decl);
+ if (targetm.emutls.var_align_fixed)
+ /* If we're not allowed to change the proxy object's
+ alignment, pretend it's been set by the user. */
+ DECL_USER_ALIGN (to) = 1;
+ }
+
+ /* Note that these fields may need to be updated from time to time from
+ the original decl. Consider:
+ extern __thread int i;
+ int foo() { return i; }
+ __thread int i = 1;
+ in which I goes from external to locally defined and initialized. */
+
+ TREE_STATIC (to) = TREE_STATIC (decl);
+ TREE_USED (to) = TREE_USED (decl);
+ TREE_PUBLIC (to) = TREE_PUBLIC (decl);
+ DECL_EXTERNAL (to) = DECL_EXTERNAL (decl);
+ DECL_COMMON (to) = DECL_COMMON (decl);
+ DECL_WEAK (to) = DECL_WEAK (decl);
+ DECL_VISIBILITY (to) = DECL_VISIBILITY (decl);
+
+ return to;
+}
+
+/* htab_traverse callback for emutls_htab. For each uninitialized
+ common emulated-TLS variable, append a call to
+ __builtin_emutls_register_common (&control, size, align, NULL)
+ to the statement list *XSTMTS. Always returns 1 to continue the
+ traversal. */
+
+static int
+emutls_common_1 (void **loc, void *xstmts)
+{
+ struct tree_map *h = *(struct tree_map **) loc;
+ tree args, x, *pstmts = (tree *) xstmts;
+ tree word_type_node;
+
+ /* Skip variables that are not common or that have a real
+ initializer; those are emitted directly. */
+ if (! DECL_COMMON (h->base.from)
+ || (DECL_INITIAL (h->base.from)
+ && DECL_INITIAL (h->base.from) != error_mark_node))
+ return 1;
+
+ word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
+
+ /* The idea was to call get_emutls_init_templ_addr here, but if we
+ do this and there is an initializer, -fanchor_section loses,
+ because it would be too late to ensure the template is
+ output. */
+ x = null_pointer_node;
+ args = tree_cons (NULL, x, NULL);
+ x = build_int_cst (word_type_node, DECL_ALIGN_UNIT (h->base.from));
+ args = tree_cons (NULL, x, args);
+ x = fold_convert (word_type_node, DECL_SIZE_UNIT (h->base.from));
+ args = tree_cons (NULL, x, args);
+ x = build_fold_addr_expr (h->to);
+ args = tree_cons (NULL, x, args);
+
+ x = built_in_decls[BUILT_IN_EMUTLS_REGISTER_COMMON];
+ x = build_function_call_expr (UNKNOWN_LOCATION, x, args);
+
+ append_to_statement_list (x, pstmts);
+ return 1;
+}
+
+/* Finish emulated-TLS processing: if the target registers common
+ variables at run time, collect the registration calls for every
+ entry in emutls_htab and emit them in a static constructor at the
+ default init priority. No-op if there is nothing to register. */
+
+void
+emutls_finish (void)
+{
+ if (targetm.emutls.register_common)
+ {
+ tree body = NULL_TREE;
+
+ if (emutls_htab == NULL)
+ return;
+
+ htab_traverse_noresize (emutls_htab, emutls_common_1, &body);
+ if (body == NULL_TREE)
+ return;
+
+ cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
+ }
+}
+
/* Helper routines for maintaining section_htab. */
+/* htab equality callback: does the section P1 have the name P2? */
static int
section_entry_eq (const void *p1, const void *p2)
{
- const section *old = (const section *) p1;
- const char *new = (const char *) p2;
+ const section *old = (const section *) p1;
+ const char *new_name = (const char *) p2;
- return strcmp (old->named.name, new) == 0;
+ return strcmp (old->named.name, new_name) == 0;
}
+/* htab hash callback: hash a section by its name string. */
static hashval_t
section_entry_hash (const void *p)
{
- const section *old = p;
+ const section *old = (const section *) p;
return htab_hash_string (old->named.name);
}
+/* htab equality callback: is object_block P1 associated with section P2? */
static int
object_block_entry_eq (const void *p1, const void *p2)
{
- const struct object_block *old = p1;
- const section *new = p2;
+ const struct object_block *old = (const struct object_block *) p1;
+ const section *new_section = (const section *) p2;
- return old->sect == new;
+ return old->sect == new_section;
}
+/* htab hash callback: hash an object_block by its section. */
static hashval_t
object_block_entry_hash (const void *p)
{
- const struct object_block *old = p;
+ const struct object_block *old = (const struct object_block *) p;
return hash_section (old->sect);
}
{
section *sect;
- sect = ggc_alloc (sizeof (struct unnamed_section));
+ sect = GGC_NEW (section);
sect->unnamed.common.flags = flags | SECTION_UNNAMED;
sect->unnamed.callback = callback;
sect->unnamed.data = data;
{
section *sect;
- sect = ggc_alloc (sizeof (struct unnamed_section));
+ sect = GGC_NEW (section);
sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
sect->noswitch.callback = callback;
flags |= SECTION_NAMED;
if (*slot == NULL)
{
- sect = ggc_alloc (sizeof (struct named_section));
+ sect = GGC_NEW (section);
sect->named.common.flags = flags;
sect->named.name = ggc_strdup (name);
sect->named.decl = decl;
/* Create the extended SYMBOL_REF. */
size = RTX_HDR_SIZE + sizeof (struct block_symbol);
- symbol = ggc_alloc_zone (size, &rtl_zone);
+ symbol = (rtx) ggc_alloc_zone (size, &rtl_zone);
/* Initialize the normal SYMBOL_REF fields. */
memset (symbol, 0, size);
tree dsn;
gcc_assert (cfun && current_function_decl);
- if (cfun->unlikely_text_section_name)
+ if (crtl->subsections.unlikely_text_section_name)
return;
dsn = DECL_SECTION_NAME (current_function_decl);
if (flag_function_sections && dsn)
{
- name = alloca (TREE_STRING_LENGTH (dsn) + 1);
+ name = (char *) alloca (TREE_STRING_LENGTH (dsn) + 1);
memcpy (name, TREE_STRING_POINTER (dsn), TREE_STRING_LENGTH (dsn) + 1);
stripped_name = targetm.strip_name_encoding (name);
buffer = ACONCAT ((stripped_name, "_unlikely", NULL));
- cfun->unlikely_text_section_name = ggc_strdup (buffer);
+ crtl->subsections.unlikely_text_section_name = ggc_strdup (buffer);
}
else
- cfun->unlikely_text_section_name = UNLIKELY_EXECUTED_TEXT_SECTION_NAME;
+ crtl->subsections.unlikely_text_section_name = UNLIKELY_EXECUTED_TEXT_SECTION_NAME;
}
/* Tell assembler to switch to unlikely-to-be-executed text section. */
{
if (cfun)
{
- if (!cfun->unlikely_text_section_name)
+ if (!crtl->subsections.unlikely_text_section_name)
initialize_cold_section_name ();
- return get_named_section (NULL, cfun->unlikely_text_section_name, 0);
+ return get_named_section (NULL, crtl->subsections.unlikely_text_section_name, 0);
}
else
return get_named_section (NULL, UNLIKELY_EXECUTED_TEXT_SECTION_NAME, 0);
const char *name;
if (cfun)
- name = cfun->unlikely_text_section_name;
+ name = crtl->subsections.unlikely_text_section_name;
else
name = UNLIKELY_EXECUTED_TEXT_SECTION_NAME;
unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
unsigned HOST_WIDE_INT rounded)
{
- targetm.asm_out.globalize_label (file, name);
+ gcc_assert (strcmp (XSTR (XEXP (DECL_RTL (decl), 0), 0), name) == 0);
+ targetm.asm_out.globalize_decl_name (file, decl);
switch_to_section (bss_section);
#ifdef ASM_DECLARE_OBJECT_NAME
last_assemble_variable_decl = decl;
if (DECL_ONE_ONLY (decl) && HAVE_COMDAT_GROUP)
{
- size_t len = strlen (name) + 3;
- char* rname = alloca (len);
+ const char *dot;
+ size_t len;
+ char* rname;
+
+ dot = strchr (name + 1, '.');
+ if (!dot)
+ dot = name;
+ len = strlen (dot) + 8;
+ rname = (char *) alloca (len);
strcpy (rname, ".rodata");
- strcat (rname, name + 5);
+ strcat (rname, dot);
return get_section (rname, SECTION_LINKONCE, decl);
}
/* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo. */
&& strncmp (name, ".gnu.linkonce.t.", 16) == 0)
{
size_t len = strlen (name) + 1;
- char *rname = alloca (len);
+ char *rname = (char *) alloca (len);
memcpy (rname, name, len);
rname[14] = 'r';
&& strncmp (name, ".text.", 6) == 0)
{
size_t len = strlen (name) + 1;
- char *rname = alloca (len + 2);
+ char *rname = (char *) alloca (len + 2);
memcpy (rname, ".rodata", 7);
memcpy (rname + 7, name + 5, len - 5);
void
set_user_assembler_name (tree decl, const char *name)
{
- char *starred = alloca (strlen (name) + 2);
+ char *starred = (char *) alloca (strlen (name) + 2);
starred[0] = '*';
strcpy (starred + 1, name);
change_decl_assembler_name (decl, get_identifier (starred));
/* Return true if DECL's initializer is suitable for a BSS section. */
static bool
-bss_initializer_p (tree decl)
+bss_initializer_p (const_tree decl)
{
return (DECL_INITIAL (decl) == NULL
|| DECL_INITIAL (decl) == error_mark_node
if (! DECL_USER_ALIGN (decl))
{
#ifdef DATA_ALIGNMENT
- align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
+ unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
+ /* Don't increase alignment too much for TLS variables - TLS space
+ is too precious. */
+ if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
+ align = data_align;
#endif
#ifdef CONSTANT_ALIGNMENT
if (DECL_INITIAL (decl) != 0 && DECL_INITIAL (decl) != error_mark_node)
- align = CONSTANT_ALIGNMENT (DECL_INITIAL (decl), align);
+ {
+ unsigned int const_align = CONSTANT_ALIGNMENT (DECL_INITIAL (decl),
+ align);
+ /* Don't increase alignment too much for TLS variables - TLS space
+ is too precious. */
+ if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
+ align = const_align;
+ }
#endif
}
{
if (DECL_THREAD_LOCAL_P (decl))
return tls_comm_section;
- if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
+ /* This cannot be common bss for an emulated TLS object without
+ a register_common hook. */
+ else if (DECL_TLS_MODEL (decl) == TLS_MODEL_EMULATED
+ && !targetm.emutls.register_common)
+ ;
+ else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
return comm_section;
}
if (DECL_INITIAL (decl) == decl)
return false;
+ /* If this decl is an alias, then we don't want to emit a definition. */
+ if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
+ return false;
+
return true;
}
if (flag_mudflap && TREE_CODE (decl) == VAR_DECL)
mudflap_enqueue_decl (decl);
}
-
-/* Make the rtl for variable VAR be volatile.
- Use this only for static variables. */
-
-void
-make_var_volatile (tree var)
-{
- gcc_assert (MEM_P (DECL_RTL (var)));
-
- MEM_VOLATILE_P (DECL_RTL (var)) = 1;
-}
\f
/* Output a string of literal assembler code
for an `asm' keyword used between functions. */
#endif
}
-void
-default_named_section_asm_out_destructor (rtx symbol, int priority)
+/* Write the address of the entity given by SYMBOL to SEC, emitted as
+ a pointer-size integer at pointer alignment. */
+void
+assemble_addr_to_section (rtx symbol, section *sec)
+{
+ switch_to_section (sec);
+ assemble_align (POINTER_SIZE);
+ assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
+}
+
+/* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
+ not) section for PRIORITY. */
+section *
+get_cdtor_priority_section (int priority, bool constructor_p)
{
- const char *section = ".dtors";
char buf[16];
/* ??? This only works reliably with the GNU linker. */
+ /* ".ctors."/".dtors." plus five digits and a NUL fit in buf[16]. */
+ sprintf (buf, "%s.%.5u",
+ constructor_p ? ".ctors" : ".dtors",
+ /* Invert the numbering so the linker puts us in the proper
+ order; constructors are run from right to left, and the
+ linker sorts in increasing order. */
+ MAX_INIT_PRIORITY - priority);
+ return get_section (buf, SECTION_WRITE, NULL);
+}
+
+/* Emit SYMBOL's address into the .dtors section, or into the
+ numbered .dtors.N section when PRIORITY is non-default. */
+void
+default_named_section_asm_out_destructor (rtx symbol, int priority)
+{
+ section *sec;
+
if (priority != DEFAULT_INIT_PRIORITY)
- {
- sprintf (buf, ".dtors.%.5u",
- /* Invert the numbering so the linker puts us in the proper
- order; constructors are run from right to left, and the
- linker sorts in increasing order. */
- MAX_INIT_PRIORITY - priority);
- section = buf;
- }
+ sec = get_cdtor_priority_section (priority,
+ /*constructor_p=*/false);
+ else
+ sec = get_section (".dtors", SECTION_WRITE, NULL);
- switch_to_section (get_section (section, SECTION_WRITE, NULL));
- assemble_align (POINTER_SIZE);
- assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
+ assemble_addr_to_section (symbol, sec);
}
#ifdef DTORS_SECTION_ASM_OP
default_dtor_section_asm_out_destructor (rtx symbol,
int priority ATTRIBUTE_UNUSED)
{
- switch_to_section (dtors_section);
- assemble_align (POINTER_SIZE);
- assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
+ assemble_addr_to_section (symbol, dtors_section);
}
#endif
+/* Emit SYMBOL's address into the .ctors section, or into the
+ numbered .ctors.N section when PRIORITY is non-default. */
void
default_named_section_asm_out_constructor (rtx symbol, int priority)
{
- const char *section = ".ctors";
- char buf[16];
+ section *sec;
- /* ??? This only works reliably with the GNU linker. */
if (priority != DEFAULT_INIT_PRIORITY)
- {
- sprintf (buf, ".ctors.%.5u",
- /* Invert the numbering so the linker puts us in the proper
- order; constructors are run from right to left, and the
- linker sorts in increasing order. */
- MAX_INIT_PRIORITY - priority);
- section = buf;
- }
+ sec = get_cdtor_priority_section (priority,
+ /*constructor_p=*/true);
+ else
+ sec = get_section (".ctors", SECTION_WRITE, NULL);
- switch_to_section (get_section (section, SECTION_WRITE, NULL));
- assemble_align (POINTER_SIZE);
- assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
+ assemble_addr_to_section (symbol, sec);
}
#ifdef CTORS_SECTION_ASM_OP
default_ctor_section_asm_out_constructor (rtx symbol,
int priority ATTRIBUTE_UNUSED)
{
- switch_to_section (ctors_section);
- assemble_align (POINTER_SIZE);
- assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
+ assemble_addr_to_section (symbol, ctors_section);
}
#endif
\f
/* We win when global object is found, but it is useful to know about weak
symbol as well so we can produce nicer unique names. */
- if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl))
+ if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
type = &weak_global_object_name;
if (!*type)
char tmp_label[100];
bool hot_label_written = false;
- cfun->unlikely_text_section_name = NULL;
+ crtl->subsections.unlikely_text_section_name = NULL;
first_function_block_is_cold = false;
if (flag_reorder_blocks_and_partition)
{
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
- cfun->hot_section_label = ggc_strdup (tmp_label);
+ crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
- cfun->cold_section_label = ggc_strdup (tmp_label);
+ crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
- cfun->hot_section_end_label = ggc_strdup (tmp_label);
+ crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
- cfun->cold_section_end_label = ggc_strdup (tmp_label);
+ crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
const_labelno++;
}
else
{
- cfun->hot_section_label = NULL;
- cfun->cold_section_label = NULL;
- cfun->hot_section_end_label = NULL;
- cfun->cold_section_end_label = NULL;
+ crtl->subsections.hot_section_label = NULL;
+ crtl->subsections.cold_section_label = NULL;
+ crtl->subsections.hot_section_end_label = NULL;
+ crtl->subsections.cold_section_end_label = NULL;
}
/* The following code does not need preprocessing in the assembler. */
if (flag_reorder_blocks_and_partition)
{
switch_to_section (unlikely_text_section ());
- assemble_align (FUNCTION_BOUNDARY);
- ASM_OUTPUT_LABEL (asm_out_file, cfun->cold_section_label);
+ assemble_align (DECL_ALIGN (decl));
+ ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
/* When the function starts with a cold section, we need to explicitly
align the hot section and write out the hot section label.
But if the current function is a thunk, we do not have a CFG. */
- if (!current_function_is_thunk
+ if (!cfun->is_thunk
&& BB_PARTITION (ENTRY_BLOCK_PTR->next_bb) == BB_COLD_PARTITION)
{
switch_to_section (text_section);
- assemble_align (FUNCTION_BOUNDARY);
- ASM_OUTPUT_LABEL (asm_out_file, cfun->hot_section_label);
+ assemble_align (DECL_ALIGN (decl));
+ ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
hot_label_written = true;
first_function_block_is_cold = true;
}
initialize_cold_section_name ();
- if (cfun->unlikely_text_section_name
+ if (crtl->subsections.unlikely_text_section_name
&& strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
- cfun->unlikely_text_section_name) == 0)
+ crtl->subsections.unlikely_text_section_name) == 0)
first_function_block_is_cold = true;
}
switch_to_section (function_section (decl));
if (flag_reorder_blocks_and_partition
&& !hot_label_written)
- ASM_OUTPUT_LABEL (asm_out_file, cfun->hot_section_label);
+ ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
/* Tell assembler to move to target machine's alignment for functions. */
- align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
- if (align < force_align_functions_log)
- align = force_align_functions_log;
+ align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);
if (align > 0)
{
ASM_OUTPUT_ALIGN (asm_out_file, align);
}
/* Handle a user-specified function alignment.
- Note that we still need to align to FUNCTION_BOUNDARY, as above,
+ Note that we still need to align to DECL_ALIGN, as above,
because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
- if (align_functions_log > align
- && cfun->function_frequency != FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
+ if (! DECL_USER_ALIGN (decl)
+ && align_functions_log > align
+ && optimize_function_for_speed_p (cfun))
{
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
save_text_section = in_section;
switch_to_section (unlikely_text_section ());
- ASM_OUTPUT_LABEL (asm_out_file, cfun->cold_section_end_label);
+ ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
if (first_function_block_is_cold)
switch_to_section (text_section);
else
switch_to_section (function_section (decl));
- ASM_OUTPUT_LABEL (asm_out_file, cfun->hot_section_end_label);
+ ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
switch_to_section (save_text_section);
}
}
}
}
+/* Initialize emulated tls object TO, which refers to TLS variable
+ DECL and is initialized by PROXY. */
+
+tree
+default_emutls_var_init (tree to, tree decl, tree proxy)
+{
+ VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 4);
+ constructor_elt *elt;
+ tree type = TREE_TYPE (to);
+ /* Walk TYPE_FIELDS in order; this matches the layout built by
+ default_emutls_var_fields (size, align, offset, template) --
+ TODO confirm for targets overriding the var_fields hook. */
+ tree field = TYPE_FIELDS (type);
+
+ /* First field: object size in bytes. */
+ elt = VEC_quick_push (constructor_elt, v, NULL);
+ elt->index = field;
+ elt->value = fold_convert (TREE_TYPE (field), DECL_SIZE_UNIT (decl));
+
+ /* Second field: alignment in bytes. */
+ elt = VEC_quick_push (constructor_elt, v, NULL);
+ field = TREE_CHAIN (field);
+ elt->index = field;
+ elt->value = build_int_cst (TREE_TYPE (field),
+ DECL_ALIGN_UNIT (decl));
+
+ /* Third field: left null here. */
+ elt = VEC_quick_push (constructor_elt, v, NULL);
+ field = TREE_CHAIN (field);
+ elt->index = field;
+ elt->value = null_pointer_node;
+
+ /* Fourth field: address of the initialization template (PROXY). */
+ elt = VEC_quick_push (constructor_elt, v, NULL);
+ field = TREE_CHAIN (field);
+ elt->index = field;
+ elt->value = proxy;
+
+ return build_constructor (type, v);
+}
+
/* Assemble everything that is needed for a variable or function declaration.
Not used for automatic variables, and not used for function definitions.
Should not be called for variables of incomplete structure type.
rtx decl_rtl, symbol;
section *sect;
- if (lang_hooks.decls.prepare_assemble_variable)
- lang_hooks.decls.prepare_assemble_variable (decl);
+ if (! targetm.have_tls
+ && TREE_CODE (decl) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (decl))
+ {
+ tree to = emutls_decl (decl);
+
+ /* If this variable is defined locally, then we need to initialize the
+ control structure with size and alignment information. We do this
+ at the last moment because tentative definitions can take a locally
+ defined but uninitialized variable and initialize it later, which
+ would result in incorrect contents. */
+ if (! DECL_EXTERNAL (to)
+ && (! DECL_COMMON (to)
+ || (DECL_INITIAL (decl)
+ && DECL_INITIAL (decl) != error_mark_node)))
+ {
+ DECL_INITIAL (to) = targetm.emutls.var_init
+ (to, decl, get_emutls_init_templ_addr (decl));
+
+ /* Make sure the template is marked as needed early enough.
+ Without this, if the variable is placed in a
+ section-anchored block, the template will only be marked
+ when it's too late. */
+ record_references_in_initializer (to);
+ }
+
+ decl = to;
+ }
last_assemble_variable_decl = 0;
/* First make the assembler name(s) global if appropriate. */
sect = get_variable_section (decl, false);
if (TREE_PUBLIC (decl)
- && DECL_NAME (decl)
&& (sect->common.flags & SECTION_COMMON) == 0)
globalize_decl (decl);
}
}
-/* In unit-at-a-time mode, we delay assemble_external processing until
+/* We delay assemble_external processing until
the compilation unit is finalized. This is the best we can do for
right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
it all the way to final. See PR 17982 for further discussion. */
return true;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
- if (strncmp (name, "__builtin_", strlen ("__builtin_")) == 0)
+ if (is_builtin_name (name))
return true;
}
return false;
#endif
}
-/* Output something to declare an external symbol to the assembler.
- (Most assemblers don't need this, so we normally output nothing.)
- Do nothing if DECL is not external. */
+/* This TREE_LIST contains any weak symbol declarations waiting
+ to be emitted. */
+static GTY(()) tree weak_decls;
+
+/* Output something to declare an external symbol to the assembler,
+ and qualifiers such as weakness. (Most assemblers don't need
+ extern declaration, so we normally output nothing.) Do nothing if
+ DECL is not external. */
void
assemble_external (tree decl ATTRIBUTE_UNUSED)
open. If it's not, we should not be calling this function. */
gcc_assert (asm_out_file);
-#ifdef ASM_OUTPUT_EXTERNAL
if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
return;
- if (flag_unit_at_a_time)
- pending_assemble_externals = tree_cons (0, decl,
+ /* We want to output annotation for weak and external symbols at
+ very last to check if they are references or not. */
+
+ if (SUPPORTS_WEAK && DECL_WEAK (decl)
+ /* TREE_STATIC is a weird and abused creature which is not
+ generally the right test for whether an entity has been
+ locally emitted, inlined or otherwise not-really-extern, but
+ for declarations that can be weak, it happens to
+ match. */
+ && !TREE_STATIC (decl)
+ && tree_find_value (weak_decls, decl) == NULL_TREE)
+ weak_decls = tree_cons (NULL, decl, weak_decls);
+
+#ifdef ASM_OUTPUT_EXTERNAL
+ if (tree_find_value (pending_assemble_externals, decl) == NULL_TREE)
+ pending_assemble_externals = tree_cons (NULL, decl,
pending_assemble_externals);
- else
- assemble_external_real (decl);
#endif
}
}
else if (TREE_CODE (decl) == VAR_DECL)
{
- struct cgraph_varpool_node *node = cgraph_varpool_node (decl);
- cgraph_varpool_mark_needed_node (node);
+ struct varpool_node *node = varpool_node (decl);
+ varpool_mark_needed_node (node);
/* C++ frontend use mark_decl_references to force COMDAT variables
to be output that might appear dead otherwise. */
node->force_output = true;
enum machine_mode omode, imode;
unsigned int subalign;
unsigned int subsize, i;
+ enum mode_class mclass;
subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
subalign = MIN (align, subsize * BITS_PER_UNIT);
- omode = mode_for_size (subsize * BITS_PER_UNIT, MODE_INT, 0);
- imode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
+ if (GET_CODE (x) == CONST_FIXED)
+ mclass = GET_MODE_CLASS (GET_MODE (x));
+ else
+ mclass = MODE_INT;
+
+ omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0);
+ imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0);
for (i = 0; i < size; i += subsize)
{
Store them both in the structure *VALUE.
EXP must be reducible. */
-struct addr_const GTY(())
-{
+/* A base rtx plus a constant byte count (OFFSET) from it. */
+struct GTY(()) addr_const {
rtx base;
HOST_WIDE_INT offset;
};
break;
case REAL_CST:
+ case FIXED_CST:
case STRING_CST:
case COMPLEX_CST:
case CONSTRUCTOR:
value->offset = offset;
}
\f
-/* Uniquize all constants that appear in memory.
- Each constant in memory thus far output is recorded
- in `const_desc_table'. */
-
-struct constant_descriptor_tree GTY(())
-{
- /* A MEM for the constant. */
- rtx rtl;
-
- /* The value of the constant. */
- tree value;
-
- /* Hash of value. Computing the hash from value each time
- hashfn is called can't work properly, as that means recursive
- use of the hash table during hash table expansion. */
- hashval_t hash;
-};
static GTY((param_is (struct constant_descriptor_tree)))
htab_t const_desc_htab;
static struct constant_descriptor_tree * build_constant_desc (tree);
static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
+/* Constant pool accessor function. */
+
+htab_t
+constant_pool_htab (void)
+{
+ return const_desc_htab;
+}
+
/* Compute a hash code for a constant expression. */
static hashval_t
const_desc_hash (const void *ptr)
{
- return ((struct constant_descriptor_tree *)ptr)->hash;
+ return ((const struct constant_descriptor_tree *)ptr)->hash;
}
static hashval_t
case REAL_CST:
return real_hash (TREE_REAL_CST_PTR (exp));
+ case FIXED_CST:
+ return fixed_hash (TREE_FIXED_CST_PTR (exp));
+
case STRING_CST:
p = TREE_STRING_POINTER (exp);
len = TREE_STRING_LENGTH (exp);
return hi;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
+ const_hash_1 (TREE_OPERAND (exp, 1)));
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
default:
static int
const_desc_eq (const void *p1, const void *p2)
{
- const struct constant_descriptor_tree *c1 = p1;
- const struct constant_descriptor_tree *c2 = p2;
+ const struct constant_descriptor_tree *const c1
+ = (const struct constant_descriptor_tree *) p1;
+ const struct constant_descriptor_tree *const c2
+ = (const struct constant_descriptor_tree *) p2;
if (c1->hash != c2->hash)
return 0;
return compare_constant (c1->value, c2->value);
return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
+ case FIXED_CST:
+ /* Fixed constants are the same only if the same width of type. */
+ if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
+ return 0;
+
+ return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
+
case STRING_CST:
if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
return 0;
}
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case RANGE_EXPR:
return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
&& compare_constant(TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
case VIEW_CONVERT_EXPR:
return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
default:
- {
- tree nt1, nt2;
- nt1 = lang_hooks.expand_constant (t1);
- nt2 = lang_hooks.expand_constant (t2);
- if (nt1 != t1 || nt2 != t2)
- return compare_constant (nt1, nt2);
- else
- return 0;
- }
+ return 0;
}
gcc_unreachable ();
case INTEGER_CST:
case REAL_CST:
+ case FIXED_CST:
case STRING_CST:
return copy_node (exp);
copy_constant (TREE_IMAGPART (exp)));
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
return build2 (TREE_CODE (exp), TREE_TYPE (exp),
copy_constant (TREE_OPERAND (exp, 0)),
copy_constant (TREE_OPERAND (exp, 1)));
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
case VIEW_CONVERT_EXPR:
return build1 (TREE_CODE (exp), TREE_TYPE (exp),
copy_constant (TREE_OPERAND (exp, 0)));
}
default:
- {
- tree t = lang_hooks.expand_constant (exp);
-
- gcc_assert (t != exp);
- return copy_constant (t);
- }
+ gcc_unreachable ();
}
}
\f
int labelno;
struct constant_descriptor_tree *desc;
- desc = ggc_alloc (sizeof (*desc));
+ desc = GGC_NEW (struct constant_descriptor_tree);
desc->value = copy_constant (exp);
/* Propagate marked-ness to copied constant. */
set_mem_alias_set (rtl, 0);
set_mem_alias_set (rtl, const_alias_set);
+ /* We cannot share RTX'es in pool entries.
+ Mark this piece of RTL as required for unsharing. */
+ RTX_FLAG (rtl, used) = 1;
+
/* Set flags or add text to the name to record information, such as
that it is a local symbol. If the name is changed, the macro
ASM_OUTPUT_LABELREF will have to know how to strip this
key.hash = const_hash_1 (exp);
loc = htab_find_slot_with_hash (const_desc_htab, &key, key.hash, INSERT);
- desc = *loc;
+ desc = (struct constant_descriptor_tree *) *loc;
if (desc == 0)
{
desc = build_constant_desc (exp);
key.value = exp;
key.hash = const_hash_1 (exp);
- desc = htab_find_with_hash (const_desc_htab, &key, key.hash);
+ desc = (struct constant_descriptor_tree *)
+ htab_find_with_hash (const_desc_htab, &key, key.hash);
return (desc ? desc->rtl : NULL_RTX);
}
can use one per-file pool. Should add a targetm bit to tell the
difference. */
-struct rtx_constant_pool GTY(())
-{
+struct GTY(()) rtx_constant_pool {
/* Pointers to first and last constant in pool, as ordered by offset. */
struct constant_descriptor_rtx *first;
struct constant_descriptor_rtx *last;
HOST_WIDE_INT offset;
};
-struct constant_descriptor_rtx GTY((chain_next ("%h.next")))
-{
+struct GTY((chain_next ("%h.next"))) constant_descriptor_rtx {
struct constant_descriptor_rtx *next;
rtx mem;
rtx sym;
static hashval_t
const_desc_rtx_hash (const void *ptr)
{
- const struct constant_descriptor_rtx *desc = ptr;
+ const struct constant_descriptor_rtx *const desc
+ = (const struct constant_descriptor_rtx *) ptr;
return desc->hash;
}
static int
const_desc_rtx_eq (const void *a, const void *b)
{
- const struct constant_descriptor_rtx *x = a;
- const struct constant_descriptor_rtx *y = b;
+ const struct constant_descriptor_rtx *const x
+ = (const struct constant_descriptor_rtx *) a;
+ const struct constant_descriptor_rtx *const y
+ = (const struct constant_descriptor_rtx *) b;
if (x->mode != y->mode)
return 0;
hwi = INTVAL (x);
fold_hwi:
{
- const int shift = sizeof (hashval_t) * CHAR_BIT;
+ int shift = sizeof (hashval_t) * CHAR_BIT;
const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
int i;
h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
break;
+ case CONST_FIXED:
+ h ^= fixed_hash (CONST_FIXED_VALUE (x));
+ break;
+
case CONST_VECTOR:
{
int i;
break;
}
- hp = data;
+ hp = (hashval_t *) data;
*hp = *hp * 509 + h;
return 0;
}
{
struct rtx_constant_pool *pool;
- pool = ggc_alloc (sizeof (struct rtx_constant_pool));
+ pool = GGC_NEW (struct rtx_constant_pool);
pool->const_rtx_htab = htab_create_ggc (31, const_desc_rtx_hash,
const_desc_rtx_eq, NULL);
pool->first = NULL;
/* Initialize constant pool hashing for a new function. */
void
-init_varasm_status (struct function *f)
+init_varasm_status (void)
{
- struct varasm_status *p;
-
- p = ggc_alloc (sizeof (struct varasm_status));
- f->varasm = p;
-
- p->pool = create_constant_pool ();
- p->deferred_constants = 0;
+ crtl->varasm.pool = create_constant_pool ();
+ crtl->varasm.deferred_constants = 0;
}
\f
/* Given a MINUS expression, simplify it if both sides
return NULL_RTX;
/* Record that this function has used a constant pool entry. */
- current_function_uses_const_pool = 1;
+ crtl->uses_const_pool = 1;
/* Decide which pool to use. */
pool = (targetm.use_blocks_for_constant_p (mode, x)
? shared_constant_pool
- : cfun->varasm->pool);
+ : crtl->varasm.pool);
/* Lookup the value in the hashtable. */
tmp.constant = x;
tmp.mode = mode;
hash = const_rtx_hash (x);
slot = htab_find_slot_with_hash (pool->const_rtx_htab, &tmp, hash, INSERT);
- desc = *slot;
+ desc = (struct constant_descriptor_rtx *) *slot;
/* If the constant was already present, return its memory. */
if (desc)
return copy_rtx (desc->mem);
/* Otherwise, create a new descriptor. */
- desc = ggc_alloc (sizeof (*desc));
+ desc = GGC_NEW (struct constant_descriptor_rtx);
*slot = desc;
/* Align the location counter as required by EXP's data type. */
/* Similar, return the mode. */
enum machine_mode
-get_pool_mode (rtx addr)
+get_pool_mode (const_rtx addr)
{
return SYMBOL_REF_CONSTANT (addr)->mode;
}
int
get_pool_size (void)
{
- return cfun->varasm->pool->offset;
+ return crtl->varasm.pool->offset;
}
\f
/* Worker function for output_constant_pool_1. Emit assembly for X
case MODE_INT:
case MODE_PARTIAL_INT:
+ case MODE_FRACT:
+ case MODE_UFRACT:
+ case MODE_ACCUM:
+ case MODE_UACCUM:
assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
break;
case MODE_VECTOR_FLOAT:
case MODE_VECTOR_INT:
+ case MODE_VECTOR_FRACT:
+ case MODE_VECTOR_UFRACT:
+ case MODE_VECTOR_ACCUM:
+ case MODE_VECTOR_UACCUM:
{
int i, units;
enum machine_mode submode = GET_MODE_INNER (mode);
functioning even with INSN_DELETED_P and friends. */
tmp = x;
- switch (GET_CODE (x))
+ switch (GET_CODE (tmp))
{
case CONST:
- if (GET_CODE (XEXP (x, 0)) != PLUS
- || GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
+ if (GET_CODE (XEXP (tmp, 0)) != PLUS
+ || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
break;
- tmp = XEXP (XEXP (x, 0), 0);
+ tmp = XEXP (XEXP (tmp, 0), 0);
/* FALLTHRU */
case LABEL_REF:
- tmp = XEXP (x, 0);
+ tmp = XEXP (tmp, 0);
gcc_assert (!INSN_DELETED_P (tmp));
gcc_assert (!NOTE_P (tmp)
- || NOTE_LINE_NUMBER (tmp) != NOTE_INSN_DELETED);
+ || NOTE_KIND (tmp) != NOTE_INSN_DELETED);
break;
default:
{
rtx insn, link;
- if (!current_function_uses_const_pool && n_deferred_constants == 0)
+ if (!crtl->uses_const_pool && n_deferred_constants == 0)
return;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
mark_constants (insn);
- for (link = current_function_epilogue_delay_list;
+ for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
mark_constants (XEXP (link, 0));
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
tree fndecl ATTRIBUTE_UNUSED)
{
- struct rtx_constant_pool *pool = cfun->varasm->pool;
+ struct rtx_constant_pool *pool = crtl->varasm.pool;
/* It is possible for gcc to call force_const_mem and then to later
discard the instructions which refer to the constant. In such a
int reloc = 0, reloc2;
tree tem;
- /* Give the front-end a chance to convert VALUE to something that
- looks more like a constant to the back-end. */
- exp = lang_hooks.expand_constant (exp);
-
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
break;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
break;
reloc |= reloc2;
break;
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
case VIEW_CONVERT_EXPR:
reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
break;
{
tree tem;
- /* Give the front-end a chance to convert VALUE to something that
- looks more like a constant to the back-end. */
- exp = lang_hooks.expand_constant (exp);
-
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
break;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
output_addressed_constants (TREE_OPERAND (exp, 1));
/* Fall through. */
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
case VIEW_CONVERT_EXPR:
output_addressed_constants (TREE_OPERAND (exp, 0));
break;
evaluate the property while walking a constructor for other purposes. */
bool
-constructor_static_from_elts_p (tree ctor)
+constructor_static_from_elts_p (const_tree ctor)
{
return (TREE_CONSTANT (ctor)
&& (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
&& !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)));
}
+/* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
+ PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
+ which are valid when ENDTYPE is an integer of any size; in
+ particular, this does not accept a pointer minus a constant. This
+ returns null_pointer_node if the VALUE is an absolute constant
+ which can be used to initialize a static variable. Otherwise it
+ returns NULL. */
+
+static tree
+narrowing_initializer_constant_valid_p (tree value, tree endtype)
+{
+ tree op0, op1;
+
+ if (!INTEGRAL_TYPE_P (endtype))
+ return NULL_TREE;
+
+ op0 = TREE_OPERAND (value, 0);
+ op1 = TREE_OPERAND (value, 1);
+
+ /* Like STRIP_NOPS except allow the operand mode to widen. This
+ works around a feature of fold that simplifies (int)(p1 - p2) to
+ ((int)p1 - (int)p2) under the theory that the narrower operation
+ is cheaper. */
+
+ while (CONVERT_EXPR_P (op0)
+ || TREE_CODE (op0) == NON_LVALUE_EXPR)
+ {
+ tree inner = TREE_OPERAND (op0, 0);
+ if (inner == error_mark_node
+ || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
+ || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))
+ > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
+ break;
+ op0 = inner;
+ }
+
+ while (CONVERT_EXPR_P (op1)
+ || TREE_CODE (op1) == NON_LVALUE_EXPR)
+ {
+ tree inner = TREE_OPERAND (op1, 0);
+ if (inner == error_mark_node
+ || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
+ || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))
+ > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
+ break;
+ op1 = inner;
+ }
+
+ op0 = initializer_constant_valid_p (op0, endtype);
+ op1 = initializer_constant_valid_p (op1, endtype);
+
+ /* Both initializers must be known. */
+ if (op0 && op1)
+ {
+ if (op0 == op1
+ && (op0 == null_pointer_node
+ || TREE_CODE (value) == MINUS_EXPR))
+ return null_pointer_node;
+
+ /* Support differences between labels. */
+ if (TREE_CODE (op0) == LABEL_DECL
+ && TREE_CODE (op1) == LABEL_DECL)
+ return null_pointer_node;
+
+ if (TREE_CODE (op0) == STRING_CST
+ && TREE_CODE (op1) == STRING_CST
+ && operand_equal_p (op0, op1, 1))
+ return null_pointer_node;
+ }
+
+ return NULL_TREE;
+}
+
/* Return nonzero if VALUE is a valid constant-valued expression
for use in initializing a static variable; one that can be an
element of a "constant" initializer.
tree
initializer_constant_valid_p (tree value, tree endtype)
{
- /* Give the front-end a chance to convert VALUE to something that
- looks more like a constant to the back-end. */
- value = lang_hooks.expand_constant (value);
+ tree ret;
switch (TREE_CODE (value))
{
case INTEGER_CST:
case VECTOR_CST:
case REAL_CST:
+ case FIXED_CST:
case STRING_CST:
case COMPLEX_CST:
return null_pointer_node;
case ADDR_EXPR:
case FDESC_EXPR:
- value = staticp (TREE_OPERAND (value, 0));
- if (value)
- {
- /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out to
- be a constant, this is old-skool offsetof-like nonsense. */
- if (TREE_CODE (value) == INDIRECT_REF
- && TREE_CONSTANT (TREE_OPERAND (value, 0)))
- return null_pointer_node;
- /* Taking the address of a nested function involves a trampoline. */
- if (TREE_CODE (value) == FUNCTION_DECL
- && ((decl_function_context (value)
- && !DECL_NO_STATIC_CHAIN (value))
- || DECL_DLLIMPORT_P (value)))
- return NULL_TREE;
- /* "&{...}" requires a temporary to hold the constructed
- object. */
- if (TREE_CODE (value) == CONSTRUCTOR)
- return NULL_TREE;
- }
- return value;
+ {
+ tree op0 = staticp (TREE_OPERAND (value, 0));
+ if (op0)
+ {
+ /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
+ to be a constant, this is old-skool offsetof-like nonsense. */
+ if (TREE_CODE (op0) == INDIRECT_REF
+ && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
+ return null_pointer_node;
+ /* Taking the address of a nested function involves a trampoline,
+ unless we don't need or want one. */
+ if (TREE_CODE (op0) == FUNCTION_DECL
+ && decl_function_context (op0)
+ && !DECL_NO_STATIC_CHAIN (op0)
+ && !TREE_NO_TRAMPOLINE (value))
+ return NULL_TREE;
+ /* "&{...}" requires a temporary to hold the constructed
+ object. */
+ if (TREE_CODE (op0) == CONSTRUCTOR)
+ return NULL_TREE;
+ }
+ return op0;
+ }
- case VIEW_CONVERT_EXPR:
case NON_LVALUE_EXPR:
return initializer_constant_valid_p (TREE_OPERAND (value, 0), endtype);
- case CONVERT_EXPR:
- case NOP_EXPR:
+ case VIEW_CONVERT_EXPR:
{
- tree src;
- tree src_type;
- tree dest_type;
+ tree src = TREE_OPERAND (value, 0);
+ tree src_type = TREE_TYPE (src);
+ tree dest_type = TREE_TYPE (value);
+
+ /* Allow view-conversions from aggregate to non-aggregate type only
+ if the bit pattern is fully preserved afterwards; otherwise, the
+ RTL expander won't be able to apply a subsequent transformation
+ to the underlying constructor. */
+ if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
+ {
+ if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
+ return initializer_constant_valid_p (src, endtype);
+ else
+ return NULL_TREE;
+ }
+
+ /* Allow all other kinds of view-conversion. */
+ return initializer_constant_valid_p (src, endtype);
+ }
- src = TREE_OPERAND (value, 0);
- src_type = TREE_TYPE (src);
- dest_type = TREE_TYPE (value);
+ CASE_CONVERT:
+ {
+ tree src = TREE_OPERAND (value, 0);
+ tree src_type = TREE_TYPE (src);
+ tree dest_type = TREE_TYPE (value);
/* Allow conversions between pointer types, floating-point
types, and offset types. */
}
break;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (! INTEGRAL_TYPE_P (endtype)
|| TYPE_PRECISION (endtype) >= POINTER_SIZE)
endtype);
tree valid1 = initializer_constant_valid_p (TREE_OPERAND (value, 1),
endtype);
- /* If either term is absolute, use the other terms relocation. */
+ /* If either term is absolute, use the other term's relocation. */
if (valid0 == null_pointer_node)
return valid1;
if (valid1 == null_pointer_node)
return valid0;
}
+
+ /* Support narrowing pointer differences. */
+ ret = narrowing_initializer_constant_valid_p (value, endtype);
+ if (ret != NULL_TREE)
+ return ret;
+
break;
case MINUS_EXPR:
}
/* Support narrowing differences. */
- if (INTEGRAL_TYPE_P (endtype))
- {
- tree op0, op1;
-
- op0 = TREE_OPERAND (value, 0);
- op1 = TREE_OPERAND (value, 1);
+ ret = narrowing_initializer_constant_valid_p (value, endtype);
+ if (ret != NULL_TREE)
+ return ret;
- /* Like STRIP_NOPS except allow the operand mode to widen.
- This works around a feature of fold that simplifies
- (int)(p1 - p2) to ((int)p1 - (int)p2) under the theory
- that the narrower operation is cheaper. */
-
- while (TREE_CODE (op0) == NOP_EXPR
- || TREE_CODE (op0) == CONVERT_EXPR
- || TREE_CODE (op0) == NON_LVALUE_EXPR)
- {
- tree inner = TREE_OPERAND (op0, 0);
- if (inner == error_mark_node
- || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
- || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))
- > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
- break;
- op0 = inner;
- }
-
- while (TREE_CODE (op1) == NOP_EXPR
- || TREE_CODE (op1) == CONVERT_EXPR
- || TREE_CODE (op1) == NON_LVALUE_EXPR)
- {
- tree inner = TREE_OPERAND (op1, 0);
- if (inner == error_mark_node
- || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
- || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))
- > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
- break;
- op1 = inner;
- }
-
- op0 = initializer_constant_valid_p (op0, endtype);
- op1 = initializer_constant_valid_p (op1, endtype);
-
- /* Both initializers must be known. */
- if (op0 && op1)
- {
- if (op0 == op1)
- return null_pointer_node;
-
- /* Support differences between labels. */
- if (TREE_CODE (op0) == LABEL_DECL
- && TREE_CODE (op1) == LABEL_DECL)
- return null_pointer_node;
-
- if (TREE_CODE (op0) == STRING_CST
- && TREE_CODE (op1) == STRING_CST
- && operand_equal_p (op0, op1, 1))
- return null_pointer_node;
- }
- }
break;
default:
return 0;
}
\f
-/* Output assembler code for constant EXP to FILE, with no label.
- This includes the pseudo-op such as ".int" or ".byte", and a newline.
- Assumes output_addressed_constants has been done on EXP already.
+/* Return true if VALUE is a valid constant-valued expression
+ for use in initializing a static bit-field; one that can be
+ an element of a "constant" initializer. */
- Generate exactly SIZE bytes of assembler data, padding at the end
- with zeros if necessary. SIZE must always be specified.
-
- SIZE is important for structure constructors,
- since trailing members may have been omitted from the constructor.
- It is also important for initialization of arrays from string constants
- since the full length of the string constant might not be wanted.
- It is also needed for initialization of unions, where the initializer's
- type is just one member, and that may not be as long as the union.
+bool
+initializer_constant_valid_for_bitfield_p (tree value)
+{
+ /* For bitfields we support integer constants or possibly nested aggregates
+ of such. */
+ switch (TREE_CODE (value))
+ {
+ case CONSTRUCTOR:
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree elt;
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
+ if (!initializer_constant_valid_for_bitfield_p (elt))
+ return false;
+ return true;
+ }
+
+ case INTEGER_CST:
+ return true;
+
+ case VIEW_CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ return
+ initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
+
+ default:
+ break;
+ }
+
+ return false;
+}
+
+/* output_constructor outer state of relevance in recursive calls, typically
+ for nested aggregate bitfields. */
+
+typedef struct {
+ unsigned int bit_offset; /* current position in ... */
+ int byte; /* ... the outer byte buffer. */
+} oc_outer_state;
+
+static unsigned HOST_WIDE_INT
+ output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int,
+ oc_outer_state *);
+
+/* Output assembler code for constant EXP to FILE, with no label.
+ This includes the pseudo-op such as ".int" or ".byte", and a newline.
+ Assumes output_addressed_constants has been done on EXP already.
+
+ Generate exactly SIZE bytes of assembler data, padding at the end
+ with zeros if necessary. SIZE must always be specified.
+
+ SIZE is important for structure constructors,
+ since trailing members may have been omitted from the constructor.
+ It is also important for initialization of arrays from string constants
+ since the full length of the string constant might not be wanted.
+ It is also needed for initialization of unions, where the initializer's
+ type is just one member, and that may not be as long as the union.
There a case in which we would fail to output exactly SIZE bytes:
for a structure constructor that wants to produce more than SIZE bytes.
enum tree_code code;
unsigned HOST_WIDE_INT thissize;
- /* Some front-ends use constants other than the standard language-independent
- varieties, but which may still be output directly. Give the front-end a
- chance to convert EXP to a language-independent representation. */
- exp = lang_hooks.expand_constant (exp);
-
if (size == 0 || flag_syntax_only)
return;
/* Eliminate any conversions since we'll be outputting the underlying
constant. */
- while (TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
+ while (CONVERT_EXPR_P (exp)
|| TREE_CODE (exp) == NON_LVALUE_EXPR
|| TREE_CODE (exp) == VIEW_CONVERT_EXPR)
{
code = TREE_CODE (TREE_TYPE (exp));
thissize = int_size_in_bytes (TREE_TYPE (exp));
- /* Give the front end another chance to expand constants. */
- exp = lang_hooks.expand_constant (exp);
-
/* Allow a constructor with no elements for any data type.
This means to fill the space with zeros. */
if (TREE_CODE (exp) == CONSTRUCTOR
case POINTER_TYPE:
case REFERENCE_TYPE:
case OFFSET_TYPE:
+ case FIXED_POINT_TYPE:
if (! assemble_integer (expand_expr (exp, NULL_RTX, VOIDmode,
EXPAND_INITIALIZER),
MIN (size, thissize), align, 0))
- error ("initializer for integer value is too complicated");
+ error ("initializer for integer/fixed-point value is too complicated");
break;
case REAL_TYPE:
switch (TREE_CODE (exp))
{
case CONSTRUCTOR:
- output_constructor (exp, size, align);
+ output_constructor (exp, size, align, NULL);
return;
case STRING_CST:
thissize = MIN ((unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp),
link = TREE_VECTOR_CST_ELTS (exp);
output_constant (TREE_VALUE (link), elt_size, align);
+ thissize = elt_size;
while ((link = TREE_CHAIN (link)) != NULL)
- output_constant (TREE_VALUE (link), elt_size, nalign);
+ {
+ output_constant (TREE_VALUE (link), elt_size, nalign);
+ thissize += elt_size;
+ }
break;
}
default:
case RECORD_TYPE:
case UNION_TYPE:
gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
- output_constructor (exp, size, align);
+ output_constructor (exp, size, align, NULL);
return;
case ERROR_MARK:
return tree_low_cst (i, 1);
}
-/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
- Generate at least SIZE bytes, padding if necessary. */
+/* Other data structures and helpers for output_constructor.  */
-static void
-output_constructor (tree exp, unsigned HOST_WIDE_INT size,
- unsigned int align)
-{
- tree type = TREE_TYPE (exp);
- tree field = 0;
- tree min_index = 0;
- /* Number of bytes output or skipped so far.
- In other words, current position within the constructor. */
- HOST_WIDE_INT total_bytes = 0;
- /* Nonzero means BYTE contains part of a byte, to be output. */
- int byte_buffer_in_use = 0;
- int byte = 0;
- unsigned HOST_WIDE_INT cnt;
- constructor_elt *ce;
+/* output_constructor local state to support interaction with helpers. */
- gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
+typedef struct {
- if (TREE_CODE (type) == RECORD_TYPE)
- field = TYPE_FIELDS (type);
+ /* Received arguments. */
+ tree exp; /* Constructor expression. */
+ unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
+ unsigned int align; /* Known initial alignment. */
- if (TREE_CODE (type) == ARRAY_TYPE
- && TYPE_DOMAIN (type) != 0)
- min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
+ /* Constructor expression data. */
+ tree type; /* Expression type. */
+ tree field; /* Current field decl in a record. */
+ tree min_index; /* Lower bound if specified for an array. */
- /* As LINK goes through the elements of the constant,
- FIELD goes through the structure fields, if the constant is a structure.
- if the constant is a union, then we override this,
- by getting the field from the TREE_LIST element.
- But the constant could also be an array. Then FIELD is zero.
+ /* Output processing state. */
+ HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
+ bool byte_buffer_in_use; /* Whether byte ... */
+ int byte; /* ... contains part of a bitfield byte yet to
+ be output. */
- There is always a maximum of one element in the chain LINK for unions
- (even if the initializer in a source program incorrectly contains
- more one). */
- for (cnt = 0;
- VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), cnt, ce);
- cnt++, field = field ? TREE_CHAIN (field) : 0)
+ int last_relative_index; /* Implicit or explicit index of the last
+ array element output within a bitfield. */
+ /* Current element. */
+ tree val; /* Current element value. */
+ tree index; /* Current element index. */
+
+} oc_local_state;
+
+/* Helper for output_constructor. From the current LOCAL state, output a
+ RANGE_EXPR element. */
+
+static void
+output_constructor_array_range (oc_local_state *local)
+{
+ unsigned HOST_WIDE_INT fieldsize
+ = int_size_in_bytes (TREE_TYPE (local->type));
+
+ HOST_WIDE_INT lo_index
+ = tree_low_cst (TREE_OPERAND (local->index, 0), 0);
+ HOST_WIDE_INT hi_index
+ = tree_low_cst (TREE_OPERAND (local->index, 1), 0);
+ HOST_WIDE_INT index;
+
+ unsigned int align2
+ = min_align (local->align, fieldsize * BITS_PER_UNIT);
+
+ for (index = lo_index; index <= hi_index; index++)
{
- tree val = ce->value;
- tree index = 0;
+ /* Output the element's initial value. */
+ if (local->val == NULL_TREE)
+ assemble_zeros (fieldsize);
+ else
+ output_constant (local->val, fieldsize, align2);
+
+ /* Count its size. */
+ local->total_bytes += fieldsize;
+ }
+}
- /* The element in a union constructor specifies the proper field
- or index. */
- if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
- && ce->index != 0)
- field = ce->index;
+/* Helper for output_constructor. From the current LOCAL state, output a
+   field element that is not a true bitfield or part of an outer one.  */
+
+static void
+output_constructor_regular_field (oc_local_state *local)
+{
+ /* Field size and position. Since this structure is static, we know the
+ positions are constant. */
+ unsigned HOST_WIDE_INT fieldsize;
+ HOST_WIDE_INT fieldpos;
- else if (TREE_CODE (type) == ARRAY_TYPE)
- index = ce->index;
+ unsigned int align2;
-#ifdef ASM_COMMENT_START
- if (field && flag_verbose_asm)
- fprintf (asm_out_file, "%s %s:\n",
- ASM_COMMENT_START,
- DECL_NAME (field)
- ? IDENTIFIER_POINTER (DECL_NAME (field))
- : "<anonymous>");
-#endif
+ if (local->index != NULL_TREE)
+ fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
+ * ((tree_low_cst (local->index, 0)
+ - tree_low_cst (local->min_index, 0))));
+ else if (local->field != NULL_TREE)
+ fieldpos = int_byte_position (local->field);
+ else
+ fieldpos = 0;
- /* Eliminate the marker that makes a cast not be an lvalue. */
- if (val != 0)
- STRIP_NOPS (val);
+ /* Output any buffered-up bit-fields preceding this element. */
+ if (local->byte_buffer_in_use)
+ {
+ assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
+ local->total_bytes++;
+ local->byte_buffer_in_use = false;
+ }
+
+ /* Advance to offset of this element.
+ Note no alignment needed in an array, since that is guaranteed
+ if each element has the proper size. */
+ if ((local->field != NULL_TREE || local->index != NULL_TREE)
+ && fieldpos != local->total_bytes)
+ {
+ gcc_assert (fieldpos >= local->total_bytes);
+ assemble_zeros (fieldpos - local->total_bytes);
+ local->total_bytes = fieldpos;
+ }
+
+ /* Find the alignment of this element. */
+ align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
- if (index && TREE_CODE (index) == RANGE_EXPR)
+ /* Determine size this element should occupy. */
+ if (local->field)
+ {
+ fieldsize = 0;
+
+ /* If this is an array with an unspecified upper bound,
+ the initializer determines the size. */
+      /* ??? This ought to only be checked if DECL_SIZE_UNIT is NULL,
+ but we cannot do this until the deprecated support for
+ initializing zero-length array members is removed. */
+ if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
+ && TYPE_DOMAIN (TREE_TYPE (local->field))
+ && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field))))
{
- unsigned HOST_WIDE_INT fieldsize
- = int_size_in_bytes (TREE_TYPE (type));
- HOST_WIDE_INT lo_index = tree_low_cst (TREE_OPERAND (index, 0), 0);
- HOST_WIDE_INT hi_index = tree_low_cst (TREE_OPERAND (index, 1), 0);
- HOST_WIDE_INT index;
- unsigned int align2 = min_align (align, fieldsize * BITS_PER_UNIT);
-
- for (index = lo_index; index <= hi_index; index++)
- {
- /* Output the element's initial value. */
- if (val == 0)
- assemble_zeros (fieldsize);
- else
- output_constant (val, fieldsize, align2);
-
- /* Count its size. */
- total_bytes += fieldsize;
- }
+ fieldsize = array_size_for_constructor (local->val);
+ /* Given a non-empty initialization, this field had
+ better be last. */
+ gcc_assert (!fieldsize || !TREE_CHAIN (local->field));
}
- else if (field == 0 || !DECL_BIT_FIELD (field))
+ else if (DECL_SIZE_UNIT (local->field))
{
- /* An element that is not a bit-field. */
-
- unsigned HOST_WIDE_INT fieldsize;
- /* Since this structure is static,
- we know the positions are constant. */
- HOST_WIDE_INT pos = field ? int_byte_position (field) : 0;
- unsigned int align2;
+ /* ??? This can't be right. If the decl size overflows
+ a host integer we will silently emit no data. */
+ if (host_integerp (DECL_SIZE_UNIT (local->field), 1))
+ fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
+ }
+ }
+ else
+ fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
+
+ /* Output the element's initial value. */
+ if (local->val == NULL_TREE)
+ assemble_zeros (fieldsize);
+ else
+ output_constant (local->val, fieldsize, align2);
- if (index != 0)
- pos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (val)), 1)
- * (tree_low_cst (index, 0) - tree_low_cst (min_index, 0)));
+ /* Count its size. */
+ local->total_bytes += fieldsize;
+}
- /* Output any buffered-up bit-fields preceding this element. */
- if (byte_buffer_in_use)
- {
- assemble_integer (GEN_INT (byte), 1, BITS_PER_UNIT, 1);
- total_bytes++;
- byte_buffer_in_use = 0;
- }
+/* Helper for output_constructor. From the current LOCAL and OUTER states,
+ output an element that is a true bitfield or part of an outer one. */
- /* Advance to offset of this element.
- Note no alignment needed in an array, since that is guaranteed
- if each element has the proper size. */
- if ((field != 0 || index != 0) && pos != total_bytes)
- {
- gcc_assert (pos >= total_bytes);
- assemble_zeros (pos - total_bytes);
- total_bytes = pos;
- }
+static void
+output_constructor_bitfield (oc_local_state *local, oc_outer_state *outer)
+{
+ /* Bit size of this element. */
+ HOST_WIDE_INT ebitsize
+ = (local->field
+ ? tree_low_cst (DECL_SIZE (local->field), 1)
+ : tree_low_cst (TYPE_SIZE (TREE_TYPE (local->type)), 1));
+
+ /* Relative index of this element if this is an array component. */
+ HOST_WIDE_INT relative_index
+ = (!local->field
+ ? (local->index
+ ? (tree_low_cst (local->index, 0)
+ - tree_low_cst (local->min_index, 0))
+ : local->last_relative_index + 1)
+ : 0);
+
+ /* Bit position of this element from the start of the containing
+ constructor. */
+ HOST_WIDE_INT constructor_relative_ebitpos
+ = (local->field
+ ? int_bit_position (local->field)
+ : ebitsize * relative_index);
+
+ /* Bit position of this element from the start of a possibly ongoing
+ outer byte buffer. */
+ HOST_WIDE_INT byte_relative_ebitpos
+ = ((outer ? outer->bit_offset : 0) + constructor_relative_ebitpos);
+
+ /* From the start of a possibly ongoing outer byte buffer, offsets to
+ the first bit of this element and to the first bit past the end of
+ this element. */
+ HOST_WIDE_INT next_offset = byte_relative_ebitpos;
+ HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
+
+ local->last_relative_index = relative_index;
+
+ if (local->val == NULL_TREE)
+ local->val = integer_zero_node;
+
+ while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
+ || TREE_CODE (local->val) == NON_LVALUE_EXPR)
+ local->val = TREE_OPERAND (local->val, 0);
+
+ if (TREE_CODE (local->val) != INTEGER_CST
+ && TREE_CODE (local->val) != CONSTRUCTOR)
+ {
+ error ("invalid initial value for member %qE", DECL_NAME (local->field));
+ return;
+ }
- /* Find the alignment of this element. */
- align2 = min_align (align, BITS_PER_UNIT * pos);
+ /* If this field does not start in this (or, next) byte,
+ skip some bytes. */
+ if (next_offset / BITS_PER_UNIT != local->total_bytes)
+ {
+ /* Output remnant of any bit field in previous bytes. */
+ if (local->byte_buffer_in_use)
+ {
+ assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
+ local->total_bytes++;
+ local->byte_buffer_in_use = false;
+ }
+
+ /* If still not at proper byte, advance to there. */
+ if (next_offset / BITS_PER_UNIT != local->total_bytes)
+ {
+ gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
+ assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
+ local->total_bytes = next_offset / BITS_PER_UNIT;
+ }
+ }
+
+ /* Set up the buffer if necessary. */
+ if (!local->byte_buffer_in_use)
+ {
+ local->byte = 0;
+ if (ebitsize > 0)
+ local->byte_buffer_in_use = true;
+ }
+
+  /* If this is a nested constructor, recurse passing the bit offset and the
+ pending data, then retrieve the new pending data afterwards. */
+ if (TREE_CODE (local->val) == CONSTRUCTOR)
+ {
+ oc_outer_state output_state;
- /* Determine size this element should occupy. */
- if (field)
+ output_state.bit_offset = next_offset % BITS_PER_UNIT;
+ output_state.byte = local->byte;
+ local->total_bytes
+ += output_constructor (local->val, 0, 0, &output_state);
+ local->byte = output_state.byte;
+ return;
+ }
+
+ /* Otherwise, we must split the element into pieces that fall within
+ separate bytes, and combine each byte with previous or following
+ bit-fields. */
+ while (next_offset < end_offset)
+ {
+ int this_time;
+ int shift;
+ HOST_WIDE_INT value;
+ HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
+ HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
+
+ /* Advance from byte to byte
+ within this element when necessary. */
+ while (next_byte != local->total_bytes)
+ {
+ assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
+ local->total_bytes++;
+ local->byte = 0;
+ }
+
+ /* Number of bits we can process at once
+ (all part of the same byte). */
+ this_time = MIN (end_offset - next_offset,
+ BITS_PER_UNIT - next_bit);
+ if (BYTES_BIG_ENDIAN)
+ {
+ /* On big-endian machine, take the most significant bits
+ first (of the bits that are significant)
+ and put them into bytes from the most significant end. */
+ shift = end_offset - next_offset - this_time;
+
+ /* Don't try to take a bunch of bits that cross
+ the word boundary in the INTEGER_CST. We can
+ only select bits from the LOW or HIGH part
+ not from both. */
+ if (shift < HOST_BITS_PER_WIDE_INT
+ && shift + this_time > HOST_BITS_PER_WIDE_INT)
{
- fieldsize = 0;
-
- /* If this is an array with an unspecified upper bound,
- the initializer determines the size. */
- /* ??? This ought to only checked if DECL_SIZE_UNIT is NULL,
- but we cannot do this until the deprecated support for
- initializing zero-length array members is removed. */
- if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
- && TYPE_DOMAIN (TREE_TYPE (field))
- && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
- {
- fieldsize = array_size_for_constructor (val);
- /* Given a non-empty initialization, this field had
- better be last. */
- gcc_assert (!fieldsize || !TREE_CHAIN (field));
- }
- else if (DECL_SIZE_UNIT (field))
- {
- /* ??? This can't be right. If the decl size overflows
- a host integer we will silently emit no data. */
- if (host_integerp (DECL_SIZE_UNIT (field), 1))
- fieldsize = tree_low_cst (DECL_SIZE_UNIT (field), 1);
- }
+ this_time = shift + this_time - HOST_BITS_PER_WIDE_INT;
+ shift = HOST_BITS_PER_WIDE_INT;
}
+
+ /* Now get the bits from the appropriate constant word. */
+ if (shift < HOST_BITS_PER_WIDE_INT)
+ value = TREE_INT_CST_LOW (local->val);
else
- fieldsize = int_size_in_bytes (TREE_TYPE (type));
-
- /* Output the element's initial value. */
- if (val == 0)
- assemble_zeros (fieldsize);
- else
- output_constant (val, fieldsize, align2);
-
- /* Count its size. */
- total_bytes += fieldsize;
+ {
+ gcc_assert (shift < 2 * HOST_BITS_PER_WIDE_INT);
+ value = TREE_INT_CST_HIGH (local->val);
+ shift -= HOST_BITS_PER_WIDE_INT;
+ }
+
+ /* Get the result. This works only when:
+ 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
+ local->byte |= (((value >> shift)
+ & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
+ << (BITS_PER_UNIT - this_time - next_bit));
}
- else if (val != 0 && TREE_CODE (val) != INTEGER_CST)
- error ("invalid initial value for member %qs",
- IDENTIFIER_POINTER (DECL_NAME (field)));
else
{
- /* Element that is a bit-field. */
+ /* On little-endian machines,
+ take first the least significant bits of the value
+ and pack them starting at the least significant
+ bits of the bytes. */
+ shift = next_offset - byte_relative_ebitpos;
+
+ /* Don't try to take a bunch of bits that cross
+ the word boundary in the INTEGER_CST. We can
+ only select bits from the LOW or HIGH part
+ not from both. */
+ if (shift < HOST_BITS_PER_WIDE_INT
+ && shift + this_time > HOST_BITS_PER_WIDE_INT)
+ this_time = (HOST_BITS_PER_WIDE_INT - shift);
+
+ /* Now get the bits from the appropriate constant word. */
+ if (shift < HOST_BITS_PER_WIDE_INT)
+ value = TREE_INT_CST_LOW (local->val);
+ else
+ {
+ gcc_assert (shift < 2 * HOST_BITS_PER_WIDE_INT);
+ value = TREE_INT_CST_HIGH (local->val);
+ shift -= HOST_BITS_PER_WIDE_INT;
+ }
+
+ /* Get the result. This works only when:
+ 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
+ local->byte |= (((value >> shift)
+ & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
+ << next_bit);
+ }
+
+ next_offset += this_time;
+ local->byte_buffer_in_use = true;
+ }
+}
- HOST_WIDE_INT next_offset = int_bit_position (field);
- HOST_WIDE_INT end_offset
- = (next_offset + tree_low_cst (DECL_SIZE (field), 1));
+/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
+ Generate at least SIZE bytes, padding if necessary. OUTER designates the
+ caller output state of relevance in recursive invocations. */
- if (val == 0)
- val = integer_zero_node;
+static unsigned HOST_WIDE_INT
+output_constructor (tree exp, unsigned HOST_WIDE_INT size,
+ unsigned int align, oc_outer_state * outer)
+{
+ unsigned HOST_WIDE_INT cnt;
+ constructor_elt *ce;
- /* If this field does not start in this (or, next) byte,
- skip some bytes. */
- if (next_offset / BITS_PER_UNIT != total_bytes)
- {
- /* Output remnant of any bit field in previous bytes. */
- if (byte_buffer_in_use)
- {
- assemble_integer (GEN_INT (byte), 1, BITS_PER_UNIT, 1);
- total_bytes++;
- byte_buffer_in_use = 0;
- }
+ oc_local_state local;
- /* If still not at proper byte, advance to there. */
- if (next_offset / BITS_PER_UNIT != total_bytes)
- {
- gcc_assert (next_offset / BITS_PER_UNIT >= total_bytes);
- assemble_zeros (next_offset / BITS_PER_UNIT - total_bytes);
- total_bytes = next_offset / BITS_PER_UNIT;
- }
- }
+ /* Setup our local state to communicate with helpers. */
+ local.exp = exp;
+ local.size = size;
+ local.align = align;
- if (! byte_buffer_in_use)
- byte = 0;
+ local.total_bytes = 0;
+ local.byte_buffer_in_use = outer != NULL;
+ local.byte = outer ? outer->byte : 0;
- /* We must split the element into pieces that fall within
- separate bytes, and combine each byte with previous or
- following bit-fields. */
+ local.type = TREE_TYPE (exp);
- /* next_offset is the offset n fbits from the beginning of
- the structure to the next bit of this element to be processed.
- end_offset is the offset of the first bit past the end of
- this element. */
- while (next_offset < end_offset)
- {
- int this_time;
- int shift;
- HOST_WIDE_INT value;
- HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
- HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
-
- /* Advance from byte to byte
- within this element when necessary. */
- while (next_byte != total_bytes)
- {
- assemble_integer (GEN_INT (byte), 1, BITS_PER_UNIT, 1);
- total_bytes++;
- byte = 0;
- }
+ local.last_relative_index = -1;
- /* Number of bits we can process at once
- (all part of the same byte). */
- this_time = MIN (end_offset - next_offset,
- BITS_PER_UNIT - next_bit);
- if (BYTES_BIG_ENDIAN)
- {
- /* On big-endian machine, take the most significant bits
- first (of the bits that are significant)
- and put them into bytes from the most significant end. */
- shift = end_offset - next_offset - this_time;
-
- /* Don't try to take a bunch of bits that cross
- the word boundary in the INTEGER_CST. We can
- only select bits from the LOW or HIGH part
- not from both. */
- if (shift < HOST_BITS_PER_WIDE_INT
- && shift + this_time > HOST_BITS_PER_WIDE_INT)
- {
- this_time = shift + this_time - HOST_BITS_PER_WIDE_INT;
- shift = HOST_BITS_PER_WIDE_INT;
- }
-
- /* Now get the bits from the appropriate constant word. */
- if (shift < HOST_BITS_PER_WIDE_INT)
- value = TREE_INT_CST_LOW (val);
- else
- {
- gcc_assert (shift < 2 * HOST_BITS_PER_WIDE_INT);
- value = TREE_INT_CST_HIGH (val);
- shift -= HOST_BITS_PER_WIDE_INT;
- }
-
- /* Get the result. This works only when:
- 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
- byte |= (((value >> shift)
- & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
- << (BITS_PER_UNIT - this_time - next_bit));
- }
- else
- {
- /* On little-endian machines,
- take first the least significant bits of the value
- and pack them starting at the least significant
- bits of the bytes. */
- shift = next_offset - int_bit_position (field);
-
- /* Don't try to take a bunch of bits that cross
- the word boundary in the INTEGER_CST. We can
- only select bits from the LOW or HIGH part
- not from both. */
- if (shift < HOST_BITS_PER_WIDE_INT
- && shift + this_time > HOST_BITS_PER_WIDE_INT)
- this_time = (HOST_BITS_PER_WIDE_INT - shift);
-
- /* Now get the bits from the appropriate constant word. */
- if (shift < HOST_BITS_PER_WIDE_INT)
- value = TREE_INT_CST_LOW (val);
- else
- {
- gcc_assert (shift < 2 * HOST_BITS_PER_WIDE_INT);
- value = TREE_INT_CST_HIGH (val);
- shift -= HOST_BITS_PER_WIDE_INT;
- }
-
- /* Get the result. This works only when:
- 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
- byte |= (((value >> shift)
- & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
- << next_bit);
- }
+ local.min_index = NULL_TREE;
+ if (TREE_CODE (local.type) == ARRAY_TYPE
+ && TYPE_DOMAIN (local.type) != NULL_TREE)
+ local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
+
+ gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
- next_offset += this_time;
- byte_buffer_in_use = 1;
- }
- }
- }
+ /* As CE goes through the elements of the constant, FIELD goes through the
+ structure fields if the constant is a structure. If the constant is a
+ union, we override this by getting the field from the TREE_LIST element.
+ But the constant could also be an array. Then FIELD is zero.
- if (byte_buffer_in_use)
+ There is always a maximum of one element in the chain LINK for unions
+ (even if the initializer in a source program incorrectly contains
+     more than one). */
+
+ local.field = NULL_TREE;
+ if (TREE_CODE (local.type) == RECORD_TYPE)
+ local.field = TYPE_FIELDS (local.type);
+
+ for (cnt = 0;
+ VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), cnt, ce);
+ cnt++, local.field = local.field ? TREE_CHAIN (local.field) : 0)
{
- assemble_integer (GEN_INT (byte), 1, BITS_PER_UNIT, 1);
- total_bytes++;
+ local.val = ce->value;
+ local.index = NULL_TREE;
+
+ /* The element in a union constructor specifies the proper field
+ or index. */
+ if ((TREE_CODE (local.type) == RECORD_TYPE
+ || TREE_CODE (local.type) == UNION_TYPE
+ || TREE_CODE (local.type) == QUAL_UNION_TYPE)
+ && ce->index != NULL_TREE)
+ local.field = ce->index;
+
+ else if (TREE_CODE (local.type) == ARRAY_TYPE)
+ local.index = ce->index;
+
+#ifdef ASM_COMMENT_START
+ if (local.field && flag_verbose_asm)
+ fprintf (asm_out_file, "%s %s:\n",
+ ASM_COMMENT_START,
+ DECL_NAME (local.field)
+ ? IDENTIFIER_POINTER (DECL_NAME (local.field))
+ : "<anonymous>");
+#endif
+
+ /* Eliminate the marker that makes a cast not be an lvalue. */
+ if (local.val != NULL_TREE)
+ STRIP_NOPS (local.val);
+
+ /* Output the current element, using the appropriate helper ... */
+
+ /* For an array slice not part of an outer bitfield. */
+ if (!outer
+ && local.index != NULL_TREE
+ && TREE_CODE (local.index) == RANGE_EXPR)
+ output_constructor_array_range (&local);
+
+ /* For a field that is neither a true bitfield nor part of an outer one,
+ known to be at least byte aligned and multiple-of-bytes long. */
+ else if (!outer
+ && (local.field == NULL_TREE
+ || !CONSTRUCTOR_BITFIELD_P (local.field)))
+ output_constructor_regular_field (&local);
+
+ /* For a true bitfield or part of an outer one. */
+ else
+ output_constructor_bitfield (&local, outer);
}
- if ((unsigned HOST_WIDE_INT)total_bytes < size)
- assemble_zeros (size - total_bytes);
-}
+ /* If we are not at toplevel, save the pending data for our caller.
+ Otherwise output the pending data and padding zeros as needed. */
+ if (outer)
+ outer->byte = local.byte;
+ else
+ {
+ if (local.byte_buffer_in_use)
+ {
+ assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
+ local.total_bytes++;
+ }
-/* This TREE_LIST contains any weak symbol declarations waiting
- to be emitted. */
-static GTY(()) tree weak_decls;
+ if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
+ {
+ assemble_zeros (local.size - local.total_bytes);
+ local.total_bytes = local.size;
+ }
+ }
+
+ return local.total_bytes;
+}
/* Mark DECL as weak. */
/* NEWDECL is weak, but OLDDECL is not. */
/* If we already output the OLDDECL, we're in trouble; we can't
- go back and make it weak. This error cannot caught in
+ go back and make it weak. This error cannot be caught in
declare_weak because the NEWDECL and OLDDECL was not yet
been merged; therefore, TREE_ASM_WRITTEN was not set. */
if (TREE_ASM_WRITTEN (olddecl))
error ("weak declaration of %q+D must be public", decl);
else if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
error ("weak declaration of %q+D must precede definition", decl);
- else if (SUPPORTS_WEAK)
- {
- if (! DECL_WEAK (decl))
- weak_decls = tree_cons (NULL, decl, weak_decls);
- }
- else
+ else if (!SUPPORTS_WEAK)
warning (0, "weak declaration of %q+D not supported", decl);
mark_weak (decl);
else if (! TREE_SYMBOL_REFERENCED (target))
{
/* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
- defined, otherwise we and weak_finish_1 would use a
+ defined, otherwise we and weak_finish_1 would use
different macros. */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
if (! decl)
{
- decl = build_decl (TREE_CODE (alias_decl), target,
+ decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
+ TREE_CODE (alias_decl), target,
TREE_TYPE (alias_decl));
DECL_EXTERNAL (decl) = 1;
static void
globalize_decl (tree decl)
{
- const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
if (DECL_WEAK (decl))
{
+ const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
tree *p, t;
#ifdef ASM_WEAKEN_DECL
return;
}
-#elif defined(ASM_MAKE_LABEL_LINKONCE)
- if (DECL_ONE_ONLY (decl))
- ASM_MAKE_LABEL_LINKONCE (asm_out_file, name);
#endif
- targetm.asm_out.globalize_label (asm_out_file, name);
+ targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
/* We have to be able to tell cgraph about the needed-ness of the target
of an alias. This requires that the decl have been defined. Aliases
that precede their definition have to be queued for later processing. */
-typedef struct alias_pair GTY(())
-{
+typedef struct GTY(()) alias_pair {
tree decl;
tree target;
} alias_pair;
find_decl_and_mark_needed (tree decl, tree target)
{
struct cgraph_node *fnode = NULL;
- struct cgraph_varpool_node *vnode = NULL;
+ struct varpool_node *vnode = NULL;
if (TREE_CODE (decl) == FUNCTION_DECL)
{
fnode = cgraph_node_for_asm (target);
if (fnode == NULL)
- vnode = cgraph_varpool_node_for_asm (target);
+ vnode = varpool_node_for_asm (target);
}
else
{
- vnode = cgraph_varpool_node_for_asm (target);
+ vnode = varpool_node_for_asm (target);
if (vnode == NULL)
fnode = cgraph_node_for_asm (target);
}
}
else if (vnode)
{
- cgraph_varpool_mark_needed_node (vnode);
+ varpool_mark_needed_node (vnode);
return vnode->decl;
}
else
{
ultimate_transparent_alias_target (&target);
+ if (!targetm.have_tls
+ && TREE_CODE (decl) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (decl))
+ {
+ decl = emutls_decl (decl);
+ target = get_emutls_object_name (target);
+ }
+
if (!TREE_SYMBOL_REFERENCED (target))
weakref_targets = tree_cons (decl, target, weakref_targets);
#else
if (!SUPPORTS_WEAK)
{
- error ("%Jweakref is not supported in this configuration", decl);
+ error_at (DECL_SOURCE_LOCATION (decl),
+ "weakref is not supported in this configuration");
return;
}
#endif
return;
}
+ if (!targetm.have_tls
+ && TREE_CODE (decl) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (decl))
+ {
+ decl = emutls_decl (decl);
+ target = get_emutls_object_name (target);
+ }
+
#ifdef ASM_OUTPUT_DEF
/* Make name accessible from other files, if appropriate. */
if (target_decl == NULL)
{
if (! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
- error ("%q+D aliased to undefined symbol %qs",
- p->decl, IDENTIFIER_POINTER (p->target));
+ error ("%q+D aliased to undefined symbol %qE",
+ p->decl, p->target);
}
else if (DECL_EXTERNAL (target_decl)
&& ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
- error ("%q+D aliased to external symbol %qs",
- p->decl, IDENTIFIER_POINTER (p->target));
+ error ("%q+D aliased to external symbol %qE",
+ p->decl, p->target);
}
}
{
#if !defined (ASM_OUTPUT_DEF)
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
- error ("%Jalias definitions not supported in this configuration", decl);
+ error_at (DECL_SOURCE_LOCATION (decl),
+ "alias definitions not supported in this configuration");
return;
# else
if (!DECL_WEAK (decl))
{
- error ("%Jonly weak aliases are supported in this configuration", decl);
+ error_at (DECL_SOURCE_LOCATION (decl),
+ "only weak aliases are supported in this configuration");
return;
}
# endif
if (TREE_CODE (decl) == FUNCTION_DECL)
cgraph_node (decl)->alias = true;
else
- cgraph_varpool_node (decl)->alias = true;
+ varpool_node (decl)->alias = true;
/* If the target has already been emitted, we don't have to queue the
- alias. This saves a tad o memory. */
- target_decl = find_decl_and_mark_needed (decl, target);
+ alias. This saves a tad of memory. */
+ if (cgraph_global_info_ready)
+ target_decl = find_decl_and_mark_needed (decl, target);
+ else
+ target_decl= NULL;
if (target_decl && TREE_ASM_WRITTEN (target_decl))
do_assemble_alias (decl, target);
else
/* A helper function to call assemble_visibility when needed for a decl. */
-static void
+int
maybe_assemble_visibility (tree decl)
{
enum symbol_visibility vis = DECL_VISIBILITY (decl);
if (vis != VISIBILITY_DEFAULT)
- targetm.asm_out.visibility (decl, vis);
+ {
+ targetm.asm_out.visibility (decl, vis);
+ return 1;
+ }
+ else
+ return 0;
}
/* Returns 1 if the target configuration supports defining public symbols
translation units without generating a linker error. */
void
-make_decl_one_only (tree decl)
+make_decl_one_only (tree decl, tree comdat_group)
{
gcc_assert (TREE_CODE (decl) == VAR_DECL
|| TREE_CODE (decl) == FUNCTION_DECL);
#ifdef MAKE_DECL_ONE_ONLY
MAKE_DECL_ONE_ONLY (decl);
#endif
- DECL_ONE_ONLY (decl) = 1;
+ DECL_COMDAT_GROUP (decl) = comdat_group;
}
else if (TREE_CODE (decl) == VAR_DECL
&& (DECL_INITIAL (decl) == 0 || DECL_INITIAL (decl) == error_mark_node))
}
enum tls_model
-decl_default_tls_model (tree decl)
+decl_default_tls_model (const_tree decl)
{
enum tls_model kind;
bool is_local;
unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
- return default_section_type_flags_1 (decl, name, reloc, flag_pic);
-}
-
-unsigned int
-default_section_type_flags_1 (tree decl, const char *name, int reloc,
- int shlib)
-{
unsigned int flags;
if (decl && TREE_CODE (decl) == FUNCTION_DECL)
flags = SECTION_CODE;
- else if (decl && decl_readonly_section_1 (decl, reloc, shlib))
+ else if (decl && decl_readonly_section (decl, reloc))
flags = 0;
else if (current_function_decl
&& cfun
- && cfun->unlikely_text_section_name
- && strcmp (name, cfun->unlikely_text_section_name) == 0)
+ && crtl->subsections.unlikely_text_section_name
+ && strcmp (name, crtl->subsections.unlikely_text_section_name) == 0)
flags = SECTION_CODE;
else if (!decl
&& (!current_function_decl || !cfun)
fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
fprintf (asm_out_file, ",%s,comdat",
- lang_hooks.decls.comdat_group (decl));
+ IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
}
putc ('\n', asm_out_file);
}
enum section_category
-categorize_decl_for_section (tree decl, int reloc, int shlib)
+categorize_decl_for_section (const_tree decl, int reloc)
{
enum section_category ret;
|| TREE_SIDE_EFFECTS (decl)
|| ! TREE_CONSTANT (DECL_INITIAL (decl)))
{
- if (shlib && (reloc & 2))
- ret = SECCAT_DATA_REL;
- else if (shlib && reloc)
- ret = SECCAT_DATA_REL_LOCAL;
+ /* Here the reloc_rw_mask is not testing whether the section should
+ be read-only or not, but whether the dynamic link will have to
+ do something. If so, we wish to segregate the data in order to
+ minimize cache misses inside the dynamic linker. */
+ if (reloc & targetm.asm_out.reloc_rw_mask ())
+ ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
else
ret = SECCAT_DATA;
}
- else if (shlib && (reloc & 2))
- ret = SECCAT_DATA_REL_RO;
- else if (shlib && reloc)
- ret = SECCAT_DATA_REL_RO_LOCAL;
+ else if (reloc & targetm.asm_out.reloc_rw_mask ())
+ ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
else if (reloc || flag_merge_constants < 2)
/* C and C++ don't allow different variables to share the same
location. -fmerge-all-constants allows even that (at the
}
else if (TREE_CODE (decl) == CONSTRUCTOR)
{
- if ((shlib && reloc)
+ if ((reloc & targetm.asm_out.reloc_rw_mask ())
|| TREE_SIDE_EFFECTS (decl)
|| ! TREE_CONSTANT (decl))
ret = SECCAT_DATA;
ret = SECCAT_RODATA;
/* There are no read-only thread-local sections. */
- if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
+ if (TREE_CODE (decl) == VAR_DECL && DECL_TLS_MODEL (decl))
{
+ if (DECL_TLS_MODEL (decl) == TLS_MODEL_EMULATED)
+ {
+ if (DECL_EMUTLS_VAR_P (decl))
+ {
+ if (targetm.emutls.var_section)
+ ret = SECCAT_EMUTLS_VAR;
+ }
+ else
+ {
+ if (targetm.emutls.tmpl_prefix)
+ ret = SECCAT_EMUTLS_TMPL;
+ }
+ }
/* Note that this would be *just* SECCAT_BSS, except that there's
no concept of a read-only thread-local-data section. */
- if (ret == SECCAT_BSS
- || (flag_zero_initialized_in_bss
- && initializer_zerop (DECL_INITIAL (decl))))
+ else if (ret == SECCAT_BSS
+ || (flag_zero_initialized_in_bss
+ && initializer_zerop (DECL_INITIAL (decl))))
ret = SECCAT_TBSS;
else
ret = SECCAT_TDATA;
}
bool
-decl_readonly_section (tree decl, int reloc)
+decl_readonly_section (const_tree decl, int reloc)
{
- return decl_readonly_section_1 (decl, reloc, flag_pic);
-}
-
-bool
-decl_readonly_section_1 (tree decl, int reloc, int shlib)
-{
- switch (categorize_decl_for_section (decl, reloc, shlib))
+ switch (categorize_decl_for_section (decl, reloc))
{
case SECCAT_RODATA:
case SECCAT_RODATA_MERGE_STR:
default_elf_select_section (tree decl, int reloc,
unsigned HOST_WIDE_INT align)
{
- return default_elf_select_section_1 (decl, reloc, align, flag_pic);
-}
-
-section *
-default_elf_select_section_1 (tree decl, int reloc,
- unsigned HOST_WIDE_INT align, int shlib)
-{
const char *sname;
- switch (categorize_decl_for_section (decl, reloc, shlib))
+ switch (categorize_decl_for_section (decl, reloc))
{
case SECCAT_TEXT:
/* We're not supposed to be called on FUNCTION_DECLs. */
case SECCAT_TBSS:
sname = ".tbss";
break;
+ case SECCAT_EMUTLS_VAR:
+ sname = targetm.emutls.var_section;
+ break;
+ case SECCAT_EMUTLS_TMPL:
+ sname = targetm.emutls.tmpl_section;
+ break;
default:
gcc_unreachable ();
}
void
default_unique_section (tree decl, int reloc)
{
- default_unique_section_1 (decl, reloc, flag_pic);
-}
-
-void
-default_unique_section_1 (tree decl, int reloc, int shlib)
-{
/* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
- const char *prefix, *name;
- size_t nlen, plen;
+ const char *prefix, *name, *linkonce;
char *string;
- switch (categorize_decl_for_section (decl, reloc, shlib))
+ switch (categorize_decl_for_section (decl, reloc))
{
case SECCAT_TEXT:
- prefix = one_only ? ".gnu.linkonce.t." : ".text.";
+ prefix = one_only ? ".t" : ".text";
break;
case SECCAT_RODATA:
case SECCAT_RODATA_MERGE_STR:
case SECCAT_RODATA_MERGE_STR_INIT:
case SECCAT_RODATA_MERGE_CONST:
- prefix = one_only ? ".gnu.linkonce.r." : ".rodata.";
+ prefix = one_only ? ".r" : ".rodata";
break;
case SECCAT_SRODATA:
- prefix = one_only ? ".gnu.linkonce.s2." : ".sdata2.";
+ prefix = one_only ? ".s2" : ".sdata2";
break;
case SECCAT_DATA:
- prefix = one_only ? ".gnu.linkonce.d." : ".data.";
+ prefix = one_only ? ".d" : ".data";
break;
case SECCAT_DATA_REL:
- prefix = one_only ? ".gnu.linkonce.d.rel." : ".data.rel.";
+ prefix = one_only ? ".d.rel" : ".data.rel";
break;
case SECCAT_DATA_REL_LOCAL:
- prefix = one_only ? ".gnu.linkonce.d.rel.local." : ".data.rel.local.";
+ prefix = one_only ? ".d.rel.local" : ".data.rel.local";
break;
case SECCAT_DATA_REL_RO:
- prefix = one_only ? ".gnu.linkonce.d.rel.ro." : ".data.rel.ro.";
+ prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
break;
case SECCAT_DATA_REL_RO_LOCAL:
- prefix = one_only ? ".gnu.linkonce.d.rel.ro.local."
- : ".data.rel.ro.local.";
+ prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
break;
case SECCAT_SDATA:
- prefix = one_only ? ".gnu.linkonce.s." : ".sdata.";
+ prefix = one_only ? ".s" : ".sdata";
break;
case SECCAT_BSS:
- prefix = one_only ? ".gnu.linkonce.b." : ".bss.";
+ prefix = one_only ? ".b" : ".bss";
break;
case SECCAT_SBSS:
- prefix = one_only ? ".gnu.linkonce.sb." : ".sbss.";
+ prefix = one_only ? ".sb" : ".sbss";
break;
case SECCAT_TDATA:
- prefix = one_only ? ".gnu.linkonce.td." : ".tdata.";
+ prefix = one_only ? ".td" : ".tdata";
break;
case SECCAT_TBSS:
- prefix = one_only ? ".gnu.linkonce.tb." : ".tbss.";
+ prefix = one_only ? ".tb" : ".tbss";
+ break;
+ case SECCAT_EMUTLS_VAR:
+ prefix = targetm.emutls.var_section;
+ break;
+ case SECCAT_EMUTLS_TMPL:
+ prefix = targetm.emutls.tmpl_section;
break;
default:
gcc_unreachable ();
}
- plen = strlen (prefix);
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
name = targetm.strip_name_encoding (name);
- nlen = strlen (name);
- string = alloca (nlen + plen + 1);
- memcpy (string, prefix, plen);
- memcpy (string + plen, name, nlen + 1);
+ /* If we're using one_only, then there needs to be a .gnu.linkonce
+ prefix to the section name. */
+ linkonce = one_only ? ".gnu.linkonce" : "";
+
+ string = ACONCAT ((linkonce, prefix, ".", name, NULL));
- DECL_SECTION_NAME (decl) = build_string (nlen + plen, string);
+ DECL_SECTION_NAME (decl) = build_string (strlen (string), string);
+}
+
+/* Like compute_reloc_for_constant, except for an RTX. The return value
+ is a mask for which bit 1 indicates a global relocation, and bit 0
+ indicates a local relocation. */
+
+static int
+compute_reloc_for_rtx_1 (rtx *xp, void *data)
+{
+ int *preloc = (int *) data;
+ rtx x = *xp;
+
+ switch (GET_CODE (x))
+ {
+ case SYMBOL_REF:
+ *preloc |= SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
+ break;
+ case LABEL_REF:
+ *preloc |= 1;
+ break;
+ default:
+ break;
+ }
+
+ return 0;
+}
+
+static int
+compute_reloc_for_rtx (rtx x)
+{
+ int reloc;
+
+ switch (GET_CODE (x))
+ {
+ case CONST:
+ case SYMBOL_REF:
+ case LABEL_REF:
+ reloc = 0;
+ for_each_rtx (&x, compute_reloc_for_rtx_1, &reloc);
+ return reloc;
+
+ default:
+ return 0;
+ }
}
section *
rtx x,
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
- if (flag_pic)
- switch (GET_CODE (x))
- {
- case CONST:
- case SYMBOL_REF:
- case LABEL_REF:
- return data_section;
-
- default:
- break;
- }
-
- return readonly_data_section;
+ if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
+ return data_section;
+ else
+ return readonly_data_section;
}
section *
default_elf_select_rtx_section (enum machine_mode mode, rtx x,
unsigned HOST_WIDE_INT align)
{
- /* ??? Handle small data here somehow. */
+ int reloc = compute_reloc_for_rtx (x);
- if (flag_pic)
- switch (GET_CODE (x))
- {
- case CONST:
- case SYMBOL_REF:
- return get_named_section (NULL, ".data.rel.ro", 3);
+ /* ??? Handle small data here somehow. */
- case LABEL_REF:
+ if (reloc & targetm.asm_out.reloc_rw_mask ())
+ {
+ if (reloc == 1)
return get_named_section (NULL, ".data.rel.ro.local", 1);
-
- default:
- break;
- }
+ else
+ return get_named_section (NULL, ".data.rel.ro", 3);
+ }
return mergeable_constant_section (mode, align, 0);
}
flags |= SYMBOL_FLAG_FUNCTION;
if (targetm.binds_local_p (decl))
flags |= SYMBOL_FLAG_LOCAL;
- if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
+ if (targetm.have_tls && TREE_CODE (decl) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (decl))
flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
else if (targetm.in_small_data_p (decl))
flags |= SYMBOL_FLAG_SMALL;
{
char buffer[100];
- sprintf (buffer, ". + " HOST_WIDE_INT_PRINT_DEC,
+ sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
SYMBOL_REF_BLOCK_OFFSET (symbol));
ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
/* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
bool
-default_use_anchors_for_symbol_p (rtx symbol)
+default_use_anchors_for_symbol_p (const_rtx symbol)
{
section *sect;
tree decl;
wrt cross-module name binding. */
bool
-default_binds_local_p (tree exp)
+default_binds_local_p (const_tree exp)
{
return default_binds_local_p_1 (exp, flag_shlib);
}
bool
-default_binds_local_p_1 (tree exp, int shlib)
+default_binds_local_p_1 (const_tree exp, int shlib)
{
bool local_p;
else if (DECL_WEAK (exp))
local_p = false;
/* If PIC, then assume that any global name can be overridden by
- symbols resolved from other modules. */
+ symbols resolved from other modules, unless we are compiling with
+ -fwhole-program, which assumes that names are local. */
else if (shlib)
- local_p = false;
+ local_p = flag_whole_program;
/* Uninitialized COMMON variable may be unified with symbols
resolved from other modules. */
else if (DECL_COMMON (exp)
}
#endif /* GLOBAL_ASM_OP */
+/* Default function to output code that will globalize a declaration. */
+void
+default_globalize_decl_name (FILE * stream, tree decl)
+{
+ const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
+ targetm.asm_out.globalize_label (stream, name);
+}
+
/* Default function to output a label for unwind information. The
default is to do nothing. A target that needs nonlocal labels for
unwind information must provide its own function to do this. */
default_internal_label (FILE *stream, const char *prefix,
unsigned long labelno)
{
- char *const buf = alloca (40 + strlen (prefix));
+ char *const buf = (char *) alloca (40 + strlen (prefix));
ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
}
void
default_file_start (void)
{
- if (targetm.file_start_app_off && !flag_verbose_asm)
+ if (targetm.file_start_app_off
+ && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
fputs (ASM_APP_OFF, asm_out_file);
if (targetm.file_start_file_directive)
{
case SECTION_NAMED:
if (cfun
- && !cfun->unlikely_text_section_name
+ && !crtl->subsections.unlikely_text_section_name
&& strcmp (new_section->named.name,
UNLIKELY_EXECUTED_TEXT_SECTION_NAME) == 0)
- cfun->unlikely_text_section_name = UNLIKELY_EXECUTED_TEXT_SECTION_NAME;
+ crtl->subsections.unlikely_text_section_name = UNLIKELY_EXECUTED_TEXT_SECTION_NAME;
targetm.asm_out.named_section (new_section->named.name,
new_section->named.common.flags,
htab_traverse (object_block_htab, output_object_block_htab, NULL);
}
+/* This function provides a possible implementation of the
+ TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
+ by -frecord-gcc-switches it creates a new mergeable, string section in the
+ assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
+ contains the switches in ASCII format.
+
+ FIXME: This code does not correctly handle double quote characters
+ that appear inside strings (it strips them rather than preserving them).
+ FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL
+ characters - instead it treats them as sub-string separators. Since
+ we want to emit NUL string terminators into the object file we have to use
+ ASM_OUTPUT_SKIP. */
+
+int
+elf_record_gcc_switches (print_switch_type type, const char * name)
+{
+ static char buffer[1024];
+
+ /* This variable is used as part of a simplistic heuristic to detect
+ command line switches which take an argument:
+
+ "If a command line option does not start with a dash then
+ it is an argument for the previous command line option."
+
+ This fails in the case of the command line option which is the name
+ of the file to compile, but otherwise it is pretty reasonable. */
+ static bool previous_name_held_back = FALSE;
+
+ switch (type)
+ {
+ case SWITCH_TYPE_PASSED:
+ if (* name != '-')
+ {
+ if (previous_name_held_back)
+ {
+ unsigned int len = strlen (buffer);
+
+ snprintf (buffer + len, sizeof buffer - len, " %s", name);
+ ASM_OUTPUT_ASCII (asm_out_file, buffer, strlen (buffer));
+ ASM_OUTPUT_SKIP (asm_out_file, (unsigned HOST_WIDE_INT) 1);
+ previous_name_held_back = FALSE;
+ }
+ else
+ {
+ strncpy (buffer, name, sizeof buffer);
+ ASM_OUTPUT_ASCII (asm_out_file, buffer, strlen (buffer));
+ ASM_OUTPUT_SKIP (asm_out_file, (unsigned HOST_WIDE_INT) 1);
+ }
+ }
+ else
+ {
+ if (previous_name_held_back)
+ {
+ ASM_OUTPUT_ASCII (asm_out_file, buffer, strlen (buffer));
+ ASM_OUTPUT_SKIP (asm_out_file, (unsigned HOST_WIDE_INT) 1);
+ }
+
+ strncpy (buffer, name, sizeof buffer);
+ previous_name_held_back = TRUE;
+ }
+ break;
+
+ case SWITCH_TYPE_DESCRIPTIVE:
+ if (name == NULL)
+ {
+ /* Distinguish between invocations where name is NULL. */
+ static bool started = false;
+
+ if (started)
+ {
+ if (previous_name_held_back)
+ {
+ ASM_OUTPUT_ASCII (asm_out_file, buffer, strlen (buffer));
+ ASM_OUTPUT_SKIP (asm_out_file, (unsigned HOST_WIDE_INT) 1);
+ }
+ }
+ else
+ {
+ section * sec;
+
+ sec = get_section (targetm.asm_out.record_gcc_switches_section,
+ SECTION_DEBUG
+ | SECTION_MERGE
+ | SECTION_STRINGS
+ | (SECTION_ENTSIZE & 1),
+ NULL);
+ switch_to_section (sec);
+ started = true;
+ }
+ }
+
+ default:
+ break;
+ }
+
+ /* The return value is currently ignored by the caller, but must be 0.
+ For -fverbose-asm the return value would be the number of characters
+ emitted into the assembler file. */
+ return 0;
+}
+
+/* Emit text to declare externally defined symbols. It is needed to
+ properly support non-default visibility. */
+void
+default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
+ tree decl,
+ const char *name ATTRIBUTE_UNUSED)
+{
+ /* We output the name if and only if TREE_SYMBOL_REFERENCED is
+ set in order to avoid putting out names that are never really
+ used. */
+ if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
+ && targetm.binds_local_p (decl))
+ maybe_assemble_visibility (decl);
+}
+
#include "gt-varasm.h"