+2006-02-18 Richard Sandiford <richard@codesourcery.com>
+
+ * cselib.c (cselib_init): Change RTX_SIZE to RTX_CODE_SIZE.
+ * emit-rtl.c (copy_rtx_if_shared_1): Use shallow_copy_rtx.
+ (copy_insn_1): Likewise. Don't copy each field individually.
+ Reindent.
+ * read-rtl.c (apply_macro_to_rtx): Use RTX_CODE_SIZE instead
+ of RTX_SIZE.
+ * reload1.c (eliminate_regs): Use shallow_copy_rtx.
+ * rtl.c (rtx_size): Rename variable to...
+ (rtx_code_size): ...this.
+ (rtx_size): New function.
+ (rtx_alloc_stat): Use RTX_CODE_SIZE instead of RTX_SIZE.
+ (copy_rtx): Use shallow_copy_rtx. Don't copy each field individually.
+ Reindent.
+ (shallow_copy_rtx_stat): Use rtx_size instead of RTX_SIZE.
+ * rtl.h (rtx_code_size): New variable.
+ (rtx_size): Change from a variable to a function.
+ (RTX_SIZE): Rename to...
+ (RTX_CODE_SIZE): ...this.
+
+ PR target/9703
+ PR tree-optimization/17106
+ * doc/tm.texi (TARGET_USE_BLOCKS_FOR_CONSTANT_P): Document.
+ (Anchored Addresses): New section.
+ * doc/invoke.texi (-fsection-anchors): Document.
+ * doc/rtl.texi (SYMBOL_REF_IN_BLOCK_P, SYMBOL_FLAG_IN_BLOCK): Likewise.
+ (SYMBOL_REF_ANCHOR_P, SYMBOL_FLAG_ANCHOR): Likewise.
+ (SYMBOL_REF_BLOCK, SYMBOL_REF_BLOCK_OFFSET): Likewise.
+ * hooks.c (hook_bool_mode_rtx_false): New function.
+ * hooks.h (hook_bool_mode_rtx_false): Declare.
+ * gengtype.c (create_optional_field): New function.
+ (adjust_field_rtx_def): Add the "block_sym" field for SYMBOL_REFs when
+ SYMBOL_REF_IN_BLOCK_P is true.
+ * target.h (output_anchor, use_blocks_for_constant_p): New hooks.
+ (min_anchor_offset, max_anchor_offset): Likewise.
+ (use_anchors_for_symbol_p): New hook.
+ * toplev.c (compile_file): Call output_object_blocks.
+ (target_supports_section_anchors_p): New function.
+ (process_options): Check that -fsection-anchors is only used on
+ targets that support it and when -funit-at-a-time is in effect.
+ * tree-ssa-loop-ivopts.c (prepare_decl_rtl): Only create DECL_RTL
+ if the decl doesn't have one.
+ * dwarf2out.c: Remove instantiations of VEC(rtx,gc).
+ * expr.c (emit_move_multi_word, emit_move_insn): Pass the result
+ of force_const_mem through use_anchored_address.
+ (expand_expr_constant): New function.
+ (expand_expr_addr_expr_1): Call it. Use the same modifier when
+ calling expand_expr for INDIRECT_REF.
+ (expand_expr_real_1): Pass DECL_RTL through use_anchored_address
+ for all modifiers except EXPAND_INITIALIZER. Use expand_expr_constant.
+ * expr.h (use_anchored_address): Declare.
+ * loop-unroll.c: Don't declare rtx vectors here.
+ * explow.c: Include output.h.
+ (validize_mem): Call use_anchored_address.
+ (use_anchored_address): New function.
+ * common.opt (-fsection-anchors): New switch.
+ * varasm.c (object_block_htab, anchor_labelno): New variables.
+ (hash_section, object_block_entry_eq, object_block_entry_hash)
+ (use_object_blocks_p, get_block_for_section, create_block_symbol)
+ (use_blocks_for_decl_p, change_symbol_section): New functions.
+ (get_variable_section): New function, split out from assemble_variable.
+ (make_decl_rtl): Create a block symbol if use_object_blocks_p and
+ use_blocks_for_decl_p say so. Use change_symbol_section if the
+ symbol has already been created.
+ (assemble_variable_contents): New function, split out from...
+ (assemble_variable): ...here. Don't output any code for
+ block symbols; just pass them to place_block_symbol.
+ Use get_variable_section and assemble_variable_contents.
+ (get_constant_alignment, get_constant_section, get_constant_size): New
+ functions, split from output_constant_def_contents.
+ (build_constant_desc): Create a block symbol if use_object_blocks_p
+ says so. Or into SYMBOL_REF_FLAGS.
+ (assemble_constant_contents): New function, split from...
+ (output_constant_def_contents): ...here. Don't output any code
+ for block symbols; just pass them to place_block_symbol.
+ Use get_constant_section and get_constant_alignment.
+ (force_const_mem): Create a block symbol if use_object_blocks_p and
+ use_blocks_for_constant_p say so. Or into SYMBOL_REF_FLAGS.
+ (output_constant_pool_1): Add an explicit alignment argument.
+ Don't switch sections here.
+ (output_constant_pool): Adjust call to output_constant_pool_1.
+ Switch sections here instead. Don't output anything for block symbols;
+ just pass them to place_block_symbol.
+ (init_varasm_once): Initialize object_block_htab.
+ (default_encode_section_info): Keep the old SYMBOL_FLAG_IN_BLOCK.
+ (default_asm_output_anchor, default_use_anchors_for_symbol_p)
+ (place_block_symbol, get_section_anchor, output_object_block)
+ (output_object_block_htab, output_object_blocks): New functions.
+ * target-def.h (TARGET_ASM_OUTPUT_ANCHOR): New macro.
+ (TARGET_ASM_OUT): Include it.
+ (TARGET_USE_BLOCKS_FOR_CONSTANT_P): New macro.
+ (TARGET_MIN_ANCHOR_OFFSET, TARGET_MAX_ANCHOR_OFFSET): New macros.
+ (TARGET_USE_ANCHORS_FOR_SYMBOL_P): New macro.
+ (TARGET_INITIALIZER): Include them.
+ * rtl.c (rtl_check_failed_block_symbol): New function.
+ * rtl.h: Include vec.h. Declare heap and gc rtx vectors.
+ (block_symbol, object_block): New structures.
+ (rtx_def): Add a block_symbol field to the union.
+ (BLOCK_SYMBOL_CHECK): New macro.
+ (rtl_check_failed_block_symbol): Declare.
+ (SYMBOL_FLAG_IN_BLOCK, SYMBOL_FLAG_ANCHOR): New SYMBOL_REF flags.
+ (SYMBOL_REF_IN_BLOCK_P, SYMBOL_REF_ANCHOR_P): New predicates.
+ (SYMBOL_FLAG_MACH_DEP_SHIFT): Bump by 2.
+ (SYMBOL_REF_BLOCK, SYMBOL_REF_BLOCK_OFFSET): New accessors.
+ * output.h (output_object_blocks): Declare.
+ (object_block): Name structure.
+ (place_block_symbol, get_section_anchor, default_asm_output_anchor)
+ (default_use_anchors_for_symbol_p): Declare.
+ * Makefile.in (RTL_BASE_H): Add vec.h.
+ (explow.o): Depend on output.h.
+ * config/rs6000/rs6000.c (TARGET_MIN_ANCHOR_OFFSET): Override default.
+ (TARGET_MAX_ANCHOR_OFFSET): Likewise.
+ (TARGET_USE_BLOCKS_FOR_CONSTANT_P): Likewise.
+ (rs6000_use_blocks_for_constant_p): New function.
+
2006-02-18 John David Anglin <dave.anglin@nrc-cnrc.gc.ca>
* doc/install.texi (hppa*-hp-hpux*): Update for 4.1.0.
LANGHOOKS_DEF_H = langhooks-def.h $(HOOKS_H)
TARGET_DEF_H = target-def.h $(HOOKS_H)
RTL_BASE_H = rtl.h rtl.def $(MACHMODE_H) reg-notes.def insn-notes.def \
- input.h real.h statistics.h
+ input.h real.h statistics.h vec.h
RTL_H = $(RTL_BASE_H) genrtl.h
PARAMS_H = params.h params.def
BUILTINS_DEF = builtins.def sync-builtins.def omp-builtins.def
toplev.h $(TM_P_H) langhooks.h
explow.o : explow.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) hard-reg-set.h insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) \
- toplev.h $(FUNCTION_H) $(GGC_H) $(TM_P_H) langhooks.h gt-explow.h target.h
+ toplev.h $(FUNCTION_H) $(GGC_H) $(TM_P_H) langhooks.h gt-explow.h target.h \
+ output.h
optabs.o : optabs.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) insn-config.h $(EXPR_H) $(OPTABS_H) libfuncs.h \
$(RECOG_H) reload.h toplev.h $(GGC_H) real.h $(TM_P_H) except.h \
Common RejectNegative Joined UInteger
-fsched-stalled-insns-dep=<number> Set dependence distance checking in premature scheduling of queued insns
+fsection-anchors
+Common Report Var(flag_section_anchors)
+Access data in the same section from shared anchor points
+
frtl-abstract-sequences
Common Report Var(flag_rtl_seqabstr)
Perform sequence abstraction optimization on RTL
static void rs6000_elf_encode_section_info (tree, rtx, int)
ATTRIBUTE_UNUSED;
#endif
+static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif
+/* Use a 32-bit anchor range. This leads to sequences like:
+
+ addis tmp,anchor,high
+ add dest,tmp,low
+
+ where tmp itself acts as an anchor, and can be shared between
+ accesses to the same 64k page. */
+#undef TARGET_MIN_ANCHOR_OFFSET
+#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
+#undef TARGET_MAX_ANCHOR_OFFSET
+#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
+#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
+#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
+
struct gcc_target targetm = TARGET_INITIALIZER;
\f
}
#endif /* USING_ELFOS_H */
+\f
+/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
+static bool
+rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
+{
+ return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
+}
\f
/* Return a REG that occurs in ADDR with coefficient 1.
ADDR can be effectively incremented by incrementing REG.
sizeof (struct elt_loc_list), 10);
cselib_val_pool = create_alloc_pool ("cselib_val_list",
sizeof (cselib_val), 10);
- value_pool = create_alloc_pool ("value",
- RTX_SIZE (VALUE), 100);
+ value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
cselib_record_memory = record_memory;
/* This is only created once. */
if (! callmem)
-fsched-stalled-insns=@var{n} -fsched-stalled-insns-dep=@var{n} @gol
-fsched2-use-superblocks @gol
-fsched2-use-traces -freschedule-modulo-scheduled-loops @gol
--fsignaling-nans -fsingle-precision-constant @gol
+-fsection-anchors -fsignaling-nans -fsingle-precision-constant @gol
-fstack-protector -fstack-protector-all @gol
-fstrength-reduce -fstrict-aliasing -ftracer -fthread-jumps @gol
-funroll-all-loops -funroll-loops -fpeel-loops @gol
@item -fstack-protector-all
Like @option{-fstack-protector} except that all functions are protected.
+@item -fsection-anchors
+@opindex fsection-anchors
+Try to reduce the number of symbolic address calculations by using
+shared ``anchor'' symbols to address nearby objects. This transformation
+can help to reduce the number of GOT entries and GOT accesses on some
+targets.
+
+For example, the implementation of the following function @code{foo}:
+
+@smallexample
+static int a, b, c;
+int foo (void) @{ return a + b + c; @}
+@end smallexample
+
+would usually calculate the addresses of all three variables, but if you
+compile it with @option{-fsection-anchors}, it will access the variables
+from a common anchor point instead. The effect is similar to the
+following pseudocode (which isn't valid C):
+
+@smallexample
+int foo (void)
+@{
+ register int *xr = &x;
+ return xr[&a - &x] + xr[&b - &x] + xr[&c - &x];
+@}
+@end smallexample
+
+Not all targets support this option.
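+
+The option also requires @option{-funit-at-a-time}; a hypothetical
+invocation might therefore look like:
+
+@smallexample
+gcc -O2 -funit-at-a-time -fsection-anchors foo.c
+@end smallexample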
+
@item --param @var{name}=@var{value}
@opindex param
In some places, GCC uses various constants to control the amount of
This is a multi-bit field accessor that returns the @code{tls_model}
to be used for a thread-local storage symbol. It returns zero for
non-thread-local symbols.
+
+@findex SYMBOL_REF_IN_BLOCK_P
+@findex SYMBOL_FLAG_IN_BLOCK
+@item SYMBOL_FLAG_IN_BLOCK
+Set if the symbol has been assigned to an @code{object_block} structure.
+@code{SYMBOL_REF_BLOCK} and @code{SYMBOL_REF_BLOCK_OFFSET} provide more
+information about such symbols.
+
+@findex SYMBOL_REF_ANCHOR_P
+@findex SYMBOL_FLAG_ANCHOR
+@cindex @option{-fsection-anchors}
+@item SYMBOL_FLAG_ANCHOR
+Set if the symbol is used as a section anchor. ``Section anchors''
+are symbols that have a known position within an @code{object_block}
+and that can be used to access nearby members of that block.
+They are used to implement @option{-fsection-anchors}.
+
+If this flag is set, then @code{SYMBOL_FLAG_IN_BLOCK} will be too.
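+
+For example, @code{use_anchored_address} applies this pair of flags
+when deciding whether a symbol is an ordinary block member that can be
+rewritten to use an anchor:
+
+@smallexample
+if (GET_CODE (base) != SYMBOL_REF
+    || !SYMBOL_REF_IN_BLOCK_P (base)
+    || SYMBOL_REF_ANCHOR_P (base)
+    || !targetm.use_anchors_for_symbol_p (base))
+  return x;
+@end smallexample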
@end table
Bits beginning with @code{SYMBOL_FLAG_MACH_DEP} are available for
the target's use.
@end table
+
+@findex SYMBOL_REF_BLOCK
+@item SYMBOL_REF_BLOCK (@var{x})
+If @samp{SYMBOL_REF_IN_BLOCK_P (@var{x})}, this is the @samp{object_block}
+structure to which the symbol belongs. The value is always nonnull.
+
+@findex SYMBOL_REF_BLOCK_OFFSET
+@item SYMBOL_REF_BLOCK_OFFSET (@var{x})
+If @samp{SYMBOL_REF_IN_BLOCK_P (@var{x})}, this is the offset of @var{x}
+from the first object in @samp{SYMBOL_REF_BLOCK (@var{x})}. The value is
+negative if @var{x} has not yet been assigned an offset.
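+
+For example, @code{use_anchored_address} in @file{explow.c} uses these
+accessors roughly as follows once it has decided to rewrite a block
+member:
+
+@smallexample
+place_block_symbol (base);
+offset += SYMBOL_REF_BLOCK_OFFSET (base);
+base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
+                           SYMBOL_REF_TLS_MODEL (base));
+offset -= SYMBOL_REF_BLOCK_OFFSET (base);
+@end smallexample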
@end table
@node Flags
* Trampolines:: Code set up at run time to enter a nested function.
* Library Calls:: Controlling how library routines are implicitly called.
* Addressing Modes:: Defining addressing modes valid for memory operands.
+* Anchored Addresses:: Defining how @option{-fsection-anchors} should work.
* Condition Code:: Defining how insns update the condition code.
* Costs:: Defining relative costs of different operations.
* Scheduling:: Adjusting the behavior of the instruction scheduler.
of TLS symbols for various targets.
@end deftypefn
+@deftypefn {Target Hook} bool TARGET_USE_BLOCKS_FOR_CONSTANT_P (enum machine_mode @var{mode}, rtx @var{x})
+This hook should return true if pool entries for constant @var{x} can
+be placed in an @code{object_block} structure. @var{mode} is the mode
+of @var{x}.
+
+The default version returns false for all constants.
+@end deftypefn
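+
+For example, the @code{rs6000} port overrides this hook so that any
+pool entry not handled by @code{ASM_OUTPUT_SPECIAL_POOL_ENTRY_P} can be
+placed in a block:
+
+@smallexample
+static bool
+rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
+@{
+  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
+@}
+@end smallexample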
+
@deftypefn {Target Hook} tree TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD (void)
This hook should return the DECL of a function @var{f} that given an
address @var{addr} as an argument returns a mask @var{m} that can be
log2(@var{VS})-1 bits of @var{addr} will be considered.
@end deftypefn
+@node Anchored Addresses
+@section Anchored Addresses
+@cindex anchored addresses
+@cindex @option{-fsection-anchors}
+
+GCC usually addresses every static object as a separate entity.
+For example, if we have:
+
+@smallexample
+static int a, b, c;
+int foo (void) @{ return a + b + c; @}
+@end smallexample
+
+the code for @code{foo} will usually calculate three separate symbolic
+addresses: those of @code{a}, @code{b} and @code{c}. On some targets,
+it would be better to calculate just one symbolic address and access
+the three variables relative to it. The equivalent pseudocode would
+be something like:
+
+@smallexample
+int foo (void)
+@{
+ register int *xr = &x;
+ return xr[&a - &x] + xr[&b - &x] + xr[&c - &x];
+@}
+@end smallexample
+
+(which isn't valid C). We refer to shared addresses like @code{x} as
+``section anchors''. Their use is controlled by @option{-fsection-anchors}.
+
+The hooks below describe the target properties that GCC needs to know
+in order to make effective use of section anchors. It won't use
+section anchors at all unless either @code{TARGET_MIN_ANCHOR_OFFSET}
+or @code{TARGET_MAX_ANCHOR_OFFSET} is set to a nonzero value.
+
+@deftypevar {Target Hook} HOST_WIDE_INT TARGET_MIN_ANCHOR_OFFSET
+The minimum offset that should be applied to a section anchor.
+On most targets, it should be the smallest offset that can be
+applied to a base register while still giving a legitimate address
+for every mode. The default value is 0.
+@end deftypevar
+
+@deftypevar {Target Hook} HOST_WIDE_INT TARGET_MAX_ANCHOR_OFFSET
+Like @code{TARGET_MIN_ANCHOR_OFFSET}, but the maximum (inclusive)
+offset that should be applied to section anchors. The default
+value is 0.
+@end deftypevar
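+
+For example, a hypothetical port whose memory references accept 16-bit
+signed displacements might define:
+
+@smallexample
+#undef TARGET_MIN_ANCHOR_OFFSET
+#define TARGET_MIN_ANCHOR_OFFSET -0x8000
+#undef TARGET_MAX_ANCHOR_OFFSET
+#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
+@end smallexample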
+
+@deftypefn {Target Hook} void TARGET_ASM_OUTPUT_ANCHOR (rtx @var{x})
+Write the assembly code to define section anchor @var{x}, which is a
+@code{SYMBOL_REF} for which @samp{SYMBOL_REF_ANCHOR_P (@var{x})} is true.
+The hook is called with the assembly output position set to the beginning
+of @code{SYMBOL_REF_BLOCK (@var{x})}.
+
+If @code{ASM_OUTPUT_DEF} is available, the hook's default definition uses
+it to define the symbol as @samp{. + SYMBOL_REF_BLOCK_OFFSET (@var{x})}.
+If @code{ASM_OUTPUT_DEF} is not available, the hook's default definition
+is @code{NULL}, which disables the use of section anchors altogether.
+@end deftypefn
+
+@deftypefn {Target Hook} bool TARGET_USE_ANCHORS_FOR_SYMBOL_P (rtx @var{x})
+Return true if GCC should attempt to use anchors to access @code{SYMBOL_REF}
+@var{x}. You can assume @samp{SYMBOL_REF_IN_BLOCK_P (@var{x})} and
+@samp{!SYMBOL_REF_ANCHOR_P (@var{x})}.
+
+The default version is correct for most targets, but you might need to
+intercept this hook to handle things like target-specific attributes
+or target-specific sections.
+@end deftypefn
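+
+For example, a port in which small-data symbols are accessed relative
+to a dedicated base register might exclude them from anchoring with a
+sketch like the following (@code{example_use_anchors_for_symbol_p} is
+purely illustrative):
+
+@smallexample
+static bool
+example_use_anchors_for_symbol_p (rtx symbol)
+@{
+  if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_SMALL)
+    return false;
+  return default_use_anchors_for_symbol_p (symbol);
+@}
+@end smallexample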
+
@node Condition Code
@section Condition Code Status
@cindex condition code status
#define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif
-DEF_VEC_P(rtx);
-DEF_VEC_ALLOC_P(rtx,gc);
-
/* Array of RTXes referenced by the debugging information, which therefore
must be kept around forever. */
static GTY(()) VEC(rtx,gc) *used_rtx_array;
if (RTX_FLAG (x, used))
{
- rtx copy;
-
- copy = rtx_alloc (code);
- memcpy (copy, x, RTX_SIZE (code));
- x = copy;
+ x = shallow_copy_rtx (x);
copied = 1;
}
RTX_FLAG (x, used) = 1;
break;
}
- copy = rtx_alloc (code);
-
- /* Copy the various flags, and other information. We assume that
- all fields need copying, and then clear the fields that should
+ /* Copy the various flags, fields, and other information. We assume
+ that all fields need copying, and then clear the fields that should
not be copied. That is the sensible default behavior, and forces
us to explicitly document why we are *not* copying a flag. */
- memcpy (copy, orig, RTX_HDR_SIZE);
+ copy = shallow_copy_rtx (orig);
/* We do not copy the USED flag, which is used as a mark bit during
walks over the RTL. */
format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
- {
- copy->u.fld[i] = orig->u.fld[i];
- switch (*format_ptr++)
- {
- case 'e':
- if (XEXP (orig, i) != NULL)
- XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
- break;
+ switch (*format_ptr++)
+ {
+ case 'e':
+ if (XEXP (orig, i) != NULL)
+ XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
+ break;
- case 'E':
- case 'V':
- if (XVEC (orig, i) == orig_asm_constraints_vector)
- XVEC (copy, i) = copy_asm_constraints_vector;
- else if (XVEC (orig, i) == orig_asm_operands_vector)
- XVEC (copy, i) = copy_asm_operands_vector;
- else if (XVEC (orig, i) != NULL)
- {
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
- XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
- }
- break;
+ case 'E':
+ case 'V':
+ if (XVEC (orig, i) == orig_asm_constraints_vector)
+ XVEC (copy, i) = copy_asm_constraints_vector;
+ else if (XVEC (orig, i) == orig_asm_operands_vector)
+ XVEC (copy, i) = copy_asm_operands_vector;
+ else if (XVEC (orig, i) != NULL)
+ {
+ XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
+ for (j = 0; j < XVECLEN (copy, i); j++)
+ XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
+ }
+ break;
- case 't':
- case 'w':
- case 'i':
- case 's':
- case 'S':
- case 'u':
- case '0':
- /* These are left unchanged. */
- break;
+ case 't':
+ case 'w':
+ case 'i':
+ case 's':
+ case 'S':
+ case 'u':
+ case '0':
+ /* These are left unchanged. */
+ break;
- default:
- gcc_unreachable ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
if (code == SCRATCH)
{
#include "recog.h"
#include "langhooks.h"
#include "target.h"
+#include "output.h"
static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);
{
if (!MEM_P (ref))
return ref;
+ ref = use_anchored_address (ref);
if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
&& memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
return ref;
/* Don't alter REF itself, since that is probably a stack slot. */
return replace_equiv_address (ref, XEXP (ref, 0));
}
+
+/* If X is a memory reference to a member of an object block, try rewriting
+ it to use an anchor instead. Return the new memory reference on success
+ and the old one on failure. */
+
+rtx
+use_anchored_address (rtx x)
+{
+ rtx base;
+ HOST_WIDE_INT offset;
+
+ if (!flag_section_anchors)
+ return x;
+
+ if (!MEM_P (x))
+ return x;
+
+ /* Split the address into a base and offset. */
+ base = XEXP (x, 0);
+ offset = 0;
+ if (GET_CODE (base) == CONST
+ && GET_CODE (XEXP (base, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
+ {
+ offset += INTVAL (XEXP (XEXP (base, 0), 1));
+ base = XEXP (XEXP (base, 0), 0);
+ }
+
+ /* Check whether BASE is suitable for anchors. */
+ if (GET_CODE (base) != SYMBOL_REF
+ || !SYMBOL_REF_IN_BLOCK_P (base)
+ || SYMBOL_REF_ANCHOR_P (base)
+ || !targetm.use_anchors_for_symbol_p (base))
+ return x;
+
+ /* Decide where BASE is going to be. */
+ place_block_symbol (base);
+
+ /* Get the anchor we need to use. */
+ offset += SYMBOL_REF_BLOCK_OFFSET (base);
+ base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
+ SYMBOL_REF_TLS_MODEL (base));
+
+ /* Work out the offset from the anchor. */
+ offset -= SYMBOL_REF_BLOCK_OFFSET (base);
+
+ /* If we're going to run a CSE pass, force the anchor into a register.
+ We will then be able to reuse registers for several accesses, if the
+ target costs say that that's worthwhile. */
+ if (!cse_not_expected)
+ base = force_reg (GET_MODE (base), base);
+
+ return replace_equiv_address (x, plus_constant (base, offset));
+}
\f
/* Copy the value or contents of X to a new temp reg and return that reg. */
be able to get a part of Y. */
if (ypart == 0 && CONSTANT_P (y))
{
- y = force_const_mem (mode, y);
+ y = use_anchored_address (force_const_mem (mode, y));
ypart = operand_subword (y, i, 1, mode);
}
else if (ypart == 0)
of the non-legitimate constant. */
if (!y)
y = y_cst;
+ else
+ y = use_anchored_address (y);
}
}
}
\f
+/* Return a MEM that contains constant EXP. DEFER is as for
+ output_constant_def and MODIFIER is as for expand_expr. */
+
+static rtx
+expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
+{
+ rtx mem;
+
+ mem = output_constant_def (exp, defer);
+ if (modifier != EXPAND_INITIALIZER)
+ mem = use_anchored_address (mem);
+ return mem;
+}
+
/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
exception here is STRING_CST. */
if (TREE_CODE (exp) == CONSTRUCTOR
|| CONSTANT_CLASS_P (exp))
- return XEXP (output_constant_def (exp, 0), 0);
+ return XEXP (expand_expr_constant (exp, 0, modifier), 0);
/* Everything must be something allowed by is_gimple_addressable. */
switch (TREE_CODE (exp))
{
case INDIRECT_REF:
/* This case will happen via recursion for &a->b. */
- return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
+ return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
case CONST_DECL:
/* Recurse and make the output_constant_def clause above handle this. */
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
enum expand_modifier modifier, rtx *alt_rtl)
{
- rtx op0, op1, temp;
+ rtx op0, op1, temp, decl_rtl;
tree type = TREE_TYPE (exp);
int unsignedp;
enum machine_mode mode;
case FUNCTION_DECL:
case RESULT_DECL:
- gcc_assert (DECL_RTL (exp));
+ decl_rtl = DECL_RTL (exp);
+ gcc_assert (decl_rtl);
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't been used yet, write out an external
from its initializer, while the initializer is still being parsed.
See expand_decl. */
- if (MEM_P (DECL_RTL (exp))
- && REG_P (XEXP (DECL_RTL (exp), 0)))
- temp = validize_mem (DECL_RTL (exp));
+ if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
+ temp = validize_mem (decl_rtl);
/* If DECL_RTL is memory, we are in the normal case and either
the address is not valid or it is not a register and -fforce-addr
is specified, get the address into a register. */
- else if (MEM_P (DECL_RTL (exp))
- && modifier != EXPAND_CONST_ADDRESS
- && modifier != EXPAND_SUM
- && modifier != EXPAND_INITIALIZER
- && (! memory_address_p (DECL_MODE (exp),
- XEXP (DECL_RTL (exp), 0))
- || (flag_force_addr
- && !REG_P (XEXP (DECL_RTL (exp), 0)))))
+ else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
{
if (alt_rtl)
- *alt_rtl = DECL_RTL (exp);
- temp = replace_equiv_address (DECL_RTL (exp),
- copy_rtx (XEXP (DECL_RTL (exp), 0)));
+ *alt_rtl = decl_rtl;
+ decl_rtl = use_anchored_address (decl_rtl);
+ if (modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_SUM
+ && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
+ || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
+ temp = replace_equiv_address (decl_rtl,
+ copy_rtx (XEXP (decl_rtl, 0)));
}
/* If we got something, return it. But first, set the alignment
must be a promoted value. We return a SUBREG of the wanted mode,
but mark it so that we know that it was already extended. */
- if (REG_P (DECL_RTL (exp))
- && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
+ if (REG_P (decl_rtl)
+ && GET_MODE (decl_rtl) != DECL_MODE (exp))
{
enum machine_mode pmode;
pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
(TREE_CODE (exp) == RESULT_DECL
|| TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
- gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
+ gcc_assert (GET_MODE (decl_rtl) == pmode);
- temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
+ temp = gen_lowpart_SUBREG (mode, decl_rtl);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
return temp;
}
- return DECL_RTL (exp);
+ return decl_rtl;
case INTEGER_CST:
temp = immed_double_const (TREE_INT_CST_LOW (exp),
/* ... fall through ... */
case STRING_CST:
- temp = output_constant_def (exp, 1);
+ temp = expand_expr_constant (exp, 1, modifier);
/* temp contains a constant address.
On RISC machines where a constant address isn't valid,
|| modifier == EXPAND_CONST_ADDRESS)
&& TREE_CONSTANT (exp)))
{
- rtx constructor = output_constant_def (exp, 1);
+ rtx constructor = expand_expr_constant (exp, 1, modifier);
if (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
valid address. */
extern rtx validize_mem (rtx);
+extern rtx use_anchored_address (rtx);
+
/* Given REF, a MEM, and T, either the type of X or the expression
corresponding to REF, set the memory attributes. OBJECTP is nonzero
if we are making a new object of this type. */
return field;
}
+/* Like create_field, but the field is only valid when condition COND
+ is true. */
+
+static pair_p
+create_optional_field (pair_p next, type_p type, const char *name,
+ const char *cond)
+{
+ static int id = 1;
+ pair_p union_fields, field;
+ type_p union_type;
+
+ /* Create a fake union type with a single nameless field of type TYPE.
+ The field has a tag of "1". This allows us to make the presence
+ of a field of type TYPE depend on some boolean "desc" being true. */
+ union_fields = create_field (NULL, type, "");
+ union_fields->opt = create_option (union_fields->opt, "dot", "");
+ union_fields->opt = create_option (union_fields->opt, "tag", "1");
+ union_type = new_structure (xasprintf ("%s_%d", "fake_union", id++), 1,
+ &lexer_line, union_fields, NULL);
+
+ /* Create the field and give it the new fake union type. Add a "desc"
+ tag that specifies the condition under which the field is valid. */
+ field = create_field (next, union_type, name);
+ field->opt = create_option (field->opt, "desc", cond);
+ return field;
+}
+
/* We don't care how long a CONST_DOUBLE is. */
#define CONST_DOUBLE_FORMAT "ww"
/* We don't want to see codes that are only for generator files. */
"CONSTANT_POOL_ADDRESS_P (&%0)");
}
+ if (i == SYMBOL_REF)
+ {
+ /* Add the "block_sym" field if SYMBOL_REF_IN_BLOCK_P holds. */
+ type_p field_tp = find_structure ("block_symbol", 0);
+ subfields = create_optional_field (subfields, field_tp, "block_sym",
+ "SYMBOL_REF_IN_BLOCK_P (&%0)");
+ }
+
sname = xasprintf ("rtx_def_%s", rtx_name[i]);
substruct = new_structure (sname, 0, &lexer_line, subfields, NULL);
return false;
}
+/* Generic hook that takes (enum machine_mode, rtx) and returns false. */
+bool
+hook_bool_mode_rtx_false (enum machine_mode mode ATTRIBUTE_UNUSED,
+ rtx value ATTRIBUTE_UNUSED)
+{
+ return false;
+}
+
/* Generic hook that takes (FILE *, const char *) and does nothing. */
void
hook_void_FILEptr_constcharptr (FILE *a ATTRIBUTE_UNUSED, const char *b ATTRIBUTE_UNUSED)
extern bool hook_bool_void_true (void);
extern bool hook_bool_bool_false (bool);
extern bool hook_bool_mode_false (enum machine_mode);
+extern bool hook_bool_mode_rtx_false (enum machine_mode, rtx);
extern bool hook_bool_tree_false (tree);
extern bool hook_bool_tree_true (tree);
extern bool hook_bool_tree_hwi_hwi_tree_false (tree, HOST_WIDE_INT, HOST_WIDE_INT,
XEXP (XEXP (single_set, loc[0]), loc[1]). */
};
-DEF_VEC_P(rtx);
-DEF_VEC_ALLOC_P(rtx,heap);
-
/* Information about accumulators to expand. */
struct var_to_expand
/* Write all the constants in the constant pool. */
extern void output_constant_pool (const char *, tree);
+extern void output_object_blocks (void);
+
/* Return nonzero if VALUE is a valid constant-valued expression
for use in initializing a static variable; one that can be an
element of a "constant" initializer.
struct unnamed_section GTY ((tag ("0"))) unnamed;
};
+struct object_block;
+
/* Special well-known sections. */
extern GTY(()) section *text_section;
extern GTY(()) section *data_section;
const void *);
extern section *get_section (const char *, unsigned int, tree);
extern section *get_named_section (tree, const char *, int);
+extern void place_block_symbol (rtx);
+extern rtx get_section_anchor (struct object_block *, HOST_WIDE_INT,
+ enum tls_model);
extern section *mergeable_constant_section (enum machine_mode,
unsigned HOST_WIDE_INT,
unsigned int);
unsigned HOST_WIDE_INT);
extern void default_encode_section_info (tree, rtx, int);
extern const char *default_strip_name_encoding (const char *);
+extern void default_asm_output_anchor (rtx);
+extern bool default_use_anchors_for_symbol_p (rtx);
extern bool default_binds_local_p (tree);
extern bool default_binds_local_p_1 (tree, int);
extern void default_globalize_label (FILE *, const char *);
/* Create a shallow copy of ORIGINAL. */
bellwether_code = BELLWETHER_CODE (GET_CODE (original));
x = rtx_alloc (bellwether_code);
- memcpy (x, original, RTX_SIZE (bellwether_code));
+ memcpy (x, original, RTX_CODE_SIZE (bellwether_code));
/* Change the mode or code itself. */
group = macro->group;
new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
if (new != XEXP (x, i) && ! copied)
{
- rtx new_x = rtx_alloc (code);
- memcpy (new_x, x, RTX_SIZE (code));
- x = new_x;
+ x = shallow_copy_rtx (x);
copied = 1;
}
XEXP (x, i) = new;
XVEC (x, i)->elem);
if (! copied)
{
- rtx new_x = rtx_alloc (code);
- memcpy (new_x, x, RTX_SIZE (code));
- x = new_x;
+ x = shallow_copy_rtx (x);
copied = 1;
}
XVEC (x, i) = new_v;
/* Indexed by rtx code, gives the size of the rtx in bytes. */
-const unsigned char rtx_size[NUM_RTX_CODE] = {
+const unsigned char rtx_code_size[NUM_RTX_CODE] = {
#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) \
((ENUM) == CONST_INT || (ENUM) == CONST_DOUBLE \
? RTX_HDR_SIZE + (sizeof FORMAT - 1) * sizeof (HOST_WIDE_INT) \
return rt;
}
+/* Return the number of bytes occupied by rtx value X. */
+
+unsigned int
+rtx_size (rtx x)
+{
+ if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_IN_BLOCK_P (x))
+ return RTX_HDR_SIZE + sizeof (struct block_symbol);
+ return RTX_CODE_SIZE (GET_CODE (x));
+}
+
/* Allocate an rtx of code CODE. The CODE is stored in the rtx;
all the rest is initialized to zero. */
{
rtx rt;
- rt = (rtx) ggc_alloc_zone_pass_stat (RTX_SIZE (code), &rtl_zone);
+ rt = (rtx) ggc_alloc_zone_pass_stat (RTX_CODE_SIZE (code), &rtl_zone);
/* We want to clear everything up to the FLD array. Normally, this
is one int, but we don't want to assume that and it isn't very
#ifdef GATHER_STATISTICS
rtx_alloc_counts[code]++;
- rtx_alloc_sizes[code] += RTX_SIZE (code);
+ rtx_alloc_sizes[code] += RTX_CODE_SIZE (code);
#endif
return rt;
break;
}
- copy = rtx_alloc (code);
-
- /* Copy the various flags, and other information. We assume that
- all fields need copying, and then clear the fields that should
+ /* Copy the various flags, fields, and other information. We assume
+ that all fields need copying, and then clear the fields that should
not be copied. That is the sensible default behavior, and forces
us to explicitly document why we are *not* copying a flag. */
- memcpy (copy, orig, RTX_HDR_SIZE);
+ copy = shallow_copy_rtx (orig);
/* We do not copy the USED flag, which is used as a mark bit during
walks over the RTL. */
format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
- {
- copy->u.fld[i] = orig->u.fld[i];
- switch (*format_ptr++)
- {
- case 'e':
- if (XEXP (orig, i) != NULL)
- XEXP (copy, i) = copy_rtx (XEXP (orig, i));
- break;
-
- case 'E':
- case 'V':
- if (XVEC (orig, i) != NULL)
- {
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
- XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
- }
- break;
-
- case 't':
- case 'w':
- case 'i':
- case 's':
- case 'S':
- case 'T':
- case 'u':
- case 'B':
- case '0':
- /* These are left unchanged. */
- break;
-
- default:
- gcc_unreachable ();
- }
- }
+ switch (*format_ptr++)
+ {
+ case 'e':
+ if (XEXP (orig, i) != NULL)
+ XEXP (copy, i) = copy_rtx (XEXP (orig, i));
+ break;
+
+ case 'E':
+ case 'V':
+ if (XVEC (orig, i) != NULL)
+ {
+ XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
+ for (j = 0; j < XVECLEN (copy, i); j++)
+ XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
+ }
+ break;
+
+ case 't':
+ case 'w':
+ case 'i':
+ case 's':
+ case 'S':
+ case 'T':
+ case 'u':
+ case 'B':
+ case '0':
+ /* These are left unchanged. */
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
return copy;
}
rtx
shallow_copy_rtx_stat (rtx orig MEM_STAT_DECL)
{
+ unsigned int size;
rtx copy;
- copy = (rtx) ggc_alloc_zone_pass_stat (RTX_SIZE (GET_CODE (orig)),
- &rtl_zone);
- memcpy (copy, orig, RTX_SIZE (GET_CODE (orig)));
+ size = rtx_size (orig);
+ copy = (rtx) ggc_alloc_zone_pass_stat (size, &rtl_zone);
+ memcpy (copy, orig, size);
return copy;
}
\f
func, trim_filename (file), line);
}
+/* Report that line LINE of FILE tried to access the block symbol fields
+ of a non-block symbol. FUNC is the function that contains the line. */
+
+void
+rtl_check_failed_block_symbol (const char *file, int line, const char *func)
+{
+ internal_error
+ ("RTL check: attempt to treat non-block symbol as a block symbol "
+ "in %s, at %s:%d", func, trim_filename (file), line);
+}
+
/* XXX Maybe print the vector? */
void
rtvec_check_failed_bounds (rtvec r, int n, const char *file, int line,
#include "machmode.h"
#include "input.h"
#include "real.h"
+#include "vec.h"
#undef FFS /* Some systems predefine this symbol; don't let it interfere. */
#undef FLOAT /* Likewise. */
extern const enum rtx_class rtx_class[NUM_RTX_CODE];
#define GET_RTX_CLASS(CODE) (rtx_class[(int) (CODE)])
-extern const unsigned char rtx_size[NUM_RTX_CODE];
+extern const unsigned char rtx_code_size[NUM_RTX_CODE];
extern const unsigned char rtx_next[NUM_RTX_CODE];
\f
/* The flags and bitfields of an ADDR_DIFF_VEC. BASE is the base label
};
typedef union rtunion_def rtunion;
+/* This structure remembers the position of a SYMBOL_REF within an
+ object_block structure. A SYMBOL_REF only provides this information
+ if SYMBOL_REF_IN_BLOCK_P is true. */
+struct block_symbol GTY(()) {
+ /* The usual SYMBOL_REF fields. */
+ rtunion GTY ((skip)) fld[3];
+
+ /* The block that contains this object. */
+ struct object_block *block;
+
+ /* The offset of this object from the start of its block. It is negative
+ if the symbol has not yet been assigned an offset. */
+ HOST_WIDE_INT offset;
+};
+
+DEF_VEC_P(rtx);
+DEF_VEC_ALLOC_P(rtx,heap);
+DEF_VEC_ALLOC_P(rtx,gc);
+
+/* Describes a group of objects that are to be placed together in such
+ a way that their relative positions are known. */
+struct object_block GTY(())
+{
+ /* The section in which these objects should be placed. */
+ section *sect;
+
+ /* The alignment of the first object, measured in bits. */
+ unsigned int alignment;
+
+ /* The total size of the objects, measured in bytes. */
+ HOST_WIDE_INT size;
+
+ /* The SYMBOL_REFs for each object. The vector is sorted in
+ order of increasing offset and the following conditions will
+ hold for each element X:
+
+ SYMBOL_REF_IN_BLOCK_P (X)
+ !SYMBOL_REF_ANCHOR_P (X)
+ SYMBOL_REF_BLOCK (X) == [address of this structure]
+ SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
+ VEC(rtx,gc) *objects;
+
+ /* All the anchor SYMBOL_REFs used to address these objects, sorted
+ in order of increasing offset, and then increasing TLS model.
+ The following conditions will hold for each element X in this vector:
+
+ SYMBOL_REF_IN_BLOCK_P (X)
+ SYMBOL_REF_ANCHOR_P (X)
+ SYMBOL_REF_BLOCK (X) == [address of this structure]
+ SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
+ VEC(rtx,gc) *anchors;
+};
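+
+/* As an example of the above: if two 4-byte ints A and B are assigned
+   to the same data section, the block's OBJECTS vector holds their
+   SYMBOL_REFs at offsets 0 and 4, and ANCHORS holds every anchor
+   symbol created to address them.  */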
+
/* RTL expression ("rtx"). */
struct rtx_def GTY((chain_next ("RTX_NEXT (&%h)"),
union u {
rtunion fld[1];
HOST_WIDE_INT hwint[1];
+ struct block_symbol block_sym;
struct real_value rv;
} GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u;
};
#define RTX_HDR_SIZE offsetof (struct rtx_def, u)
/* The size in bytes of an rtx with code CODE. */
-#define RTX_SIZE(CODE) rtx_size[CODE]
+#define RTX_CODE_SIZE(CODE) rtx_code_size[CODE]
#define NULL_RTX (rtx) 0
__LINE__, __FUNCTION__); \
&_rtx->u.rv; })
+#define BLOCK_SYMBOL_CHECK(RTX) __extension__ \
+({ rtx const _symbol = (RTX); \
+ unsigned int flags = RTL_CHECKC1 (_symbol, 1, SYMBOL_REF).rt_int; \
+ if ((flags & SYMBOL_FLAG_IN_BLOCK) == 0) \
+ rtl_check_failed_block_symbol (__FILE__, __LINE__, \
+ __FUNCTION__); \
+ &_symbol->u.block_sym; })
+
extern void rtl_check_failed_bounds (rtx, int, const char *, int,
const char *)
ATTRIBUTE_NORETURN;
extern void rtl_check_failed_code_mode (rtx, enum rtx_code, enum machine_mode,
bool, const char *, int, const char *)
ATTRIBUTE_NORETURN;
+extern void rtl_check_failed_block_symbol (const char *, int, const char *)
+ ATTRIBUTE_NORETURN;
extern void rtvec_check_failed_bounds (rtvec, int, const char *, int,
const char *)
ATTRIBUTE_NORETURN;
#define XCMWINT(RTX, N, C, M) ((RTX)->u.hwint[N])
#define XCNMWINT(RTX, N, C, M) ((RTX)->u.hwint[N])
#define XCNMPRV(RTX, C, M) (&(RTX)->u.rv)
+#define BLOCK_SYMBOL_CHECK(RTX) (&(RTX)->u.block_sym)
#endif
#define SYMBOL_FLAG_EXTERNAL (1 << 6)
#define SYMBOL_REF_EXTERNAL_P(RTX) \
((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0)
+/* Set if this symbol has a block_symbol structure associated with it. */
+#define SYMBOL_FLAG_IN_BLOCK (1 << 7)
+#define SYMBOL_REF_IN_BLOCK_P(RTX) \
+ ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_IN_BLOCK) != 0)
+/* Set if this symbol is a section anchor. SYMBOL_REF_ANCHOR_P implies
+ SYMBOL_REF_IN_BLOCK_P. */
+#define SYMBOL_FLAG_ANCHOR (1 << 8)
+#define SYMBOL_REF_ANCHOR_P(RTX) \
+ ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0)
/* Subsequent bits are available for the target to use. */
-#define SYMBOL_FLAG_MACH_DEP_SHIFT 7
+#define SYMBOL_FLAG_MACH_DEP_SHIFT 9
#define SYMBOL_FLAG_MACH_DEP (1 << SYMBOL_FLAG_MACH_DEP_SHIFT)
+/* The block to which the given SYMBOL_REF belongs. Only valid if
+ SYMBOL_REF_IN_BLOCK_P (RTX). */
+#define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block)
+
+/* The byte offset of the given SYMBOL_REF from the start of its block,
+ or a negative value if the symbol has not yet been assigned a position.
+ Only valid if SYMBOL_REF_IN_BLOCK_P (RTX). */
+#define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset)
+
/* Define a macro to look for REG_INC notes,
but save time on machines where they never exist. */
extern rtx copy_rtx_if_shared (rtx);
/* In rtl.c */
+extern unsigned int rtx_size (rtx);
extern rtx shallow_copy_rtx_stat (rtx MEM_STAT_DECL);
#define shallow_copy_rtx(a) shallow_copy_rtx_stat (a MEM_STAT_INFO)
extern int rtx_equal_p (rtx, rtx);
#define TARGET_ASM_MARK_DECL_PRESERVED hook_void_constcharptr
#endif
+#ifndef TARGET_ASM_OUTPUT_ANCHOR
+#ifdef ASM_OUTPUT_DEF
+#define TARGET_ASM_OUTPUT_ANCHOR default_asm_output_anchor
+#else
+#define TARGET_ASM_OUTPUT_ANCHOR NULL
+#endif
+#endif
+
#ifndef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL NULL
#endif
TARGET_ASM_FILE_END, \
TARGET_ASM_EXTERNAL_LIBCALL, \
TARGET_ASM_MARK_DECL_PRESERVED, \
+ TARGET_ASM_OUTPUT_ANCHOR, \
TARGET_ASM_OUTPUT_DWARF_DTPREL}
/* Scheduler hooks. All of these default to null pointers, which
#define TARGET_CANNOT_COPY_INSN_P NULL
#define TARGET_COMMUTATIVE_P hook_bool_rtx_commutative_p
#define TARGET_DELEGITIMIZE_ADDRESS hook_rtx_rtx_identity
+#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_rtx_false
+#define TARGET_MIN_ANCHOR_OFFSET 0
+#define TARGET_MAX_ANCHOR_OFFSET 0
+#define TARGET_USE_ANCHORS_FOR_SYMBOL_P default_use_anchors_for_symbol_p
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_false
#define TARGET_COMP_TYPE_ATTRIBUTES hook_int_tree_tree_1
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES hook_void_tree
TARGET_CANNOT_COPY_INSN_P, \
TARGET_COMMUTATIVE_P, \
TARGET_DELEGITIMIZE_ADDRESS, \
+ TARGET_USE_BLOCKS_FOR_CONSTANT_P, \
+ TARGET_MIN_ANCHOR_OFFSET, \
+ TARGET_MAX_ANCHOR_OFFSET, \
+ TARGET_USE_ANCHORS_FOR_SYMBOL_P, \
TARGET_FUNCTION_OK_FOR_SIBCALL, \
TARGET_IN_SMALL_DATA_P, \
TARGET_BINDS_LOCAL_P, \
linker to not dead code strip this symbol. */
void (*mark_decl_preserved) (const char *);
+ /* Output the definition of a section anchor. */
+ void (*output_anchor) (rtx);
+
/* Output a DTP-relative reference to a TLS symbol. */
void (*output_dwarf_dtprel) (FILE *file, int size, rtx x);
/* Given an address RTX, undo the effects of LEGITIMIZE_ADDRESS. */
rtx (* delegitimize_address) (rtx);
+ /* True if the given constant can be put into an object_block. */
+ bool (* use_blocks_for_constant_p) (enum machine_mode, rtx);
+
+ /* The minimum and maximum byte offsets for anchored addresses. */
+ HOST_WIDE_INT min_anchor_offset;
+ HOST_WIDE_INT max_anchor_offset;
+
+ /* True if section anchors can be used to access the given symbol. */
+ bool (* use_anchors_for_symbol_p) (rtx);
+
/* True if it is OK to do sibling call optimization for the specified
call expression EXP. DECL will be the called function, or NULL if
this is an indirect call. */
if (flag_mudflap)
mudflap_finish_file ();
+ output_object_blocks ();
+
/* Write out any pending weak symbol declarations. */
weak_finish ();
init_optimization_passes ();
}
+/* Return true if the current target supports -fsection-anchors. */
+
+static bool
+target_supports_section_anchors_p (void)
+{
+ if (targetm.min_anchor_offset == 0 && targetm.max_anchor_offset == 0)
+ return false;
+
+ if (targetm.asm_out.output_anchor == NULL)
+ return false;
+
+ return true;
+}
+
/* Process the options that have been parsed. */
static void
process_options (void)
OVERRIDE_OPTIONS;
#endif
+ if (flag_section_anchors && !target_supports_section_anchors_p ())
+ {
+ warning (OPT_fsection_anchors,
+ "this target does not support %qs", "-fsection-anchors");
+ flag_section_anchors = 0;
+ }
+
if (flag_short_enums == 2)
flag_short_enums = targetm.default_short_enums ();
if (flag_unit_at_a_time && ! lang_hooks.callgraph.expand_function)
flag_unit_at_a_time = 0;
+ if (!flag_unit_at_a_time)
+ flag_section_anchors = 0;
+
if (flag_value_profile_transformations)
flag_profile_values = 1;
expr_p = &TREE_OPERAND (*expr_p, 0))
continue;
obj = *expr_p;
- if (DECL_P (obj))
+ if (DECL_P (obj) && !DECL_RTL_SET_P (obj))
x = produce_memory_decl_rtl (obj, regno);
break;
/* Hash table of named sections. */
static GTY((param_is (section))) htab_t section_htab;
+/* A table of object_blocks, indexed by section. */
+static GTY((param_is (struct object_block))) htab_t object_block_htab;
+
+/* The next number to use for internal anchor labels. */
+static GTY(()) int anchor_labelno;
+
/* Helper routines for maintaining section_htab. */
static int
return htab_hash_string (old->named.name);
}
+/* Return a hash value for section SECT. */
+
+static hashval_t
+hash_section (section *sect)
+{
+ if (sect->common.flags & SECTION_NAMED)
+ return htab_hash_string (sect->named.name);
+ return sect->common.flags;
+}
+
+/* Helper routines for maintaining object_block_htab. */
+
+static int
+object_block_entry_eq (const void *p1, const void *p2)
+{
+ const struct object_block *old = p1;
+ const section *new = p2;
+
+ return old->sect == new;
+}
+
+static hashval_t
+object_block_entry_hash (const void *p)
+{
+ const struct object_block *old = p;
+ return hash_section (old->sect);
+}
+
/* Return a new unnamed section with the given fields. */
section *
return sect;
}
+/* Return true if the current compilation mode benefits from having
+ objects grouped into blocks. */
+
+static bool
+use_object_blocks_p (void)
+{
+ return flag_section_anchors;
+}
+
+/* Return the object_block structure for section SECT. Create a new
+ structure if we haven't created one already. */
+
+static struct object_block *
+get_block_for_section (section *sect)
+{
+ struct object_block *block;
+ void **slot;
+
+ slot = htab_find_slot_with_hash (object_block_htab, sect,
+ hash_section (sect), INSERT);
+ block = (struct object_block *) *slot;
+ if (block == NULL)
+ {
+ block = (struct object_block *)
+ ggc_alloc_cleared (sizeof (struct object_block));
+ block->sect = sect;
+ *slot = block;
+ }
+ return block;
+}
+
+/* Create a symbol with label LABEL and place it at byte offset
+ OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
+ is not yet known. LABEL must be a garbage-collected string. */
+
+static rtx
+create_block_symbol (const char *label, struct object_block *block,
+ HOST_WIDE_INT offset)
+{
+ rtx symbol;
+ unsigned int size;
+
+ /* Create the extended SYMBOL_REF. */
+ size = RTX_HDR_SIZE + sizeof (struct block_symbol);
+ symbol = ggc_alloc_zone (size, &rtl_zone);
+
+ /* Initialize the normal SYMBOL_REF fields. */
+ memset (symbol, 0, size);
+ PUT_CODE (symbol, SYMBOL_REF);
+ PUT_MODE (symbol, Pmode);
+ XSTR (symbol, 0) = label;
+ SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_IN_BLOCK;
+
+ /* Initialize the block_symbol stuff. */
+ SYMBOL_REF_BLOCK (symbol) = block;
+ SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
+
+ return symbol;
+}
+
static void
initialize_cold_section_name (void)
{
return -1;
}
\f
+/* Return true if it is possible to put DECL in an object_block. */
+
+static bool
+use_blocks_for_decl_p (tree decl)
+{
+ /* Only data DECLs can be placed into object blocks. */
+ if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != CONST_DECL)
+ return false;
+
+ if (TREE_CODE (decl) == VAR_DECL)
+ {
+ /* The object must be defined in this translation unit. */
+ if (DECL_EXTERNAL (decl))
+ return false;
+
+ /* There's no point using object blocks for something that is
+ isolated by definition. */
+ if (DECL_ONE_ONLY (decl))
+ return false;
+
+ /* Symbols that use .common cannot be put into blocks. */
+ if (DECL_COMMON (decl) && DECL_INITIAL (decl) == NULL)
+ return false;
+ }
+
+ /* We can only calculate block offsets if the decl has a known
+ constant size. */
+ if (DECL_SIZE_UNIT (decl) == NULL)
+ return false;
+ if (!host_integerp (DECL_SIZE_UNIT (decl), 1))
+ return false;
+
+ /* Detect decls created by dw2_force_const_mem. Such decls are
+ special because DECL_INITIAL doesn't specify the decl's true value.
+ dw2_output_indirect_constants will instead call assemble_variable
+ with dont_output_data set to 1 and then print the contents itself. */
+ if (DECL_INITIAL (decl) == decl)
+ return false;
+
+ return true;
+}
+
+/* Make sure block symbol SYMBOL is in section SECT, moving it to a
+ different block if necessary. */
+
+static void
+change_symbol_section (rtx symbol, section *sect)
+{
+ if (sect != SYMBOL_REF_BLOCK (symbol)->sect)
+ {
+ gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
+ SYMBOL_REF_BLOCK (symbol) = get_block_for_section (sect);
+ }
+}
+
+/* Return the section into which the given VAR_DECL or CONST_DECL
+ should be placed. */
+
+static section *
+get_variable_section (tree decl)
+{
+ int reloc;
+
+ if (DECL_INITIAL (decl) == error_mark_node)
+ reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
+ else if (DECL_INITIAL (decl))
+ reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
+ else
+ reloc = 0;
+
+ resolve_unique_section (decl, reloc, flag_data_sections);
+ if (IN_NAMED_SECTION (decl))
+ return get_named_section (decl, NULL, reloc);
+ else
+ return targetm.asm_out.select_section (decl, reloc, DECL_ALIGN (decl));
+}
+
/* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
have static storage duration. In other words, it should not be an
automatic variable, including PARM_DECLs.
if (DECL_RTL_SET_P (decl))
{
/* If the old RTL had the wrong mode, fix the mode. */
- if (GET_MODE (DECL_RTL (decl)) != DECL_MODE (decl))
- SET_DECL_RTL (decl, adjust_address_nv (DECL_RTL (decl),
- DECL_MODE (decl), 0));
+ x = DECL_RTL (decl);
+ if (GET_MODE (x) != DECL_MODE (decl))
+ SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
return;
decl attribute overrides another. */
targetm.encode_section_info (decl, DECL_RTL (decl), false);
+ /* If the old address was assigned to an object block, see whether
+ that block is still in the right section. */
+ if (MEM_P (x)
+ && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
+ && SYMBOL_REF_IN_BLOCK_P (XEXP (x, 0)))
+ change_symbol_section (XEXP (x, 0), get_variable_section (decl));
+
/* Make this function static known to the mudflap runtime. */
if (flag_mudflap && TREE_CODE (decl) == VAR_DECL)
mudflap_enqueue_decl (decl);
if (TREE_CODE (decl) == VAR_DECL && DECL_WEAK (decl))
DECL_COMMON (decl) = 0;
- x = gen_rtx_SYMBOL_REF (Pmode, name);
+ if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
+ {
+ section *sect = get_variable_section (decl);
+ x = create_block_symbol (name, get_block_for_section (sect), -1);
+ }
+ else
+ x = gen_rtx_SYMBOL_REF (Pmode, name);
SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
SET_SYMBOL_REF_DECL (x, decl);
return true;
}
+/* A subroutine of assemble_variable. Output the label and contents of
+ DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
+ is as for assemble_variable. */
+
+static void
+assemble_variable_contents (tree decl, const char *name,
+ bool dont_output_data)
+{
+ /* Do any machine/system dependent processing of the object. */
+#ifdef ASM_DECLARE_OBJECT_NAME
+ last_assemble_variable_decl = decl;
+ ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
+#else
+ /* Standard thing is just output label for the object. */
+ ASM_OUTPUT_LABEL (asm_out_file, name);
+#endif /* ASM_DECLARE_OBJECT_NAME */
+
+ if (!dont_output_data)
+ {
+ if (DECL_INITIAL (decl)
+ && DECL_INITIAL (decl) != error_mark_node
+ && !initializer_zerop (DECL_INITIAL (decl)))
+ /* Output the actual data. */
+ output_constant (DECL_INITIAL (decl),
+ tree_low_cst (DECL_SIZE_UNIT (decl), 1),
+ DECL_ALIGN (decl));
+ else
+ /* Leave space for it. */
+ assemble_zeros (tree_low_cst (DECL_SIZE_UNIT (decl), 1));
+ }
+}
+
/* Assemble everything that is needed for a variable or function declaration.
Not used for automatic variables, and not used for function definitions.
Should not be called for variables of incomplete structure type.
{
const char *name;
unsigned int align;
- int reloc = 0;
rtx decl_rtl;
+ bool in_block_p;
if (lang_hooks.decls.prepare_assemble_variable)
lang_hooks.decls.prepare_assemble_variable (decl);
return;
}
+ gcc_assert (MEM_P (decl_rtl));
+ gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
+ in_block_p = SYMBOL_REF_IN_BLOCK_P (XEXP (decl_rtl, 0));
name = XSTR (XEXP (decl_rtl, 0), 0);
if (TREE_PUBLIC (decl) && DECL_NAME (decl))
notice_global_symbol (decl);
#endif
}
}
+ /* Do not handle decls as common if they will be assigned a
+ specific section position. */
+ else if (in_block_p)
+ ;
else if (DECL_INITIAL (decl) == 0
|| DECL_INITIAL (decl) == error_mark_node
|| (flag_zero_initialized_in_bss
globalize_decl (decl);
/* Output any data that we will need to use the address of. */
- if (DECL_INITIAL (decl) == error_mark_node)
- reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
- else if (DECL_INITIAL (decl))
- {
- reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
- output_addressed_constants (DECL_INITIAL (decl));
- }
-
- /* Switch to the appropriate section. */
- resolve_unique_section (decl, reloc, flag_data_sections);
- variable_section (decl, reloc);
+ if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
+ output_addressed_constants (DECL_INITIAL (decl));
/* dbxout.c needs to know this. */
if (in_section && (in_section->common.flags & SECTION_CODE) != 0)
DECL_IN_TEXT_SECTION (decl) = 1;
- /* Output the alignment of this data. */
- if (align > BITS_PER_UNIT)
- ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (DECL_ALIGN_UNIT (decl)));
-
- /* Do any machine/system dependent processing of the object. */
-#ifdef ASM_DECLARE_OBJECT_NAME
- last_assemble_variable_decl = decl;
- ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
-#else
- /* Standard thing is just output label for the object. */
- ASM_OUTPUT_LABEL (asm_out_file, name);
-#endif /* ASM_DECLARE_OBJECT_NAME */
-
- if (!dont_output_data)
+ /* If the decl is part of an object_block, make sure that the decl
+ has been positioned within its block, but do not write out its
+ definition yet. output_object_blocks will do that later. */
+ if (in_block_p)
{
- if (DECL_INITIAL (decl)
- && DECL_INITIAL (decl) != error_mark_node
- && !initializer_zerop (DECL_INITIAL (decl)))
- /* Output the actual data. */
- output_constant (DECL_INITIAL (decl),
- tree_low_cst (DECL_SIZE_UNIT (decl), 1),
- align);
- else
- /* Leave space for it. */
- assemble_zeros (tree_low_cst (DECL_SIZE_UNIT (decl), 1));
+ gcc_assert (!dont_output_data);
+ place_block_symbol (XEXP (decl_rtl, 0));
+ }
+ else
+ {
+ switch_to_section (get_variable_section (decl));
+ if (align > BITS_PER_UNIT)
+ ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (DECL_ALIGN_UNIT (decl)));
+ assemble_variable_contents (decl, name, dont_output_data);
}
}
}
}
\f
+/* Return the alignment of constant EXP in bits. */
+
+static unsigned int
+get_constant_alignment (tree exp)
+{
+ unsigned int align;
+
+ align = TYPE_ALIGN (TREE_TYPE (exp));
+#ifdef CONSTANT_ALIGNMENT
+ align = CONSTANT_ALIGNMENT (exp, align);
+#endif
+ return align;
+}
+
+/* Return the section into which constant EXP should be placed. */
+
+static section *
+get_constant_section (tree exp)
+{
+ if (IN_NAMED_SECTION (exp))
+ return get_named_section (exp, NULL, compute_reloc_for_constant (exp));
+ else
+ return targetm.asm_out.select_section (exp,
+ compute_reloc_for_constant (exp),
+ get_constant_alignment (exp));
+}
+
+/* Return the size of constant EXP in bytes. */
+
+static HOST_WIDE_INT
+get_constant_size (tree exp)
+{
+ HOST_WIDE_INT size;
+
+ size = int_size_in_bytes (TREE_TYPE (exp));
+ if (TREE_CODE (exp) == STRING_CST)
+ size = MAX (TREE_STRING_LENGTH (exp), size);
+ return size;
+}
+
/* Subroutine of output_constant_def:
No constant equal to EXP is known to have been output.
Make a constant descriptor to enter EXP in the hash table.
ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
/* We have a symbol name; construct the SYMBOL_REF and the MEM. */
- symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
- SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
+ if (use_object_blocks_p ())
+ {
+ section *sect = get_constant_section (exp);
+ symbol = create_block_symbol (ggc_strdup (label),
+ get_block_for_section (sect), -1);
+ }
+ else
+ symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
+ SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
SET_SYMBOL_REF_DECL (symbol, desc->value);
TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
output_constant_def_contents (symbol);
}
+/* Subroutine of output_constant_def_contents. Output the definition
+ of constant EXP, which is pointed to by label LABEL. ALIGN is the
+ constant's alignment in bits. */
+
+static void
+assemble_constant_contents (tree exp, const char *label, unsigned int align)
+{
+ HOST_WIDE_INT size;
+
+ size = get_constant_size (exp);
+
+ /* Do any machine/system dependent processing of the constant. */
+#ifdef ASM_DECLARE_CONSTANT_NAME
+ ASM_DECLARE_CONSTANT_NAME (asm_out_file, label, exp, size);
+#else
+ /* Standard thing is just output label for the constant. */
+ ASM_OUTPUT_LABEL (asm_out_file, label);
+#endif /* ASM_DECLARE_CONSTANT_NAME */
+
+ /* Output the value of EXP. */
+ output_constant (exp, size, align);
+}
+
/* We must output the constant data referred to by SYMBOL; do so. */
static void
output_constant_def_contents (rtx symbol)
{
tree exp = SYMBOL_REF_DECL (symbol);
- const char *label = XSTR (symbol, 0);
- HOST_WIDE_INT size;
+ unsigned int align;
/* Make sure any other constants whose addresses appear in EXP
are assigned label numbers. */
- int reloc = compute_reloc_for_constant (exp);
-
- /* Align the location counter as required by EXP's data type. */
- unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
-#ifdef CONSTANT_ALIGNMENT
- align = CONSTANT_ALIGNMENT (exp, align);
-#endif
-
output_addressed_constants (exp);
/* We are no longer deferring this constant. */
TREE_ASM_WRITTEN (exp) = 1;
- if (IN_NAMED_SECTION (exp))
- switch_to_section (get_named_section (exp, NULL, reloc));
+ /* If the constant is part of an object block, make sure that the
+ decl has been positioned within its block, but do not write out
+ its definition yet. output_object_blocks will do that later. */
+ if (SYMBOL_REF_IN_BLOCK_P (symbol))
+ place_block_symbol (symbol);
else
- switch_to_section (targetm.asm_out.select_section (exp, reloc, align));
-
- if (align > BITS_PER_UNIT)
{
- ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
+ switch_to_section (get_constant_section (exp));
+ align = get_constant_alignment (exp);
+ if (align > BITS_PER_UNIT)
+ ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
+ assemble_constant_contents (exp, XSTR (symbol, 0), align);
}
-
- size = int_size_in_bytes (TREE_TYPE (exp));
- if (TREE_CODE (exp) == STRING_CST)
- size = MAX (TREE_STRING_LENGTH (exp), size);
-
- /* Do any machine/system dependent processing of the constant. */
-#ifdef ASM_DECLARE_CONSTANT_NAME
- ASM_DECLARE_CONSTANT_NAME (asm_out_file, label, exp, size);
-#else
- /* Standard thing is just output label for the constant. */
- ASM_OUTPUT_LABEL (asm_out_file, label);
-#endif /* ASM_DECLARE_CONSTANT_NAME */
-
- /* Output the value of EXP. */
- output_constant (exp, size, align);
if (flag_mudflap)
mudflap_enqueue_constant (exp);
}
/* Construct the SYMBOL_REF. Make sure to mark it as belonging to
the constants pool. */
- desc->sym = symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
- SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
+ if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
+ {
+ section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
+ symbol = create_block_symbol (ggc_strdup (label),
+ get_block_for_section (sect), -1);
+ }
+ else
+ symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
+ desc->sym = symbol;
+ SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
CONSTANT_POOL_ADDRESS_P (symbol) = 1;
SET_SYMBOL_REF_CONSTANT (symbol, desc);
current_function_uses_const_pool = 1;
}
}
-/* Worker function for output_constant_pool. Emit POOL. */
+/* Worker function for output_constant_pool. Emit constant DESC,
+ giving it ALIGN bits of alignment. */
static void
-output_constant_pool_1 (struct constant_descriptor_rtx *desc)
+output_constant_pool_1 (struct constant_descriptor_rtx *desc,
+ unsigned int align)
{
rtx x, tmp;
- if (!desc->mark)
- return;
x = desc->constant;
/* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
break;
}
- /* First switch to correct section. */
- switch_to_section (targetm.asm_out.select_rtx_section (desc->mode, x,
- desc->align));
-
#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
- desc->align, desc->labelno, done);
+ align, desc->labelno, done);
#endif
- assemble_align (desc->align);
+ assemble_align (align);
/* Output the label. */
targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
/* Output the data. */
- output_constant_pool_2 (desc->mode, x, desc->align);
+ output_constant_pool_2 (desc->mode, x, align);
/* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
sections have proper size. */
- if (desc->align > GET_MODE_BITSIZE (desc->mode)
+ if (align > GET_MODE_BITSIZE (desc->mode)
&& in_section
&& (in_section->common.flags & SECTION_MERGE))
- assemble_align (desc->align);
+ assemble_align (align);
#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
done:
#endif
for (desc = pool->first; desc ; desc = desc->next)
- output_constant_pool_1 (desc);
+ if (desc->mark)
+ {
+ /* If the constant is part of an object_block, make sure that
+ the constant has been positioned within its block, but do not
+ write out its definition yet. output_object_blocks will do
+ that later. */
+ if (SYMBOL_REF_IN_BLOCK_P (desc->sym))
+ place_block_symbol (desc->sym);
+ else
+ {
+ switch_to_section (targetm.asm_out.select_rtx_section
+ (desc->mode, desc->constant, desc->align));
+ output_constant_pool_1 (desc, desc->align);
+ }
+ }
#ifdef ASM_OUTPUT_POOL_EPILOGUE
ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
{
section_htab = htab_create_ggc (31, section_entry_hash,
section_entry_eq, NULL);
+ object_block_htab = htab_create_ggc (31, object_block_entry_hash,
+ object_block_entry_eq, NULL);
const_desc_htab = htab_create_ggc (1009, const_desc_hash,
const_desc_eq, NULL);
if (GET_CODE (symbol) != SYMBOL_REF)
return;
- flags = 0;
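+ /* SYMBOL_FLAG_IN_BLOCK is set once, when the SYMBOL_REF is created,
+ and cannot be recomputed here, so carry it over. */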
+ flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_IN_BLOCK;
if (TREE_CODE (decl) == FUNCTION_DECL)
flags |= SYMBOL_FLAG_FUNCTION;
if (targetm.binds_local_p (decl))
return str + (*str == '*');
}
+#ifdef ASM_OUTPUT_DEF
+/* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
+ anchor relative to ".", the current section position. */
+
+void
+default_asm_output_anchor (rtx symbol)
+{
+ char buffer[100];
+
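+ /* With a typical ELF-style definition of ASM_OUTPUT_DEF, this emits
+ something like ".set .LANCHOR0,. + 16" (illustrative values). */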
+ sprintf (buffer, ". + " HOST_WIDE_INT_PRINT_DEC,
+ SYMBOL_REF_BLOCK_OFFSET (symbol));
+ ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
+}
+#endif
+
+/* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
+
+bool
+default_use_anchors_for_symbol_p (rtx symbol)
+{
+ section *sect;
+ tree decl;
+
+ /* Don't use anchors for mergeable sections. The linker might move
+ the objects around. */
+ sect = SYMBOL_REF_BLOCK (symbol)->sect;
+ if (sect->common.flags & SECTION_MERGE)
+ return false;
+
+ /* Don't use anchors for small data sections. The small data register
+ acts as an anchor for such sections. */
+ if (sect->common.flags & SECTION_SMALL)
+ return false;
+
+ decl = SYMBOL_REF_DECL (symbol);
+ if (decl && DECL_P (decl))
+ {
+ /* Don't use section anchors for decls that might be defined by
+ other modules. */
+ if (!targetm.binds_local_p (decl))
+ return false;
+
+ /* Don't use section anchors for decls that will be placed in a
+ small data section. */
+ /* ??? Ideally, this check would be redundant with the SECTION_SMALL
+ one above. The problem is that we only use SECTION_SMALL for
+ sections that should be marked as small in the section directive. */
+ if (targetm.in_small_data_p (decl))
+ return false;
+ }
+ return true;
+}
+
/* Assume ELF-ish defaults, since that's pretty much the most liberal
wrt cross-module name binding. */
new_section->common.flags |= SECTION_DECLARED;
}
+/* If block symbol SYMBOL has not yet been assigned an offset, place
+ it at the end of its block. */
+
+void
+place_block_symbol (rtx symbol)
+{
+ unsigned HOST_WIDE_INT size, mask, offset;
+ struct constant_descriptor_rtx *desc;
+ unsigned int alignment;
+ struct object_block *block;
+ tree decl;
+
+ if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
+ return;
+
+ /* Work out the symbol's size and alignment. */
+ if (CONSTANT_POOL_ADDRESS_P (symbol))
+ {
+ desc = SYMBOL_REF_CONSTANT (symbol);
+ alignment = desc->align;
+ size = GET_MODE_SIZE (desc->mode);
+ }
+ else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
+ {
+ decl = SYMBOL_REF_DECL (symbol);
+ alignment = get_constant_alignment (decl);
+ size = get_constant_size (decl);
+ }
+ else
+ {
+ decl = SYMBOL_REF_DECL (symbol);
+ alignment = DECL_ALIGN (decl);
+ size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ }
+
+ /* Calculate the object's offset from the start of the block. */
+ block = SYMBOL_REF_BLOCK (symbol);
+ mask = alignment / BITS_PER_UNIT - 1;
+ offset = (block->size + mask) & ~mask;
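+ /* E.g. a block->size of 13 bytes and a 32-bit alignment give
+ mask == 3 and offset == 16 (illustrative values). */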
+ SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
+
+ /* Record the block's new alignment and size. */
+ block->alignment = MAX (block->alignment, alignment);
+ block->size = offset + size;
+
+ VEC_safe_push (rtx, gc, block->objects, symbol);
+}
+
+/* Return the anchor that should be used to address byte offset OFFSET
+ from the first object in BLOCK. MODEL is the TLS model used
+ to access it. */
+
+rtx
+get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
+ enum tls_model model)
+{
+ char label[100];
+ unsigned int begin, middle, end;
+ unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
+ rtx anchor;
+
+ /* Work out the anchor's offset. Use an offset of 0 for the first
+ anchor so that we don't pessimize the case where we take the address
+ of a variable at the beginning of the block. This is particularly
+ useful when a block has only one variable assigned to it.
+
+ We try to place anchors RANGE bytes apart, so there can then be
+ anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
+ a ptr_mode offset. With some target settings, the lowest such
+ anchor might be out of range for the lowest ptr_mode offset;
+ likewise the highest anchor for the highest offset. Use anchors
+ at the extreme ends of the ptr_mode range in such cases.
+
+ All arithmetic uses unsigned integers in order to avoid
+ signed overflow. */
+ max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
+ min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
+ range = max_offset - min_offset + 1;
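+ /* A RANGE of zero means that MIN_OFFSET and MAX_OFFSET together
+ cover the whole unsigned HOST_WIDE_INT range, so every offset
+ is reachable from an anchor at offset 0. */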
+ if (range == 0)
+ offset = 0;
+ else
+ {
+ bias = (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (ptr_mode) - 1);
+ if (offset < 0)
+ {
+ delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
+ delta -= delta % range;
+ if (delta > bias)
+ delta = bias;
+ offset = (HOST_WIDE_INT) (-delta);
+ }
+ else
+ {
+ delta = (unsigned HOST_WIDE_INT) offset - min_offset;
+ delta -= delta % range;
+ if (delta > bias - 1)
+ delta = bias - 1;
+ offset = (HOST_WIDE_INT) delta;
+ }
+ }
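+
+ /* As an illustrative example: with min_anchor_offset == -0x8000 and
+ max_anchor_offset == 0x7fff, RANGE is 0x10000, so an input offset
+ of 70000 rounds down to an anchor offset of 65536 and the object
+ is then addressed as anchor + 4464. */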
+
+ /* Do a binary search to see if there's already an anchor we can use.
+ Set BEGIN to the new anchor's index if not. */
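+ /* block->anchors is kept sorted by block offset and then by TLS
+ model; the insertion at BEGIN below preserves that order. */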
+ begin = 0;
+ end = VEC_length (rtx, block->anchors);
+ while (begin != end)
+ {
+ middle = (end + begin) / 2;
+ anchor = VEC_index (rtx, block->anchors, middle);
+ if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
+ end = middle;
+ else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
+ begin = middle + 1;
+ else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
+ end = middle;
+ else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
+ begin = middle + 1;
+ else
+ return anchor;
+ }
+
+ /* Create a new anchor with a unique label. */
+ ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
+ anchor = create_block_symbol (ggc_strdup (label), block, offset);
+ SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
+ SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
+
+ /* Insert it at index BEGIN. */
+ VEC_safe_insert (rtx, gc, block->anchors, begin, anchor);
+ return anchor;
+}
+
+/* Output the objects in BLOCK. */
+
+static void
+output_object_block (struct object_block *block)
+{
+ struct constant_descriptor_rtx *desc;
+ unsigned int i;
+ HOST_WIDE_INT offset;
+ tree decl;
+ rtx symbol;
+
+ if (block->objects == NULL)
+ return;
+
+ /* Switch to the section and make sure that the first byte is
+ suitably aligned. */
+ switch_to_section (block->sect);
+ assemble_align (block->alignment);
+
+ /* Define the values of all anchors relative to the current section
+ position. */
+ for (i = 0; VEC_iterate (rtx, block->anchors, i, symbol); i++)
+ targetm.asm_out.output_anchor (symbol);
+
+ /* Output the objects themselves. */
+ offset = 0;
+ for (i = 0; VEC_iterate (rtx, block->objects, i, symbol); i++)
+ {
+ /* Move to the object's offset, padding with zeros if necessary. */
+ assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
+ offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
+ if (CONSTANT_POOL_ADDRESS_P (symbol))
+ {
+ desc = SYMBOL_REF_CONSTANT (symbol);
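+ /* place_block_symbol has already chosen an offset that satisfies
+ the constant's alignment, so emit it with byte alignment
+ instead of padding again here. */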
+ output_constant_pool_1 (desc, 1);
+ offset += GET_MODE_SIZE (desc->mode);
+ }
+ else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
+ {
+ decl = SYMBOL_REF_DECL (symbol);
+ assemble_constant_contents (decl, XSTR (symbol, 0),
+ get_constant_alignment (decl));
+ offset += get_constant_size (decl);
+ }
+ else
+ {
+ decl = SYMBOL_REF_DECL (symbol);
+ assemble_variable_contents (decl, XSTR (symbol, 0), false);
+ offset += tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ }
+ }
+}
+
+/* A htab_traverse callback used to call output_object_block for
+ each member of object_block_htab. */
+
+static int
+output_object_block_htab (void **slot, void *data ATTRIBUTE_UNUSED)
+{
+ output_object_block ((struct object_block *) (*slot));
+ return 1;
+}
+
+/* Output the definitions of all object_blocks. */
+
+void
+output_object_blocks (void)
+{
+ htab_traverse (object_block_htab, output_object_block_htab, NULL);
+}
+
#include "gt-varasm.h"