+\f
/* Length in units of the trampoline instruction code.  Used by
   pa_trampoline_init as the upper bound of the byte range whose
   cache lines must be flushed after the template is patched.  */

#define TRAMPOLINE_CODE_SIZE (TARGET_64BIT ? 24 : (TARGET_PA_20 ? 32 : 40))
+
+
/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.

   The trampoline sets the static chain pointer to STATIC_CHAIN_REGNUM
   and then branches to the specified routine.

   This code template is copied from text segment to stack location
   and then patched with pa_trampoline_init to contain valid values,
   and then entered as a subroutine.

   It is best to keep this as small as possible to avoid having to
   flush multiple lines in the cache.  */

static void
pa_asm_trampoline_template (FILE *f)
{
  if (!TARGET_64BIT)
    {
      /* %r22 holds the trampoline address at entry.  Load the target
	 function address patched in at offset 36 by pa_trampoline_init.  */
      fputs ("\tldw 36(%r22),%r21\n", f);
      /* Skip the plabel dereference below when bit 30 is clear.
	 NOTE(review): bit 30 (PA numbering) appears to be the plabel
	 indicator bit in the pointer -- confirm against the runtime ABI.  */
      fputs ("\tbb,>=,n %r21,30,.+16\n", f);
      /* Clear the low two bits of the plabel pointer; only the
	 mnemonic spelling differs between the assembler dialects.  */
      if (ASSEMBLER_DIALECT == 0)
	fputs ("\tdepi 0,31,2,%r21\n", f);
      else
	fputs ("\tdepwi 0,31,2,%r21\n", f);
      /* Load %r19 and the real code address from the plabel.  */
      fputs ("\tldw 4(%r21),%r19\n", f);
      fputs ("\tldw 0(%r21),%r21\n", f);
      if (TARGET_PA_20)
	{
	  /* PA 2.0: branch via %r21; the static chain patched in at
	     offset 40 is loaded into %r29 in the delay slot.  */
	  fputs ("\tbve (%r21)\n", f);
	  fputs ("\tldw 40(%r22),%r29\n", f);
	  fputs ("\t.word 0\n", f);
	  fputs ("\t.word 0\n", f);
	}
      else
	{
	  /* PA 1.x: an external branch needs the space register of
	     the target loaded into %sr0 first.  */
	  fputs ("\tldsid (%r21),%r1\n", f);
	  fputs ("\tmtsp %r1,%sr0\n", f);
	  fputs ("\tbe 0(%sr0,%r21)\n", f);
	  fputs ("\tldw 40(%r22),%r29\n", f);
	}
      /* Space for the variable parts patched by pa_trampoline_init
	 (function address, static chain, and the two plabel words).  */
      fputs ("\t.word 0\n", f);
      fputs ("\t.word 0\n", f);
      fputs ("\t.word 0\n", f);
      fputs ("\t.word 0\n", f);
    }
  else
    {
      /* 64-bit: the fat pointer (plabel) sits at the start of the
	 trampoline; pa_trampoline_init points it at the code below,
	 which begins at offset 32.  */
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      fputs ("\t.dword 0\n", f);
      /* %r31 <- address of this instruction (trampoline offset 32).  */
      fputs ("\tmfia %r31\n", f);
      /* 24(%r31) is trampoline offset 56, where pa_trampoline_init
	 stores the target function descriptor address.  */
      fputs ("\tldd 24(%r31),%r1\n", f);
      /* Load the global pointer and code address from the target's
	 function descriptor, then branch to the code.  */
      fputs ("\tldd 24(%r1),%r27\n", f);
      fputs ("\tldd 16(%r1),%r1\n", f);
      fputs ("\tbve (%r1)\n", f);
      /* Delay slot: 32(%r31) is trampoline offset 64, where the
	 static chain value is stored.  */
      fputs ("\tldd 32(%r31),%r31\n", f);
      fputs ("\t.dword 0 ; fptr\n", f);
      fputs ("\t.dword 0 ; static link\n", f);
    }
}
+
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   Move the function address to the trampoline template at offset 36.
   Move the static chain value to trampoline template at offset 40.
   Move the trampoline address to trampoline template at offset 44.
   Move r19 to trampoline template at offset 48.  The latter two
   words create a plabel for the indirect call to the trampoline.

   A similar sequence is used for the 64-bit port but the plabel is
   at the beginning of the trampoline.

   Finally, the cache entries for the trampoline code are flushed.
   This is necessary to ensure that the trampoline instruction sequence
   is written to memory prior to any attempts at prefetching the code
   sequence.  */

static void
pa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx start_addr = gen_reg_rtx (Pmode);
  rtx end_addr = gen_reg_rtx (Pmode);
  rtx line_length = gen_reg_rtx (Pmode);
  rtx r_tramp, tmp;

  /* Copy the fixed template emitted by pa_asm_trampoline_template
     into the stack slot M_TRAMP, then patch the variable words.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
  r_tramp = force_reg (Pmode, XEXP (m_tramp, 0));

  if (!TARGET_64BIT)
    {
      /* Target address and static chain, read by the template code
	 from offsets 36 and 40 respectively.  */
      tmp = adjust_address (m_tramp, Pmode, 36);
      emit_move_insn (tmp, fnaddr);
      tmp = adjust_address (m_tramp, Pmode, 40);
      emit_move_insn (tmp, chain_value);

      /* Create a fat pointer for the trampoline: the trampoline
	 address at offset 44 and %r19 at offset 48.  */
      tmp = adjust_address (m_tramp, Pmode, 44);
      emit_move_insn (tmp, r_tramp);
      tmp = adjust_address (m_tramp, Pmode, 48);
      emit_move_insn (tmp, gen_rtx_REG (Pmode, 19));

      /* fdc and fic only use registers for the address to flush,
	 they do not accept integer displacements.  We align the
	 start and end addresses to the beginning of their respective
	 cache lines to minimize the number of lines flushed.  */
      emit_insn (gen_andsi3 (start_addr, r_tramp,
			     GEN_INT (-MIN_CACHELINE_SIZE)));
      tmp = force_reg (Pmode, plus_constant (r_tramp, TRAMPOLINE_CODE_SIZE-1));
      emit_insn (gen_andsi3 (end_addr, tmp,
			     GEN_INT (-MIN_CACHELINE_SIZE)));
      emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
      emit_insn (gen_dcacheflushsi (start_addr, end_addr, line_length));
      emit_insn (gen_icacheflushsi (start_addr, end_addr, line_length,
				    gen_reg_rtx (Pmode),
				    gen_reg_rtx (Pmode)));
    }
  else
    {
      /* The template code reads the target descriptor address from
	 offset 56 and the static chain from offset 64.  */
      tmp = adjust_address (m_tramp, Pmode, 56);
      emit_move_insn (tmp, fnaddr);
      tmp = adjust_address (m_tramp, Pmode, 64);
      emit_move_insn (tmp, chain_value);

      /* Create a fat pointer for the trampoline.  Its first word
	 (offset 16) points at the template code, which starts at
	 offset 32; the second word (offset 24) is filled with %r27.  */
      tmp = adjust_address (m_tramp, Pmode, 16);
      emit_move_insn (tmp, force_reg (Pmode, plus_constant (r_tramp, 32)));
      tmp = adjust_address (m_tramp, Pmode, 24);
      emit_move_insn (tmp, gen_rtx_REG (Pmode, 27));

      /* fdc and fic only use registers for the address to flush,
	 they do not accept integer displacements.  We align the
	 start and end addresses to the beginning of their respective
	 cache lines to minimize the number of lines flushed.  */
      tmp = force_reg (Pmode, plus_constant (r_tramp, 32));
      emit_insn (gen_anddi3 (start_addr, tmp,
			     GEN_INT (-MIN_CACHELINE_SIZE)));
      tmp = force_reg (Pmode, plus_constant (tmp, TRAMPOLINE_CODE_SIZE - 1));
      emit_insn (gen_anddi3 (end_addr, tmp,
			     GEN_INT (-MIN_CACHELINE_SIZE)));
      emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
      emit_insn (gen_dcacheflushdi (start_addr, end_addr, line_length));
      emit_insn (gen_icacheflushdi (start_addr, end_addr, line_length,
				    gen_reg_rtx (Pmode),
				    gen_reg_rtx (Pmode)));
    }
}
+
+/* Perform any machine-specific adjustment in the address of the trampoline.
+ ADDR contains the address that was passed to pa_trampoline_init.
+ Adjust the trampoline address to point to the plabel at offset 44. */
+
+static rtx
+pa_trampoline_adjust_address (rtx addr)
+{
+ if (!TARGET_64BIT)
+ addr = memory_address (Pmode, plus_constant (addr, 46));
+ return addr;
+}
+
+static rtx
+pa_delegitimize_address (rtx orig_x)
+{
+ rtx x = delegitimize_mem_from_attrs (orig_x);
+
+ if (GET_CODE (x) == LO_SUM
+ && GET_CODE (XEXP (x, 1)) == UNSPEC
+ && XINT (XEXP (x, 1), 1) == UNSPEC_DLTIND14R)
+ return gen_const_mem (Pmode, XVECEXP (XEXP (x, 1), 0, 0));
+ return x;
+}
+\f
+static rtx
+pa_internal_arg_pointer (void)
+{
+ /* The argument pointer and the hard frame pointer are the same in
+ the 32-bit runtime, so we don't need a copy. */
+ if (TARGET_64BIT)
+ return copy_to_reg (virtual_incoming_args_rtx);
+ else
+ return virtual_incoming_args_rtx;
+}
+
+/* Given FROM and TO register numbers, say whether this elimination is allowed.
+ Frame pointer elimination is automatically handled. */
+
+static bool
+pa_can_eliminate (const int from, const int to)
+{
+ /* The argument cannot be eliminated in the 64-bit runtime. */
+ if (TARGET_64BIT && from == ARG_POINTER_REGNUM)
+ return false;
+
+ return (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM
+ ? ! frame_pointer_needed
+ : true);
+}
+
+/* Define the offset between two registers, FROM to be eliminated and its
+ replacement TO, at the start of a routine. */
+HOST_WIDE_INT
+pa_initial_elimination_offset (int from, int to)
+{
+ HOST_WIDE_INT offset;
+
+ if ((from == HARD_FRAME_POINTER_REGNUM || from == FRAME_POINTER_REGNUM)
+ && to == STACK_POINTER_REGNUM)
+ offset = -pa_compute_frame_size (get_frame_size (), 0);
+ else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
+ offset = 0;
+ else
+ gcc_unreachable ();
+
+ return offset;
+}
+
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  Mark registers that
   are unusable in the current configuration as fixed/call-used.  */

static void
pa_conditional_register_usage (void)
{
  int i;

  /* Without PA 1.1 features (32-bit PA 1.0), remove FP registers 56
     and above and the odd-numbered registers from 33 to 55.
     NOTE(review): presumably these are the FP register halves and
     upper FP registers that PA 1.0 lacks -- confirm against the
     FP_REG_FIRST/FP_REG_LAST layout in pa.h.  */
  if (!TARGET_64BIT && !TARGET_PA_11)
    {
      for (i = 56; i <= FP_REG_LAST; i++)
	fixed_regs[i] = call_used_regs[i] = 1;
      for (i = 33; i < 56; i += 2)
	fixed_regs[i] = call_used_regs[i] = 1;
    }
  /* When FP registers are disabled or soft float is in use, no FP
     register may be allocated at all.  */
  if (TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
    {
      for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
	fixed_regs[i] = call_used_regs[i] = 1;
    }
  /* PIC code dedicates the PIC offset table register.  */
  if (flag_pic)
    fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
+
+/* Target hook for c_mode_for_suffix. */
+
+static enum machine_mode
+pa_c_mode_for_suffix (char suffix)
+{
+ if (HPUX_LONG_DOUBLE_LIBRARY)
+ {
+ if (suffix == 'q')
+ return TFmode;
+ }
+
+ return VOIDmode;
+}
+
+/* Target hook for function_section. */
+
+static section *
+pa_function_section (tree decl, enum node_frequency freq,
+ bool startup, bool exit)
+{
+ /* Put functions in text section if target doesn't have named sections. */
+ if (!targetm_common.have_named_sections)
+ return text_section;
+
+ /* Force nested functions into the same section as the containing
+ function. */
+ if (decl
+ && DECL_SECTION_NAME (decl) == NULL_TREE
+ && DECL_CONTEXT (decl) != NULL_TREE
+ && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
+ && DECL_SECTION_NAME (DECL_CONTEXT (decl)) == NULL_TREE)
+ return function_section (DECL_CONTEXT (decl));
+
+ /* Otherwise, use the default function section. */
+ return default_function_section (decl, freq, startup, exit);
+}
+
+/* Implement TARGET_LEGITIMATE_CONSTANT_P.
+
+ In 64-bit mode, we reject CONST_DOUBLES. We also reject CONST_INTS
+ that need more than three instructions to load prior to reload. This
+ limit is somewhat arbitrary. It takes three instructions to load a
+ CONST_INT from memory but two are memory accesses. It may be better
+ to increase the allowed range for CONST_INTS. We may also be able
+ to handle CONST_DOUBLES. */
+
+static bool
+pa_legitimate_constant_p (enum machine_mode mode, rtx x)
+{
+ if (GET_MODE_CLASS (mode) == MODE_FLOAT && x != CONST0_RTX (mode))
+ return false;
+
+ if (!NEW_HP_ASSEMBLER && !TARGET_GAS && GET_CODE (x) == LABEL_REF)
+ return false;
+
+ /* TLS_MODEL_GLOBAL_DYNAMIC and TLS_MODEL_LOCAL_DYNAMIC are not
+ legitimate constants. */
+ if (PA_SYMBOL_REF_TLS_P (x))
+ {
+ enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
+
+ if (model == TLS_MODEL_GLOBAL_DYNAMIC || model == TLS_MODEL_LOCAL_DYNAMIC)
+ return false;
+ }
+
+ if (TARGET_64BIT && GET_CODE (x) == CONST_DOUBLE)
+ return false;
+
+ if (TARGET_64BIT
+ && HOST_BITS_PER_WIDE_INT > 32
+ && GET_CODE (x) == CONST_INT
+ && !reload_in_progress
+ && !reload_completed
+ && !LEGITIMATE_64BIT_CONST_INT_P (INTVAL (x))
+ && !pa_cint_ok_for_move (INTVAL (x)))
+ return false;
+
+ if (function_label_operand (x, mode))
+ return false;
+
+ return true;
+}
+
+/* Implement TARGET_SECTION_TYPE_FLAGS. */
+
+static unsigned int
+pa_section_type_flags (tree decl, const char *name, int reloc)
+{
+ unsigned int flags;
+
+ flags = default_section_type_flags (decl, name, reloc);
+
+ /* Function labels are placed in the constant pool. This can
+ cause a section conflict if decls are put in ".data.rel.ro"
+ or ".data.rel.ro.local" using the __attribute__ construct. */
+ if (strcmp (name, ".data.rel.ro") == 0
+ || strcmp (name, ".data.rel.ro.local") == 0)
+ flags |= SECTION_WRITE | SECTION_RELRO;
+
+ return flags;
+}
+