}
else
{
- rtx r3, got, tga, tmp1, tmp2, eqv;
+ rtx r3, got, tga, tmp1, tmp2, call_insn;
/* We currently use relocations like @got@tlsgd for tls, which
means the linker will handle allocation of tls entries, placing
them in the .got section.  So use a pointer to the .got section,
not one to secondary TOC sections used by 64-bit -mminimal-toc,
or to secondary GOT sections used by 32-bit -fPIC.  */

if (model == TLS_MODEL_GLOBAL_DYNAMIC)
{
r3 = gen_rtx_REG (Pmode, 3);
tga = rs6000_tls_get_addr ();
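+ /* Emit the __tls_get_addr libcall as an ordinary call first, so
+    the middle-end creates a real call insn; its pattern is then
+    replaced with the ABI-specific TLS pattern below.  */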
+ emit_library_call_value (tga, dest, LCT_CONST, Pmode, 1, r3, Pmode);
if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
else if (DEFAULT_ABI == ABI_AIX)
insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
else if (DEFAULT_ABI == ABI_V4)
insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
else
gcc_unreachable ();
-
- start_sequence ();
- insn = emit_call_insn (insn);
- RTL_CONST_CALL_P (insn) = 1;
- use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
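+ /* Retrieve the call insn emitted by emit_library_call_value and
+    rewrite its pattern to the tls_gd form selected above.  */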
+ call_insn = last_call_insn ();
+ PATTERN (call_insn) = insn;
if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
- use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
- insn = get_insns ();
- end_sequence ();
- emit_libcall_block (insn, dest, r3, addr);
+ use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn),
+ pic_offset_table_rtx);
}
else if (model == TLS_MODEL_LOCAL_DYNAMIC)
{
r3 = gen_rtx_REG (Pmode, 3);
tga = rs6000_tls_get_addr ();
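+ /* As in the global-dynamic case: emit the libcall first, then
+    rewrite the resulting call insn's pattern below.  */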
+ tmp1 = gen_reg_rtx (Pmode);
+ emit_library_call_value (tga, tmp1, LCT_CONST, Pmode, 1, r3, Pmode);
if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
else if (DEFAULT_ABI == ABI_AIX)
insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
else if (DEFAULT_ABI == ABI_V4)
insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
else
gcc_unreachable ();
-
- start_sequence ();
- insn = emit_call_insn (insn);
- RTL_CONST_CALL_P (insn) = 1;
- use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
+ call_insn = last_call_insn ();
+ PATTERN (call_insn) = insn;
if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
- use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
- insn = get_insns ();
- end_sequence ();
- tmp1 = gen_reg_rtx (Pmode);
- eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
- UNSPEC_TLSLD);
- emit_libcall_block (insn, tmp1, r3, eqv);
+ use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn),
+ pic_offset_table_rtx);
+
if (rs6000_tls_size == 16)
{
if (TARGET_64BIT)