int rs6000_debug_stack; /* debug stack applications */
int rs6000_debug_arg; /* debug argument handling */
+const char *rs6000_traceback_name;
+static enum {
+ traceback_default = 0,
+ traceback_none,
+ traceback_part,
+ traceback_full
+} rs6000_traceback;
+
/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];
error ("unknown -mdebug-%s switch", rs6000_debug_name);
}
+ if (rs6000_traceback_name)
+ {
+ if (! strncmp (rs6000_traceback_name, "full", 4))
+ rs6000_traceback = traceback_full;
+ else if (! strncmp (rs6000_traceback_name, "part", 4))
+ rs6000_traceback = traceback_part;
+ else if (! strncmp (rs6000_traceback_name, "no", 2))
+ rs6000_traceback = traceback_none;
+ else
+ error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
+ rs6000_traceback_name);
+ }
+
/* Set size of long double */
rs6000_long_double_type_size = 64;
if (rs6000_long_double_size_string)
else
cum->words += RS6000_ARG_SIZE (mode, type);
}
- else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
- {
- cum->words += RS6000_ARG_SIZE (mode, type);
- cum->sysv_gregno++;
- }
+ else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
+ && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
+ cum->sysv_gregno++;
else if (DEFAULT_ABI == ABI_V4)
{
if (TARGET_HARD_FLOAT && TARGET_FPRS
else
n_words = RS6000_ARG_SIZE (mode, type);
- /* Long long is put in odd registers. */
+ /* Long long and SPE vectors are put in odd registers. */
if (n_words == 2 && (gregno & 1) == 0)
gregno += 1;
- /* Long long is not split between registers and stack. */
+ /* Long long and SPE vectors are not split between registers
+ and stack. */
if (gregno + n_words - 1 > GP_ARG_MAX_REG)
{
/* Long long is aligned on the stack. */
else
return NULL;
}
- else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
+ else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
{
- if (cum->sysv_gregno - 1 <= GP_ARG_MAX_REG)
+ if (cum->sysv_gregno <= GP_ARG_MAX_REG)
return gen_rtx_REG (mode, cum->sysv_gregno);
else
return NULL;
else
n_words = RS6000_ARG_SIZE (mode, type);
- /* Long long is put in odd registers. */
+ /* Long long and SPE vectors are put in odd registers. */
if (n_words == 2 && (gregno & 1) == 0)
gregno += 1;
- /* Long long is not split between registers and stack. */
+ /* Long long and SPE vectors are not split between registers
+ and stack. */
if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
- return gen_rtx_REG (mode, gregno);
+ {
+	      /* SPE vectors passed as unnamed (varargs) arguments get
+		 split into 2 registers.  */
+ if (TARGET_SPE && TARGET_SPE_ABI
+ && SPE_VECTOR_MODE (mode) && !named)
+ {
+ rtx r1, r2;
+ enum machine_mode m = GET_MODE_INNER (mode);
+
+ r1 = gen_rtx_REG (m, gregno);
+ r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
+ r2 = gen_rtx_REG (m, gregno + 1);
+ r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
+ return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
+ }
+ return gen_rtx_REG (mode, gregno);
+ }
else
return NULL;
}
enum machine_mode tmode = insn_data[icode].operand[0].mode;
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
+ if (icode == CODE_FOR_nothing)
+ /* Builtin not supported on this processor. */
+ return 0;
+
/* If we got invalid arguments bail out before generating bad rtl. */
if (arg0 == error_mark_node)
return const0_rtx;
- switch (icode)
+ if (icode == CODE_FOR_altivec_vspltisb
+ || icode == CODE_FOR_altivec_vspltish
+ || icode == CODE_FOR_altivec_vspltisw
+ || icode == CODE_FOR_spe_evsplatfi
+ || icode == CODE_FOR_spe_evsplati)
{
/* Only allow 5-bit *signed* literals. */
- case CODE_FOR_altivec_vspltisb:
- case CODE_FOR_altivec_vspltish:
- case CODE_FOR_altivec_vspltisw:
- case CODE_FOR_spe_evsplatfi:
- case CODE_FOR_spe_evsplati:
if (GET_CODE (op0) != CONST_INT
|| INTVAL (op0) > 0x1f
|| INTVAL (op0) < -0x1f)
error ("argument 1 must be a 5-bit signed literal");
return const0_rtx;
}
- break;
- default:
- break;
}
if (target == 0
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
enum machine_mode mode1 = insn_data[icode].operand[2].mode;
+ if (icode == CODE_FOR_nothing)
+ /* Builtin not supported on this processor. */
+ return 0;
+
/* If we got invalid arguments bail out before generating bad rtl. */
if (arg0 == error_mark_node || arg1 == error_mark_node)
return const0_rtx;
- switch (icode)
+ if (icode == CODE_FOR_altivec_vcfux
+ || icode == CODE_FOR_altivec_vcfsx
+ || icode == CODE_FOR_altivec_vctsxs
+ || icode == CODE_FOR_altivec_vctuxs
+ || icode == CODE_FOR_altivec_vspltb
+ || icode == CODE_FOR_altivec_vsplth
+ || icode == CODE_FOR_altivec_vspltw
+ || icode == CODE_FOR_spe_evaddiw
+ || icode == CODE_FOR_spe_evldd
+ || icode == CODE_FOR_spe_evldh
+ || icode == CODE_FOR_spe_evldw
+ || icode == CODE_FOR_spe_evlhhesplat
+ || icode == CODE_FOR_spe_evlhhossplat
+ || icode == CODE_FOR_spe_evlhhousplat
+ || icode == CODE_FOR_spe_evlwhe
+ || icode == CODE_FOR_spe_evlwhos
+ || icode == CODE_FOR_spe_evlwhou
+ || icode == CODE_FOR_spe_evlwhsplat
+ || icode == CODE_FOR_spe_evlwwsplat
+ || icode == CODE_FOR_spe_evrlwi
+ || icode == CODE_FOR_spe_evslwi
+ || icode == CODE_FOR_spe_evsrwis
+ || icode == CODE_FOR_spe_evsrwiu)
{
/* Only allow 5-bit unsigned literals. */
- case CODE_FOR_altivec_vcfux:
- case CODE_FOR_altivec_vcfsx:
- case CODE_FOR_altivec_vctsxs:
- case CODE_FOR_altivec_vctuxs:
- case CODE_FOR_altivec_vspltb:
- case CODE_FOR_altivec_vsplth:
- case CODE_FOR_altivec_vspltw:
- case CODE_FOR_spe_evaddiw:
- case CODE_FOR_spe_evldd:
- case CODE_FOR_spe_evldh:
- case CODE_FOR_spe_evldw:
- case CODE_FOR_spe_evlhhesplat:
- case CODE_FOR_spe_evlhhossplat:
- case CODE_FOR_spe_evlhhousplat:
- case CODE_FOR_spe_evlwhe:
- case CODE_FOR_spe_evlwhos:
- case CODE_FOR_spe_evlwhou:
- case CODE_FOR_spe_evlwhsplat:
- case CODE_FOR_spe_evlwwsplat:
- case CODE_FOR_spe_evrlwi:
- case CODE_FOR_spe_evslwi:
- case CODE_FOR_spe_evsrwis:
- case CODE_FOR_spe_evsrwiu:
if (TREE_CODE (arg1) != INTEGER_CST
|| TREE_INT_CST_LOW (arg1) & ~0x1f)
{
error ("argument 2 must be a 5-bit unsigned literal");
return const0_rtx;
}
- break;
- default:
- break;
}
if (target == 0
enum machine_mode mode1 = insn_data[icode].operand[2].mode;
enum machine_mode mode2 = insn_data[icode].operand[3].mode;
+ if (icode == CODE_FOR_nothing)
+ /* Builtin not supported on this processor. */
+ return 0;
+
/* If we got invalid arguments bail out before generating bad rtl. */
if (arg0 == error_mark_node
|| arg1 == error_mark_node
|| arg2 == error_mark_node)
return const0_rtx;
- switch (icode)
+ if (icode == CODE_FOR_altivec_vsldoi_4sf
+ || icode == CODE_FOR_altivec_vsldoi_4si
+ || icode == CODE_FOR_altivec_vsldoi_8hi
+ || icode == CODE_FOR_altivec_vsldoi_16qi)
{
/* Only allow 4-bit unsigned literals. */
- case CODE_FOR_altivec_vsldoi_4sf:
- case CODE_FOR_altivec_vsldoi_4si:
- case CODE_FOR_altivec_vsldoi_8hi:
- case CODE_FOR_altivec_vsldoi_16qi:
if (TREE_CODE (arg2) != INTEGER_CST
|| TREE_INT_CST_LOW (arg2) & ~0xf)
{
error ("argument 3 must be a 4-bit unsigned literal");
return const0_rtx;
}
- break;
- default:
- break;
}
if (target == 0
return ret;
}
- /* Handle simple unary operations. */
- d = (struct builtin_description *) bdesc_1arg;
- for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
- if (d->code == fcode)
- return rs6000_expand_unop_builtin (d->icode, arglist, target);
+ if (TARGET_ALTIVEC || TARGET_SPE)
+ {
+ /* Handle simple unary operations. */
+ d = (struct builtin_description *) bdesc_1arg;
+ for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
+ if (d->code == fcode)
+ return rs6000_expand_unop_builtin (d->icode, arglist, target);
- /* Handle simple binary operations. */
- d = (struct builtin_description *) bdesc_2arg;
- for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
- if (d->code == fcode)
- return rs6000_expand_binop_builtin (d->icode, arglist, target);
+ /* Handle simple binary operations. */
+ d = (struct builtin_description *) bdesc_2arg;
+ for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
+ if (d->code == fcode)
+ return rs6000_expand_binop_builtin (d->icode, arglist, target);
- /* Handle simple ternary operations. */
- d = (struct builtin_description *) bdesc_3arg;
- for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
- if (d->code == fcode)
- return rs6000_expand_ternop_builtin (d->icode, arglist, target);
+ /* Handle simple ternary operations. */
+ d = (struct builtin_description *) bdesc_3arg;
+ for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
+ if (d->code == fcode)
+ return rs6000_expand_ternop_builtin (d->icode, arglist, target);
+ }
abort ();
return NULL_RTX;
spe_init_builtins ();
if (TARGET_ALTIVEC)
altivec_init_builtins ();
- rs6000_common_init_builtins ();
+ if (TARGET_ALTIVEC || TARGET_SPE)
+ rs6000_common_init_builtins ();
}
/* Search through a set of builtins and enable the mask bits.
}
static void
-spe_init_builtins (void)
+spe_init_builtins ()
{
tree endlink = void_list_node;
tree puint_type_node = build_pointer_type (unsigned_type_node);
}
static void
-altivec_init_builtins (void)
+altivec_init_builtins ()
{
struct builtin_description *d;
struct builtin_description_predicates *dp;
}
static void
-rs6000_common_init_builtins (void)
+rs6000_common_init_builtins ()
{
struct builtin_description *d;
size_t i;
enum machine_mode mode0, mode1, mode2, mode3;
tree type;
- if (d->name == 0)
+ if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
mode0 = insn_data[d->icode].operand[0].mode;
enum machine_mode mode0, mode1, mode2;
tree type;
- if (d->name == 0)
+ if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
mode0 = insn_data[d->icode].operand[0].mode;
enum machine_mode mode0, mode1;
tree type;
- if (d->name == 0)
+ if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
mode0 = insn_data[d->icode].operand[0].mode;
abort ();
/* These should never be generated except for
- flag_unsafe_math_optimizations. */
+     flag_unsafe_math_optimizations or flag_finite_math_only.  */
if (mode == CCFPmode
&& ! flag_unsafe_math_optimizations
+ && ! flag_finite_math_only
&& (code == LE || code == GE
|| code == UNEQ || code == LTGT
|| code == UNGT || code == UNLT))
/* Maybe we have a guess as to how likely the branch is.
The old mnemonics don't have a way to specify this information. */
+ pred = "";
note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
if (note != NULL_RTX)
{
/* PROB is the difference from 50%. */
int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
-
- /* For branches that are very close to 50%, assume not-taken. */
- if (abs (prob) > REG_BR_PROB_BASE / 20
- && ((prob > 0) ^ need_longbranch))
- pred = "+";
- else
- pred = "-";
+ bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
+
+ /* Only hint for highly probable/improbable branches on newer
+ cpus as static prediction overrides processor dynamic
+ prediction. For older cpus we may as well always hint, but
+ assume not taken for branches that are very close to 50% as a
+ mispredicted taken branch is more expensive than a
+ mispredicted not-taken branch. */
+ if (always_hint
+ || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
+ {
+ if (abs (prob) > REG_BR_PROB_BASE / 20
+ && ((prob > 0) ^ need_longbranch))
+ pred = "+";
+ else
+ pred = "-";
+ }
}
- else
- pred = "";
if (label == NULL)
s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
the store-multiple instructions. */
if (using_store_multiple)
{
- rtvec p, dwarfp;
+ rtvec p;
int i;
p = rtvec_alloc (32 - info->first_gp_reg_save);
- dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
for (i = 0; i < 32 - info->first_gp_reg_save; i++)
{
rtx addr, reg, mem;
HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
rs6000_stack_t *info = rs6000_stack_info ();
- int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
if (! HAVE_epilogue)
{
System V.4 Powerpc's (and the embedded ABI derived from it) use a
different traceback table. */
- if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
+ if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
+ && rs6000_traceback != traceback_none)
{
const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
const char *language_string = lang_hooks.name;
int fixed_parms = 0, float_parms = 0, parm_info = 0;
int i;
+ int optional_tbtab;
+
+ if (rs6000_traceback == traceback_full)
+ optional_tbtab = 1;
+ else if (rs6000_traceback == traceback_part)
+ optional_tbtab = 0;
+ else
+ optional_tbtab = !optimize_size && !TARGET_ELF;
while (*fname == '.') /* V.4 encodes . in the name */
fname++;
fputs ("\t.align 2\n", file);
}
- return;
}
\f
/* A C compound statement that outputs the assembler code for a thunk
int fidx;
if (GET_CODE (k) == LABEL_REF)
- return result * 1231 + X0INT (XEXP (k, 0), 3);
+ return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
if (GET_CODE (k) == CODE_LABEL)
fidx = 3;
abort ();/* It would be easy to make this work, but it doesn't now. */
if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
- lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
- POINTER_SIZE, &low, &high, 0);
+ {
+#if HOST_BITS_PER_WIDE_INT == 32
+ lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
+ POINTER_SIZE, &low, &high, 0);
+#else
+ low |= high << 32;
+ low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
+ high = (HOST_WIDE_INT) low >> 32;
+ low &= 0xffffffff;
+#endif
+ }
if (TARGET_64BIT)
{
}
len = strlen (after_last_slash) + strlen (section_desc) + 2;
- *buf = (char *) permalloc (len);
+ *buf = (char *) xmalloc (len);
p = *buf;
*p++ = '_';
void
output_profile_hook (labelno)
- int labelno;
+ int labelno ATTRIBUTE_UNUSED;
{
if (DEFAULT_ABI == ABI_AIX)
{
+#ifdef NO_PROFILE_COUNTERS
+ emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
+#else
char buf[30];
const char *label_name;
rtx fun;
emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
fun, Pmode);
+#endif
}
else if (DEFAULT_ABI == ABI_DARWIN)
{
{
char tmp_buf[256];
char label_buf[256];
- char *label;
- tree tmp_stub, stub;
+ tree stub;
if (!flag_pic)
for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
if (no_previous_def (funname))
{
- int line_number;
+ int line_number = 0;
rtx label_rtx = gen_label_rtx ();
char *label_buf, temp_buf[256];
ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",