/* Subroutines for assembler code output on the TMS320C[34]x
- Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003
+ Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
Free Software Foundation, Inc.
Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
+static void c4x_external_libcall (rtx);
+static rtx c4x_struct_value_rtx (tree, int);
+static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
\f
/* Initialize the GCC target structure. */
#undef TARGET_ASM_BYTE_OP
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end
+#undef TARGET_ASM_EXTERNAL_LIBCALL
+#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
+
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
+#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
+#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
+
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
+#undef TARGET_STRUCT_VALUE_RTX
+#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
+
+#undef TARGET_GIMPLIFY_VA_ARG_EXPR
+#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr
+
struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Override command line options.
if ((mode = TYPE_MODE (type)))
{
- if (! MUST_PASS_IN_STACK (mode, type))
+ if (! targetm.calls.must_pass_in_stack (mode, type))
{
/* Look for float, double, or long double argument. */
if (mode == QFmode || mode == HFmode)
if (! TARGET_MEMPARM
&& named
&& type
- && ! MUST_PASS_IN_STACK (mode, type))
+ && ! targetm.calls.must_pass_in_stack (mode, type))
{
/* Look for float, double, or long double argument. */
if (mode == QFmode || mode == HFmode)
if (! TARGET_MEMPARM
&& named
&& type
- && ! MUST_PASS_IN_STACK (mode, type))
+ && ! targetm.calls.must_pass_in_stack (mode, type))
{
/* Look for float, double, or long double argument. */
if (mode == QFmode || mode == HFmode)
/* C[34]x arguments grow in weird ways (downwards) that the standard
varargs stuff can't handle.. */
-rtx
-c4x_va_arg (tree valist, tree type)
+
+/* Worker function for TARGET_GIMPLIFY_VA_ARG_EXPR.  C[34]x arguments
+   grow downwards, so each argument is fetched by pre-decrementing the
+   arg pointer VALIST by the argument's size and dereferencing the
+   result through a pointer to TYPE.  Returns the GIMPLE tree for the
+   fetched value; PRE_P/POST_P are unused since no extra statements
+   need to be queued.  */
+
+static tree
+c4x_gimplify_va_arg_expr (tree valist, tree type,
+ tree *pre_p ATTRIBUTE_UNUSED,
+ tree *post_p ATTRIBUTE_UNUSED)
{
tree t;
t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
build_int_2 (int_size_in_bytes (type), 0));
- TREE_SIDE_EFFECTS (t) = 1;
+ t = fold_convert (build_pointer_type (type), t);
+ t = build_fold_indirect_ref (t);
- return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
+ return t;
}
to save/restore the registers we actually use.
For the global variable leaf_function to be set, we need
to define LEAF_REGISTERS and all that it entails.
- Let's check ourselves... */
+ Let's check ourselves.... */
if (lookup_attribute ("leaf_pretend",
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
}
else
{
- /* If we use ar3, we need to push it. */
+ /* If we use ar3, we need to push it. */
dont_push_ar3 = 0;
if ((size != 0) || (current_function_args_size != 0))
{
gen_rtx_PLUS
(QImode, gen_rtx_REG (QImode,
AR3_REGNO),
- GEN_INT(-1)))));
+ constm1_rtx))));
RTX_FRAME_RELATED_P (insn) = 1;
/* We already have the return value and the fp,
&& dp_reg_operand (XEXP (op1, 0), mode))
{
/* expand_increment will sometimes create a LO_SUM immediate
- address. */
+ address. Undo this silliness. */
op1 = XEXP (op1, 1);
}
- else if (symbolic_address_operand (op1, mode))
+
+ if (symbolic_address_operand (op1, mode))
{
if (TARGET_LOAD_ADDRESS)
{
case 2:
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
operands[1], smode);
- equiv = gen_rtx (code, dmode, operands[1]);
+ equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
break;
case 3:
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
operands[1], smode, operands[2], smode);
- equiv = gen_rtx (code, dmode, operands[1], operands[2]);
+ equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
break;
default:
equiv = gen_rtx_TRUNCATE (mode,
gen_rtx_LSHIFTRT (HImode,
gen_rtx_MULT (HImode,
- gen_rtx (code, HImode, operands[1]),
- gen_rtx (code, HImode, operands[2])),
+ gen_rtx_fmt_e (code, HImode, operands[1]),
+ gen_rtx_fmt_e (code, HImode, operands[2])),
GEN_INT (32)));
insns = get_insns ();
end_sequence ();
int
-c4x_check_legit_addr (enum machine_mode mode, rtx addr, int strict)
+c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
{
rtx base = NULL_RTX; /* Base register (AR0-AR7). */
rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
|| (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
return 0;
base = XEXP (op1, 0);
- if (base != op0)
+ if (! REG_P (base))
+ return 0;
+ if (REGNO (base) != REGNO (op0))
return 0;
if (REG_P (XEXP (op1, 1)))
indx = XEXP (op1, 1);
rtx count_reg;
/* If the count register has not been allocated to RC, say if
- there is a movstr pattern in the loop, then do not insert a
+ there is a movmem pattern in the loop, then do not insert a
RPTB instruction. Instead we emit a decrement and branch
at the end of the loop. */
count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
{
/* We can not use the rptb insn. Replace it so reorg can use
the delay slots of the jump insn. */
- emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
- emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
+ emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
+ emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
emit_insn_before (gen_bge (start_label), insn);
LABEL_NUSES (start_label)++;
delete_insn (insn);
case Pmode:
#endif
case QImode:
- if (GET_CODE (op) == CONSTANT_P_RTX)
- return 1;
-
if (GET_CODE (op) != CONST_INT
|| (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
|| GET_MODE_CLASS (mode) != MODE_INT)
&& ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
|| GET_CODE (XEXP (op, 0)) == LABEL_REF
|| GET_CODE (XEXP (op, 0)) == CONST)))
- return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
+ return !TARGET_EXPOSE_LDP &&
+ ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
return general_operand (op, mode);
}
return;
}
}
- /* Fallthrough. */
+ /* Fall through. */
default:
fatal_insn ("invalid indirect (S) memory address", op);
enum machine_mode mode ATTRIBUTE_UNUSED,
int force)
{
+ rtx op0;
rtx op1;
rtx op2;
enum rtx_code code1;
enum rtx_code code2;
+
+ /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
+ if (code == IF_THEN_ELSE)
+ return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
+
if (code == COMPARE)
{
op1 = operands[0];
op2 = operands[2];
}
+ op0 = operands[0];
+
+ if (GET_CODE (op0) == SUBREG)
+ op0 = SUBREG_REG (op0);
if (GET_CODE (op1) == SUBREG)
op1 = SUBREG_REG (op1);
if (GET_CODE (op2) == SUBREG)
code1 = GET_CODE (op1);
code2 = GET_CODE (op2);
+
if (code1 == REG && code2 == REG)
return 1;
return c4x_R_indirect (op1) && c4x_R_indirect (op2);
}
+ /* We cannot handle two MEMs or two CONSTS, etc. */
if (code1 == code2)
return 0;
/* Any valid memory operand screened by src_operand is OK. */
case MEM:
-
- /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
- into a stack slot memory address comprising a PLUS and a
- constant. */
- case ADDRESSOF:
break;
default:
break;
}
+ if (GET_CODE (op0) == SCRATCH)
+ return 1;
+
+ if (!REG_P (op0))
+ return 0;
+
/* Check that we have a valid destination register for a two operand
instruction. */
- return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
+ return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
}
- /* We assume MINUS is commutative since the subtract patterns
- also support the reverse subtract instructions. Since op1
- is not a register, and op2 is a register, op1 can only
- be a restricted memory operand for a shift instruction. */
+
+ /* Check non-commutative operators. */
if (code == ASHIFTRT || code == LSHIFTRT
|| code == ASHIFT || code == COMPARE)
return code2 == REG
&& (c4x_S_indirect (op1) || c4x_R_indirect (op1));
-
- switch (code1)
+
+
+ /* Assume MINUS is commutative since the subtract patterns
+ also support the reverse subtract instructions. Since op1
+ is not a register, and op2 is a register, op1 can only
+ be a restricted memory operand for a shift instruction. */
+ if (code2 == REG)
{
- case CONST_INT:
- if (c4x_J_constant (op1) && c4x_R_indirect (op2))
- return 1;
- break;
+ switch (code1)
+ {
+ case CONST_INT:
+ break;
- case CONST_DOUBLE:
- if (! c4x_H_constant (op1))
- return 0;
- break;
+ case CONST_DOUBLE:
+ if (! c4x_H_constant (op1))
+ return 0;
+ break;
- /* Any valid memory operand screened by src_operand is OK. */
- case MEM:
-#if 0
- if (code2 != REG)
- return 0;
-#endif
- break;
+ /* Any valid memory operand screened by src_operand is OK. */
+ case MEM:
+ break;
+
+ default:
+ abort ();
+ break;
+ }
- /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
- into a stack slot memory address comprising a PLUS and a
- constant. */
- case ADDRESSOF:
- break;
-
- default:
- abort ();
- break;
+ if (GET_CODE (op0) == SCRATCH)
+ return 1;
+
+ if (!REG_P (op0))
+ return 0;
+
+ /* Check that we have a valid destination register for a two operand
+ instruction. */
+ return ! force || REGNO (op1) == REGNO (op0);
}
- /* Check that we have a valid destination register for a two operand
- instruction. */
- return ! force || REGNO (op1) == REGNO (operands[0]);
+ if (c4x_J_constant (op1) && c4x_R_indirect (op2))
+ return 1;
+
+ return 0;
}
/* We can get better code on a C30 if we force constant shift counts
into a register. This way they can get hoisted out of loops,
- tying up a register, but saving an instruction. The downside is
+ tying up a register but saving an instruction. The downside is
that they may get allocated to an address or index register, and
thus we will get a pipeline conflict if there is a nearby
indirect address using an address register.
&& (GET_CODE (operands[2]) != CONST_INT))
operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
+
+ /* When the shift count is greater than 32 then the result
+ can be implementation dependent. We truncate the result to
+ fit in 5 bits so that we do not emit invalid code when
+ optimizing---such as trying to generate lhu2 with 20021124-1.c. */
+ if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
+ && (GET_CODE (operands[2]) == CONST_INT))
+ && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
+ operands[2]
+ = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
+
return 1;
}
case C4X_BUILTIN_FIX:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fixqfqi_clobber (target, r0));
case C4X_BUILTIN_FIX_ANSI:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fix_truncqfqi2 (target, r0));
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
- r0 = protect_from_queue (r0, 0);
- r1 = protect_from_queue (r1, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_toieee (target, r0));
if (TARGET_C3X)
break;
arg0 = TREE_VALUE (arglist);
- if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
- put_var_into_stack (arg0, /*rescan=*/true);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (register_operand (r0, QFmode))
{
r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_rcpfqf_clobber (target, r0));
return false;
}
}
+\f
+/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  FUN is the
+   SYMBOL_REF of an external library call; its name is recorded via
+   c4x_external_ref so the reference is emitted in the assembly
+   output.  */
+
+static void
+c4x_external_libcall (rtx fun)
+{
+ /* This is only needed to keep asm30 happy for ___divqf3 etc. */
+ c4x_external_ref (XSTR (fun, 0));
+}
+
+/* Worker function for TARGET_STRUCT_VALUE_RTX.  The address where an
+   aggregate return value is to be stored is always passed in AR0,
+   for both incoming and outgoing calls, so FNTYPE and INCOMING are
+   ignored.  */
+
+static rtx
+c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
+ int incoming ATTRIBUTE_UNUSED)
+{
+ return gen_rtx_REG (Pmode, AR0_REGNO);
+}