; a given symbolic address.
(UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
(UNSPEC_RBIT 26) ; rbit operation.
+ (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
+ ; another symbolic address.
]
)
; IS_THUMB is set to 'yes' when Thumb code is being generated (via the
; "thumb_code" flag) and 'no' otherwise; it lets the backend distinguish
; patterns that share the same RTL in both ARM and Thumb code.
(define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
-; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
-; scheduling decisions for the load unit and the multiplier.
-(define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
-
-; IS_XSCALE is set to 'yes' when compiling for XScale.
-(define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
-
;; Operand number of an input operand that is shifted as part of the
;; instruction.  Zero (the default set below) if the given instruction
;; does not shift one of its input operands.
(define_attr "shift" "" (const_int 0))
;; Negate a 64-bit (DImode) value; the parallel clobbers the condition-code
;; register (CC_REGNUM).
;; NOTE(review): the Thumb-1 preparation statement that forced a non-REG
;; operand 1 into a register was removed here, leaving an empty "" prep --
;; presumably the s_register_operand predicate now makes it redundant;
;; confirm against the rest of the cbranch conversion.
(define_expand "negdi2"
 [(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (neg:DI (match_operand:DI 1 "s_register_operand" "")))
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (neg:DI (match_operand:DI 1 "s_register_operand" "")))
 (clobber (reg:CC CC_REGNUM))])]
 "TARGET_EITHER"
- "
- if (TARGET_THUMB1)
- {
- if (GET_CODE (operands[1]) != REG)
- operands[1] = force_reg (DImode, operands[1]);
- }
- "
+ ""
)
;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
)
;; Thumb-1 64-bit negate: 0 - op1, computed as a zero / negate-low /
;; subtract-with-carry-high sequence.  Earlyclobber on the low destination
;; register prevents a partial overlap with the source pair.
(define_insn "*thumb1_negdi2"
- [(set (match_operand:DI 0 "register_operand" "=&l")
- (neg:DI (match_operand:DI 1 "register_operand" "l")))
+ [(set (match_operand:DI 0 "register_operand" "=&l")
+ (neg:DI (match_operand:DI 1 "register_operand" "l")))
 (clobber (reg:CC CC_REGNUM))]
 "TARGET_THUMB1"
 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
 [(set_attr "length" "6")]
)
+;; A pattern to recognize a special situation and optimize for it.
+;; On the thumb, zero-extension from memory is preferable to sign-extension
+;; due to the available addressing modes. Hence, convert a signed comparison
+;; with zero into an unsigned comparison with 127 if possible.
+(define_expand "cbranchqi4"
+  [(set (pc) (if_then_else
+	      (match_operator 0 "lt_ge_comparison_operator"
+	       [(match_operand:QI 1 "memory_operand" "")
+	        (match_operand:QI 2 "const0_operand" "")])
+	      (label_ref (match_operand 3 "" ""))
+	      (pc)))]
+  "TARGET_THUMB1"
+{
+  /* Four slots: rewritten comparison operator, zero-extended register,
+     the constant 127, and the branch label.  xops[3] is written below,
+     so the array must have four elements.  */
+  rtx xops[4];
+  xops[1] = gen_reg_rtx (SImode);
+  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
+  xops[2] = GEN_INT (127);
+  /* (x >= 0)  ==>  (unsigned) x <= 127;  (x < 0)  ==>  (unsigned) x > 127.  */
+  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
+			    VOIDmode, xops[1], xops[2]);
+  xops[3] = operands[3];
+  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
+  DONE;
+})
+
(define_expand "cbranchsf4"
[(set (pc) (if_then_else
(match_operator 0 "arm_comparison_operator"
operands[3])); DONE;"
)
-(define_insn "*cbranchsi4_insn"
+(define_insn "cbranchsi4_insn"
[(set (pc) (if_then_else
(match_operator 0 "arm_comparison_operator"
[(match_operand:SI 1 "s_register_operand" "l,*h")
(pc)))]
"TARGET_THUMB1"
"*
- output_asm_insn (\"cmp\\t%1, %2\", operands);
+ rtx t = prev_nonnote_insn (insn);
+ if (t != NULL_RTX
+ && INSN_P (t)
+ && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
+ {
+ t = XEXP (SET_SRC (PATTERN (t)), 0);
+ if (!rtx_equal_p (XEXP (t, 0), operands[1])
+ || !rtx_equal_p (XEXP (t, 1), operands[2]))
+ t = NULL_RTX;
+ }
+ else
+ t = NULL_RTX;
+ if (t == NULL_RTX)
+ output_asm_insn (\"cmp\\t%1, %2\", operands);
switch (get_attr_length (insn))
{
(if_then_else
(match_operator 4 "arm_comparison_operator"
[(plus:SI
- (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
- (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
+ (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
+ (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
(const_int 0)])
(label_ref (match_operand 5 "" ""))
(pc)))
(set
(match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
(plus:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
+ (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
"TARGET_THUMB1
&& (GET_CODE (operands[4]) == EQ
|| GET_CODE (operands[4]) == NE
{
rtx cond[3];
-
- cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
+ cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
cond[1] = operands[2];
cond[2] = operands[3];
else
output_asm_insn (\"add\\t%0, %1, %2\", cond);
- if (which_alternative >= 3
+ if (which_alternative >= 2
&& which_alternative < 4)
output_asm_insn (\"mov\\t%0, %1\", operands);
else if (which_alternative >= 4)