;;- Machine description for ARM for GNU compiler
;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
-;; 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
;; UNSPEC Usage:
;; Note: sin and cos are no-longer used.
+;; Unspec constants for Neon are defined in neon.md.
(define_constants
[(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
(UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
(UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
(UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
+ (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
+ (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
+ ; instruction stream.
+ (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
+ ; generate correct unwind information.
+ (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
+ ; correctly for PIC usage.
]
)
; a 32-bit object.
(VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
; a 64-bit object.
- (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction.
- (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction.
- (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN
- (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions
- (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions
- (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions
+ (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
+ ; a 128-bit object.
+ (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
+ (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
+ (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
+ (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
+ (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
+ (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
(VUNSPEC_EH_RETURN 20); Use to override the return address for exception
; handling.
]
;; scheduling information.
(define_attr "insn"
- "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other"
+ "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,other"
(const_string "other"))
; TYPE attribute is used to detect floating point instructions which, if
; mav_farith Floating point arithmetic (4 cycle)
; mav_dmult Double multiplies (7 cycle)
;
+
(define_attr "type"
- "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
+ "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
(if_then_else
(eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
(const_string "mult")
(define_attr "far_jump" "yes,no" (const_string "no"))
+;; The number of machine instructions this pattern expands to.
+;; Used for Thumb-2 conditional execution.
+(define_attr "ce_count" "" (const_int 1))
+
;;---------------------------------------------------------------------------
;; Mode macros
;; Predicates
(include "predicates.md")
+(include "constraints.md")
;;---------------------------------------------------------------------------
;; Pipeline descriptions
(define_attr "generic_sched" "yes,no"
(const (if_then_else
- (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs")
+ (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8")
(const_string "no")
(const_string "yes"))))
(define_attr "generic_vfp" "yes,no"
(const (if_then_else
(and (eq_attr "fpu" "vfp")
- (eq_attr "tune" "!arm1020e,arm1022e"))
+ (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
(const_string "yes")
(const_string "no"))))
(include "arm1020e.md")
(include "arm1026ejs.md")
(include "arm1136jfs.md")
+(include "cortex-a8.md")
\f
;;---------------------------------------------------------------------------
DONE;
}
- if (TARGET_THUMB)
+ if (TARGET_THUMB1)
{
if (GET_CODE (operands[1]) != REG)
operands[1] = force_reg (SImode, operands[1]);
"
)
-(define_insn "*thumb_adddi3"
+(define_insn "*thumb1_adddi3"
[(set (match_operand:DI 0 "register_operand" "=l")
(plus:DI (match_operand:DI 1 "register_operand" "%0")
(match_operand:DI 2 "register_operand" "l")))
(clobber (reg:CC CC_REGNUM))
]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
[(set_attr "length" "4")]
)
(plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
(match_operand:DI 2 "s_register_operand" "r, 0")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
+ "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(parallel [(set (reg:CC_C CC_REGNUM)
(compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
(match_dup 1)))
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "r,0")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
+ "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(parallel [(set (reg:CC_C CC_REGNUM)
(compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
(match_dup 1)))
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "r,0")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
+ "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(parallel [(set (reg:CC_C CC_REGNUM)
(compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
(match_dup 1)))
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_EITHER"
"
- if (TARGET_ARM && GET_CODE (operands[2]) == CONST_INT)
+ if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
{
arm_split_constant (PLUS, SImode, NULL_RTX,
INTVAL (operands[2]), operands[0], operands[1],
- optimize && !no_new_pseudos);
+ optimize && can_create_pseudo_p ());
DONE;
}
"
(set (match_operand:SI 0 "arm_general_register_operand" "")
(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
(match_operand:SI 2 "const_int_operand" "")))]
- "TARGET_ARM &&
+ "TARGET_32BIT &&
!(const_ok_for_arm (INTVAL (operands[2]))
|| const_ok_for_arm (-INTVAL (operands[2])))
&& const_ok_for_arm (~INTVAL (operands[2]))"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
(plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
(match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
add%?\\t%0, %1, %2
sub%?\\t%0, %1, #%n2
#"
- "TARGET_ARM &&
+ "TARGET_32BIT &&
GET_CODE (operands[2]) == CONST_INT
&& !(const_ok_for_arm (INTVAL (operands[2]))
|| const_ok_for_arm (-INTVAL (operands[2])))"
;; register. Trying to reload it will always fail catastrophically,
;; so never allow those alternatives to match if reloading is needed.
-(define_insn "*thumb_addsi3"
+(define_insn "*thumb1_addsi3"
[(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
(match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
static const char * const asms[] =
{
(match_operand:SI 1 "const_int_operand" ""))
(set (match_dup 0)
(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
&& (INTVAL (operands[1]) & 3) == 0"
[(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
""
)
+;; ??? Make Thumb-2 variants which prefer low regs
(define_insn "*addsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
- add%?s\\t%0, %1, %2
- sub%?s\\t%0, %1, #%n2"
+ add%.\\t%0, %1, %2
+ sub%.\\t%0, %1, #%n2"
[(set_attr "conds" "set")]
)
(plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
(match_operand:SI 1 "arm_add_operand" "rI,L"))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
(compare:CC_Z
(neg:SI (match_operand:SI 0 "s_register_operand" "r"))
(match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"cmn%?\\t%1, %0"
[(set_attr "conds" "set")]
)
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_dup 1)
(match_operand:SI 3 "arm_addimm_operand" "L,I")))]
- "TARGET_ARM && INTVAL (operands[2]) == -INTVAL (operands[3])"
+ "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
"@
- sub%?s\\t%0, %1, %2
- add%?s\\t%0, %1, #%n2"
+ sub%.\\t%0, %1, %2
+ add%.\\t%0, %1, #%n2"
[(set_attr "conds" "set")]
)
[(match_dup 2) (const_int 0)])
(match_operand 4 "" "")
(match_operand 5 "" "")))]
- "TARGET_ARM && peep2_reg_dead_p (3, operands[2])"
+ "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
[(parallel[
(set (match_dup 2)
(compare:CC
(match_dup 1)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
- add%?s\\t%0, %1, %2
- sub%?s\\t%0, %1, #%n2"
+ add%.\\t%0, %1, %2
+ sub%.\\t%0, %1, #%n2"
[(set_attr "conds" "set")]
)
(match_dup 2)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
- add%?s\\t%0, %1, %2
- sub%?s\\t%0, %1, #%n2"
+ add%.\\t%0, %1, %2
+ sub%.\\t%0, %1, #%n2"
[(set_attr "conds" "set")]
)
(plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
(match_operand:SI 1 "arm_add_operand" "rI,L"))
(match_dup 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
(plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
(match_operand:SI 1 "arm_add_operand" "rI,L"))
(match_dup 1)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
(plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
(plus:SI (match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "arm_rhs_operand" "rI"))))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"adc%?\\t%0, %1, %2"
[(set_attr "conds" "use")]
)
[(match_operand:SI 3 "s_register_operand" "r")
(match_operand:SI 4 "reg_or_int_operand" "rM")])
(match_operand:SI 1 "s_register_operand" "r"))))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"adc%?\\t%0, %1, %3%S2"
[(set_attr "conds" "use")
(set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "arm_rhs_operand" "rI"))
(ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"adc%?\\t%0, %1, %2"
[(set_attr "conds" "use")]
)
(plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
(match_operand:SI 1 "s_register_operand" "r"))
(match_operand:SI 2 "arm_rhs_operand" "rI")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"adc%?\\t%0, %1, %2"
[(set_attr "conds" "use")]
)
(plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
(match_operand:SI 2 "arm_rhs_operand" "rI"))
(match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"adc%?\\t%0, %1, %2"
[(set_attr "conds" "use")]
)
-(define_insn "incscc"
+;; incscc: conditional increment -- operand 0 = operand 1 plus 1 if the
+;; operand 2 comparison (of the CC register in operand 3 against zero)
+;; holds, else plus 0.  Converted to an expander; the ARM matching
+;; pattern is "*arm_incscc" below.
+(define_expand "incscc"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (plus:SI (match_operator:SI 2 "arm_comparison_operator"
+ [(match_operand:CC 3 "cc_register" "") (const_int 0)])
+ (match_operand:SI 1 "s_register_operand" "0,?r")))]
+ "TARGET_32BIT"
+ ""
+)
+
+(define_insn "*arm_incscc"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_operator:SI 2 "arm_comparison_operator"
[(match_operand:CC 3 "cc_register" "") (const_int 0)])
(match_operand:SI 2 "s_register_operand" ""))
(const_int -1)))
(clobber (match_operand:SI 3 "s_register_operand" ""))]
- "TARGET_ARM"
+ "TARGET_32BIT"
[(set (match_dup 3) (match_dup 1))
(set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
"
[(set (match_operand:SF 0 "s_register_operand" "")
(plus:SF (match_operand:SF 1 "s_register_operand" "")
(match_operand:SF 2 "arm_float_add_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], SFmode))
[(set (match_operand:DF 0 "s_register_operand" "")
(plus:DF (match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_add_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], DFmode))
"TARGET_EITHER"
"
if (TARGET_HARD_FLOAT && TARGET_MAVERICK
- && TARGET_ARM
+ && TARGET_32BIT
&& cirrus_fp_register (operands[0], DImode)
&& cirrus_fp_register (operands[1], DImode))
{
DONE;
}
- if (TARGET_THUMB)
+ if (TARGET_THUMB1)
{
if (GET_CODE (operands[1]) != REG)
operands[1] = force_reg (SImode, operands[1]);
(minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
(match_operand:DI 2 "s_register_operand" "r,0,0")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
(minus:DI (match_operand:DI 1 "register_operand" "0")
(match_operand:DI 2 "register_operand" "l")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
[(set_attr "length" "4")]
)
(zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
(sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
(zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r"))))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
- "subs\\t%Q0, %1, %2\;rsc\\t%R0, %1, %1"
+ "TARGET_32BIT"
+ "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
)
"
if (GET_CODE (operands[1]) == CONST_INT)
{
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
arm_split_constant (MINUS, SImode, NULL_RTX,
INTVAL (operands[1]), operands[0],
- operands[2], optimize && !no_new_pseudos);
+ operands[2], optimize && can_create_pseudo_p ());
DONE;
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
operands[1] = force_reg (SImode, operands[1]);
}
"
)
-(define_insn "*thumb_subsi3_insn"
+(define_insn "*thumb1_subsi3_insn"
[(set (match_operand:SI 0 "register_operand" "=l")
(minus:SI (match_operand:SI 1 "register_operand" "l")
(match_operand:SI 2 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"sub\\t%0, %1, %2"
[(set_attr "length" "2")]
)
+; ??? Check Thumb-2 split length
(define_insn_and_split "*arm_subsi3_insn"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
(match_operand:SI 2 "s_register_operand" "r,r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
rsb%?\\t%0, %2, %1
#"
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[1]) == CONST_INT
&& !const_ok_for_arm (INTVAL (operands[1]))"
[(clobber (const_int 0))]
(set (match_operand:SI 0 "arm_general_register_operand" "")
(minus:SI (match_operand:SI 1 "const_int_operand" "")
(match_operand:SI 2 "arm_general_register_operand" "")))]
- "TARGET_ARM
+ "TARGET_32BIT
&& !const_ok_for_arm (INTVAL (operands[1]))
&& const_ok_for_arm (~INTVAL (operands[1]))"
[(set (match_dup 3) (match_dup 1))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(minus:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
- sub%?s\\t%0, %1, %2
- rsb%?s\\t%0, %2, %1"
+ sub%.\\t%0, %1, %2
+ rsb%.\\t%0, %2, %1"
[(set_attr "conds" "set")]
)
-(define_insn "decscc"
+;; decscc: conditional decrement -- operand 0 = operand 1 minus 1 if the
+;; operand 2 comparison (of the CC register in operand 3 against zero)
+;; holds, else minus 0.  Converted to an expander; the ARM matching
+;; pattern is "*arm_decscc" below.
+(define_expand "decscc"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
+ (match_operator:SI 2 "arm_comparison_operator"
+ [(match_operand 3 "cc_register" "") (const_int 0)])))]
+ "TARGET_32BIT"
+ ""
+)
+
+(define_insn "*arm_decscc"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
(match_operator:SI 2 "arm_comparison_operator"
[(set (match_operand:SF 0 "s_register_operand" "")
(minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
(match_operand:SF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
[(set (match_operand:DF 0 "s_register_operand" "")
(minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
(match_operand:SI 1 "s_register_operand" "%?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT && !arm_arch6"
"mul%?\\t%0, %2, %1"
[(set_attr "insn" "mul")
(set_attr "predicable" "yes")]
)
+;; 32x32->32 multiply for ARMv6 and later.  Unlike the pre-v6 pattern
+;; above (early-clobber "&r" and tied "%?r,0" constraints), plain "r"
+;; constraints are used here.  NOTE(review): presumably because ARMv6
+;; lifted the Rd != Rm restriction on MUL -- confirm against the ARM ARM.
+(define_insn "*arm_mulsi3_v6"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (mult:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")))]
+ "TARGET_32BIT && arm_arch6"
+ "mul%?\\t%0, %1, %2"
+ [(set_attr "insn" "mul")
+ (set_attr "predicable" "yes")]
+)
+
; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
; 1 and 2 are the same, because reload will make operand 0 match
; operand 1 without realizing that this conflicts with operand 2. We fix
[(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
(match_operand:SI 2 "register_operand" "l,l,l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1 && !arm_arch6"
"*
if (which_alternative < 2)
return \"mov\\t%0, %1\;mul\\t%0, %2\";
(set_attr "insn" "mul")]
)
+;; Thumb-1 multiply for ARMv6.  Thumb MUL is two-operand, so the
+;; destination must be tied to one source (the "0" constraints); each
+;; alternative emits the form whose untied source is the explicit
+;; MUL operand.
+(define_insn "*thumb_mulsi3_v6"
+ [(set (match_operand:SI 0 "register_operand" "=l,l,l")
+ (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
+ (match_operand:SI 2 "register_operand" "l,0,0")))]
+ "TARGET_THUMB1 && arm_arch6"
+ "@
+ mul\\t%0, %2
+ mul\\t%0, %1
+ mul\\t%0, %1"
+ [(set_attr "length" "2")
+ (set_attr "insn" "mul")]
+)
+
(define_insn "*mulsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (mult:SI
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(mult:SI (match_dup 2) (match_dup 1)))]
- "TARGET_ARM"
- "mul%?s\\t%0, %2, %1"
+ "TARGET_ARM && !arm_arch6"
+ "mul%.\\t%0, %2, %1"
+ [(set_attr "conds" "set")
+ (set_attr "insn" "muls")]
+)
+
+;; Flag-setting multiply (MULS) for ARMv6, enabled only when optimizing
+;; for size (optimize_size guard).  NOTE(review): for speed a plain MUL
+;; plus separate compare is presumably preferred on v6 cores -- confirm
+;; the tuning rationale.
+(define_insn "*mulsi3_compare0_v6"
+ [(set (reg:CC_NOOV CC_REGNUM)
+ (compare:CC_NOOV (mult:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "s_register_operand" "=r")
+ (mult:SI (match_dup 2) (match_dup 1)))]
+ "TARGET_ARM && arm_arch6 && optimize_size"
+ "mul%.\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "insn" "muls")]
)
(match_operand:SI 1 "s_register_operand" "%?r,0"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r"))]
- "TARGET_ARM"
- "mul%?s\\t%0, %2, %1"
+ "TARGET_ARM && !arm_arch6"
+ "mul%.\\t%0, %2, %1"
+ [(set_attr "conds" "set")
+ (set_attr "insn" "muls")]
+)
+
+;; As *mulsi3_compare0_v6, but the product itself is discarded (scratch
+;; operand 0); only the condition codes are wanted.  ARMv6, -Os only.
+(define_insn "*mulsi_compare0_scratch_v6"
+ [(set (reg:CC_NOOV CC_REGNUM)
+ (compare:CC_NOOV (mult:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=r"))]
+ "TARGET_ARM && arm_arch6 && optimize_size"
+ "mul%.\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "insn" "muls")]
)
(mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
(match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
(match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT && !arm_arch6"
+ "mla%?\\t%0, %2, %1, %3"
+ [(set_attr "insn" "mla")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "*mulsi3addsi_v6"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI
+ (mult:SI (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))
+ (match_operand:SI 3 "s_register_operand" "r")))]
+ "TARGET_32BIT && arm_arch6"
"mla%?\\t%0, %2, %1, %3"
[(set_attr "insn" "mla")
(set_attr "predicable" "yes")]
(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
(plus:SI (mult:SI (match_dup 2) (match_dup 1))
(match_dup 3)))]
- "TARGET_ARM"
- "mla%?s\\t%0, %2, %1, %3"
+ "TARGET_ARM && arm_arch6"
+ "mla%.\\t%0, %2, %1, %3"
+ [(set_attr "conds" "set")
+ (set_attr "insn" "mlas")]
+)
+
+;; Flag-setting multiply-accumulate (MLAS) for ARMv6, enabled only when
+;; optimizing for size (optimize_size guard).
+(define_insn "*mulsi3addsi_compare0_v6"
+ [(set (reg:CC_NOOV CC_REGNUM)
+ (compare:CC_NOOV
+ (plus:SI (mult:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))
+ (match_operand:SI 3 "s_register_operand" "r"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI (mult:SI (match_dup 2) (match_dup 1))
+ (match_dup 3)))]
+ "TARGET_ARM && arm_arch6 && optimize_size"
+ "mla%.\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "insn" "mlas")]
)
(match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
- "TARGET_ARM"
- "mla%?s\\t%0, %2, %1, %3"
+ "TARGET_ARM && !arm_arch6"
+ "mla%.\\t%0, %2, %1, %3"
+ [(set_attr "conds" "set")
+ (set_attr "insn" "mlas")]
+)
+
+;; As *mulsi3addsi_compare0_v6, but the sum is discarded (scratch
+;; operand 0); only the condition codes are wanted.  ARMv6, -Os only.
+(define_insn "*mulsi3addsi_compare0_scratch_v6"
+ [(set (reg:CC_NOOV CC_REGNUM)
+ (compare:CC_NOOV
+ (plus:SI (mult:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))
+ (match_operand:SI 3 "s_register_operand" "r"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=r"))]
+ "TARGET_ARM && arm_arch6 && optimize_size"
+ "mla%.\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "insn" "mlas")]
)
+;; Multiply-and-subtract (MLS): operand 0 = operand 3 - operand 2 *
+;; operand 1.  Available from ARMv6T2 / Thumb-2 (arm_arch_thumb2 guard).
+;; NOTE(review): scheduled under the "mla" insn class -- confirm this
+;; matches the pipeline descriptions.
+(define_insn "*mulsi3subsi"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (minus:SI
+ (match_operand:SI 3 "s_register_operand" "r")
+ (mult:SI (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 1 "s_register_operand" "r"))))]
+ "TARGET_32BIT && arm_arch_thumb2"
+ "mls%?\\t%0, %2, %1, %3"
+ [(set_attr "insn" "mla")
+ (set_attr "predicable" "yes")]
+)
+
;; Unnamed template to match long long multiply-accumulate (smlal)
(define_insn "*mulsidi3adddi"
(sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
(sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
(match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "smlal%?\\t%Q0, %R0, %3, %2"
+ [(set_attr "insn" "smlal")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "*mulsidi3adddi_v6"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (plus:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
+ (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
+ (match_operand:DI 1 "s_register_operand" "0")))]
+ "TARGET_32BIT && arm_arch6"
"smlal%?\\t%Q0, %R0, %3, %2"
[(set_attr "insn" "smlal")
(set_attr "predicable" "yes")]
(mult:DI
(sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
(sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "smull%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "insn" "smull")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "mulsidi3_v6"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
+ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
+ "TARGET_32BIT && arm_arch6"
"smull%?\\t%Q0, %R0, %1, %2"
[(set_attr "insn" "smull")
(set_attr "predicable" "yes")]
(mult:DI
(zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
(zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "umull%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "insn" "umull")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "umulsidi3_v6"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (mult:DI
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
+ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
+ "TARGET_32BIT && arm_arch6"
"umull%?\\t%Q0, %R0, %1, %2"
[(set_attr "insn" "umull")
(set_attr "predicable" "yes")]
(zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
(zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
(match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "umlal%?\\t%Q0, %R0, %3, %2"
+ [(set_attr "insn" "umlal")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "*umulsidi3adddi_v6"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (plus:DI
+ (mult:DI
+ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
+ (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
+ (match_operand:DI 1 "s_register_operand" "0")))]
+ "TARGET_32BIT && arm_arch6"
"umlal%?\\t%Q0, %R0, %3, %2"
[(set_attr "insn" "umlal")
(set_attr "predicable" "yes")]
(sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
(const_int 32))))
(clobber (match_scratch:SI 3 "=&r,&r"))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "smull%?\\t%3, %0, %2, %1"
+ [(set_attr "insn" "smull")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "smulsi3_highpart_v6"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
+ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
+ (const_int 32))))
+ (clobber (match_scratch:SI 3 "=r"))]
+ "TARGET_32BIT && arm_arch6"
"smull%?\\t%3, %0, %2, %1"
[(set_attr "insn" "smull")
(set_attr "predicable" "yes")]
(zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
(const_int 32))))
(clobber (match_scratch:SI 3 "=&r,&r"))]
- "TARGET_ARM && arm_arch3m"
+ "TARGET_32BIT && arm_arch3m && !arm_arch6"
+ "umull%?\\t%3, %0, %2, %1"
+ [(set_attr "insn" "umull")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "umulsi3_highpart_v6"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
+ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
+ (const_int 32))))
+ (clobber (match_scratch:SI 3 "=r"))]
+ "TARGET_32BIT && arm_arch6"
"umull%?\\t%3, %0, %2, %1"
[(set_attr "insn" "umull")
(set_attr "predicable" "yes")]
(match_operand:HI 1 "s_register_operand" "%r"))
(sign_extend:SI
(match_operand:HI 2 "s_register_operand" "r"))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smulbb%?\\t%0, %1, %2"
[(set_attr "insn" "smulxy")
(set_attr "predicable" "yes")]
(const_int 16))
(sign_extend:SI
(match_operand:HI 2 "s_register_operand" "r"))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smultb%?\\t%0, %1, %2"
[(set_attr "insn" "smulxy")
(set_attr "predicable" "yes")]
(ashiftrt:SI
(match_operand:SI 2 "s_register_operand" "r")
(const_int 16))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smulbt%?\\t%0, %1, %2"
[(set_attr "insn" "smulxy")
(set_attr "predicable" "yes")]
(ashiftrt:SI
(match_operand:SI 2 "s_register_operand" "r")
(const_int 16))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smultt%?\\t%0, %1, %2"
[(set_attr "insn" "smulxy")
(set_attr "predicable" "yes")]
(match_operand:HI 2 "s_register_operand" "%r"))
(sign_extend:SI
(match_operand:HI 3 "s_register_operand" "r")))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smlabb%?\\t%0, %2, %3, %1"
[(set_attr "insn" "smlaxy")
(set_attr "predicable" "yes")]
(match_operand:HI 2 "s_register_operand" "%r"))
(sign_extend:DI
(match_operand:HI 3 "s_register_operand" "r")))))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_DSP_MULTIPLY"
"smlalbb%?\\t%Q0, %R0, %2, %3"
[(set_attr "insn" "smlalxy")
(set_attr "predicable" "yes")])
[(set (match_operand:SF 0 "s_register_operand" "")
(mult:SF (match_operand:SF 1 "s_register_operand" "")
(match_operand:SF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], SFmode))
[(set (match_operand:DF 0 "s_register_operand" "")
(mult:DF (match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], DFmode))
[(set (match_operand:SF 0 "s_register_operand" "")
(div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
(match_operand:SF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"")
(define_expand "divdf3"
[(set (match_operand:DF 0 "s_register_operand" "")
(div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"")
\f
;; Modulo insns
[(set (match_operand:SF 0 "s_register_operand" "")
(mod:SF (match_operand:SF 1 "s_register_operand" "")
(match_operand:SF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
"")
(define_expand "moddf3"
[(set (match_operand:DF 0 "s_register_operand" "")
(mod:DF (match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
"")
\f
;; Boolean and,ior,xor insns
(match_operator:DI 6 "logical_binary_operator"
[(match_operand:DI 1 "s_register_operand" "")
(match_operand:DI 2 "s_register_operand" "")]))]
- "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
+ "TARGET_32BIT && reload_completed
+ && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
[(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
(set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
"
(match_operator:DI 6 "logical_binary_operator"
[(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
(match_operand:DI 1 "s_register_operand" "")]))]
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
(set (match_dup 3) (match_op_dup:SI 6
[(ashiftrt:SI (match_dup 2) (const_int 31))
(ior:DI
(zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
(match_operand:DI 1 "s_register_operand" "")))]
- "TARGET_ARM && operands[0] != operands[1] && reload_completed"
+ "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
[(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
(set (match_dup 3) (match_dup 4))]
"
(xor:DI
(zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
(match_operand:DI 1 "s_register_operand" "")))]
- "TARGET_ARM && operands[0] != operands[1] && reload_completed"
+ "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
[(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
(set (match_dup 3) (match_dup 4))]
"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
(match_operand:DI 2 "s_register_operand" "r,r")))]
- "TARGET_ARM && ! TARGET_IWMMXT"
+ "TARGET_32BIT && ! TARGET_IWMMXT"
"#"
[(set_attr "length" "8")]
)
(and:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
; The zero extend of operand 2 clears the high word of the output
; operand.
[(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
(and:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
[(set_attr "length" "8")]
)
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
if (GET_CODE (operands[2]) == CONST_INT)
{
arm_split_constant (AND, SImode, NULL_RTX,
INTVAL (operands[2]), operands[0],
- operands[1], optimize && !no_new_pseudos);
+ operands[1], optimize && can_create_pseudo_p ());
DONE;
}
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
{
if (GET_CODE (operands[2]) != CONST_INT)
operands[2] = force_reg (SImode, operands[2]);
"
)
+; ??? Check split length for Thumb-2
(define_insn_and_split "*arm_andsi3_insn"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
(and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
(match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
and%?\\t%0, %1, %2
bic%?\\t%0, %1, #%B2
#"
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[2]) == CONST_INT
&& !(const_ok_for_arm (INTVAL (operands[2]))
|| const_ok_for_arm (~INTVAL (operands[2])))"
(set_attr "predicable" "yes")]
)
+;; Thumb-1 register-register AND.  "%0" makes operand 1 commutative and
+;; ties it to the destination (two-operand Thumb encoding, low regs only).
-(define_insn "*thumb_andsi3_insn"
+(define_insn "*thumb1_andsi3_insn"
[(set (match_operand:SI 0 "register_operand" "=l")
(and:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"and\\t%0, %0, %2"
[(set_attr "length" "2")]
)
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(and:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
- and%?s\\t%0, %1, %2
- bic%?s\\t%0, %1, #%B2"
+ and%.\\t%0, %1, %2
+ bic%.\\t%0, %1, #%B2"
[(set_attr "conds" "set")]
)
(match_operand:SI 1 "arm_not_operand" "rI,K"))
(const_int 0)))
(clobber (match_scratch:SI 2 "=X,r"))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
tst%?\\t%0, %1
- bic%?s\\t%2, %0, #%B1"
+ bic%.\\t%2, %0, #%B1"
[(set_attr "conds" "set")]
)
(match_operand 1 "const_int_operand" "n")
(match_operand 2 "const_int_operand" "n"))
(const_int 0)))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
&& INTVAL (operands[1]) > 0
&& INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
(match_operand:SI 3 "const_int_operand" "n"))
(const_int 0)))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
&& INTVAL (operands[2]) > 0
&& INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
&& INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
"#"
- "TARGET_ARM
+ "TARGET_32BIT
&& (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
&& INTVAL (operands[2]) > 0
&& INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
<< INTVAL (operands[3]));
"
[(set_attr "conds" "clob")
- (set_attr "length" "8")]
+ (set (attr "length")
+ (if_then_else (eq_attr "is_thumb" "yes")
+ (const_int 12)
+ (const_int 8)))]
)
(define_insn_and_split "*ne_zeroextractsi_shifted"
(match_operand:SI 2 "const_int_operand" "")
(match_operand:SI 3 "const_int_operand" "")))
(clobber (match_operand:SI 4 "s_register_operand" ""))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
[(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
(set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
"{
}"
)
+;; ??? Use the Thumb-2 bitfield insert/extract instructions.
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(match_operator:SI 1 "shiftable_operator"
(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
(match_operand:SI 2 "const_int_operand" "")
(match_operand:SI 3 "const_int_operand" "")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
[(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
(set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
"{
;;; the value before we insert. This loses some of the advantage of having
;;; this insv pattern, so this pattern needs to be reevalutated.
+; ??? Use Thumb-2 bitfield insert/extract instructions
(define_expand "insv"
[(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
(match_operand:SI 1 "general_operand" "")
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
(match_operand:DI 2 "s_register_operand" "0,r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
+ "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
[(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
(set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
"
(and:DI (not:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r")))
(match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
bic%?\\t%Q0, %Q1, %2
#"
; (not (zero_extend ...)) allows us to just copy the high word from
; operand1 to operand0.
- "TARGET_ARM
+ "TARGET_32BIT
&& reload_completed
&& operands[0] != operands[1]"
[(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
(and:DI (not:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r")))
(match_operand:DI 1 "s_register_operand" "0,r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
(set (match_dup 3) (and:SI (not:SI
(ashiftrt:SI (match_dup 2) (const_int 31)))
[(set (match_operand:SI 0 "s_register_operand" "=r")
(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
(match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"bic%?\\t%0, %1, %2"
[(set_attr "predicable" "yes")]
)
[(set (match_operand:SI 0 "register_operand" "=l")
(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
(match_operand:SI 2 "register_operand" "0")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"bic\\t%0, %0, %1"
[(set_attr "length" "2")]
)
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
- "TARGET_ARM"
- "bic%?s\\t%0, %1, %2"
+ "TARGET_32BIT"
+ "bic%.\\t%0, %1, %2"
[(set_attr "conds" "set")]
)
(match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
- "bic%?s\\t%0, %1, %2"
+ "TARGET_32BIT"
+ "bic%.\\t%0, %1, %2"
[(set_attr "conds" "set")]
)
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
(match_operand:DI 2 "s_register_operand" "r,r")))]
- "TARGET_ARM && ! TARGET_IWMMXT"
+ "TARGET_32BIT && ! TARGET_IWMMXT"
"#"
[(set_attr "length" "8")
(set_attr "predicable" "yes")]
(ior:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
orr%?\\t%Q0, %Q1, %2
#"
(ior:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
[(set_attr "length" "8")
(set_attr "predicable" "yes")]
"
if (GET_CODE (operands[2]) == CONST_INT)
{
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
arm_split_constant (IOR, SImode, NULL_RTX,
INTVAL (operands[2]), operands[0], operands[1],
- optimize && !no_new_pseudos);
+ optimize && can_create_pseudo_p ());
DONE;
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
operands [2] = force_reg (SImode, operands [2]);
}
"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
orr%?\\t%0, %1, %2
#"
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[2]) == CONST_INT
&& !const_ok_for_arm (INTVAL (operands[2]))"
[(clobber (const_int 0))]
(set_attr "predicable" "yes")]
)
+;; Thumb-1 register-register inclusive OR; operand 1 is commutative and
+;; must match the destination register.
-(define_insn "*thumb_iorsi3"
+(define_insn "*thumb1_iorsi3"
[(set (match_operand:SI 0 "register_operand" "=l")
(ior:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"orr\\t%0, %0, %2"
[(set_attr "length" "2")]
)
(set (match_operand:SI 0 "arm_general_register_operand" "")
(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
(match_operand:SI 2 "const_int_operand" "")))]
- "TARGET_ARM
+ "TARGET_32BIT
&& !const_ok_for_arm (INTVAL (operands[2]))
&& const_ok_for_arm (~INTVAL (operands[2]))"
[(set (match_dup 3) (match_dup 2))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(ior:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
- "orr%?s\\t%0, %1, %2"
+ "TARGET_32BIT"
+ "orr%.\\t%0, %1, %2"
[(set_attr "conds" "set")]
)
(match_operand:SI 2 "arm_rhs_operand" "rI"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
- "orr%?s\\t%0, %1, %2"
+ "TARGET_32BIT"
+ "orr%.\\t%0, %1, %2"
[(set_attr "conds" "set")]
)
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
(match_operand:DI 2 "s_register_operand" "r,r")))]
- "TARGET_ARM && !TARGET_IWMMXT"
+ "TARGET_32BIT && !TARGET_IWMMXT"
"#"
[(set_attr "length" "8")
(set_attr "predicable" "yes")]
(xor:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
eor%?\\t%Q0, %Q1, %2
#"
(xor:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
(match_operand:DI 1 "s_register_operand" "?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
[(set_attr "length" "8")
(set_attr "predicable" "yes")]
(xor:SI (match_operand:SI 1 "s_register_operand" "")
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
- "if (TARGET_THUMB)
+ "if (TARGET_THUMB1)
if (GET_CODE (operands[2]) == CONST_INT)
operands[2] = force_reg (SImode, operands[2]);
"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(xor:SI (match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "arm_rhs_operand" "rI")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"eor%?\\t%0, %1, %2"
[(set_attr "predicable" "yes")]
)
+;; Thumb-1 register-register exclusive OR; operand 1 is commutative and
+;; must match the destination register.
-(define_insn "*thumb_xorsi3"
+(define_insn "*thumb1_xorsi3"
[(set (match_operand:SI 0 "register_operand" "=l")
(xor:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"eor\\t%0, %0, %2"
[(set_attr "length" "2")]
)
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(xor:SI (match_dup 1) (match_dup 2)))]
- "TARGET_ARM"
- "eor%?s\\t%0, %1, %2"
+ "TARGET_32BIT"
+ "eor%.\\t%0, %1, %2"
[(set_attr "conds" "set")]
)
(compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
(match_operand:SI 1 "arm_rhs_operand" "rI"))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"teq%?\\t%0, %1"
[(set_attr "conds" "set")]
)
(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
(match_operand:SI 3 "arm_rhs_operand" "")))
(clobber (match_operand:SI 4 "s_register_operand" ""))]
- "TARGET_ARM"
+ "TARGET_32BIT"
[(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
(not:SI (match_dup 3))))
(set (match_dup 0) (not:SI (match_dup 4)))]
(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
[(set_attr "length" "8")
+ (set_attr "ce_count" "2")
(set_attr "predicable" "yes")]
)
+; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
+; insns are available?
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(match_operator:SI 1 "logical_binary_operator"
(match_operand:SI 6 "const_int_operand" ""))
(match_operand:SI 7 "s_register_operand" "")])]))
(clobber (match_operand:SI 8 "s_register_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[1]) == GET_CODE (operands[9])
&& INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
[(set (match_dup 8)
(match_operand:SI 3 "const_int_operand" "")
(match_operand:SI 4 "const_int_operand" ""))]))
(clobber (match_operand:SI 8 "s_register_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[1]) == GET_CODE (operands[9])
&& INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
[(set (match_dup 8)
(match_operand:SI 6 "const_int_operand" ""))
(match_operand:SI 7 "s_register_operand" "")])]))
(clobber (match_operand:SI 8 "s_register_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[1]) == GET_CODE (operands[9])
&& INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
[(set (match_dup 8)
(match_operand:SI 3 "const_int_operand" "")
(match_operand:SI 4 "const_int_operand" ""))]))
(clobber (match_operand:SI 8 "s_register_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[1]) == GET_CODE (operands[9])
&& INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
[(set (match_dup 8)
;; Minimum and maximum insns
-(define_insn "smaxsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
- (smax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
- (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+(define_expand "smaxsi3"
+ [(parallel [
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (smax:SI (match_operand:SI 1 "s_register_operand" "")
+ (match_operand:SI 2 "arm_rhs_operand" "")))
+ (clobber (reg:CC CC_REGNUM))])]
+ "TARGET_32BIT"
+ "
+ if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
+ {
+ /* No need for a clobber of the condition code register here. */
+ emit_insn (gen_rtx_SET (VOIDmode, operands[0],
+ gen_rtx_SMAX (SImode, operands[1],
+ operands[2])));
+ DONE;
+ }
+")
+
+;; smax(x, 0) = x & ~(x >> 31): clears x when negative.  No CC clobber,
+;; so the insn stays predicable.
+(define_insn "*smax_0"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (smax:SI (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 0)))]
+ "TARGET_32BIT"
+ "bic%?\\t%0, %1, %1, asr #31"
+ [(set_attr "predicable" "yes")]
+)
+
+;; smax(x, -1) = x | (x >> 31): x when non-negative, -1 otherwise.
+(define_insn "*smax_m1"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (smax:SI (match_operand:SI 1 "s_register_operand" "r")
+ (const_int -1)))]
+ "TARGET_32BIT"
+ "orr%?\\t%0, %1, %1, asr #31"
+ [(set_attr "predicable" "yes")]
+)
+
+;; ARM-mode signed maximum: compare, then one conditional move when
+;; operand 1 already matches the destination, otherwise two.
+(define_insn "*arm_smax_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
+ (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"@
cmp\\t%1, %2\;movlt\\t%0, %2
- cmp\\t%1, %2\;movge\\t%0, %1
cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
[(set_attr "conds" "clob")
- (set_attr "length" "8,8,12")]
+ (set_attr "length" "8,12")]
)
+;; Signed minimum.  Only smin(x,0) has a CC-free single-insn form (the
+;; "and" pattern below), so that is the one constant special-cased here.
-(define_insn "sminsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
- (smin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
- (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
+(define_expand "sminsi3"
+ [(parallel [
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (smin:SI (match_operand:SI 1 "s_register_operand" "")
+ (match_operand:SI 2 "arm_rhs_operand" "")))
+ (clobber (reg:CC CC_REGNUM))])]
+ "TARGET_32BIT"
+ "
+ if (operands[2] == const0_rtx)
+ {
+ /* No need for a clobber of the condition code register here. */
+ emit_insn (gen_rtx_SET (VOIDmode, operands[0],
+ gen_rtx_SMIN (SImode, operands[1],
+ operands[2])));
+ DONE;
+ }
+")
+
+;; smin(x, 0) = x & (x >> 31): 0 when non-negative, x otherwise.
+(define_insn "*smin_0"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (smin:SI (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 0)))]
+ "TARGET_32BIT"
+ "and%?\\t%0, %1, %1, asr #31"
+ [(set_attr "predicable" "yes")]
+)
+
+;; ARM-mode signed minimum; mirror image of *arm_smax_insn above.
+(define_insn "*arm_smin_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
+ (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"@
cmp\\t%1, %2\;movge\\t%0, %2
- cmp\\t%1, %2\;movlt\\t%0, %1
cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
[(set_attr "conds" "clob")
- (set_attr "length" "8,8,12")]
+ (set_attr "length" "8,12")]
+)
+
+;; Unsigned maximum: no CC-free constant special cases, so the expander
+;; only wraps the operation in a condition-code clobber.
+(define_expand "umaxsi3"
+ [(parallel [
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (umax:SI (match_operand:SI 1 "s_register_operand" "")
+ (match_operand:SI 2 "arm_rhs_operand" "")))
+ (clobber (reg:CC CC_REGNUM))])]
+ "TARGET_32BIT"
+ ""
)
-(define_insn "umaxsi3"
+(define_insn "*arm_umaxsi3"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
(umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
(set_attr "length" "8,8,12")]
)
+;; Unsigned minimum: like umaxsi3, just adds the CC clobber.
-(define_insn "uminsi3"
+(define_expand "uminsi3"
+ [(parallel [
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (umin:SI (match_operand:SI 1 "s_register_operand" "")
+ (match_operand:SI 2 "arm_rhs_operand" "")))
+ (clobber (reg:CC CC_REGNUM))])]
+ "TARGET_32BIT"
+ ""
+)
+
+(define_insn "*arm_uminsi3"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
(umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
[(match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "s_register_operand" "r")]))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
operands[1], operands[2]);
output_asm_insn (\"cmp\\t%1, %2\", operands);
+ if (TARGET_THUMB2)
+ output_asm_insn (\"ite\t%d3\", operands);
output_asm_insn (\"str%d3\\t%1, %0\", operands);
output_asm_insn (\"str%D3\\t%2, %0\", operands);
return \"\";
"
[(set_attr "conds" "clob")
- (set_attr "length" "12")
+ (set (attr "length")
+ (if_then_else (eq_attr "is_thumb" "yes")
+ (const_int 14)
+ (const_int 12)))
(set_attr "type" "store1")]
)
(match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
(match_operand:SI 1 "s_register_operand" "0,?r")]))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM && !arm_eliminable_register (operands[1])"
+ "TARGET_32BIT && !arm_eliminable_register (operands[1])"
"*
{
enum rtx_code code = GET_CODE (operands[4]);
+ bool need_else;
+
+ if (which_alternative != 0 || operands[3] != const0_rtx
+ || (code != PLUS && code != MINUS && code != IOR && code != XOR))
+ need_else = true;
+ else
+ need_else = false;
operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
operands[2], operands[3]);
output_asm_insn (\"cmp\\t%2, %3\", operands);
+ if (TARGET_THUMB2)
+ {
+ if (need_else)
+ output_asm_insn (\"ite\\t%d5\", operands);
+ else
+ output_asm_insn (\"it\\t%d5\", operands);
+ }
output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
- if (which_alternative != 0 || operands[3] != const0_rtx
- || (code != PLUS && code != MINUS && code != IOR && code != XOR))
+ if (need_else)
output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
return \"\";
}"
[(set_attr "conds" "clob")
- (set_attr "length" "12")]
+ (set (attr "length")
+ (if_then_else (eq_attr "is_thumb" "yes")
+ (const_int 14)
+ (const_int 12)))]
)
\f
[(set (match_operand:DI 0 "s_register_operand" "")
(ashift:DI (match_operand:DI 1 "s_register_operand" "")
(match_operand:SI 2 "reg_or_int_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
if (GET_CODE (operands[2]) == CONST_INT)
{
(ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
"
)
+;; Thumb-1 left shift: shift-by-immediate ("N") into any low register,
+;; or shift-by-register with the source tied to the destination.
-(define_insn "*thumb_ashlsi3"
+(define_insn "*thumb1_ashlsi3"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"lsl\\t%0, %1, %2"
[(set_attr "length" "2")]
)
[(set (match_operand:DI 0 "s_register_operand" "")
(ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
(match_operand:SI 2 "reg_or_int_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
if (GET_CODE (operands[2]) == CONST_INT)
{
(ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
"
)
+;; Thumb-1 arithmetic right shift: immediate ("N") or register form,
+;; the latter with the source tied to the destination.
-(define_insn "*thumb_ashrsi3"
+(define_insn "*thumb1_ashrsi3"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"asr\\t%0, %1, %2"
[(set_attr "length" "2")]
)
[(set (match_operand:DI 0 "s_register_operand" "")
(lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
(match_operand:SI 2 "reg_or_int_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
if (GET_CODE (operands[2]) == CONST_INT)
{
(lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
[(set_attr "conds" "clob")
(set_attr "length" "8")]
"
)
+;; Thumb-1 logical right shift: immediate ("N") or register form,
+;; the latter with the source tied to the destination.
-(define_insn "*thumb_lshrsi3"
+(define_insn "*thumb1_lshrsi3"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"lsr\\t%0, %1, %2"
[(set_attr "length" "2")]
)
[(set (match_operand:SI 0 "s_register_operand" "")
(rotatert:SI (match_operand:SI 1 "s_register_operand" "")
(match_operand:SI 2 "reg_or_int_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
if (GET_CODE (operands[2]) == CONST_INT)
operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
if (GET_CODE (operands[2]) == CONST_INT
&& ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
{
if (GET_CODE (operands [2]) == CONST_INT)
operands [2] = force_reg (SImode, operands[2]);
"
)
+;; Thumb-1 rotate right: register amount only, source tied to the
+;; destination (immediate rotates are converted by the expander above).
-(define_insn "*thumb_rotrsi3"
+(define_insn "*thumb1_rotrsi3"
[(set (match_operand:SI 0 "register_operand" "=l")
(rotatert:SI (match_operand:SI 1 "register_operand" "0")
(match_operand:SI 2 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"ror\\t%0, %0, %2"
[(set_attr "length" "2")]
)
(match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "reg_or_int_operand" "rM")]))]
- "TARGET_ARM"
- "mov%?\\t%0, %1%S3"
+ "TARGET_32BIT"
+ "* return arm_output_shift(operands, 0);"
[(set_attr "predicable" "yes")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
- "TARGET_ARM"
- "mov%?s\\t%0, %1%S3"
+ "TARGET_32BIT"
+ "* return arm_output_shift(operands, 1);"
[(set_attr "conds" "set")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
(match_operand:SI 2 "arm_rhs_operand" "rM")])
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
- "mov%?s\\t%0, %1%S3"
+ "TARGET_32BIT"
+ "* return arm_output_shift(operands, 1);"
[(set_attr "conds" "set")
(set_attr "shift" "1")]
)
-(define_insn "*notsi_shiftsi"
+(define_insn "*arm_notsi_shiftsi"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r")
(const_string "alu_shift_reg")))]
)
-(define_insn "*notsi_shiftsi_compare0"
+(define_insn "*arm_notsi_shiftsi_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r")
(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
"TARGET_ARM"
- "mvn%?s\\t%0, %1%S3"
+ "mvn%.\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
(const_string "alu_shift_reg")))]
)
-(define_insn "*not_shiftsi_compare0_scratch"
+(define_insn "*arm_not_shiftsi_compare0_scratch"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r")
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_ARM"
- "mvn%?s\\t%0, %1%S3"
+ "mvn%.\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
(set (match_operand:SI 0 "register_operand" "")
(lshiftrt:SI (match_dup 4)
(match_operand:SI 3 "const_int_operand" "")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"
{
HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
(clobber (reg:CC CC_REGNUM))])]
"TARGET_EITHER"
"
- if (TARGET_THUMB)
+ if (TARGET_THUMB1)
{
if (GET_CODE (operands[1]) != REG)
operands[1] = force_reg (SImode, operands[1]);
(set_attr "length" "8")]
)
+;; Thumb-1 64-bit negation: zero the high word, negate the low word,
+;; then subtract-with-carry the source high word.  The earlyclobber
+;; ("=&l") keeps the destination from overlapping the source.
-(define_insn "*thumb_negdi2"
+(define_insn "*thumb1_negdi2"
[(set (match_operand:DI 0 "register_operand" "=&l")
(neg:DI (match_operand:DI 1 "register_operand" "l")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
[(set_attr "length" "6")]
)
+;; 32-bit negation via reverse subtract from zero; predicable, no flags.
(define_insn "*arm_negsi2"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"rsb%?\\t%0, %1, #0"
[(set_attr "predicable" "yes")]
)
+;; Thumb-1 32-bit negation (low registers only).
-(define_insn "*thumb_negsi2"
+(define_insn "*thumb1_negsi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(neg:SI (match_operand:SI 1 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"neg\\t%0, %1"
[(set_attr "length" "2")]
)
+;; Single-float negate expander; hard-float with FPA or VFP only.
+;; TARGET_32BIT (rather than TARGET_ARM) also enables it for Thumb-2.
(define_expand "negsf2"
[(set (match_operand:SF 0 "s_register_operand" "")
(neg:SF (match_operand:SF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
""
)
+;; Double-float negate expander; hard-float with FPA or VFP only.
(define_expand "negdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(neg:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"")
;; abssi2 doesn't really clobber the condition codes if a different register
[(parallel
[(set (match_operand:SI 0 "s_register_operand" "")
(abs:SI (match_operand:SI 1 "s_register_operand" "")))
- (clobber (reg:CC CC_REGNUM))])]
- "TARGET_ARM"
- "")
+ (clobber (match_dup 2))])]
+ "TARGET_EITHER"
+ "
+ if (TARGET_THUMB1)
+ operands[2] = gen_rtx_SCRATCH (SImode);
+ else
+ operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
+")
(define_insn "*arm_abssi2"
- [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
+ [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
(set_attr "length" "8")]
)
-(define_insn "*neg_abssi2"
+(define_insn_and_split "*thumb1_abssi2"
+ [(set (match_operand:SI 0 "s_register_operand" "=l")
+ (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
+ (clobber (match_scratch:SI 2 "=&l"))]
+ "TARGET_THUMB1"
+ "#"
+ "TARGET_THUMB1 && reload_completed"
+ [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
+ (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
+ (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
+ ""
+ [(set_attr "length" "6")]
+)
+
+(define_insn "*arm_neg_abssi2"
[(set (match_operand:SI 0 "s_register_operand" "=r,&r")
(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
(clobber (reg:CC CC_REGNUM))]
(set_attr "length" "8")]
)
+;; Thumb-1 branchless negated abs, split after reload:
+;; t = x >> 31 (arithmetic), then -abs(x) = (t - x) ^ t.
+(define_insn_and_split "*thumb1_neg_abssi2"
+ [(set (match_operand:SI 0 "s_register_operand" "=l")
+ (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
+ (clobber (match_scratch:SI 2 "=&l"))]
+ "TARGET_THUMB1"
+ "#"
+ "TARGET_THUMB1 && reload_completed"
+ [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
+ (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
+ (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
+ ""
+ [(set_attr "length" "6")]
+)
+
+;; Single-float absolute value expander; hard-float configurations only.
(define_expand "abssf2"
[(set (match_operand:SF 0 "s_register_operand" "")
(abs:SF (match_operand:SF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"")
+;; Double-float absolute value expander; hard-float configurations only.
(define_expand "absdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(abs:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"")
+;; Single-float square root expander; needs FPA or VFP hard float.
(define_expand "sqrtsf2"
[(set (match_operand:SF 0 "s_register_operand" "")
(sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"")
+;; Double-float square root expander; needs FPA or VFP hard float.
(define_expand "sqrtdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"")
(define_insn_and_split "one_cmpldi2"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 0) (not:SI (match_dup 1)))
(set (match_dup 2) (not:SI (match_dup 3)))]
"
+;; 32-bit bitwise NOT; predicable, flags untouched.
(define_insn "*arm_one_cmplsi2"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"mvn%?\\t%0, %1"
[(set_attr "predicable" "yes")]
)
+;; Thumb-1 bitwise NOT (low registers only).
-(define_insn "*thumb_one_cmplsi2"
+(define_insn "*thumb1_one_cmplsi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(not:SI (match_operand:SI 1 "register_operand" "l")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"mvn\\t%0, %1"
[(set_attr "length" "2")]
)
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_dup 1)))]
- "TARGET_ARM"
- "mvn%?s\\t%0, %1"
+ "TARGET_32BIT"
+ "mvn%.\\t%0, %1"
[(set_attr "conds" "set")]
)
(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
- "mvn%?s\\t%0, %1"
+ "TARGET_32BIT"
+ "mvn%.\\t%0, %1"
[(set_attr "conds" "set")]
)
\f
(define_expand "floatsisf2"
[(set (match_operand:SF 0 "s_register_operand" "")
(float:SF (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
(define_expand "floatsidf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(float:DF (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
(define_expand "fix_truncsfsi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
(define_expand "fix_truncdfsi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
if (TARGET_MAVERICK)
{
[(set (match_operand:SF 0 "s_register_operand" "")
(float_truncate:SF
(match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
""
)
\f
;; Zero and sign extension instructions.
-(define_insn "zero_extendsidi2"
+;; Standard-named expander for SImode -> DImode zero extension on any
+;; 32-bit core.  No expansion code; instruction selection is left to
+;; the *arm_zero_extendsidi2 insn pattern that follows.
+(define_expand "zero_extendsidi2"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
+ "TARGET_32BIT"
+ ""
+)
+
+(define_insn "*arm_zero_extendsidi2"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_ARM"
(set_attr "predicable" "yes")]
)
-(define_insn "zero_extendqidi2"
+;; Standard-named expander for QImode -> DImode zero extension on any
+;; 32-bit core; the source may be a register or memory
+;; (nonimmediate_operand).  Matching is done by the insn pattern below.
+(define_expand "zero_extendqidi2"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
+ "TARGET_32BIT"
+ ""
+)
+
+;; ARM-state QImode -> DImode zero extension (two instructions, length 8):
+;;   register source: AND #255 into the low word, MOV #0 into the high word
+;;   memory source:   LDRB into the low word, MOV #0 into the high word
+;; The size suffix in ldr%(b%) is wrapped in %(...%) bracketing —
+;; presumably so the output routine can position the suffix relative to
+;; the condition for unified vs. divided assembler syntax (TODO confirm).
+(define_insn "*arm_zero_extendqidi2"
[(set (match_operand:DI 0 "s_register_operand" "=r,r")
(zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
"TARGET_ARM"
"@
and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
- ldr%?b\\t%Q0, %1\;mov%?\\t%R0, #0"
+ ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
[(set_attr "length" "8")
(set_attr "predicable" "yes")
(set_attr "type" "*,load_byte")
(set_attr "neg_pool_range" "*,4084")]
)
-(define_insn "extendsidi2"
+;; Standard-named expander for SImode -> DImode sign extension on any
+;; 32-bit core.  No expansion code; matching is left to the
+;; *arm_extendsidi2 insn pattern that follows.
+(define_expand "extendsidi2"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
+ "TARGET_32BIT"
+ ""
+)
+
+(define_insn "*arm_extendsidi2"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_ARM"
"TARGET_EITHER"
"
{
- if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
+ if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_ZERO_EXTEND (SImode, operands[1])));
}"
)
-(define_insn "*thumb_zero_extendhisi2"
+(define_insn "*thumb1_zero_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
- "TARGET_THUMB && !arm_arch6"
+ "TARGET_THUMB1 && !arm_arch6"
"*
rtx mem = XEXP (operands[1], 0);
(set_attr "pool_range" "60")]
)
-(define_insn "*thumb_zero_extendhisi2_v6"
+(define_insn "*thumb1_zero_extendhisi2_v6"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
- "TARGET_THUMB && arm_arch6"
+ "TARGET_THUMB1 && arm_arch6"
"*
rtx mem;
[(set (match_operand:SI 0 "s_register_operand" "=r")
(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
- "ldr%?h\\t%0, %1"
+ "ldr%(h%)\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "256")
"TARGET_ARM && arm_arch6"
"@
uxth%?\\t%0, %1
- ldr%?h\\t%0, %1"
+ ldr%(h%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,256")
[(set (match_operand:SI 0 "s_register_operand" "=r")
(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
(match_operand:SI 2 "s_register_operand" "r")))]
- "TARGET_ARM && arm_arch6"
+ "TARGET_INT_SIMD"
"uxtah%?\\t%0, %2, %1"
[(set_attr "type" "alu_shift")
(set_attr "predicable" "yes")]
"
)
+;; Thumb-1 zero-extending byte load from memory (LDRB), for pre-v6
+;; cores only; architecture v6 cores use the _v6 variant below, which
+;; also accepts a register source via UXTB.
-(define_insn "*thumb_zero_extendqisi2"
+(define_insn "*thumb1_zero_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
- "TARGET_THUMB && !arm_arch6"
+ "TARGET_THUMB1 && !arm_arch6"
"ldrb\\t%0, %1"
[(set_attr "length" "2")
(set_attr "type" "load_byte")
(set_attr "pool_range" "32")]
)
-(define_insn "*thumb_zero_extendqisi2_v6"
+(define_insn "*thumb1_zero_extendqisi2_v6"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
- "TARGET_THUMB && arm_arch6"
+ "TARGET_THUMB1 && arm_arch6"
"@
uxtb\\t%0, %1
ldrb\\t%0, %1"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
"TARGET_ARM && !arm_arch6"
- "ldr%?b\\t%0, %1\\t%@ zero_extendqisi2"
+ "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "4096")
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
"TARGET_ARM && arm_arch6"
"@
- uxtb%?\\t%0, %1
- ldr%?b\\t%0, %1\\t%@ zero_extendqisi2"
+ uxtb%(%)\\t%0, %1
+ ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,4096")
[(set (match_operand:SI 0 "s_register_operand" "=r")
(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
(match_operand:SI 2 "s_register_operand" "r")))]
- "TARGET_ARM && arm_arch6"
+ "TARGET_INT_SIMD"
"uxtab%?\\t%0, %2, %1"
[(set_attr "predicable" "yes")
+ (set_attr "insn" "xtab")
(set_attr "type" "alu_shift")]
)
[(set (match_operand:SI 0 "s_register_operand" "")
(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
(clobber (match_operand:SI 2 "s_register_operand" ""))]
- "TARGET_ARM && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
+ "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
+ ""
+)
+
+(define_split
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
+ (clobber (match_operand:SI 2 "s_register_operand" ""))]
+ "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
""
[(set (reg:CC_Z CC_REGNUM)
(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"tst\\t%0, #255"
[(set_attr "conds" "set")]
)
{
if (GET_CODE (operands[1]) == MEM)
{
- if (TARGET_THUMB)
+ if (TARGET_THUMB1)
{
- emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1]));
+ emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
DONE;
}
else if (arm_arch4)
if (arm_arch6)
{
- if (TARGET_THUMB)
- emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1]));
+ if (TARGET_THUMB1)
+ emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
else
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_SIGN_EXTEND (SImode, operands[1])));
}"
)
-(define_insn "thumb_extendhisi2"
+(define_insn "thumb1_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=l")
(sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
(clobber (match_scratch:SI 2 "=&l"))]
- "TARGET_THUMB && !arm_arch6"
+ "TARGET_THUMB1 && !arm_arch6"
"*
{
rtx ops[4];
;; we try to verify the operands. Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
-(define_insn "*thumb_extendhisi2_insn_v6"
+(define_insn "*thumb1_extendhisi2_insn_v6"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
(clobber (match_scratch:SI 2 "=X,l"))]
- "TARGET_THUMB && arm_arch6"
+ "TARGET_THUMB1 && arm_arch6"
"*
{
rtx ops[4];
(set_attr "pool_range" "*,1020")]
)
+;; This pattern will only be used when the ldsh (sign-extending
+;; halfword load) instruction is not available.
(define_expand "extendhisi2_mem"
[(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
(set (match_dup 3)
[(set (match_operand:SI 0 "s_register_operand" "=r")
(sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
- "ldr%?sh\\t%0, %1"
+ "ldr%(sh%)\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "256")
(set_attr "neg_pool_range" "244")]
)
+;; ??? Check Thumb-2 pool range
(define_insn "*arm_extendhisi2_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
- "TARGET_ARM && arm_arch6"
+ "TARGET_32BIT && arm_arch6"
"@
sxth%?\\t%0, %1
- ldr%?sh\\t%0, %1"
+ ldr%(sh%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,256")
[(set (match_operand:SI 0 "s_register_operand" "=r")
(plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
(match_operand:SI 2 "s_register_operand" "r")))]
- "TARGET_ARM && arm_arch6"
+ "TARGET_INT_SIMD"
"sxtah%?\\t%0, %2, %1"
)
}"
)
-(define_insn "*extendqihi_insn"
+(define_insn "*arm_extendqihi_insn"
[(set (match_operand:HI 0 "s_register_operand" "=r")
(sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
"TARGET_ARM && arm_arch4"
- "ldr%?sb\\t%0, %1"
+ "ldr%(sb%)\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "256")
[(set (match_operand:SI 0 "s_register_operand" "=r")
(sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
- "ldr%?sb\\t%0, %1"
+ "ldr%(sb%)\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "256")
"TARGET_ARM && arm_arch6"
"@
sxtb%?\\t%0, %1
- ldr%?sb\\t%0, %1"
+ ldr%(sb%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,256")
[(set (match_operand:SI 0 "s_register_operand" "=r")
(plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
(match_operand:SI 2 "s_register_operand" "r")))]
- "TARGET_ARM && arm_arch6"
+ "TARGET_INT_SIMD"
"sxtab%?\\t%0, %2, %1"
[(set_attr "type" "alu_shift")
+ (set_attr "insn" "xtab")
(set_attr "predicable" "yes")]
)
-(define_insn "*thumb_extendqisi2"
+(define_insn "*thumb1_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
- "TARGET_THUMB && !arm_arch6"
+ "TARGET_THUMB1 && !arm_arch6"
"*
{
rtx ops[3];
(set_attr "pool_range" "32,32")]
)
-(define_insn "*thumb_extendqisi2_v6"
+(define_insn "*thumb1_extendqisi2_v6"
[(set (match_operand:SI 0 "register_operand" "=l,l,l")
(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
- "TARGET_THUMB && arm_arch6"
+ "TARGET_THUMB1 && arm_arch6"
"*
{
rtx ops[3];
+;; SFmode -> DFmode extension; hard-float targets only.  Condition
+;; widened from TARGET_ARM to TARGET_32BIT (ARM or Thumb-2 state).
(define_expand "extendsfdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
""
)
\f
(match_operand:DI 1 "general_operand" ""))]
"TARGET_EITHER"
"
- if (TARGET_THUMB)
+ if (can_create_pseudo_p ())
{
- if (!no_new_pseudos)
- {
- if (GET_CODE (operands[0]) != REG)
- operands[1] = force_reg (DImode, operands[1]);
- }
+ if (GET_CODE (operands[0]) != REG)
+ operands[1] = force_reg (DImode, operands[1]);
}
"
)
[(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
(match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
"TARGET_ARM
- && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
- && !TARGET_IWMMXT"
+ && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
+ && !TARGET_IWMMXT
+ && ( register_operand (operands[0], DImode)
+ || register_operand (operands[1], DImode))"
"*
switch (which_alternative)
{
(define_split
[(set (match_operand:ANY64 0 "arm_general_register_operand" "")
(match_operand:ANY64 1 "const_double_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& reload_completed
&& (arm_const_double_inline_cost (operands[1])
<= ((optimize_size || arm_ld_sched) ? 3 : 4))"
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
-(define_insn "*thumb_movdi_insn"
+(define_insn "*thumb1_movdi_insn"
[(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
(match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
&& ( register_operand (operands[0], DImode)
|| register_operand (operands[1], DImode))"
(match_operand:SI 1 "general_operand" ""))]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ rtx base, offset, tmp;
+
+ if (TARGET_32BIT)
{
/* Everything except mem = const or mem = mem can be done easily. */
if (GET_CODE (operands[0]) == MEM)
{
arm_split_constant (SET, SImode, NULL_RTX,
INTVAL (operands[1]), operands[0], NULL_RTX,
- optimize && !no_new_pseudos);
+ optimize && can_create_pseudo_p ());
DONE;
}
}
- else /* TARGET_THUMB.... */
+ else /* TARGET_THUMB1... */
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[0]) != REG)
operands[1] = force_reg (SImode, operands[1]);
}
}
-
- if (flag_pic
- && (CONSTANT_P (operands[1])
- || symbol_mentioned_p (operands[1])
- || label_mentioned_p (operands[1])))
- operands[1] = legitimize_pic_address (operands[1], SImode,
- (no_new_pseudos ? operands[0] : 0));
+
+ if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
+ {
+ split_const (operands[1], &base, &offset);
+ if (GET_CODE (base) == SYMBOL_REF
+ && !offset_within_block_p (base, INTVAL (offset)))
+ {
+ tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
+ emit_move_insn (tmp, base);
+ emit_insn (gen_addsi3 (operands[0], tmp, offset));
+ DONE;
+ }
+ }
+
+ /* Recognize the case where operand[1] is a reference to thread-local
+ data and load its address to a register. */
+ if (arm_tls_referenced_p (operands[1]))
+ {
+ rtx tmp = operands[1];
+ rtx addend = NULL;
+
+ if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
+ {
+ addend = XEXP (XEXP (tmp, 0), 1);
+ tmp = XEXP (XEXP (tmp, 0), 0);
+ }
+
+ gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
+ gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
+
+ tmp = legitimize_tls_address (tmp,
+ !can_create_pseudo_p () ? operands[0] : 0);
+ if (addend)
+ {
+ tmp = gen_rtx_PLUS (SImode, tmp, addend);
+ tmp = force_operand (tmp, operands[0]);
+ }
+ operands[1] = tmp;
+ }
+ else if (flag_pic
+ && (CONSTANT_P (operands[1])
+ || symbol_mentioned_p (operands[1])
+ || label_mentioned_p (operands[1])))
+ operands[1] = legitimize_pic_address (operands[1], SImode,
+ (!can_create_pseudo_p ()
+ ? operands[0]
+ : 0));
"
)
(define_insn "*arm_movsi_insn"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r, m")
- (match_operand:SI 1 "general_operand" "rI,K,mi,r"))]
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m")
+ (match_operand:SI 1 "general_operand" "rI,K,N,mi,r"))]
"TARGET_ARM && ! TARGET_IWMMXT
&& !(TARGET_HARD_FLOAT && TARGET_VFP)
&& ( register_operand (operands[0], SImode)
"@
mov%?\\t%0, %1
mvn%?\\t%0, #%B1
+ movw%?\\t%0, %1
ldr%?\\t%0, %1
str%?\\t%1, %0"
- [(set_attr "type" "*,*,load1,store1")
+ [(set_attr "type" "*,*,*,load1,store1")
(set_attr "predicable" "yes")
- (set_attr "pool_range" "*,*,4096,*")
- (set_attr "neg_pool_range" "*,*,4084,*")]
+ (set_attr "pool_range" "*,*,*,4096,*")
+ (set_attr "neg_pool_range" "*,*,*,4084,*")]
)
(define_split
[(set (match_operand:SI 0 "arm_general_register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (!(const_ok_for_arm (INTVAL (operands[1]))
|| const_ok_for_arm (~INTVAL (operands[1]))))"
[(clobber (const_int 0))]
"
)
-(define_insn "*thumb_movsi_insn"
+(define_insn "*thumb1_movsi_insn"
[(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
(match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& ( register_operand (operands[0], SImode)
|| register_operand (operands[1], SImode))"
"@
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
- "TARGET_THUMB && CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'J')"
+ "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
[(set (match_dup 0) (match_dup 1))
(set (match_dup 0) (neg:SI (match_dup 0)))]
"operands[1] = GEN_INT (- INTVAL (operands[1]));"
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
- "TARGET_THUMB && CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'K')"
+ "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
[(set (match_dup 0) (match_dup 1))
(set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
"
(set (attr "neg_pool_range") (const_int 4084))]
)
-(define_insn "pic_load_addr_thumb"
+(define_insn "pic_load_addr_thumb1"
[(set (match_operand:SI 0 "s_register_operand" "=l")
(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
- "TARGET_THUMB && flag_pic"
+ "TARGET_THUMB1 && flag_pic"
"ldr\\t%0, %1"
[(set_attr "type" "load1")
(set (attr "pool_range") (const_int 1024))]
[(set (match_operand:SI 0 "s_register_operand" "")
(unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
"TARGET_ARM && flag_pic"
- "operands[2] = pic_offset_table_rtx;"
+ "operands[2] = cfun->machine->pic_reg;"
)
(define_insn "*pic_load_addr_based_insn"
(unspec:SI [(match_operand 1 "" "")
(match_operand 2 "s_register_operand" "r")]
UNSPEC_PIC_SYM))]
- "TARGET_EITHER && flag_pic && operands[2] == pic_offset_table_rtx"
+ "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg"
"*
#ifdef AOF_ASSEMBLER
operands[1] = aof_pic_entry (operands[1]);
)
+;; Thumb-1: add the PC (const_int 4 accounts for the pipeline offset in
+;; Thumb state) to the PIC offset in operand 1, which is tied to the
+;; destination via the "0" constraint.  The pattern is rewritten so the
+;; label is carried as an integer in operand 2 rather than as a
+;; label_ref in a (use ...): an "LPIC<n>" local label is emitted
+;; immediately before the add, marking the point the offset is
+;; computed relative to.
(define_insn "pic_add_dot_plus_four"
- [(set (match_operand:SI 0 "register_operand" "+r")
- (unspec:SI [(plus:SI (match_dup 0)
- (const (plus:SI (pc) (const_int 4))))]
- UNSPEC_PIC_BASE))
- (use (label_ref (match_operand 1 "" "")))]
- "TARGET_THUMB && flag_pic"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
+ (const (plus:SI (pc) (const_int 4))))
+ (match_operand 2 "" "")]
+ UNSPEC_PIC_BASE))]
+ "TARGET_THUMB1"
"*
- (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
- CODE_LABEL_NUMBER (operands[1]));
+ (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
+ INTVAL (operands[2]));
return \"add\\t%0, %|pc\";
"
[(set_attr "length" "2")]
)
+;; ARM-state counterpart of pic_add_dot_plus_four (const_int 8 for the
+;; ARM-state pipeline offset, hence "plus_eight").  Emits the LPIC<n>
+;; local label from operand 2 and then "add %0, pc, %1".  Predicable.
(define_insn "pic_add_dot_plus_eight"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
+ (const (plus:SI (pc) (const_int 8))))
+ (match_operand 2 "" "")]
+ UNSPEC_PIC_BASE))]
+ "TARGET_ARM"
+ "*
+ (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
+ INTVAL (operands[2]));
+ return \"add%?\\t%0, %|pc, %1\";
+ "
+ [(set_attr "predicable" "yes")]
+)
+
+;; Load a word through the same pc+8 UNSPEC_PIC_BASE address:
+;; "ldr %0, [pc, %1]".  Emits the LPIC<n> label from operand 2 first.
+;; Used for TLS address computation; normally produced by the peephole
+;; below from a pic_add_dot_plus_eight followed by a load.
+(define_insn "tls_load_dot_plus_eight"
[(set (match_operand:SI 0 "register_operand" "+r")
- (unspec:SI [(plus:SI (match_dup 0)
- (const (plus:SI (pc) (const_int 8))))]
- UNSPEC_PIC_BASE))
- (use (label_ref (match_operand 1 "" "")))]
- "TARGET_ARM && flag_pic"
+ (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
+ (const (plus:SI (pc) (const_int 8))))
+ (match_operand 2 "" "")]
+ UNSPEC_PIC_BASE)))]
+ "TARGET_ARM"
"*
- (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
- CODE_LABEL_NUMBER (operands[1]));
- return \"add%?\\t%0, %|pc, %0\";
+ (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
+ INTVAL (operands[2]));
+ return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
"
[(set_attr "predicable" "yes")]
)
+;; PIC references to local variables can generate pic_add_dot_plus_eight
+;; followed by a load. These sequences can be crunched down to
+;; tls_load_dot_plus_eight by a peephole.
+
+;; Crunch a pic_add_dot_plus_eight whose result is only used by an
+;; immediately following word load into a single tls_load_dot_plus_eight,
+;; valid when the intermediate address register (operand 0) is dead
+;; after the load.
+;; NOTE(review): this peephole still matches the old label_ref/(use ...)
+;; shape of the pattern, while pic_add_dot_plus_eight above has been
+;; rewritten to carry the label as an integer operand — confirm the two
+;; are kept in sync.
+(define_peephole2
+ [(parallel [(set (match_operand:SI 0 "register_operand" "")
+ (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
+ (const (plus:SI (pc) (const_int 8))))]
+ UNSPEC_PIC_BASE))
+ (use (label_ref (match_operand 1 "" "")))])
+ (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
+ "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
+ [(parallel [(set (match_dup 2)
+ (mem:SI (unspec:SI [(plus:SI (match_dup 3)
+ (const (plus:SI (pc) (const_int 8))))]
+ UNSPEC_PIC_BASE)))
+ (use (label_ref (match_dup 1)))])]
+ ""
+)
+
+;; VxWorks RTP PIC: load a word at base register plus a symbolic
+;; 12-bit offset wrapped in UNSPEC_PIC_OFFSET (an offset that has been
+;; treated correctly for PIC usage; see the UNSPEC_PIC_OFFSET constant).
+(define_insn "pic_offset_arm"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
+ (unspec:SI [(match_operand:SI 2 "" "X")]
+ UNSPEC_PIC_OFFSET))))]
+ "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
+ "ldr%?\\t%0, [%1,%2]"
+ [(set_attr "type" "load1")]
+)
+
+;; Re-establish the PIC base register at a builtin-setjmp receiver.
+;; r3 is clobbered by set/longjmp and so is free as scratch (passed as
+;; a register mask, 1UL << 3, in the updated interface).  Now skipped
+;; entirely when no fixed PIC register is configured
+;; (arm_pic_register == INVALID_REGNUM).
(define_expand "builtin_setjmp_receiver"
[(label_ref (match_operand 0 "" ""))]
"flag_pic"
{
/* r3 is clobbered by set/longjmp, so we can use it as a scratch
register. */
- arm_load_pic_register (3);
+ if (arm_pic_register != INVALID_REGNUM)
+ arm_load_pic_register (1UL << 3);
DONE;
}")
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(match_dup 1))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
cmp%?\\t%0, #0
- sub%?s\\t%0, %1, #0"
+ sub%.\\t%0, %1, #0"
[(set_attr "conds" "set")]
)
(define_expand "storehi_single_op"
[(set (match_operand:HI 0 "memory_operand" "")
(match_operand:HI 1 "general_operand" ""))]
- "TARGET_ARM && arm_arch4"
+ "TARGET_32BIT && arm_arch4"
"
if (!s_register_operand (operands[1], HImode))
operands[1] = copy_to_mode_reg (HImode, operands[1]);
"
if (TARGET_ARM)
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[0]) == MEM)
{
emit_insn (gen_movsi (reg, GEN_INT (val)));
operands[1] = gen_lowpart (HImode, reg);
}
- else if (arm_arch4 && optimize && !no_new_pseudos
+ else if (arm_arch4 && optimize && can_create_pseudo_p ()
&& GET_CODE (operands[1]) == MEM)
{
rtx reg = gen_reg_rtx (SImode);
DONE;
}
}
- else /* TARGET_THUMB */
+ else if (TARGET_THUMB2)
+ {
+ /* Thumb-2 can do everything except mem=mem and mem=const easily. */
+ if (can_create_pseudo_p ())
+ {
+ if (GET_CODE (operands[0]) != REG)
+ operands[1] = force_reg (HImode, operands[1]);
+ /* Zero extend a constant, and keep it in an SImode reg. */
+ else if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ rtx reg = gen_reg_rtx (SImode);
+ HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
+
+ emit_insn (gen_movsi (reg, GEN_INT (val)));
+ operands[1] = gen_lowpart (HImode, reg);
+ }
+ }
+ }
+ else /* TARGET_THUMB1 */
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[1]) == CONST_INT)
{
operands[1] = force_reg (HImode, operands[1]);
}
else if (GET_CODE (operands[1]) == CONST_INT
- && !CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'I'))
+ && !satisfies_constraint_I (operands[1]))
{
/* Handle loading a large integer during reload. */
"
)
-(define_insn "*thumb_movhi_insn"
+(define_insn "*thumb1_movhi_insn"
[(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
(match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& ( register_operand (operands[0], HImode)
|| register_operand (operands[1], HImode))"
"*
"@
mov%?\\t%0, %1\\t%@ movhi
mvn%?\\t%0, #%B1\\t%@ movhi
- str%?h\\t%1, %0\\t%@ movhi
- ldr%?h\\t%0, %1\\t%@ movhi"
+ str%(h%)\\t%1, %0\\t%@ movhi
+ ldr%(h%)\\t%0, %1\\t%@ movhi"
[(set_attr "type" "*,*,store1,load1")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,*,*,256")
[(set (match_operand:HI 0 "memory_operand" "")
(match_operand:HI 1 "register_operand" ""))
(clobber (match_operand:DI 2 "register_operand" ""))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"
if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
&& REGNO (operands[1]) <= LAST_LO_REGNUM)
"
/* Everything except mem = const or mem = mem can be done easily */
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[1]) == CONST_INT)
{
}
else if (TARGET_THUMB
&& GET_CODE (operands[1]) == CONST_INT
- && !CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
+ && !satisfies_constraint_I (operands[1]))
{
/* Handle loading a large integer during reload. */
+;; QImode register/memory moves; at least one operand must be a
+;; register (enforced in the condition).  Alternatives:
+;;   MOV for "I" immediates, MVN for "K" immediates (output as #%B1,
+;;   the bitwise complement), LDRB from memory, STRB to memory.
+;; Condition widened from TARGET_ARM to TARGET_32BIT; the %(b%)
+;; bracketing around the size suffix is presumably for unified vs.
+;; divided assembler syntax (TODO confirm).
(define_insn "*arm_movqi_insn"
[(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
(match_operand:QI 1 "general_operand" "rI,K,m,r"))]
- "TARGET_ARM
+ "TARGET_32BIT
&& ( register_operand (operands[0], QImode)
|| register_operand (operands[1], QImode))"
"@
mov%?\\t%0, %1
mvn%?\\t%0, #%B1
- ldr%?b\\t%0, %1
- str%?b\\t%1, %0"
+ ldr%(b%)\\t%0, %1
+ str%(b%)\\t%1, %0"
[(set_attr "type" "*,*,load1,store1")
(set_attr "predicable" "yes")]
)
-(define_insn "*thumb_movqi_insn"
+(define_insn "*thumb1_movqi_insn"
[(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
(match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& ( register_operand (operands[0], QImode)
|| register_operand (operands[1], QImode))"
"@
(match_operand:SF 1 "general_operand" ""))]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
if (GET_CODE (operands[0]) == MEM)
operands[1] = force_reg (SFmode, operands[1]);
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[0]) != REG)
operands[1] = force_reg (SFmode, operands[1]);
"
)
+;; Transform a floating-point move of a constant into a core register into
+;; an SImode operation.
(define_split
- [(set (match_operand:SF 0 "nonimmediate_operand" "")
+ [(set (match_operand:SF 0 "arm_general_register_operand" "")
(match_operand:SF 1 "immediate_operand" ""))]
- "TARGET_ARM
- && !(TARGET_HARD_FLOAT && TARGET_FPA)
+ "TARGET_32BIT
&& reload_completed
&& GET_CODE (operands[1]) == CONST_DOUBLE"
[(set (match_dup 2) (match_dup 3))]
)
;;; ??? This should have alternatives for constants.
-(define_insn "*thumb_movsf_insn"
+(define_insn "*thumb1_movsf_insn"
[(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
(match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& ( register_operand (operands[0], SFmode)
|| register_operand (operands[1], SFmode))"
"@
(match_operand:DF 1 "general_operand" ""))]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
if (GET_CODE (operands[0]) == MEM)
operands[1] = force_reg (DFmode, operands[1]);
}
else /* TARGET_THUMB */
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
{
if (GET_CODE (operands[0]) != REG)
operands[1] = force_reg (DFmode, operands[1]);
[(match_operand:DF 0 "arm_reload_memory_operand" "=o")
(match_operand:DF 1 "s_register_operand" "r")
(match_operand:SI 2 "s_register_operand" "=&r")]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
{
enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
[(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
"TARGET_ARM && TARGET_SOFT_FLOAT
- "
+ && ( register_operand (operands[0], DFmode)
+ || register_operand (operands[1], DFmode))"
"*
switch (which_alternative)
{
(define_insn "*thumb_movdf_insn"
[(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
(match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& ( register_operand (operands[0], DFmode)
|| register_operand (operands[1], DFmode))"
"*
(define_expand "movxf"
[(set (match_operand:XF 0 "general_operand" "")
(match_operand:XF 1 "general_operand" ""))]
- "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
"
if (GET_CODE (operands[0]) == MEM)
operands[1] = force_reg (XFmode, operands[1]);
"
)
-;; Vector Moves
-(define_expand "movv2si"
- [(set (match_operand:V2SI 0 "nonimmediate_operand" "")
- (match_operand:V2SI 1 "general_operand" ""))]
- "TARGET_REALLY_IWMMXT"
-{
-})
-
-(define_expand "movv4hi"
- [(set (match_operand:V4HI 0 "nonimmediate_operand" "")
- (match_operand:V4HI 1 "general_operand" ""))]
- "TARGET_REALLY_IWMMXT"
-{
-})
-
-(define_expand "movv8qi"
- [(set (match_operand:V8QI 0 "nonimmediate_operand" "")
- (match_operand:V8QI 1 "general_operand" ""))]
- "TARGET_REALLY_IWMMXT"
-{
-})
\f
;; load- and store-multiple insns
[(match_par_dup 3 [(set (match_operand:SI 0 "" "")
(match_operand:SI 1 "" ""))
(use (match_operand:SI 2 "" ""))])]
- "TARGET_ARM"
+ "TARGET_32BIT"
{
HOST_WIDE_INT offset = 0;
(mem:SI (plus:SI (match_dup 2) (const_int 8))))
(set (match_operand:SI 6 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 5"
- "ldm%?ia\\t%1!, {%3, %4, %5, %6}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
+ "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")]
)
-(define_insn "*ldmsi_postinc4_thumb"
+(define_insn "*ldmsi_postinc4_thumb1"
[(match_parallel 0 "load_multiple_operation"
[(set (match_operand:SI 1 "s_register_operand" "=l")
(plus:SI (match_operand:SI 2 "s_register_operand" "1")
(mem:SI (plus:SI (match_dup 2) (const_int 8))))
(set (match_operand:SI 6 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
- "TARGET_THUMB && XVECLEN (operands[0], 0) == 5"
+ "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
"ldmia\\t%1!, {%3, %4, %5, %6}"
[(set_attr "type" "load4")]
)
(mem:SI (plus:SI (match_dup 2) (const_int 4))))
(set (match_operand:SI 5 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
- "ldm%?ia\\t%1!, {%3, %4, %5}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
+ "ldm%(ia%)\\t%1!, {%3, %4, %5}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")]
)
(mem:SI (match_dup 2)))
(set (match_operand:SI 4 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
- "ldm%?ia\\t%1!, {%3, %4}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
+ "ldm%(ia%)\\t%1!, {%3, %4}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")]
)
(mem:SI (plus:SI (match_dup 1) (const_int 8))))
(set (match_operand:SI 5 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
- "ldm%?ia\\t%1, {%2, %3, %4, %5}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
+ "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")]
)
(mem:SI (plus:SI (match_dup 1) (const_int 4))))
(set (match_operand:SI 4 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
- "ldm%?ia\\t%1, {%2, %3, %4}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
+ "ldm%(ia%)\\t%1, {%2, %3, %4}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")]
)
(mem:SI (match_operand:SI 1 "s_register_operand" "r")))
(set (match_operand:SI 3 "arm_hard_register_operand" "")
(mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 2"
- "ldm%?ia\\t%1, {%2, %3}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
+ "ldm%(ia%)\\t%1, {%2, %3}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")]
)
[(match_par_dup 3 [(set (match_operand:SI 0 "" "")
(match_operand:SI 1 "" ""))
(use (match_operand:SI 2 "" ""))])]
- "TARGET_ARM"
+ "TARGET_32BIT"
{
HOST_WIDE_INT offset = 0;
(match_operand:SI 5 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
(match_operand:SI 6 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 5"
- "stm%?ia\\t%1!, {%3, %4, %5, %6}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
+ "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
[(set_attr "predicable" "yes")
(set_attr "type" "store4")]
)
-(define_insn "*stmsi_postinc4_thumb"
+(define_insn "*stmsi_postinc4_thumb1"
[(match_parallel 0 "store_multiple_operation"
[(set (match_operand:SI 1 "s_register_operand" "=l")
(plus:SI (match_operand:SI 2 "s_register_operand" "1")
(match_operand:SI 5 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
(match_operand:SI 6 "arm_hard_register_operand" ""))])]
- "TARGET_THUMB && XVECLEN (operands[0], 0) == 5"
+ "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
"stmia\\t%1!, {%3, %4, %5, %6}"
[(set_attr "type" "store4")]
)
(match_operand:SI 4 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
(match_operand:SI 5 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
- "stm%?ia\\t%1!, {%3, %4, %5}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
+ "stm%(ia%)\\t%1!, {%3, %4, %5}"
[(set_attr "predicable" "yes")
(set_attr "type" "store3")]
)
(match_operand:SI 3 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
(match_operand:SI 4 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
- "stm%?ia\\t%1!, {%3, %4}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
+ "stm%(ia%)\\t%1!, {%3, %4}"
[(set_attr "predicable" "yes")
(set_attr "type" "store2")]
)
(match_operand:SI 4 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
(match_operand:SI 5 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
- "stm%?ia\\t%1, {%2, %3, %4, %5}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
+ "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
[(set_attr "predicable" "yes")
(set_attr "type" "store4")]
)
(match_operand:SI 3 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
(match_operand:SI 4 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
- "stm%?ia\\t%1, {%2, %3, %4}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
+ "stm%(ia%)\\t%1, {%2, %3, %4}"
[(set_attr "predicable" "yes")
(set_attr "type" "store3")]
)
(match_operand:SI 2 "arm_hard_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
(match_operand:SI 3 "arm_hard_register_operand" ""))])]
- "TARGET_ARM && XVECLEN (operands[0], 0) == 2"
- "stm%?ia\\t%1, {%2, %3}"
+ "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
+ "stm%(ia%)\\t%1, {%2, %3}"
[(set_attr "predicable" "yes")
(set_attr "type" "store2")]
)
(match_operand:SI 3 "const_int_operand" "")]
"TARGET_EITHER"
"
- if (TARGET_ARM)
+ if (TARGET_32BIT)
{
if (arm_gen_movmemqi (operands))
DONE;
FAIL;
}
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
{
if ( INTVAL (operands[3]) != 4
|| INTVAL (operands[2]) > 48)
(clobber (match_scratch:SI 4 "=&l"))
(clobber (match_scratch:SI 5 "=&l"))
(clobber (match_scratch:SI 6 "=&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"* return thumb_output_move_mem_multiple (3, operands);"
[(set_attr "length" "4")
; This isn't entirely accurate... It loads as well, but in terms of
(plus:SI (match_dup 3) (const_int 8)))
(clobber (match_scratch:SI 4 "=&l"))
(clobber (match_scratch:SI 5 "=&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"* return thumb_output_move_mem_multiple (2, operands);"
[(set_attr "length" "4")
; This isn't entirely accurate... It loads as well, but in terms of
(match_operand:SI 2 "nonmemory_operand" "")])
(label_ref (match_operand 3 "" ""))
(pc)))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"
- if (thumb_cmpneg_operand (operands[2], SImode))
+ if (thumb1_cmpneg_operand (operands[2], SImode))
{
emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
operands[3], operands[0]));
DONE;
}
- if (!thumb_cmp_operand (operands[2], SImode))
+ if (!thumb1_cmp_operand (operands[2], SImode))
operands[2] = force_reg (SImode, operands[2]);
")
[(set (pc) (if_then_else
(match_operator 0 "arm_comparison_operator"
[(match_operand:SI 1 "s_register_operand" "l,*h")
- (match_operand:SI 2 "thumb_cmp_operand" "lI*h,*r")])
+ (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
(label_ref (match_operand 3 "" ""))
(pc)))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
output_asm_insn (\"cmp\\t%1, %2\", operands);
[(set (pc) (if_then_else
(match_operator 4 "arm_comparison_operator"
[(match_operand:SI 1 "s_register_operand" "l,0")
- (match_operand:SI 2 "thumb_cmpneg_operand" "L,J")])
+ (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (match_scratch:SI 0 "=l,l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
(pc)))
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
(match_dup 1))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*{
if (which_alternative == 0)
output_asm_insn (\"cmp\t%0, #0\", operands);
(neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
(label_ref (match_operand 3 "" ""))
(pc)))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
output_asm_insn (\"cmn\\t%1, %2\", operands);
switch (get_attr_length (insn))
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (match_scratch:SI 4 "=l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
rtx op[3];
(const_int 8))))]
)
+;; Thumb-1: branch on an equality test of the low-order bits of operand 1
+;; (a zero_extract of operand-2 bits starting at bit 0) against zero.
+;; The tested field is shifted to the top of scratch operand 4 with LSL,
+;; which sets the condition codes; the branch then tests the result.
+;; The length attribute selects a short conditional branch (4), an
+;; inverted branch around an unconditional jump (6), or a far jump (8),
+;; depending on the branch distance.
+(define_insn "*tlobits_cbranch"
+ [(set (pc)
+ (if_then_else
+ (match_operator 0 "equality_operator"
+ [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
+ (match_operand:SI 2 "const_int_operand" "i")
+ (const_int 0))
+ (const_int 0)])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))
+ (clobber (match_scratch:SI 4 "=l"))]
+ "TARGET_THUMB1"
+ "*
+ {
+ rtx op[3];
+ op[0] = operands[4];
+ op[1] = operands[1];
+ op[2] = GEN_INT (32 - INTVAL (operands[2]));
+
+ output_asm_insn (\"lsl\\t%0, %1, %2\", op);
+ switch (get_attr_length (insn))
+ {
+ case 4: return \"b%d0\\t%l3\";
+ case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
+ default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
+ }
+ }"
+ [(set (attr "far_jump")
+ (if_then_else
+ (eq_attr "length" "8")
+ (const_string "yes")
+ (const_string "no")))
+ (set (attr "length")
+ (if_then_else
+ (and (ge (minus (match_dup 3) (pc)) (const_int -250))
+ (le (minus (match_dup 3) (pc)) (const_int 256)))
+ (const_int 4)
+ (if_then_else
+ (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
+ (le (minus (match_dup 3) (pc)) (const_int 2048)))
+ (const_int 6)
+ (const_int 8))))]
+)
+
(define_insn "*tstsi3_cbranch"
[(set (pc)
(if_then_else
(const_int 0)])
(label_ref (match_operand 2 "" ""))
(pc)))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
output_asm_insn (\"tst\\t%0, %1\", operands);
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
(and:SI (match_dup 2) (match_dup 3)))
(clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
if (which_alternative == 0)
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
output_asm_insn (\"orr\\t%0, %2\", operands);
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
(ior:SI (match_dup 2) (match_dup 3)))
(clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
if (which_alternative == 0)
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
output_asm_insn (\"eor\\t%0, %2\", operands);
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
(xor:SI (match_dup 2) (match_dup 3)))
(clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
if (which_alternative == 0)
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
output_asm_insn (\"bic\\t%0, %2\", operands);
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
(and:SI (not:SI (match_dup 3)) (match_dup 2)))
(clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
if (which_alternative == 0)
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
(plus:SI (match_dup 2) (const_int -1)))
(clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
{
rtx cond[2];
(match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
(plus:SI (match_dup 2) (match_dup 3)))
(clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& (GET_CODE (operands[4]) == EQ
|| GET_CODE (operands[4]) == NE
|| GET_CODE (operands[4]) == GE
(label_ref (match_operand 4 "" ""))
(pc)))
(clobber (match_scratch:SI 0 "=X,X,l,l"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& (GET_CODE (operands[3]) == EQ
|| GET_CODE (operands[3]) == NE
|| GET_CODE (operands[3]) == GE
(set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
(minus:SI (match_dup 2) (match_dup 3)))
(clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& (GET_CODE (operands[4]) == EQ
|| GET_CODE (operands[4]) == NE
|| GET_CODE (operands[4]) == GE
(const_int 0)])
(label_ref (match_operand 3 "" ""))
(pc)))]
- "TARGET_THUMB
+ "TARGET_THUMB1
&& (GET_CODE (operands[0]) == EQ
|| GET_CODE (operands[0]) == NE
|| GET_CODE (operands[0]) == GE
(define_expand "cmpsi"
[(match_operand:SI 0 "s_register_operand" "")
(match_operand:SI 1 "arm_add_operand" "")]
- "TARGET_ARM"
+ "TARGET_32BIT"
"{
arm_compare_op0 = operands[0];
arm_compare_op1 = operands[1];
(define_expand "cmpsf"
[(match_operand:SF 0 "s_register_operand" "")
(match_operand:SF 1 "arm_float_compare_operand" "")]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
arm_compare_op0 = operands[0];
arm_compare_op1 = operands[1];
(define_expand "cmpdf"
[(match_operand:DF 0 "s_register_operand" "")
(match_operand:DF 1 "arm_float_compare_operand" "")]
- "TARGET_ARM && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
arm_compare_op0 = operands[0];
arm_compare_op1 = operands[1];
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
(match_operand:SI 1 "arm_add_operand" "rI,L")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"@
cmp%?\\t%0, %1
cmn%?\\t%0, #%n1"
[(set_attr "conds" "set")]
)
-(define_insn "*cmpsi_shiftsi"
+(define_insn "*arm_cmpsi_shiftsi"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:SI 0 "s_register_operand" "r")
(match_operator:SI 3 "shift_operator"
(const_string "alu_shift_reg")))]
)
-(define_insn "*cmpsi_shiftsi_swp"
+(define_insn "*arm_cmpsi_shiftsi_swp"
[(set (reg:CC_SWP CC_REGNUM)
(compare:CC_SWP (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r")
(const_string "alu_shift_reg")))]
)
-(define_insn "*cmpsi_negshiftsi_si"
+(define_insn "*arm_cmpsi_negshiftsi_si"
[(set (reg:CC_Z CC_REGNUM)
(compare:CC_Z
(neg:SI (match_operator:SI 1 "shift_operator"
(define_insn "*deleted_compare"
[(set (match_operand 0 "cc_register" "") (match_dup 0))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"\\t%@ deleted compare"
[(set_attr "conds" "set")
(set_attr "length" "0")]
(if_then_else (eq (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (ne (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (gt (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (le (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (ge (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (lt (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (gtu (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (leu (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (geu (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (ltu (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (unordered (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
arm_compare_op1);"
)
(if_then_else (ordered (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
arm_compare_op1);"
)
(if_then_else (ungt (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (unlt (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (unge (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (unle (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (uneq (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (ltgt (match_dup 1) (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
)
(if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"*
gcc_assert (!arm_ccfsm_state);
(if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"*
gcc_assert (!arm_ccfsm_state);
[(match_operand 2 "cc_register" "") (const_int 0)])
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
{
[(match_operand 2 "cc_register" "") (const_int 0)])
(pc)
(label_ref (match_operand 0 "" ""))))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
{
+;; Store 1 in operand 0 if arm_compare_op0 == arm_compare_op1, else 0.
+;; The compared operands were saved by the cmp{si,sf,df} expanders.
(define_expand "seq"
[(set (match_operand:SI 0 "s_register_operand" "")
(eq:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 != arm_compare_op1, else 0.
(define_expand "sne"
[(set (match_operand:SI 0 "s_register_operand" "")
(ne:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 > arm_compare_op1 (signed).
(define_expand "sgt"
[(set (match_operand:SI 0 "s_register_operand" "")
(gt:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 <= arm_compare_op1 (signed).
(define_expand "sle"
[(set (match_operand:SI 0 "s_register_operand" "")
(le:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 >= arm_compare_op1 (signed).
(define_expand "sge"
[(set (match_operand:SI 0 "s_register_operand" "")
(ge:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 < arm_compare_op1 (signed).
(define_expand "slt"
[(set (match_operand:SI 0 "s_register_operand" "")
(lt:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 > arm_compare_op1 (unsigned).
(define_expand "sgtu"
[(set (match_operand:SI 0 "s_register_operand" "")
(gtu:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 <= arm_compare_op1 (unsigned).
(define_expand "sleu"
[(set (match_operand:SI 0 "s_register_operand" "")
(leu:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 >= arm_compare_op1 (unsigned).
(define_expand "sgeu"
[(set (match_operand:SI 0 "s_register_operand" "")
(geu:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if arm_compare_op0 < arm_compare_op1 (unsigned).
(define_expand "sltu"
[(set (match_operand:SI 0 "s_register_operand" "")
(ltu:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
)
+;; Store 1 in operand 0 if the saved FP comparison is unordered (either
+;; operand is a NaN).  Requires FPA or VFP hard-float support.
(define_expand "sunordered"
[(set (match_operand:SI 0 "s_register_operand" "")
(unordered:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
arm_compare_op1);"
)
+;; Store 1 in operand 0 if the saved FP comparison is ordered (neither
+;; operand is a NaN).
(define_expand "sordered"
[(set (match_operand:SI 0 "s_register_operand" "")
(ordered:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
arm_compare_op1);"
)
+;; Store 1 in operand 0 for the FP comparison "unordered or greater than".
(define_expand "sungt"
[(set (match_operand:SI 0 "s_register_operand" "")
(ungt:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
arm_compare_op1);"
)
+;; Store 1 in operand 0 for the FP comparison "unordered or greater/equal".
(define_expand "sunge"
[(set (match_operand:SI 0 "s_register_operand" "")
(unge:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
arm_compare_op1);"
)
+;; Store 1 in operand 0 for the FP comparison "unordered or less than".
(define_expand "sunlt"
[(set (match_operand:SI 0 "s_register_operand" "")
(unlt:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
arm_compare_op1);"
)
+;; Store 1 in operand 0 for the FP comparison "unordered or less/equal".
(define_expand "sunle"
[(set (match_operand:SI 0 "s_register_operand" "")
(unle:SI (match_dup 1) (const_int 0)))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
arm_compare_op1);"
)
; (define_expand "suneq"
; [(set (match_operand:SI 0 "s_register_operand" "")
; (uneq:SI (match_dup 1) (const_int 0)))]
-; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
; "gcc_unreachable ();"
; )
;
; (define_expand "sltgt"
; [(set (match_operand:SI 0 "s_register_operand" "")
; (ltgt:SI (match_dup 1) (const_int 0)))]
-; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
; "gcc_unreachable ();"
; )
(set_attr "length" "8")]
)
+;; Thumb-1 expansion of cstore: set operand 0 to 1 if the comparison
+;; operator 1 holds between operands 2 and 3, else to 0.  Comparisons
+;; against zero use dedicated branchless sequences; other operands are
+;; reduced to subtract/adc-with-carry idioms.  Codes with no good
+;; Thumb-1 sequence FAIL so the generic expansion is used instead.
+(define_expand "cstoresi4"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (match_operator:SI 1 "arm_comparison_operator"
+ [(match_operand:SI 2 "s_register_operand" "")
+ (match_operand:SI 3 "reg_or_int_operand" "")]))]
+ "TARGET_THUMB1"
+ "{
+ rtx op3, scratch, scratch2;
+
+ if (operands[3] == const0_rtx)
+ {
+ switch (GET_CODE (operands[1]))
+ {
+ case EQ:
+ emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
+ break;
+
+ case NE:
+ emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
+ break;
+
+ case LE:
+ /* x <= 0: sign bit of (x | (x - 1)).  */
+ scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
+ NULL_RTX, 0, OPTAB_WIDEN);
+ scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
+ NULL_RTX, 0, OPTAB_WIDEN);
+ expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
+ operands[0], 1, OPTAB_WIDEN);
+ break;
+
+ case GE:
+ /* x >= 0: sign bit of ~x.  The shift must target operands[0],
+ as in the LE and GT cases; a NULL_RTX target would discard
+ the result and leave operand 0 unset.  */
+ scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
+ NULL_RTX, 1);
+ expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
+ operands[0], 1, OPTAB_WIDEN);
+ break;
+
+ case GT:
+ /* x > 0: sign bit of ((x >> 31) - x).  */
+ scratch = expand_binop (SImode, ashr_optab, operands[2],
+ GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
+ scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
+ NULL_RTX, 0, OPTAB_WIDEN);
+ expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
+ 0, OPTAB_WIDEN);
+ break;
+
+ /* LT is handled by generic code. No need for unsigned with 0. */
+ default:
+ FAIL;
+ }
+ DONE;
+ }
+
+ switch (GET_CODE (operands[1]))
+ {
+ case EQ:
+ /* x == y reduces to (x - y) == 0.  */
+ scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
+ NULL_RTX, 0, OPTAB_WIDEN);
+ emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
+ break;
+
+ case NE:
+ scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
+ NULL_RTX, 0, OPTAB_WIDEN);
+ emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
+ break;
+
+ case LE:
+ op3 = force_reg (SImode, operands[3]);
+
+ scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
+ NULL_RTX, 1, OPTAB_WIDEN);
+ scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
+ NULL_RTX, 0, OPTAB_WIDEN);
+ emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
+ op3, operands[2]));
+ break;
+
+ case GE:
+ op3 = operands[3];
+ if (!thumb1_cmp_operand (op3, SImode))
+ op3 = force_reg (SImode, op3);
+ scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
+ NULL_RTX, 0, OPTAB_WIDEN);
+ scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
+ NULL_RTX, 1, OPTAB_WIDEN);
+ emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
+ operands[2], op3));
+ break;
+
+ case LEU:
+ op3 = force_reg (SImode, operands[3]);
+ scratch = force_reg (SImode, const0_rtx);
+ emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
+ op3, operands[2]));
+ break;
+
+ case GEU:
+ op3 = operands[3];
+ if (!thumb1_cmp_operand (op3, SImode))
+ op3 = force_reg (SImode, op3);
+ scratch = force_reg (SImode, const0_rtx);
+ emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
+ operands[2], op3));
+ break;
+
+ case LTU:
+ op3 = operands[3];
+ if (!thumb1_cmp_operand (op3, SImode))
+ op3 = force_reg (SImode, op3);
+ scratch = gen_reg_rtx (SImode);
+ emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
+ emit_insn (gen_negsi2 (operands[0], scratch));
+ break;
+
+ case GTU:
+ /* x >u y is y <u x; swap the operands into the nltu pattern.  */
+ op3 = force_reg (SImode, operands[3]);
+ scratch = gen_reg_rtx (SImode);
+ emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
+ emit_insn (gen_negsi2 (operands[0], scratch));
+ break;
+
+ /* No good sequences for GT, LT. */
+ default:
+ FAIL;
+ }
+ DONE;
+}")
+
+;; Expander for storing (op1 == 0) in op0; allocates the scratch
+;; register clobbered by the matching *cstoresi_eq0_thumb1_insn pattern.
+(define_expand "cstoresi_eq0_thumb1"
+ [(parallel
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (eq:SI (match_operand:SI 1 "s_register_operand" "")
+ (const_int 0)))
+ (clobber (match_dup:SI 2))])]
+ "TARGET_THUMB1"
+ "operands[2] = gen_reg_rtx (SImode);"
+)
+
+;; Expander for storing (op1 != 0) in op0; allocates the scratch
+;; register clobbered by the matching *cstoresi_ne0_thumb1_insn pattern.
+(define_expand "cstoresi_ne0_thumb1"
+ [(parallel
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (ne:SI (match_operand:SI 1 "s_register_operand" "")
+ (const_int 0)))
+ (clobber (match_dup:SI 2))])]
+ "TARGET_THUMB1"
+ "operands[2] = gen_reg_rtx (SImode);"
+)
+
+;; op0 = (op1 == 0).  NEG (RSB #0) sets C exactly when its input is
+;; zero, so the following ADC computes -x + x + C = C.  The second
+;; alternative handles op0 == op1 by negating into the scratch instead.
+(define_insn "*cstoresi_eq0_thumb1_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
+ (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
+ (const_int 0)))
+ (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
+ "TARGET_THUMB1"
+ "@
+ neg\\t%0, %1\;adc\\t%0, %0, %1
+ neg\\t%2, %1\;adc\\t%0, %1, %2"
+ [(set_attr "length" "4")]
+)
+
+;; op0 = (op1 != 0).  SUB #1 sets C exactly when op1 >= 1 (unsigned),
+;; i.e. when op1 is non-zero; SBC then computes x - (x - 1) - (1 - C) = C.
+(define_insn "*cstoresi_ne0_thumb1_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=l")
+ (ne:SI (match_operand:SI 1 "s_register_operand" "0")
+ (const_int 0)))
+ (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
+ "TARGET_THUMB1"
+ "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
+ [(set_attr "length" "4")]
+)
+
+;; op0 = -(op1 <u op2), the negated-LTU ("nltu") idiom used by the LTU
+;; and GTU cases of cstoresi4 above (GTU swaps the operands).  CMP
+;; clears C on an unsigned borrow, and SBC Rd, Rd, Rd yields C - 1,
+;; i.e. -1 exactly when op1 <u op2.  The RTL must therefore be
+;; neg (ltu ...); GTU would not describe what cmp/sbc computes.
+(define_insn "cstoresi_nltu_thumb1"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l")
+ (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
+ (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
+ "TARGET_THUMB1"
+ "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
+ [(set_attr "length" "4")]
+)
+
+;; Used in the Thumb-1 LE/GE/LEU/GEU cstore expansions above:
+;; op0 = op1 + op2 + (op3 >=u op4).  CMP sets C to (op3 >=u op4),
+;; which ADC then folds into the sum.
+(define_insn "thumb1_addsi3_addgeu"
+ [(set (match_operand:SI 0 "s_register_operand" "=l")
+ (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
+ (match_operand:SI 2 "s_register_operand" "l"))
+ (geu:SI (match_operand:SI 3 "s_register_operand" "l")
+ (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
+ "TARGET_THUMB1"
+ "cmp\\t%3, %4\;adc\\t%0, %1, %2"
+ [(set_attr "length" "4")]
+)
+
\f
;; Conditional move insns
(if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
(match_operand:SI 2 "arm_not_operand" "")
(match_operand:SI 3 "arm_not_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
{
enum rtx_code code = GET_CODE (operands[1]);
(if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
(match_operand:SF 2 "s_register_operand" "")
(match_operand:SF 3 "nonmemory_operand" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"
{
enum rtx_code code = GET_CODE (operands[1]);
(if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
(match_operand:DF 2 "s_register_operand" "")
(match_operand:DF 3 "arm_float_add_operand" "")))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
"
{
enum rtx_code code = GET_CODE (operands[1]);
(define_insn "*arm_jump"
[(set (pc)
(label_ref (match_operand 0 "" "")))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
{
if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
(define_insn "*thumb_jump"
[(set (pc)
(label_ref (match_operand 0 "" "")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
if (get_attr_length (insn) == 2)
return \"b\\t%l0\";
"TARGET_EITHER"
"
{
- rtx callee;
+ rtx callee, pat;
/* In an untyped call, we can get NULL for operand 2. */
if (operands[2] == NULL_RTX)
operands[2] = const0_rtx;
- /* This is to decide if we should generate indirect calls by loading the
- 32 bit address of the callee into a register before performing the
- branch and link. operand[2] encodes the long_call/short_call
- attribute of the function being called. This attribute is set whenever
- __attribute__((long_call/short_call)) or #pragma long_call/no_long_call
- is used, and the short_call attribute can also be set if function is
- declared as static or if it has already been defined in the current
- compilation unit. See arm.c and arm.h for info about this. The third
- parameter to arm_is_longcall_p is used to tell it which pattern
- invoked it. */
- callee = XEXP (operands[0], 0);
-
- if ((GET_CODE (callee) == SYMBOL_REF
- && arm_is_longcall_p (operands[0], INTVAL (operands[2]), 0))
- || (GET_CODE (callee) != SYMBOL_REF
- && GET_CODE (callee) != REG))
+ /* Decide if we should generate indirect calls by loading the
+ 32-bit address of the callee into a register before performing the
+ branch and link. */
+ callee = XEXP (operands[0], 0);
+ if (GET_CODE (callee) == SYMBOL_REF
+ ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
+ : !REG_P (callee))
XEXP (operands[0], 0) = force_reg (Pmode, callee);
+
+ pat = gen_call_internal (operands[0], operands[1], operands[2]);
+ arm_emit_call_insn (pat, XEXP (operands[0], 0));
+ DONE;
}"
)
+;; Internal form of a call, generated by the "call" expander (via
+;; gen_call_internal / arm_emit_call_insn above): the call itself, the
+;; USE carrying the call-cookie operand, and the link-register clobber.
+(define_expand "call_internal"
+ [(parallel [(call (match_operand 0 "memory_operand" "")
+ (match_operand 1 "general_operand" ""))
+ (use (match_operand 2 "" ""))
+ (clobber (reg:SI LR_REGNUM))])])
+
(define_insn "*call_reg_armv5"
[(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
(match_operand 1 "" ""))
)
(define_insn "*call_mem"
- [(call (mem:SI (match_operand:SI 0 "memory_operand" "m"))
+ [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
(set_attr "type" "call")]
)
-(define_insn "*call_reg_thumb_v5"
+(define_insn "*call_reg_thumb1_v5"
[(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_THUMB && arm_arch5"
+ "TARGET_THUMB1 && arm_arch5"
"blx\\t%0"
[(set_attr "length" "2")
(set_attr "type" "call")]
)
-(define_insn "*call_reg_thumb"
+(define_insn "*call_reg_thumb1"
[(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_THUMB && !arm_arch5"
+ "TARGET_THUMB1 && !arm_arch5"
"*
{
if (!TARGET_CALLER_INTERWORKING)
"TARGET_EITHER"
"
{
- rtx callee = XEXP (operands[1], 0);
+ rtx pat, callee;
/* In an untyped call, we can get NULL for operand 2. */
if (operands[3] == 0)
operands[3] = const0_rtx;
- /* See the comment in define_expand \"call\". */
- if ((GET_CODE (callee) == SYMBOL_REF
- && arm_is_longcall_p (operands[1], INTVAL (operands[3]), 0))
- || (GET_CODE (callee) != SYMBOL_REF
- && GET_CODE (callee) != REG))
+ /* Decide if we should generate indirect calls by loading the
+ 32-bit address of the callee into a register before performing the
+ branch and link. */
+ callee = XEXP (operands[1], 0);
+ if (GET_CODE (callee) == SYMBOL_REF
+ ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
+ : !REG_P (callee))
XEXP (operands[1], 0) = force_reg (Pmode, callee);
+
+ pat = gen_call_value_internal (operands[0], operands[1],
+ operands[2], operands[3]);
+ arm_emit_call_insn (pat, XEXP (operands[1], 0));
+ DONE;
}"
)
+;; Internal form of a value-returning call, generated by the
+;; "call_value" expander (via gen_call_value_internal /
+;; arm_emit_call_insn above): destination %0, the call, the USE carrying
+;; the call-cookie operand, and the link-register clobber.
+(define_expand "call_value_internal"
+ [(parallel [(set (match_operand 0 "" "")
+ (call (match_operand 1 "memory_operand" "")
+ (match_operand 2 "general_operand" "")))
+ (use (match_operand 3 "" ""))
+ (clobber (reg:SI LR_REGNUM))])])
+
(define_insn "*call_value_reg_armv5"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
(define_insn "*call_value_mem"
[(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:SI 1 "memory_operand" "m"))
+ (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
(set_attr "type" "call")]
)
-(define_insn "*call_value_reg_thumb_v5"
+(define_insn "*call_value_reg_thumb1_v5"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_THUMB && arm_arch5"
+ "TARGET_THUMB1 && arm_arch5"
"blx\\t%1"
[(set_attr "length" "2")
(set_attr "type" "call")]
)
-(define_insn "*call_value_reg_thumb"
+(define_insn "*call_value_reg_thumb1"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_THUMB && !arm_arch5"
+ "TARGET_THUMB1 && !arm_arch5"
"*
{
if (!TARGET_CALLER_INTERWORKING)
(clobber (reg:SI LR_REGNUM))]
"TARGET_ARM
&& (GET_CODE (operands[0]) == SYMBOL_REF)
- && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
+ && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
"*
{
return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
(clobber (reg:SI LR_REGNUM))]
"TARGET_ARM
&& (GET_CODE (operands[1]) == SYMBOL_REF)
- && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
+ && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
"*
{
return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
(clobber (reg:SI LR_REGNUM))]
"TARGET_THUMB
&& GET_CODE (operands[0]) == SYMBOL_REF
- && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
+ && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
"bl\\t%a0"
[(set_attr "length" "4")
(set_attr "type" "call")]
(clobber (reg:SI LR_REGNUM))]
"TARGET_THUMB
&& GET_CODE (operands[1]) == SYMBOL_REF
- && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
+ && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
"bl\\t%a1"
[(set_attr "length" "4")
(set_attr "type" "call")]
(match_operand:SI 2 "const_int_operand" "") ; total range
(match_operand:SI 3 "" "") ; table label
(match_operand:SI 4 "" "")] ; Out of range label
- "TARGET_ARM"
+ "TARGET_32BIT"
"
{
rtx reg;
if (!const_ok_for_arm (INTVAL (operands[2])))
operands[2] = force_reg (SImode, operands[2]);
- emit_jump_insn (gen_casesi_internal (operands[0], operands[2], operands[3],
- operands[4]));
+ if (TARGET_ARM)
+ {
+ emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
+ operands[3], operands[4]));
+ }
+ else if (flag_pic)
+ {
+ emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
+ operands[2], operands[3], operands[4]));
+ }
+ else
+ {
+ emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
+ operands[3], operands[4]));
+ }
DONE;
}"
)
;; The USE in this pattern is needed to tell flow analysis that this is
;; a CASESI insn. It has no other purpose.
-(define_insn "casesi_internal"
+(define_insn "arm_casesi_internal"
[(parallel [(set (pc)
(if_then_else
(leu (match_operand:SI 0 "s_register_operand" "r")
[(set (pc)
(match_operand:SI 0 "s_register_operand" ""))]
"TARGET_EITHER"
- ""
+ "
+ /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
+ address and use bx. */
+ if (TARGET_THUMB2)
+ {
+ rtx tmp;
+ tmp = gen_reg_rtx (SImode);
+ emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
+ operands[0] = tmp;
+ }
+ "
)
;; NB Never uses BX.
)
;; NB Never uses BX.
-(define_insn "*thumb_indirect_jump"
+(define_insn "*thumb1_indirect_jump"
[(set (pc)
(match_operand:SI 0 "register_operand" "l*r"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"mov\\tpc, %0"
[(set_attr "conds" "clob")
(set_attr "length" "2")]
[(const_int 0)]
"TARGET_EITHER"
"*
+ if (TARGET_UNIFIED_ASM)
+ return \"nop\";
if (TARGET_ARM)
return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
return \"mov\\tr8, r8\";
(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
(match_dup 2)]))]
"TARGET_ARM"
- "%i1%?s\\t%0, %2, %4%S3"
+ "%i1%.\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
(set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_ARM"
- "%i1%?s\\t%0, %2, %4%S3"
+ "%i1%.\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
(set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
(minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
(match_dup 4)])))]
"TARGET_ARM"
- "sub%?s\\t%0, %1, %3%S2"
+ "sub%.\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
(set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_ARM"
- "sub%?s\\t%0, %1, %3%S2"
+ "sub%.\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
(set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
(set_attr "length" "8,12")]
)
+;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
(define_insn "*cmp_ite0"
[(set (match_operand 6 "dominant_cc_register" "")
(compare
(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
(const_int 0)))]
"")
+;; ??? The conditional patterns above need checking for Thumb-2 usefulness
(define_insn "*negscc"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"*
- if (GET_CODE (operands[3]) == LT && operands[3] == const0_rtx)
+ if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
return \"mov\\t%0, %1, asr #31\";
if (GET_CODE (operands[3]) == NE)
return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
- if (GET_CODE (operands[3]) == GT)
- return \"subs\\t%0, %1, %2\;mvnne\\t%0, %0, asr #31\";
-
output_asm_insn (\"cmp\\t%1, %2\", operands);
output_asm_insn (\"mov%D3\\t%0, #0\", operands);
return \"mvn%d3\\t%0, #0\";
(set_attr "length" "8,8,12")]
)
+;; ??? The patterns below need checking for Thumb-2 usefulness.
+
(define_insn "*ifcompare_plus_move"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(if_then_else:SI (match_operator 6 "arm_comparison_operator"
ldm[0] = base_reg;
if (val1 !=0 && val2 != 0)
{
+ rtx ops[3];
+
if (val1 == 4 || val2 == 4)
/* Other val must be 8, since we know they are adjacent and neither
is zero. */
- output_asm_insn (\"ldm%?ib\\t%0, {%1, %2}\", ldm);
- else
+ output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
+ else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
{
- rtx ops[3];
-
ldm[0] = ops[0] = operands[4];
ops[1] = base_reg;
ops[2] = GEN_INT (val1);
output_add_immediate (ops);
if (val1 < val2)
- output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
else
- output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
+ }
+ else
+ {
+ /* Offset is out of range for a single add, so use two ldr. */
+ ops[0] = ldm[1];
+ ops[1] = base_reg;
+ ops[2] = GEN_INT (val1);
+ output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
+ ops[0] = ldm[2];
+ ops[2] = GEN_INT (val2);
+ output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
}
}
else if (val1 != 0)
{
if (val1 < val2)
- output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
else
- output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
}
else
{
if (val1 < val2)
- output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
else
- output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
+ output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
}
output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
return \"\";
operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
"
)
+;; ??? Check the patterns above for Thumb-2 usefulness
(define_expand "prologue"
[(clobber (const_int 0))]
"TARGET_EITHER"
- "if (TARGET_ARM)
+ "if (TARGET_32BIT)
arm_expand_prologue ();
else
- thumb_expand_prologue ();
+ thumb1_expand_prologue ();
DONE;
"
)
"
if (current_function_calls_eh_return)
emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
- if (TARGET_THUMB)
- thumb_expand_epilogue ();
+ if (TARGET_THUMB1)
+ thumb1_expand_epilogue ();
else if (USE_RETURN_INSN (FALSE))
{
emit_jump_insn (gen_return ());
(define_insn "sibcall_epilogue"
[(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
(unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
if (use_return_insn (FALSE, next_nonnote_insn (insn)))
return output_return_instruction (const_true_rtx, FALSE, FALSE);
[(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
"TARGET_EITHER"
"*
- if (TARGET_ARM)
+ if (TARGET_32BIT)
return arm_output_epilogue (NULL);
- else /* TARGET_THUMB */
+ else /* TARGET_THUMB1 */
return thumb_unexpanded_epilogue ();
"
; Length is absolute worst case
;; some extent with the conditional data operations, so we have to split them
;; up again here.
+;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
+;; conditional execution sufficient?
+
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(if_then_else:SI (match_operator 1 "arm_comparison_operator"
[(set_attr "conds" "clob")
(set_attr "length" "12")]
)
+;; ??? The above patterns need auditing for Thumb-2
;; Push multiple registers to the stack. Registers are in parallel (use ...)
;; expressions. For simplicity, the first register is also in the unspec
[(set (match_operand:BLK 0 "memory_operand" "=m")
(unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
UNSPEC_PUSH_MULT))])]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
{
int num_saves = XVECLEN (operands[2], 0);
/* For the StrongARM at least it is faster to
- use STR to store only a single register. */
- if (num_saves == 1)
+ use STR to store only a single register.
+ In Thumb mode always use push, and the assembler will pick
+ something appropriate. */
+ if (num_saves == 1 && TARGET_ARM)
output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
else
{
int i;
char pattern[100];
- strcpy (pattern, \"stmfd\\t%m0!, {%1\");
+ if (TARGET_ARM)
+ strcpy (pattern, \"stmfd\\t%m0!, {%1\");
+ else
+ strcpy (pattern, \"push\\t{%1\");
for (i = 1; i < num_saves; i++)
{
[(set (match_operand:BLK 0 "memory_operand" "=m")
(unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
UNSPEC_PUSH_MULT))])]
- "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
"*
{
char pattern[100];
(define_insn "consttable_1"
[(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
making_const_table = TRUE;
assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
(define_insn "consttable_2"
[(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"*
making_const_table = TRUE;
assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
[(set_attr "length" "8")]
)
+;; Emit a 128-bit (16-byte) literal-pool entry (VUNSPEC_POOL_16; see the
+;; constant definitions at the top of the file).  Floating-point
+;; constants are emitted with assemble_real; everything else falls
+;; through to a 16-byte assemble_integer.
+(define_insn "consttable_16"
+ [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
+ "TARGET_EITHER"
+ "*
+ {
+ making_const_table = TRUE;
+ switch (GET_MODE_CLASS (GET_MODE (operands[0])))
+ {
+ case MODE_FLOAT:
+ {
+ REAL_VALUE_TYPE r;
+ REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
+ assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
+ break;
+ }
+ default:
+ assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
+ break;
+ }
+ return \"\";
+ }"
+ [(set_attr "length" "16")]
+)
+
;; Miscellaneous Thumb patterns
(define_expand "tablejump"
[(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
(use (label_ref (match_operand 1 "" "")))])]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"
if (flag_pic)
{
)
;; NB never uses BX.
-(define_insn "*thumb_tablejump"
+(define_insn "*thumb1_tablejump"
[(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
(use (label_ref (match_operand 1 "" "")))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"mov\\t%|pc, %0"
[(set_attr "length" "2")]
)
+;; Count leading zeros via the CLZ instruction; condition widened here
+;; from TARGET_ARM to TARGET_32BIT so the pattern is shared with the new
+;; Thumb-2 support (still requires ARMv5, per arm_arch5).
(define_insn "clzsi2"
 [(set (match_operand:SI 0 "s_register_operand" "=r")
 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM && arm_arch5"
+ "TARGET_32BIT && arm_arch5"
 "clz%?\\t%0, %1"
- [(set_attr "predicable" "yes")])
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "clz")])
(define_expand "ffssi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_ARM && arm_arch5"
+ "TARGET_32BIT && arm_arch5"
"
{
rtx t1, t2, t3;
(define_expand "ctzsi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_ARM && arm_arch5"
+ "TARGET_32BIT && arm_arch5"
"
{
rtx t1, t2, t3;
[(prefetch (match_operand:SI 0 "address_operand" "p")
(match_operand:SI 1 "" "")
(match_operand:SI 2 "" ""))]
- "TARGET_ARM && arm_arch5e"
+ "TARGET_32BIT && arm_arch5e"
"pld\\t%a0")
;; General predication pattern
[(match_operator 0 "arm_comparison_operator"
[(match_operand 1 "cc_register" "")
(const_int 0)])]
- "TARGET_ARM"
+ "TARGET_32BIT"
""
)
"TARGET_EITHER"
"
{
- if (TARGET_ARM)
+ if (TARGET_32BIT)
emit_insn (gen_arm_eh_return (operands[0]));
else
emit_insn (gen_thumb_eh_return (operands[0]));
[(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
VUNSPEC_EH_RETURN)
(clobber (match_scratch:SI 1 "=&l"))]
- "TARGET_THUMB"
+ "TARGET_THUMB1"
"#"
"&& reload_completed"
[(const_int 0)]
}"
)
+\f
+;; TLS support
+
+;; Read the thread pointer directly from CP15 register c13, c0, 3
+;; (presumably the user read-only thread ID register, TPIDRURO --
+;; confirm against the architecture manual) on targets with a hardware
+;; thread pointer (TARGET_HARD_TP).
+(define_insn "load_tp_hard"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(const_int 0)] UNSPEC_TLS))]
+ "TARGET_HARD_TP"
+ "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
+ [(set_attr "predicable" "yes")]
+)
+
+;; Read the thread pointer by calling the __aeabi_read_tp helper.  The
+;; helper returns in r0 -- hence the hard-wired (reg:SI 0) destination --
+;; and does not clobber r1-r3; the RTL records the LR, IP and condition
+;; code clobbers of the call itself.
+(define_insn "load_tp_soft"
+ [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
+ (clobber (reg:SI LR_REGNUM))
+ (clobber (reg:SI IP_REGNUM))
+ (clobber (reg:CC CC_REGNUM))]
+ "TARGET_SOFT_TP"
+ "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
+ [(set_attr "conds" "clob")]
+)
+
;; Load the FPA co-processor patterns
(include "fpa.md")
;; Load the Maverick co-processor patterns
(include "cirrus.md")
+;; Vector bits common to IWMMXT and Neon
+(include "vec-common.md")
;; Load the Intel Wireless Multimedia Extension patterns
(include "iwmmxt.md")
;; Load the VFP co-processor patterns
(include "vfp.md")
+;; Thumb-2 patterns
+(include "thumb2.md")
+;; Neon patterns
+(include "neon.md")