;;- Machine description for ARM for GNU compiler
;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
-;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+;; Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
-;; by the Free Software Foundation; either version 2, or (at your
+;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.
;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; License for more details.
;; You should have received a copy of the GNU General Public License
-;; along with GCC; see the file COPYING. If not, write to
-;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-;; Boston, MA 02110-1301, USA.
+;; along with GCC; see the file COPYING3. If not see
+;; <http://www.gnu.org/licenses/>.
;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
(UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
; usage, that is, we will add the pic_register
; value to it before trying to dereference it.
- (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
- ; GLOBAL_OFFSET_TABLE. The operation is fully
- ; described by the RTL but must be wrapped to
- ; prevent combine from trying to rip it apart.
+ (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together;
+ ; the last operand is the number of a PIC_LABEL
+ ; that points at the containing instruction.
(UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
; being scheduled before the stack adjustment insn.
(UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
(UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
(UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
; instruction stream.
- (UNSPEC_STACK_ALIGN 20) ; Doubleword aligned stack pointer. Used to
+ (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
; generate correct unwind information.
- (UNSPEC_PIC_OFFSET 22) ; A symbolic 12-bit OFFSET that has been treated
+ (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
; correctly for PIC usage.
+ (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
+ ; a given symbolic address.
+ (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
+ (UNSPEC_RBIT 26) ; rbit operation.
+ (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
+ ; another symbolic address.
]
)
; patterns that share the same RTL in both ARM and Thumb code.
(define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
-; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
-; scheduling decisions for the load unit and the multiplier.
-(define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
-
-; IS_XSCALE is set to 'yes' when compiling for XScale.
-(define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
-
;; Operand number of an input operand that is shifted. Zero if the
;; given instruction does not shift one of its input operands.
(define_attr "shift" "" (const_int 0))
;; scheduling information.
(define_attr "insn"
- "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,other"
+ "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
(const_string "other"))
; TYPE attribute is used to detect floating point instructions which, if
; scheduling of writes.
; Classification of each insn
+; Note: vfp.md has different meanings for some of these, and some further
+; types as well. See that file for details.
; alu any alu instruction that doesn't hit memory or fp
; regs or have a shifted source operand
; alu_shift any data instruction that doesn't hit memory or fp
;
(define_attr "type"
- "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
+ "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
(if_then_else
(eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
(const_string "mult")
; initialized by arm_override_options()
(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
+;; Classification of NEON instructions for scheduling purposes.
+;; Do not set this attribute and the "type" attribute together in
+;; any one instruction pattern.
+(define_attr "neon_type"
+ "neon_int_1,\
+ neon_int_2,\
+ neon_int_3,\
+ neon_int_4,\
+ neon_int_5,\
+ neon_vqneg_vqabs,\
+ neon_vmov,\
+ neon_vaba,\
+ neon_vsma,\
+ neon_vaba_qqq,\
+ neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
+ neon_mul_qqq_8_16_32_ddd_32,\
+ neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
+ neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
+ neon_mla_qqq_8_16,\
+ neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
+ neon_mla_qqq_32_qqd_32_scalar,\
+ neon_mul_ddd_16_scalar_32_16_long_scalar,\
+ neon_mul_qqd_32_scalar,\
+ neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
+ neon_shift_1,\
+ neon_shift_2,\
+ neon_shift_3,\
+ neon_vshl_ddd,\
+ neon_vqshl_vrshl_vqrshl_qqq,\
+ neon_vsra_vrsra,\
+ neon_fp_vadd_ddd_vabs_dd,\
+ neon_fp_vadd_qqq_vabs_qq,\
+ neon_fp_vsum,\
+ neon_fp_vmul_ddd,\
+ neon_fp_vmul_qqd,\
+ neon_fp_vmla_ddd,\
+ neon_fp_vmla_qqq,\
+ neon_fp_vmla_ddd_scalar,\
+ neon_fp_vmla_qqq_scalar,\
+ neon_fp_vrecps_vrsqrts_ddd,\
+ neon_fp_vrecps_vrsqrts_qqq,\
+ neon_bp_simple,\
+ neon_bp_2cycle,\
+ neon_bp_3cycle,\
+ neon_ldr,\
+ neon_str,\
+ neon_vld1_1_2_regs,\
+ neon_vld1_3_4_regs,\
+ neon_vld2_2_regs_vld1_vld2_all_lanes,\
+ neon_vld2_4_regs,\
+ neon_vld3_vld4,\
+ neon_vst1_1_2_regs_vst2_2_regs,\
+ neon_vst1_3_4_regs,\
+ neon_vst2_4_regs_vst3_vst4,\
+ neon_vst3_vst4,\
+ neon_vld1_vld2_lane,\
+ neon_vld3_vld4_lane,\
+ neon_vst1_vst2_lane,\
+ neon_vst3_vst4_lane,\
+ neon_vld3_vld4_all_lanes,\
+ neon_mcr,\
+ neon_mcr_2_mcrr,\
+ neon_mrc,\
+ neon_mrrc,\
+ neon_ldm_2,\
+ neon_stm_2,\
+ none"
+ (const_string "none"))
+
; condition codes: this one is used by final_prescan_insn to speed up
; conditionalizing instructions. It saves having to scan the rtl to see if
; it uses or alters the condition codes.
; CLOB means that the condition codes are altered in an undefined manner, if
; they are altered at all
;
-; JUMP_CLOB is used when the condition cannot be represented by a single
-; instruction (UNEQ and LTGT). These cannot be predicated.
; UNCONDITIONAL means the instructions cannot be conditionally executed.
;
; NOCOND means that the condition codes are neither altered nor affect the
; output of this insn
-(define_attr "conds" "use,set,clob,jump_clob,nocond"
+(define_attr "conds" "use,set,clob,unconditional,nocond"
(if_then_else (eq_attr "type" "call")
(const_string "clob")
- (const_string "nocond")))
+ (if_then_else (eq_attr "neon_type" "none")
+ (const_string "nocond")
+ (const_string "unconditional"))))
; Predicable means that the insn can be conditionally executed based on
; an automatically added predicate (additional patterns are generated by
(define_attr "ce_count" "" (const_int 1))
;;---------------------------------------------------------------------------
-;; Mode macros
+;; Mode iterators
; A list of modes that are exactly 64 bits in size. We use this to expand
; some splits that are the same for all modes when operating on ARM
; registers.
-(define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
+(define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
+
+;; The integer modes up to word size
+(define_mode_iterator QHSI [QI HI SI])
;;---------------------------------------------------------------------------
;; Predicates
;; Processor type. This is created automatically from arm-cores.def.
(include "arm-tune.md")
+;; True when tuning for any Cortex-R4 variant (cortexr4 or cortexr4f);
+;; used below to select the R4 scheduling descriptions.
+(define_attr "tune_cortexr4" "yes,no"
+ (const (if_then_else
+ (eq_attr "tune" "cortexr4,cortexr4f")
+ (const_string "yes")
+ (const_string "no"))))
+
;; True if the generic scheduling description should be used.
(define_attr "generic_sched" "yes,no"
(const (if_then_else
- (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8")
+ (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
+ (eq_attr "tune_cortexr4" "yes"))
(const_string "no")
(const_string "yes"))))
(define_attr "generic_vfp" "yes,no"
(const (if_then_else
(and (eq_attr "fpu" "vfp")
- (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
+ (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
+ (eq_attr "tune_cortexr4" "no"))
(const_string "yes")
(const_string "no"))))
(include "arm1026ejs.md")
(include "arm1136jfs.md")
(include "cortex-a8.md")
+(include "cortex-a9.md")
+(include "cortex-r4.md")
+(include "cortex-r4f.md")
+(include "vfp11.md")
\f
;;---------------------------------------------------------------------------
if (TARGET_THUMB1)
{
if (GET_CODE (operands[1]) != REG)
- operands[1] = force_reg (SImode, operands[1]);
+ operands[1] = force_reg (DImode, operands[1]);
if (GET_CODE (operands[2]) != REG)
- operands[2] = force_reg (SImode, operands[2]);
+ operands[2] = force_reg (DImode, operands[2]);
}
"
)
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(plus:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "r,0")))
+ (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
"#"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(plus:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "r,0")))
+ (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
"#"
""
)
+;; The r/r/k alternative is required when reloading the address
+;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
+;; put the duplicated register first, and not try the commutative version.
(define_insn_and_split "*arm_addsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
- (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
- (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
+ [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
+ (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
+ (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
"TARGET_32BIT"
"@
add%?\\t%0, %1, %2
+ add%?\\t%0, %1, %2
+ add%?\\t%0, %2, %1
+ sub%?\\t%0, %1, #%n2
sub%?\\t%0, %1, #%n2
#"
- "TARGET_32BIT &&
- GET_CODE (operands[2]) == CONST_INT
+ "TARGET_32BIT
+ && GET_CODE (operands[2]) == CONST_INT
&& !(const_ok_for_arm (INTVAL (operands[2]))
- || const_ok_for_arm (-INTVAL (operands[2])))"
+ || const_ok_for_arm (-INTVAL (operands[2])))
+ && (reload_completed || !arm_eliminable_register (operands[1]))"
[(clobber (const_int 0))]
"
arm_split_constant (PLUS, SImode, curr_insn,
operands[1], 0);
DONE;
"
- [(set_attr "length" "4,4,16")
+ [(set_attr "length" "4,4,4,4,4,16")
(set_attr "predicable" "yes")]
)
;; register. Trying to reload it will always fail catastrophically,
;; so never allow those alternatives to match if reloading is needed.
-(define_insn "*thumb1_addsi3"
- [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
- (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
- (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
+(define_insn_and_split "*thumb1_addsi3"
+ [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
+ (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
+ (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
"TARGET_THUMB1"
"*
static const char * const asms[] =
\"add\\t%0, %0, %2\",
\"add\\t%0, %0, %2\",
\"add\\t%0, %1, %2\",
- \"add\\t%0, %1, %2\"
+ \"add\\t%0, %1, %2\",
+ \"#\",
+ \"#\"
};
if ((which_alternative == 2 || which_alternative == 6)
&& GET_CODE (operands[2]) == CONST_INT
return \"sub\\t%0, %1, #%n2\";
return asms[which_alternative];
"
- [(set_attr "length" "2")]
+ "&& reload_completed && CONST_INT_P (operands[2])
+ && operands[1] != stack_pointer_rtx
+ && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
+ [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
+ (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
+ {
+ HOST_WIDE_INT offset = INTVAL (operands[2]);
+ if (offset > 255)
+ offset = 255;
+ else if (offset < -255)
+ offset = -255;
+
+ operands[3] = GEN_INT (offset);
+ operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
+ }
+ [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
)
;; Reloading and elimination of the frame pointer can
[(set (match_operand:DF 0 "s_register_operand" "")
(plus:DF (match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_add_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], DFmode))
if (TARGET_THUMB1)
{
if (GET_CODE (operands[1]) != REG)
- operands[1] = force_reg (SImode, operands[1]);
+ operands[1] = force_reg (DImode, operands[1]);
if (GET_CODE (operands[2]) != REG)
- operands[2] = force_reg (SImode, operands[2]);
+ operands[2] = force_reg (DImode, operands[2]);
}
"
)
(define_insn "*subdi_di_zesidi"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
+ (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
(zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))))
(clobber (reg:CC CC_REGNUM))]
(define_insn "*subdi_di_sesidi"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
+ (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
(sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))))
(clobber (reg:CC CC_REGNUM))]
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(minus:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))
+ (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(minus:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))
+ (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
; ??? Check Thumb-2 split length
(define_insn_and_split "*arm_subsi3_insn"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
- (match_operand:SI 2 "s_register_operand" "r,r")))]
+ [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
+ (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
+ (match_operand:SI 2 "s_register_operand" "r, r, r")))]
"TARGET_32BIT"
"@
rsb%?\\t%0, %2, %1
+ sub%?\\t%0, %1, %2
#"
"TARGET_32BIT
&& GET_CODE (operands[1]) == CONST_INT
INTVAL (operands[1]), operands[0], operands[2], 0);
DONE;
"
- [(set_attr "length" "4,16")
+ [(set_attr "length" "4,4,16")
(set_attr "predicable" "yes")]
)
[(set (match_operand:DF 0 "s_register_operand" "")
(minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
if (TARGET_MAVERICK)
{
(define_insn "*arm_mulsi3"
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
- (match_operand:SI 1 "s_register_operand" "%?r,0")))]
+ (match_operand:SI 1 "s_register_operand" "%0,r")))]
"TARGET_32BIT && !arm_arch6"
"mul%?\\t%0, %2, %1"
[(set_attr "insn" "mul")
(match_operand:SI 2 "register_operand" "l,0,0")))]
"TARGET_THUMB1 && arm_arch6"
"@
- mul\\t%0, %2
- mul\\t%0, %1
+ mul\\t%0, %2
+ mul\\t%0, %1
mul\\t%0, %1"
[(set_attr "length" "2")
(set_attr "insn" "mul")]
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r")
- (match_operand:SI 1 "s_register_operand" "%?r,0"))
+ (match_operand:SI 1 "s_register_operand" "%0,r"))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(mult:SI (match_dup 2) (match_dup 1)))]
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r")
- (match_operand:SI 1 "s_register_operand" "%?r,0"))
+ (match_operand:SI 1 "s_register_operand" "%0,r"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r"))]
"TARGET_ARM && !arm_arch6"
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
(plus:SI
(mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
- (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
+ (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
+ (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
"TARGET_32BIT && !arm_arch6"
"mla%?\\t%0, %2, %1, %3"
[(set_attr "insn" "mla")
(compare:CC_NOOV
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
- (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
+ (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
+ (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
(plus:SI (mult:SI (match_dup 2) (match_dup 1))
(compare:CC_NOOV
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
+ (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
(match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
(set_attr "predicable" "yes")]
)
-(define_insn "mulsidi3"
+;; 32x32->64 widening multiply.
+;; As with mulsi3, the only difference between the v3-5 and v6+
+;; versions of these patterns is the requirement that the output not
+;; overlap the inputs, but that still means we have to have a named
+;; expander and two different starred insns.
+
+(define_expand "mulsidi3"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
+ (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
+ "TARGET_32BIT && arm_arch3m"
+ ""
+)
+
+(define_insn "*mulsidi3_nov6"
[(set (match_operand:DI 0 "s_register_operand" "=&r")
(mult:DI
(sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
(set_attr "predicable" "yes")]
)
-(define_insn "mulsidi3_v6"
+(define_insn "*mulsidi3_v6"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(mult:DI
(sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
(set_attr "predicable" "yes")]
)
-(define_insn "umulsidi3"
+(define_expand "umulsidi3"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (mult:DI
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
+ (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
+ "TARGET_32BIT && arm_arch3m"
+ ""
+)
+
+(define_insn "*umulsidi3_nov6"
[(set (match_operand:DI 0 "s_register_operand" "=&r")
(mult:DI
(zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
(set_attr "predicable" "yes")]
)
-(define_insn "umulsidi3_v6"
+(define_insn "*umulsidi3_v6"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(mult:DI
(zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
(set_attr "predicable" "yes")]
)
-(define_insn "smulsi3_highpart"
+(define_expand "smulsi3_highpart"
+ [(parallel
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
+ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
+ (const_int 32))))
+ (clobber (match_scratch:SI 3 ""))])]
+ "TARGET_32BIT && arm_arch3m"
+ ""
+)
+
+(define_insn "*smulsi3_highpart_nov6"
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(truncate:SI
(lshiftrt:DI
(mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
+ (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
(sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
(const_int 32))))
(clobber (match_scratch:SI 3 "=&r,&r"))]
(set_attr "predicable" "yes")]
)
-(define_insn "smulsi3_highpart_v6"
+(define_insn "*smulsi3_highpart_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(truncate:SI
(lshiftrt:DI
(set_attr "predicable" "yes")]
)
-(define_insn "umulsi3_highpart"
+(define_expand "umulsi3_highpart"
+ [(parallel
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
+ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
+ (const_int 32))))
+ (clobber (match_scratch:SI 3 ""))])]
+ "TARGET_32BIT && arm_arch3m"
+ ""
+)
+
+(define_insn "*umulsi3_highpart_nov6"
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(truncate:SI
(lshiftrt:DI
(mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
+ (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
(zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
(const_int 32))))
(clobber (match_scratch:SI 3 "=&r,&r"))]
(set_attr "predicable" "yes")]
)
-(define_insn "umulsi3_highpart_v6"
+(define_insn "*umulsi3_highpart_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(truncate:SI
(lshiftrt:DI
[(set (match_operand:DF 0 "s_register_operand" "")
(mult:DF (match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
if (TARGET_MAVERICK
&& !cirrus_fp_register (operands[2], DFmode))
[(set (match_operand:DF 0 "s_register_operand" "")
(div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
(match_operand:DF 2 "arm_float_rhs_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
"")
\f
;; Modulo insns
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(and:DI (zero_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))]
+ (match_operand:DI 1 "s_register_operand" "0,r")))]
"TARGET_32BIT"
"#"
"TARGET_32BIT && reload_completed"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(and:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))]
+ (match_operand:DI 1 "s_register_operand" "0,r")))]
"TARGET_32BIT"
"#"
[(set_attr "length" "8")]
else /* TARGET_THUMB1 */
{
if (GET_CODE (operands[2]) != CONST_INT)
- operands[2] = force_reg (SImode, operands[2]);
+ {
+ rtx tmp = force_reg (SImode, operands[2]);
+ if (rtx_equal_p (operands[0], operands[1]))
+ operands[2] = tmp;
+ else
+ {
+ operands[2] = operands[1];
+ operands[1] = tmp;
+ }
+ }
else
{
int i;
;;; the value before we insert. This loses some of the advantage of having
;;; this insv pattern, so this pattern needs to be reevalutated.
-; ??? Use Thumb-2 bitfield insert/extract instructions
(define_expand "insv"
[(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
(match_operand:SI 1 "general_operand" "")
(match_operand:SI 2 "general_operand" ""))
(match_operand:SI 3 "reg_or_int_operand" ""))]
- "TARGET_ARM"
+ "TARGET_ARM || arm_arch_thumb2"
"
{
int start_bit = INTVAL (operands[2]);
HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
rtx target, subtarget;
- target = operands[0];
+ if (arm_arch_thumb2)
+ {
+ bool use_bfi = TRUE;
+
+ if (GET_CODE (operands[3]) == CONST_INT)
+ {
+ HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
+
+ if (val == 0)
+ {
+ emit_insn (gen_insv_zero (operands[0], operands[1],
+ operands[2]));
+ DONE;
+ }
+
+ /* See if the set can be done with a single orr instruction. */
+ if (val == mask && const_ok_for_arm (val << start_bit))
+ use_bfi = FALSE;
+ }
+
+ if (use_bfi)
+ {
+ if (GET_CODE (operands[3]) != REG)
+ operands[3] = force_reg (SImode, operands[3]);
+
+ emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
+ }
+
+ target = copy_rtx (operands[0]);
/* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
subreg as the final target. */
if (GET_CODE (target) == SUBREG)
}"
)
+;; Thumb-2 bit-field clear (BFC): set to zero a field of operand 0,
+;; width = operand 1, least-significant bit = operand 2.
+(define_insn "insv_zero"
+ [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
+ (match_operand:SI 1 "const_int_operand" "M")
+ (match_operand:SI 2 "const_int_operand" "M"))
+ (const_int 0))]
+ "arm_arch_thumb2"
+ "bfc%?\t%0, %2, %1"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
+;; Thumb-2 bit-field insert (BFI): insert the low bits of operand 3 into
+;; operand 0; width = operand 1, least-significant bit = operand 2.
+(define_insn "insv_t2"
+ [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
+ (match_operand:SI 1 "const_int_operand" "M")
+ (match_operand:SI 2 "const_int_operand" "M"))
+ (match_operand:SI 3 "s_register_operand" "r"))]
+ "arm_arch_thumb2"
+ "bfi%?\t%0, %3, %2, %1"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
; constants for op 2 will never be given to these patterns.
(define_insn_and_split "*anddi_notdi_di"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
- (match_operand:DI 2 "s_register_operand" "0,r")))]
+ (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
+ (match_operand:DI 2 "s_register_operand" "r,0")))]
"TARGET_32BIT"
"#"
"TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(ior:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))]
+ (match_operand:DI 1 "s_register_operand" "0,r")))]
"TARGET_32BIT"
"#"
[(set_attr "length" "8")
DONE;
}
else /* TARGET_THUMB1 */
- operands [2] = force_reg (SImode, operands [2]);
+ {
+ rtx tmp = force_reg (SImode, operands[2]);
+ if (rtx_equal_p (operands[0], operands[1]))
+ operands[2] = tmp;
+ else
+ {
+ operands[2] = operands[1];
+ operands[1] = tmp;
+ }
+ }
}
"
)
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
- "TARGET_32BIT"
+ "TARGET_ARM"
"@
orr%?\\t%0, %1, %2
#"
- "TARGET_32BIT
+ "TARGET_ARM
&& GET_CODE (operands[2]) == CONST_INT
&& !const_ok_for_arm (INTVAL (operands[2]))"
[(clobber (const_int 0))]
(set (match_operand:SI 0 "arm_general_register_operand" "")
(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
(match_operand:SI 2 "const_int_operand" "")))]
- "TARGET_32BIT
+ "TARGET_ARM
&& !const_ok_for_arm (INTVAL (operands[2]))
&& const_ok_for_arm (~INTVAL (operands[2]))"
[(set (match_dup 3) (match_dup 2))
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
(xor:DI (sign_extend:DI
(match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "?r,0")))]
+ (match_operand:DI 1 "s_register_operand" "0,r")))]
"TARGET_32BIT"
"#"
[(set_attr "length" "8")
(define_expand "xorsi3"
[(set (match_operand:SI 0 "s_register_operand" "")
(xor:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))]
+ (match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_EITHER"
- "if (TARGET_THUMB1)
- if (GET_CODE (operands[2]) == CONST_INT)
- operands[2] = force_reg (SImode, operands[2]);
- "
+ "if (GET_CODE (operands[2]) == CONST_INT)
+ {
+ if (TARGET_32BIT)
+ {
+ arm_split_constant (XOR, SImode, NULL_RTX,
+ INTVAL (operands[2]), operands[0], operands[1],
+ optimize && can_create_pseudo_p ());
+ DONE;
+ }
+ else /* TARGET_THUMB1 */
+ {
+ rtx tmp = force_reg (SImode, operands[2]);
+ if (rtx_equal_p (operands[0], operands[1]))
+ operands[2] = tmp;
+ else
+ {
+ operands[2] = operands[1];
+ operands[1] = tmp;
+ }
+ }
+ }"
)
(define_insn "*arm_xorsi3"
(define_insn "*andsi_iorsi3_notsi"
[(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
- (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
+ (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
"TARGET_32BIT"
(define_insn "arm_ashldi3_1bit"
[(set (match_operand:DI 0 "s_register_operand" "=&r,r")
- (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
+ (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
(define_insn "arm_ashrdi3_1bit"
[(set (match_operand:DI 0 "s_register_operand" "=&r,r")
- (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
+ (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
(define_insn "arm_lshrdi3_1bit"
[(set (match_operand:DI 0 "s_register_operand" "=&r,r")
- (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
+ (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
(set (match_operand:SI 0 "register_operand" "")
(lshiftrt:SI (match_dup 4)
(match_operand:SI 3 "const_int_operand" "")))]
- "TARGET_THUMB1"
+ "TARGET_THUMB1 || arm_arch_thumb2"
"
{
HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
+ if (arm_arch_thumb2)
+ {
+ emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
+
operands[3] = GEN_INT (rshift);
if (lshift == 0)
}"
)
+;; Thumb-2 signed bit-field extract (SBFX): extract and sign-extend a
+;; field of operand 1; width = operand 2, least-significant bit = operand 3.
+(define_insn "extv"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "const_int_operand" "M")
+ (match_operand:SI 3 "const_int_operand" "M")))]
+ "arm_arch_thumb2"
+ "sbfx%?\t%0, %1, %3, %2"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
+;; Thumb-2 unsigned bit-field extract (UBFX): extract and zero-extend a
+;; field of operand 1; width = operand 2, least-significant bit = operand 3.
+(define_insn "extzv_t2"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "const_int_operand" "M")
+ (match_operand:SI 3 "const_int_operand" "M")))]
+ "arm_arch_thumb2"
+ "ubfx%?\t%0, %1, %3, %2"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
\f
;; Unary arithmetic insns
(define_expand "negdi2"
[(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (neg:DI (match_operand:DI 1 "s_register_operand" "")))
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (neg:DI (match_operand:DI 1 "s_register_operand" "")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_EITHER"
- "
- if (TARGET_THUMB1)
- {
- if (GET_CODE (operands[1]) != REG)
- operands[1] = force_reg (SImode, operands[1]);
- }
- "
+ ""
)
;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
-;; The second alternative is to allow the common case of a *full* overlap.
+;; The first alternative allows the common case of a *full* overlap.
(define_insn "*arm_negdi2"
[(set (match_operand:DI 0 "s_register_operand" "=&r,r")
- (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
+ (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
"rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
)
(define_insn "*thumb1_negdi2"
- [(set (match_operand:DI 0 "register_operand" "=&l")
- (neg:DI (match_operand:DI 1 "register_operand" "l")))
+ [(set (match_operand:DI 0 "register_operand" "=&l")
+ (neg:DI (match_operand:DI 1 "register_operand" "l")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_THUMB1"
"mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
(define_expand "negdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(neg:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
"")
;; abssi2 doesn't really clobber the condition codes if a different register
(define_expand "absdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(abs:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"")
(define_expand "sqrtsf2"
(define_expand "sqrtdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
"")
(define_insn_and_split "one_cmpldi2"
[(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
+ (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
"TARGET_32BIT"
"#"
"TARGET_32BIT && reload_completed"
\f
;; Fixed <--> Floating conversion insns
+;; SImode -> HFmode: no direct conversion exists, so convert to SFmode
+;; first and then narrow the result to HFmode.
+(define_expand "floatsihf2"
+ [(set (match_operand:HF 0 "general_operand" "")
+ (float:HF (match_operand:SI 1 "general_operand" "")))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1 = gen_reg_rtx (SFmode);
+ expand_float (op1, operands[1], 0);
+ op1 = convert_to_mode (HFmode, op1, 0);
+ emit_move_insn (operands[0], op1);
+ DONE;
+ }"
+)
+
+;; DImode -> HFmode: as for floatsihf2, go via SFmode.
+(define_expand "floatdihf2"
+ [(set (match_operand:HF 0 "general_operand" "")
+ (float:HF (match_operand:DI 1 "general_operand" "")))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1 = gen_reg_rtx (SFmode);
+ expand_float (op1, operands[1], 0);
+ op1 = convert_to_mode (HFmode, op1, 0);
+ emit_move_insn (operands[0], op1);
+ DONE;
+ }"
+)
+
(define_expand "floatsisf2"
[(set (match_operand:SF 0 "s_register_operand" "")
(float:SF (match_operand:SI 1 "s_register_operand" "")))]
(define_expand "floatsidf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(float:DF (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
if (TARGET_MAVERICK)
{
}
")
+;; HFmode -> SImode: widen to SFmode first, then truncate to integer.
+(define_expand "fix_trunchfsi2"
+ [(set (match_operand:SI 0 "general_operand" "")
+ (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1 = convert_to_mode (SFmode, operands[1], 0);
+ expand_fix (operands[0], op1, 0);
+ DONE;
+ }"
+)
+
+;; HFmode -> DImode: widen to SFmode first, then truncate to integer.
+(define_expand "fix_trunchfdi2"
+ [(set (match_operand:DI 0 "general_operand" "")
+ (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1 = convert_to_mode (SFmode, operands[1], 0);
+ expand_fix (operands[0], op1, 0);
+ DONE;
+ }"
+)
+
(define_expand "fix_truncsfsi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
(define_expand "fix_truncdfsi2"
[(set (match_operand:SI 0 "s_register_operand" "")
(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
if (TARGET_MAVERICK)
{
[(set (match_operand:SF 0 "s_register_operand" "")
(float_truncate:SF
(match_operand:DF 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
""
)
+
+/* DFmode -> HFmode conversions have to go through SFmode. */
+;; NOTE(review): this rounds twice (DF->SF, then SF->HF), which can differ
+;; from a single direct rounding for some inputs -- presumably acceptable
+;; here; confirm.
+(define_expand "truncdfhf2"
+ [(set (match_operand:HF 0 "general_operand" "")
+ (float_truncate:HF
+ (match_operand:DF 1 "general_operand" "")))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1;
+ op1 = convert_to_mode (SFmode, operands[1], 0);
+ op1 = convert_to_mode (HFmode, op1, 0);
+ emit_move_insn (operands[0], op1);
+ DONE;
+ }"
+)
\f
;; Zero and sign extension instructions.
""
)
+;; Code iterator covering IOR and XOR so one split serves both.
+(define_code_iterator ior_xor [ior xor])
+
+;; Rewrite (ior/xor (and (ashift X N) MASK) (zero_extend Y)) as the
+;; ior/xor of the shifted X with the full register Y, followed by a
+;; zero_extend of the low part of the result.  Valid only when MASK is
+;; exactly the bits of Y's (narrower) mode that survive the shift, which
+;; is what the condition below checks.
+(define_split
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (ior_xor:SI (and:SI (ashift:SI
+ (match_operand:SI 1 "s_register_operand" "")
+ (match_operand:SI 2 "const_int_operand" ""))
+ (match_operand:SI 3 "const_int_operand" ""))
+ (zero_extend:SI
+ (match_operator 5 "subreg_lowpart_operator"
+ [(match_operand:SI 4 "s_register_operand" "")]))))]
+ "TARGET_32BIT
+ && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
+ == (GET_MODE_MASK (GET_MODE (operands[5]))
+ & (GET_MODE_MASK (GET_MODE (operands[5]))
+ << (INTVAL (operands[2])))))"
+ [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
+ (match_dup 4)))
+ (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
+ "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
+)
+
(define_insn "*compareqi_eq0"
[(set (reg:CC_Z CC_REGNUM)
(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
(define_expand "extendqihi2"
[(set (match_dup 2)
- (ashift:SI (match_operand:QI 1 "general_operand" "")
+ (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
(const_int 24)))
(set (match_operand:HI 0 "s_register_operand" "")
(ashiftrt:SI (match_dup 2)
(define_insn "*arm_extendqihi_insn"
[(set (match_operand:HI 0 "s_register_operand" "=r")
- (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
+ (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
"TARGET_ARM && arm_arch4"
"ldr%(sb%)\\t%0, %1"
[(set_attr "type" "load_byte")
(define_expand "extendqisi2"
[(set (match_dup 2)
- (ashift:SI (match_operand:QI 1 "general_operand" "")
+ (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
(const_int 24)))
(set (match_operand:SI 0 "s_register_operand" "")
(ashiftrt:SI (match_dup 2)
(define_insn "*arm_extendqisi"
[(set (match_operand:SI 0 "s_register_operand" "=r")
- (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
+ (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
"ldr%(sb%)\\t%0, %1"
[(set_attr "type" "load_byte")
(define_insn "*arm_extendqisi_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
+ (sign_extend:SI
+ (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
"TARGET_ARM && arm_arch6"
"@
sxtb%?\\t%0, %1
(define_expand "extendsfdf2"
[(set (match_operand:DF 0 "s_register_operand" "")
(float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
""
)
+
+/* HFmode -> DFmode conversions have to go through SFmode. */
+;; (Both steps widen the value, so the two-step conversion is exact.)
+(define_expand "extendhfdf2"
+ [(set (match_operand:DF 0 "general_operand" "")
+ (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
+ "TARGET_EITHER"
+ "
+ {
+ rtx op1;
+ op1 = convert_to_mode (SFmode, operands[1], 0);
+ op1 = convert_to_mode (DFmode, op1, 0);
+ emit_insn (gen_movdf (operands[0], op1));
+ DONE;
+ }"
+)
\f
;; Move insns (including loads and stores)
(match_operand:SI 1 "general_operand" ""))]
"TARGET_EITHER"
"
+ {
rtx base, offset, tmp;
if (TARGET_32BIT)
optimize && can_create_pseudo_p ());
DONE;
}
+
+ if (TARGET_USE_MOVT && !target_word_relocations
+ && GET_CODE (operands[1]) == SYMBOL_REF
+ && !flag_pic && !arm_tls_referenced_p (operands[1]))
+ {
+ arm_emit_movpair (operands[0], operands[1]);
+ DONE;
+ }
}
else /* TARGET_THUMB1... */
{
(!can_create_pseudo_p ()
? operands[0]
: 0));
+ }
"
)
+;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
+;; LO_SUM adds in the high bits. Fortunately these are opaque operations
+;; so this does not matter.
+(define_insn "*arm_movt"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
+ (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
+ (match_operand:SI 2 "general_operand" "i")))]
+ "TARGET_32BIT"
+ "movt%?\t%0, #:upper16:%c2"
+ [(set_attr "predicable" "yes")
+ (set_attr "length" "4")]
+)
+
(define_insn "*arm_movsi_insn"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m")
- (match_operand:SI 1 "general_operand" "rI,K,N,mi,r"))]
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
+ (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
"TARGET_ARM && ! TARGET_IWMMXT
&& !(TARGET_HARD_FLOAT && TARGET_VFP)
&& ( register_operand (operands[0], SImode)
|| register_operand (operands[1], SImode))"
"@
mov%?\\t%0, %1
+ mov%?\\t%0, %1
mvn%?\\t%0, #%B1
movw%?\\t%0, %1
ldr%?\\t%0, %1
str%?\\t%1, %0"
- [(set_attr "type" "*,*,*,load1,store1")
+ [(set_attr "type" "*,*,*,*,load1,store1")
(set_attr "predicable" "yes")
- (set_attr "pool_range" "*,*,*,4096,*")
- (set_attr "neg_pool_range" "*,*,*,4084,*")]
+ (set_attr "pool_range" "*,*,*,*,4096,*")
+ (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
)
(define_split
)
(define_insn "*thumb1_movsi_insn"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
- (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
+ (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
"TARGET_THUMB1
&& ( register_operand (operands[0], SImode)
|| register_operand (operands[1], SImode))"
(set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
"
{
- unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
+ unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
unsigned HOST_WIDE_INT mask = 0xff;
int i;
;; the insn alone, and to force the minipool generation pass to then move
;; the GOT symbol to memory.
-(define_insn "pic_load_addr_arm"
+(define_insn "pic_load_addr_32bit"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
- "TARGET_ARM && flag_pic"
+ "TARGET_32BIT && flag_pic"
"ldr%?\\t%0, %1"
[(set_attr "type" "load1")
- (set (attr "pool_range") (const_int 4096))
- (set (attr "neg_pool_range") (const_int 4084))]
+ (set_attr "pool_range" "4096")
+ (set (attr "neg_pool_range")
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 4084)
+ (const_int 0)))]
)
(define_insn "pic_load_addr_thumb1"
(set (attr "pool_range") (const_int 1024))]
)
-;; This variant is used for AOF assembly, since it needs to mention the
-;; pic register in the rtl.
-(define_expand "pic_load_addr_based"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
- "TARGET_ARM && flag_pic"
- "operands[2] = cfun->machine->pic_reg;"
-)
-
-(define_insn "*pic_load_addr_based_insn"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (unspec:SI [(match_operand 1 "" "")
- (match_operand 2 "s_register_operand" "r")]
- UNSPEC_PIC_SYM))]
- "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg"
- "*
-#ifdef AOF_ASSEMBLER
- operands[1] = aof_pic_entry (operands[1]);
-#endif
- output_asm_insn (\"ldr%?\\t%0, %a1\", operands);
- return \"\";
- "
- [(set_attr "type" "load1")
- (set (attr "pool_range")
- (if_then_else (eq_attr "is_thumb" "yes")
- (const_int 1024)
- (const_int 4096)))
- (set (attr "neg_pool_range")
- (if_then_else (eq_attr "is_thumb" "yes")
- (const_int 0)
- (const_int 4084)))]
-)
-
(define_insn "pic_add_dot_plus_four"
[(set (match_operand:SI 0 "register_operand" "=r")
- (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
- (const (plus:SI (pc) (const_int 4))))
+ (unspec:SI [(match_operand:SI 1 "register_operand" "0")
+ (const_int 4)
(match_operand 2 "" "")]
UNSPEC_PIC_BASE))]
- "TARGET_THUMB1"
+ "TARGET_THUMB"
"*
(*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
INTVAL (operands[2]));
(define_insn "pic_add_dot_plus_eight"
[(set (match_operand:SI 0 "register_operand" "=r")
- (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
- (const (plus:SI (pc) (const_int 8))))
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (const_int 8)
(match_operand 2 "" "")]
UNSPEC_PIC_BASE))]
"TARGET_ARM"
)
(define_insn "tls_load_dot_plus_eight"
- [(set (match_operand:SI 0 "register_operand" "+r")
- (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
- (const (plus:SI (pc) (const_int 8))))
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (const_int 8)
(match_operand 2 "" "")]
UNSPEC_PIC_BASE)))]
"TARGET_ARM"
;; tls_load_dot_plus_eight by a peephole.
(define_peephole2
- [(parallel [(set (match_operand:SI 0 "register_operand" "")
- (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
- (const (plus:SI (pc) (const_int 8))))]
- UNSPEC_PIC_BASE))
- (use (label_ref (match_operand 1 "" "")))])
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unspec:SI [(match_operand:SI 3 "register_operand" "")
+ (const_int 8)
+ (match_operand 1 "" "")]
+ UNSPEC_PIC_BASE))
(set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
"TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
- [(parallel [(set (match_dup 2)
- (mem:SI (unspec:SI [(plus:SI (match_dup 3)
- (const (plus:SI (pc) (const_int 8))))]
- UNSPEC_PIC_BASE)))
- (use (label_ref (match_dup 1)))])]
+ [(set (match_dup 2)
+ (mem:SI (unspec:SI [(match_dup 3)
+ (const_int 8)
+ (match_dup 1)]
+ UNSPEC_PIC_BASE)))]
""
)
&& GET_CODE (base = XEXP (base, 0)) == REG))
&& REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
{
- rtx new;
+ rtx new_rtx;
- new = widen_memory_access (operands[1], SImode,
- ((INTVAL (offset) & ~3)
- - INTVAL (offset)));
- emit_insn (gen_movsi (reg, new));
+ new_rtx = widen_memory_access (operands[1], SImode,
+ ((INTVAL (offset) & ~3)
+ - INTVAL (offset)));
+ emit_insn (gen_movsi (reg, new_rtx));
if (((INTVAL (offset) & 2) != 0)
^ (BYTES_BIG_ENDIAN ? 1 : 0))
{
/* ??? We shouldn't really get invalid addresses here, but this can
happen if we are passed a SP (never OK for HImode/QImode) or
- virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
- HImode/QImode) relative address. */
+ virtual register (also rejected as illegitimate for HImode/QImode)
+ relative address. */
/* ??? This should perhaps be fixed elsewhere, for instance, in
fixup_stack_1, by checking for other kinds of invalid addresses,
e.g. a bare reference to a virtual register. This may confuse the
{
rtx reg = gen_reg_rtx (SImode);
+ /* For thumb we want an unsigned immediate, then we are more likely
+ to be able to use a movs insn. */
+ if (TARGET_THUMB)
+ operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
+
emit_insn (gen_movsi (reg, operands[1]));
operands[1] = gen_lowpart (QImode, reg);
}
{
/* ??? We shouldn't really get invalid addresses here, but this can
happen if we are passed a SP (never OK for HImode/QImode) or
- virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
- HImode/QImode) relative address. */
+ virtual register (also rejected as illegitimate for HImode/QImode)
+ relative address. */
/* ??? This should perhaps be fixed elsewhere, for instance, in
fixup_stack_1, by checking for other kinds of invalid addresses,
e.g. a bare reference to a virtual register. This may confuse the
(set_attr "pool_range" "*,32,*,*,*,*")]
)
-(define_expand "movsf"
- [(set (match_operand:SF 0 "general_operand" "")
- (match_operand:SF 1 "general_operand" ""))]
+;; HFmode moves
+(define_expand "movhf"
+ [(set (match_operand:HF 0 "general_operand" "")
+ (match_operand:HF 1 "general_operand" ""))]
"TARGET_EITHER"
"
if (TARGET_32BIT)
{
if (GET_CODE (operands[0]) == MEM)
- operands[1] = force_reg (SFmode, operands[1]);
+ operands[1] = force_reg (HFmode, operands[1]);
}
else /* TARGET_THUMB1 */
{
if (can_create_pseudo_p ())
{
if (GET_CODE (operands[0]) != REG)
- operands[1] = force_reg (SFmode, operands[1]);
+ operands[1] = force_reg (HFmode, operands[1]);
}
}
"
)
-;; Transform a floating-point move of a constant into a core register into
-;; an SImode operation.
-(define_split
- [(set (match_operand:SF 0 "arm_general_register_operand" "")
- (match_operand:SF 1 "immediate_operand" ""))]
- "TARGET_32BIT
- && reload_completed
- && GET_CODE (operands[1]) == CONST_DOUBLE"
- [(set (match_dup 2) (match_dup 3))]
- "
- operands[2] = gen_lowpart (SImode, operands[0]);
- operands[3] = gen_lowpart (SImode, operands[1]);
- if (operands[2] == 0 || operands[3] == 0)
- FAIL;
+;; HFmode (__fp16) moves for 32-bit cores without the FP16 extension.
+;; Memory accesses use halfword LDR/STR; constants are materialised in a
+;; core register -- MOVW where Thumb-2 instructions are available,
+;; otherwise a MOV of the high byte followed by an ORR of the low byte.
+(define_insn "*arm32_movhf"
+ [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
+ (match_operand:HF 1 "general_operand" " m,r,r,F"))]
+ "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
+ && ( s_register_operand (operands[0], HFmode)
+ || s_register_operand (operands[1], HFmode))"
+ "*
+ switch (which_alternative)
+ {
+ case 0: /* ARM register from memory */
+ return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
+ case 1: /* memory from ARM register */
+ return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
+ case 2: /* ARM register from ARM register */
+ return \"mov%?\\t%0, %1\\t%@ __fp16\";
+ case 3: /* ARM register from constant */
+ {
+ REAL_VALUE_TYPE r;
+ long bits;
+ rtx ops[4];
+
+ REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
+ bits = real_to_target (NULL, &r, HFmode);
+ ops[0] = operands[0];
+ ops[1] = GEN_INT (bits);
+ ops[2] = GEN_INT (bits & 0xff00);
+ ops[3] = GEN_INT (bits & 0x00ff);
+
+ if (arm_arch_thumb2)
+ output_asm_insn (\"movw%?\\t%0, %1\", ops);
+ else
+ output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
+ return \"\";
+ }
+ default:
+ gcc_unreachable ();
+ }
"
+ [(set_attr "conds" "unconditional")
+ (set_attr "type" "load1,store1,*,*")
+ (set_attr "length" "4,4,4,8")
+ (set_attr "predicable" "yes")
+ ]
)
-(define_insn "*arm_movsf_soft_insn"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
- (match_operand:SF 1 "general_operand" "r,mE,r"))]
- "TARGET_ARM
- && TARGET_SOFT_FLOAT
- && (GET_CODE (operands[0]) != MEM
- || register_operand (operands[1], SFmode))"
- "@
- mov%?\\t%0, %1
+;; HFmode (__fp16) moves for Thumb-1.  Constant-pool loads (LABEL_REF or
+;; label-plus-offset addresses) use a word LDR, since pool entries are
+;; word-sized; other memory accesses use LDRH/STRH; register-to-register
+;; moves use MOV.
+(define_insn "*thumb1_movhf"
+ [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
+ (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
+ "TARGET_THUMB1
+ && ( s_register_operand (operands[0], HFmode)
+ || s_register_operand (operands[1], HFmode))"
+ "*
+ switch (which_alternative)
+ {
+ case 1:
+ {
+ rtx addr;
+ gcc_assert (GET_CODE (operands[1]) == MEM);
+ addr = XEXP (operands[1], 0);
+ if (GET_CODE (addr) == LABEL_REF
+ || (GET_CODE (addr) == CONST
+ && GET_CODE (XEXP (addr, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
+ && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
+ {
+ /* Constant pool entry. */
+ return \"ldr\\t%0, %1\";
+ }
+ return \"ldrh\\t%0, %1\";
+ }
+ case 2: return \"strh\\t%1, %0\";
+ default: return \"mov\\t%0, %1\";
+ }
+ "
+ [(set_attr "length" "2")
+ (set_attr "type" "*,load1,store1,*,*")
+ (set_attr "pool_range" "*,1020,*,*,*")]
+)
+
+;; SFmode move expander.  On 32-bit targets a memory destination requires
+;; the source in a register; on Thumb-1 any non-register destination does
+;; (only while pseudos may still be created).
+(define_expand "movsf"
+ [(set (match_operand:SF 0 "general_operand" "")
+ (match_operand:SF 1 "general_operand" ""))]
+ "TARGET_EITHER"
+ "
+ if (TARGET_32BIT)
+ {
+ if (GET_CODE (operands[0]) == MEM)
+ operands[1] = force_reg (SFmode, operands[1]);
+ }
+ else /* TARGET_THUMB1 */
+ {
+ if (can_create_pseudo_p ())
+ {
+ if (GET_CODE (operands[0]) != REG)
+ operands[1] = force_reg (SFmode, operands[1]);
+ }
+ }
+ "
+)
+
+;; Transform a floating-point move of a constant into a core register into
+;; an SImode operation.
+(define_split
+ [(set (match_operand:SF 0 "arm_general_register_operand" "")
+ (match_operand:SF 1 "immediate_operand" ""))]
+ "TARGET_EITHER
+ && reload_completed
+ && GET_CODE (operands[1]) == CONST_DOUBLE"
+ [(set (match_dup 2) (match_dup 3))]
+ "
+ operands[2] = gen_lowpart (SImode, operands[0]);
+ operands[3] = gen_lowpart (SImode, operands[1]);
+ if (operands[2] == 0 || operands[3] == 0)
+ FAIL;
+ "
+)
+
+(define_insn "*arm_movsf_soft_insn"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
+ (match_operand:SF 1 "general_operand" "r,mE,r"))]
+ "TARGET_ARM
+ && TARGET_SOFT_FLOAT
+ && (GET_CODE (operands[0]) != MEM
+ || register_operand (operands[1], SFmode))"
+ "@
+ mov%?\\t%0, %1
ldr%?\\t%0, %1\\t%@ float
str%?\\t%1, %0\\t%@ float"
[(set_attr "length" "4,4,4")
(match_operand:SI 2 "nonmemory_operand" "")])
(label_ref (match_operand 3 "" ""))
(pc)))]
- "TARGET_THUMB1"
+ "TARGET_THUMB1 || TARGET_32BIT"
"
+ if (!TARGET_THUMB1)
+ {
+ if (!arm_add_operand (operands[2], SImode))
+ operands[2] = force_reg (SImode, operands[2]);
+ emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
if (thumb1_cmpneg_operand (operands[2], SImode))
{
emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
operands[2] = force_reg (SImode, operands[2]);
")
-(define_insn "*cbranchsi4_insn"
+;; A pattern to recognize a special situation and optimize for it.
+;; On the thumb, zero-extension from memory is preferable to sign-extension
+;; due to the available addressing modes. Hence, convert a signed comparison
+;; with zero into an unsigned comparison with 127 if possible.
+(define_expand "cbranchqi4"
+ [(set (pc) (if_then_else
+ (match_operator 0 "lt_ge_comparison_operator"
+ [(match_operand:QI 1 "memory_operand" "")
+ (match_operand:QI 2 "const0_operand" "")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_THUMB1"
+{
+ rtx xops[3];
+ xops[1] = gen_reg_rtx (SImode);
+ emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
+ xops[2] = GEN_INT (127);
+ xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
+ VOIDmode, xops[1], xops[2]);
+ xops[3] = operands[3];
+ emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
+ DONE;
+})
+
+;; SFmode compare-and-branch, implemented via the CC register through
+;; the cbranch_cc expander.
+(define_expand "cbranchsf4"
+ [(set (pc) (if_then_else
+ (match_operator 0 "arm_comparison_operator"
+ [(match_operand:SF 1 "s_register_operand" "")
+ (match_operand:SF 2 "arm_float_compare_operand" "")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
+ operands[3])); DONE;"
+)
+
+;; DFmode compare-and-branch; not available when the VFP unit is
+;; single-precision only.
+(define_expand "cbranchdf4"
+ [(set (pc) (if_then_else
+ (match_operator 0 "arm_comparison_operator"
+ [(match_operand:DF 1 "s_register_operand" "")
+ (match_operand:DF 2 "arm_float_compare_operand" "")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
+ "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
+ operands[3])); DONE;"
+)
+
+;; This uses the Cirrus DI compare instruction.
+(define_expand "cbranchdi4"
+ [(set (pc) (if_then_else
+ (match_operator 0 "arm_comparison_operator"
+ [(match_operand:DI 1 "cirrus_fp_register" "")
+ (match_operand:DI 2 "cirrus_fp_register" "")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
+ "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
+ operands[3])); DONE;"
+)
+
+(define_insn "cbranchsi4_insn"
[(set (pc) (if_then_else
(match_operator 0 "arm_comparison_operator"
[(match_operand:SI 1 "s_register_operand" "l,*h")
(pc)))]
"TARGET_THUMB1"
"*
- output_asm_insn (\"cmp\\t%1, %2\", operands);
+ rtx t = prev_nonnote_insn (insn);
+ if (t != NULL_RTX
+ && INSN_P (t)
+ && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
+ {
+ t = XEXP (SET_SRC (PATTERN (t)), 0);
+ if (!rtx_equal_p (XEXP (t, 0), operands[1])
+ || !rtx_equal_p (XEXP (t, 1), operands[2]))
+ t = NULL_RTX;
+ }
+ else
+ t = NULL_RTX;
+ if (t == NULL_RTX)
+ output_asm_insn (\"cmp\\t%1, %2\", operands);
switch (get_attr_length (insn))
{
(const_int 6)
(const_int 8))))]
)
+
(define_insn "*movsi_cbranchsi4"
[(set (pc)
(if_then_else
(const_int 10)))))]
)
+;; Merge a low-register copy with a following compare-against-zero branch
+;; on the copy's source: the branch insn then performs the copy itself
+;; (see the parallel in the replacement pattern).
+(define_peephole2
+ [(set (match_operand:SI 0 "low_register_operand" "")
+ (match_operand:SI 1 "low_register_operand" ""))
+ (set (pc)
+ (if_then_else (match_operator 2 "arm_comparison_operator"
+ [(match_dup 1) (const_int 0)])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_THUMB1"
+ [(parallel
+ [(set (pc)
+ (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
+ (label_ref (match_dup 3))
+ (pc)))
+ (set (match_dup 0) (match_dup 1))])]
+ ""
+)
+
+;; Sigh! This variant shouldn't be needed, but combine often fails to
+;; merge cases like this because the op1 is a hard register in
+;; CLASS_LIKELY_SPILLED_P.
+(define_peephole2
+ [(set (match_operand:SI 0 "low_register_operand" "")
+ (match_operand:SI 1 "low_register_operand" ""))
+ (set (pc)
+ (if_then_else (match_operator 2 "arm_comparison_operator"
+ [(match_dup 0) (const_int 0)])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ "TARGET_THUMB1"
+ [(parallel
+ [(set (pc)
+ (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
+ (label_ref (match_dup 3))
+ (pc)))
+ (set (match_dup 0) (match_dup 1))])]
+ ""
+)
+
(define_insn "*negated_cbranchsi4"
[(set (pc)
(if_then_else
(define_insn "*addsi3_cbranch"
[(set (pc)
(if_then_else
- (match_operator 4 "comparison_operator"
+ (match_operator 4 "arm_comparison_operator"
[(plus:SI
- (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
- (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
+ (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
+ (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
(const_int 0)])
(label_ref (match_operand 5 "" ""))
(pc)))
(set
(match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
(plus:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
+ (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
"TARGET_THUMB1
&& (GET_CODE (operands[4]) == EQ
|| GET_CODE (operands[4]) == NE
{
rtx cond[3];
-
- cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
+ cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
cond[1] = operands[2];
cond[2] = operands[3];
else
output_asm_insn (\"add\\t%0, %1, %2\", cond);
- if (which_alternative >= 3
+ if (which_alternative >= 2
&& which_alternative < 4)
output_asm_insn (\"mov\\t%0, %1\", operands);
else if (which_alternative >= 4)
(define_insn "*addsi3_cbranch_scratch"
[(set (pc)
(if_then_else
- (match_operator 3 "comparison_operator"
+ (match_operator 3 "arm_comparison_operator"
[(plus:SI
(match_operand:SI 1 "s_register_operand" "%l,l,l,0")
(match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
(define_insn "*subsi3_cbranch"
[(set (pc)
(if_then_else
- (match_operator 4 "comparison_operator"
+ (match_operator 4 "arm_comparison_operator"
[(minus:SI
(match_operand:SI 2 "s_register_operand" "l,l,1,l")
(match_operand:SI 3 "s_register_operand" "l,l,l,l"))
;; Comparison and test insns
-(define_expand "cmpsi"
- [(match_operand:SI 0 "s_register_operand" "")
- (match_operand:SI 1 "arm_add_operand" "")]
- "TARGET_32BIT"
- "{
- arm_compare_op0 = operands[0];
- arm_compare_op1 = operands[1];
- DONE;
- }"
-)
-
-(define_expand "cmpsf"
- [(match_operand:SF 0 "s_register_operand" "")
- (match_operand:SF 1 "arm_float_compare_operand" "")]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
- "
- arm_compare_op0 = operands[0];
- arm_compare_op1 = operands[1];
- DONE;
- "
-)
-
-(define_expand "cmpdf"
- [(match_operand:DF 0 "s_register_operand" "")
- (match_operand:DF 1 "arm_float_compare_operand" "")]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
- "
- arm_compare_op0 = operands[0];
- arm_compare_op1 = operands[1];
- DONE;
- "
-)
-
(define_insn "*arm_cmpsi_insn"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
(set_attr "cirrus" "compare")]
)
-;; Cirrus DI compare instruction
-(define_expand "cmpdi"
- [(match_operand:DI 0 "cirrus_fp_register" "")
- (match_operand:DI 1 "cirrus_fp_register" "")]
- "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
- "{
- arm_compare_op0 = operands[0];
- arm_compare_op1 = operands[1];
- DONE;
- }")
-
(define_insn "*cirrus_cmpdi"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
\f
;; Conditional branch insns
-(define_expand "beq"
- [(set (pc)
- (if_then_else (eq (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bne"
- [(set (pc)
- (if_then_else (ne (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bgt"
- [(set (pc)
- (if_then_else (gt (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "ble"
- [(set (pc)
- (if_then_else (le (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bge"
+(define_expand "cbranch_cc"
[(set (pc)
- (if_then_else (ge (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "blt"
- [(set (pc)
- (if_then_else (lt (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
+ (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
+ (match_operand 2 "" "")])
+ (label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bgtu"
- [(set (pc)
- (if_then_else (gtu (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bleu"
- [(set (pc)
- (if_then_else (leu (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bgeu"
- [(set (pc)
- (if_then_else (geu (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bltu"
- [(set (pc)
- (if_then_else (ltu (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bunordered"
- [(set (pc)
- (if_then_else (unordered (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "bordered"
- [(set (pc)
- (if_then_else (ordered (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "bungt"
- [(set (pc)
- (if_then_else (ungt (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bunlt"
- [(set (pc)
- (if_then_else (unlt (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bunge"
- [(set (pc)
- (if_then_else (unge (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bunle"
- [(set (pc)
- (if_then_else (unle (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
-)
-
-;; The following two patterns need two branch instructions, since there is
-;; no single instruction that will handle all cases.
-(define_expand "buneq"
- [(set (pc)
- (if_then_else (uneq (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "bltgt"
- [(set (pc)
- (if_then_else (ltgt (match_dup 1) (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
+ "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
+ operands[1], operands[2]);
+ operands[2] = const0_rtx;"
)
;;
;; Patterns to match conditional branch insns.
;;
-; Special pattern to match UNEQ.
-(define_insn "*arm_buneq"
- [(set (pc)
- (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "*
- gcc_assert (!arm_ccfsm_state);
-
- return \"bvs\\t%l0\;beq\\t%l0\";
- "
- [(set_attr "conds" "jump_clob")
- (set_attr "length" "8")]
-)
-
-; Special pattern to match LTGT.
-(define_insn "*arm_bltgt"
- [(set (pc)
- (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "*
- gcc_assert (!arm_ccfsm_state);
-
- return \"bmi\\t%l0\;bgt\\t%l0\";
- "
- [(set_attr "conds" "jump_clob")
- (set_attr "length" "8")]
-)
-
(define_insn "*arm_cond_branch"
[(set (pc)
(if_then_else (match_operator 1 "arm_comparison_operator"
(set_attr "type" "branch")]
)
-; Special pattern to match reversed UNEQ.
-(define_insn "*arm_buneq_reversed"
- [(set (pc)
- (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
- (pc)
- (label_ref (match_operand 0 "" ""))))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "*
- gcc_assert (!arm_ccfsm_state);
-
- return \"bmi\\t%l0\;bgt\\t%l0\";
- "
- [(set_attr "conds" "jump_clob")
- (set_attr "length" "8")]
-)
-
-; Special pattern to match reversed LTGT.
-(define_insn "*arm_bltgt_reversed"
- [(set (pc)
- (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
- (pc)
- (label_ref (match_operand 0 "" ""))))]
- "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "*
- gcc_assert (!arm_ccfsm_state);
-
- return \"bvs\\t%l0\;beq\\t%l0\";
- "
- [(set_attr "conds" "jump_clob")
- (set_attr "length" "8")]
-)
-
(define_insn "*arm_cond_branch_reversed"
[(set (pc)
(if_then_else (match_operator 1 "arm_comparison_operator"
; scc insns
-(define_expand "seq"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (eq:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sne"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ne:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sgt"
+(define_expand "cstore_cc"
[(set (match_operand:SI 0 "s_register_operand" "")
- (gt:SI (match_dup 1) (const_int 0)))]
+ (match_operator:SI 1 "" [(match_operand 2 "" "")
+ (match_operand 3 "" "")]))]
"TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
+ "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
+ operands[2], operands[3]);
+ operands[3] = const0_rtx;"
)
-(define_expand "sle"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (le:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sge"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ge:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "slt"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (lt:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sgtu"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (gtu:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sleu"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (leu:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sgeu"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (geu:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sltu"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ltu:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT"
- "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
-)
-
-(define_expand "sunordered"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (unordered:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "sordered"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ordered:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "sungt"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ungt:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "sunge"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (unge:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "sunlt"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (unlt:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
- arm_compare_op1);"
-)
-
-(define_expand "sunle"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (unle:SI (match_dup 1) (const_int 0)))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
- "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
- arm_compare_op1);"
-)
-
-;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
-;;; simple ARM instructions.
-;
-; (define_expand "suneq"
-; [(set (match_operand:SI 0 "s_register_operand" "")
-; (uneq:SI (match_dup 1) (const_int 0)))]
-; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
-; "gcc_unreachable ();"
-; )
-;
-; (define_expand "sltgt"
-; [(set (match_operand:SI 0 "s_register_operand" "")
-; (ltgt:SI (match_dup 1) (const_int 0)))]
-; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
-; "gcc_unreachable ();"
-; )
-
(define_insn "*mov_scc"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(match_operator:SI 1 "arm_comparison_operator"
(match_operator:SI 1 "arm_comparison_operator"
[(match_operand:SI 2 "s_register_operand" "")
(match_operand:SI 3 "reg_or_int_operand" "")]))]
- "TARGET_THUMB1"
+ "TARGET_32BIT || TARGET_THUMB1"
"{
rtx op3, scratch, scratch2;
+ if (!TARGET_THUMB1)
+ {
+ if (!arm_add_operand (operands[3], SImode))
+ operands[3] = force_reg (SImode, operands[3]);
+ emit_insn (gen_cstore_cc (operands[0], operands[1],
+ operands[2], operands[3]));
+ DONE;
+ }
+
if (operands[3] == const0_rtx)
{
switch (GET_CODE (operands[1]))
if (!thumb1_cmp_operand (op3, SImode))
op3 = force_reg (SImode, op3);
scratch = gen_reg_rtx (SImode);
- emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
- emit_insn (gen_negsi2 (operands[0], scratch));
+ emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
break;
case GTU:
op3 = force_reg (SImode, operands[3]);
scratch = gen_reg_rtx (SImode);
- emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
- emit_insn (gen_negsi2 (operands[0], scratch));
+ emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
break;
/* No good sequences for GT, LT. */
DONE;
}")
+(define_expand "cstoresf4"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (match_operator:SI 1 "arm_comparison_operator"
+ [(match_operand:SF 2 "s_register_operand" "")
+ (match_operand:SF 3 "arm_float_compare_operand" "")]))]
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "emit_insn (gen_cstore_cc (operands[0], operands[1],
+ operands[2], operands[3])); DONE;"
+)
+
+(define_expand "cstoredf4"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (match_operator:SI 1 "arm_comparison_operator"
+ [(match_operand:DF 2 "s_register_operand" "")
+ (match_operand:DF 3 "arm_float_compare_operand" "")]))]
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "emit_insn (gen_cstore_cc (operands[0], operands[1],
+ operands[2], operands[3])); DONE;"
+)
+
+;; This pattern uses the Cirrus Maverick DI-mode compare instruction.
+(define_expand "cstoredi4"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (match_operator:SI 1 "arm_comparison_operator"
+ [(match_operand:DI 2 "cirrus_fp_register" "")
+ (match_operand:DI 3 "cirrus_fp_register" "")]))]
+ "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
+ "emit_insn (gen_cstore_cc (operands[0], operands[1],
+ operands[2], operands[3])); DONE;"
+)
+
+
(define_expand "cstoresi_eq0_thumb1"
[(parallel
[(set (match_operand:SI 0 "s_register_operand" "")
[(set_attr "length" "4")]
)
+;; Used as part of the expansion of the Thumb-1 ltu and gtu sequences.
(define_insn "cstoresi_nltu_thumb1"
[(set (match_operand:SI 0 "s_register_operand" "=l,l")
- (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
+ (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
"TARGET_THUMB1"
"cmp\\t%1, %2\;sbc\\t%0, %0, %0"
[(set_attr "length" "4")]
)
+(define_insn_and_split "cstoresi_ltu_thumb1"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l")
+ (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
+ (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
+ "TARGET_THUMB1"
+ "#"
+ "TARGET_THUMB1"
+ [(set (match_dup 3)
+ (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
+ (set (match_dup 0) (neg:SI (match_dup 3)))]
+ "operands[3] = gen_reg_rtx (SImode);"
+ [(set_attr "length" "4")]
+)
+
;; Used as part of the expansion of thumb les sequence.
(define_insn "thumb1_addsi3_addgeu"
[(set (match_operand:SI 0 "s_register_operand" "=l")
if (code == UNEQ || code == LTGT)
FAIL;
- ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
+ ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
+ XEXP (operands[1], 1));
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
(if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
(match_operand:SF 2 "s_register_operand" "")
(match_operand:SF 3 "nonmemory_operand" "")))]
- "TARGET_32BIT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT"
"
{
enum rtx_code code = GET_CODE (operands[1]);
|| (!arm_float_add_operand (operands[3], SFmode)))
operands[3] = force_reg (SFmode, operands[3]);
- ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
+ ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
+ XEXP (operands[1], 1));
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
(if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
(match_operand:DF 2 "s_register_operand" "")
(match_operand:DF 3 "arm_float_add_operand" "")))]
- "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
"
{
enum rtx_code code = GET_CODE (operands[1]);
if (code == UNEQ || code == LTGT)
FAIL;
- ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
+ ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
+ XEXP (operands[1], 1));
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
(set_attr "type" "call")]
)
+
+;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
+;; considered a function call by the branch predictor of some cores (PR40887).
+;; Falls back to blx rN (*call_reg_armv5).
+
(define_insn "*call_mem"
[(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM"
+ "TARGET_ARM && !arm_arch5"
"*
return output_call_mem (operands);
"
(set_attr "type" "call")]
)
+;; Note: see the comment before *call_mem above (armv5+ exclusion, PR40887).
+
(define_insn "*call_value_mem"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
+ "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
"*
return output_call_mem (&operands[1]);
"
/* Emit USE insns before the return. */
for (i = 0; i < XVECLEN (operands[1], 0); i++)
- emit_insn (gen_rtx_USE (VOIDmode,
- SET_DEST (XVECEXP (operands[1], 0, i))));
+ emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
/* Construct the return. */
expand_naked_return ();
(match_operand:SI 2 "const_int_operand" "") ; total range
(match_operand:SI 3 "" "") ; table label
(match_operand:SI 4 "" "")] ; Out of range label
- "TARGET_32BIT"
+ "TARGET_32BIT || optimize_size || flag_pic"
"
{
- rtx reg;
+ enum insn_code code;
if (operands[1] != const0_rtx)
{
- reg = gen_reg_rtx (SImode);
+ rtx reg = gen_reg_rtx (SImode);
emit_insn (gen_addsi3 (reg, operands[0],
GEN_INT (-INTVAL (operands[1]))));
operands[0] = reg;
}
- if (!const_ok_for_arm (INTVAL (operands[2])))
- operands[2] = force_reg (SImode, operands[2]);
-
if (TARGET_ARM)
- {
- emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
- operands[3], operands[4]));
- }
+ code = CODE_FOR_arm_casesi_internal;
+ else if (TARGET_THUMB1)
+ code = CODE_FOR_thumb1_casesi_internal_pic;
else if (flag_pic)
- {
- emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
- operands[2], operands[3], operands[4]));
- }
+ code = CODE_FOR_thumb2_casesi_internal_pic;
else
- {
- emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
- operands[3], operands[4]));
- }
+ code = CODE_FOR_thumb2_casesi_internal;
+
+ if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
+ operands[2] = force_reg (SImode, operands[2]);
+
+ emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
+ operands[3], operands[4]));
DONE;
}"
)
(set_attr "length" "12")]
)
+(define_expand "thumb1_casesi_internal_pic"
+ [(match_operand:SI 0 "s_register_operand" "")
+ (match_operand:SI 1 "thumb1_cmp_operand" "")
+ (match_operand 2 "" "")
+ (match_operand 3 "" "")]
+ "TARGET_THUMB1"
+ {
+ rtx reg0;
+ rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
+ emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
+ operands[3]));
+ reg0 = gen_rtx_REG (SImode, 0);
+ emit_move_insn (reg0, operands[0]);
+ emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
+ DONE;
+ }
+)
+
+(define_insn "thumb1_casesi_dispatch"
+ [(parallel [(set (pc) (unspec [(reg:SI 0)
+ (label_ref (match_operand 0 "" ""))
+;; (label_ref (match_operand 1 "" ""))
+]
+ UNSPEC_THUMB1_CASESI))
+ (clobber (reg:SI IP_REGNUM))
+ (clobber (reg:SI LR_REGNUM))])]
+ "TARGET_THUMB1"
+ "* return thumb1_output_casesi(operands);"
+ [(set_attr "length" "4")]
+)
+
(define_expand "indirect_jump"
[(set (pc)
(match_operand:SI 0 "s_register_operand" ""))]
(compare:CC_NOOV (ior:SI
(and:SI (match_operand:SI 0 "s_register_operand" "")
(const_int 1))
- (match_operator:SI 1 "comparison_operator"
+ (match_operator:SI 1 "arm_comparison_operator"
[(match_operand:SI 2 "s_register_operand" "")
(match_operand:SI 3 "arm_add_operand" "")]))
(const_int 0)))
(define_split
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV (ior:SI
- (match_operator:SI 1 "comparison_operator"
+ (match_operator:SI 1 "arm_comparison_operator"
[(match_operand:SI 2 "s_register_operand" "")
(match_operand:SI 3 "arm_add_operand" "")])
(and:SI (match_operand:SI 0 "s_register_operand" "")
; reversed, check that the memory references aren't volatile.
(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+ [(set (match_operand:SI 0 "s_register_operand" "=rk")
(match_operand:SI 4 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=r")
+ (set (match_operand:SI 1 "s_register_operand" "=rk")
(match_operand:SI 5 "memory_operand" "m"))
- (set (match_operand:SI 2 "s_register_operand" "=r")
+ (set (match_operand:SI 2 "s_register_operand" "=rk")
(match_operand:SI 6 "memory_operand" "m"))
- (set (match_operand:SI 3 "s_register_operand" "=r")
+ (set (match_operand:SI 3 "s_register_operand" "=rk")
(match_operand:SI 7 "memory_operand" "m"))]
"TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
"*
)
(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+ [(set (match_operand:SI 0 "s_register_operand" "=rk")
(match_operand:SI 3 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=r")
+ (set (match_operand:SI 1 "s_register_operand" "=rk")
(match_operand:SI 4 "memory_operand" "m"))
- (set (match_operand:SI 2 "s_register_operand" "=r")
+ (set (match_operand:SI 2 "s_register_operand" "=rk")
(match_operand:SI 5 "memory_operand" "m"))]
"TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
"*
)
(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+ [(set (match_operand:SI 0 "s_register_operand" "=rk")
(match_operand:SI 2 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=r")
+ (set (match_operand:SI 1 "s_register_operand" "=rk")
(match_operand:SI 3 "memory_operand" "m"))]
"TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
"*
(define_peephole
[(set (match_operand:SI 4 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "r"))
+ (match_operand:SI 0 "s_register_operand" "rk"))
(set (match_operand:SI 5 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "r"))
+ (match_operand:SI 1 "s_register_operand" "rk"))
(set (match_operand:SI 6 "memory_operand" "=m")
- (match_operand:SI 2 "s_register_operand" "r"))
+ (match_operand:SI 2 "s_register_operand" "rk"))
(set (match_operand:SI 7 "memory_operand" "=m")
- (match_operand:SI 3 "s_register_operand" "r"))]
+ (match_operand:SI 3 "s_register_operand" "rk"))]
"TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
"*
return emit_stm_seq (operands, 4);
(define_peephole
[(set (match_operand:SI 3 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "r"))
+ (match_operand:SI 0 "s_register_operand" "rk"))
(set (match_operand:SI 4 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "r"))
+ (match_operand:SI 1 "s_register_operand" "rk"))
(set (match_operand:SI 5 "memory_operand" "=m")
- (match_operand:SI 2 "s_register_operand" "r"))]
+ (match_operand:SI 2 "s_register_operand" "rk"))]
"TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
"*
return emit_stm_seq (operands, 3);
(define_peephole
[(set (match_operand:SI 2 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "r"))
+ (match_operand:SI 0 "s_register_operand" "rk"))
(set (match_operand:SI 3 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "r"))]
+ (match_operand:SI 1 "s_register_operand" "rk"))]
"TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
"*
return emit_stm_seq (operands, 2);
[(clobber (const_int 0))]
"TARGET_EITHER"
"
- if (current_function_calls_eh_return)
+ if (crtl->calls_eh_return)
emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
if (TARGET_THUMB1)
thumb1_expand_epilogue ();
(define_insn "stack_tie"
[(set (mem:BLK (scratch))
- (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r")
- (match_operand:SI 1 "s_register_operand" "r")]
+ (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
+ (match_operand:SI 1 "s_register_operand" "rk")]
UNSPEC_PRLG_STK))]
""
""
"TARGET_THUMB1"
"*
making_const_table = TRUE;
+ gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
assemble_zeros (2);
return \"\";
"TARGET_EITHER"
"*
{
+ rtx x = operands[0];
making_const_table = TRUE;
- switch (GET_MODE_CLASS (GET_MODE (operands[0])))
+ switch (GET_MODE_CLASS (GET_MODE (x)))
{
case MODE_FLOAT:
- {
- REAL_VALUE_TYPE r;
- REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
- assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
- break;
- }
+ if (GET_MODE (x) == HFmode)
+ arm_emit_fp16_const (x);
+ else
+ {
+ REAL_VALUE_TYPE r;
+ REAL_VALUE_FROM_CONST_DOUBLE (r, x);
+ assemble_real (r, GET_MODE (x), BITS_PER_WORD);
+ }
+ break;
default:
- assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
+ /* XXX: Sometimes gcc does something really dumb and ends up with
+ a HIGH in a constant pool entry, usually because it's trying to
+ load into a VFP register. We know this will always be used in
+ combination with a LO_SUM which ignores the high bits, so just
+ strip off the HIGH. */
+ if (GET_CODE (x) == HIGH)
+ x = XEXP (x, 0);
+ assemble_integer (x, 4, BITS_PER_WORD, 1);
+ mark_symbol_refs_as_used (x);
break;
}
return \"\";
[(set_attr "predicable" "yes")
(set_attr "insn" "clz")])
-(define_expand "ffssi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_32BIT && arm_arch5"
- "
- {
- rtx t1, t2, t3;
-
- t1 = gen_reg_rtx (SImode);
- t2 = gen_reg_rtx (SImode);
- t3 = gen_reg_rtx (SImode);
-
- emit_insn (gen_negsi2 (t1, operands[1]));
- emit_insn (gen_andsi3 (t2, operands[1], t1));
- emit_insn (gen_clzsi2 (t3, t2));
- emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3));
- DONE;
- }"
-)
+(define_insn "rbitsi2"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
+ "TARGET_32BIT && arm_arch_thumb2"
+ "rbit%?\\t%0, %1"
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "clz")])
(define_expand "ctzsi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_32BIT && arm_arch5"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
+ "TARGET_32BIT && arm_arch_thumb2"
+ "
+ {
+ rtx tmp = gen_reg_rtx (SImode);
+ emit_insn (gen_rbitsi2 (tmp, operands[1]));
+ emit_insn (gen_clzsi2 (operands[0], tmp));
+ }
+ DONE;
"
- {
- rtx t1, t2, t3;
-
- t1 = gen_reg_rtx (SImode);
- t2 = gen_reg_rtx (SImode);
- t3 = gen_reg_rtx (SImode);
-
- emit_insn (gen_negsi2 (t1, operands[1]));
- emit_insn (gen_andsi3 (t2, operands[1], t1));
- emit_insn (gen_clzsi2 (t3, t2));
- emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3));
- DONE;
- }"
)
;; V5E instructions.
[(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
""
"%@ %0 needed for prologue"
+ [(set_attr "length" "0")]
)
[(set_attr "conds" "clob")]
)
+(define_insn "*arm_movtas_ze"
+ [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
+ (const_int 16)
+ (const_int 16))
+ (match_operand:SI 1 "const_int_operand" ""))]
+ "TARGET_32BIT"
+ "movt%?\t%0, %c1"
+ [(set_attr "predicable" "yes")
+ (set_attr "length" "4")]
+)
+
+(define_insn "arm_rev"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
+ "TARGET_EITHER && arm_arch6"
+ "rev\t%0, %1"
+ [(set (attr "length")
+ (if_then_else (eq_attr "is_thumb" "yes")
+ (const_int 2)
+ (const_int 4)))]
+)
+
+(define_expand "arm_legacy_rev"
+ [(set (match_operand:SI 2 "s_register_operand" "")
+ (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
+ (const_int 16))
+ (match_dup 1)))
+ (set (match_dup 2)
+ (lshiftrt:SI (match_dup 2)
+ (const_int 8)))
+ (set (match_operand:SI 3 "s_register_operand" "")
+ (rotatert:SI (match_dup 1)
+ (const_int 8)))
+ (set (match_dup 2)
+ (and:SI (match_dup 2)
+ (const_int -65281)))
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (xor:SI (match_dup 3)
+ (match_dup 2)))]
+ "TARGET_32BIT"
+ ""
+)
+
+;; Reuse temporaries to keep register pressure down.
+(define_expand "thumb_legacy_rev"
+ [(set (match_operand:SI 2 "s_register_operand" "")
+ (ashift:SI (match_operand:SI 1 "s_register_operand" "")
+ (const_int 24)))
+ (set (match_operand:SI 3 "s_register_operand" "")
+ (lshiftrt:SI (match_dup 1)
+ (const_int 24)))
+ (set (match_dup 3)
+ (ior:SI (match_dup 3)
+ (match_dup 2)))
+ (set (match_operand:SI 4 "s_register_operand" "")
+ (const_int 16))
+ (set (match_operand:SI 5 "s_register_operand" "")
+ (rotatert:SI (match_dup 1)
+ (match_dup 4)))
+ (set (match_dup 2)
+ (ashift:SI (match_dup 5)
+ (const_int 24)))
+ (set (match_dup 5)
+ (lshiftrt:SI (match_dup 5)
+ (const_int 24)))
+ (set (match_dup 5)
+ (ior:SI (match_dup 5)
+ (match_dup 2)))
+ (set (match_dup 5)
+ (rotatert:SI (match_dup 5)
+ (match_dup 4)))
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (ior:SI (match_dup 5)
+ (match_dup 3)))]
+ "TARGET_THUMB"
+ ""
+)
+
+(define_expand "bswapsi2"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
+"TARGET_EITHER"
+"
+ if (!arm_arch6)
+ {
+ if (!optimize_size)
+ {
+ rtx op2 = gen_reg_rtx (SImode);
+ rtx op3 = gen_reg_rtx (SImode);
+
+ if (TARGET_THUMB)
+ {
+ rtx op4 = gen_reg_rtx (SImode);
+ rtx op5 = gen_reg_rtx (SImode);
+
+ emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
+ op2, op3, op4, op5));
+ }
+ else
+ {
+ emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
+ op2, op3));
+ }
+
+ DONE;
+ }
+ else
+ FAIL;
+ }
+ "
+)
+
;; Load the FPA co-processor patterns
(include "fpa.md")
;; Load the Maverick co-processor patterns