;;- Machine description for ARM for GNU compiler
;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
-;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
;; Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; Register numbers
(define_constants
[(R0_REGNUM 0) ; First CORE register
+ (R1_REGNUM 1) ; Second CORE register
(IP_REGNUM 12) ; Scratch register
(SP_REGNUM 13) ; Stack pointer
(LR_REGNUM 14) ; Return address register
(DOM_CC_X_OR_Y 2)
]
)
-
-;; UNSPEC Usage:
-;; Note: sin and cos are no-longer used.
-;; Unspec constants for Neon are defined in neon.md.
-
+;; Conditional compare combinations: which of CMP/CMN implements each half.
(define_constants
- [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
- ; operand 0 is the result,
- ; operand 1 the parameter.
- (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
- ; operand 0 is the result,
- ; operand 1 the parameter.
- (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
- ; operand 0 is the first register,
- ; subsequent registers are in parallel (use ...)
- ; expressions.
- (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
- ; usage, that is, we will add the pic_register
- ; value to it before trying to dereference it.
- (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
- ; The last operand is the number of a PIC_LABEL
- ; that points at the containing instruction.
- (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
- ; being scheduled before the stack adjustment insn.
- (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
- ; this unspec is used to prevent the deletion of
- ; instructions setting registers for EH handling
- ; and stack frame generation. Operand 0 is the
- ; register to "use".
- (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
- (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
- (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
- (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
- (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
- (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
- (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
- (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
- (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
- (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
- (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
- (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
- (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
- (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
- (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
- ; instruction stream.
- (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
- ; generate correct unwind information.
- (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
- ; correctly for PIC usage.
- (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
- ; a given symbolic address.
- (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
- (UNSPEC_RBIT 26) ; rbit operation.
- (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
- ; another symbolic address.
+ [(CMP_CMP 0)
+ (CMN_CMP 1)
+ (CMP_CMN 2)
+ (CMN_CMN 3)
+ (NUM_OF_COND_CMP 4)
]
)
+;; UNSPEC Usage:
+;; Note: sin and cos are no longer used.
+;; Unspec enumerators for Neon are defined in neon.md.
+
+(define_c_enum "unspec" [
+ UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
+ ; operand 0 is the result,
+ ; operand 1 the parameter.
+  UNSPEC_COS            ; `cos' operation (MODE_FLOAT):
+ ; operand 0 is the result,
+ ; operand 1 the parameter.
+ UNSPEC_PUSH_MULT ; `push multiple' operation:
+ ; operand 0 is the first register,
+ ; subsequent registers are in parallel (use ...)
+ ; expressions.
+ UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
+ ; usage, that is, we will add the pic_register
+ ; value to it before trying to dereference it.
+  UNSPEC_PIC_BASE       ; Add PC and all but the last operand together.
+ ; The last operand is the number of a PIC_LABEL
+ ; that points at the containing instruction.
+ UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
+ ; being scheduled before the stack adjustment insn.
+ UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
+ ; this unspec is used to prevent the deletion of
+ ; instructions setting registers for EH handling
+ ; and stack frame generation. Operand 0 is the
+ ; register to "use".
+ UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
+ UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
+ UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
+ UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
+ UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
+ UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
+ UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
+ UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
+ UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
+ UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
+ UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
+ UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
+ UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
+ UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
+ UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
+ ; instruction stream.
+ UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
+ ; correctly for PIC usage.
+  UNSPEC_GOTSYM_OFF     ; The offset of the start of the GOT from a
+                        ; given symbolic address.
+ UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
+ UNSPEC_RBIT ; rbit operation.
+ UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
+ ; another symbolic address.
+ UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
+ UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
+ ; unaligned locations, on architectures which support
+ ; that.
+ UNSPEC_UNALIGNED_STORE ; Same for str/strh.
+ UNSPEC_PIC_UNIFIED ; Create a common pic addressing form.
+])
+
;; UNSPEC_VOLATILE Usage:
-(define_constants
- [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
- ; insn in the code.
- (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
- ; instruction epilogue sequence that isn't expanded
- ; into normal RTL. Used for both normal and sibcall
- ; epilogues.
- (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
- ; for inlined constants.
- (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
- ; table.
- (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
- ; an 8-bit object.
- (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
- ; a 16-bit object.
- (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
- ; a 32-bit object.
- (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
- ; a 64-bit object.
- (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
- ; a 128-bit object.
- (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
- (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
- (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
- (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
- (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
- (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
- (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
- ; handling.
- ]
-)
+(define_c_enum "unspecv" [
+ VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
+ ; insn in the code.
+ VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
+ ; instruction epilogue sequence that isn't expanded
+ ; into normal RTL. Used for both normal and sibcall
+ ; epilogues.
+ VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
+ ; modes from arm to thumb.
+ VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
+ ; for inlined constants.
+ VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
+ ; table.
+ VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
+ ; an 8-bit object.
+ VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
+ ; a 16-bit object.
+ VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
+ ; a 32-bit object.
+ VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
+ ; a 64-bit object.
+ VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
+ ; a 128-bit object.
+ VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
+ VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
+ VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
+ VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
+ VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
+  VUNSPEC_WCMP_GT       ; Used by the iWMMXt WCMPGT instructions
+ VUNSPEC_EH_RETURN ; Use to override the return address for exception
+ ; handling.
+ VUNSPEC_ATOMIC_CAS ; Represent an atomic compare swap.
+ VUNSPEC_ATOMIC_XCHG ; Represent an atomic exchange.
+ VUNSPEC_ATOMIC_OP ; Represent an atomic operation.
+ VUNSPEC_LL ; Represent a load-register-exclusive.
+ VUNSPEC_SC ; Represent a store-register-exclusive.
+])
\f
;;---------------------------------------------------------------------------
;; Attributes
+;; Processor type. This is created automatically from arm-cores.def.
+(include "arm-tune.md")
+
; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
; generating ARM code. This is used to control the length of some insn
; patterns that share the same RTL in both ARM and Thumb code.
; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
+; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
+(define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
+
;; Operand number of an input operand that is shifted. Zero if the
;; given instruction does not shift one of its input operands.
(define_attr "shift" "" (const_int 0))
(const (symbol_ref "arm_fpu_attr")))
; LENGTH of an instruction (in bytes)
-(define_attr "length" "" (const_int 4))
+(define_attr "length" ""
+ (const_int 4))
+
+; The architecture which supports the instruction (or alternative).
+; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
+; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode, "v6"
+; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
+; arm_arch6.  "onlya8" and "nota8" restrict an alternative to cores
+; that are (or are not) tuned for Cortex-A8.  This attribute is used
+; to compute the "enabled" attribute; use "any" to enable an
+; alternative in all cases.
+(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
+ (const_string "any"))
+
+(define_attr "arch_enabled" "no,yes"
+ (cond [(eq_attr "arch" "any")
+ (const_string "yes")
+
+ (and (eq_attr "arch" "a")
+ (match_test "TARGET_ARM"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "t")
+ (match_test "TARGET_THUMB"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "t1")
+ (match_test "TARGET_THUMB1"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "t2")
+ (match_test "TARGET_THUMB2"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "32")
+ (match_test "TARGET_32BIT"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "v6")
+ (match_test "TARGET_32BIT && arm_arch6"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "nov6")
+ (match_test "TARGET_32BIT && !arm_arch6"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "onlya8")
+ (eq_attr "tune" "cortexa8"))
+ (const_string "yes")
+
+ (and (eq_attr "arch" "nota8")
+ (not (eq_attr "tune" "cortexa8")))
+ (const_string "yes")]
+ (const_string "no")))
+
+; Allows an insn to disable certain alternatives for reasons other than
+; arch support.
+(define_attr "insn_enabled" "no,yes"
+ (const_string "yes"))
+
+; Enable all alternatives that are both arch_enabled and insn_enabled.
+(define_attr "enabled" "no,yes"
+ (if_then_else (eq_attr "insn_enabled" "yes")
+ (if_then_else (eq_attr "arch_enabled" "yes")
+ (const_string "yes")
+ (const_string "no"))
+ (const_string "no")))
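+
+; For example (purely illustrative), the *arm_addsi3 pattern below marks
+; its addw/subw alternatives with
+;
+;   (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")
+;
+; so those alternatives are only "enabled" when compiling for Thumb-2;
+; in ARM or Thumb-1 state the register allocator never considers them.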
; POOL_RANGE is how far away from a constant pool entry that this insn
; can be placed. If the distance is zero, then this insn will never
; reference the pool.
; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
-; before its address.
-(define_attr "pool_range" "" (const_int 0))
-(define_attr "neg_pool_range" "" (const_int 0))
+; before its address. It is set to <max_range> - (8 + <data_size>).
+(define_attr "arm_pool_range" "" (const_int 0))
+(define_attr "thumb2_pool_range" "" (const_int 0))
+(define_attr "arm_neg_pool_range" "" (const_int 0))
+(define_attr "thumb2_neg_pool_range" "" (const_int 0))
+
+(define_attr "pool_range" ""
+ (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
+ (attr "arm_pool_range")))
+(define_attr "neg_pool_range" ""
+ (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
+ (attr "arm_neg_pool_range")))
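+
+; Worked example of the formula above: for an ARM-state SImode load with a
+; maximum backward reach of 4096 bytes and a 4-byte datum,
+;
+;   arm_neg_pool_range = 4096 - (8 + 4) = 4084
+;
+; which is the neg_pool_range value such load patterns have traditionally
+; used in this file.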
; An assembler sequence may clobber the condition codes without us knowing.
; If such an insn references the pool, then we have no way of knowing how,
; ffarith Fast floating point arithmetic (2 cycle)
; float_em a floating point arithmetic operation that is normally emulated
; even on a machine with an fpa.
-; f_load a floating point load from memory
-; f_store a floating point store to memory
-; f_load[sd] single/double load from memory
-; f_store[sd] single/double store to memory
+; f_fpa_load a floating point load from memory. Only for the FPA.
+; f_fpa_store a floating point store to memory. Only for the FPA.
+; f_load[sd] A single/double load from memory. Used for VFP unit.
+; f_store[sd] A single/double store to memory. Used for VFP unit.
; f_flag a transfer of co-processor flags to the CPSR
; f_mem_r a transfer of a floating point register to a real reg via mem
; r_mem_f the reverse of f_mem_r
;
(define_attr "type"
- "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
+ "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
(if_then_else
(eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
(const_string "mult")
(const_string "alu")))
+; Is this an (integer side) multiply with a 64-bit result?
+(define_attr "mul64" "no,yes"
+ (if_then_else
+ (eq_attr "insn" "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
+ (const_string "yes")
+ (const_string "no")))
+
; Load scheduling, set from the arm_ld_sched variable
-; initialized by arm_override_options()
+; initialized by arm_option_override()
(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
;; Classification of NEON instructions for scheduling purposes.
; CLOB means that the condition codes are altered in an undefined manner, if
; they are altered at all
;
-; UNCONDITIONAL means the instions can not be conditionally executed.
+; UNCONDITIONAL means the instruction cannot be conditionally executed and
+; that the instruction does not use or alter the condition codes.
;
-; NOCOND means that the condition codes are neither altered nor affect the
-; output of this insn
+; NOCOND means that the instruction does not use or alter the condition
+; codes but can be converted into a conditionally executed instruction.
(define_attr "conds" "use,set,clob,unconditional,nocond"
- (if_then_else (eq_attr "type" "call")
+ (if_then_else
+ (ior (eq_attr "is_thumb1" "yes")
+ (eq_attr "type" "call"))
(const_string "clob")
(if_then_else (eq_attr "neon_type" "none")
(const_string "nocond")
; to stall the processor. Used with model_wbuf above.
(define_attr "write_conflict" "no,yes"
(if_then_else (eq_attr "type"
- "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
+ "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
(const_string "yes")
(const_string "no")))
;;---------------------------------------------------------------------------
;; Mode iterators
-; A list of modes that are exactly 64 bits in size. We use this to expand
-; some splits that are the same for all modes when operating on ARM
-; registers.
-(define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
-
-;; The integer modes up to word size
-(define_mode_iterator QHSI [QI HI SI])
+(include "iterators.md")
;;---------------------------------------------------------------------------
;; Predicates
;;---------------------------------------------------------------------------
;; Pipeline descriptions
-;; Processor type. This is created automatically from arm-cores.def.
-(include "arm-tune.md")
-
(define_attr "tune_cortexr4" "yes,no"
(const (if_then_else
- (eq_attr "tune" "cortexr4,cortexr4f")
+ (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
(const_string "yes")
(const_string "no"))))
;; True if the generic scheduling description should be used.
(define_attr "generic_sched" "yes,no"
- (const (if_then_else
- (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
- (eq_attr "tune_cortexr4" "yes"))
+ (const (if_then_else
+ (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexa15,cortexm4")
+ (eq_attr "tune_cortexr4" "yes"))
(const_string "no")
(const_string "yes"))))
(define_attr "generic_vfp" "yes,no"
(const (if_then_else
(and (eq_attr "fpu" "vfp")
- (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
+ (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
(eq_attr "tune_cortexr4" "no"))
(const_string "yes")
(const_string "no"))))
(include "arm1020e.md")
(include "arm1026ejs.md")
(include "arm1136jfs.md")
+(include "fa526.md")
+(include "fa606te.md")
+(include "fa626te.md")
+(include "fmp626.md")
+(include "fa726te.md")
+(include "cortex-a5.md")
(include "cortex-a8.md")
(include "cortex-a9.md")
+(include "cortex-a15.md")
(include "cortex-r4.md")
(include "cortex-r4f.md")
+(include "cortex-m4.md")
+(include "cortex-m4-fpu.md")
(include "vfp11.md")
\f
;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
;; put the duplicated register first, and not try the commutative version.
(define_insn_and_split "*arm_addsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
- (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
- (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
+ [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
+ (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
+ (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
"TARGET_32BIT"
"@
add%?\\t%0, %1, %2
add%?\\t%0, %1, %2
add%?\\t%0, %2, %1
+ addw%?\\t%0, %1, %2
+ addw%?\\t%0, %1, %2
sub%?\\t%0, %1, #%n2
sub%?\\t%0, %1, #%n2
+ subw%?\\t%0, %1, #%n2
+ subw%?\\t%0, %1, #%n2
#"
"TARGET_32BIT
&& GET_CODE (operands[2]) == CONST_INT
- && !(const_ok_for_arm (INTVAL (operands[2]))
- || const_ok_for_arm (-INTVAL (operands[2])))
+ && !const_ok_for_op (INTVAL (operands[2]), PLUS)
&& (reload_completed || !arm_eliminable_register (operands[1]))"
[(clobber (const_int 0))]
"
operands[1], 0);
DONE;
"
- [(set_attr "length" "4,4,4,4,4,16")
- (set_attr "predicable" "yes")]
+ [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
+ (set_attr "predicable" "yes")
+ (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
)
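+
+; Illustrative note on the new addw/subw alternatives (assuming the Pj/PJ
+; constraints accept the 12-bit immediates of those instructions): a
+; Thumb-2 addition of a constant such as 4095, which is not a valid 8-bit
+; rotated immediate, no longer has to be split and can be emitted directly
+; as, e.g.,
+;
+;   addw    r0, r1, #4095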
-;; Register group 'k' is a single register group containing only the stack
-;; register. Trying to reload it will always fail catastrophically,
-;; so never allow those alternatives to match if reloading is needed.
-
(define_insn_and_split "*thumb1_addsi3"
- [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
- (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
- (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
+ [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
+ (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
+ (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
"TARGET_THUMB1"
"*
static const char * const asms[] =
\"add\\t%0, %1, %2\",
\"add\\t%0, %1, %2\",
\"#\",
+ \"#\",
\"#\"
};
if ((which_alternative == 2 || which_alternative == 6)
return asms[which_alternative];
"
"&& reload_completed && CONST_INT_P (operands[2])
- && operands[1] != stack_pointer_rtx
- && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
+ && ((operands[1] != stack_pointer_rtx
+ && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
+ || (operands[1] == stack_pointer_rtx
+ && INTVAL (operands[2]) > 1020))"
[(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
(set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
{
HOST_WIDE_INT offset = INTVAL (operands[2]);
- if (offset > 255)
- offset = 255;
- else if (offset < -255)
- offset = -255;
-
+ if (operands[1] == stack_pointer_rtx)
+ offset -= 1020;
+ else
+ {
+ if (offset > 255)
+ offset = 255;
+ else if (offset < -255)
+ offset = -255;
+ }
operands[3] = GEN_INT (offset);
operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
}
- [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
+ [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
)
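+
+; Worked example of the stack-pointer case of the split above (register
+; numbers are illustrative): "add r4, sp, #1200" cannot be encoded in
+; Thumb-1, so it is split into
+;
+;   add     r4, sp, #1020
+;   add     r4, r4, #180
+;
+; i.e. operands[2] becomes 1020 and operands[3] the remaining 180.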
;; Reloading and elimination of the frame pointer can
""
)
-(define_insn "*addsi3_compare0"
+(define_insn "addsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
(plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
(define_insn "*compare_negsi_si"
(match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_32BIT"
"cmn%?\\t%1, %0"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
;; This is the canonicalization of addsi3_compare0_for_combiner when the
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
(define_insn "*compare_addsi2_op1"
"@
cmn%?\\t%0, %1
cmp%?\\t%0, #%n1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
-(define_code_iterator LTUGEU [ltu geu])
-(define_code_attr cnb [(ltu "CC_C") (geu "CC")])
-(define_code_attr optab [(ltu "ltu") (geu "geu")])
-
(define_insn "*addsi3_carryin_<optab>"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
(const_string "alu_shift_reg")))]
)
+(define_insn "*addsi3_carryin_clobercc_<optab>"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
+ (match_operand:SI 2 "arm_rhs_operand" "rI"))
+ (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
+ (clobber (reg:CC CC_REGNUM))]
+ "TARGET_32BIT"
+ "adc%.\\t%0, %1, %2"
+ [(set_attr "conds" "set")]
+)
+
(define_expand "incscc"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_operator:SI 2 "arm_comparison_operator"
"
)
-(define_insn "*thumb1_subsi3_insn"
+(define_insn "thumb1_subsi3_insn"
[(set (match_operand:SI 0 "register_operand" "=l")
(minus:SI (match_operand:SI 1 "register_operand" "l")
- (match_operand:SI 2 "register_operand" "l")))]
+ (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
"TARGET_THUMB1"
"sub\\t%0, %1, %2"
- [(set_attr "length" "2")]
-)
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
; ??? Check Thumb-2 split length
(define_insn_and_split "*arm_subsi3_insn"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
- (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,!k,?n,r")
- (match_operand:SI 2 "reg_or_int_operand" "r,rI, r, r,?n")))]
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r")
+ (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n")
+ (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r")))]
"TARGET_32BIT"
"@
rsb%?\\t%0, %2, %1
sub%?\\t%0, %1, %2
sub%?\\t%0, %1, %2
- #
#"
- "&& ((GET_CODE (operands[1]) == CONST_INT
- && !const_ok_for_arm (INTVAL (operands[1])))
- || (GET_CODE (operands[2]) == CONST_INT
- && !const_ok_for_arm (INTVAL (operands[2]))))"
+ "&& (GET_CODE (operands[1]) == CONST_INT
+ && !const_ok_for_arm (INTVAL (operands[1])))"
[(clobber (const_int 0))]
"
arm_split_constant (MINUS, SImode, curr_insn,
INTVAL (operands[1]), operands[0], operands[2], 0);
DONE;
"
- [(set_attr "length" "4,4,4,16,16")
+ [(set_attr "length" "4,4,4,16")
(set_attr "predicable" "yes")]
)
(define_insn "maddhisi4"
[(set (match_operand:SI 0 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 3 "s_register_operand" "r")
- (mult:SI (sign_extend:SI
- (match_operand:HI 1 "s_register_operand" "%r"))
+ (plus:SI (mult:SI (sign_extend:SI
+ (match_operand:HI 1 "s_register_operand" "r"))
(sign_extend:SI
- (match_operand:HI 2 "s_register_operand" "r")))))]
+ (match_operand:HI 2 "s_register_operand" "r")))
+ (match_operand:SI 3 "s_register_operand" "r")))]
"TARGET_DSP_MULTIPLY"
"smlabb%?\\t%0, %1, %2, %3"
[(set_attr "insn" "smlaxy")
(set_attr "predicable" "yes")]
)
-(define_insn "*maddhidi4"
+;; Note: there is no *maddhisi4bt because this one is the canonical form.
+(define_insn "*maddhisi4tb"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI (mult:SI (ashiftrt:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 16))
+ (sign_extend:SI
+ (match_operand:HI 2 "s_register_operand" "r")))
+ (match_operand:SI 3 "s_register_operand" "r")))]
+ "TARGET_DSP_MULTIPLY"
+ "smlatb%?\\t%0, %1, %2, %3"
+ [(set_attr "insn" "smlaxy")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "*maddhisi4tt"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI (mult:SI (ashiftrt:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 16))
+ (ashiftrt:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (const_int 16)))
+ (match_operand:SI 3 "s_register_operand" "r")))]
+ "TARGET_DSP_MULTIPLY"
+ "smlatt%?\\t%0, %1, %2, %3"
+ [(set_attr "insn" "smlaxy")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "maddhidi4"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(plus:DI
- (match_operand:DI 3 "s_register_operand" "0")
(mult:DI (sign_extend:DI
- (match_operand:HI 1 "s_register_operand" "%r"))
+ (match_operand:HI 1 "s_register_operand" "r"))
(sign_extend:DI
- (match_operand:HI 2 "s_register_operand" "r")))))]
+ (match_operand:HI 2 "s_register_operand" "r")))
+ (match_operand:DI 3 "s_register_operand" "0")))]
"TARGET_DSP_MULTIPLY"
"smlalbb%?\\t%Q0, %R0, %1, %2"
[(set_attr "insn" "smlalxy")
(set_attr "predicable" "yes")])
+;; Note: there is no *maddhidi4bt because this one is the canonical form.
+(define_insn "*maddhidi4tb"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (plus:DI
+ (mult:DI (sign_extend:DI
+ (ashiftrt:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 16)))
+ (sign_extend:DI
+ (match_operand:HI 2 "s_register_operand" "r")))
+ (match_operand:DI 3 "s_register_operand" "0")))]
+ "TARGET_DSP_MULTIPLY"
+ "smlaltb%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "insn" "smlalxy")
+ (set_attr "predicable" "yes")])
+
+(define_insn "*maddhidi4tt"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (plus:DI
+ (mult:DI (sign_extend:DI
+ (ashiftrt:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (const_int 16)))
+ (sign_extend:DI
+ (ashiftrt:SI
+ (match_operand:SI 2 "s_register_operand" "r")
+ (const_int 16))))
+ (match_operand:DI 3 "s_register_operand" "0")))]
+ "TARGET_DSP_MULTIPLY"
+ "smlaltt%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "insn" "smlalxy")
+ (set_attr "predicable" "yes")])
+
(define_expand "mulsf3"
[(set (match_operand:SF 0 "s_register_operand" "")
(mult:SF (match_operand:SF 1 "s_register_operand" "")
{
if (GET_CODE (operands[2]) == CONST_INT)
{
- arm_split_constant (AND, SImode, NULL_RTX,
- INTVAL (operands[2]), operands[0],
- operands[1], optimize && can_create_pseudo_p ());
+ if (INTVAL (operands[2]) == 255 && arm_arch6)
+ {
+ operands[1] = convert_to_mode (QImode, operands[1], 1);
+ emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
+ operands[1]));
+ }
+ else
+ arm_split_constant (AND, SImode, NULL_RTX,
+ INTVAL (operands[2]), operands[0],
+ operands[1],
+ optimize && can_create_pseudo_p ());
DONE;
}
operands[2] = force_reg (SImode,
GEN_INT (~INTVAL (operands[2])));
- emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
+ emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
DONE;
}
(and:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
"TARGET_THUMB1"
- "and\\t%0, %0, %2"
- [(set_attr "length" "2")]
-)
+ "and\\t%0, %2"
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_insn "*andsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
output_asm_insn (\"tst%?\\t%0, %1\", operands);
return \"\";
"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
(define_insn_and_split "*ne_zeroextractsi"
;;; this insv pattern, so this pattern needs to be reevaluated.
(define_expand "insv"
- [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
- (match_operand:SI 1 "general_operand" "")
- (match_operand:SI 2 "general_operand" ""))
- (match_operand:SI 3 "reg_or_int_operand" ""))]
+ [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
+ (match_operand 1 "general_operand" "")
+ (match_operand 2 "general_operand" ""))
+ (match_operand 3 "reg_or_int_operand" ""))]
"TARGET_ARM || arm_arch_thumb2"
"
{
if (arm_arch_thumb2)
{
- bool use_bfi = TRUE;
-
- if (GET_CODE (operands[3]) == CONST_INT)
+ if (unaligned_access && MEM_P (operands[0])
+ && s_register_operand (operands[3], GET_MODE (operands[3]))
+ && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
{
- HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
+ rtx base_addr;
- if (val == 0)
+ if (BYTES_BIG_ENDIAN)
+ start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
+ - start_bit;
+
+ if (width == 32)
{
- emit_insn (gen_insv_zero (operands[0], operands[1],
- operands[2]));
- DONE;
+ base_addr = adjust_address (operands[0], SImode,
+ start_bit / BITS_PER_UNIT);
+ emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
}
+ else
+ {
+ rtx tmp = gen_reg_rtx (HImode);
- /* See if the set can be done with a single orr instruction. */
- if (val == mask && const_ok_for_arm (val << start_bit))
- use_bfi = FALSE;
+ base_addr = adjust_address (operands[0], HImode,
+ start_bit / BITS_PER_UNIT);
+ emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
+ emit_insn (gen_unaligned_storehi (base_addr, tmp));
+ }
+ DONE;
}
-
- if (use_bfi)
+ else if (s_register_operand (operands[0], GET_MODE (operands[0])))
{
- if (GET_CODE (operands[3]) != REG)
- operands[3] = force_reg (SImode, operands[3]);
+ bool use_bfi = TRUE;
- emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
- operands[3]));
- DONE;
+ if (GET_CODE (operands[3]) == CONST_INT)
+ {
+ HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
+
+ if (val == 0)
+ {
+ emit_insn (gen_insv_zero (operands[0], operands[1],
+ operands[2]));
+ DONE;
+ }
+
+ /* See if the set can be done with a single orr instruction. */
+ if (val == mask && const_ok_for_arm (val << start_bit))
+ use_bfi = FALSE;
+ }
+
+ if (use_bfi)
+ {
+ if (GET_CODE (operands[3]) != REG)
+ operands[3] = force_reg (SImode, operands[3]);
+
+ emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
}
+ else
+ FAIL;
}
+ if (!s_register_operand (operands[0], GET_MODE (operands[0])))
+ FAIL;
+
target = copy_rtx (operands[0]);
/* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
subreg as the final target. */
[(set_attr "predicable" "yes")]
)
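+
+; Sketch of the unaligned-store path added to the "insv" expander above
+; (little-endian assumed, registers illustrative): inserting a 16-bit,
+; byte-aligned field into memory, e.g.
+;
+;   (set (zero_extract (mem ...) (const_int 16) (const_int 8)) (reg:SI rX))
+;
+; is expanded to an unaligned_storehi of the low half of rX at base
+; address + 1, instead of a read-modify-write sequence.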
-(define_insn "bicsi3"
+(define_insn "thumb1_bicsi3"
[(set (match_operand:SI 0 "register_operand" "=l")
(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
(match_operand:SI 2 "register_operand" "0")))]
"TARGET_THUMB1"
- "bic\\t%0, %0, %1"
- [(set_attr "length" "2")]
-)
+ "bic\\t%0, %1"
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_insn "andsi_not_shiftsi_si"
[(set (match_operand:SI 0 "s_register_operand" "=r")
"
)
-(define_insn_and_split "*arm_iorsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
- (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
- "TARGET_ARM"
+(define_insn_and_split "*iorsi3_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+ (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
+ (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
+ "TARGET_32BIT"
"@
orr%?\\t%0, %1, %2
+ orn%?\\t%0, %1, #%B2
#"
- "TARGET_ARM
+ "TARGET_32BIT
&& GET_CODE (operands[2]) == CONST_INT
- && !const_ok_for_arm (INTVAL (operands[2]))"
+ && !(const_ok_for_arm (INTVAL (operands[2]))
+ || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
[(clobber (const_int 0))]
- "
+{
arm_split_constant (IOR, SImode, curr_insn,
INTVAL (operands[2]), operands[0], operands[1], 0);
DONE;
- "
- [(set_attr "length" "4,16")
- (set_attr "predicable" "yes")]
-)
+}
+ [(set_attr "length" "4,4,16")
+ (set_attr "arch" "32,t2,32")
+ (set_attr "predicable" "yes")])
-(define_insn "*thumb1_iorsi3"
+(define_insn "*thumb1_iorsi3_insn"
[(set (match_operand:SI 0 "register_operand" "=l")
(ior:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
"TARGET_THUMB1"
- "orr\\t%0, %0, %2"
- [(set_attr "length" "2")]
-)
+ "orr\\t%0, %2"
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_peephole2
[(match_scratch:SI 3 "r")
}"
)
-(define_insn "*arm_xorsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (xor:SI (match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_rhs_operand" "rI")))]
+(define_insn_and_split "*arm_xorsi3"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r")
+ (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
"TARGET_32BIT"
- "eor%?\\t%0, %1, %2"
- [(set_attr "predicable" "yes")]
+ "@
+ eor%?\\t%0, %1, %2
+ #"
+ "TARGET_32BIT
+ && GET_CODE (operands[2]) == CONST_INT
+ && !const_ok_for_arm (INTVAL (operands[2]))"
+ [(clobber (const_int 0))]
+{
+ arm_split_constant (XOR, SImode, curr_insn,
+ INTVAL (operands[2]), operands[0], operands[1], 0);
+ DONE;
+}
+ [(set_attr "length" "4,16")
+ (set_attr "predicable" "yes")]
)
-(define_insn "*thumb1_xorsi3"
+(define_insn "*thumb1_xorsi3_insn"
[(set (match_operand:SI 0 "register_operand" "=l")
(xor:SI (match_operand:SI 1 "register_operand" "%0")
(match_operand:SI 2 "register_operand" "l")))]
"TARGET_THUMB1"
- "eor\\t%0, %0, %2"
- [(set_attr "length" "2")]
-)
+ "eor\\t%0, %2"
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_insn "*xorsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
bool need_else;
if (which_alternative != 0 || operands[3] != const0_rtx
- || (code != PLUS && code != MINUS && code != IOR && code != XOR))
+ || (code != PLUS && code != IOR && code != XOR))
need_else = true;
else
need_else = false;
)
(define_insn "arm_ashldi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
+ [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
(ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
"TARGET_THUMB1"
"lsl\\t%0, %1, %2"
- [(set_attr "length" "2")]
-)
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_expand "ashrdi3"
[(set (match_operand:DI 0 "s_register_operand" "")
)
(define_insn "arm_ashrdi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
+ [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
(ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
"movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
[(set_attr "conds" "clob")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
"TARGET_THUMB1"
"asr\\t%0, %1, %2"
- [(set_attr "length" "2")]
-)
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_expand "lshrdi3"
[(set (match_operand:DI 0 "s_register_operand" "")
)
(define_insn "arm_lshrdi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
+ [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
(lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
(const_int 1)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
"movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
[(set_attr "conds" "clob")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
(match_operand:SI 2 "nonmemory_operand" "N,l")))]
"TARGET_THUMB1"
"lsr\\t%0, %1, %2"
- [(set_attr "length" "2")]
-)
+ [(set_attr "length" "2")
+ (set_attr "conds" "set")])
(define_expand "rotlsi3"
[(set (match_operand:SI 0 "s_register_operand" "")
(set_attr "shift" "1")]
)
-(define_insn "*arm_notsi_shiftsi"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+(define_insn "*not_shiftsi"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(not:SI (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
- "TARGET_ARM"
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
+ "TARGET_32BIT"
"mvn%?\\t%0, %1%S3"
[(set_attr "predicable" "yes")
(set_attr "shift" "1")
- (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "insn" "mvn")
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
-(define_insn "*arm_notsi_shiftsi_compare0"
+(define_insn "*not_shiftsi_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_rhs_operand" "rM")]))
- (const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "=r")
+ (compare:CC_NOOV
+ (not:SI (match_operator:SI 3 "shift_operator"
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
+ (const_int 0)))
+ (set (match_operand:SI 0 "s_register_operand" "=r,r")
(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"mvn%.\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
- (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "insn" "mvn")
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
-(define_insn "*arm_not_shiftsi_compare0_scratch"
+(define_insn "*not_shiftsi_compare0_scratch"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_rhs_operand" "rM")]))
- (const_int 0)))
- (clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
+ (compare:CC_NOOV
+ (not:SI (match_operator:SI 3 "shift_operator"
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=r,r"))]
+ "TARGET_32BIT"
"mvn%.\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
- (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "insn" "mvn")
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
;; We don't really have extzv, but defining this using shifts helps
;; to reduce register pressure later on.
(define_expand "extzv"
- [(set (match_dup 4)
- (ashift:SI (match_operand:SI 1 "register_operand" "")
- (match_operand:SI 2 "const_int_operand" "")))
- (set (match_operand:SI 0 "register_operand" "")
- (lshiftrt:SI (match_dup 4)
- (match_operand:SI 3 "const_int_operand" "")))]
+ [(set (match_operand 0 "s_register_operand" "")
+ (zero_extract (match_operand 1 "nonimmediate_operand" "")
+ (match_operand 2 "const_int_operand" "")
+ (match_operand 3 "const_int_operand" "")))]
"TARGET_THUMB1 || arm_arch_thumb2"
"
{
if (arm_arch_thumb2)
{
- emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
- operands[3]));
- DONE;
+ HOST_WIDE_INT width = INTVAL (operands[2]);
+ HOST_WIDE_INT bitpos = INTVAL (operands[3]);
+
+ if (unaligned_access && MEM_P (operands[1])
+ && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
+ {
+ rtx base_addr;
+
+ if (BYTES_BIG_ENDIAN)
+ bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
+ - bitpos;
+
+ if (width == 32)
+ {
+ base_addr = adjust_address (operands[1], SImode,
+ bitpos / BITS_PER_UNIT);
+ emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
+ }
+ else
+ {
+ rtx dest = operands[0];
+ rtx tmp = gen_reg_rtx (SImode);
+
+ /* We may get a paradoxical subreg here. Strip it off. */
+ if (GET_CODE (dest) == SUBREG
+ && GET_MODE (dest) == SImode
+ && GET_MODE (SUBREG_REG (dest)) == HImode)
+ dest = SUBREG_REG (dest);
+
+ if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
+ FAIL;
+
+ base_addr = adjust_address (operands[1], HImode,
+ bitpos / BITS_PER_UNIT);
+ emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
+ emit_move_insn (gen_lowpart (SImode, dest), tmp);
+ }
+ DONE;
+ }
+ else if (s_register_operand (operands[1], GET_MODE (operands[1])))
+ {
+ emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
+ else
+ FAIL;
}
+
+ if (!s_register_operand (operands[1], GET_MODE (operands[1])))
+ FAIL;
operands[3] = GEN_INT (rshift);
DONE;
}
- operands[2] = GEN_INT (lshift);
- operands[4] = gen_reg_rtx (SImode);
+ emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
+ operands[3], gen_reg_rtx (SImode)));
+ DONE;
}"
)
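+
+; Sketch of the unaligned-load path in the "extzv" expander above
+; (little-endian assumed): a zero_extract of 16 bits at a byte-aligned bit
+; position, say bit 8, of a memory operand becomes an unaligned_loadhiu
+; from address + 1 into a temporary, e.g.
+;
+;   ldrh    rT, [rN, #1]    @ unaligned
+;
+; followed by a move of the temporary's low part into the destination.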
-(define_insn "extv"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "const_int_operand" "M")
- (match_operand:SI 3 "const_int_operand" "M")))]
- "arm_arch_thumb2"
- "sbfx%?\t%0, %1, %3, %2"
- [(set_attr "length" "4")
- (set_attr "predicable" "yes")]
-)
+;; Helper for extzv, for the Thumb-1 register-shifts case.
-(define_insn "extzv_t2"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "const_int_operand" "M")
- (match_operand:SI 3 "const_int_operand" "M")))]
- "arm_arch_thumb2"
- "ubfx%?\t%0, %1, %3, %2"
- [(set_attr "length" "4")
- (set_attr "predicable" "yes")]
-)
+(define_expand "extzv_t1"
+ [(set (match_operand:SI 4 "s_register_operand" "")
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:SI 2 "const_int_operand" "")))
+ (set (match_operand:SI 0 "s_register_operand" "")
+ (lshiftrt:SI (match_dup 4)
+ (match_operand:SI 3 "const_int_operand" "")))]
+ "TARGET_THUMB1"
+ "")
-\f
-;; Unary arithmetic insns
+(define_expand "extv"
+ [(set (match_operand 0 "s_register_operand" "")
+ (sign_extract (match_operand 1 "nonimmediate_operand" "")
+ (match_operand 2 "const_int_operand" "")
+ (match_operand 3 "const_int_operand" "")))]
+ "arm_arch_thumb2"
+{
+ HOST_WIDE_INT width = INTVAL (operands[2]);
+ HOST_WIDE_INT bitpos = INTVAL (operands[3]);
-(define_expand "negdi2"
- [(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (neg:DI (match_operand:DI 1 "s_register_operand" "")))
- (clobber (reg:CC CC_REGNUM))])]
- "TARGET_EITHER"
- ""
-)
+ if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
+ && (bitpos % BITS_PER_UNIT) == 0)
+ {
+ rtx base_addr;
+
+ if (BYTES_BIG_ENDIAN)
+ bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
+
+ if (width == 32)
+ {
+ base_addr = adjust_address (operands[1], SImode,
+ bitpos / BITS_PER_UNIT);
+ emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
+ }
+ else
+ {
+ rtx dest = operands[0];
+ rtx tmp = gen_reg_rtx (SImode);
+
+ /* We may get a paradoxical subreg here. Strip it off. */
+ if (GET_CODE (dest) == SUBREG
+ && GET_MODE (dest) == SImode
+ && GET_MODE (SUBREG_REG (dest)) == HImode)
+ dest = SUBREG_REG (dest);
+
+ if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
+ FAIL;
+
+ base_addr = adjust_address (operands[1], HImode,
+ bitpos / BITS_PER_UNIT);
+ emit_insn (gen_unaligned_loadhis (tmp, base_addr));
+ emit_move_insn (gen_lowpart (SImode, dest), tmp);
+ }
+
+ DONE;
+ }
+ else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
+ FAIL;
+ else if (GET_MODE (operands[0]) == SImode
+ && GET_MODE (operands[1]) == SImode)
+ {
+ emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
+ operands[3]));
+ DONE;
+ }
+
+ FAIL;
+})
+
+; Helper to expand register forms of extv with the proper modes.
+
+(define_expand "extv_regsi"
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
+ (match_operand 2 "const_int_operand" "")
+ (match_operand 3 "const_int_operand" "")))]
+ ""
+{
+})
+
+; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
+
+(define_insn "unaligned_loadsi"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
+ UNSPEC_UNALIGNED_LOAD))]
+ "unaligned_access && TARGET_32BIT"
+ "ldr%?\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t2,any")
+ (set_attr "length" "2,4")
+ (set_attr "predicable" "yes")
+ (set_attr "type" "load1")])
+
+(define_insn "unaligned_loadhis"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ (sign_extend:SI
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+ UNSPEC_UNALIGNED_LOAD)))]
+ "unaligned_access && TARGET_32BIT"
+ "ldr%(sh%)\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t2,any")
+ (set_attr "length" "2,4")
+ (set_attr "predicable" "yes")
+ (set_attr "type" "load_byte")])
+
+(define_insn "unaligned_loadhiu"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ (zero_extend:SI
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+ UNSPEC_UNALIGNED_LOAD)))]
+ "unaligned_access && TARGET_32BIT"
+ "ldr%(h%)\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t2,any")
+ (set_attr "length" "2,4")
+ (set_attr "predicable" "yes")
+ (set_attr "type" "load_byte")])
+
+(define_insn "unaligned_storesi"
+ [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
+ (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
+ UNSPEC_UNALIGNED_STORE))]
+ "unaligned_access && TARGET_32BIT"
+ "str%?\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t2,any")
+ (set_attr "length" "2,4")
+ (set_attr "predicable" "yes")
+ (set_attr "type" "store1")])
+
+(define_insn "unaligned_storehi"
+ [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
+ (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
+ UNSPEC_UNALIGNED_STORE))]
+ "unaligned_access && TARGET_32BIT"
+ "str%(h%)\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t2,any")
+ (set_attr "length" "2,4")
+ (set_attr "predicable" "yes")
+ (set_attr "type" "store1")])
+
+(define_insn "*extv_reg"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "const_int_operand" "M")
+ (match_operand:SI 3 "const_int_operand" "M")))]
+ "arm_arch_thumb2"
+ "sbfx%?\t%0, %1, %3, %2"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
+(define_insn "extzv_t2"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "const_int_operand" "M")
+ (match_operand:SI 3 "const_int_operand" "M")))]
+ "arm_arch_thumb2"
+ "ubfx%?\t%0, %1, %3, %2"
+ [(set_attr "length" "4")
+ (set_attr "predicable" "yes")]
+)
+
+
+;; Division instructions
+(define_insn "divsi3"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (div:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")))]
+ "TARGET_IDIV"
+ "sdiv%?\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "sdiv")]
+)
+
+(define_insn "udivsi3"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")))]
+ "TARGET_IDIV"
+ "udiv%?\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "udiv")]
+)
+
+\f
+;; Unary arithmetic insns
+
+(define_expand "negdi2"
+ [(parallel
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (neg:DI (match_operand:DI 1 "s_register_operand" "")))
+ (clobber (reg:CC CC_REGNUM))])]
+ "TARGET_EITHER"
+ ""
+)
;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
;; The first alternative allows the common case of a *full* overlap.
(define_insn "*arm_negdi2"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
+ [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
(neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_ARM"
(not:SI (match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_32BIT"
"mvn%?\\t%0, %1"
- [(set_attr "predicable" "yes")]
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "mvn")]
)
(define_insn "*thumb1_one_cmplsi2"
(not:SI (match_operand:SI 1 "register_operand" "l")))]
"TARGET_THUMB1"
"mvn\\t%0, %1"
- [(set_attr "length" "2")]
+ [(set_attr "length" "2")
+ (set_attr "insn" "mvn")]
)
(define_insn "*notsi_compare0"
(not:SI (match_dup 1)))]
"TARGET_32BIT"
"mvn%.\\t%0, %1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "insn" "mvn")]
)
(define_insn "*notsi_compare0_scratch"
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_32BIT"
"mvn%.\\t%0, %1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "insn" "mvn")]
)
\f
;; Fixed <--> Floating conversion insns
\f
;; Zero and sign extension instructions.
-(define_expand "zero_extendsidi2"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
- "TARGET_32BIT"
- ""
+(define_insn "zero_extend<mode>di2"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
+ "<qhs_zextenddi_cstr>")))]
+ "TARGET_32BIT <qhs_zextenddi_cond>"
+ "#"
+ [(set_attr "length" "8")
+ (set_attr "ce_count" "2")
+ (set_attr "predicable" "yes")]
)
-(define_insn "*arm_zero_extendsidi2"
+(define_insn "extend<mode>di2"
[(set (match_operand:DI 0 "s_register_operand" "=r")
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
- "*
- if (REGNO (operands[1])
- != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
- output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
- return \"mov%?\\t%R0, #0\";
- "
+ (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
+ "<qhs_extenddi_cstr>")))]
+ "TARGET_32BIT <qhs_sextenddi_cond>"
+ "#"
[(set_attr "length" "8")
+ (set_attr "ce_count" "2")
+ (set_attr "shift" "1")
(set_attr "predicable" "yes")]
)
-(define_expand "zero_extendqidi2"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
+;; Splits for all extensions to DImode
+(define_split
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
"TARGET_32BIT"
- ""
-)
-
-(define_insn "*arm_zero_extendqidi2"
- [(set (match_operand:DI 0 "s_register_operand" "=r,r")
- (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
- "TARGET_ARM"
- "@
- and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
- ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "type" "*,load_byte")
- (set_attr "pool_range" "*,4092")
- (set_attr "neg_pool_range" "*,4084")]
-)
+ [(set (match_dup 0) (match_dup 1))]
+{
+ rtx lo_part = gen_lowpart (SImode, operands[0]);
+ enum machine_mode src_mode = GET_MODE (operands[1]);
+
+ if (REG_P (operands[0])
+ && !reg_overlap_mentioned_p (operands[0], operands[1]))
+ emit_clobber (operands[0]);
+ if (!REG_P (lo_part) || src_mode != SImode
+ || !rtx_equal_p (lo_part, operands[1]))
+ {
+ if (src_mode == SImode)
+ emit_move_insn (lo_part, operands[1]);
+ else
+ emit_insn (gen_rtx_SET (VOIDmode, lo_part,
+ gen_rtx_ZERO_EXTEND (SImode, operands[1])));
+ operands[1] = lo_part;
+ }
+ operands[0] = gen_highpart (SImode, operands[0]);
+ operands[1] = const0_rtx;
+})
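+
+; After this split, a zero extension to DImode whose input is not already
+; in the low half of the destination becomes a plain SImode move of the
+; low word plus a clearing of the high word, roughly (registers
+; illustrative)
+;
+;   mov     r0, r2
+;   mov     r1, #0
+;
+; matching what the old *arm_zero_extendsidi2 pattern emitted directly.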
-(define_expand "extendsidi2"
+(define_split
[(set (match_operand:DI 0 "s_register_operand" "")
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
+ (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
"TARGET_32BIT"
- ""
-)
+ [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
+{
+ rtx lo_part = gen_lowpart (SImode, operands[0]);
+ enum machine_mode src_mode = GET_MODE (operands[1]);
-(define_insn "*arm_extendsidi2"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_ARM"
- "*
- if (REGNO (operands[1])
- != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
- output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
- return \"mov%?\\t%R0, %Q0, asr #31\";
- "
- [(set_attr "length" "8")
- (set_attr "shift" "1")
- (set_attr "predicable" "yes")]
-)
+ if (REG_P (operands[0])
+ && !reg_overlap_mentioned_p (operands[0], operands[1]))
+ emit_clobber (operands[0]);
+
+ if (!REG_P (lo_part) || src_mode != SImode
+ || !rtx_equal_p (lo_part, operands[1]))
+ {
+ if (src_mode == SImode)
+ emit_move_insn (lo_part, operands[1]);
+ else
+ emit_insn (gen_rtx_SET (VOIDmode, lo_part,
+ gen_rtx_SIGN_EXTEND (SImode, operands[1])));
+ operands[1] = lo_part;
+ }
+ operands[0] = gen_highpart (SImode, operands[0]);
+})
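+
+; Likewise for sign extension: the high word is produced by an arithmetic
+; shift of the low word, e.g. "mov r1, r0, asr #31", as the old
+; *arm_extendsidi2 pattern did.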
(define_expand "zero_extendhisi2"
[(set (match_operand:SI 0 "s_register_operand" "")
})
(define_split
- [(set (match_operand:SI 0 "register_operand" "")
- (zero_extend:SI (match_operand:HI 1 "register_operand" "l,m")))]
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
"!TARGET_THUMB2 && !arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
[(set (match_operand:SI 0 "register_operand" "=l,l")
(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
"TARGET_THUMB1"
- "*
+{
rtx mem;
if (which_alternative == 0 && arm_arch6)
- return \"uxth\\t%0, %1\";
+ return "uxth\t%0, %1";
if (which_alternative == 0)
- return \"#\";
+ return "#";
mem = XEXP (operands[1], 0);
if (GET_CODE (mem) == CONST)
mem = XEXP (mem, 0);
- if (GET_CODE (mem) == LABEL_REF)
- return \"ldr\\t%0, %1\";
-
if (GET_CODE (mem) == PLUS)
{
rtx a = XEXP (mem, 0);
- rtx b = XEXP (mem, 1);
/* This can happen due to bugs in reload. */
if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
ops[0] = operands[0];
ops[1] = a;
- output_asm_insn (\"mov %0, %1\", ops);
+ output_asm_insn ("mov\t%0, %1", ops);
XEXP (mem, 0) = operands[0];
}
-
- else if ( GET_CODE (a) == LABEL_REF
- && GET_CODE (b) == CONST_INT)
- return \"ldr\\t%0, %1\";
}
- return \"ldrh\\t%0, %1\";
- "
+ return "ldrh\t%0, %1";
+}
[(set_attr_alternative "length"
[(if_then_else (eq_attr "is_arch6" "yes")
(const_int 2) (const_int 4))
(const_int 4)])
- (set_attr "type" "alu_shift,load_byte")
- (set_attr "pool_range" "*,60")]
+ (set_attr "type" "alu_shift,load_byte")]
)
(define_insn "*arm_zero_extendhisi2"
#
ldr%(h%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_zero_extendhisi2_v6"
uxth%?\\t%0, %1
ldr%(h%)\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_zero_extendhisi2addsi"
})
(define_split
- [(set (match_operand:SI 0 "register_operand" "")
- (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand" "")
+ (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
"!arm_arch6"
[(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
"@
uxtb\\t%0, %1
ldrb\\t%0, %1"
- [(set_attr "length" "2,2")
- (set_attr "type" "alu_shift,load_byte")
- (set_attr "pool_range" "*,32")]
+ [(set_attr "length" "2")
+ (set_attr "type" "alu_shift,load_byte")]
)
(define_insn "*arm_zero_extendqisi2"
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,4096")
- (set_attr "neg_pool_range" "*,4084")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_zero_extendqisi2_v6"
uxtb%(%)\\t%0, %1
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "type" "alu_shift,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,4096")
- (set_attr "neg_pool_range" "*,4084")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_zero_extendqisi2addsi"
""
)
-(define_code_iterator ior_xor [ior xor])
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
(const_int 0)))]
"TARGET_32BIT"
- "tst\\t%0, #255"
- [(set_attr "conds" "set")]
+ "tst%?\\t%0, #255"
+ [(set_attr "conds" "set")
+ (set_attr "predicable" "yes")]
)
(define_expand "extendhisi2"
operands[3] = change_address (operands[1], QImode, addr);
})
+(define_peephole2
+ [(set (match_operand:SI 0 "register_operand" "")
+ (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
+ (set (match_operand:SI 2 "register_operand" "") (const_int 0))
+ (set (match_operand:SI 3 "register_operand" "")
+ (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
+ "TARGET_THUMB1
+ && GET_CODE (XEXP (operands[4], 0)) == PLUS
+ && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
+ && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
+ && (peep2_reg_dead_p (3, operands[0])
+ || rtx_equal_p (operands[0], operands[3]))
+ && (peep2_reg_dead_p (3, operands[2])
+ || rtx_equal_p (operands[2], operands[3]))"
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
+{
+ rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
+ operands[4] = change_address (operands[4], QImode, addr);
+})
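+;; For illustration (register numbers hypothetical), the peephole above
+;; turns
+;;      adds  r0, r0, #4
+;;      movs  r2, #0
+;;      ldrsb r3, [r0, r2]
+;; into
+;;      movs  r2, #4
+;;      ldrsb r3, [r0, r2]
+;; exploiting the fact that Thumb-1 LDRSB only has a register-offset
+;; addressing mode, so the constant can be folded into the index register
+;; when r0 and r2 are dead (or are the destination) afterwards.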
+
(define_insn "thumb1_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l,l,l")
(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
(define_insn "*arm_movdi"
[(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
(match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
- "TARGET_ARM
+ "TARGET_32BIT
&& !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
&& !TARGET_IWMMXT
&& ( register_operand (operands[0], DImode)
case 2:
return \"#\";
default:
- return output_move_double (operands);
+ return output_move_double (operands, true, NULL);
}
"
[(set_attr "length" "8,12,16,8,8")
(set_attr "type" "*,*,*,load2,store2")
- (set_attr "pool_range" "*,*,*,1020,*")
- (set_attr "neg_pool_range" "*,*,*,1008,*")]
+ (set_attr "arm_pool_range" "*,*,*,1020,*")
+ (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
+ (set_attr "thumb2_pool_range" "*,*,*,4096,*")
+ (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
)
(define_split
}"
[(set_attr "length" "4,4,6,2,2,6,4,4")
(set_attr "type" "*,*,*,load2,store2,load2,store2,*")
+ (set_attr "insn" "*,mov,*,*,*,*,*,mov")
(set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
)
[(set (match_operand:SI 0 "nonimmediate_operand" "=r")
(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
(match_operand:SI 2 "general_operand" "i")))]
- "TARGET_32BIT"
+ "arm_arch_thumb2"
"movt%?\t%0, #:upper16:%c2"
[(set_attr "predicable" "yes")
(set_attr "length" "4")]
ldr%?\\t%0, %1
str%?\\t%1, %0"
[(set_attr "type" "*,*,*,*,load1,store1")
+ (set_attr "insn" "mov,mov,mvn,mov,*,*")
(set_attr "predicable" "yes")
(set_attr "pool_range" "*,*,*,*,4096,*")
(set_attr "neg_pool_range" "*,*,*,*,4084,*")]
)
(define_insn "*thumb1_movsi_insn"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
- (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
+ (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
"TARGET_THUMB1
&& ( register_operand (operands[0], SImode)
|| register_operand (operands[1], SImode))"
mov\\t%0, %1"
[(set_attr "length" "2,2,4,4,2,2,2,2,2")
(set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
- (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
-)
+ (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
+ (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
"TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
- [(set (match_dup 0) (match_dup 1))
- (set (match_dup 0) (neg:SI (match_dup 0)))]
- "operands[1] = GEN_INT (- INTVAL (operands[1]));"
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 0) (neg:SI (match_dup 2)))]
+ "
+ {
+ operands[1] = GEN_INT (- INTVAL (operands[1]));
+ operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
+ }"
)
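+;; A worked example of the split above (register numbers hypothetical):
+;; the constant -200 satisfies constraint J, so it can be loaded as
+;;      movs  r1, #200
+;;      rsbs  r0, r1, #0        @ i.e. negs r0, r1
+;; the positive value is materialised first and then negated.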
(define_split
[(set (match_operand:SI 0 "register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
"TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
- [(set (match_dup 0) (match_dup 1))
- (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
"
{
unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
if ((val & (mask << i)) == val)
break;
- /* Shouldn't happen, but we don't want to split if the shift is zero. */
+ /* Don't split if the shift is zero. */
if (i == 0)
FAIL;
operands[1] = GEN_INT (val >> i);
- operands[2] = GEN_INT (i);
+ operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
+ operands[3] = GEN_INT (i);
}"
)
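+;; Likewise for constraint K the split looks for a shift count that
+;; reduces the constant to an 8-bit value loadable by a Thumb-1 MOV
+;; immediate and then reapplies the shift; e.g. (registers hypothetical)
+;; 6400 == 200 << 5 can become
+;;      movs  r1, #200
+;;      lsls  r0, r1, #5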
;; we use an unspec. The offset will be loaded from a constant pool entry,
;; since that is the only type of relocation we can use.
+;; Wrap calculation of the whole PIC address in a single pattern for the
+;; benefit of optimizers, particularly PRE and HOIST.  Calculation of
+;; a PIC address involves two loads from memory, so we want to CSE it
+;; as often as possible.
+;; After the GCSE optimizations, this pattern is split into one of the
+;; pic_load_addr_* patterns followed by a move.
+;;
+;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
+(define_expand "calculate_pic_address"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
+ (unspec:SI [(match_operand:SI 2 "" "")]
+ UNSPEC_PIC_SYM))))]
+ "flag_pic"
+)
+
+;; Split calculate_pic_address into pic_load_addr_* and a move.
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
+ (unspec:SI [(match_operand:SI 2 "" "")]
+ UNSPEC_PIC_SYM))))]
+ "flag_pic"
+ [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
+ (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
+ "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
+)
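+;; Illustrative only (hypothetical register allocation, and assuming r9
+;; holds the PIC base): after the split the sequence is roughly
+;;      ldr   r3, .Lgot_offset         @ pic_load_addr_*
+;;      ldr   r0, [r9, r3]             @ load the GOT entry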
+
+;; operand1 is the memory address that will go into pic_load_addr_32bit.
+;; operand2 is the PIC label to be emitted from pic_add_dot_plus_eight.
+;; We combine the two so that the entire insn can be hoisted.
+(define_insn_and_split "pic_load_addr_unified"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
+ (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
+ (match_operand:SI 2 "" "")]
+ UNSPEC_PIC_UNIFIED))]
+ "flag_pic"
+ "#"
+ "&& reload_completed"
+ [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
+ (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
+ (match_dup 2)] UNSPEC_PIC_BASE))]
+ "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
+ [(set_attr "type" "load1,load1,load1")
+ (set_attr "pool_range" "4096,4096,1024")
+ (set_attr "neg_pool_range" "4084,0,0")
+ (set_attr "arch" "a,t2,t1")
+ (set_attr "length" "8,6,4")]
+)
+
;; The rather odd constraints on the following are to force reload to leave
;; the insn alone, and to force the minipool generation pass to then move
;; the GOT symbol to memory.
(const_int 8)
(match_operand 1 "" "")]
UNSPEC_PIC_BASE))
- (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
+ (set (match_operand:SI 2 "arm_general_register_operand" "")
+ (mem:SI (match_dup 0)))]
"TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
[(set (match_dup 2)
(mem:SI (unspec:SI [(match_dup 3)
return \"ldrh %0, %1\";
}"
[(set_attr "length" "2,4,2,2,2,2")
- (set_attr "type" "*,load1,store1,*,*,*")]
-)
+ (set_attr "type" "*,load1,store1,*,*,*")
+ (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
(define_expand "movhi_bytes"
;; Pattern to recognize insn generated default case above
(define_insn "*movhi_insn_arch4"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
- (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
+ (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
"TARGET_ARM
&& arm_arch4
- && (GET_CODE (operands[1]) != CONST_INT
- || const_ok_for_arm (INTVAL (operands[1]))
- || const_ok_for_arm (~INTVAL (operands[1])))"
+ && (register_operand (operands[0], HImode)
+ || register_operand (operands[1], HImode))"
"@
mov%?\\t%0, %1\\t%@ movhi
mvn%?\\t%0, #%B1\\t%@ movhi
ldr%(h%)\\t%0, %1\\t%@ movhi"
[(set_attr "type" "*,*,store1,load1")
(set_attr "predicable" "yes")
+ (set_attr "insn" "mov,mvn,*,*")
(set_attr "pool_range" "*,*,*,256")
(set_attr "neg_pool_range" "*,*,*,244")]
)
"@
mov%?\\t%0, %1\\t%@ movhi
mvn%?\\t%0, #%B1\\t%@ movhi"
- [(set_attr "predicable" "yes")]
+ [(set_attr "predicable" "yes")
+ (set_attr "insn" "mov,mvn")]
)
(define_expand "thumb_movhi_clobber"
(define_insn "*arm_movqi_insn"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
- (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
+ (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
"TARGET_32BIT
&& ( register_operand (operands[0], QImode)
|| register_operand (operands[1], QImode))"
mov%?\\t%0, %1
mvn%?\\t%0, #%B1
ldr%(b%)\\t%0, %1
+ str%(b%)\\t%1, %0
+ ldr%(b%)\\t%0, %1
str%(b%)\\t%1, %0"
- [(set_attr "type" "*,*,load1,store1")
- (set_attr "predicable" "yes")]
+ [(set_attr "type" "*,*,load1,store1,load1,store1")
+ (set_attr "insn" "mov,mvn,*,*,*,*")
+ (set_attr "predicable" "yes")
+ (set_attr "arch" "any,any,t2,t2,any,any")
+ (set_attr "length" "4,4,2,2,4,4")]
)
(define_insn "*thumb1_movqi_insn"
mov\\t%0, %1"
[(set_attr "length" "2")
(set_attr "type" "*,load1,store1,*,*,*")
- (set_attr "pool_range" "*,32,*,*,*,*")]
-)
+ (set_attr "insn" "*,*,*,mov,mov,mov")
+ (set_attr "pool_range" "*,32,*,*,*,*")
+ (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode moves
(define_expand "movhf"
"
[(set_attr "conds" "unconditional")
(set_attr "type" "load1,store1,*,*")
+ (set_attr "insn" "*,*,mov,mov")
(set_attr "length" "4,4,4,8")
- (set_attr "predicable" "yes")
- ]
+ (set_attr "predicable" "yes")]
)
(define_insn "*thumb1_movhf"
"
[(set_attr "length" "2")
(set_attr "type" "*,load1,store1,*,*")
- (set_attr "pool_range" "*,1020,*,*,*")]
-)
+ (set_attr "insn" "mov,*,*,mov,mov")
+ (set_attr "pool_range" "*,1020,*,*,*")
+ (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
(define_expand "movsf"
[(set (match_operand:SF 0 "general_operand" "")
(define_insn "*arm_movsf_soft_insn"
[(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
(match_operand:SF 1 "general_operand" "r,mE,r"))]
- "TARGET_ARM
+ "TARGET_32BIT
&& TARGET_SOFT_FLOAT
&& (GET_CODE (operands[0]) != MEM
|| register_operand (operands[1], SFmode))"
mov%?\\t%0, %1
ldr%?\\t%0, %1\\t%@ float
str%?\\t%1, %0\\t%@ float"
- [(set_attr "length" "4,4,4")
- (set_attr "predicable" "yes")
+ [(set_attr "predicable" "yes")
(set_attr "type" "*,load1,store1")
+ (set_attr "insn" "mov,*,*")
(set_attr "pool_range" "*,4096,*")
- (set_attr "neg_pool_range" "*,4084,*")]
+ (set_attr "arm_neg_pool_range" "*,4084,*")
+ (set_attr "thumb2_neg_pool_range" "*,0,*")]
)
;;; ??? This should have alternatives for constants.
mov\\t%0, %1"
[(set_attr "length" "2")
(set_attr "type" "*,load1,store1,load1,store1,*,*")
- (set_attr "pool_range" "*,*,*,1020,*,*,*")]
+ (set_attr "pool_range" "*,*,*,1020,*,*,*")
+ (set_attr "insn" "*,*,*,*,*,mov,mov")
+ (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)
(define_expand "movdf"
[(match_operand:DF 0 "arm_reload_memory_operand" "=o")
(match_operand:DF 1 "s_register_operand" "r")
(match_operand:SI 2 "s_register_operand" "=&r")]
- "TARGET_32BIT"
+ "TARGET_THUMB2"
"
{
enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
(define_insn "*movdf_soft_insn"
[(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
- "TARGET_ARM && TARGET_SOFT_FLOAT
+ "TARGET_32BIT && TARGET_SOFT_FLOAT
&& ( register_operand (operands[0], DFmode)
|| register_operand (operands[1], DFmode))"
"*
case 2:
return \"#\";
default:
- return output_move_double (operands);
+ return output_move_double (operands, true, NULL);
}
"
[(set_attr "length" "8,12,16,8,8")
(set_attr "type" "*,*,*,load2,store2")
- (set_attr "pool_range" "1020")
- (set_attr "neg_pool_range" "1008")]
+ (set_attr "pool_range" "*,*,*,1020,*")
+ (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
+ (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
)
;;; ??? This should have alternatives for constants.
"
[(set_attr "length" "4,2,2,6,4,4")
(set_attr "type" "*,load2,store2,load2,store2,*")
+ (set_attr "insn" "*,*,*,*,*,mov")
(set_attr "pool_range" "*,*,*,1020,*,*")]
)
;; load- and store-multiple insns
;; The arm can load/store any set of registers, provided that they are in
-;; ascending order; but that is beyond GCC so stick with what it knows.
+;; ascending order, but these expanders assume a contiguous set.
(define_expand "load_multiple"
[(match_par_dup 3 [(set (match_operand:SI 0 "" "")
FAIL;
operands[3]
- = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
+ = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
+ INTVAL (operands[2]),
force_reg (SImode, XEXP (operands[1], 0)),
- TRUE, FALSE, operands[1], &offset);
+ FALSE, operands[1], &offset);
})
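+;; For reference (registers illustrative): the parallels built here by
+;; arm_gen_load_multiple, and by arm_gen_store_multiple below, are
+;; ultimately printed as block transfers such as
+;;      ldmia r1, {r2, r3, r4, r5}
+;;      stmia r0, {r2, r3, r4, r5}
+;; one word per consecutive memory location, in ascending register order.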
-;; Load multiple with write-back
-
-(define_insn "*ldmsi_postinc4"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 16)))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (match_dup 2)))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 4))))
- (set (match_operand:SI 5 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 8))))
- (set (match_operand:SI 6 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
- "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
- [(set_attr "type" "load4")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*ldmsi_postinc4_thumb1"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=l")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 16)))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (match_dup 2)))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 4))))
- (set (match_operand:SI 5 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 8))))
- (set (match_operand:SI 6 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
- "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
- "ldmia\\t%1!, {%3, %4, %5, %6}"
- [(set_attr "type" "load4")]
-)
-
-(define_insn "*ldmsi_postinc3"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 12)))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (match_dup 2)))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 4))))
- (set (match_operand:SI 5 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
- "ldm%(ia%)\\t%1!, {%3, %4, %5}"
- [(set_attr "type" "load3")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*ldmsi_postinc2"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 8)))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (match_dup 2)))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
- "ldm%(ia%)\\t%1!, {%3, %4}"
- [(set_attr "type" "load2")
- (set_attr "predicable" "yes")]
-)
-
-;; Ordinary load multiple
-
-(define_insn "*ldmsi4"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 2 "arm_hard_register_operand" "")
- (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 4))))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 8))))
- (set (match_operand:SI 5 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
- "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
- [(set_attr "type" "load4")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*ldmsi3"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 2 "arm_hard_register_operand" "")
- (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 4))))
- (set (match_operand:SI 4 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
- "ldm%(ia%)\\t%1, {%2, %3, %4}"
- [(set_attr "type" "load3")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*ldmsi2"
- [(match_parallel 0 "load_multiple_operation"
- [(set (match_operand:SI 2 "arm_hard_register_operand" "")
- (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
- (set (match_operand:SI 3 "arm_hard_register_operand" "")
- (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
- "ldm%(ia%)\\t%1, {%2, %3}"
- [(set_attr "type" "load2")
- (set_attr "predicable" "yes")]
-)
-
(define_expand "store_multiple"
[(match_par_dup 3 [(set (match_operand:SI 0 "" "")
(match_operand:SI 1 "" ""))
FAIL;
operands[3]
- = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
+ = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
+ INTVAL (operands[2]),
force_reg (SImode, XEXP (operands[0], 0)),
- TRUE, FALSE, operands[0], &offset);
+ FALSE, operands[0], &offset);
})
-;; Store multiple with write-back
-
-(define_insn "*stmsi_postinc4"
- [(match_parallel 0 "store_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 16)))
- (set (mem:SI (match_dup 2))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
- (match_operand:SI 5 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
- (match_operand:SI 6 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
- "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store4")]
-)
-
-(define_insn "*stmsi_postinc4_thumb1"
- [(match_parallel 0 "store_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=l")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 16)))
- (set (mem:SI (match_dup 2))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
- (match_operand:SI 5 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
- (match_operand:SI 6 "arm_hard_register_operand" ""))])]
- "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
- "stmia\\t%1!, {%3, %4, %5, %6}"
- [(set_attr "type" "store4")]
-)
-
-(define_insn "*stmsi_postinc3"
- [(match_parallel 0 "store_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 12)))
- (set (mem:SI (match_dup 2))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
- (match_operand:SI 5 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
- "stm%(ia%)\\t%1!, {%3, %4, %5}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store3")]
-)
-
-(define_insn "*stmsi_postinc2"
- [(match_parallel 0 "store_multiple_operation"
- [(set (match_operand:SI 1 "s_register_operand" "=r")
- (plus:SI (match_operand:SI 2 "s_register_operand" "1")
- (const_int 8)))
- (set (mem:SI (match_dup 2))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
- "stm%(ia%)\\t%1!, {%3, %4}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store2")]
-)
-
-;; Ordinary store multiple
-
-(define_insn "*stmsi4"
- [(match_parallel 0 "store_multiple_operation"
- [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
- (match_operand:SI 2 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
- (match_operand:SI 5 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
- "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store4")]
-)
-
-(define_insn "*stmsi3"
- [(match_parallel 0 "store_multiple_operation"
- [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
- (match_operand:SI 2 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
- (match_operand:SI 3 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
- (match_operand:SI 4 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
- "stm%(ia%)\\t%1, {%2, %3, %4}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store3")]
-)
-
-(define_insn "*stmsi2"
- [(match_parallel 0 "store_multiple_operation"
- [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
- (match_operand:SI 2 "arm_hard_register_operand" ""))
- (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
- (match_operand:SI 3 "arm_hard_register_operand" ""))])]
- "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
- "stm%(ia%)\\t%1, {%2, %3}"
- [(set_attr "predicable" "yes")
- (set_attr "type" "store2")]
-)
;; Move a block of memory if it is word aligned and MORE than 2 words long.
;; We could let this apply for blocks of less than this, but it clobbers so
(define_expand "cbranchsi4"
[(set (pc) (if_then_else
- (match_operator 0 "arm_comparison_operator"
+ (match_operator 0 "expandable_comparison_operator"
[(match_operand:SI 1 "s_register_operand" "")
(match_operand:SI 2 "nonmemory_operand" "")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_THUMB1"
{
- rtx xops[3];
+ rtx xops[4];
xops[1] = gen_reg_rtx (SImode);
emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
xops[2] = GEN_INT (127);
(define_expand "cbranchsf4"
[(set (pc) (if_then_else
- (match_operator 0 "arm_comparison_operator"
+ (match_operator 0 "expandable_comparison_operator"
[(match_operand:SF 1 "s_register_operand" "")
(match_operand:SF 2 "arm_float_compare_operand" "")])
(label_ref (match_operand 3 "" ""))
(define_expand "cbranchdf4"
[(set (pc) (if_then_else
- (match_operator 0 "arm_comparison_operator"
+ (match_operator 0 "expandable_comparison_operator"
[(match_operand:DF 1 "s_register_operand" "")
(match_operand:DF 2 "arm_float_compare_operand" "")])
(label_ref (match_operand 3 "" ""))
(define_expand "cbranchdi4"
[(set (pc) (if_then_else
- (match_operator 0 "arm_comparison_operator"
+ (match_operator 0 "expandable_comparison_operator"
[(match_operand:DI 1 "cmpdi_operand" "")
(match_operand:DI 2 "cmpdi_operand" "")])
(label_ref (match_operand 3 "" ""))
(define_insn "cbranchsi4_insn"
[(set (pc) (if_then_else
(match_operator 0 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "l,*h")
+ [(match_operand:SI 1 "s_register_operand" "l,l*h")
(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_THUMB1"
- "*
- rtx t = prev_nonnote_insn (insn);
- if (t != NULL_RTX
- && INSN_P (t)
- && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
+{
+ rtx t = cfun->machine->thumb1_cc_insn;
+ if (t != NULL_RTX)
{
- t = XEXP (SET_SRC (PATTERN (t)), 0);
- if (!rtx_equal_p (XEXP (t, 0), operands[1])
- || !rtx_equal_p (XEXP (t, 1), operands[2]))
+ if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
+ || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
+ t = NULL_RTX;
+ if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
+ {
+ if (!noov_comparison_operator (operands[0], VOIDmode))
+ t = NULL_RTX;
+ }
+ else if (cfun->machine->thumb1_cc_mode != CCmode)
t = NULL_RTX;
}
- else
- t = NULL_RTX;
if (t == NULL_RTX)
- output_asm_insn (\"cmp\\t%1, %2\", operands);
+ {
+ output_asm_insn ("cmp\t%1, %2", operands);
+ cfun->machine->thumb1_cc_insn = insn;
+ cfun->machine->thumb1_cc_op0 = operands[1];
+ cfun->machine->thumb1_cc_op1 = operands[2];
+ cfun->machine->thumb1_cc_mode = CCmode;
+ }
+ else
+ /* Ensure we emit the right type of condition code on the jump. */
+ XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
+ CC_REGNUM);
switch (get_attr_length (insn))
{
case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
}
- "
+}
[(set (attr "far_jump")
(if_then_else
(eq_attr "length" "8")
(const_int 8))))]
)
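+;; With the tracking above, two conditional branches on the same operands
+;; that end up adjacent in the instruction stream need only a single
+;; compare, e.g. (illustrative)
+;;      cmp   r0, #0
+;;      blt   .Lnegative
+;;      beq   .Lzero
+;; where the "beq" reuses the flags recorded in the thumb1_cc_* fields.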
-(define_insn "*movsi_cbranchsi4"
- [(set (pc)
- (if_then_else
- (match_operator 3 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
- (const_int 0)])
- (label_ref (match_operand 2 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
- (match_dup 1))]
- "TARGET_THUMB1"
- "*{
- if (which_alternative == 0)
- output_asm_insn (\"cmp\t%0, #0\", operands);
- else if (which_alternative == 1)
- output_asm_insn (\"sub\t%0, %1, #0\", operands);
- else
- {
- output_asm_insn (\"cmp\t%1, #0\", operands);
- if (which_alternative == 2)
- output_asm_insn (\"mov\t%0, %1\", operands);
- else
- output_asm_insn (\"str\t%1, %0\", operands);
- }
- switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
- {
- case 4: return \"b%d3\\t%l2\";
- case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (gt (symbol_ref ("which_alternative"))
- (const_int 1))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (le (symbol_ref ("which_alternative"))
- (const_int 1))
- (if_then_else
- (and (ge (minus (match_dup 2) (pc)) (const_int -250))
- (le (minus (match_dup 2) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
- (le (minus (match_dup 2) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 2) (pc)) (const_int -248))
- (le (minus (match_dup 2) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
- (le (minus (match_dup 2) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
+;; Two peepholes to generate subtract of 0 instead of a move if the
+;; condition codes will be useful.
(define_peephole2
[(set (match_operand:SI 0 "low_register_operand" "")
(match_operand:SI 1 "low_register_operand" ""))
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_THUMB1"
- [(parallel
- [(set (pc)
- (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
+ [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
+ (set (pc)
+ (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
(label_ref (match_dup 3))
- (pc)))
- (set (match_dup 0) (match_dup 1))])]
- ""
-)
+ (pc)))]
+ "")
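+;; (Registers illustrative.)  The transformation replaces
+;;      movs  r0, r1
+;;      cmp   r0, #0
+;;      beq   .L1
+;; with
+;;      subs  r0, r1, #0
+;;      beq   .L1
+;; since the subtract of zero both copies the value and sets the
+;; condition codes needed by the branch.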
;; Sigh! This variant shouldn't be needed, but combine often fails to
;; merge cases like this because the op1 is a hard register in
-;; CLASS_LIKELY_SPILLED_P.
+;; arm_class_likely_spilled_p.
(define_peephole2
[(set (match_operand:SI 0 "low_register_operand" "")
(match_operand:SI 1 "low_register_operand" ""))
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_THUMB1"
- [(parallel
- [(set (pc)
- (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
+ [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
+ (set (pc)
+ (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
(label_ref (match_dup 3))
- (pc)))
- (set (match_dup 0) (match_dup 1))])]
- ""
-)
+ (pc)))]
+ "")
(define_insn "*negated_cbranchsi4"
[(set (pc)
(const_int 6)
(const_int 8))))]
)
-
+
(define_insn "*tstsi3_cbranch"
[(set (pc)
(if_then_else
(const_int 8))))]
)
-(define_insn "*andsi3_cbranch"
- [(set (pc)
- (if_then_else
- (match_operator 5 "equality_operator"
- [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
- (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
- (const_int 0)])
- (label_ref (match_operand 4 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
- (and:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB1"
- "*
- {
- if (which_alternative == 0)
- output_asm_insn (\"and\\t%0, %3\", operands);
- else if (which_alternative == 1)
- {
- output_asm_insn (\"and\\t%1, %3\", operands);
- output_asm_insn (\"mov\\t%0, %1\", operands);
- }
- else
- {
- output_asm_insn (\"and\\t%1, %3\", operands);
- output_asm_insn (\"str\\t%1, %0\", operands);
- }
-
- switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
- {
- case 4: return \"b%d5\\t%l4\";
- case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -250))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -248))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
-(define_insn "*orrsi3_cbranch_scratch"
- [(set (pc)
- (if_then_else
- (match_operator 4 "equality_operator"
- [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
- (match_operand:SI 2 "s_register_operand" "l"))
- (const_int 0)])
- (label_ref (match_operand 3 "" ""))
- (pc)))
- (clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB1"
- "*
- {
- output_asm_insn (\"orr\\t%0, %2\", operands);
- switch (get_attr_length (insn))
- {
- case 4: return \"b%d4\\t%l3\";
- case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (eq_attr "length" "8")
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -250))
- (le (minus (match_dup 3) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
- (le (minus (match_dup 3) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8))))]
-)
-
-(define_insn "*orrsi3_cbranch"
- [(set (pc)
- (if_then_else
- (match_operator 5 "equality_operator"
- [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
- (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
- (const_int 0)])
- (label_ref (match_operand 4 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
- (ior:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB1"
- "*
- {
- if (which_alternative == 0)
- output_asm_insn (\"orr\\t%0, %3\", operands);
- else if (which_alternative == 1)
- {
- output_asm_insn (\"orr\\t%1, %3\", operands);
- output_asm_insn (\"mov\\t%0, %1\", operands);
- }
- else
- {
- output_asm_insn (\"orr\\t%1, %3\", operands);
- output_asm_insn (\"str\\t%1, %0\", operands);
- }
-
- switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
- {
- case 4: return \"b%d5\\t%l4\";
- case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -250))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -248))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
-(define_insn "*xorsi3_cbranch_scratch"
- [(set (pc)
- (if_then_else
- (match_operator 4 "equality_operator"
- [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
- (match_operand:SI 2 "s_register_operand" "l"))
- (const_int 0)])
- (label_ref (match_operand 3 "" ""))
- (pc)))
- (clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB1"
- "*
- {
- output_asm_insn (\"eor\\t%0, %2\", operands);
- switch (get_attr_length (insn))
- {
- case 4: return \"b%d4\\t%l3\";
- case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (eq_attr "length" "8")
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -250))
- (le (minus (match_dup 3) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
- (le (minus (match_dup 3) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8))))]
-)
-
-(define_insn "*xorsi3_cbranch"
- [(set (pc)
- (if_then_else
- (match_operator 5 "equality_operator"
- [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
- (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
- (const_int 0)])
- (label_ref (match_operand 4 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
- (xor:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB1"
- "*
- {
- if (which_alternative == 0)
- output_asm_insn (\"eor\\t%0, %3\", operands);
- else if (which_alternative == 1)
- {
- output_asm_insn (\"eor\\t%1, %3\", operands);
- output_asm_insn (\"mov\\t%0, %1\", operands);
- }
- else
- {
- output_asm_insn (\"eor\\t%1, %3\", operands);
- output_asm_insn (\"str\\t%1, %0\", operands);
- }
-
- switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
- {
- case 4: return \"b%d5\\t%l4\";
- case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -250))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -248))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
-(define_insn "*bicsi3_cbranch_scratch"
- [(set (pc)
- (if_then_else
- (match_operator 4 "equality_operator"
- [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
- (match_operand:SI 1 "s_register_operand" "0"))
- (const_int 0)])
- (label_ref (match_operand 3 "" ""))
- (pc)))
- (clobber (match_scratch:SI 0 "=l"))]
- "TARGET_THUMB1"
- "*
- {
- output_asm_insn (\"bic\\t%0, %2\", operands);
- switch (get_attr_length (insn))
- {
- case 4: return \"b%d4\\t%l3\";
- case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (eq_attr "length" "8")
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -250))
- (le (minus (match_dup 3) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
- (le (minus (match_dup 3) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8))))]
-)
-
-(define_insn "*bicsi3_cbranch"
- [(set (pc)
- (if_then_else
- (match_operator 5 "equality_operator"
- [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
- (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
- (const_int 0)])
- (label_ref (match_operand 4 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
- (and:SI (not:SI (match_dup 3)) (match_dup 2)))
- (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
- "TARGET_THUMB1"
- "*
- {
- if (which_alternative == 0)
- output_asm_insn (\"bic\\t%0, %3\", operands);
- else if (which_alternative <= 2)
- {
- output_asm_insn (\"bic\\t%1, %3\", operands);
- /* It's ok if OP0 is a lo-reg, even though the mov will set the
- conditions again, since we're only testing for equality. */
- output_asm_insn (\"mov\\t%0, %1\", operands);
- }
- else
- {
- output_asm_insn (\"bic\\t%1, %3\", operands);
- output_asm_insn (\"str\\t%1, %0\", operands);
- }
-
- switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
- {
- case 4: return \"b%d5\\t%l4\";
- case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
- }
- }"
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -250))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -248))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
(define_insn "*cbranchne_decr1"
[(set (pc)
(if_then_else (match_operator 3 "equality_operator"
(if_then_else
(match_operator 4 "arm_comparison_operator"
[(plus:SI
- (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
- (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
+ (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
+ (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
(const_int 0)])
(label_ref (match_operand 5 "" ""))
(pc)))
else if (which_alternative >= 4)
output_asm_insn (\"str\\t%1, %0\", operands);
- switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
+ switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
{
case 4:
return \"b%d4\\t%l5\";
[(set (attr "far_jump")
(if_then_else
(ior (and (lt (symbol_ref ("which_alternative"))
- (const_int 3))
+ (const_int 2))
(eq_attr "length" "8"))
(eq_attr "length" "10"))
(const_string "yes")
(set (attr "length")
(if_then_else
(lt (symbol_ref ("which_alternative"))
- (const_int 3))
+ (const_int 2))
(if_then_else
(and (ge (minus (match_dup 5) (pc)) (const_int -250))
(le (minus (match_dup 5) (pc)) (const_int 256)))
}
}
"
- [(set (attr "far_jump")
- (if_then_else
- (eq_attr "length" "8")
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -250))
- (le (minus (match_dup 4) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
- (le (minus (match_dup 4) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8))))]
-)
-
-(define_insn "*subsi3_cbranch"
- [(set (pc)
- (if_then_else
- (match_operator 4 "arm_comparison_operator"
- [(minus:SI
- (match_operand:SI 2 "s_register_operand" "l,l,1,l")
- (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
- (const_int 0)])
- (label_ref (match_operand 5 "" ""))
- (pc)))
- (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
- (minus:SI (match_dup 2) (match_dup 3)))
- (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
- "TARGET_THUMB1
- && (GET_CODE (operands[4]) == EQ
- || GET_CODE (operands[4]) == NE
- || GET_CODE (operands[4]) == GE
- || GET_CODE (operands[4]) == LT)"
- "*
- {
- if (which_alternative == 0)
- output_asm_insn (\"sub\\t%0, %2, %3\", operands);
- else if (which_alternative == 1)
- {
- /* We must provide an alternative for a hi reg because reload
- cannot handle output reloads on a jump instruction, but we
- can't subtract into that. Fortunately a mov from lo to hi
- does not clobber the condition codes. */
- output_asm_insn (\"sub\\t%1, %2, %3\", operands);
- output_asm_insn (\"mov\\t%0, %1\", operands);
- }
- else
- {
- /* Similarly, but the target is memory. */
- output_asm_insn (\"sub\\t%1, %2, %3\", operands);
- output_asm_insn (\"str\\t%1, %0\", operands);
- }
-
- switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
- {
- case 4:
- return \"b%d4\\t%l5\";
- case 6:
- return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
- default:
- return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
- }
- }
- "
- [(set (attr "far_jump")
- (if_then_else
- (ior (and (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (eq_attr "length" "8"))
- (eq_attr "length" "10"))
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (eq (symbol_ref ("which_alternative"))
- (const_int 0))
- (if_then_else
- (and (ge (minus (match_dup 5) (pc)) (const_int -250))
- (le (minus (match_dup 5) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
- (le (minus (match_dup 5) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8)))
- (if_then_else
- (and (ge (minus (match_dup 5) (pc)) (const_int -248))
- (le (minus (match_dup 5) (pc)) (const_int 256)))
- (const_int 6)
- (if_then_else
- (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
- (le (minus (match_dup 5) (pc)) (const_int 2048)))
- (const_int 8)
- (const_int 10)))))]
-)
-
-(define_insn "*subsi3_cbranch_scratch"
- [(set (pc)
- (if_then_else
- (match_operator 0 "arm_comparison_operator"
- [(minus:SI (match_operand:SI 1 "register_operand" "l")
- (match_operand:SI 2 "nonmemory_operand" "l"))
- (const_int 0)])
- (label_ref (match_operand 3 "" ""))
- (pc)))]
- "TARGET_THUMB1
- && (GET_CODE (operands[0]) == EQ
- || GET_CODE (operands[0]) == NE
- || GET_CODE (operands[0]) == GE
- || GET_CODE (operands[0]) == LT)"
- "*
- output_asm_insn (\"cmp\\t%1, %2\", operands);
- switch (get_attr_length (insn))
- {
- case 4: return \"b%d0\\t%l3\";
- case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
- default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
- }
- "
- [(set (attr "far_jump")
- (if_then_else
- (eq_attr "length" "8")
- (const_string "yes")
- (const_string "no")))
- (set (attr "length")
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -250))
- (le (minus (match_dup 3) (pc)) (const_int 256)))
- (const_int 4)
- (if_then_else
- (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
- (le (minus (match_dup 3) (pc)) (const_int 2048)))
- (const_int 6)
- (const_int 8))))]
+ [(set (attr "far_jump")
+ (if_then_else
+ (eq_attr "length" "8")
+ (const_string "yes")
+ (const_string "no")))
+ (set (attr "length")
+ (if_then_else
+ (and (ge (minus (match_dup 4) (pc)) (const_int -250))
+ (le (minus (match_dup 4) (pc)) (const_int 256)))
+ (const_int 4)
+ (if_then_else
+ (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
+ (le (minus (match_dup 4) (pc)) (const_int 2048)))
+ (const_int 6)
+ (const_int 8))))]
)
+
;; Comparison and test insns
(define_insn "*arm_cmpsi_insn"
[(set (reg:CC CC_REGNUM)
- (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
- (match_operand:SI 1 "arm_add_operand" "rI,L")))]
+ (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
+ (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
"TARGET_32BIT"
"@
cmp%?\\t%0, %1
+ cmp%?\\t%0, %1
+ cmp%?\\t%0, %1
cmn%?\\t%0, #%n1"
- [(set_attr "conds" "set")]
+ [(set_attr "conds" "set")
+ (set_attr "arch" "t2,t2,any,any")
+ (set_attr "length" "2,2,4,4")
+ (set_attr "predicable" "yes")]
)
-(define_insn "*arm_cmpsi_shiftsi"
+(define_insn "*cmpsi_shiftsi"
[(set (reg:CC CC_REGNUM)
- (compare:CC (match_operand:SI 0 "s_register_operand" "r")
+ (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
(match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
- "TARGET_ARM"
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
+ "TARGET_32BIT"
"cmp%?\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
- (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
-(define_insn "*arm_cmpsi_shiftsi_swp"
+(define_insn "*cmpsi_shiftsi_swp"
[(set (reg:CC_SWP CC_REGNUM)
(compare:CC_SWP (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "reg_or_int_operand" "rM")])
- (match_operand:SI 0 "s_register_operand" "r")))]
- "TARGET_ARM"
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,rM")])
+ (match_operand:SI 0 "s_register_operand" "r,r")))]
+ "TARGET_32BIT"
"cmp%?\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
- (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
(define_insn "*arm_cmpsi_negshiftsi_si"
[(set (reg:CC_Z CC_REGNUM)
[(set_attr "conds" "set")
(set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
(const_string "alu_shift")
- (const_string "alu_shift_reg")))]
+ (const_string "alu_shift_reg")))
+ (set_attr "predicable" "yes")]
)
;; DImode comparisons. The generic code generates branches that
[(set (reg:CC_CZ CC_REGNUM)
(compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
(match_operand:DI 1 "arm_di_operand" "rDi")))]
- "TARGET_ARM"
- "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
+ "TARGET_32BIT"
+ "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
[(set_attr "conds" "set")
(set_attr "length" "8")]
)
(pc)))]
"TARGET_32BIT"
"operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
- operands[1], operands[2]);
+ operands[1], operands[2], NULL_RTX);
operands[2] = const0_rtx;"
)
return \"b%d1\\t%l0\";
"
[(set_attr "conds" "use")
- (set_attr "type" "branch")]
+ (set_attr "type" "branch")
+ (set (attr "length")
+ (if_then_else
+ (and (match_test "TARGET_THUMB2")
+ (and (ge (minus (match_dup 0) (pc)) (const_int -250))
+ (le (minus (match_dup 0) (pc)) (const_int 256))))
+ (const_int 2)
+ (const_int 4)))]
)
(define_insn "*arm_cond_branch_reversed"
return \"b%D1\\t%l0\";
"
[(set_attr "conds" "use")
- (set_attr "type" "branch")]
+ (set_attr "type" "branch")
+ (set (attr "length")
+ (if_then_else
+ (and (match_test "TARGET_THUMB2")
+ (and (ge (minus (match_dup 0) (pc)) (const_int -250))
+ (le (minus (match_dup 0) (pc)) (const_int 256))))
+ (const_int 2)
+ (const_int 4)))]
)
\f
(match_operand 3 "" "")]))]
"TARGET_32BIT"
"operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
- operands[2], operands[3]);
+ operands[2], operands[3], NULL_RTX);
operands[3] = const0_rtx;"
)
"TARGET_ARM"
"mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
[(set_attr "conds" "use")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
"TARGET_ARM"
"mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
[(set_attr "conds" "use")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
(not:SI (match_operator:SI 1 "arm_comparison_operator"
[(match_operand 2 "cc_register" "") (const_int 0)])))]
"TARGET_ARM"
- "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
+ "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
[(set_attr "conds" "use")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
(define_expand "cstoresi4"
[(set (match_operand:SI 0 "s_register_operand" "")
- (match_operator:SI 1 "arm_comparison_operator"
+ (match_operator:SI 1 "expandable_comparison_operator"
[(match_operand:SI 2 "s_register_operand" "")
(match_operand:SI 3 "reg_or_int_operand" "")]))]
"TARGET_32BIT || TARGET_THUMB1"
(define_expand "cstoresf4"
[(set (match_operand:SI 0 "s_register_operand" "")
- (match_operator:SI 1 "arm_comparison_operator"
+ (match_operator:SI 1 "expandable_comparison_operator"
[(match_operand:SF 2 "s_register_operand" "")
(match_operand:SF 3 "arm_float_compare_operand" "")]))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
(define_expand "cstoredf4"
[(set (match_operand:SI 0 "s_register_operand" "")
- (match_operator:SI 1 "arm_comparison_operator"
+ (match_operator:SI 1 "expandable_comparison_operator"
[(match_operand:DF 2 "s_register_operand" "")
(match_operand:DF 3 "arm_float_compare_operand" "")]))]
- "TARGET_32BIT && TARGET_HARD_FLOAT"
+ "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"emit_insn (gen_cstore_cc (operands[0], operands[1],
operands[2], operands[3])); DONE;"
)
(define_expand "cstoredi4"
[(set (match_operand:SI 0 "s_register_operand" "")
- (match_operator:SI 1 "arm_comparison_operator"
+ (match_operator:SI 1 "expandable_comparison_operator"
[(match_operand:DI 2 "cmpdi_operand" "")
(match_operand:DI 3 "cmpdi_operand" "")]))]
"TARGET_32BIT"
(define_expand "movsicc"
[(set (match_operand:SI 0 "s_register_operand" "")
- (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
+ (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
(match_operand:SI 2 "arm_not_operand" "")
(match_operand:SI 3 "arm_not_operand" "")))]
"TARGET_32BIT"
FAIL;
ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
- XEXP (operands[1], 1));
+ XEXP (operands[1], 1), NULL_RTX);
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
(define_expand "movsfcc"
[(set (match_operand:SF 0 "s_register_operand" "")
- (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
+ (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
(match_operand:SF 2 "s_register_operand" "")
(match_operand:SF 3 "nonmemory_operand" "")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
operands[3] = force_reg (SFmode, operands[3]);
ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
- XEXP (operands[1], 1));
+ XEXP (operands[1], 1), NULL_RTX);
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
(define_expand "movdfcc"
[(set (match_operand:DF 0 "s_register_operand" "")
- (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
+ (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
(match_operand:DF 2 "s_register_operand" "")
(match_operand:DF 3 "arm_float_add_operand" "")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
FAIL;
ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
- XEXP (operands[1], 1));
+ XEXP (operands[1], 1), NULL_RTX);
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}"
)
mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
[(set_attr "length" "4,4,4,4,8,8,8,8")
- (set_attr "conds" "use")]
+ (set_attr "conds" "use")
+ (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
)
(define_insn "*movsfcc_soft_insn"
"@
mov%D3\\t%0, %2
mov%d3\\t%0, %1"
- [(set_attr "conds" "use")]
+ [(set_attr "conds" "use")
+ (set_attr "insn" "mov")]
)
\f
return \"b%?\\t%l0\";
}
"
- [(set_attr "predicable" "yes")]
+ [(set_attr "predicable" "yes")
+ (set (attr "length")
+ (if_then_else
+ (and (match_test "TARGET_THUMB2")
+ (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
+ (le (minus (match_dup 0) (pc)) (const_int 2048))))
+ (const_int 2)
+ (const_int 4)))]
)
(define_insn "*thumb_jump"
if (REGNO (reg) == R0_REGNUM)
{
/* On thumb we have to use a write-back instruction. */
- emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
- TARGET_THUMB ? TRUE : FALSE, mem, &offset));
+ emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
+ TARGET_THUMB ? TRUE : FALSE, mem, &offset));
size = TARGET_ARM ? 16 : 0;
}
else
if (REGNO (reg) == R0_REGNUM)
{
/* On thumb we have to use a write-back instruction. */
- emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
- TARGET_THUMB ? TRUE : FALSE, mem, &offset));
+ emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
+ TARGET_THUMB ? TRUE : FALSE, mem, &offset));
size = TARGET_ARM ? 16 : 0;
}
else
rtx reg = gen_reg_rtx (SImode);
emit_insn (gen_addsi3 (reg, operands[0],
- GEN_INT (-INTVAL (operands[1]))));
+ gen_int_mode (-INTVAL (operands[1]),
+ SImode)));
operands[0] = reg;
}
;; Patterns to allow combination of arithmetic, cond code and shifts
(define_insn "*arith_shiftsi"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
(match_operator:SI 1 "shiftable_operator"
[(match_operator:SI 3 "shift_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "reg_or_int_operand" "rI")])
- (match_operand:SI 2 "s_register_operand" "rk")]))]
- "TARGET_ARM"
+ [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
+ (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
+ (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
+ "TARGET_32BIT"
"%i1%?\\t%0, %2, %4%S3"
[(set_attr "predicable" "yes")
(set_attr "shift" "4")
- (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "a,t2,t2,a")
+ ;; Thumb-2 does not allow the stack pointer to be used as operand 1
+ ;; for operations other than add and sub.  In that case the minus
+ ;; operation is a candidate for an rsub and hence needs to be
+ ;; disabled.
+ ;; We have to make sure to disable the fourth alternative if
+ ;; the shift_operator is MULT, since otherwise the insn will
+ ;; also match a multiply_accumulate pattern and validate_change
+ ;; will allow a replacement of the constant with a register
+ ;; despite the checks done in shift_operator.
+ (set_attr_alternative "insn_enabled"
+ [(const_string "yes")
+ (if_then_else
+ (match_operand:SI 1 "add_operator" "")
+ (const_string "yes") (const_string "no"))
+ (const_string "yes")
+ (if_then_else
+ (match_operand:SI 3 "mult_operator" "")
+ (const_string "no") (const_string "yes"))])
+ (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(match_operand:SI 6 "s_register_operand" "")])
(match_operand:SI 7 "arm_rhs_operand" "")]))
(clobber (match_operand:SI 8 "s_register_operand" ""))]
- "TARGET_ARM"
+ "TARGET_32BIT"
[(set (match_dup 8)
(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
(match_dup 6)]))
(define_insn "*arith_shiftsi_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
- [(match_operator:SI 3 "shift_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "reg_or_int_operand" "rI")])
- (match_operand:SI 2 "s_register_operand" "r")])
- (const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "=r")
+ (compare:CC_NOOV
+ (match_operator:SI 1 "shiftable_operator"
+ [(match_operator:SI 3 "shift_operator"
+ [(match_operand:SI 4 "s_register_operand" "r,r")
+ (match_operand:SI 5 "shift_amount_operand" "M,r")])
+ (match_operand:SI 2 "s_register_operand" "r,r")])
+ (const_int 0)))
+ (set (match_operand:SI 0 "s_register_operand" "=r,r")
(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
(match_dup 2)]))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"%i1%.\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
- (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
(define_insn "*arith_shiftsi_compare0_scratch"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
- [(match_operator:SI 3 "shift_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "reg_or_int_operand" "rI")])
- (match_operand:SI 2 "s_register_operand" "r")])
- (const_int 0)))
- (clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
+ (compare:CC_NOOV
+ (match_operator:SI 1 "shiftable_operator"
+ [(match_operator:SI 3 "shift_operator"
+ [(match_operand:SI 4 "s_register_operand" "r,r")
+ (match_operand:SI 5 "shift_amount_operand" "M,r")])
+ (match_operand:SI 2 "s_register_operand" "r,r")])
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=r,r"))]
+ "TARGET_32BIT"
"%i1%.\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
- (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
(define_insn "*sub_shiftsi"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (match_operand:SI 1 "s_register_operand" "r")
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r")
- (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
- "TARGET_ARM"
+ [(match_operand:SI 3 "s_register_operand" "r,r")
+ (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
+ "TARGET_32BIT"
"sub%?\\t%0, %1, %3%S2"
[(set_attr "predicable" "yes")
(set_attr "shift" "3")
- (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
(define_insn "*sub_shiftsi_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
- (minus:SI (match_operand:SI 1 "s_register_operand" "r")
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r")
- (match_operand:SI 4 "reg_or_int_operand" "rM")]))
+ [(match_operand:SI 3 "s_register_operand" "r,r")
+ (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
(const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
- (match_dup 4)])))]
- "TARGET_ARM"
+ (set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (minus:SI (match_dup 1)
+ (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
+ "TARGET_32BIT"
"sub%.\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
- (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
(define_insn "*sub_shiftsi_compare0_scratch"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
- (minus:SI (match_operand:SI 1 "s_register_operand" "r")
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r")
- (match_operand:SI 4 "reg_or_int_operand" "rM")]))
+ [(match_operand:SI 3 "s_register_operand" "r,r")
+ (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
(const_int 0)))
- (clobber (match_scratch:SI 0 "=r"))]
- "TARGET_ARM"
+ (clobber (match_scratch:SI 0 "=r,r"))]
+ "TARGET_32BIT"
"sub%.\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
- (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
- (const_string "alu_shift")
- (const_string "alu_shift_reg")))]
-)
-
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alu_shift,alu_shift_reg")])
\f
(define_insn "*and_scc"
"TARGET_ARM"
"mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
[(set_attr "conds" "use")
+ (set_attr "insn" "mov")
(set_attr "length" "8")]
)
(set (match_dup 0) (const_int 1)))
(match_scratch:SI 3 "r")]
"TARGET_32BIT"
- [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
+ [(parallel
+ [(set (reg:CC CC_REGNUM)
+ (compare:CC (match_dup 1) (match_dup 2)))
+ (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
(parallel
[(set (reg:CC CC_REGNUM)
(compare:CC (const_int 0) (match_dup 3)))
(set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
- (set (match_dup 0)
- (plus:SI (plus:SI (match_dup 0) (match_dup 3))
- (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
+ (parallel
+ [(set (match_dup 0)
+ (plus:SI (plus:SI (match_dup 0) (match_dup 3))
+ (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
+ (clobber (reg:CC CC_REGNUM))])])
(define_insn "*cond_move"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
return \"\";
"
[(set_attr "conds" "use")
+ (set_attr "insn" "mov")
(set_attr "length" "4,4,8")]
)
(set_attr "length" "8,12")]
)
-;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
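;; As a rough illustration, a combined test such as
;;   int both (int a, int b, int c, int d) { return a == b && c == d; }
;; can be evaluated as one compare followed by a conditional compare
;; (with an IT block on Thumb-2), which is what the patterns below emit;
;; the exact sequence depends on which comparison dominates.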
(define_insn "*cmp_ite0"
[(set (match_operand 6 "dominant_cc_register" "")
(compare
(if_then_else:SI
(match_operator 4 "arm_comparison_operator"
- [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
+ [(match_operand:SI 0 "s_register_operand"
+ "l,l,l,r,r,r,r,r,r")
+ (match_operand:SI 1 "arm_add_operand"
+ "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
- [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
+ [(match_operand:SI 2 "s_register_operand"
+ "l,r,r,l,l,r,r,r,r")
+ (match_operand:SI 3 "arm_add_operand"
+ "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
(const_int 0))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
{
- static const char * const opcodes[4][2] =
+ static const char * const cmp1[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp%d5\\t%0, %1\",
+ \"cmp%d4\\t%2, %3\"},
+ {\"cmn%d5\\t%0, #%n1\",
+ \"cmp%d4\\t%2, %3\"},
+ {\"cmp%d5\\t%0, %1\",
+ \"cmn%d4\\t%2, #%n3\"},
+ {\"cmn%d5\\t%0, #%n1\",
+ \"cmn%d4\\t%2, #%n3\"}
+ };
+ static const char * const cmp2[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp\\t%2, %3\",
+ \"cmp\\t%0, %1\"},
+ {\"cmp\\t%2, %3\",
+ \"cmn\\t%0, #%n1\"},
+ {\"cmn\\t%2, #%n3\",
+ \"cmp\\t%0, %1\"},
+ {\"cmn\\t%2, #%n3\",
+ \"cmn\\t%0, #%n1\"}
+ };
+ static const char * const ite[2] =
{
- {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
- \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
- {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
- \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
- {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
- \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
- {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
- \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
+ \"it\\t%d5\",
+ \"it\\t%d4\"
};
+ static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMN_CMP, CMP_CMN, CMN_CMN};
int swap =
comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
- return opcodes[which_alternative][swap];
+ output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
+ if (TARGET_THUMB2)
+   output_asm_insn (ite[swap], operands);
+ output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
+ return \"\";
}"
[(set_attr "conds" "set")
- (set_attr "length" "8")]
+ (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr_alternative "length"
+ [(const_int 6)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))])]
)
(define_insn "*cmp_ite1"
(compare
(if_then_else:SI
(match_operator 4 "arm_comparison_operator"
- [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
+ [(match_operand:SI 0 "s_register_operand"
+ "l,l,l,r,r,r,r,r,r")
+ (match_operand:SI 1 "arm_add_operand"
+ "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
- [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
+ [(match_operand:SI 2 "s_register_operand"
+ "l,r,r,l,l,r,r,r,r")
+ (match_operand:SI 3 "arm_add_operand"
+ "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
(const_int 1))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
{
- static const char * const opcodes[4][2] =
+ static const char * const cmp1[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp\\t%0, %1\",
+ \"cmp\\t%2, %3\"},
+ {\"cmn\\t%0, #%n1\",
+ \"cmp\\t%2, %3\"},
+ {\"cmp\\t%0, %1\",
+ \"cmn\\t%2, #%n3\"},
+ {\"cmn\\t%0, #%n1\",
+ \"cmn\\t%2, #%n3\"}
+ };
+ static const char * const cmp2[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp%d4\\t%2, %3\",
+ \"cmp%D5\\t%0, %1\"},
+ {\"cmp%d4\\t%2, %3\",
+ \"cmn%D5\\t%0, #%n1\"},
+ {\"cmn%d4\\t%2, #%n3\",
+ \"cmp%D5\\t%0, %1\"},
+ {\"cmn%d4\\t%2, #%n3\",
+ \"cmn%D5\\t%0, #%n1\"}
+ };
+ static const char * const ite[2] =
{
- {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
- \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
- {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
- \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
- {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
- \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
- {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
- \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
+ \"it\\t%d4\",
+ \"it\\t%D5\"
};
+ static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMN_CMP, CMP_CMN, CMN_CMN};
int swap =
comparison_dominates_p (GET_CODE (operands[5]),
reverse_condition (GET_CODE (operands[4])));
- return opcodes[which_alternative][swap];
+ output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
+ if (TARGET_THUMB2)
+   output_asm_insn (ite[swap], operands);
+ output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
+ return \"\";
}"
[(set_attr "conds" "set")
- (set_attr "length" "8")]
+ (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr_alternative "length"
+ [(const_int 6)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))])]
)
(define_insn "*cmp_and"
(compare
(and:SI
(match_operator 4 "arm_comparison_operator"
- [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
+ [(match_operand:SI 0 "s_register_operand"
+ "l,l,l,r,r,r,r,r,r")
+ (match_operand:SI 1 "arm_add_operand"
+ "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
- [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
+ [(match_operand:SI 2 "s_register_operand"
+ "l,r,r,l,l,r,r,r,r")
+ (match_operand:SI 3 "arm_add_operand"
+ "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
{
- static const char *const opcodes[4][2] =
+ static const char *const cmp1[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp%d5\\t%0, %1\",
+ \"cmp%d4\\t%2, %3\"},
+ {\"cmn%d5\\t%0, #%n1\",
+ \"cmp%d4\\t%2, %3\"},
+ {\"cmp%d5\\t%0, %1\",
+ \"cmn%d4\\t%2, #%n3\"},
+ {\"cmn%d5\\t%0, #%n1\",
+ \"cmn%d4\\t%2, #%n3\"}
+ };
+ static const char *const cmp2[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp\\t%2, %3\",
+ \"cmp\\t%0, %1\"},
+ {\"cmp\\t%2, %3\",
+ \"cmn\\t%0, #%n1\"},
+ {\"cmn\\t%2, #%n3\",
+ \"cmp\\t%0, %1\"},
+ {\"cmn\\t%2, #%n3\",
+ \"cmn\\t%0, #%n1\"}
+ };
+ static const char *const ite[2] =
{
- {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
- \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
- {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
- \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
- {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
- \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
- {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
- \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
+ \"it\\t%d5\",
+ \"it\\t%d4\"
};
+ static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMN_CMP, CMP_CMN, CMN_CMN};
int swap =
comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
- return opcodes[which_alternative][swap];
+ output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
+ if (TARGET_THUMB2)
+   output_asm_insn (ite[swap], operands);
+ output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
+ return \"\";
}"
[(set_attr "conds" "set")
(set_attr "predicable" "no")
- (set_attr "length" "8")]
+ (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr_alternative "length"
+ [(const_int 6)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))])]
)
(define_insn "*cmp_ior"
(compare
(ior:SI
(match_operator 4 "arm_comparison_operator"
- [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
+ [(match_operand:SI 0 "s_register_operand"
+ "l,l,l,r,r,r,r,r,r")
+ (match_operand:SI 1 "arm_add_operand"
+ "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
- [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
+ [(match_operand:SI 2 "s_register_operand"
+ "l,r,r,l,l,r,r,r,r")
+ (match_operand:SI 3 "arm_add_operand"
+ "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
(const_int 0)))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"*
-{
- static const char *const opcodes[4][2] =
{
- {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
- \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
- {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
- \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
- {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
- \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
- {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
- \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
- };
- int swap =
- comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
-
- return opcodes[which_alternative][swap];
-}
-"
+ static const char *const cmp1[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp\\t%0, %1\",
+ \"cmp\\t%2, %3\"},
+ {\"cmn\\t%0, #%n1\",
+ \"cmp\\t%2, %3\"},
+ {\"cmp\\t%0, %1\",
+ \"cmn\\t%2, #%n3\"},
+ {\"cmn\\t%0, #%n1\",
+ \"cmn\\t%2, #%n3\"}
+ };
+ static const char *const cmp2[NUM_OF_COND_CMP][2] =
+ {
+ {\"cmp%D4\\t%2, %3\",
+ \"cmp%D5\\t%0, %1\"},
+ {\"cmp%D4\\t%2, %3\",
+ \"cmn%D5\\t%0, #%n1\"},
+ {\"cmn%D4\\t%2, #%n3\",
+ \"cmp%D5\\t%0, %1\"},
+ {\"cmn%D4\\t%2, #%n3\",
+ \"cmn%D5\\t%0, #%n1\"}
+ };
+ static const char *const ite[2] =
+ {
+ \"it\\t%D4\",
+ \"it\\t%D5\"
+ };
+ static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMN_CMP, CMP_CMN, CMN_CMN};
+ int swap =
+ comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
+
+ output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
+ if (TARGET_THUMB2)
+   output_asm_insn (ite[swap], operands);
+ output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
+ return \"\";
+ }
+ "
[(set_attr "conds" "set")
- (set_attr "length" "8")]
+ (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr_alternative "length"
+ [(const_int 6)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (const_int 8)
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))
+ (if_then_else (eq_attr "is_thumb" "no")
+ (const_int 8)
+ (const_int 10))])]
)
(define_insn_and_split "*ior_scc_scc"
[(match_operand:SI 4 "s_register_operand" "r")
(match_operand:SI 5 "arm_add_operand" "rIL")])))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
!= CCmode)"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 7)
(compare
(ior:SI
(set (match_operand:SI 7 "s_register_operand" "=r")
(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 0)
(compare
(ior:SI
[(match_operand:SI 4 "s_register_operand" "r")
(match_operand:SI 5 "arm_add_operand" "rIL")])))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
!= CCmode)"
"#"
- "TARGET_ARM && reload_completed
+ "TARGET_32BIT && reload_completed
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
!= CCmode)"
[(set (match_dup 7)
(set (match_operand:SI 7 "s_register_operand" "=r")
(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
- "TARGET_ARM"
+ "TARGET_32BIT"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(set (match_dup 0)
(compare
(and:SI
[(match_operand:SI 4 "s_register_operand" "r,r,r")
(match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM
+ "TARGET_32BIT
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
== CCmode)"
"#"
- "TARGET_ARM && reload_completed"
+ "TARGET_32BIT && reload_completed"
[(parallel [(set (match_dup 0)
(match_op_dup 3 [(match_dup 1) (match_dup 2)]))
(clobber (reg:CC CC_REGNUM))])
mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
[(set_attr "conds" "use")
+ (set_attr "insn" "mvn")
(set_attr "length" "4,8,8")]
)
mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
[(set_attr "conds" "use")
+ (set_attr "insn" "mvn")
(set_attr "length" "4,8,8")]
)
[(set_attr "conds" "use")
(set_attr "shift" "2")
(set_attr "length" "4,8,8")
+ (set_attr "insn" "mov")
(set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
(const_string "alu_shift")
(const_string "alu_shift_reg")))]
[(set_attr "conds" "use")
(set_attr "shift" "2")
(set_attr "length" "4,8,8")
+ (set_attr "insn" "mov")
(set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
(const_string "alu_shift")
(const_string "alu_shift_reg")))]
[(set_attr "conds" "use")
(set_attr "shift" "1")
(set_attr "length" "8")
+ (set_attr "insn" "mov")
(set (attr "type") (if_then_else
(and (match_operand 2 "const_int_operand" "")
(match_operand 4 "const_int_operand" ""))
"TARGET_ARM"
"mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
[(set_attr "conds" "use")
+ (set_attr "insn" "mvn")
(set_attr "length" "8")]
)
"TARGET_ARM"
"mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
[(set_attr "conds" "use")
+ (set_attr "insn" "mvn")
(set_attr "length" "8")]
)
""
)
-; Peepholes to spot possible load- and store-multiples, if the ordering is
-; reversed, check that the memory references aren't volatile.
-
-(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=rk")
- (match_operand:SI 4 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=rk")
- (match_operand:SI 5 "memory_operand" "m"))
- (set (match_operand:SI 2 "s_register_operand" "=rk")
- (match_operand:SI 6 "memory_operand" "m"))
- (set (match_operand:SI 3 "s_register_operand" "=rk")
- (match_operand:SI 7 "memory_operand" "m"))]
- "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
- "*
- return emit_ldm_seq (operands, 4);
- "
-)
-
-(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=rk")
- (match_operand:SI 3 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=rk")
- (match_operand:SI 4 "memory_operand" "m"))
- (set (match_operand:SI 2 "s_register_operand" "=rk")
- (match_operand:SI 5 "memory_operand" "m"))]
- "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
- "*
- return emit_ldm_seq (operands, 3);
- "
-)
-
-(define_peephole
- [(set (match_operand:SI 0 "s_register_operand" "=rk")
- (match_operand:SI 2 "memory_operand" "m"))
- (set (match_operand:SI 1 "s_register_operand" "=rk")
- (match_operand:SI 3 "memory_operand" "m"))]
- "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
- "*
- return emit_ldm_seq (operands, 2);
- "
-)
-
-(define_peephole
- [(set (match_operand:SI 4 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "rk"))
- (set (match_operand:SI 5 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "rk"))
- (set (match_operand:SI 6 "memory_operand" "=m")
- (match_operand:SI 2 "s_register_operand" "rk"))
- (set (match_operand:SI 7 "memory_operand" "=m")
- (match_operand:SI 3 "s_register_operand" "rk"))]
- "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
- "*
- return emit_stm_seq (operands, 4);
- "
-)
-
-(define_peephole
- [(set (match_operand:SI 3 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "rk"))
- (set (match_operand:SI 4 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "rk"))
- (set (match_operand:SI 5 "memory_operand" "=m")
- (match_operand:SI 2 "s_register_operand" "rk"))]
- "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
- "*
- return emit_stm_seq (operands, 3);
- "
-)
-
-(define_peephole
- [(set (match_operand:SI 2 "memory_operand" "=m")
- (match_operand:SI 0 "s_register_operand" "rk"))
- (set (match_operand:SI 3 "memory_operand" "=m")
- (match_operand:SI 1 "s_register_operand" "rk"))]
- "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
- "*
- return emit_stm_seq (operands, 2);
- "
-)
-
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
DONE;
}
emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
- gen_rtvec (1,
- gen_rtx_RETURN (VOIDmode)),
- VUNSPEC_EPILOGUE));
+ gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
DONE;
"
)
+(define_insn "prologue_thumb1_interwork"
+ [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
+ "TARGET_THUMB1"
+ "* return thumb1_output_interwork ();"
+ [(set_attr "length" "8")]
+)
+
;; Note - although unspec_volatiles USE all hard registers,
;; USEs are ignored after reload has completed.  Thus we need
;; to add an unspec of the link register to ensure that flow
mvn%D4\\t%0, %2
mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
[(set_attr "conds" "use")
+ (set_attr "insn" "mvn")
(set_attr "length" "4,8")]
)
;; Push multiple registers to the stack. Registers are in parallel (use ...)
;; expressions. For simplicity, the first register is also in the unspec
;; part.
+;; To avoid the use of a GNU extension, the length attribute is computed
+;; by the C function arm_attr_length_push_multi.
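+;; As a sketch of the output, pushing r4, r5 and lr would be emitted as
+;; something like "stmfd sp!, {r4, r5, lr}" in ARM state and
+;; "push {r4, r5, lr}" in Thumb state (the register set here is purely
+;; illustrative).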
(define_insn "*push_multi"
[(match_parallel 2 "multi_register_push"
- [(set (match_operand:BLK 0 "memory_operand" "=m")
+ [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
(unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
UNSPEC_PUSH_MULT))])]
- "TARGET_32BIT"
+ ""
"*
{
int num_saves = XVECLEN (operands[2], 0);
In Thumb mode always use push, and the assembler will pick
something appropriate. */
if (num_saves == 1 && TARGET_ARM)
- output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
+ output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
else
{
int i;
char pattern[100];
if (TARGET_ARM)
- strcpy (pattern, \"stmfd\\t%m0!, {%1\");
+ strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
+ else if (TARGET_THUMB2)
+ strcpy (pattern, \"push%?\\t{%1\");
else
strcpy (pattern, \"push\\t{%1\");
return \"\";
}"
- [(set_attr "type" "store4")]
+ [(set_attr "type" "store4")
+ (set (attr "length")
+ (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
)
(define_insn "stack_tie"
{
char pattern[100];
- sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
+ sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
output_asm_insn (pattern, operands);
return \"\";
}"
- [(set_attr "type" "f_store")]
+ [(set_attr "type" "f_fpa_store")]
)
;; Special patterns for dealing with the constant pool
[(set_attr "conds" "clob")]
)
+;; tls descriptor call
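+;; For illustration, with -mtls-dialect=gnu2 a TLS access such as
+;;   __thread int tvar;  int get (void) { return tvar; }
+;; is expected to go through a descriptor call of this form, with the
+;; argument and result passed in r0 as the set below shows (the variable
+;; and function names are only examples).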
+(define_insn "tlscall"
+ [(set (reg:SI R0_REGNUM)
+ (unspec:SI [(reg:SI R0_REGNUM)
+ (match_operand:SI 0 "" "X")
+ (match_operand 1 "" "")] UNSPEC_TLS))
+ (clobber (reg:SI R1_REGNUM))
+ (clobber (reg:SI LR_REGNUM))
+ (clobber (reg:SI CC_REGNUM))]
+ "TARGET_GNU2_TLS"
+ {
+ targetm.asm_out.internal_label (asm_out_file, "LPIC",
+ INTVAL (operands[1]));
+ return "bl\\t%c0(tlscall)";
+ }
+ [(set_attr "conds" "clob")
+ (set_attr "length" "4")]
+)
+
+;;
+
+;; We only care about the lower 16 bits of the constant
+;; being inserted into the upper 16 bits of the register.
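+;; For example, an insertion such as
+;;   x = (x & 0xffff) | 0x12340000;
+;; can be matched by the pattern below and emitted as a single movt that
+;; writes 0x1234 into the upper halfword (the source form shown is only
+;; one way such an insertion can arise).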
(define_insn "*arm_movtas_ze"
[(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
(const_int 16)
(const_int 16))
(match_operand:SI 1 "const_int_operand" ""))]
- "TARGET_32BIT"
- "movt%?\t%0, %c1"
+ "arm_arch_thumb2"
+ "movt%?\t%0, %L1"
[(set_attr "predicable" "yes")
(set_attr "length" "4")]
)
-(define_insn "arm_rev"
+(define_insn "*arm_rev"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_EITHER && arm_arch6"
- "rev\t%0, %1"
- [(set (attr "length")
- (if_then_else (eq_attr "is_thumb" "yes")
- (const_int 2)
- (const_int 4)))]
+ "TARGET_32BIT && arm_arch6"
+ "rev%?\t%0, %1"
+ [(set_attr "predicable" "yes")
+ (set_attr "length" "4")]
+)
+
+(define_insn "*thumb1_rev"
+ [(set (match_operand:SI 0 "s_register_operand" "=l")
+ (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
+ "TARGET_THUMB1 && arm_arch6"
+ "rev\t%0, %1"
+ [(set_attr "length" "2")]
)
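;; For illustration, a byte swap written as
;;   unsigned int bswap32 (unsigned int x) { return __builtin_bswap32 (x); }
;; should map onto the rev patterns above on arm_arch6 targets via the
;; "bswapsi2" expander below (the function name is just an example).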
(define_expand "arm_legacy_rev"
(define_expand "bswapsi2"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
-"TARGET_EITHER"
+"TARGET_EITHER && (arm_arch6 || !optimize_size)"
"
- if (!arm_arch6)
- {
- if (!optimize_size)
- {
- rtx op2 = gen_reg_rtx (SImode);
- rtx op3 = gen_reg_rtx (SImode);
+ if (!arm_arch6)
+ {
+ rtx op2 = gen_reg_rtx (SImode);
+ rtx op3 = gen_reg_rtx (SImode);
- if (TARGET_THUMB)
- {
- rtx op4 = gen_reg_rtx (SImode);
- rtx op5 = gen_reg_rtx (SImode);
+ if (TARGET_THUMB)
+ {
+ rtx op4 = gen_reg_rtx (SImode);
+ rtx op5 = gen_reg_rtx (SImode);
- emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
- op2, op3, op4, op5));
- }
- else
- {
- emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
- op2, op3));
- }
+ emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
+ op2, op3, op4, op5));
+ }
+ else
+ {
+ emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
+ op2, op3));
+ }
- DONE;
- }
- else
- FAIL;
- }
+ DONE;
+ }
"
)
+;; Load the load/store multiple patterns
+(include "ldmstm.md")
;; Load the FPA co-processor patterns
(include "fpa.md")
;; Load the Maverick co-processor patterns
(include "thumb2.md")
;; Neon patterns
(include "neon.md")
-
+;; Synchronization Primitives
+(include "sync.md")
+;; Fixed-point patterns
+(include "arm-fixed.md")