diff --git a/gcc/config/arm/arm.md b/gcc/config/arm/arm.md index b28e3d01a57..5f3a5e0e96f 100644 --- a/gcc/config/arm/arm.md +++ b/gcc/config/arm/arm.md @@ -1,6 +1,7 @@ ;;- Machine description for ARM for GNU compiler ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000, -;; 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 +;; Free Software Foundation, Inc. ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl) ;; and Martin Simmons (@harleqn.co.uk). ;; More major hacks by Richard Earnshaw (rearnsha@arm.com). @@ -9,7 +10,7 @@ ;; GCC is free software; you can redistribute it and/or modify it ;; under the terms of the GNU General Public License as published -;; by the Free Software Foundation; either version 2, or (at your +;; by the Free Software Foundation; either version 3, or (at your ;; option) any later version. ;; GCC is distributed in the hope that it will be useful, but WITHOUT @@ -18,9 +19,8 @@ ;; License for more details. ;; You should have received a copy of the GNU General Public License -;; along with GCC; see the file COPYING. If not, write to -;; the Free Software Foundation, 51 Franklin Street, Fifth Floor, -;; Boston, MA 02110-1301, USA. +;; along with GCC; see the file COPYING3. If not see +;; <http://www.gnu.org/licenses/>. ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al. @@ -51,6 +51,7 @@ ;; UNSPEC Usage: ;; Note: sin and cos are no-longer used. +;; Unspec constants for Neon are defined in neon.md. (define_constants [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT): @@ -66,10 +67,9 @@ (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic ; usage, that is, we will add the pic_register ; value to it before trying to dereference it. - (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the - ; GLOBAL_OFFSET_TABLE. The operation is fully - ; described by the RTL but must be wrapped to - ; prevent combine from trying to rip it apart. + (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together, + ; The last operand is the number of a PIC_LABEL + ; that points at the containing instruction. (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses ; being scheduled before the stack adjustment insn. (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload, @@ -93,6 +93,12 @@ (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage. (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the ; instruction stream. + (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to + ; generate correct unwind information. + (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated + ; correctly for PIC usage. + (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a + ; a given symbolic address. ] ) @@ -117,12 +123,14 @@ ; a 32-bit object. (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for ; a 64-bit object. - (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction. - (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction. 
- (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN - (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions - (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions - (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions + (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for + ; a 128-bit object. + (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction. + (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction. + (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN + (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions + (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions + (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions (VUNSPEC_EH_RETURN 20); Use to override the return address for exception ; handling. ] @@ -150,7 +158,7 @@ ; Floating Point Unit. If we only have floating point emulation, then there ; is no point in scheduling the floating point insns. (Well, for best ; performance we should try and group them together). -(define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp" +(define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon" (const (symbol_ref "arm_fpu_attr"))) ; LENGTH of an instruction (in bytes) @@ -177,7 +185,7 @@ ;; scheduling information. (define_attr "insn" - "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other" + "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other" (const_string "other")) ; TYPE attribute is used to detect floating point instructions which, if @@ -186,6 +194,8 @@ ; scheduling of writes. ; Classification of each insn +; Note: vfp.md has different meanings for some of these, and some further +; types as well. See that file for details. ; alu any alu instruction that doesn't hit memory or fp ; regs or have a shifted source operand ; alu_shift any data instruction that doesn't hit memory or fp @@ -228,8 +238,9 @@ ; mav_farith Floating point arithmetic (4 cycle) ; mav_dmult Double multiplies (7 cycle) ; + (define_attr "type" - "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult" + "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys" (if_then_else (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals") (const_string "mult") @@ -239,6 +250,75 @@ ; initialized by arm_override_options() (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched"))) +;; Classification of NEON instructions for scheduling purposes. +;; Do not set this attribute and the "type" attribute together in +;; any one instruction pattern. 
+(define_attr "neon_type" + "neon_int_1,\ + neon_int_2,\ + neon_int_3,\ + neon_int_4,\ + neon_int_5,\ + neon_vqneg_vqabs,\ + neon_vmov,\ + neon_vaba,\ + neon_vsma,\ + neon_vaba_qqq,\ + neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\ + neon_mul_qqq_8_16_32_ddd_32,\ + neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\ + neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\ + neon_mla_qqq_8_16,\ + neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\ + neon_mla_qqq_32_qqd_32_scalar,\ + neon_mul_ddd_16_scalar_32_16_long_scalar,\ + neon_mul_qqd_32_scalar,\ + neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\ + neon_shift_1,\ + neon_shift_2,\ + neon_shift_3,\ + neon_vshl_ddd,\ + neon_vqshl_vrshl_vqrshl_qqq,\ + neon_vsra_vrsra,\ + neon_fp_vadd_ddd_vabs_dd,\ + neon_fp_vadd_qqq_vabs_qq,\ + neon_fp_vsum,\ + neon_fp_vmul_ddd,\ + neon_fp_vmul_qqd,\ + neon_fp_vmla_ddd,\ + neon_fp_vmla_qqq,\ + neon_fp_vmla_ddd_scalar,\ + neon_fp_vmla_qqq_scalar,\ + neon_fp_vrecps_vrsqrts_ddd,\ + neon_fp_vrecps_vrsqrts_qqq,\ + neon_bp_simple,\ + neon_bp_2cycle,\ + neon_bp_3cycle,\ + neon_ldr,\ + neon_str,\ + neon_vld1_1_2_regs,\ + neon_vld1_3_4_regs,\ + neon_vld2_2_regs_vld1_vld2_all_lanes,\ + neon_vld2_4_regs,\ + neon_vld3_vld4,\ + neon_vst1_1_2_regs_vst2_2_regs,\ + neon_vst1_3_4_regs,\ + neon_vst2_4_regs_vst3_vst4,\ + neon_vst3_vst4,\ + neon_vld1_vld2_lane,\ + neon_vld3_vld4_lane,\ + neon_vst1_vst2_lane,\ + neon_vst3_vst4_lane,\ + neon_vld3_vld4_all_lanes,\ + neon_mcr,\ + neon_mcr_2_mcrr,\ + neon_mrc,\ + neon_mrrc,\ + neon_ldm_2,\ + neon_stm_2,\ + none" + (const_string "none")) + ; condition codes: this one is used by final_prescan_insn to speed up ; conditionalizing instructions. It saves having to scan the rtl to see if ; it uses or alters the condition codes. @@ -256,13 +336,17 @@ ; JUMP_CLOB is used when the condition cannot be represented by a single ; instruction (UNEQ and LTGT). These cannot be predicated. ; +; UNCONDITIONAL means the instions can not be conditionally executed. +; ; NOCOND means that the condition codes are neither altered nor affect the ; output of this insn -(define_attr "conds" "use,set,clob,jump_clob,nocond" +(define_attr "conds" "use,set,clob,jump_clob,unconditional,nocond" (if_then_else (eq_attr "type" "call") (const_string "clob") - (const_string "nocond"))) + (if_then_else (eq_attr "neon_type" "none") + (const_string "nocond") + (const_string "unconditional")))) ; Predicable means that the insn can be conditionally executed based on ; an automatically added predicate (additional patterns are generated by @@ -297,13 +381,17 @@ (define_attr "far_jump" "yes,no" (const_string "no")) +;; The number of machine instructions this pattern expands to. +;; Used for Thumb-2 conditional execution. +(define_attr "ce_count" "" (const_int 1)) + ;;--------------------------------------------------------------------------- -;; Mode macros +;; Mode iterators ; A list of modes that are exactly 64 bits in size. We use this to expand ; some splits that are the same for all modes when operating on ARM ; registers. -(define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF]) +(define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF]) ;;--------------------------------------------------------------------------- ;; Predicates @@ -317,18 +405,26 @@ ;; Processor type. This is created automatically from arm-cores.def. 
(include "arm-tune.md") +(define_attr "tune_cortexr4" "yes,no" + (const (if_then_else + (eq_attr "tune" "cortexr4,cortexr4f") + (const_string "yes") + (const_string "no")))) + ;; True if the generic scheduling description should be used. (define_attr "generic_sched" "yes,no" (const (if_then_else - (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs") + (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9") + (eq_attr "tune_cortexr4" "yes")) (const_string "no") (const_string "yes")))) (define_attr "generic_vfp" "yes,no" (const (if_then_else (and (eq_attr "fpu" "vfp") - (eq_attr "tune" "!arm1020e,arm1022e")) + (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9") + (eq_attr "tune_cortexr4" "no")) (const_string "yes") (const_string "no")))) @@ -337,6 +433,11 @@ (include "arm1020e.md") (include "arm1026ejs.md") (include "arm1136jfs.md") +(include "cortex-a8.md") +(include "cortex-a9.md") +(include "cortex-r4.md") +(include "cortex-r4f.md") +(include "vfp11.md") ;;--------------------------------------------------------------------------- @@ -368,23 +469,23 @@ DONE; } - if (TARGET_THUMB) + if (TARGET_THUMB1) { if (GET_CODE (operands[1]) != REG) - operands[1] = force_reg (SImode, operands[1]); + operands[1] = force_reg (DImode, operands[1]); if (GET_CODE (operands[2]) != REG) - operands[2] = force_reg (SImode, operands[2]); + operands[2] = force_reg (DImode, operands[2]); } " ) -(define_insn "*thumb_adddi3" +(define_insn "*thumb1_adddi3" [(set (match_operand:DI 0 "register_operand" "=l") (plus:DI (match_operand:DI 1 "register_operand" "%0") (match_operand:DI 2 "register_operand" "l"))) (clobber (reg:CC CC_REGNUM)) ] - "TARGET_THUMB" + "TARGET_THUMB1" "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2" [(set_attr "length" "4")] ) @@ -394,9 +495,9 @@ (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0") (match_operand:DI 2 "s_register_operand" "r, 0"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" + "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(parallel [(set (reg:CC_C CC_REGNUM) (compare:CC_C (plus:SI (match_dup 1) (match_dup 2)) (match_dup 1))) @@ -422,9 +523,9 @@ (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "r,0"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" + "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(parallel [(set (reg:CC_C CC_REGNUM) (compare:CC_C (plus:SI (match_dup 1) (match_dup 2)) (match_dup 1))) @@ -451,9 +552,9 @@ (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "r,0"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" + "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(parallel [(set (reg:CC_C CC_REGNUM) (compare:CC_C (plus:SI (match_dup 1) (match_dup 2)) (match_dup 1))) @@ -478,11 +579,11 @@ (match_operand:SI 2 "reg_or_int_operand" "")))] "TARGET_EITHER" " - if (TARGET_ARM && GET_CODE (operands[2]) == CONST_INT) + if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT) { arm_split_constant (PLUS, SImode, NULL_RTX, INTVAL (operands[2]), operands[0], operands[1], - optimize && !no_new_pseudos); + optimize && can_create_pseudo_p ()); DONE; } " @@ -495,7 +596,7 
@@ (set (match_operand:SI 0 "arm_general_register_operand" "") (plus:SI (match_operand:SI 1 "arm_general_register_operand" "") (match_operand:SI 2 "const_int_operand" "")))] - "TARGET_ARM && + "TARGET_32BIT && !(const_ok_for_arm (INTVAL (operands[2])) || const_ok_for_arm (-INTVAL (operands[2]))) && const_ok_for_arm (~INTVAL (operands[2]))" @@ -504,19 +605,26 @@ "" ) +;; The r/r/k alternative is required when reloading the address +;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will +;; put the duplicated register first, and not try the commutative version. (define_insn_and_split "*arm_addsi3" - [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") - (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r") - (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))] - "TARGET_ARM" + [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r") + (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk") + (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))] + "TARGET_32BIT" "@ add%?\\t%0, %1, %2 + add%?\\t%0, %1, %2 + add%?\\t%0, %2, %1 + sub%?\\t%0, %1, #%n2 sub%?\\t%0, %1, #%n2 #" - "TARGET_ARM && - GET_CODE (operands[2]) == CONST_INT + "TARGET_32BIT + && GET_CODE (operands[2]) == CONST_INT && !(const_ok_for_arm (INTVAL (operands[2])) - || const_ok_for_arm (-INTVAL (operands[2])))" + || const_ok_for_arm (-INTVAL (operands[2]))) + && (reload_completed || !arm_eliminable_register (operands[1]))" [(clobber (const_int 0))] " arm_split_constant (PLUS, SImode, curr_insn, @@ -524,7 +632,7 @@ operands[1], 0); DONE; " - [(set_attr "length" "4,4,16") + [(set_attr "length" "4,4,4,4,4,16") (set_attr "predicable" "yes")] ) @@ -532,11 +640,11 @@ ;; register. Trying to reload it will always fail catastrophically, ;; so never allow those alternatives to match if reloading is needed. -(define_insn "*thumb_addsi3" - [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k") +(define_insn "*thumb1_addsi3" + [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k") (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k") - (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))] - "TARGET_THUMB" + (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))] + "TARGET_THUMB1" "* static const char * const asms[] = { @@ -564,13 +672,14 @@ (match_operand:SI 1 "const_int_operand" "")) (set (match_dup 0) (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))] - "TARGET_THUMB + "TARGET_THUMB1 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024 && (INTVAL (operands[1]) & 3) == 0" [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))] "" ) +;; ??? 
Make Thumb-2 variants which prefer low regs (define_insn "*addsi3_compare0" [(set (reg:CC_NOOV CC_REGNUM) (compare:CC_NOOV @@ -579,10 +688,10 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (plus:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" + "TARGET_32BIT" "@ - add%?s\\t%0, %1, %2 - sub%?s\\t%0, %1, #%n2" + add%.\\t%0, %1, %2 + sub%.\\t%0, %1, #%n2" [(set_attr "conds" "set")] ) @@ -592,7 +701,7 @@ (plus:SI (match_operand:SI 0 "s_register_operand" "r, r") (match_operand:SI 1 "arm_add_operand" "rI,L")) (const_int 0)))] - "TARGET_ARM" + "TARGET_32BIT" "@ cmn%?\\t%0, %1 cmp%?\\t%0, #%n1" @@ -604,7 +713,7 @@ (compare:CC_Z (neg:SI (match_operand:SI 0 "s_register_operand" "r")) (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM" + "TARGET_32BIT" "cmn%?\\t%1, %0" [(set_attr "conds" "set")] ) @@ -619,10 +728,10 @@ (set (match_operand:SI 0 "s_register_operand" "=r,r") (plus:SI (match_dup 1) (match_operand:SI 3 "arm_addimm_operand" "L,I")))] - "TARGET_ARM && INTVAL (operands[2]) == -INTVAL (operands[3])" + "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])" "@ - sub%?s\\t%0, %1, %2 - add%?s\\t%0, %1, #%n2" + sub%.\\t%0, %1, %2 + add%.\\t%0, %1, #%n2" [(set_attr "conds" "set")] ) @@ -646,7 +755,7 @@ [(match_dup 2) (const_int 0)]) (match_operand 4 "" "") (match_operand 5 "" "")))] - "TARGET_ARM && peep2_reg_dead_p (3, operands[2])" + "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])" [(parallel[ (set (match_dup 2) (compare:CC @@ -675,10 +784,10 @@ (match_dup 1))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (plus:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" + "TARGET_32BIT" "@ - add%?s\\t%0, %1, %2 - sub%?s\\t%0, %1, #%n2" + add%.\\t%0, %1, %2 + sub%.\\t%0, %1, #%n2" [(set_attr "conds" "set")] ) @@ -690,10 +799,10 @@ (match_dup 2))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (plus:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" + "TARGET_32BIT" "@ - add%?s\\t%0, %1, %2 - sub%?s\\t%0, %1, #%n2" + add%.\\t%0, %1, %2 + sub%.\\t%0, %1, #%n2" [(set_attr "conds" "set")] ) @@ -703,7 +812,7 @@ (plus:SI (match_operand:SI 0 "s_register_operand" "r,r") (match_operand:SI 1 "arm_add_operand" "rI,L")) (match_dup 0)))] - "TARGET_ARM" + "TARGET_32BIT" "@ cmn%?\\t%0, %1 cmp%?\\t%0, #%n1" @@ -716,7 +825,7 @@ (plus:SI (match_operand:SI 0 "s_register_operand" "r,r") (match_operand:SI 1 "arm_add_operand" "rI,L")) (match_dup 1)))] - "TARGET_ARM" + "TARGET_32BIT" "@ cmn%?\\t%0, %1 cmp%?\\t%0, #%n1" @@ -728,7 +837,7 @@ (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0)) (plus:SI (match_operand:SI 1 "s_register_operand" "r") (match_operand:SI 2 "arm_rhs_operand" "rI"))))] - "TARGET_ARM" + "TARGET_32BIT" "adc%?\\t%0, %1, %2" [(set_attr "conds" "use")] ) @@ -741,7 +850,7 @@ [(match_operand:SI 3 "s_register_operand" "r") (match_operand:SI 4 "reg_or_int_operand" "rM")]) (match_operand:SI 1 "s_register_operand" "r"))))] - "TARGET_ARM" + "TARGET_32BIT" "adc%?\\t%0, %1, %3%S2" [(set_attr "conds" "use") (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "") @@ -754,7 +863,7 @@ (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r") (match_operand:SI 2 "arm_rhs_operand" "rI")) (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))] - "TARGET_ARM" + "TARGET_32BIT" "adc%?\\t%0, %1, %2" [(set_attr "conds" "use")] ) @@ -764,7 +873,7 @@ (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0)) (match_operand:SI 1 "s_register_operand" "r")) (match_operand:SI 2 "arm_rhs_operand" "rI")))] - "TARGET_ARM" + "TARGET_32BIT" "adc%?\\t%0, %1, %2" 
[(set_attr "conds" "use")] ) @@ -774,12 +883,21 @@ (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0)) (match_operand:SI 2 "arm_rhs_operand" "rI")) (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM" + "TARGET_32BIT" "adc%?\\t%0, %1, %2" [(set_attr "conds" "use")] ) -(define_insn "incscc" +(define_expand "incscc" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (plus:SI (match_operator:SI 2 "arm_comparison_operator" + [(match_operand:CC 3 "cc_register" "") (const_int 0)]) + (match_operand:SI 1 "s_register_operand" "0,?r")))] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_incscc" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (plus:SI (match_operator:SI 2 "arm_comparison_operator" [(match_operand:CC 3 "cc_register" "") (const_int 0)]) @@ -799,7 +917,7 @@ (match_operand:SI 2 "s_register_operand" "")) (const_int -1))) (clobber (match_operand:SI 3 "s_register_operand" ""))] - "TARGET_ARM" + "TARGET_32BIT" [(set (match_dup 3) (match_dup 1)) (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))] " @@ -810,7 +928,7 @@ [(set (match_operand:SF 0 "s_register_operand" "") (plus:SF (match_operand:SF 1 "s_register_operand" "") (match_operand:SF 2 "arm_float_add_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK && !cirrus_fp_register (operands[2], SFmode)) @@ -821,7 +939,7 @@ [(set (match_operand:DF 0 "s_register_operand" "") (plus:DF (match_operand:DF 1 "s_register_operand" "") (match_operand:DF 2 "arm_float_add_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK && !cirrus_fp_register (operands[2], DFmode)) @@ -837,7 +955,7 @@ "TARGET_EITHER" " if (TARGET_HARD_FLOAT && TARGET_MAVERICK - && TARGET_ARM + && TARGET_32BIT && cirrus_fp_register (operands[0], DImode) && cirrus_fp_register (operands[1], DImode)) { @@ -845,7 +963,7 @@ DONE; } - if (TARGET_THUMB) + if (TARGET_THUMB1) { if (GET_CODE (operands[1]) != REG) operands[1] = force_reg (SImode, operands[1]); @@ -860,7 +978,7 @@ (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0") (match_operand:DI 2 "s_register_operand" "r,0,0"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -871,7 +989,7 @@ (minus:DI (match_operand:DI 1 "register_operand" "0") (match_operand:DI 2 "register_operand" "l"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_THUMB" + "TARGET_THUMB1" "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2" [(set_attr "length" "4")] ) @@ -882,7 +1000,7 @@ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -894,7 +1012,7 @@ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -931,8 +1049,8 @@ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" - "subs\\t%Q0, %1, %2\;rsc\\t%R0, %1, %1" + "TARGET_32BIT" + "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1" [(set_attr "conds" "clob") (set_attr "length" "8")] ) @@ -945,37 +1063,39 @@ " if (GET_CODE (operands[1]) == CONST_INT) { - if (TARGET_ARM) + if (TARGET_32BIT) { arm_split_constant (MINUS, SImode, NULL_RTX, INTVAL 
(operands[1]), operands[0], - operands[2], optimize && !no_new_pseudos); + operands[2], optimize && can_create_pseudo_p ()); DONE; } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ operands[1] = force_reg (SImode, operands[1]); } " ) -(define_insn "*thumb_subsi3_insn" +(define_insn "*thumb1_subsi3_insn" [(set (match_operand:SI 0 "register_operand" "=l") (minus:SI (match_operand:SI 1 "register_operand" "l") (match_operand:SI 2 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "sub\\t%0, %1, %2" [(set_attr "length" "2")] ) +; ??? Check Thumb-2 split length (define_insn_and_split "*arm_subsi3_insn" - [(set (match_operand:SI 0 "s_register_operand" "=r,r") - (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n") - (match_operand:SI 2 "s_register_operand" "r,r")))] - "TARGET_ARM" + [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r") + (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n") + (match_operand:SI 2 "s_register_operand" "r, r, r")))] + "TARGET_32BIT" "@ rsb%?\\t%0, %2, %1 + sub%?\\t%0, %1, %2 #" - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[1]) == CONST_INT && !const_ok_for_arm (INTVAL (operands[1]))" [(clobber (const_int 0))] @@ -984,7 +1104,7 @@ INTVAL (operands[1]), operands[0], operands[2], 0); DONE; " - [(set_attr "length" "4,16") + [(set_attr "length" "4,4,16") (set_attr "predicable" "yes")] ) @@ -993,7 +1113,7 @@ (set (match_operand:SI 0 "arm_general_register_operand" "") (minus:SI (match_operand:SI 1 "const_int_operand" "") (match_operand:SI 2 "arm_general_register_operand" "")))] - "TARGET_ARM + "TARGET_32BIT && !const_ok_for_arm (INTVAL (operands[1])) && const_ok_for_arm (~INTVAL (operands[1]))" [(set (match_dup 3) (match_dup 1)) @@ -1009,14 +1129,23 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (minus:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" + "TARGET_32BIT" "@ - sub%?s\\t%0, %1, %2 - rsb%?s\\t%0, %2, %1" + sub%.\\t%0, %1, %2 + rsb%.\\t%0, %2, %1" [(set_attr "conds" "set")] ) -(define_insn "decscc" +(define_expand "decscc" + [(set (match_operand:SI 0 "s_register_operand" "=r,r") + (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r") + (match_operator:SI 2 "arm_comparison_operator" + [(match_operand 3 "cc_register" "") (const_int 0)])))] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_decscc" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r") (match_operator:SI 2 "arm_comparison_operator" @@ -1033,7 +1162,7 @@ [(set (match_operand:SF 0 "s_register_operand" "") (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "") (match_operand:SF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -1048,7 +1177,7 @@ [(set (match_operand:DF 0 "s_register_operand" "") (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "") (match_operand:DF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -1075,12 +1204,22 @@ [(set (match_operand:SI 0 "s_register_operand" "=&r,&r") (mult:SI (match_operand:SI 2 "s_register_operand" "r,r") (match_operand:SI 1 "s_register_operand" "%?r,0")))] - "TARGET_ARM" + "TARGET_32BIT && !arm_arch6" "mul%?\\t%0, %2, %1" [(set_attr "insn" "mul") (set_attr "predicable" "yes")] ) +(define_insn "*arm_mulsi3_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (mult:SI (match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "s_register_operand" 
"r")))] + "TARGET_32BIT && arm_arch6" + "mul%?\\t%0, %1, %2" + [(set_attr "insn" "mul") + (set_attr "predicable" "yes")] +) + ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands ; 1 and 2; are the same, because reload will make operand 0 match ; operand 1 without realizing that this conflicts with operand 2. We fix @@ -1090,7 +1229,7 @@ [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l") (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0") (match_operand:SI 2 "register_operand" "l,l,l")))] - "TARGET_THUMB" + "TARGET_THUMB1 && !arm_arch6" "* if (which_alternative < 2) return \"mov\\t%0, %1\;mul\\t%0, %2\"; @@ -1101,6 +1240,19 @@ (set_attr "insn" "mul")] ) +(define_insn "*thumb_mulsi3_v6" + [(set (match_operand:SI 0 "register_operand" "=l,l,l") + (mult:SI (match_operand:SI 1 "register_operand" "0,l,0") + (match_operand:SI 2 "register_operand" "l,0,0")))] + "TARGET_THUMB1 && arm_arch6" + "@ + mul\\t%0, %2 + mul\\t%0, %1 + mul\\t%0, %1" + [(set_attr "length" "2") + (set_attr "insn" "mul")] +) + (define_insn "*mulsi3_compare0" [(set (reg:CC_NOOV CC_REGNUM) (compare:CC_NOOV (mult:SI @@ -1109,8 +1261,22 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=&r,&r") (mult:SI (match_dup 2) (match_dup 1)))] - "TARGET_ARM" - "mul%?s\\t%0, %2, %1" + "TARGET_ARM && !arm_arch6" + "mul%.\\t%0, %2, %1" + [(set_attr "conds" "set") + (set_attr "insn" "muls")] +) + +(define_insn "*mulsi3_compare0_v6" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV (mult:SI + (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r")) + (const_int 0))) + (set (match_operand:SI 0 "s_register_operand" "=r") + (mult:SI (match_dup 2) (match_dup 1)))] + "TARGET_ARM && arm_arch6 && optimize_size" + "mul%.\\t%0, %2, %1" [(set_attr "conds" "set") (set_attr "insn" "muls")] ) @@ -1122,8 +1288,21 @@ (match_operand:SI 1 "s_register_operand" "%?r,0")) (const_int 0))) (clobber (match_scratch:SI 0 "=&r,&r"))] - "TARGET_ARM" - "mul%?s\\t%0, %2, %1" + "TARGET_ARM && !arm_arch6" + "mul%.\\t%0, %2, %1" + [(set_attr "conds" "set") + (set_attr "insn" "muls")] +) + +(define_insn "*mulsi_compare0_scratch_v6" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV (mult:SI + (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r")) + (const_int 0))) + (clobber (match_scratch:SI 0 "=r"))] + "TARGET_ARM && arm_arch6 && optimize_size" + "mul%.\\t%0, %2, %1" [(set_attr "conds" "set") (set_attr "insn" "muls")] ) @@ -1136,7 +1315,19 @@ (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r") (match_operand:SI 1 "s_register_operand" "%r,0,r,0")) (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))] - "TARGET_ARM" + "TARGET_32BIT && !arm_arch6" + "mla%?\\t%0, %2, %1, %3" + [(set_attr "insn" "mla") + (set_attr "predicable" "yes")] +) + +(define_insn "*mulsi3addsi_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (plus:SI + (mult:SI (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r")) + (match_operand:SI 3 "s_register_operand" "r")))] + "TARGET_32BIT && arm_arch6" "mla%?\\t%0, %2, %1, %3" [(set_attr "insn" "mla") (set_attr "predicable" "yes")] @@ -1153,8 +1344,25 @@ (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r") (plus:SI (mult:SI (match_dup 2) (match_dup 1)) (match_dup 3)))] - "TARGET_ARM" - "mla%?s\\t%0, %2, %1, %3" + "TARGET_ARM && arm_arch6" + "mla%.\\t%0, %2, %1, %3" + [(set_attr "conds" "set") + (set_attr "insn" "mlas")] +) + +(define_insn "*mulsi3addsi_compare0_v6" + 
[(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (plus:SI (mult:SI + (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r")) + (match_operand:SI 3 "s_register_operand" "r")) + (const_int 0))) + (set (match_operand:SI 0 "s_register_operand" "=r") + (plus:SI (mult:SI (match_dup 2) (match_dup 1)) + (match_dup 3)))] + "TARGET_ARM && arm_arch6 && optimize_size" + "mla%.\\t%0, %2, %1, %3" [(set_attr "conds" "set") (set_attr "insn" "mlas")] ) @@ -1168,12 +1376,39 @@ (match_operand:SI 3 "s_register_operand" "?r,r,0,0")) (const_int 0))) (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))] - "TARGET_ARM" - "mla%?s\\t%0, %2, %1, %3" + "TARGET_ARM && !arm_arch6" + "mla%.\\t%0, %2, %1, %3" [(set_attr "conds" "set") (set_attr "insn" "mlas")] ) +(define_insn "*mulsi3addsi_compare0_scratch_v6" + [(set (reg:CC_NOOV CC_REGNUM) + (compare:CC_NOOV + (plus:SI (mult:SI + (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r")) + (match_operand:SI 3 "s_register_operand" "r")) + (const_int 0))) + (clobber (match_scratch:SI 0 "=r"))] + "TARGET_ARM && arm_arch6 && optimize_size" + "mla%.\\t%0, %2, %1, %3" + [(set_attr "conds" "set") + (set_attr "insn" "mlas")] +) + +(define_insn "*mulsi3subsi" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (minus:SI + (match_operand:SI 3 "s_register_operand" "r") + (mult:SI (match_operand:SI 2 "s_register_operand" "r") + (match_operand:SI 1 "s_register_operand" "r"))))] + "TARGET_32BIT && arm_arch_thumb2" + "mls%?\\t%0, %2, %1, %3" + [(set_attr "insn" "mla") + (set_attr "predicable" "yes")] +) + ;; Unnamed template to match long long multiply-accumulate (smlal) (define_insn "*mulsidi3adddi" @@ -1183,29 +1418,88 @@ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r")) (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r"))) (match_operand:DI 1 "s_register_operand" "0")))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && arm_arch3m && !arm_arch6" "smlal%?\\t%Q0, %R0, %3, %2" [(set_attr "insn" "smlal") (set_attr "predicable" "yes")] ) -(define_insn "mulsidi3" +(define_insn "*mulsidi3adddi_v6" + [(set (match_operand:DI 0 "s_register_operand" "=r") + (plus:DI + (mult:DI + (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")) + (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r"))) + (match_operand:DI 1 "s_register_operand" "0")))] + "TARGET_32BIT && arm_arch6" + "smlal%?\\t%Q0, %R0, %3, %2" + [(set_attr "insn" "smlal") + (set_attr "predicable" "yes")] +) + +;; 32x32->64 widening multiply. +;; As with mulsi3, the only difference between the v3-5 and v6+ +;; versions of these patterns is the requirement that the output not +;; overlap the inputs, but that still means we have to have a named +;; expander and two different starred insns. 
+ +(define_expand "mulsidi3" + [(set (match_operand:DI 0 "s_register_operand" "") + (mult:DI + (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")) + (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))] + "TARGET_32BIT && arm_arch3m" + "" +) + +(define_insn "*mulsidi3_nov6" [(set (match_operand:DI 0 "s_register_operand" "=&r") (mult:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r")) (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && arm_arch3m && !arm_arch6" "smull%?\\t%Q0, %R0, %1, %2" [(set_attr "insn" "smull") (set_attr "predicable" "yes")] ) -(define_insn "umulsidi3" +(define_insn "*mulsidi3_v6" + [(set (match_operand:DI 0 "s_register_operand" "=r") + (mult:DI + (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")) + (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))] + "TARGET_32BIT && arm_arch6" + "smull%?\\t%Q0, %R0, %1, %2" + [(set_attr "insn" "smull") + (set_attr "predicable" "yes")] +) + +(define_expand "umulsidi3" + [(set (match_operand:DI 0 "s_register_operand" "") + (mult:DI + (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")) + (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))] + "TARGET_32BIT && arm_arch3m" + "" +) + +(define_insn "*umulsidi3_nov6" [(set (match_operand:DI 0 "s_register_operand" "=&r") (mult:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r")) (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && arm_arch3m && !arm_arch6" + "umull%?\\t%Q0, %R0, %1, %2" + [(set_attr "insn" "umull") + (set_attr "predicable" "yes")] +) + +(define_insn "*umulsidi3_v6" + [(set (match_operand:DI 0 "s_register_operand" "=r") + (mult:DI + (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")) + (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))] + "TARGET_32BIT && arm_arch6" "umull%?\\t%Q0, %R0, %1, %2" [(set_attr "insn" "umull") (set_attr "predicable" "yes")] @@ -1220,13 +1514,40 @@ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r")) (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r"))) (match_operand:DI 1 "s_register_operand" "0")))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && arm_arch3m && !arm_arch6" "umlal%?\\t%Q0, %R0, %3, %2" [(set_attr "insn" "umlal") (set_attr "predicable" "yes")] ) -(define_insn "smulsi3_highpart" +(define_insn "*umulsidi3adddi_v6" + [(set (match_operand:DI 0 "s_register_operand" "=r") + (plus:DI + (mult:DI + (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")) + (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r"))) + (match_operand:DI 1 "s_register_operand" "0")))] + "TARGET_32BIT && arm_arch6" + "umlal%?\\t%Q0, %R0, %3, %2" + [(set_attr "insn" "umlal") + (set_attr "predicable" "yes")] +) + +(define_expand "smulsi3_highpart" + [(parallel + [(set (match_operand:SI 0 "s_register_operand" "") + (truncate:SI + (lshiftrt:DI + (mult:DI + (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")) + (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))) + (const_int 32)))) + (clobber (match_scratch:SI 3 ""))])] + "TARGET_32BIT && arm_arch3m" + "" +) + +(define_insn "*smulsi3_highpart_nov6" [(set (match_operand:SI 0 "s_register_operand" "=&r,&r") (truncate:SI (lshiftrt:DI @@ -1235,13 +1556,42 @@ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r"))) (const_int 32)))) (clobber (match_scratch:SI 3 "=&r,&r"))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && 
arm_arch3m && !arm_arch6" "smull%?\\t%3, %0, %2, %1" [(set_attr "insn" "smull") (set_attr "predicable" "yes")] ) -(define_insn "umulsi3_highpart" +(define_insn "*smulsi3_highpart_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (truncate:SI + (lshiftrt:DI + (mult:DI + (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")) + (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))) + (const_int 32)))) + (clobber (match_scratch:SI 3 "=r"))] + "TARGET_32BIT && arm_arch6" + "smull%?\\t%3, %0, %2, %1" + [(set_attr "insn" "smull") + (set_attr "predicable" "yes")] +) + +(define_expand "umulsi3_highpart" + [(parallel + [(set (match_operand:SI 0 "s_register_operand" "") + (truncate:SI + (lshiftrt:DI + (mult:DI + (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")) + (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))) + (const_int 32)))) + (clobber (match_scratch:SI 3 ""))])] + "TARGET_32BIT && arm_arch3m" + "" +) + +(define_insn "*umulsi3_highpart_nov6" [(set (match_operand:SI 0 "s_register_operand" "=&r,&r") (truncate:SI (lshiftrt:DI @@ -1250,7 +1600,22 @@ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r"))) (const_int 32)))) (clobber (match_scratch:SI 3 "=&r,&r"))] - "TARGET_ARM && arm_arch3m" + "TARGET_32BIT && arm_arch3m && !arm_arch6" + "umull%?\\t%3, %0, %2, %1" + [(set_attr "insn" "umull") + (set_attr "predicable" "yes")] +) + +(define_insn "*umulsi3_highpart_v6" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (truncate:SI + (lshiftrt:DI + (mult:DI + (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")) + (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))) + (const_int 32)))) + (clobber (match_scratch:SI 3 "=r"))] + "TARGET_32BIT && arm_arch6" "umull%?\\t%3, %0, %2, %1" [(set_attr "insn" "umull") (set_attr "predicable" "yes")] @@ -1262,7 +1627,7 @@ (match_operand:HI 1 "s_register_operand" "%r")) (sign_extend:SI (match_operand:HI 2 "s_register_operand" "r"))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smulbb%?\\t%0, %1, %2" [(set_attr "insn" "smulxy") (set_attr "predicable" "yes")] @@ -1275,7 +1640,7 @@ (const_int 16)) (sign_extend:SI (match_operand:HI 2 "s_register_operand" "r"))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smultb%?\\t%0, %1, %2" [(set_attr "insn" "smulxy") (set_attr "predicable" "yes")] @@ -1288,7 +1653,7 @@ (ashiftrt:SI (match_operand:SI 2 "s_register_operand" "r") (const_int 16))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smulbt%?\\t%0, %1, %2" [(set_attr "insn" "smulxy") (set_attr "predicable" "yes")] @@ -1302,7 +1667,7 @@ (ashiftrt:SI (match_operand:SI 2 "s_register_operand" "r") (const_int 16))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smultt%?\\t%0, %1, %2" [(set_attr "insn" "smulxy") (set_attr "predicable" "yes")] @@ -1315,7 +1680,7 @@ (match_operand:HI 2 "s_register_operand" "%r")) (sign_extend:SI (match_operand:HI 3 "s_register_operand" "r")))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smlabb%?\\t%0, %2, %3, %1" [(set_attr "insn" "smlaxy") (set_attr "predicable" "yes")] @@ -1329,7 +1694,7 @@ (match_operand:HI 2 "s_register_operand" "%r")) (sign_extend:DI (match_operand:HI 3 "s_register_operand" "r")))))] - "TARGET_ARM && arm_arch5e" + "TARGET_DSP_MULTIPLY" "smlalbb%?\\t%Q0, %R0, %2, %3" [(set_attr "insn" "smlalxy") (set_attr "predicable" "yes")]) @@ -1338,7 +1703,7 @@ [(set (match_operand:SF 0 "s_register_operand" "") (mult:SF (match_operand:SF 1 "s_register_operand" "") (match_operand:SF 2 
"arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK && !cirrus_fp_register (operands[2], SFmode)) @@ -1349,7 +1714,7 @@ [(set (match_operand:DF 0 "s_register_operand" "") (mult:DF (match_operand:DF 1 "s_register_operand" "") (match_operand:DF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK && !cirrus_fp_register (operands[2], DFmode)) @@ -1362,14 +1727,14 @@ [(set (match_operand:SF 0 "s_register_operand" "") (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "") (match_operand:SF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "") (define_expand "divdf3" [(set (match_operand:DF 0 "s_register_operand" "") (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "") (match_operand:DF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "") ;; Modulo insns @@ -1378,14 +1743,14 @@ [(set (match_operand:SF 0 "s_register_operand" "") (mod:SF (match_operand:SF 1 "s_register_operand" "") (match_operand:SF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA" + "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA" "") (define_expand "moddf3" [(set (match_operand:DF 0 "s_register_operand" "") (mod:DF (match_operand:DF 1 "s_register_operand" "") (match_operand:DF 2 "arm_float_rhs_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA" + "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA" "") ;; Boolean and,ior,xor insns @@ -1399,7 +1764,8 @@ (match_operator:DI 6 "logical_binary_operator" [(match_operand:DI 1 "s_register_operand" "") (match_operand:DI 2 "s_register_operand" "")]))] - "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))" + "TARGET_32BIT && reload_completed + && ! 
IS_IWMMXT_REGNUM (REGNO (operands[0]))" [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)])) (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))] " @@ -1418,7 +1784,7 @@ (match_operator:DI 6 "logical_binary_operator" [(sign_extend:DI (match_operand:SI 2 "s_register_operand" "")) (match_operand:DI 1 "s_register_operand" "")]))] - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)])) (set (match_dup 3) (match_op_dup:SI 6 [(ashiftrt:SI (match_dup 2) (const_int 31)) @@ -1441,7 +1807,7 @@ (ior:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")) (match_operand:DI 1 "s_register_operand" "")))] - "TARGET_ARM && operands[0] != operands[1] && reload_completed" + "TARGET_32BIT && operands[0] != operands[1] && reload_completed" [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2))) (set (match_dup 3) (match_dup 4))] " @@ -1460,7 +1826,7 @@ (xor:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")) (match_operand:DI 1 "s_register_operand" "")))] - "TARGET_ARM && operands[0] != operands[1] && reload_completed" + "TARGET_32BIT && operands[0] != operands[1] && reload_completed" [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2))) (set (match_dup 3) (match_dup 4))] " @@ -1476,7 +1842,7 @@ [(set (match_operand:DI 0 "s_register_operand" "=&r,&r") (and:DI (match_operand:DI 1 "s_register_operand" "%0,r") (match_operand:DI 2 "s_register_operand" "r,r")))] - "TARGET_ARM && ! TARGET_IWMMXT" + "TARGET_32BIT && ! TARGET_IWMMXT" "#" [(set_attr "length" "8")] ) @@ -1486,9 +1852,9 @@ (and:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "?r,0")))] - "TARGET_ARM" + "TARGET_32BIT" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" ; The zero extend of operand 2 clears the high word of the output ; operand. [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2))) @@ -1507,7 +1873,7 @@ (and:DI (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "?r,0")))] - "TARGET_ARM" + "TARGET_32BIT" "#" [(set_attr "length" "8")] ) @@ -1518,18 +1884,18 @@ (match_operand:SI 2 "reg_or_int_operand" "")))] "TARGET_EITHER" " - if (TARGET_ARM) + if (TARGET_32BIT) { if (GET_CODE (operands[2]) == CONST_INT) { arm_split_constant (AND, SImode, NULL_RTX, INTVAL (operands[2]), operands[0], - operands[1], optimize && !no_new_pseudos); + operands[1], optimize && can_create_pseudo_p ()); DONE; } } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ { if (GET_CODE (operands[2]) != CONST_INT) operands[2] = force_reg (SImode, operands[2]); @@ -1574,16 +1940,17 @@ " ) +; ??? 
Check split length for Thumb-2 (define_insn_and_split "*arm_andsi3_insn" [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r") (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))] - "TARGET_ARM" + "TARGET_32BIT" "@ and%?\\t%0, %1, %2 bic%?\\t%0, %1, #%B2 #" - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT && !(const_ok_for_arm (INTVAL (operands[2])) || const_ok_for_arm (~INTVAL (operands[2])))" @@ -1597,11 +1964,11 @@ (set_attr "predicable" "yes")] ) -(define_insn "*thumb_andsi3_insn" +(define_insn "*thumb1_andsi3_insn" [(set (match_operand:SI 0 "register_operand" "=l") (and:SI (match_operand:SI 1 "register_operand" "%0") (match_operand:SI 2 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "and\\t%0, %0, %2" [(set_attr "length" "2")] ) @@ -1614,10 +1981,10 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (and:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" + "TARGET_32BIT" "@ - and%?s\\t%0, %1, %2 - bic%?s\\t%0, %1, #%B2" + and%.\\t%0, %1, %2 + bic%.\\t%0, %1, #%B2" [(set_attr "conds" "set")] ) @@ -1628,10 +1995,10 @@ (match_operand:SI 1 "arm_not_operand" "rI,K")) (const_int 0))) (clobber (match_scratch:SI 2 "=X,r"))] - "TARGET_ARM" + "TARGET_32BIT" "@ tst%?\\t%0, %1 - bic%?s\\t%2, %0, #%B1" + bic%.\\t%2, %0, #%B1" [(set_attr "conds" "set")] ) @@ -1642,7 +2009,7 @@ (match_operand 1 "const_int_operand" "n") (match_operand 2 "const_int_operand" "n")) (const_int 0)))] - "TARGET_ARM + "TARGET_32BIT && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32 && INTVAL (operands[1]) > 0 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8 @@ -1664,13 +2031,13 @@ (match_operand:SI 3 "const_int_operand" "n")) (const_int 0))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM + "TARGET_32BIT && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32 && INTVAL (operands[2]) > 0 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)" "#" - "TARGET_ARM + "TARGET_32BIT && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32 && INTVAL (operands[2]) > 0 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8 @@ -1687,7 +2054,10 @@ << INTVAL (operands[3])); " [(set_attr "conds" "clob") - (set_attr "length" "8")] + (set (attr "length") + (if_then_else (eq_attr "is_thumb" "yes") + (const_int 12) + (const_int 8)))] ) (define_insn_and_split "*ne_zeroextractsi_shifted" @@ -1786,7 +2156,7 @@ (match_operand:SI 2 "const_int_operand" "") (match_operand:SI 3 "const_int_operand" ""))) (clobber (match_operand:SI 4 "s_register_operand" ""))] - "TARGET_THUMB" + "TARGET_THUMB1" [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2))) (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))] "{ @@ -1797,6 +2167,7 @@ }" ) +;; ??? Use Thumb-2 has bitfield insert/extract instructions. 
(define_split [(set (match_operand:SI 0 "s_register_operand" "") (match_operator:SI 1 "shiftable_operator" @@ -1824,7 +2195,7 @@ (sign_extract:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "const_int_operand" "") (match_operand:SI 3 "const_int_operand" "")))] - "TARGET_THUMB" + "TARGET_THUMB1" [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2))) (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))] "{ @@ -1871,7 +2242,7 @@ (match_operand:SI 1 "general_operand" "") (match_operand:SI 2 "general_operand" "")) (match_operand:SI 3 "reg_or_int_operand" ""))] - "TARGET_ARM" + "TARGET_ARM || arm_arch_thumb2" " { int start_bit = INTVAL (operands[2]); @@ -1879,7 +2250,38 @@ HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1; rtx target, subtarget; - target = operands[0]; + if (arm_arch_thumb2) + { + bool use_bfi = TRUE; + + if (GET_CODE (operands[3]) == CONST_INT) + { + HOST_WIDE_INT val = INTVAL (operands[3]) & mask; + + if (val == 0) + { + emit_insn (gen_insv_zero (operands[0], operands[1], + operands[2])); + DONE; + } + + /* See if the set can be done with a single orr instruction. */ + if (val == mask && const_ok_for_arm (val << start_bit)) + use_bfi = FALSE; + } + + if (use_bfi) + { + if (GET_CODE (operands[3]) != REG) + operands[3] = force_reg (SImode, operands[3]); + + emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2], + operands[3])); + DONE; + } + } + + target = copy_rtx (operands[0]); /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical subreg as the final target. */ if (GET_CODE (target) == SUBREG) @@ -2002,14 +2404,36 @@ }" ) +(define_insn "insv_zero" + [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r") + (match_operand:SI 1 "const_int_operand" "M") + (match_operand:SI 2 "const_int_operand" "M")) + (const_int 0))] + "arm_arch_thumb2" + "bfc%?\t%0, %2, %1" + [(set_attr "length" "4") + (set_attr "predicable" "yes")] +) + +(define_insn "insv_t2" + [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r") + (match_operand:SI 1 "const_int_operand" "M") + (match_operand:SI 2 "const_int_operand" "M")) + (match_operand:SI 3 "s_register_operand" "r"))] + "arm_arch_thumb2" + "bfi%?\t%0, %3, %2, %1" + [(set_attr "length" "4") + (set_attr "predicable" "yes")] +) + ; constants for op 2 will never be given to these patterns. (define_insn_and_split "*anddi_notdi_di" [(set (match_operand:DI 0 "s_register_operand" "=&r,&r") (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0")) (match_operand:DI 2 "s_register_operand" "0,r")))] - "TARGET_ARM" + "TARGET_32BIT" "#" - "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))" + "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))" [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2))) (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))] " @@ -2030,13 +2454,13 @@ (and:DI (not:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r"))) (match_operand:DI 1 "s_register_operand" "0,?r")))] - "TARGET_ARM" + "TARGET_32BIT" "@ bic%?\\t%Q0, %Q1, %2 #" ; (not (zero_extend ...)) allows us to just copy the high word from ; operand1 to operand0. 
- "TARGET_ARM + "TARGET_32BIT && reload_completed && operands[0] != operands[1]" [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1))) @@ -2057,9 +2481,9 @@ (and:DI (not:DI (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r"))) (match_operand:DI 1 "s_register_operand" "0,r")))] - "TARGET_ARM" + "TARGET_32BIT" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1))) (set (match_dup 3) (and:SI (not:SI (ashiftrt:SI (match_dup 2) (const_int 31))) @@ -2079,7 +2503,7 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r")) (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM" + "TARGET_32BIT" "bic%?\\t%0, %1, %2" [(set_attr "predicable" "yes")] ) @@ -2088,7 +2512,7 @@ [(set (match_operand:SI 0 "register_operand" "=l") (and:SI (not:SI (match_operand:SI 1 "register_operand" "l")) (match_operand:SI 2 "register_operand" "0")))] - "TARGET_THUMB" + "TARGET_THUMB1" "bic\\t%0, %0, %1" [(set_attr "length" "2")] ) @@ -2116,8 +2540,8 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r") (and:SI (not:SI (match_dup 2)) (match_dup 1)))] - "TARGET_ARM" - "bic%?s\\t%0, %1, %2" + "TARGET_32BIT" + "bic%.\\t%0, %1, %2" [(set_attr "conds" "set")] ) @@ -2128,8 +2552,8 @@ (match_operand:SI 1 "s_register_operand" "r")) (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] - "TARGET_ARM" - "bic%?s\\t%0, %1, %2" + "TARGET_32BIT" + "bic%.\\t%0, %1, %2" [(set_attr "conds" "set")] ) @@ -2137,7 +2561,7 @@ [(set (match_operand:DI 0 "s_register_operand" "=&r,&r") (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r") (match_operand:DI 2 "s_register_operand" "r,r")))] - "TARGET_ARM && ! TARGET_IWMMXT" + "TARGET_32BIT && ! 
TARGET_IWMMXT" "#" [(set_attr "length" "8") (set_attr "predicable" "yes")] @@ -2148,7 +2572,7 @@ (ior:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "0,?r")))] - "TARGET_ARM" + "TARGET_32BIT" "@ orr%?\\t%Q0, %Q1, %2 #" @@ -2161,7 +2585,7 @@ (ior:DI (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "?r,0")))] - "TARGET_ARM" + "TARGET_32BIT" "#" [(set_attr "length" "8") (set_attr "predicable" "yes")] @@ -2175,14 +2599,14 @@ " if (GET_CODE (operands[2]) == CONST_INT) { - if (TARGET_ARM) + if (TARGET_32BIT) { arm_split_constant (IOR, SImode, NULL_RTX, INTVAL (operands[2]), operands[0], operands[1], - optimize && !no_new_pseudos); + optimize && can_create_pseudo_p ()); DONE; } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ operands [2] = force_reg (SImode, operands [2]); } " @@ -2209,11 +2633,11 @@ (set_attr "predicable" "yes")] ) -(define_insn "*thumb_iorsi3" +(define_insn "*thumb1_iorsi3" [(set (match_operand:SI 0 "register_operand" "=l") (ior:SI (match_operand:SI 1 "register_operand" "%0") (match_operand:SI 2 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "orr\\t%0, %0, %2" [(set_attr "length" "2")] ) @@ -2238,8 +2662,8 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r") (ior:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" - "orr%?s\\t%0, %1, %2" + "TARGET_32BIT" + "orr%.\\t%0, %1, %2" [(set_attr "conds" "set")] ) @@ -2249,8 +2673,8 @@ (match_operand:SI 2 "arm_rhs_operand" "rI")) (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] - "TARGET_ARM" - "orr%?s\\t%0, %1, %2" + "TARGET_32BIT" + "orr%.\\t%0, %1, %2" [(set_attr "conds" "set")] ) @@ -2258,7 +2682,7 @@ [(set (match_operand:DI 0 "s_register_operand" "=&r,&r") (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r") (match_operand:DI 2 "s_register_operand" "r,r")))] - "TARGET_ARM && !TARGET_IWMMXT" + "TARGET_32BIT && !TARGET_IWMMXT" "#" [(set_attr "length" "8") (set_attr "predicable" "yes")] @@ -2269,7 +2693,7 @@ (xor:DI (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "0,?r")))] - "TARGET_ARM" + "TARGET_32BIT" "@ eor%?\\t%Q0, %Q1, %2 #" @@ -2282,7 +2706,7 @@ (xor:DI (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")) (match_operand:DI 1 "s_register_operand" "?r,0")))] - "TARGET_ARM" + "TARGET_32BIT" "#" [(set_attr "length" "8") (set_attr "predicable" "yes")] @@ -2293,7 +2717,7 @@ (xor:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "arm_rhs_operand" "")))] "TARGET_EITHER" - "if (TARGET_THUMB) + "if (TARGET_THUMB1) if (GET_CODE (operands[2]) == CONST_INT) operands[2] = force_reg (SImode, operands[2]); " @@ -2303,16 +2727,16 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (xor:SI (match_operand:SI 1 "s_register_operand" "r") (match_operand:SI 2 "arm_rhs_operand" "rI")))] - "TARGET_ARM" + "TARGET_32BIT" "eor%?\\t%0, %1, %2" [(set_attr "predicable" "yes")] ) -(define_insn "*thumb_xorsi3" +(define_insn "*thumb1_xorsi3" [(set (match_operand:SI 0 "register_operand" "=l") (xor:SI (match_operand:SI 1 "register_operand" "%0") (match_operand:SI 2 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "eor\\t%0, %0, %2" [(set_attr "length" "2")] ) @@ -2324,8 +2748,8 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r") (xor:SI (match_dup 1) (match_dup 2)))] - "TARGET_ARM" - "eor%?s\\t%0, %1, %2" + "TARGET_32BIT" + "eor%.\\t%0, %1, %2" [(set_attr "conds" "set")] ) @@ 
-2334,7 +2758,7 @@ (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r") (match_operand:SI 1 "arm_rhs_operand" "rI")) (const_int 0)))] - "TARGET_ARM" + "TARGET_32BIT" "teq%?\\t%0, %1" [(set_attr "conds" "set")] ) @@ -2349,7 +2773,7 @@ (not:SI (match_operand:SI 2 "arm_rhs_operand" ""))) (match_operand:SI 3 "arm_rhs_operand" ""))) (clobber (match_operand:SI 4 "s_register_operand" ""))] - "TARGET_ARM" + "TARGET_32BIT" [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2)) (not:SI (match_dup 3)))) (set (match_dup 0) (not:SI (match_dup 4)))] @@ -2361,12 +2785,15 @@ (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0") (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")) (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))] - "TARGET_ARM" + "TARGET_32BIT" "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3" [(set_attr "length" "8") + (set_attr "ce_count" "2") (set_attr "predicable" "yes")] ) +; ??? Are these four splitters still beneficial when the Thumb-2 bitfield +; insns are available? (define_split [(set (match_operand:SI 0 "s_register_operand" "") (match_operator:SI 1 "logical_binary_operator" @@ -2378,7 +2805,7 @@ (match_operand:SI 6 "const_int_operand" "")) (match_operand:SI 7 "s_register_operand" "")])])) (clobber (match_operand:SI 8 "s_register_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[1]) == GET_CODE (operands[9]) && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" [(set (match_dup 8) @@ -2404,7 +2831,7 @@ (match_operand:SI 3 "const_int_operand" "") (match_operand:SI 4 "const_int_operand" ""))])) (clobber (match_operand:SI 8 "s_register_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[1]) == GET_CODE (operands[9]) && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" [(set (match_dup 8) @@ -2430,7 +2857,7 @@ (match_operand:SI 6 "const_int_operand" "")) (match_operand:SI 7 "s_register_operand" "")])])) (clobber (match_operand:SI 8 "s_register_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[1]) == GET_CODE (operands[9]) && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" [(set (match_dup 8) @@ -2456,7 +2883,7 @@ (match_operand:SI 3 "const_int_operand" "") (match_operand:SI 4 "const_int_operand" ""))])) (clobber (match_operand:SI 8 "s_register_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && GET_CODE (operands[1]) == GET_CODE (operands[9]) && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" [(set (match_dup 8) @@ -2480,7 +2907,7 @@ (smax:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "arm_rhs_operand" ""))) (clobber (reg:CC CC_REGNUM))])] - "TARGET_ARM" + "TARGET_32BIT" " if (operands[2] == const0_rtx || operands[2] == constm1_rtx) { @@ -2496,7 +2923,7 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (smax:SI (match_operand:SI 1 "s_register_operand" "r") (const_int 0)))] - "TARGET_ARM" + "TARGET_32BIT" "bic%?\\t%0, %1, %1, asr #31" [(set_attr "predicable" "yes")] ) @@ -2505,12 +2932,12 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (smax:SI (match_operand:SI 1 "s_register_operand" "r") (const_int -1)))] - "TARGET_ARM" + "TARGET_32BIT" "orr%?\\t%0, %1, %1, asr #31" [(set_attr "predicable" "yes")] ) -(define_insn "*smax_insn" +(define_insn "*arm_smax_insn" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r") (match_operand:SI 2 "arm_rhs_operand" "rI,rI"))) @@ -2529,7 +2956,7 @@ (smin:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "arm_rhs_operand" ""))) (clobber (reg:CC 
CC_REGNUM))])] - "TARGET_ARM" + "TARGET_32BIT" " if (operands[2] == const0_rtx) { @@ -2545,12 +2972,12 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (smin:SI (match_operand:SI 1 "s_register_operand" "r") (const_int 0)))] - "TARGET_ARM" + "TARGET_32BIT" "and%?\\t%0, %1, %1, asr #31" [(set_attr "predicable" "yes")] ) -(define_insn "*smin_insn" +(define_insn "*arm_smin_insn" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r") (match_operand:SI 2 "arm_rhs_operand" "rI,rI"))) @@ -2563,7 +2990,17 @@ (set_attr "length" "8,12")] ) -(define_insn "umaxsi3" +(define_expand "umaxsi3" + [(parallel [ + (set (match_operand:SI 0 "s_register_operand" "") + (umax:SI (match_operand:SI 1 "s_register_operand" "") + (match_operand:SI 2 "arm_rhs_operand" ""))) + (clobber (reg:CC CC_REGNUM))])] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_umaxsi3" [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r") (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))) @@ -2577,7 +3014,17 @@ (set_attr "length" "8,8,12")] ) -(define_insn "uminsi3" +(define_expand "uminsi3" + [(parallel [ + (set (match_operand:SI 0 "s_register_operand" "") + (umin:SI (match_operand:SI 1 "s_register_operand" "") + (match_operand:SI 2 "arm_rhs_operand" ""))) + (clobber (reg:CC CC_REGNUM))])] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_uminsi3" [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r") (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))) @@ -2597,17 +3044,22 @@ [(match_operand:SI 1 "s_register_operand" "r") (match_operand:SI 2 "s_register_operand" "r")])) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "* operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode, operands[1], operands[2]); output_asm_insn (\"cmp\\t%1, %2\", operands); + if (TARGET_THUMB2) + output_asm_insn (\"ite\t%d3\", operands); output_asm_insn (\"str%d3\\t%1, %0\", operands); output_asm_insn (\"str%D3\\t%2, %0\", operands); return \"\"; " [(set_attr "conds" "clob") - (set_attr "length" "12") + (set (attr "length") + (if_then_else (eq_attr "is_thumb" "yes") + (const_int 14) + (const_int 12))) (set_attr "type" "store1")] ) @@ -2621,22 +3073,38 @@ (match_operand:SI 3 "arm_rhs_operand" "rI,rI")]) (match_operand:SI 1 "s_register_operand" "0,?r")])) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM && !arm_eliminable_register (operands[1])" + "TARGET_32BIT && !arm_eliminable_register (operands[1])" "* { enum rtx_code code = GET_CODE (operands[4]); + bool need_else; + + if (which_alternative != 0 || operands[3] != const0_rtx + || (code != PLUS && code != MINUS && code != IOR && code != XOR)) + need_else = true; + else + need_else = false; operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode, operands[2], operands[3]); output_asm_insn (\"cmp\\t%2, %3\", operands); + if (TARGET_THUMB2) + { + if (need_else) + output_asm_insn (\"ite\\t%d5\", operands); + else + output_asm_insn (\"it\\t%d5\", operands); + } output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands); - if (which_alternative != 0 || operands[3] != const0_rtx - || (code != PLUS && code != MINUS && code != IOR && code != XOR)) + if (need_else) output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands); return \"\"; }" [(set_attr "conds" "clob") - (set_attr "length" "12")] + (set (attr "length") + (if_then_else (eq_attr "is_thumb" "yes") + (const_int 14) + (const_int 12)))] ) @@ -2646,7 +3114,7 @@ [(set 
(match_operand:DI 0 "s_register_operand" "") (ashift:DI (match_operand:DI 1 "s_register_operand" "") (match_operand:SI 2 "reg_or_int_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT" " if (GET_CODE (operands[2]) == CONST_INT) { @@ -2671,7 +3139,7 @@ (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0") (const_int 1))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -2692,11 +3160,11 @@ " ) -(define_insn "*thumb_ashlsi3" +(define_insn "*thumb1_ashlsi3" [(set (match_operand:SI 0 "register_operand" "=l,l") (ashift:SI (match_operand:SI 1 "register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "lsl\\t%0, %1, %2" [(set_attr "length" "2")] ) @@ -2705,7 +3173,7 @@ [(set (match_operand:DI 0 "s_register_operand" "") (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "") (match_operand:SI 2 "reg_or_int_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT" " if (GET_CODE (operands[2]) == CONST_INT) { @@ -2730,7 +3198,7 @@ (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0") (const_int 1))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -2748,11 +3216,11 @@ " ) -(define_insn "*thumb_ashrsi3" +(define_insn "*thumb1_ashrsi3" [(set (match_operand:SI 0 "register_operand" "=l,l") (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "asr\\t%0, %1, %2" [(set_attr "length" "2")] ) @@ -2761,7 +3229,7 @@ [(set (match_operand:DI 0 "s_register_operand" "") (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "") (match_operand:SI 2 "reg_or_int_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT" " if (GET_CODE (operands[2]) == CONST_INT) { @@ -2786,7 +3254,7 @@ (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0") (const_int 1))) (clobber (reg:CC CC_REGNUM))] - "TARGET_ARM" + "TARGET_32BIT" "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx" [(set_attr "conds" "clob") (set_attr "length" "8")] @@ -2807,11 +3275,11 @@ " ) -(define_insn "*thumb_lshrsi3" +(define_insn "*thumb1_lshrsi3" [(set (match_operand:SI 0 "register_operand" "=l,l") (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "lsr\\t%0, %1, %2" [(set_attr "length" "2")] ) @@ -2820,7 +3288,7 @@ [(set (match_operand:SI 0 "s_register_operand" "") (rotatert:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "reg_or_int_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT" " if (GET_CODE (operands[2]) == CONST_INT) operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32); @@ -2839,13 +3307,13 @@ (match_operand:SI 2 "arm_rhs_operand" "")))] "TARGET_EITHER" " - if (TARGET_ARM) + if (TARGET_32BIT) { if (GET_CODE (operands[2]) == CONST_INT && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31) operands[2] = GEN_INT (INTVAL (operands[2]) % 32); } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ { if (GET_CODE (operands [2]) == CONST_INT) operands [2] = force_reg (SImode, operands[2]); @@ -2853,11 +3321,11 @@ " ) -(define_insn "*thumb_rotrsi3" +(define_insn "*thumb1_rotrsi3" [(set (match_operand:SI 0 "register_operand" "=l") (rotatert:SI (match_operand:SI 1 "register_operand" "0") (match_operand:SI 2 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "ror\\t%0, %0, 
%2" [(set_attr "length" "2")] ) @@ -2867,8 +3335,8 @@ (match_operator:SI 3 "shift_operator" [(match_operand:SI 1 "s_register_operand" "r") (match_operand:SI 2 "reg_or_int_operand" "rM")]))] - "TARGET_ARM" - "mov%?\\t%0, %1%S3" + "TARGET_32BIT" + "* return arm_output_shift(operands, 0);" [(set_attr "predicable" "yes") (set_attr "shift" "1") (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "") @@ -2884,8 +3352,8 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r") (match_op_dup 3 [(match_dup 1) (match_dup 2)]))] - "TARGET_ARM" - "mov%?s\\t%0, %1%S3" + "TARGET_32BIT" + "* return arm_output_shift(operands, 1);" [(set_attr "conds" "set") (set_attr "shift" "1") (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "") @@ -2900,13 +3368,13 @@ (match_operand:SI 2 "arm_rhs_operand" "rM")]) (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] - "TARGET_ARM" - "mov%?s\\t%0, %1%S3" + "TARGET_32BIT" + "* return arm_output_shift(operands, 1);" [(set_attr "conds" "set") (set_attr "shift" "1")] ) -(define_insn "*notsi_shiftsi" +(define_insn "*arm_notsi_shiftsi" [(set (match_operand:SI 0 "s_register_operand" "=r") (not:SI (match_operator:SI 3 "shift_operator" [(match_operand:SI 1 "s_register_operand" "r") @@ -2920,7 +3388,7 @@ (const_string "alu_shift_reg")))] ) -(define_insn "*notsi_shiftsi_compare0" +(define_insn "*arm_notsi_shiftsi_compare0" [(set (reg:CC_NOOV CC_REGNUM) (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator" [(match_operand:SI 1 "s_register_operand" "r") @@ -2929,7 +3397,7 @@ (set (match_operand:SI 0 "s_register_operand" "=r") (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))] "TARGET_ARM" - "mvn%?s\\t%0, %1%S3" + "mvn%.\\t%0, %1%S3" [(set_attr "conds" "set") (set_attr "shift" "1") (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "") @@ -2937,7 +3405,7 @@ (const_string "alu_shift_reg")))] ) -(define_insn "*not_shiftsi_compare0_scratch" +(define_insn "*arm_not_shiftsi_compare0_scratch" [(set (reg:CC_NOOV CC_REGNUM) (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator" [(match_operand:SI 1 "s_register_operand" "r") @@ -2945,7 +3413,7 @@ (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] "TARGET_ARM" - "mvn%?s\\t%0, %1%S3" + "mvn%.\\t%0, %1%S3" [(set_attr "conds" "set") (set_attr "shift" "1") (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "") @@ -2963,12 +3431,19 @@ (set (match_operand:SI 0 "register_operand" "") (lshiftrt:SI (match_dup 4) (match_operand:SI 3 "const_int_operand" "")))] - "TARGET_THUMB" + "TARGET_THUMB1 || arm_arch_thumb2" " { HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]); HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]); + if (arm_arch_thumb2) + { + emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2], + operands[3])); + DONE; + } + operands[3] = GEN_INT (rshift); if (lshift == 0) @@ -2982,6 +3457,28 @@ }" ) +(define_insn "extv" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "const_int_operand" "M") + (match_operand:SI 3 "const_int_operand" "M")))] + "arm_arch_thumb2" + "sbfx%?\t%0, %1, %3, %2" + [(set_attr "length" "4") + (set_attr "predicable" "yes")] +) + +(define_insn "extzv_t2" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r") + (match_operand:SI 2 "const_int_operand" "M") + (match_operand:SI 3 "const_int_operand" "M")))] + "arm_arch_thumb2" + 
"ubfx%?\t%0, %1, %3, %2" + [(set_attr "length" "4") + (set_attr "predicable" "yes")] +) + ;; Unary arithmetic insns @@ -2992,7 +3489,7 @@ (clobber (reg:CC CC_REGNUM))])] "TARGET_EITHER" " - if (TARGET_THUMB) + if (TARGET_THUMB1) { if (GET_CODE (operands[1]) != REG) operands[1] = force_reg (SImode, operands[1]); @@ -3012,11 +3509,11 @@ (set_attr "length" "8")] ) -(define_insn "*thumb_negdi2" +(define_insn "*thumb1_negdi2" [(set (match_operand:DI 0 "register_operand" "=&l") (neg:DI (match_operand:DI 1 "register_operand" "l"))) (clobber (reg:CC CC_REGNUM))] - "TARGET_THUMB" + "TARGET_THUMB1" "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1" [(set_attr "length" "6")] ) @@ -3031,15 +3528,15 @@ (define_insn "*arm_negsi2" [(set (match_operand:SI 0 "s_register_operand" "=r") (neg:SI (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM" + "TARGET_32BIT" "rsb%?\\t%0, %1, #0" [(set_attr "predicable" "yes")] ) -(define_insn "*thumb_negsi2" +(define_insn "*thumb1_negsi2" [(set (match_operand:SI 0 "register_operand" "=l") (neg:SI (match_operand:SI 1 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "neg\\t%0, %1" [(set_attr "length" "2")] ) @@ -3047,14 +3544,14 @@ (define_expand "negsf2" [(set (match_operand:SF 0 "s_register_operand" "") (neg:SF (match_operand:SF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "" ) (define_expand "negdf2" [(set (match_operand:DF 0 "s_register_operand" "") (neg:DF (match_operand:DF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "") ;; abssi2 doesn't really clobber the condition codes if a different register @@ -3066,12 +3563,17 @@ [(parallel [(set (match_operand:SI 0 "s_register_operand" "") (abs:SI (match_operand:SI 1 "s_register_operand" ""))) - (clobber (reg:CC CC_REGNUM))])] - "TARGET_ARM" - "") + (clobber (match_dup 2))])] + "TARGET_EITHER" + " + if (TARGET_THUMB1) + operands[2] = gen_rtx_SCRATCH (SImode); + else + operands[2] = gen_rtx_REG (CCmode, CC_REGNUM); +") (define_insn "*arm_abssi2" - [(set (match_operand:SI 0 "s_register_operand" "=r,&r") + [(set (match_operand:SI 0 "s_register_operand" "=r,&r") (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))) (clobber (reg:CC CC_REGNUM))] "TARGET_ARM" @@ -3084,7 +3586,21 @@ (set_attr "length" "8")] ) -(define_insn "*neg_abssi2" +(define_insn_and_split "*thumb1_abssi2" + [(set (match_operand:SI 0 "s_register_operand" "=l") + (abs:SI (match_operand:SI 1 "s_register_operand" "l"))) + (clobber (match_scratch:SI 2 "=&l"))] + "TARGET_THUMB1" + "#" + "TARGET_THUMB1 && reload_completed" + [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31))) + (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2))) + (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))] + "" + [(set_attr "length" "6")] +) + +(define_insn "*arm_neg_abssi2" [(set (match_operand:SI 0 "s_register_operand" "=r,&r") (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))) (clobber (reg:CC CC_REGNUM))] @@ -3098,36 +3614,50 @@ (set_attr "length" "8")] ) +(define_insn_and_split "*thumb1_neg_abssi2" + [(set (match_operand:SI 0 "s_register_operand" "=l") + (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l")))) + (clobber (match_scratch:SI 2 "=&l"))] + "TARGET_THUMB1" + "#" + "TARGET_THUMB1 && reload_completed" + [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31))) + (set 
(match_dup 0) (minus:SI (match_dup 2) (match_dup 1))) + (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))] + "" + [(set_attr "length" "6")] +) + (define_expand "abssf2" [(set (match_operand:SF 0 "s_register_operand" "") (abs:SF (match_operand:SF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" "") (define_expand "absdf2" [(set (match_operand:DF 0 "s_register_operand" "") (abs:DF (match_operand:DF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" "") (define_expand "sqrtsf2" [(set (match_operand:SF 0 "s_register_operand" "") (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "") (define_expand "sqrtdf2" [(set (match_operand:DF 0 "s_register_operand" "") (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "") (define_insn_and_split "one_cmpldi2" [(set (match_operand:DI 0 "s_register_operand" "=&r,&r") (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))] - "TARGET_ARM" + "TARGET_32BIT" "#" - "TARGET_ARM && reload_completed" + "TARGET_32BIT && reload_completed" [(set (match_dup 0) (not:SI (match_dup 1))) (set (match_dup 2) (not:SI (match_dup 3)))] " @@ -3151,15 +3681,15 @@ (define_insn "*arm_one_cmplsi2" [(set (match_operand:SI 0 "s_register_operand" "=r") (not:SI (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM" + "TARGET_32BIT" "mvn%?\\t%0, %1" [(set_attr "predicable" "yes")] ) -(define_insn "*thumb_one_cmplsi2" +(define_insn "*thumb1_one_cmplsi2" [(set (match_operand:SI 0 "register_operand" "=l") (not:SI (match_operand:SI 1 "register_operand" "l")))] - "TARGET_THUMB" + "TARGET_THUMB1" "mvn\\t%0, %1" [(set_attr "length" "2")] ) @@ -3170,8 +3700,8 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r") (not:SI (match_dup 1)))] - "TARGET_ARM" - "mvn%?s\\t%0, %1" + "TARGET_32BIT" + "mvn%.\\t%0, %1" [(set_attr "conds" "set")] ) @@ -3180,8 +3710,8 @@ (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r")) (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] - "TARGET_ARM" - "mvn%?s\\t%0, %1" + "TARGET_32BIT" + "mvn%.\\t%0, %1" [(set_attr "conds" "set")] ) @@ -3190,7 +3720,7 @@ (define_expand "floatsisf2" [(set (match_operand:SF 0 "s_register_operand" "") (float:SF (match_operand:SI 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -3202,7 +3732,7 @@ (define_expand "floatsidf2" [(set (match_operand:DF 0 "s_register_operand" "") (float:DF (match_operand:SI 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -3214,7 +3744,7 @@ (define_expand "fix_truncsfsi2" [(set (match_operand:SI 0 "s_register_operand" "") (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -3230,7 +3760,7 @@ (define_expand "fix_truncdfsi2" [(set (match_operand:SI 0 "s_register_operand" "") (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" " if (TARGET_MAVERICK) { @@ -3247,13 +3777,20 @@ [(set (match_operand:SF 0 "s_register_operand" 
"") (float_truncate:SF (match_operand:DF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" "" ) ;; Zero and sign extension instructions. -(define_insn "zero_extendsidi2" +(define_expand "zero_extendsidi2" + [(set (match_operand:DI 0 "s_register_operand" "") + (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_zero_extendsidi2" [(set (match_operand:DI 0 "s_register_operand" "=r") (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))] "TARGET_ARM" @@ -3267,13 +3804,20 @@ (set_attr "predicable" "yes")] ) -(define_insn "zero_extendqidi2" +(define_expand "zero_extendqidi2" + [(set (match_operand:DI 0 "s_register_operand" "") + (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_zero_extendqidi2" [(set (match_operand:DI 0 "s_register_operand" "=r,r") (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))] "TARGET_ARM" "@ and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0 - ldr%?b\\t%Q0, %1\;mov%?\\t%R0, #0" + ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0" [(set_attr "length" "8") (set_attr "predicable" "yes") (set_attr "type" "*,load_byte") @@ -3281,7 +3825,14 @@ (set_attr "neg_pool_range" "*,4084")] ) -(define_insn "extendsidi2" +(define_expand "extendsidi2" + [(set (match_operand:DI 0 "s_register_operand" "") + (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))] + "TARGET_32BIT" + "" +) + +(define_insn "*arm_extendsidi2" [(set (match_operand:DI 0 "s_register_operand" "=r") (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))] "TARGET_ARM" @@ -3305,7 +3856,7 @@ "TARGET_EITHER" " { - if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM) + if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM) { emit_insn (gen_rtx_SET (VOIDmode, operands[0], gen_rtx_ZERO_EXTEND (SImode, operands[1]))); @@ -3333,10 +3884,10 @@ }" ) -(define_insn "*thumb_zero_extendhisi2" +(define_insn "*thumb1_zero_extendhisi2" [(set (match_operand:SI 0 "register_operand" "=l") (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))] - "TARGET_THUMB && !arm_arch6" + "TARGET_THUMB1 && !arm_arch6" "* rtx mem = XEXP (operands[1], 0); @@ -3375,10 +3926,10 @@ (set_attr "pool_range" "60")] ) -(define_insn "*thumb_zero_extendhisi2_v6" +(define_insn "*thumb1_zero_extendhisi2_v6" [(set (match_operand:SI 0 "register_operand" "=l,l") (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))] - "TARGET_THUMB && arm_arch6" + "TARGET_THUMB1 && arm_arch6" "* rtx mem; @@ -3426,7 +3977,7 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))] "TARGET_ARM && arm_arch4 && !arm_arch6" - "ldr%?h\\t%0, %1" + "ldr%(h%)\\t%0, %1" [(set_attr "type" "load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "256") @@ -3439,7 +3990,7 @@ "TARGET_ARM && arm_arch6" "@ uxth%?\\t%0, %1 - ldr%?h\\t%0, %1" + ldr%(h%)\\t%0, %1" [(set_attr "type" "alu_shift,load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "*,256") @@ -3450,7 +4001,7 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r")) (match_operand:SI 2 "s_register_operand" "r")))] - "TARGET_ARM && arm_arch6" + "TARGET_INT_SIMD" "uxtah%?\\t%0, %2, %1" [(set_attr "type" "alu_shift") (set_attr "predicable" "yes")] @@ -3496,20 +4047,20 @@ " ) -(define_insn "*thumb_zero_extendqisi2" +(define_insn "*thumb1_zero_extendqisi2" [(set 
(match_operand:SI 0 "register_operand" "=l") (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))] - "TARGET_THUMB && !arm_arch6" + "TARGET_THUMB1 && !arm_arch6" "ldrb\\t%0, %1" [(set_attr "length" "2") (set_attr "type" "load_byte") (set_attr "pool_range" "32")] ) -(define_insn "*thumb_zero_extendqisi2_v6" +(define_insn "*thumb1_zero_extendqisi2_v6" [(set (match_operand:SI 0 "register_operand" "=l,l") (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))] - "TARGET_THUMB && arm_arch6" + "TARGET_THUMB1 && arm_arch6" "@ uxtb\\t%0, %1 ldrb\\t%0, %1" @@ -3522,7 +4073,7 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))] "TARGET_ARM && !arm_arch6" - "ldr%?b\\t%0, %1\\t%@ zero_extendqisi2" + "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2" [(set_attr "type" "load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "4096") @@ -3534,8 +4085,8 @@ (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))] "TARGET_ARM && arm_arch6" "@ - uxtb%?\\t%0, %1 - ldr%?b\\t%0, %1\\t%@ zero_extendqisi2" + uxtb%(%)\\t%0, %1 + ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2" [(set_attr "type" "alu_shift,load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "*,4096") @@ -3546,9 +4097,10 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r")) (match_operand:SI 2 "s_register_operand" "r")))] - "TARGET_ARM && arm_arch6" + "TARGET_INT_SIMD" "uxtab%?\\t%0, %2, %1" [(set_attr "predicable" "yes") + (set_attr "insn" "xtab") (set_attr "type" "alu_shift")] ) @@ -3556,7 +4108,7 @@ [(set (match_operand:SI 0 "s_register_operand" "") (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0))) (clobber (match_operand:SI 2 "s_register_operand" ""))] - "TARGET_ARM && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN" + "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! 
BYTES_BIG_ENDIAN" [(set (match_dup 2) (match_dup 1)) (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))] "" @@ -3566,7 +4118,7 @@ [(set (match_operand:SI 0 "s_register_operand" "") (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3))) (clobber (match_operand:SI 2 "s_register_operand" ""))] - "TARGET_ARM && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN" + "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN" [(set (match_dup 2) (match_dup 1)) (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))] "" @@ -3576,7 +4128,7 @@ [(set (reg:CC_Z CC_REGNUM) (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r") (const_int 0)))] - "TARGET_ARM" + "TARGET_32BIT" "tst\\t%0, #255" [(set_attr "conds" "set")] ) @@ -3593,9 +4145,9 @@ { if (GET_CODE (operands[1]) == MEM) { - if (TARGET_THUMB) + if (TARGET_THUMB1) { - emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1])); + emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1])); DONE; } else if (arm_arch4) @@ -3617,8 +4169,8 @@ if (arm_arch6) { - if (TARGET_THUMB) - emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1])); + if (TARGET_THUMB1) + emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1])); else emit_insn (gen_rtx_SET (VOIDmode, operands[0], gen_rtx_SIGN_EXTEND (SImode, operands[1]))); @@ -3631,11 +4183,11 @@ }" ) -(define_insn "thumb_extendhisi2" +(define_insn "thumb1_extendhisi2" [(set (match_operand:SI 0 "register_operand" "=l") (sign_extend:SI (match_operand:HI 1 "memory_operand" "m"))) (clobber (match_scratch:SI 2 "=&l"))] - "TARGET_THUMB && !arm_arch6" + "TARGET_THUMB1 && !arm_arch6" "* { rtx ops[4]; @@ -3692,11 +4244,11 @@ ;; we try to verify the operands. Fortunately, we don't really need ;; the early-clobber: we can always use operand 0 if operand 2 ;; overlaps the address. -(define_insn "*thumb_extendhisi2_insn_v6" +(define_insn "*thumb1_extendhisi2_insn_v6" [(set (match_operand:SI 0 "register_operand" "=l,l") (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m"))) (clobber (match_scratch:SI 2 "=X,l"))] - "TARGET_THUMB && arm_arch6" + "TARGET_THUMB1 && arm_arch6" "* { rtx ops[4]; @@ -3754,6 +4306,7 @@ (set_attr "pool_range" "*,1020")] ) +;; This pattern will only be used when ldsh is not available (define_expand "extendhisi2_mem" [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" ""))) (set (match_dup 3) @@ -3793,20 +4346,21 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))] "TARGET_ARM && arm_arch4 && !arm_arch6" - "ldr%?sh\\t%0, %1" + "ldr%(sh%)\\t%0, %1" [(set_attr "type" "load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "256") (set_attr "neg_pool_range" "244")] ) +;; ??? 
Check Thumb-2 pool range (define_insn "*arm_extendhisi2_v6" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))] - "TARGET_ARM && arm_arch6" + "TARGET_32BIT && arm_arch6" "@ sxth%?\\t%0, %1 - ldr%?sh\\t%0, %1" + ldr%(sh%)\\t%0, %1" [(set_attr "type" "alu_shift,load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "*,256") @@ -3817,13 +4371,13 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r")) (match_operand:SI 2 "s_register_operand" "r")))] - "TARGET_ARM && arm_arch6" + "TARGET_INT_SIMD" "sxtah%?\\t%0, %2, %1" ) (define_expand "extendqihi2" [(set (match_dup 2) - (ashift:SI (match_operand:QI 1 "general_operand" "") + (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "") (const_int 24))) (set (match_operand:HI 0 "s_register_operand" "") (ashiftrt:SI (match_dup 2) @@ -3846,11 +4400,11 @@ }" ) -(define_insn "*extendqihi_insn" +(define_insn "*arm_extendqihi_insn" [(set (match_operand:HI 0 "s_register_operand" "=r") - (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))] + (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))] "TARGET_ARM && arm_arch4" - "ldr%?sb\\t%0, %1" + "ldr%(sb%)\\t%0, %1" [(set_attr "type" "load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "256") @@ -3859,7 +4413,7 @@ (define_expand "extendqisi2" [(set (match_dup 2) - (ashift:SI (match_operand:QI 1 "general_operand" "") + (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "") (const_int 24))) (set (match_operand:SI 0 "s_register_operand" "") (ashiftrt:SI (match_dup 2) @@ -3891,9 +4445,9 @@ (define_insn "*arm_extendqisi" [(set (match_operand:SI 0 "s_register_operand" "=r") - (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))] + (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))] "TARGET_ARM && arm_arch4 && !arm_arch6" - "ldr%?sb\\t%0, %1" + "ldr%(sb%)\\t%0, %1" [(set_attr "type" "load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "256") @@ -3902,11 +4456,12 @@ (define_insn "*arm_extendqisi_v6" [(set (match_operand:SI 0 "s_register_operand" "=r,r") - (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))] + (sign_extend:SI + (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))] "TARGET_ARM && arm_arch6" "@ sxtb%?\\t%0, %1 - ldr%?sb\\t%0, %1" + ldr%(sb%)\\t%0, %1" [(set_attr "type" "alu_shift,load_byte") (set_attr "predicable" "yes") (set_attr "pool_range" "*,256") @@ -3917,16 +4472,17 @@ [(set (match_operand:SI 0 "s_register_operand" "=r") (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r")) (match_operand:SI 2 "s_register_operand" "r")))] - "TARGET_ARM && arm_arch6" + "TARGET_INT_SIMD" "sxtab%?\\t%0, %2, %1" [(set_attr "type" "alu_shift") + (set_attr "insn" "xtab") (set_attr "predicable" "yes")] ) -(define_insn "*thumb_extendqisi2" +(define_insn "*thumb1_extendqisi2" [(set (match_operand:SI 0 "register_operand" "=l,l") (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))] - "TARGET_THUMB && !arm_arch6" + "TARGET_THUMB1 && !arm_arch6" "* { rtx ops[3]; @@ -4001,10 +4557,10 @@ (set_attr "pool_range" "32,32")] ) -(define_insn "*thumb_extendqisi2_v6" +(define_insn "*thumb1_extendqisi2_v6" [(set (match_operand:SI 0 "register_operand" "=l,l,l") (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))] - "TARGET_THUMB && arm_arch6" + "TARGET_THUMB1 && arm_arch6" "* { rtx ops[3]; @@ -4084,7 +4640,7 @@ 
(define_expand "extendsfdf2" [(set (match_operand:DF 0 "s_register_operand" "") (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT" + "TARGET_32BIT && TARGET_HARD_FLOAT" "" ) @@ -4154,7 +4710,7 @@ (match_operand:DI 1 "general_operand" ""))] "TARGET_EITHER" " - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (DImode, operands[1]); @@ -4190,7 +4746,7 @@ (define_split [(set (match_operand:ANY64 0 "arm_general_register_operand" "") (match_operand:ANY64 1 "const_double_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && reload_completed && (arm_const_double_inline_cost (operands[1]) <= ((optimize_size || arm_ld_sched) ? 3 : 4))" @@ -4282,10 +4838,10 @@ ;;; ??? This was originally identical to the movdf_insn pattern. ;;; ??? The 'i' constraint looks funny, but it should always be replaced by ;;; thumb_reorg with a memory reference. -(define_insn "*thumb_movdi_insn" +(define_insn "*thumb1_movdi_insn" [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r") (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))] - "TARGET_THUMB + "TARGET_THUMB1 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && ( register_operand (operands[0], DImode) || register_operand (operands[1], DImode))" @@ -4330,7 +4886,10 @@ (match_operand:SI 1 "general_operand" ""))] "TARGET_EITHER" " - if (TARGET_ARM) + { + rtx base, offset, tmp; + + if (TARGET_32BIT) { /* Everything except mem = const or mem = mem can be done easily. */ if (GET_CODE (operands[0]) == MEM) @@ -4342,19 +4901,40 @@ { arm_split_constant (SET, SImode, NULL_RTX, INTVAL (operands[1]), operands[0], NULL_RTX, - optimize && !no_new_pseudos); + optimize && can_create_pseudo_p ()); DONE; } + + if (TARGET_USE_MOVT && !target_word_relocations + && GET_CODE (operands[1]) == SYMBOL_REF + && !flag_pic && !arm_tls_referenced_p (operands[1])) + { + arm_emit_movpair (operands[0], operands[1]); + DONE; + } } - else /* TARGET_THUMB.... */ + else /* TARGET_THUMB1... */ { - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (SImode, operands[1]); } } + if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P) + { + split_const (operands[1], &base, &offset); + if (GET_CODE (base) == SYMBOL_REF + && !offset_within_block_p (base, INTVAL (offset))) + { + tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0]; + emit_move_insn (tmp, base); + emit_insn (gen_addsi3 (operands[0], tmp, offset)); + DONE; + } + } + /* Recognize the case where operand[1] is a reference to thread-local data and load its address to a register. */ if (arm_tls_referenced_p (operands[1])) @@ -4371,7 +4951,8 @@ gcc_assert (GET_CODE (tmp) == SYMBOL_REF); gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0); - tmp = legitimize_tls_address (tmp, no_new_pseudos ? operands[0] : 0); + tmp = legitimize_tls_address (tmp, + !can_create_pseudo_p () ? operands[0] : 0); if (addend) { tmp = gen_rtx_PLUS (SImode, tmp, addend); @@ -4384,32 +4965,59 @@ || symbol_mentioned_p (operands[1]) || label_mentioned_p (operands[1]))) operands[1] = legitimize_pic_address (operands[1], SImode, - (no_new_pseudos ? operands[0] : 0)); + (!can_create_pseudo_p () + ? operands[0] + : 0)); + } " ) +;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and +;; LO_SUM adds in the high bits. Fortunately these are opaque operations +;; so this does not matter. 
+(define_insn "*arm_movt" + [(set (match_operand:SI 0 "nonimmediate_operand" "=r") + (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0") + (match_operand:SI 2 "general_operand" "i")))] + "TARGET_32BIT" + "movt%?\t%0, #:upper16:%c2" + [(set_attr "predicable" "yes") + (set_attr "length" "4")] +) + +(define_insn "*arm_movw" + [(set (match_operand:SI 0 "nonimmediate_operand" "=r") + (high:SI (match_operand:SI 1 "general_operand" "i")))] + "TARGET_32BIT" + "movw%?\t%0, #:lower16:%c1" + [(set_attr "predicable" "yes") + (set_attr "length" "4")] +) + (define_insn "*arm_movsi_insn" - [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r, m") - (match_operand:SI 1 "general_operand" "rI,K,mi,r"))] + [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m") + (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))] "TARGET_ARM && ! TARGET_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_VFP) && ( register_operand (operands[0], SImode) || register_operand (operands[1], SImode))" "@ mov%?\\t%0, %1 + mov%?\\t%0, %1 mvn%?\\t%0, #%B1 + movw%?\\t%0, %1 ldr%?\\t%0, %1 str%?\\t%1, %0" - [(set_attr "type" "*,*,load1,store1") + [(set_attr "type" "*,*,*,*,load1,store1") (set_attr "predicable" "yes") - (set_attr "pool_range" "*,*,4096,*") - (set_attr "neg_pool_range" "*,*,4084,*")] + (set_attr "pool_range" "*,*,*,*,4096,*") + (set_attr "neg_pool_range" "*,*,*,*,4084,*")] ) (define_split [(set (match_operand:SI 0 "arm_general_register_operand" "") (match_operand:SI 1 "const_int_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && (!(const_ok_for_arm (INTVAL (operands[1])) || const_ok_for_arm (~INTVAL (operands[1]))))" [(clobber (const_int 0))] @@ -4420,10 +5028,10 @@ " ) -(define_insn "*thumb_movsi_insn" - [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh") - (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))] - "TARGET_THUMB +(define_insn "*thumb1_movsi_insn" + [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk") + (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))] + "TARGET_THUMB1 && ( register_operand (operands[0], SImode) || register_operand (operands[1], SImode))" "@ @@ -4444,7 +5052,7 @@ (define_split [(set (match_operand:SI 0 "register_operand" "") (match_operand:SI 1 "const_int_operand" ""))] - "TARGET_THUMB && satisfies_constraint_J (operands[1])" + "TARGET_THUMB1 && satisfies_constraint_J (operands[1])" [(set (match_dup 0) (match_dup 1)) (set (match_dup 0) (neg:SI (match_dup 0)))] "operands[1] = GEN_INT (- INTVAL (operands[1]));" @@ -4453,7 +5061,7 @@ (define_split [(set (match_operand:SI 0 "register_operand" "") (match_operand:SI 1 "const_int_operand" ""))] - "TARGET_THUMB && satisfies_constraint_K (operands[1])" + "TARGET_THUMB1 && satisfies_constraint_K (operands[1])" [(set (match_dup 0) (match_dup 1)) (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))] " @@ -4494,55 +5102,22 @@ (set (attr "neg_pool_range") (const_int 4084))] ) -(define_insn "pic_load_addr_thumb" +(define_insn "pic_load_addr_thumb1" [(set (match_operand:SI 0 "s_register_operand" "=l") (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))] - "TARGET_THUMB && flag_pic" + "TARGET_THUMB1 && flag_pic" "ldr\\t%0, %1" [(set_attr "type" "load1") (set (attr "pool_range") (const_int 1024))] ) -;; This variant is used for AOF assembly, since it needs to mention the -;; pic register in the rtl. 
-(define_expand "pic_load_addr_based" - [(set (match_operand:SI 0 "s_register_operand" "") - (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))] - "TARGET_ARM && flag_pic" - "operands[2] = cfun->machine->pic_reg;" -) - -(define_insn "*pic_load_addr_based_insn" - [(set (match_operand:SI 0 "s_register_operand" "=r") - (unspec:SI [(match_operand 1 "" "") - (match_operand 2 "s_register_operand" "r")] - UNSPEC_PIC_SYM))] - "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg" - "* -#ifdef AOF_ASSEMBLER - operands[1] = aof_pic_entry (operands[1]); -#endif - output_asm_insn (\"ldr%?\\t%0, %a1\", operands); - return \"\"; - " - [(set_attr "type" "load1") - (set (attr "pool_range") - (if_then_else (eq_attr "is_thumb" "yes") - (const_int 1024) - (const_int 4096))) - (set (attr "neg_pool_range") - (if_then_else (eq_attr "is_thumb" "yes") - (const_int 0) - (const_int 4084)))] -) - (define_insn "pic_add_dot_plus_four" [(set (match_operand:SI 0 "register_operand" "=r") - (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0") - (const (plus:SI (pc) (const_int 4))))] - UNSPEC_PIC_BASE)) - (use (match_operand 2 "" ""))] - "TARGET_THUMB" + (unspec:SI [(match_operand:SI 1 "register_operand" "0") + (const_int 4) + (match_operand 2 "" "")] + UNSPEC_PIC_BASE))] + "TARGET_THUMB1" "* (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\", INTVAL (operands[2])); @@ -4553,10 +5128,10 @@ (define_insn "pic_add_dot_plus_eight" [(set (match_operand:SI 0 "register_operand" "=r") - (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r") - (const (plus:SI (pc) (const_int 8))))] - UNSPEC_PIC_BASE)) - (use (match_operand 2 "" ""))] + (unspec:SI [(match_operand:SI 1 "register_operand" "r") + (const_int 8) + (match_operand 2 "" "")] + UNSPEC_PIC_BASE))] "TARGET_ARM" "* (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\", @@ -4568,10 +5143,10 @@ (define_insn "tls_load_dot_plus_eight" [(set (match_operand:SI 0 "register_operand" "+r") - (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r") - (const (plus:SI (pc) (const_int 8))))] - UNSPEC_PIC_BASE))) - (use (match_operand 2 "" ""))] + (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r") + (const_int 8) + (match_operand 2 "" "")] + UNSPEC_PIC_BASE)))] "TARGET_ARM" "* (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\", @@ -4586,21 +5161,31 @@ ;; tls_load_dot_plus_eight by a peephole. 
(define_peephole2 - [(parallel [(set (match_operand:SI 0 "register_operand" "") - (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "") - (const (plus:SI (pc) (const_int 8))))] - UNSPEC_PIC_BASE)) - (use (label_ref (match_operand 1 "" "")))]) + [(set (match_operand:SI 0 "register_operand" "") + (unspec:SI [(match_operand:SI 3 "register_operand" "") + (const_int 8) + (match_operand 1 "" "")] + UNSPEC_PIC_BASE)) (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))] "TARGET_ARM && peep2_reg_dead_p (2, operands[0])" - [(parallel [(set (match_dup 2) - (mem:SI (unspec:SI [(plus:SI (match_dup 3) - (const (plus:SI (pc) (const_int 8))))] - UNSPEC_PIC_BASE))) - (use (label_ref (match_dup 1)))])] + [(set (match_dup 2) + (mem:SI (unspec:SI [(match_dup 3) + (const_int 8) + (match_dup 1)] + UNSPEC_PIC_BASE)))] "" ) +(define_insn "pic_offset_arm" + [(set (match_operand:SI 0 "register_operand" "=r") + (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r") + (unspec:SI [(match_operand:SI 2 "" "X")] + UNSPEC_PIC_OFFSET))))] + "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic" + "ldr%?\\t%0, [%1,%2]" + [(set_attr "type" "load1")] +) + (define_expand "builtin_setjmp_receiver" [(label_ref (match_operand 0 "" ""))] "flag_pic" @@ -4623,10 +5208,10 @@ (const_int 0))) (set (match_operand:SI 0 "s_register_operand" "=r,r") (match_dup 1))] - "TARGET_ARM" + "TARGET_32BIT" "@ cmp%?\\t%0, #0 - sub%?s\\t%0, %1, #0" + sub%.\\t%0, %1, #0" [(set_attr "conds" "set")] ) @@ -4742,7 +5327,7 @@ (define_expand "storehi_single_op" [(set (match_operand:HI 0 "memory_operand" "") (match_operand:HI 1 "general_operand" ""))] - "TARGET_ARM && arm_arch4" + "TARGET_32BIT && arm_arch4" " if (!s_register_operand (operands[1], HImode)) operands[1] = copy_to_mode_reg (HImode, operands[1]); @@ -4756,7 +5341,7 @@ " if (TARGET_ARM) { - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[0]) == MEM) { @@ -4800,7 +5385,7 @@ emit_insn (gen_movsi (reg, GEN_INT (val))); operands[1] = gen_lowpart (HImode, reg); } - else if (arm_arch4 && optimize && !no_new_pseudos + else if (arm_arch4 && optimize && can_create_pseudo_p () && GET_CODE (operands[1]) == MEM) { rtx reg = gen_reg_rtx (SImode); @@ -4824,12 +5409,12 @@ && GET_CODE (base = XEXP (base, 0)) == REG)) && REGNO_POINTER_ALIGN (REGNO (base)) >= 32) { - rtx new; + rtx new_rtx; - new = widen_memory_access (operands[1], SImode, - ((INTVAL (offset) & ~3) - - INTVAL (offset))); - emit_insn (gen_movsi (reg, new)); + new_rtx = widen_memory_access (operands[1], SImode, + ((INTVAL (offset) & ~3) + - INTVAL (offset))); + emit_insn (gen_movsi (reg, new_rtx)); if (((INTVAL (offset) & 2) != 0) ^ (BYTES_BIG_ENDIAN ? 1 : 0)) { @@ -4860,9 +5445,27 @@ DONE; } } - else /* TARGET_THUMB */ + else if (TARGET_THUMB2) + { + /* Thumb-2 can do everything except mem=mem and mem=const easily. */ + if (can_create_pseudo_p ()) + { + if (GET_CODE (operands[0]) != REG) + operands[1] = force_reg (HImode, operands[1]); + /* Zero extend a constant, and keep it in an SImode reg. */ + else if (GET_CODE (operands[1]) == CONST_INT) + { + rtx reg = gen_reg_rtx (SImode); + HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff; + + emit_insn (gen_movsi (reg, GEN_INT (val))); + operands[1] = gen_lowpart (HImode, reg); + } + } + } + else /* TARGET_THUMB1 */ { - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[1]) == CONST_INT) { @@ -4874,8 +5477,8 @@ /* ??? 
We shouldn't really get invalid addresses here, but this can happen if we are passed a SP (never OK for HImode/QImode) or - virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for - HImode/QImode) relative address. */ + virtual register (also rejected as illegitimate for HImode/QImode) + relative address. */ /* ??? This should perhaps be fixed elsewhere, for instance, in fixup_stack_1, by checking for other kinds of invalid addresses, e.g. a bare reference to a virtual register. This may confuse the @@ -4922,10 +5525,10 @@ " ) -(define_insn "*thumb_movhi_insn" +(define_insn "*thumb1_movhi_insn" [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l") (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))] - "TARGET_THUMB + "TARGET_THUMB1 && ( register_operand (operands[0], HImode) || register_operand (operands[1], HImode))" "* @@ -5021,8 +5624,8 @@ "@ mov%?\\t%0, %1\\t%@ movhi mvn%?\\t%0, #%B1\\t%@ movhi - str%?h\\t%1, %0\\t%@ movhi - ldr%?h\\t%0, %1\\t%@ movhi" + str%(h%)\\t%1, %0\\t%@ movhi + ldr%(h%)\\t%0, %1\\t%@ movhi" [(set_attr "type" "*,*,store1,load1") (set_attr "predicable" "yes") (set_attr "pool_range" "*,*,*,256") @@ -5043,7 +5646,7 @@ [(set (match_operand:HI 0 "memory_operand" "") (match_operand:HI 1 "register_operand" "")) (clobber (match_operand:DI 2 "register_operand" ""))] - "TARGET_THUMB" + "TARGET_THUMB1" " if (strict_memory_address_p (HImode, XEXP (operands[0], 0)) && REGNO (operands[1]) <= LAST_LO_REGNUM) @@ -5092,7 +5695,7 @@ " /* Everything except mem = const or mem = mem can be done easily */ - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[1]) == CONST_INT) { @@ -5106,8 +5709,8 @@ { /* ??? We shouldn't really get invalid addresses here, but this can happen if we are passed a SP (never OK for HImode/QImode) or - virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for - HImode/QImode) relative address. */ + virtual register (also rejected as illegitimate for HImode/QImode) + relative address. */ /* ??? This should perhaps be fixed elsewhere, for instance, in fixup_stack_1, by checking for other kinds of invalid addresses, e.g. a bare reference to a virtual register. 
This may confuse the @@ -5158,22 +5761,22 @@ (define_insn "*arm_movqi_insn" [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m") (match_operand:QI 1 "general_operand" "rI,K,m,r"))] - "TARGET_ARM + "TARGET_32BIT && ( register_operand (operands[0], QImode) || register_operand (operands[1], QImode))" "@ mov%?\\t%0, %1 mvn%?\\t%0, #%B1 - ldr%?b\\t%0, %1 - str%?b\\t%1, %0" + ldr%(b%)\\t%0, %1 + str%(b%)\\t%1, %0" [(set_attr "type" "*,*,load1,store1") (set_attr "predicable" "yes")] ) -(define_insn "*thumb_movqi_insn" +(define_insn "*thumb1_movqi_insn" [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l") (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))] - "TARGET_THUMB + "TARGET_THUMB1 && ( register_operand (operands[0], QImode) || register_operand (operands[1], QImode))" "@ @@ -5193,14 +5796,14 @@ (match_operand:SF 1 "general_operand" ""))] "TARGET_EITHER" " - if (TARGET_ARM) + if (TARGET_32BIT) { if (GET_CODE (operands[0]) == MEM) operands[1] = force_reg (SFmode, operands[1]); } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ { - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (SFmode, operands[1]); @@ -5214,7 +5817,7 @@ (define_split [(set (match_operand:SF 0 "arm_general_register_operand" "") (match_operand:SF 1 "immediate_operand" ""))] - "TARGET_ARM + "TARGET_32BIT && reload_completed && GET_CODE (operands[1]) == CONST_DOUBLE" [(set (match_dup 2) (match_dup 3))] @@ -5245,10 +5848,10 @@ ) ;;; ??? This should have alternatives for constants. -(define_insn "*thumb_movsf_insn" +(define_insn "*thumb1_movsf_insn" [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h") (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))] - "TARGET_THUMB + "TARGET_THUMB1 && ( register_operand (operands[0], SFmode) || register_operand (operands[1], SFmode))" "@ @@ -5269,14 +5872,14 @@ (match_operand:DF 1 "general_operand" ""))] "TARGET_EITHER" " - if (TARGET_ARM) + if (TARGET_32BIT) { if (GET_CODE (operands[0]) == MEM) operands[1] = force_reg (DFmode, operands[1]); } else /* TARGET_THUMB */ { - if (!no_new_pseudos) + if (can_create_pseudo_p ()) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (DFmode, operands[1]); @@ -5291,7 +5894,7 @@ [(match_operand:DF 0 "arm_reload_memory_operand" "=o") (match_operand:DF 1 "s_register_operand" "r") (match_operand:SI 2 "s_register_operand" "=&r")] - "TARGET_ARM" + "TARGET_32BIT" " { enum rtx_code code = GET_CODE (XEXP (operands[0], 0)); @@ -5359,7 +5962,7 @@ (define_insn "*thumb_movdf_insn" [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r") (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))] - "TARGET_THUMB + "TARGET_THUMB1 && ( register_operand (operands[0], DFmode) || register_operand (operands[1], DFmode))" "* @@ -5395,34 +5998,13 @@ (define_expand "movxf" [(set (match_operand:XF 0 "general_operand" "") (match_operand:XF 1 "general_operand" ""))] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA" + "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA" " if (GET_CODE (operands[0]) == MEM) operands[1] = force_reg (XFmode, operands[1]); " ) -;; Vector Moves -(define_expand "movv2si" - [(set (match_operand:V2SI 0 "nonimmediate_operand" "") - (match_operand:V2SI 1 "general_operand" ""))] - "TARGET_REALLY_IWMMXT" -{ -}) - -(define_expand "movv4hi" - [(set (match_operand:V4HI 0 "nonimmediate_operand" "") - (match_operand:V4HI 1 "general_operand" ""))] - "TARGET_REALLY_IWMMXT" -{ -}) - -(define_expand "movv8qi" - [(set 
(match_operand:V8QI 0 "nonimmediate_operand" "") - (match_operand:V8QI 1 "general_operand" ""))] - "TARGET_REALLY_IWMMXT" -{ -}) ;; load- and store-multiple insns @@ -5433,7 +6015,7 @@ [(match_par_dup 3 [(set (match_operand:SI 0 "" "") (match_operand:SI 1 "" "")) (use (match_operand:SI 2 "" ""))])] - "TARGET_ARM" + "TARGET_32BIT" { HOST_WIDE_INT offset = 0; @@ -5468,13 +6050,13 @@ (mem:SI (plus:SI (match_dup 2) (const_int 8)))) (set (match_operand:SI 6 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 2) (const_int 12))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 5" - "ldm%?ia\\t%1!, {%3, %4, %5, %6}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 5" + "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}" [(set_attr "type" "load4") (set_attr "predicable" "yes")] ) -(define_insn "*ldmsi_postinc4_thumb" +(define_insn "*ldmsi_postinc4_thumb1" [(match_parallel 0 "load_multiple_operation" [(set (match_operand:SI 1 "s_register_operand" "=l") (plus:SI (match_operand:SI 2 "s_register_operand" "1") @@ -5487,7 +6069,7 @@ (mem:SI (plus:SI (match_dup 2) (const_int 8)))) (set (match_operand:SI 6 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 2) (const_int 12))))])] - "TARGET_THUMB && XVECLEN (operands[0], 0) == 5" + "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5" "ldmia\\t%1!, {%3, %4, %5, %6}" [(set_attr "type" "load4")] ) @@ -5503,8 +6085,8 @@ (mem:SI (plus:SI (match_dup 2) (const_int 4)))) (set (match_operand:SI 5 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 2) (const_int 8))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 4" - "ldm%?ia\\t%1!, {%3, %4, %5}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 4" + "ldm%(ia%)\\t%1!, {%3, %4, %5}" [(set_attr "type" "load3") (set_attr "predicable" "yes")] ) @@ -5518,8 +6100,8 @@ (mem:SI (match_dup 2))) (set (match_operand:SI 4 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 2) (const_int 4))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 3" - "ldm%?ia\\t%1!, {%3, %4}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 3" + "ldm%(ia%)\\t%1!, {%3, %4}" [(set_attr "type" "load2") (set_attr "predicable" "yes")] ) @@ -5536,8 +6118,8 @@ (mem:SI (plus:SI (match_dup 1) (const_int 8)))) (set (match_operand:SI 5 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 1) (const_int 12))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 4" - "ldm%?ia\\t%1, {%2, %3, %4, %5}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 4" + "ldm%(ia%)\\t%1, {%2, %3, %4, %5}" [(set_attr "type" "load4") (set_attr "predicable" "yes")] ) @@ -5550,8 +6132,8 @@ (mem:SI (plus:SI (match_dup 1) (const_int 4)))) (set (match_operand:SI 4 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 1) (const_int 8))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 3" - "ldm%?ia\\t%1, {%2, %3, %4}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 3" + "ldm%(ia%)\\t%1, {%2, %3, %4}" [(set_attr "type" "load3") (set_attr "predicable" "yes")] ) @@ -5562,8 +6144,8 @@ (mem:SI (match_operand:SI 1 "s_register_operand" "r"))) (set (match_operand:SI 3 "arm_hard_register_operand" "") (mem:SI (plus:SI (match_dup 1) (const_int 4))))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 2" - "ldm%?ia\\t%1, {%2, %3}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 2" + "ldm%(ia%)\\t%1, {%2, %3}" [(set_attr "type" "load2") (set_attr "predicable" "yes")] ) @@ -5572,7 +6154,7 @@ [(match_par_dup 3 [(set (match_operand:SI 0 "" "") (match_operand:SI 1 "" "")) (use (match_operand:SI 2 "" ""))])] - "TARGET_ARM" + "TARGET_32BIT" { HOST_WIDE_INT offset = 0; @@ -5607,13 +6189,13 @@ 
(match_operand:SI 5 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 2) (const_int 12))) (match_operand:SI 6 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 5" - "stm%?ia\\t%1!, {%3, %4, %5, %6}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 5" + "stm%(ia%)\\t%1!, {%3, %4, %5, %6}" [(set_attr "predicable" "yes") (set_attr "type" "store4")] ) -(define_insn "*stmsi_postinc4_thumb" +(define_insn "*stmsi_postinc4_thumb1" [(match_parallel 0 "store_multiple_operation" [(set (match_operand:SI 1 "s_register_operand" "=l") (plus:SI (match_operand:SI 2 "s_register_operand" "1") @@ -5626,7 +6208,7 @@ (match_operand:SI 5 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 2) (const_int 12))) (match_operand:SI 6 "arm_hard_register_operand" ""))])] - "TARGET_THUMB && XVECLEN (operands[0], 0) == 5" + "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5" "stmia\\t%1!, {%3, %4, %5, %6}" [(set_attr "type" "store4")] ) @@ -5642,8 +6224,8 @@ (match_operand:SI 4 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 2) (const_int 8))) (match_operand:SI 5 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 4" - "stm%?ia\\t%1!, {%3, %4, %5}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 4" + "stm%(ia%)\\t%1!, {%3, %4, %5}" [(set_attr "predicable" "yes") (set_attr "type" "store3")] ) @@ -5657,8 +6239,8 @@ (match_operand:SI 3 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 2) (const_int 4))) (match_operand:SI 4 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 3" - "stm%?ia\\t%1!, {%3, %4}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 3" + "stm%(ia%)\\t%1!, {%3, %4}" [(set_attr "predicable" "yes") (set_attr "type" "store2")] ) @@ -5675,8 +6257,8 @@ (match_operand:SI 4 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 1) (const_int 12))) (match_operand:SI 5 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 4" - "stm%?ia\\t%1, {%2, %3, %4, %5}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 4" + "stm%(ia%)\\t%1, {%2, %3, %4, %5}" [(set_attr "predicable" "yes") (set_attr "type" "store4")] ) @@ -5689,8 +6271,8 @@ (match_operand:SI 3 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 1) (const_int 8))) (match_operand:SI 4 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 3" - "stm%?ia\\t%1, {%2, %3, %4}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 3" + "stm%(ia%)\\t%1, {%2, %3, %4}" [(set_attr "predicable" "yes") (set_attr "type" "store3")] ) @@ -5701,8 +6283,8 @@ (match_operand:SI 2 "arm_hard_register_operand" "")) (set (mem:SI (plus:SI (match_dup 1) (const_int 4))) (match_operand:SI 3 "arm_hard_register_operand" ""))])] - "TARGET_ARM && XVECLEN (operands[0], 0) == 2" - "stm%?ia\\t%1, {%2, %3}" + "TARGET_32BIT && XVECLEN (operands[0], 0) == 2" + "stm%(ia%)\\t%1, {%2, %3}" [(set_attr "predicable" "yes") (set_attr "type" "store2")] ) @@ -5718,13 +6300,13 @@ (match_operand:SI 3 "const_int_operand" "")] "TARGET_EITHER" " - if (TARGET_ARM) + if (TARGET_32BIT) { if (arm_gen_movmemqi (operands)) DONE; FAIL; } - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ { if ( INTVAL (operands[3]) != 4 || INTVAL (operands[2]) > 48) @@ -5752,7 +6334,7 @@ (clobber (match_scratch:SI 4 "=&l")) (clobber (match_scratch:SI 5 "=&l")) (clobber (match_scratch:SI 6 "=&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* return thumb_output_move_mem_multiple (3, operands);" [(set_attr "length" "4") ; This isn't 
entirely accurate... It loads as well, but in terms of @@ -5771,7 +6353,7 @@ (plus:SI (match_dup 3) (const_int 8))) (clobber (match_scratch:SI 4 "=&l")) (clobber (match_scratch:SI 5 "=&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* return thumb_output_move_mem_multiple (2, operands);" [(set_attr "length" "4") ; This isn't entirely accurate... It loads as well, but in terms of @@ -5805,26 +6387,71 @@ (match_operand:SI 2 "nonmemory_operand" "")]) (label_ref (match_operand 3 "" "")) (pc)))] - "TARGET_THUMB" + "TARGET_THUMB1 || TARGET_32BIT" " - if (thumb_cmpneg_operand (operands[2], SImode)) + if (!TARGET_THUMB1) + { + if (!arm_add_operand (operands[2], SImode)) + operands[2] = force_reg (SImode, operands[2]); + emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2], + operands[3])); + DONE; + } + if (thumb1_cmpneg_operand (operands[2], SImode)) { emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2], operands[3], operands[0])); DONE; } - if (!thumb_cmp_operand (operands[2], SImode)) + if (!thumb1_cmp_operand (operands[2], SImode)) operands[2] = force_reg (SImode, operands[2]); ") +(define_expand "cbranchsf4" + [(set (pc) (if_then_else + (match_operator 0 "arm_comparison_operator" + [(match_operand:SF 1 "s_register_operand" "") + (match_operand:SF 2 "arm_float_compare_operand" "")]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "TARGET_32BIT && TARGET_HARD_FLOAT" + "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2], + operands[3])); DONE;" +) + +(define_expand "cbranchdf4" + [(set (pc) (if_then_else + (match_operator 0 "arm_comparison_operator" + [(match_operand:DF 1 "s_register_operand" "") + (match_operand:DF 2 "arm_float_compare_operand" "")]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "TARGET_32BIT && TARGET_HARD_FLOAT" + "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2], + operands[3])); DONE;" +) + +;; this uses the Cirrus DI compare instruction +(define_expand "cbranchdi4" + [(set (pc) (if_then_else + (match_operator 0 "arm_comparison_operator" + [(match_operand:DI 1 "cirrus_fp_register" "") + (match_operand:DI 2 "cirrus_fp_register" "")]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" + "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2], + operands[3])); DONE;" +) + (define_insn "*cbranchsi4_insn" [(set (pc) (if_then_else (match_operator 0 "arm_comparison_operator" [(match_operand:SI 1 "s_register_operand" "l,*h") - (match_operand:SI 2 "thumb_cmp_operand" "lI*h,*r")]) + (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")]) (label_ref (match_operand 3 "" "")) (pc)))] - "TARGET_THUMB" + "TARGET_THUMB1" "* output_asm_insn (\"cmp\\t%1, %2\", operands); @@ -5856,11 +6483,11 @@ [(set (pc) (if_then_else (match_operator 4 "arm_comparison_operator" [(match_operand:SI 1 "s_register_operand" "l,0") - (match_operand:SI 2 "thumb_cmpneg_operand" "L,J")]) + (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")]) (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 0 "=l,l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* output_asm_insn (\"add\\t%0, %1, #%n2\", operands); @@ -5897,7 +6524,7 @@ (pc))) (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m") (match_dup 1))] - "TARGET_THUMB" + "TARGET_THUMB1" "*{ if (which_alternative == 0) output_asm_insn (\"cmp\t%0, #0\", operands); @@ -5958,7 +6585,7 @@ (neg:SI (match_operand:SI 2 "s_register_operand" "l"))]) (label_ref (match_operand 3 "" "")) (pc)))] - "TARGET_THUMB" + 
"TARGET_THUMB1" "* output_asm_insn (\"cmn\\t%1, %2\", operands); switch (get_attr_length (insn)) @@ -5996,7 +6623,7 @@ (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 4 "=l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { rtx op[3]; @@ -6040,7 +6667,7 @@ (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 4 "=l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { rtx op[3]; @@ -6082,7 +6709,7 @@ (const_int 0)]) (label_ref (match_operand 2 "" "")) (pc)))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { output_asm_insn (\"tst\\t%0, %1\", operands); @@ -6122,7 +6749,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") (and:SI (match_dup 2) (match_dup 3))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { if (which_alternative == 0) @@ -6187,7 +6814,7 @@ (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 0 "=l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { output_asm_insn (\"orr\\t%0, %2\", operands); @@ -6227,7 +6854,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") (ior:SI (match_dup 2) (match_dup 3))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { if (which_alternative == 0) @@ -6292,7 +6919,7 @@ (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 0 "=l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { output_asm_insn (\"eor\\t%0, %2\", operands); @@ -6332,7 +6959,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") (xor:SI (match_dup 2) (match_dup 3))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { if (which_alternative == 0) @@ -6397,7 +7024,7 @@ (label_ref (match_operand 3 "" "")) (pc))) (clobber (match_scratch:SI 0 "=l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { output_asm_insn (\"bic\\t%0, %2\", operands); @@ -6437,7 +7064,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m") (and:SI (not:SI (match_dup 3)) (match_dup 2))) (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { if (which_alternative == 0) @@ -6504,7 +7131,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") (plus:SI (match_dup 2) (const_int -1))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "* { rtx cond[2]; @@ -6611,7 +7238,7 @@ (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m") (plus:SI (match_dup 2) (match_dup 3))) (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))] - "TARGET_THUMB + "TARGET_THUMB1 && (GET_CODE (operands[4]) == EQ || GET_CODE (operands[4]) == NE || GET_CODE (operands[4]) == GE @@ -6690,7 +7317,7 @@ (label_ref (match_operand 4 "" "")) (pc))) (clobber (match_scratch:SI 0 "=X,X,l,l"))] - "TARGET_THUMB + "TARGET_THUMB1 && (GET_CODE (operands[3]) == EQ || GET_CODE (operands[3]) == NE || GET_CODE (operands[3]) == GE @@ -6760,7 +7387,7 @@ (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") (minus:SI (match_dup 2) (match_dup 3))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] - "TARGET_THUMB + "TARGET_THUMB1 && (GET_CODE (operands[4]) == EQ || GET_CODE (operands[4]) == NE || GET_CODE (operands[4]) == GE @@ -6837,7 +7464,7 @@ (const_int 0)]) (label_ref (match_operand 3 "" "")) (pc)))] - "TARGET_THUMB + "TARGET_THUMB1 && (GET_CODE (operands[0]) == EQ || GET_CODE (operands[0]) == NE || GET_CODE (operands[0]) == GE @@ -6870,51 +7497,18 @@ ;; Comparison and test insns -(define_expand "cmpsi" - [(match_operand:SI 0 "s_register_operand" 
"") - (match_operand:SI 1 "arm_add_operand" "")] - "TARGET_ARM" - "{ - arm_compare_op0 = operands[0]; - arm_compare_op1 = operands[1]; - DONE; - }" -) - -(define_expand "cmpsf" - [(match_operand:SF 0 "s_register_operand" "") - (match_operand:SF 1 "arm_float_compare_operand" "")] - "TARGET_ARM && TARGET_HARD_FLOAT" - " - arm_compare_op0 = operands[0]; - arm_compare_op1 = operands[1]; - DONE; - " -) - -(define_expand "cmpdf" - [(match_operand:DF 0 "s_register_operand" "") - (match_operand:DF 1 "arm_float_compare_operand" "")] - "TARGET_ARM && TARGET_HARD_FLOAT" - " - arm_compare_op0 = operands[0]; - arm_compare_op1 = operands[1]; - DONE; - " -) - (define_insn "*arm_cmpsi_insn" [(set (reg:CC CC_REGNUM) (compare:CC (match_operand:SI 0 "s_register_operand" "r,r") (match_operand:SI 1 "arm_add_operand" "rI,L")))] - "TARGET_ARM" + "TARGET_32BIT" "@ cmp%?\\t%0, %1 cmn%?\\t%0, #%n1" [(set_attr "conds" "set")] ) -(define_insn "*cmpsi_shiftsi" +(define_insn "*arm_cmpsi_shiftsi" [(set (reg:CC CC_REGNUM) (compare:CC (match_operand:SI 0 "s_register_operand" "r") (match_operator:SI 3 "shift_operator" @@ -6929,7 +7523,7 @@ (const_string "alu_shift_reg")))] ) -(define_insn "*cmpsi_shiftsi_swp" +(define_insn "*arm_cmpsi_shiftsi_swp" [(set (reg:CC_SWP CC_REGNUM) (compare:CC_SWP (match_operator:SI 3 "shift_operator" [(match_operand:SI 1 "s_register_operand" "r") @@ -6944,7 +7538,7 @@ (const_string "alu_shift_reg")))] ) -(define_insn "*cmpsi_negshiftsi_si" +(define_insn "*arm_cmpsi_negshiftsi_si" [(set (reg:CC_Z CC_REGNUM) (compare:CC_Z (neg:SI (match_operator:SI 1 "shift_operator" @@ -6970,219 +7564,54 @@ (set_attr "cirrus" "compare")] ) -;; Cirrus DF compare instruction -(define_insn "*cirrus_cmpdf" - [(set (reg:CCFP CC_REGNUM) - (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v") - (match_operand:DF 1 "cirrus_fp_register" "v")))] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" - "cfcmpd%?\\tr15, %V0, %V1" - [(set_attr "type" "mav_farith") - (set_attr "cirrus" "compare")] -) - -;; Cirrus DI compare instruction -(define_expand "cmpdi" - [(match_operand:DI 0 "cirrus_fp_register" "") - (match_operand:DI 1 "cirrus_fp_register" "")] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" - "{ - arm_compare_op0 = operands[0]; - arm_compare_op1 = operands[1]; - DONE; - }") - -(define_insn "*cirrus_cmpdi" - [(set (reg:CC CC_REGNUM) - (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v") - (match_operand:DI 1 "cirrus_fp_register" "v")))] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" - "cfcmp64%?\\tr15, %V0, %V1" - [(set_attr "type" "mav_farith") - (set_attr "cirrus" "compare")] -) - -; This insn allows redundant compares to be removed by cse, nothing should -; ever appear in the output file since (set (reg x) (reg x)) is a no-op that -; is deleted later on. The match_dup will match the mode here, so that -; mode changes of the condition codes aren't lost by this even though we don't -; specify what they are. 
- -(define_insn "*deleted_compare" - [(set (match_operand 0 "cc_register" "") (match_dup 0))] - "TARGET_ARM" - "\\t%@ deleted compare" - [(set_attr "conds" "set") - (set_attr "length" "0")] -) - - -;; Conditional branch insns - -(define_expand "beq" - [(set (pc) - (if_then_else (eq (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bne" - [(set (pc) - (if_then_else (ne (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bgt" - [(set (pc) - (if_then_else (gt (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "ble" - [(set (pc) - (if_then_else (le (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bge" - [(set (pc) - (if_then_else (ge (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "blt" - [(set (pc) - (if_then_else (lt (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bgtu" - [(set (pc) - (if_then_else (gtu (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bleu" - [(set (pc) - (if_then_else (leu (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bgeu" - [(set (pc) - (if_then_else (geu (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bltu" - [(set (pc) - (if_then_else (ltu (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bunordered" - [(set (pc) - (if_then_else (unordered (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0, - arm_compare_op1);" -) - -(define_expand "bordered" - [(set (pc) - (if_then_else (ordered (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0, - arm_compare_op1);" -) - -(define_expand "bungt" - [(set (pc) - (if_then_else (ungt (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);" -) - -(define_expand "bunlt" - [(set (pc) - (if_then_else (unlt (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) 
- (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);" +;; Cirrus DF compare instruction +(define_insn "*cirrus_cmpdf" + [(set (reg:CCFP CC_REGNUM) + (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v") + (match_operand:DF 1 "cirrus_fp_register" "v")))] + "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" + "cfcmpd%?\\tr15, %V0, %V1" + [(set_attr "type" "mav_farith") + (set_attr "cirrus" "compare")] ) -(define_expand "bunge" - [(set (pc) - (if_then_else (unge (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);" +(define_insn "*cirrus_cmpdi" + [(set (reg:CC CC_REGNUM) + (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v") + (match_operand:DI 1 "cirrus_fp_register" "v")))] + "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" + "cfcmp64%?\\tr15, %V0, %V1" + [(set_attr "type" "mav_farith") + (set_attr "cirrus" "compare")] ) -(define_expand "bunle" - [(set (pc) - (if_then_else (unle (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);" -) +; This insn allows redundant compares to be removed by cse, nothing should +; ever appear in the output file since (set (reg x) (reg x)) is a no-op that +; is deleted later on. The match_dup will match the mode here, so that +; mode changes of the condition codes aren't lost by this even though we don't +; specify what they are. -;; The following two patterns need two branch instructions, since there is -;; no single instruction that will handle all cases. 
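
The comment removed just above explains why UNEQ and LTGT are special: neither maps onto a single ARM condition code, so the corresponding branch insns kept below (now gated on TARGET_32BIT) emit two branch instructions. A hedged C sketch of source that can reach this machinery; the claim that __builtin_islessgreater is folded into one of these unordered-aware comparisons is an assumption about the middle end, not something this patch shows:

/* Sketch: an "ordered and not equal" test on doubles, which GCC
   lowers to an LTGT/UNEQ style comparison (assumption about the
   exact folding) and which the ARM back end must branch on twice.  */
void step(double a, double b, void (*on_change)(void))
{
    if (__builtin_islessgreater(a, b))
        on_change();
}
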
-(define_expand "buneq" - [(set (pc) - (if_then_else (uneq (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) - (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);" +(define_insn "*deleted_compare" + [(set (match_operand 0 "cc_register" "") (match_dup 0))] + "TARGET_32BIT" + "\\t%@ deleted compare" + [(set_attr "conds" "set") + (set_attr "length" "0")] ) -(define_expand "bltgt" + +;; Conditional branch insns + +(define_expand "cbranch_cc" [(set (pc) - (if_then_else (ltgt (match_dup 1) (const_int 0)) - (label_ref (match_operand 0 "" "")) + (if_then_else (match_operator 0 "" [(match_operand 1 "" "") + (match_operand 2 "" "")]) + (label_ref (match_operand 3 "" "")) (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);" + "TARGET_32BIT" + "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]), + operands[1], operands[2]); + operands[2] = const0_rtx;" ) ;; @@ -7195,7 +7624,7 @@ (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "* gcc_assert (!arm_ccfsm_state); @@ -7211,7 +7640,7 @@ (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" "* gcc_assert (!arm_ccfsm_state); @@ -7227,7 +7656,7 @@ [(match_operand 2 "cc_register" "") (const_int 0)]) (label_ref (match_operand 0 "" "")) (pc)))] - "TARGET_ARM" + "TARGET_32BIT" "* if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2) { @@ -7278,7 +7707,7 @@ [(match_operand 2 "cc_register" "") (const_int 0)]) (pc) (label_ref (match_operand 0 "" ""))))] - "TARGET_ARM" + "TARGET_32BIT" "* if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2) { @@ -7295,169 +7724,277 @@ ; scc insns -(define_expand "seq" +(define_expand "cstore_cc" [(set (match_operand:SI 0 "s_register_operand" "") - (eq:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);" + (match_operator:SI 1 "" [(match_operand 2 "" "") + (match_operand 3 "" "")]))] + "TARGET_32BIT" + "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]), + operands[2], operands[3]); + operands[3] = const0_rtx;" ) -(define_expand "sne" - [(set (match_operand:SI 0 "s_register_operand" "") - (ne:SI (match_dup 1) (const_int 0)))] +(define_insn "*mov_scc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)]))] "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);" + "mov%D1\\t%0, #0\;mov%d1\\t%0, #1" + [(set_attr "conds" "use") + (set_attr "length" "8")] ) -(define_expand "sgt" - [(set (match_operand:SI 0 "s_register_operand" "") - (gt:SI (match_dup 1) (const_int 0)))] +(define_insn "*mov_negscc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (neg:SI (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)])))] "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);" + "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0" + [(set_attr "conds" "use") + 
(set_attr "length" "8")] ) -(define_expand "sle" - [(set (match_operand:SI 0 "s_register_operand" "") - (le:SI (match_dup 1) (const_int 0)))] +(define_insn "*mov_notscc" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (not:SI (match_operator:SI 1 "arm_comparison_operator" + [(match_operand 2 "cc_register" "") (const_int 0)])))] "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);" + "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1" + [(set_attr "conds" "use") + (set_attr "length" "8")] ) -(define_expand "sge" +(define_expand "cstoresi4" [(set (match_operand:SI 0 "s_register_operand" "") - (ge:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);" -) + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand:SI 2 "s_register_operand" "") + (match_operand:SI 3 "reg_or_int_operand" "")]))] + "TARGET_32BIT || TARGET_THUMB1" + "{ + rtx op3, scratch, scratch2; -(define_expand "slt" - [(set (match_operand:SI 0 "s_register_operand" "") - (lt:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);" -) + if (!TARGET_THUMB1) + { + if (!arm_add_operand (operands[3], SImode)) + operands[3] = force_reg (SImode, operands[3]); + emit_insn (gen_cstore_cc (operands[0], operands[1], + operands[2], operands[3])); + DONE; + } -(define_expand "sgtu" - [(set (match_operand:SI 0 "s_register_operand" "") - (gtu:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);" -) + if (operands[3] == const0_rtx) + { + switch (GET_CODE (operands[1])) + { + case EQ: + emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2])); + break; + + case NE: + emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2])); + break; + + case LE: + scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx, + NULL_RTX, 0, OPTAB_WIDEN); + scratch = expand_binop (SImode, ior_optab, operands[2], scratch, + NULL_RTX, 0, OPTAB_WIDEN); + expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), + operands[0], 1, OPTAB_WIDEN); + break; + + case GE: + scratch = expand_unop (SImode, one_cmpl_optab, operands[2], + NULL_RTX, 1); + expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), + NULL_RTX, 1, OPTAB_WIDEN); + break; -(define_expand "sleu" - [(set (match_operand:SI 0 "s_register_operand" "") - (leu:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);" -) + case GT: + scratch = expand_binop (SImode, ashr_optab, operands[2], + GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN); + scratch = expand_binop (SImode, sub_optab, scratch, operands[2], + NULL_RTX, 0, OPTAB_WIDEN); + expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0], + 0, OPTAB_WIDEN); + break; -(define_expand "sgeu" - [(set (match_operand:SI 0 "s_register_operand" "") - (geu:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);" -) + /* LT is handled by generic code. No need for unsigned with 0. 
*/ + default: + FAIL; + } + DONE; + } -(define_expand "sltu" - [(set (match_operand:SI 0 "s_register_operand" "") - (ltu:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM" - "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);" -) + switch (GET_CODE (operands[1])) + { + case EQ: + scratch = expand_binop (SImode, sub_optab, operands[2], operands[3], + NULL_RTX, 0, OPTAB_WIDEN); + emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch)); + break; + + case NE: + scratch = expand_binop (SImode, sub_optab, operands[2], operands[3], + NULL_RTX, 0, OPTAB_WIDEN); + emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch)); + break; + + case LE: + op3 = force_reg (SImode, operands[3]); + + scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31), + NULL_RTX, 1, OPTAB_WIDEN); + scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31), + NULL_RTX, 0, OPTAB_WIDEN); + emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2, + op3, operands[2])); + break; + + case GE: + op3 = operands[3]; + if (!thumb1_cmp_operand (op3, SImode)) + op3 = force_reg (SImode, op3); + scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31), + NULL_RTX, 0, OPTAB_WIDEN); + scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31), + NULL_RTX, 1, OPTAB_WIDEN); + emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2, + operands[2], op3)); + break; + + case LEU: + op3 = force_reg (SImode, operands[3]); + scratch = force_reg (SImode, const0_rtx); + emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch, + op3, operands[2])); + break; + + case GEU: + op3 = operands[3]; + if (!thumb1_cmp_operand (op3, SImode)) + op3 = force_reg (SImode, op3); + scratch = force_reg (SImode, const0_rtx); + emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch, + operands[2], op3)); + break; + + case LTU: + op3 = operands[3]; + if (!thumb1_cmp_operand (op3, SImode)) + op3 = force_reg (SImode, op3); + scratch = gen_reg_rtx (SImode); + emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3)); + emit_insn (gen_negsi2 (operands[0], scratch)); + break; + + case GTU: + op3 = force_reg (SImode, operands[3]); + scratch = gen_reg_rtx (SImode); + emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2])); + emit_insn (gen_negsi2 (operands[0], scratch)); + break; + + /* No good sequences for GT, LT. 
*/ + default: + FAIL; + } + DONE; +}") -(define_expand "sunordered" +(define_expand "cstoresf4" [(set (match_operand:SI 0 "s_register_operand" "") - (unordered:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0, - arm_compare_op1);" + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand:SF 2 "s_register_operand" "") + (match_operand:SF 3 "arm_float_compare_operand" "")]))] + "TARGET_32BIT && TARGET_HARD_FLOAT" + "emit_insn (gen_cstore_cc (operands[0], operands[1], + operands[2], operands[3])); DONE;" ) -(define_expand "sordered" +(define_expand "cstoredf4" [(set (match_operand:SI 0 "s_register_operand" "") - (ordered:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0, - arm_compare_op1);" + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand:DF 2 "s_register_operand" "") + (match_operand:DF 3 "arm_float_compare_operand" "")]))] + "TARGET_32BIT && TARGET_HARD_FLOAT" + "emit_insn (gen_cstore_cc (operands[0], operands[1], + operands[2], operands[3])); DONE;" ) -(define_expand "sungt" +;; this uses the Cirrus DI compare instruction +(define_expand "cstoredi4" [(set (match_operand:SI 0 "s_register_operand" "") - (ungt:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, - arm_compare_op1);" + (match_operator:SI 1 "arm_comparison_operator" + [(match_operand:DI 2 "cirrus_fp_register" "") + (match_operand:DI 3 "cirrus_fp_register" "")]))] + "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK" + "emit_insn (gen_cstore_cc (operands[0], operands[1], + operands[2], operands[3])); DONE;" ) -(define_expand "sunge" - [(set (match_operand:SI 0 "s_register_operand" "") - (unge:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, - arm_compare_op1);" -) -(define_expand "sunlt" - [(set (match_operand:SI 0 "s_register_operand" "") - (unlt:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, - arm_compare_op1);" +(define_expand "cstoresi_eq0_thumb1" + [(parallel + [(set (match_operand:SI 0 "s_register_operand" "") + (eq:SI (match_operand:SI 1 "s_register_operand" "") + (const_int 0))) + (clobber (match_dup:SI 2))])] + "TARGET_THUMB1" + "operands[2] = gen_reg_rtx (SImode);" ) -(define_expand "sunle" - [(set (match_operand:SI 0 "s_register_operand" "") - (unle:SI (match_dup 1) (const_int 0)))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" - "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, - arm_compare_op1);" +(define_expand "cstoresi_ne0_thumb1" + [(parallel + [(set (match_operand:SI 0 "s_register_operand" "") + (ne:SI (match_operand:SI 1 "s_register_operand" "") + (const_int 0))) + (clobber (match_dup:SI 2))])] + "TARGET_THUMB1" + "operands[2] = gen_reg_rtx (SImode);" ) -;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with -;;; simple ARM instructions. 
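
Because Thumb-1 has neither conditional execution nor a store-flag instruction, the Thumb-1 arm of the cstoresi4 expander above open-codes every comparison; the x == 0 and x != 0 cases rely on the carry/borrow tricks spelled out in the *cstoresi_eq0_thumb1_insn and *cstoresi_ne0_thumb1_insn patterns that follow. A small C sketch, with comments paraphrasing those asm templates:

/* Sketch: Thumb-1 store-flag without conditional instructions.  */

int is_zero(int x)
{
    /* neg t, x ; adc r, t, x  -- NEG leaves the carry set only when
       x == 0, and ADC then delivers exactly that carry as 0 or 1.  */
    return x == 0;
}

int is_nonzero(int x)
{
    /* sub t, x, #1 ; sbc r, x, t  -- subtracting 1 borrows only when
       x == 0, so the SBC result is 1 exactly when x != 0.  */
    return x != 0;
}
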
-; -; (define_expand "suneq" -; [(set (match_operand:SI 0 "s_register_operand" "") -; (uneq:SI (match_dup 1) (const_int 0)))] -; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" -; "gcc_unreachable ();" -; ) -; -; (define_expand "sltgt" -; [(set (match_operand:SI 0 "s_register_operand" "") -; (ltgt:SI (match_dup 1) (const_int 0)))] -; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" -; "gcc_unreachable ();" -; ) +(define_insn "*cstoresi_eq0_thumb1_insn" + [(set (match_operand:SI 0 "s_register_operand" "=&l,l") + (eq:SI (match_operand:SI 1 "s_register_operand" "l,0") + (const_int 0))) + (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))] + "TARGET_THUMB1" + "@ + neg\\t%0, %1\;adc\\t%0, %0, %1 + neg\\t%2, %1\;adc\\t%0, %1, %2" + [(set_attr "length" "4")] +) -(define_insn "*mov_scc" - [(set (match_operand:SI 0 "s_register_operand" "=r") - (match_operator:SI 1 "arm_comparison_operator" - [(match_operand 2 "cc_register" "") (const_int 0)]))] - "TARGET_ARM" - "mov%D1\\t%0, #0\;mov%d1\\t%0, #1" - [(set_attr "conds" "use") - (set_attr "length" "8")] +(define_insn "*cstoresi_ne0_thumb1_insn" + [(set (match_operand:SI 0 "s_register_operand" "=l") + (ne:SI (match_operand:SI 1 "s_register_operand" "0") + (const_int 0))) + (clobber (match_operand:SI 2 "s_register_operand" "=l"))] + "TARGET_THUMB1" + "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2" + [(set_attr "length" "4")] ) -(define_insn "*mov_negscc" - [(set (match_operand:SI 0 "s_register_operand" "=r") - (neg:SI (match_operator:SI 1 "arm_comparison_operator" - [(match_operand 2 "cc_register" "") (const_int 0)])))] - "TARGET_ARM" - "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0" - [(set_attr "conds" "use") - (set_attr "length" "8")] +(define_insn "cstoresi_nltu_thumb1" + [(set (match_operand:SI 0 "s_register_operand" "=l,l") + (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h") + (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))] + "TARGET_THUMB1" + "cmp\\t%1, %2\;sbc\\t%0, %0, %0" + [(set_attr "length" "4")] ) -(define_insn "*mov_notscc" - [(set (match_operand:SI 0 "s_register_operand" "=r") - (not:SI (match_operator:SI 1 "arm_comparison_operator" - [(match_operand 2 "cc_register" "") (const_int 0)])))] - "TARGET_ARM" - "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1" - [(set_attr "conds" "use") - (set_attr "length" "8")] +;; Used as part of the expansion of thumb les sequence. 
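
For the unsigned orderings the same expander leans on two helpers: cstoresi_nltu_thumb1 just above materializes -(a < b) for unsigned operands with a cmp/sbc pair, and thumb1_addsi3_addgeu below folds (c >= d) into an addition with a cmp/adc pair. A C sketch of sources that take the LTU and GEU paths, read off the expander cases above:

/* Sketch: unsigned store-flag on Thumb-1.  */

unsigned below(unsigned a, unsigned b)
{
    /* LTU case: cstoresi_nltu_thumb1 produces 0 or -1, which the
       expander then negates to 0 or 1.  */
    return a < b;
}

unsigned at_least(unsigned a, unsigned b)
{
    /* GEU case: thumb1_addsi3_addgeu with both addends forced to
       zero, i.e. 0 + 0 + (a >= b).  */
    return a >= b;
}
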
+(define_insn "thumb1_addsi3_addgeu" + [(set (match_operand:SI 0 "s_register_operand" "=l") + (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0") + (match_operand:SI 2 "s_register_operand" "l")) + (geu:SI (match_operand:SI 3 "s_register_operand" "l") + (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))] + "TARGET_THUMB1" + "cmp\\t%3, %4\;adc\\t%0, %1, %2" + [(set_attr "length" "4")] ) @@ -7468,7 +8005,7 @@ (if_then_else:SI (match_operand 1 "arm_comparison_operator" "") (match_operand:SI 2 "arm_not_operand" "") (match_operand:SI 3 "arm_not_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT" " { enum rtx_code code = GET_CODE (operands[1]); @@ -7477,7 +8014,8 @@ if (code == UNEQ || code == LTGT) FAIL; - ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1); + ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), + XEXP (operands[1], 1)); operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); }" ) @@ -7487,7 +8025,7 @@ (if_then_else:SF (match_operand 1 "arm_comparison_operator" "") (match_operand:SF 2 "s_register_operand" "") (match_operand:SF 3 "nonmemory_operand" "")))] - "TARGET_ARM" + "TARGET_32BIT && TARGET_HARD_FLOAT" " { enum rtx_code code = GET_CODE (operands[1]); @@ -7502,7 +8040,8 @@ || (!arm_float_add_operand (operands[3], SFmode))) operands[3] = force_reg (SFmode, operands[3]); - ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1); + ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), + XEXP (operands[1], 1)); operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); }" ) @@ -7512,7 +8051,7 @@ (if_then_else:DF (match_operand 1 "arm_comparison_operator" "") (match_operand:DF 2 "s_register_operand" "") (match_operand:DF 3 "arm_float_add_operand" "")))] - "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" + "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)" " { enum rtx_code code = GET_CODE (operands[1]); @@ -7521,7 +8060,8 @@ if (code == UNEQ || code == LTGT) FAIL; - ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1); + ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), + XEXP (operands[1], 1)); operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); }" ) @@ -7573,7 +8113,7 @@ (define_insn "*arm_jump" [(set (pc) (label_ref (match_operand 0 "" "")))] - "TARGET_ARM" + "TARGET_32BIT" "* { if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2) @@ -7590,7 +8130,7 @@ (define_insn "*thumb_jump" [(set (pc) (label_ref (match_operand 0 "" "")))] - "TARGET_THUMB" + "TARGET_THUMB1" "* if (get_attr_length (insn) == 2) return \"b\\t%l0\"; @@ -7617,32 +8157,33 @@ "TARGET_EITHER" " { - rtx callee; + rtx callee, pat; /* In an untyped call, we can get NULL for operand 2. */ if (operands[2] == NULL_RTX) operands[2] = const0_rtx; - /* This is to decide if we should generate indirect calls by loading the - 32 bit address of the callee into a register before performing the - branch and link. operand[2] encodes the long_call/short_call - attribute of the function being called. This attribute is set whenever - __attribute__((long_call/short_call)) or #pragma long_call/no_long_call - is used, and the short_call attribute can also be set if function is - declared as static or if it has already been defined in the current - compilation unit. See arm.c and arm.h for info about this. The third - parameter to arm_is_longcall_p is used to tell it which pattern - invoked it. 
*/ - callee = XEXP (operands[0], 0); - - if ((GET_CODE (callee) == SYMBOL_REF - && arm_is_longcall_p (operands[0], INTVAL (operands[2]), 0)) - || (GET_CODE (callee) != SYMBOL_REF - && GET_CODE (callee) != REG)) + /* Decide if we should generate indirect calls by loading the + 32-bit address of the callee into a register before performing the + branch and link. */ + callee = XEXP (operands[0], 0); + if (GET_CODE (callee) == SYMBOL_REF + ? arm_is_long_call_p (SYMBOL_REF_DECL (callee)) + : !REG_P (callee)) XEXP (operands[0], 0) = force_reg (Pmode, callee); + + pat = gen_call_internal (operands[0], operands[1], operands[2]); + arm_emit_call_insn (pat, XEXP (operands[0], 0)); + DONE; }" ) +(define_expand "call_internal" + [(parallel [(call (match_operand 0 "memory_operand" "") + (match_operand 1 "general_operand" "")) + (use (match_operand 2 "" "")) + (clobber (reg:SI LR_REGNUM))])]) + (define_insn "*call_reg_armv5" [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r")) (match_operand 1 "" "")) @@ -7680,23 +8221,23 @@ (set_attr "type" "call")] ) -(define_insn "*call_reg_thumb_v5" +(define_insn "*call_reg_thumb1_v5" [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r")) (match_operand 1 "" "")) (use (match_operand 2 "" "")) (clobber (reg:SI LR_REGNUM))] - "TARGET_THUMB && arm_arch5" + "TARGET_THUMB1 && arm_arch5" "blx\\t%0" [(set_attr "length" "2") (set_attr "type" "call")] ) -(define_insn "*call_reg_thumb" +(define_insn "*call_reg_thumb1" [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r")) (match_operand 1 "" "")) (use (match_operand 2 "" "")) (clobber (reg:SI LR_REGNUM))] - "TARGET_THUMB && !arm_arch5" + "TARGET_THUMB1 && !arm_arch5" "* { if (!TARGET_CALLER_INTERWORKING) @@ -7720,21 +8261,35 @@ "TARGET_EITHER" " { - rtx callee = XEXP (operands[1], 0); + rtx pat, callee; /* In an untyped call, we can get NULL for operand 2. */ if (operands[3] == 0) operands[3] = const0_rtx; - /* See the comment in define_expand \"call\". */ - if ((GET_CODE (callee) == SYMBOL_REF - && arm_is_longcall_p (operands[1], INTVAL (operands[3]), 0)) - || (GET_CODE (callee) != SYMBOL_REF - && GET_CODE (callee) != REG)) + /* Decide if we should generate indirect calls by loading the + 32-bit address of the callee into a register before performing the + branch and link. */ + callee = XEXP (operands[1], 0); + if (GET_CODE (callee) == SYMBOL_REF + ? 
arm_is_long_call_p (SYMBOL_REF_DECL (callee)) + : !REG_P (callee)) XEXP (operands[1], 0) = force_reg (Pmode, callee); + + pat = gen_call_value_internal (operands[0], operands[1], + operands[2], operands[3]); + arm_emit_call_insn (pat, XEXP (operands[1], 0)); + DONE; }" ) +(define_expand "call_value_internal" + [(parallel [(set (match_operand 0 "" "") + (call (match_operand 1 "memory_operand" "") + (match_operand 2 "general_operand" ""))) + (use (match_operand 3 "" "")) + (clobber (reg:SI LR_REGNUM))])]) + (define_insn "*call_value_reg_armv5" [(set (match_operand 0 "" "") (call (mem:SI (match_operand:SI 1 "s_register_operand" "r")) @@ -7774,25 +8329,25 @@ (set_attr "type" "call")] ) -(define_insn "*call_value_reg_thumb_v5" +(define_insn "*call_value_reg_thumb1_v5" [(set (match_operand 0 "" "") (call (mem:SI (match_operand:SI 1 "register_operand" "l*r")) (match_operand 2 "" ""))) (use (match_operand 3 "" "")) (clobber (reg:SI LR_REGNUM))] - "TARGET_THUMB && arm_arch5" + "TARGET_THUMB1 && arm_arch5" "blx\\t%1" [(set_attr "length" "2") (set_attr "type" "call")] ) -(define_insn "*call_value_reg_thumb" +(define_insn "*call_value_reg_thumb1" [(set (match_operand 0 "" "") (call (mem:SI (match_operand:SI 1 "register_operand" "l*r")) (match_operand 2 "" ""))) (use (match_operand 3 "" "")) (clobber (reg:SI LR_REGNUM))] - "TARGET_THUMB && !arm_arch5" + "TARGET_THUMB1 && !arm_arch5" "* { if (!TARGET_CALLER_INTERWORKING) @@ -7817,7 +8372,7 @@ (clobber (reg:SI LR_REGNUM))] "TARGET_ARM && (GET_CODE (operands[0]) == SYMBOL_REF) - && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)" + && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))" "* { return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\"; @@ -7833,7 +8388,7 @@ (clobber (reg:SI LR_REGNUM))] "TARGET_ARM && (GET_CODE (operands[1]) == SYMBOL_REF) - && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)" + && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))" "* { return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\"; @@ -7848,7 +8403,7 @@ (clobber (reg:SI LR_REGNUM))] "TARGET_THUMB && GET_CODE (operands[0]) == SYMBOL_REF - && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)" + && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))" "bl\\t%a0" [(set_attr "length" "4") (set_attr "type" "call")] @@ -7862,7 +8417,7 @@ (clobber (reg:SI LR_REGNUM))] "TARGET_THUMB && GET_CODE (operands[1]) == SYMBOL_REF - && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)" + && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))" "bl\\t%a1" [(set_attr "length" "4") (set_attr "type" "call")] @@ -8119,8 +8674,7 @@ /* Emit USE insns before the return. */ for (i = 0; i < XVECLEN (operands[1], 0); i++) - emit_insn (gen_rtx_USE (VOIDmode, - SET_DEST (XVECEXP (operands[1], 0, i)))); + emit_use (SET_DEST (XVECEXP (operands[1], 0, i))); /* Construct the return. 
*/ expand_naked_return (); @@ -8146,7 +8700,7 @@ (match_operand:SI 2 "const_int_operand" "") ; total range (match_operand:SI 3 "" "") ; table label (match_operand:SI 4 "" "")] ; Out of range label - "TARGET_ARM" + "TARGET_32BIT" " { rtx reg; @@ -8162,15 +8716,28 @@ if (!const_ok_for_arm (INTVAL (operands[2]))) operands[2] = force_reg (SImode, operands[2]); - emit_jump_insn (gen_casesi_internal (operands[0], operands[2], operands[3], - operands[4])); + if (TARGET_ARM) + { + emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2], + operands[3], operands[4])); + } + else if (flag_pic) + { + emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0], + operands[2], operands[3], operands[4])); + } + else + { + emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2], + operands[3], operands[4])); + } DONE; }" ) ;; The USE in this pattern is needed to tell flow analysis that this is ;; a CASESI insn. It has no other purpose. -(define_insn "casesi_internal" +(define_insn "arm_casesi_internal" [(parallel [(set (pc) (if_then_else (leu (match_operand:SI 0 "s_register_operand" "r") @@ -8194,7 +8761,17 @@ [(set (pc) (match_operand:SI 0 "s_register_operand" ""))] "TARGET_EITHER" - "" + " + /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the + address and use bx. */ + if (TARGET_THUMB2) + { + rtx tmp; + tmp = gen_reg_rtx (SImode); + emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1))); + operands[0] = tmp; + } + " ) ;; NB Never uses BX. @@ -8218,10 +8795,10 @@ ) ;; NB Never uses BX. -(define_insn "*thumb_indirect_jump" +(define_insn "*thumb1_indirect_jump" [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))] - "TARGET_THUMB" + "TARGET_THUMB1" "mov\\tpc, %0" [(set_attr "conds" "clob") (set_attr "length" "2")] @@ -8234,6 +8811,8 @@ [(const_int 0)] "TARGET_EITHER" "* + if (TARGET_UNIFIED_ASM) + return \"nop\"; if (TARGET_ARM) return \"mov%?\\t%|r0, %|r0\\t%@ nop\"; return \"mov\\tr8, r8\"; @@ -8293,7 +8872,7 @@ (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)]) (match_dup 2)]))] "TARGET_ARM" - "%i1%?s\\t%0, %2, %4%S3" + "%i1%.\\t%0, %2, %4%S3" [(set_attr "conds" "set") (set_attr "shift" "4") (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "") @@ -8311,7 +8890,7 @@ (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] "TARGET_ARM" - "%i1%?s\\t%0, %2, %4%S3" + "%i1%.\\t%0, %2, %4%S3" [(set_attr "conds" "set") (set_attr "shift" "4") (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "") @@ -8346,7 +8925,7 @@ (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3) (match_dup 4)])))] "TARGET_ARM" - "sub%?s\\t%0, %1, %3%S2" + "sub%.\\t%0, %1, %3%S2" [(set_attr "conds" "set") (set_attr "shift" "3") (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "") @@ -8364,7 +8943,7 @@ (const_int 0))) (clobber (match_scratch:SI 0 "=r"))] "TARGET_ARM" - "sub%?s\\t%0, %1, %3%S2" + "sub%.\\t%0, %1, %3%S2" [(set_attr "conds" "set") (set_attr "shift" "3") (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "") @@ -8506,6 +9085,7 @@ (set_attr "length" "8,12")] ) +;; ??? Is it worth using these conditional patterns in Thumb-2 mode? (define_insn "*cmp_ite0" [(set (match_operand 6 "dominant_cc_register" "") (compare @@ -8832,6 +9412,7 @@ (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1)) (const_int 0)))] "") +;; ??? 
The conditional patterns above need checking for Thumb-2 usefulness (define_insn "*negscc" [(set (match_operand:SI 0 "s_register_operand" "=r") @@ -8841,15 +9422,12 @@ (clobber (reg:CC CC_REGNUM))] "TARGET_ARM" "* - if (GET_CODE (operands[3]) == LT && operands[3] == const0_rtx) + if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx) return \"mov\\t%0, %1, asr #31\"; if (GET_CODE (operands[3]) == NE) return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\"; - if (GET_CODE (operands[3]) == GT) - return \"subs\\t%0, %1, %2\;mvnne\\t%0, %0, asr #31\"; - output_asm_insn (\"cmp\\t%1, %2\", operands); output_asm_insn (\"mov%D3\\t%0, #0\", operands); return \"mvn%d3\\t%0, #0\"; @@ -8921,6 +9499,8 @@ (set_attr "length" "8,8,12")] ) +;; ??? The patterns below need checking for Thumb-2 usefulness. + (define_insn "*ifcompare_plus_move" [(set (match_operand:SI 0 "s_register_operand" "=r,r") (if_then_else:SI (match_operator 6 "arm_comparison_operator" @@ -9513,7 +10093,7 @@ if (val1 == 4 || val2 == 4) /* Other val must be 8, since we know they are adjacent and neither is zero. */ - output_asm_insn (\"ldm%?ib\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm); else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1)) { ldm[0] = ops[0] = operands[4]; @@ -9521,9 +10101,9 @@ ops[2] = GEN_INT (val1); output_add_immediate (ops); if (val1 < val2) - output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm); else - output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm); } else { @@ -9540,16 +10120,16 @@ else if (val1 != 0) { if (val1 < val2) - output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm); else - output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm); } else { if (val1 < val2) - output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm); else - output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm); + output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm); } output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith); return \"\"; @@ -9576,13 +10156,13 @@ ; reversed, check that the memory references aren't volatile. 
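
The *negscc change above is a genuine bug fix rather than a Thumb-2 tweak: the single-instruction special case is meant to fire for a comparison against zero, so it must test operands[2] (the second comparison operand) for const0_rtx instead of operands[3] (the operator rtx), which can never equal const0_rtx. When it fires, the whole negated store-flag collapses to one arithmetic shift. A C sketch of the idiom it targets:

/* Sketch: -(x < 0) is the sign bit replicated across the word, which
   the fixed *negscc special case can emit as a single
   "mov r0, r1, asr #31" on ARM.  */
int sign_mask(int x)
{
    return -(x < 0);    /* 0 for x >= 0, -1 for x < 0 */
}
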
(define_peephole - [(set (match_operand:SI 0 "s_register_operand" "=r") + [(set (match_operand:SI 0 "s_register_operand" "=rk") (match_operand:SI 4 "memory_operand" "m")) - (set (match_operand:SI 1 "s_register_operand" "=r") + (set (match_operand:SI 1 "s_register_operand" "=rk") (match_operand:SI 5 "memory_operand" "m")) - (set (match_operand:SI 2 "s_register_operand" "=r") + (set (match_operand:SI 2 "s_register_operand" "=rk") (match_operand:SI 6 "memory_operand" "m")) - (set (match_operand:SI 3 "s_register_operand" "=r") + (set (match_operand:SI 3 "s_register_operand" "=rk") (match_operand:SI 7 "memory_operand" "m"))] "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)" "* @@ -9591,11 +10171,11 @@ ) (define_peephole - [(set (match_operand:SI 0 "s_register_operand" "=r") + [(set (match_operand:SI 0 "s_register_operand" "=rk") (match_operand:SI 3 "memory_operand" "m")) - (set (match_operand:SI 1 "s_register_operand" "=r") + (set (match_operand:SI 1 "s_register_operand" "=rk") (match_operand:SI 4 "memory_operand" "m")) - (set (match_operand:SI 2 "s_register_operand" "=r") + (set (match_operand:SI 2 "s_register_operand" "=rk") (match_operand:SI 5 "memory_operand" "m"))] "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)" "* @@ -9604,9 +10184,9 @@ ) (define_peephole - [(set (match_operand:SI 0 "s_register_operand" "=r") + [(set (match_operand:SI 0 "s_register_operand" "=rk") (match_operand:SI 2 "memory_operand" "m")) - (set (match_operand:SI 1 "s_register_operand" "=r") + (set (match_operand:SI 1 "s_register_operand" "=rk") (match_operand:SI 3 "memory_operand" "m"))] "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)" "* @@ -9616,13 +10196,13 @@ (define_peephole [(set (match_operand:SI 4 "memory_operand" "=m") - (match_operand:SI 0 "s_register_operand" "r")) + (match_operand:SI 0 "s_register_operand" "rk")) (set (match_operand:SI 5 "memory_operand" "=m") - (match_operand:SI 1 "s_register_operand" "r")) + (match_operand:SI 1 "s_register_operand" "rk")) (set (match_operand:SI 6 "memory_operand" "=m") - (match_operand:SI 2 "s_register_operand" "r")) + (match_operand:SI 2 "s_register_operand" "rk")) (set (match_operand:SI 7 "memory_operand" "=m") - (match_operand:SI 3 "s_register_operand" "r"))] + (match_operand:SI 3 "s_register_operand" "rk"))] "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)" "* return emit_stm_seq (operands, 4); @@ -9631,11 +10211,11 @@ (define_peephole [(set (match_operand:SI 3 "memory_operand" "=m") - (match_operand:SI 0 "s_register_operand" "r")) + (match_operand:SI 0 "s_register_operand" "rk")) (set (match_operand:SI 4 "memory_operand" "=m") - (match_operand:SI 1 "s_register_operand" "r")) + (match_operand:SI 1 "s_register_operand" "rk")) (set (match_operand:SI 5 "memory_operand" "=m") - (match_operand:SI 2 "s_register_operand" "r"))] + (match_operand:SI 2 "s_register_operand" "rk"))] "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)" "* return emit_stm_seq (operands, 3); @@ -9644,9 +10224,9 @@ (define_peephole [(set (match_operand:SI 2 "memory_operand" "=m") - (match_operand:SI 0 "s_register_operand" "r")) + (match_operand:SI 0 "s_register_operand" "rk")) (set (match_operand:SI 3 "memory_operand" "=m") - (match_operand:SI 1 "s_register_operand" "r"))] + (match_operand:SI 1 "s_register_operand" "rk"))] "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)" "* return emit_stm_seq (operands, 2); @@ -9688,14 +10268,15 @@ operands[1] = GEN_INT (((unsigned long) 
INTVAL (operands[1])) >> 24); " ) +;; ??? Check the patterns above for Thumb-2 usefulness (define_expand "prologue" [(clobber (const_int 0))] "TARGET_EITHER" - "if (TARGET_ARM) + "if (TARGET_32BIT) arm_expand_prologue (); else - thumb_expand_prologue (); + thumb1_expand_prologue (); DONE; " ) @@ -9704,10 +10285,10 @@ [(clobber (const_int 0))] "TARGET_EITHER" " - if (current_function_calls_eh_return) + if (crtl->calls_eh_return) emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2))); - if (TARGET_THUMB) - thumb_expand_epilogue (); + if (TARGET_THUMB1) + thumb1_expand_epilogue (); else if (USE_RETURN_INSN (FALSE)) { emit_jump_insn (gen_return ()); @@ -9729,7 +10310,7 @@ (define_insn "sibcall_epilogue" [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE) (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])] - "TARGET_ARM" + "TARGET_32BIT" "* if (use_return_insn (FALSE, next_nonnote_insn (insn))) return output_return_instruction (const_true_rtx, FALSE, FALSE); @@ -9748,9 +10329,9 @@ [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)] "TARGET_EITHER" "* - if (TARGET_ARM) + if (TARGET_32BIT) return arm_output_epilogue (NULL); - else /* TARGET_THUMB */ + else /* TARGET_THUMB1 */ return thumb_unexpanded_epilogue (); " ; Length is absolute worst case @@ -9790,6 +10371,9 @@ ;; some extent with the conditional data operations, so we have to split them ;; up again here. +;; ??? Need to audit these splitters for Thumb-2. Why isn't normal +;; conditional execution sufficient? + (define_split [(set (match_operand:SI 0 "s_register_operand" "") (if_then_else:SI (match_operator 1 "arm_comparison_operator" @@ -9952,6 +10536,7 @@ [(set_attr "conds" "clob") (set_attr "length" "12")] ) +;; ??? The above patterns need auditing for Thumb-2 ;; Push multiple registers to the stack. Registers are in parallel (use ...) ;; expressions. For simplicity, the first register is also in the unspec @@ -9961,21 +10546,26 @@ [(set (match_operand:BLK 0 "memory_operand" "=m") (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_PUSH_MULT))])] - "TARGET_ARM" + "TARGET_32BIT" "* { int num_saves = XVECLEN (operands[2], 0); /* For the StrongARM at least it is faster to - use STR to store only a single register. */ - if (num_saves == 1) + use STR to store only a single register. + In Thumb mode always use push, and the assembler will pick + something appropriate. 
*/ + if (num_saves == 1 && TARGET_ARM) output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands); else { int i; char pattern[100]; - strcpy (pattern, \"stmfd\\t%m0!, {%1\"); + if (TARGET_ARM) + strcpy (pattern, \"stmfd\\t%m0!, {%1\"); + else + strcpy (pattern, \"push\\t{%1\"); for (i = 1; i < num_saves; i++) { @@ -9995,8 +10585,8 @@ (define_insn "stack_tie" [(set (mem:BLK (scratch)) - (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r") - (match_operand:SI 1 "s_register_operand" "r")] + (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk") + (match_operand:SI 1 "s_register_operand" "rk")] UNSPEC_PRLG_STK))] "" "" @@ -10009,7 +10599,7 @@ [(set (match_operand:BLK 0 "memory_operand" "=m") (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")] UNSPEC_PUSH_MULT))])] - "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA" + "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA" "* { char pattern[100]; @@ -10052,7 +10642,7 @@ (define_insn "consttable_1" [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)] - "TARGET_THUMB" + "TARGET_THUMB1" "* making_const_table = TRUE; assemble_integer (operands[0], 1, BITS_PER_WORD, 1); @@ -10064,7 +10654,7 @@ (define_insn "consttable_2" [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)] - "TARGET_THUMB" + "TARGET_THUMB1" "* making_const_table = TRUE; assemble_integer (operands[0], 2, BITS_PER_WORD, 1); @@ -10091,6 +10681,7 @@ } default: assemble_integer (operands[0], 4, BITS_PER_WORD, 1); + mark_symbol_refs_as_used (operands[0]); break; } return \"\"; @@ -10122,12 +10713,36 @@ [(set_attr "length" "8")] ) +(define_insn "consttable_16" + [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)] + "TARGET_EITHER" + "* + { + making_const_table = TRUE; + switch (GET_MODE_CLASS (GET_MODE (operands[0]))) + { + case MODE_FLOAT: + { + REAL_VALUE_TYPE r; + REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]); + assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD); + break; + } + default: + assemble_integer (operands[0], 16, BITS_PER_WORD, 1); + break; + } + return \"\"; + }" + [(set_attr "length" "16")] +) + ;; Miscellaneous Thumb patterns (define_expand "tablejump" [(parallel [(set (pc) (match_operand:SI 0 "register_operand" "")) (use (label_ref (match_operand 1 "" "")))])] - "TARGET_THUMB" + "TARGET_THUMB1" " if (flag_pic) { @@ -10142,10 +10757,10 @@ ) ;; NB never uses BX. 
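
The casesi expander earlier in this hunk now chooses between arm_casesi_internal and the Thumb-2 variants (PIC and non-PIC), while the Thumb-1 tablejump expander just above keeps its own PIC handling and hands off to the *thumb1_tablejump insn that follows. A hedged C sketch of a switch dense enough to be a jump-table candidate; whether GCC actually emits a table also depends on optimization level and case density, which is an assumption here:

/* Sketch: a dense switch that is a candidate for casesi/tablejump
   expansion; sparse switches fall back to compare-and-branch chains.  */
int decode(int op)
{
    switch (op) {
    case 0:  return 'a';
    case 1:  return 'b';
    case 2:  return 'c';
    case 3:  return 'd';
    case 4:  return 'e';
    case 5:  return 'f';
    default: return '?';
    }
}
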
-(define_insn "*thumb_tablejump" +(define_insn "*thumb1_tablejump" [(set (pc) (match_operand:SI 0 "register_operand" "l*r")) (use (label_ref (match_operand 1 "" "")))] - "TARGET_THUMB" + "TARGET_THUMB1" "mov\\t%|pc, %0" [(set_attr "length" "2")] ) @@ -10155,49 +10770,10 @@ (define_insn "clzsi2" [(set (match_operand:SI 0 "s_register_operand" "=r") (clz:SI (match_operand:SI 1 "s_register_operand" "r")))] - "TARGET_ARM && arm_arch5" + "TARGET_32BIT && arm_arch5" "clz%?\\t%0, %1" - [(set_attr "predicable" "yes")]) - -(define_expand "ffssi2" - [(set (match_operand:SI 0 "s_register_operand" "") - (ffs:SI (match_operand:SI 1 "s_register_operand" "")))] - "TARGET_ARM && arm_arch5" - " - { - rtx t1, t2, t3; - - t1 = gen_reg_rtx (SImode); - t2 = gen_reg_rtx (SImode); - t3 = gen_reg_rtx (SImode); - - emit_insn (gen_negsi2 (t1, operands[1])); - emit_insn (gen_andsi3 (t2, operands[1], t1)); - emit_insn (gen_clzsi2 (t3, t2)); - emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3)); - DONE; - }" -) - -(define_expand "ctzsi2" - [(set (match_operand:SI 0 "s_register_operand" "") - (ctz:SI (match_operand:SI 1 "s_register_operand" "")))] - "TARGET_ARM && arm_arch5" - " - { - rtx t1, t2, t3; - - t1 = gen_reg_rtx (SImode); - t2 = gen_reg_rtx (SImode); - t3 = gen_reg_rtx (SImode); - - emit_insn (gen_negsi2 (t1, operands[1])); - emit_insn (gen_andsi3 (t2, operands[1], t1)); - emit_insn (gen_clzsi2 (t3, t2)); - emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3)); - DONE; - }" -) + [(set_attr "predicable" "yes") + (set_attr "insn" "clz")]) ;; V5E instructions. @@ -10205,7 +10781,7 @@ [(prefetch (match_operand:SI 0 "address_operand" "p") (match_operand:SI 1 "" "") (match_operand:SI 2 "" ""))] - "TARGET_ARM && arm_arch5e" + "TARGET_32BIT && arm_arch5e" "pld\\t%a0") ;; General predication pattern @@ -10214,7 +10790,7 @@ [(match_operator 0 "arm_comparison_operator" [(match_operand 1 "cc_register" "") (const_int 0)])] - "TARGET_ARM" + "TARGET_32BIT" "" ) @@ -10232,7 +10808,7 @@ "TARGET_EITHER" " { - if (TARGET_ARM) + if (TARGET_32BIT) emit_insn (gen_arm_eh_return (operands[0])); else emit_insn (gen_thumb_eh_return (operands[0])); @@ -10260,7 +10836,7 @@ [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")] VUNSPEC_EH_RETURN) (clobber (match_scratch:SI 1 "=&l"))] - "TARGET_THUMB" + "TARGET_THUMB1" "#" "&& reload_completed" [(const_int 0)] @@ -10297,8 +10873,14 @@ (include "fpa.md") ;; Load the Maverick co-processor patterns (include "cirrus.md") +;; Vector bits common to IWMMXT and Neon +(include "vec-common.md") ;; Load the Intel Wireless Multimedia Extension patterns (include "iwmmxt.md") ;; Load the VFP co-processor patterns (include "vfp.md") +;; Thumb-2 patterns +(include "thumb2.md") +;; Neon patterns +(include "neon.md")