UNSPEC_VCVTPH2PS
UNSPEC_VCVTPS2PH
+ ;; For AVX2 support
+ UNSPEC_VPERMSI
+ UNSPEC_VPERMDF
+ UNSPEC_VPERMSF
+ UNSPEC_VPERMDI
+ UNSPEC_VPERMTI
+ UNSPEC_GATHER
+
;; For BMI support
UNSPEC_BEXTR
;; For RDRAND support
UNSPEC_RDRAND
+
+ ;; For BMI2 support
+ UNSPEC_PDEP
+ UNSPEC_PEXT
])
(define_c_enum "unspecv" [
(define_attr "type"
"other,multi,
alu,alu1,negnot,imov,imovx,lea,
- incdec,ishift,ishift1,rotate,rotate1,imul,idiv,
+ incdec,ishift,ishiftx,ishift1,rotate,rotatex,rotate1,imul,imulx,idiv,
icmp,test,ibr,setcc,icmov,
push,pop,call,callv,leave,
str,bitmanip,
;; The (bounding maximum) length of an instruction immediate.
(define_attr "length_immediate" ""
(cond [(eq_attr "type" "incdec,setcc,icmov,str,lea,other,multi,idiv,leave,
- bitmanip")
+ bitmanip,imulx")
(const_int 0)
(eq_attr "unit" "i387,sse,mmx")
(const_int 0)
- (eq_attr "type" "alu,alu1,negnot,imovx,ishift,rotate,ishift1,rotate1,
- imul,icmp,push,pop")
+ (eq_attr "type" "alu,alu1,negnot,imovx,ishift,ishiftx,ishift1,
+ rotate,rotatex,rotate1,imul,icmp,push,pop")
(symbol_ref "ix86_attr_length_immediate_default (insn, true)")
(eq_attr "type" "imov,test")
(symbol_ref "ix86_attr_length_immediate_default (insn, false)")
(and (match_operand 0 "memory_displacement_operand" "")
(match_operand 1 "immediate_operand" "")))
(const_string "true")
- (and (eq_attr "type" "alu,ishift,rotate,imul,idiv")
+ (and (eq_attr "type" "alu,ishift,ishiftx,rotate,rotatex,imul,idiv")
(and (match_operand 0 "memory_displacement_operand" "")
(match_operand 2 "immediate_operand" "")))
(const_string "true")
(define_attr "movu" "0,1" (const_string "0"))
;; Used to control the "enabled" attribute on a per-instruction basis.
-(define_attr "isa" "base,noavx,avx"
+(define_attr "isa" "base,sse2,sse2_noavx,sse3,sse4,sse4_noavx,noavx,avx,bmi2"
(const_string "base"))
(define_attr "enabled" ""
- (cond [(eq_attr "isa" "noavx") (symbol_ref "!TARGET_AVX")
+ (cond [(eq_attr "isa" "sse2") (symbol_ref "TARGET_SSE2")
+ (eq_attr "isa" "sse2_noavx")
+ (symbol_ref "TARGET_SSE2 && !TARGET_AVX")
+ (eq_attr "isa" "sse3") (symbol_ref "TARGET_SSE3")
+ (eq_attr "isa" "sse4") (symbol_ref "TARGET_SSE4_1")
+ (eq_attr "isa" "sse4_noavx")
+ (symbol_ref "TARGET_SSE4_1 && !TARGET_AVX")
(eq_attr "isa" "avx") (symbol_ref "TARGET_AVX")
+ (eq_attr "isa" "noavx") (symbol_ref "!TARGET_AVX")
+ (eq_attr "isa" "bmi2") (symbol_ref "TARGET_BMI2")
]
(const_int 1)))
[(SF "ss") (DF "sd")
(V8SF "ps") (V4DF "pd")
(V4SF "ps") (V2DF "pd")
- (V16QI "b") (V8HI "w") (V4SI "d") (V2DI "q")])
+ (V16QI "b") (V8HI "w") (V4SI "d") (V2DI "q")
+ (V32QI "b") (V16HI "w") (V8SI "d") (V4DI "q")])
;; SSE vector suffix for floating point modes
(define_mode_attr ssevecmodesuffix [(SF "ps") (DF "pd")])
(define_insn "*movdi_internal"
[(set (match_operand:DI 0 "nonimmediate_operand"
- "=r ,o ,*y,m*y,*y,*Y2,m ,*Y2,*Y2,*x,m ,*x,*x,?*Y2,?*Ym")
+ "=r ,o ,*y,m*y,*y,*x,m ,*x,*x,*x,m ,*x,*x,?*x,?*Ym")
(match_operand:DI 1 "general_operand"
- "riFo,riF,C ,*y ,m ,C ,*Y2,*Y2,m ,C ,*x,*x,m ,*Ym ,*Y2"))]
+ "riFo,riF,C ,*y ,m ,C ,*x,*x,m ,C ,*x,*x,m ,*Ym,*x"))]
"!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
}
}
[(set (attr "isa")
- (if_then_else (eq_attr "alternative" "9,10,11,12")
- (const_string "noavx")
- (const_string "*")))
+ (cond [(eq_attr "alternative" "5,6,7,8,13,14")
+ (const_string "sse2")
+ (eq_attr "alternative" "9,10,11,12")
+ (const_string "noavx")
+ ]
+ (const_string "*")))
(set (attr "type")
(cond [(eq_attr "alternative" "0,1")
(const_string "multi")
(define_insn "*pushdf_rex64"
[(set (match_operand:DF 0 "push_operand" "=<,<,<")
- (match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFm,Y2"))]
+ (match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFm,x"))]
"TARGET_64BIT"
{
/* This insn should be already split before reg-stack. */
(define_insn "*pushdf"
[(set (match_operand:DF 0 "push_operand" "=<,<,<")
- (match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFo,Y2"))]
+ (match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFo,x"))]
"!TARGET_64BIT"
{
/* This insn should be already split before reg-stack. */
gcc_unreachable ();
}
- [(set_attr "type" "multi")
+ [(set_attr "isa" "*,*,sse2")
+ (set_attr "type" "multi")
(set_attr "unit" "i387,*,*")
(set_attr "mode" "DF,DI,DF")])
(define_insn "*movdf_internal_rex64"
[(set (match_operand:DF 0 "nonimmediate_operand"
- "=f,m,f,?r,?m,?r,!o,Y2*x,Y2*x,Y2*x,m ,Yi,r ")
+ "=f,m,f,?r,?m,?r,!o,x,x,x,m,Yi,r ")
(match_operand:DF 1 "general_operand"
- "fm,f,G,rm,r ,F ,F ,C ,Y2*x,m ,Y2*x,r ,Yi"))]
+ "fm,f,G,rm,r ,F ,F ,C,x,m,x,r ,Yi"))]
"TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& (!can_create_pseudo_p ()
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
;; Possible store forwarding (partial memory) stall in alternative 4.
(define_insn "*movdf_internal"
[(set (match_operand:DF 0 "nonimmediate_operand"
- "=f,m,f,?Yd*r ,!o ,Y2*x,Y2*x,Y2*x,m ")
+ "=f,m,f,?Yd*r ,!o ,x,x,x,m,*x,*x,*x,m")
(match_operand:DF 1 "general_operand"
- "fm,f,G,Yd*roF,FYd*r,C ,Y2*x,m ,Y2*x"))]
+ "fm,f,G,Yd*roF,FYd*r,C,x,m,x,C ,*x,m ,*x"))]
"!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& (!can_create_pseudo_p ()
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
return "#";
case 5:
+ case 9:
return standard_sse_constant_opcode (insn, operands[1]);
case 6:
case 7:
case 8:
+ case 10:
+ case 11:
+ case 12:
switch (get_attr_mode (insn))
{
case MODE_V2DF:
gcc_unreachable ();
}
}
- [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
+ [(set (attr "isa")
+ (if_then_else (eq_attr "alternative" "5,6,7,8")
+ (const_string "sse2")
+ (const_string "*")))
+ (set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
(set (attr "prefix")
(if_then_else (eq_attr "alternative" "0,1,2,3,4")
(const_string "orig")
/* For SSE1, we have many fewer alternatives. */
(eq (symbol_ref "TARGET_SSE2") (const_int 0))
(if_then_else
- (eq_attr "alternative" "5,6")
+ (eq_attr "alternative" "5,6,9,10")
(const_string "V4SF")
(const_string "V2SF"))
/* xorps is one byte shorter. */
- (eq_attr "alternative" "5")
+ (eq_attr "alternative" "5,9")
(cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
(const_int 0))
(const_string "V4SF")
chains, otherwise use short move to avoid extra work.
movaps encodes one byte shorter. */
- (eq_attr "alternative" "6")
+ (eq_attr "alternative" "6,10")
(cond
[(ne (symbol_ref "optimize_function_for_size_p (cfun)")
(const_int 0))
/* For architectures resolving dependencies on register
parts we may avoid extra work to zero out upper part
of register. */
- (eq_attr "alternative" "7")
+ (eq_attr "alternative" "7,11")
(if_then_else
(ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
(const_int 0))
})
(define_insn "*zero_extendsidi2_rex64"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,o,?*Ym,?*y,?*Yi,*Y2")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r,o,?*Ym,?*y,?*Yi,*x")
(zero_extend:DI
(match_operand:SI 1 "nonimmediate_operand" "rm,0,r ,m ,r ,m")))]
"TARGET_64BIT"
;; %%% Kill me once multi-word ops are sane.
(define_insn "zero_extendsidi2_1"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,?r,?o,?*Ym,?*y,?*Yi,*Y2")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r,?r,?o,?*Ym,?*y,?*Yi,*x")
(zero_extend:DI
(match_operand:SI 1 "nonimmediate_operand" "0,rm,r ,r ,m ,r ,m")))
(clobber (reg:CC FLAGS_REG))]
movd\t{%1, %0|%0, %1}
%vmovd\t{%1, %0|%0, %1}
%vmovd\t{%1, %0|%0, %1}"
- [(set_attr "type" "multi,multi,multi,mmxmov,mmxmov,ssemov,ssemov")
+ [(set_attr "isa" "*,*,*,*,*,*,sse2")
+ (set_attr "type" "multi,multi,multi,mmxmov,mmxmov,ssemov,ssemov")
(set_attr "prefix" "*,*,*,orig,orig,maybe_vex,maybe_vex")
(set_attr "mode" "SI,SI,SI,DI,DI,TI,TI")])
(set_attr "mode" "SF")])
(define_insn "*truncdfsf_mixed"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=m,Y2 ,?f,?x,?*r")
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=m,x ,?f,?x,?*r")
(float_truncate:SF
- (match_operand:DF 1 "nonimmediate_operand" "f ,Y2m,f ,f ,f")))
- (clobber (match_operand:SF 2 "memory_operand" "=X,X ,m ,m ,m"))]
+ (match_operand:DF 1 "nonimmediate_operand" "f ,xm,f ,f ,f")))
+ (clobber (match_operand:SF 2 "memory_operand" "=X,X ,m ,m ,m"))]
"TARGET_MIX_SSE_I387"
{
switch (which_alternative)
return "#";
}
}
- [(set_attr "type" "fmov,ssecvt,multi,multi,multi")
+ [(set_attr "isa" "*,sse2,*,*,*")
+ (set_attr "type" "fmov,ssecvt,multi,multi,multi")
(set_attr "unit" "*,*,i387,i387,i387")
(set_attr "prefix" "orig,maybe_vex,orig,orig,orig")
(set_attr "mode" "SF")])
(set_attr "mode" "SF")])
(define_insn "*truncxfdf2_mixed"
- [(set (match_operand:DF 0 "nonimmediate_operand" "=m,?f,?Y2,?*r")
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=m,?f,?x,?*r")
(float_truncate:DF
(match_operand:XF 1 "register_operand" "f ,f ,f ,f")))
(clobber (match_operand:DF 2 "memory_operand" "=X,m ,m ,m"))]
gcc_assert (!which_alternative);
return output_387_reg_move (insn, operands);
}
- [(set_attr "type" "fmov,multi,multi,multi")
+ [(set_attr "isa" "*,*,sse2,*")
+ (set_attr "type" "fmov,multi,multi,multi")
(set_attr "unit" "*,i387,i387,i387")
(set_attr "mode" "DF")])
;; Avoid vector decoded forms of the instruction.
(define_peephole2
- [(match_scratch:DF 2 "Y2")
+ [(match_scratch:DF 2 "x")
(set (match_operand:SWI48x 0 "register_operand" "")
(fix:SWI48x (match_operand:DF 1 "memory_operand" "")))]
- "TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
+ "TARGET_SSE2 && TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (fix:SWI48x (match_dup 2)))])
(set_attr "mode" "SI")])
(define_insn "*addhi_1"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r")
- (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0")
- (match_operand:HI 2 "general_operand" "rn,rm")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, HImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_INCDEC:
- if (operands[2] == const1_rtx)
- return "inc{w}\t%0";
- else
- {
- gcc_assert (operands[2] == constm1_rtx);
- return "dec{w}\t%0";
- }
-
- default:
- if (x86_maybe_negate_const_int (&operands[2], HImode))
- return "sub{w}\t{%2, %0|%0, %2}";
-
- return "add{w}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (if_then_else (match_operand:HI 2 "incdec_operand" "")
- (const_string "incdec")
- (const_string "alu")))
- (set (attr "length_immediate")
- (if_then_else
- (and (eq_attr "type" "alu") (match_operand 2 "const128_operand" ""))
- (const_string "1")
- (const_string "*")))
- (set_attr "mode" "HI")])
-
-(define_insn "*addhi_1_lea"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,rm,r,r")
- (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0,r,r")
- (match_operand:HI 2 "general_operand" "rmn,rn,0,ln")))
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r,r,Yp")
+ (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0,r,Yp")
+ (match_operand:HI 2 "general_operand" "rn,rm,0,ln")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, HImode, operands)"
+ "ix86_binary_operator_ok (PLUS, HImode, operands)"
{
switch (get_attr_type (insn))
{
(const_string "*")))
(set_attr "mode" "HI,HI,HI,SI")])
-;; %%% Potential partial reg stall on alternative 2. What to do?
-(define_insn "*addqi_1"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,q,r")
- (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,0")
- (match_operand:QI 2 "general_operand" "qn,qmn,rn")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, QImode, operands)"
-{
- int widen = (which_alternative == 2);
- switch (get_attr_type (insn))
- {
- case TYPE_INCDEC:
- if (operands[2] == const1_rtx)
- return widen ? "inc{l}\t%k0" : "inc{b}\t%0";
- else
- {
- gcc_assert (operands[2] == constm1_rtx);
- return widen ? "dec{l}\t%k0" : "dec{b}\t%0";
- }
-
- default:
- if (x86_maybe_negate_const_int (&operands[2], QImode))
- {
- if (widen)
- return "sub{l}\t{%2, %k0|%k0, %2}";
- else
- return "sub{b}\t{%2, %0|%0, %2}";
- }
- if (widen)
- return "add{l}\t{%k2, %k0|%k0, %k2}";
- else
- return "add{b}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (if_then_else (match_operand:QI 2 "incdec_operand" "")
- (const_string "incdec")
- (const_string "alu")))
- (set (attr "length_immediate")
- (if_then_else
- (and (eq_attr "type" "alu") (match_operand 2 "const128_operand" ""))
- (const_string "1")
- (const_string "*")))
- (set_attr "mode" "QI,QI,SI")])
-
;; %%% Potential partial reg stall on alternatives 3 and 4. What to do?
-(define_insn "*addqi_1_lea"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=q,qm,q,r,r,r")
- (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,q,0,r,r")
- (match_operand:QI 2 "general_operand" "qmn,qn,0,rn,0,ln")))
+(define_insn "*addqi_1"
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,q,q,r,r,Yp")
+ (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,q,0,r,Yp")
+ (match_operand:QI 2 "general_operand" "qn,qm,0,rn,0,ln")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, QImode, operands)"
+ "ix86_binary_operator_ok (PLUS, QImode, operands)"
{
- int widen = (which_alternative == 3 || which_alternative == 4);
+ bool widen = (which_alternative == 3 || which_alternative == 4);
switch (get_attr_type (insn))
{
(define_insn "*addqi_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm,q"))
(plus:QI (match_dup 0)
- (match_operand:QI 1 "general_operand" "qn,qnm")))
+ (match_operand:QI 1 "general_operand" "qn,qm")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
(clobber (reg:CC FLAGS_REG))])]
"TARGET_QIMODE_MATH")
-(define_insn "*<u>mul<mode><dwi>3_1"
+(define_insn "*bmi2_umulditi3_1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (mult:DI
+ (match_operand:DI 2 "nonimmediate_operand" "%d")
+ (match_operand:DI 3 "nonimmediate_operand" "rm")))
+ (set (match_operand:DI 1 "register_operand" "=r")
+ (truncate:DI
+ (lshiftrt:TI
+ (mult:TI (zero_extend:TI (match_dup 2))
+ (zero_extend:TI (match_dup 3)))
+ (const_int 64))))]
+ "TARGET_64BIT && TARGET_BMI2
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
+ "mulx\t{%3, %0, %1|%1, %0, %3}"
+ [(set_attr "type" "imulx")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "DI")])
+
+(define_insn "*bmi2_umulsidi3_1"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (mult:SI
+ (match_operand:SI 2 "nonimmediate_operand" "%d")
+ (match_operand:SI 3 "nonimmediate_operand" "rm")))
+ (set (match_operand:SI 1 "register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI (zero_extend:DI (match_dup 2))
+ (zero_extend:DI (match_dup 3)))
+ (const_int 32))))]
+ "!TARGET_64BIT && TARGET_BMI2
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
+ "mulx\t{%3, %0, %1|%1, %0, %3}"
+ [(set_attr "type" "imulx")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "SI")])
+
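+;; Roughly, mulx multiplies the implicit rdx (edx) operand by its explicit
+;; source and writes the low and high halves of the full product to two
+;; destination registers, leaving EFLAGS untouched.  An illustrative C
+;; sketch of the 64-bit form (assumes unsigned __int128 support; not taken
+;; from GCC sources):
+;;
+;;   #include <stdint.h>
+;;   uint64_t mulx64 (uint64_t a, uint64_t b, uint64_t *hi)
+;;   {
+;;     unsigned __int128 p = (unsigned __int128) a * b;
+;;     *hi = (uint64_t) (p >> 64);   /* second destination (%1)  */
+;;     return (uint64_t) p;          /* first destination  (%0)  */
+;;   }
+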
+(define_insn "*umul<mode><dwi>3_1"
+ [(set (match_operand:<DWI> 0 "register_operand" "=A,r")
+ (mult:<DWI>
+ (zero_extend:<DWI>
+ (match_operand:DWIH 1 "nonimmediate_operand" "%0,d"))
+ (zero_extend:<DWI>
+ (match_operand:DWIH 2 "nonimmediate_operand" "rm,rm"))))
+ (clobber (reg:CC FLAGS_REG))]
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
+ "@
+ mul{<imodesuffix>}\t%2
+ #"
+ [(set_attr "isa" "base,bmi2")
+ (set_attr "type" "imul,imulx")
+ (set_attr "length_immediate" "0,*")
+ (set (attr "athlon_decode")
+ (cond [(eq_attr "alternative" "0")
+ (if_then_else (eq_attr "cpu" "athlon")
+ (const_string "vector")
+ (const_string "double"))]
+ (const_string "*")))
+ (set_attr "amdfam10_decode" "double,*")
+ (set_attr "bdver1_decode" "direct,*")
+ (set_attr "prefix" "orig,vex")
+ (set_attr "mode" "<MODE>")])
+
+;; Convert mul to the mulx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:<DWI> 0 "register_operand" "")
+ (mult:<DWI>
+ (zero_extend:<DWI>
+ (match_operand:DWIH 1 "register_operand" ""))
+ (zero_extend:<DWI>
+ (match_operand:DWIH 2 "nonimmediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed
+ && true_regnum (operands[1]) == DX_REG"
+ [(parallel [(set (match_dup 3)
+ (mult:DWIH (match_dup 1) (match_dup 2)))
+ (set (match_dup 4)
+ (truncate:DWIH
+ (lshiftrt:<DWI>
+ (mult:<DWI> (zero_extend:<DWI> (match_dup 1))
+ (zero_extend:<DWI> (match_dup 2)))
+ (match_dup 5))))])]
+{
+ split_double_mode (<DWI>mode, &operands[0], 1, &operands[3], &operands[4]);
+
+ operands[5] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode));
+})
+
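+;; The one-operand mul writes rdx:rax and clobbers EFLAGS, so it cannot be
+;; scheduled into a live carry (adc) chain; mulx computes the product
+;; without touching EFLAGS.  Schematically (AT&T syntax, register choices
+;; illustrative only):
+;;
+;;   mul  %rcx                ;; rdx:rax = rax * rcx, EFLAGS clobbered
+;;   mulx %rcx, %rax, %rbx    ;; rbx:rax = rdx * rcx, EFLAGS preserved
+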
+(define_insn "*mul<mode><dwi>3_1"
[(set (match_operand:<DWI> 0 "register_operand" "=A")
(mult:<DWI>
- (any_extend:<DWI>
+ (sign_extend:<DWI>
(match_operand:DWIH 1 "nonimmediate_operand" "%0"))
- (any_extend:<DWI>
+ (sign_extend:<DWI>
(match_operand:DWIH 2 "nonimmediate_operand" "rm"))))
(clobber (reg:CC FLAGS_REG))]
"!(MEM_P (operands[1]) && MEM_P (operands[2]))"
- "<sgnprefix>mul{<imodesuffix>}\t%2"
+ "imul{<imodesuffix>}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
(set (attr "athlon_decode")
[(set_attr "type" "ishift")
(set_attr "mode" "<MODE>")])
+(define_insn "*bmi2_ashl<mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")))]
+ "TARGET_BMI2"
+ "shlx\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "<MODE>")])
+
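+;; shlx takes the shift count in a register, masks it to the operand width
+;; and does not write EFLAGS.  An illustrative C sketch of the 64-bit form
+;; (assumes <stdint.h>; not taken from GCC sources):
+;;
+;;   uint64_t shlx64 (uint64_t x, uint64_t n)
+;;   {
+;;     return x << (n & 63);   /* count masked, EFLAGS untouched */
+;;   }
+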
(define_insn "*ashl<mode>3_1"
- [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
- (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "0,l")
- (match_operand:QI 2 "nonmemory_operand" "c<S>,M")))
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r,r")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "0,l,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,M,r")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (ASHIFT, <MODE>mode, operands)"
{
switch (get_attr_type (insn))
{
case TYPE_LEA:
+ case TYPE_ISHIFTX:
return "#";
case TYPE_ALU:
return "sal{<imodesuffix>}\t{%2, %0|%0, %2}";
}
}
- [(set (attr "type")
+ [(set_attr "isa" "base,base,bmi2")
+ (set (attr "type")
(cond [(eq_attr "alternative" "1")
(const_string "lea")
+ (eq_attr "alternative" "2")
+ (const_string "ishiftx")
(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
(const_int 0))
(match_operand 0 "register_operand" ""))
(const_string "*")))
(set_attr "mode" "<MODE>")])
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (ashift:SWI48 (match_dup 1) (match_dup 2)))]
+ "operands[2] = gen_lowpart (<MODE>mode, operands[2]);")
+
+(define_insn "*bmi2_ashlsi3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:SI 2 "register_operand" "r"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "shlx\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "SI")])
+
(define_insn "*ashlsi3_1_zext"
- [(set (match_operand:DI 0 "register_operand" "=r,r")
+ [(set (match_operand:DI 0 "register_operand" "=r,r,r")
(zero_extend:DI
- (ashift:SI (match_operand:SI 1 "register_operand" "0,l")
- (match_operand:QI 2 "nonmemory_operand" "cI,M"))))
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "0,l,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,M,r"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (ASHIFT, SImode, operands)"
{
switch (get_attr_type (insn))
{
case TYPE_LEA:
+ case TYPE_ISHIFTX:
return "#";
case TYPE_ALU:
return "sal{l}\t{%2, %k0|%k0, %2}";
}
}
- [(set (attr "type")
+ [(set_attr "isa" "base,base,bmi2")
+ (set (attr "type")
(cond [(eq_attr "alternative" "1")
(const_string "lea")
+ (eq_attr "alternative" "2")
+ (const_string "ishiftx")
(and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
(const_int 0))
(match_operand 2 "const1_operand" ""))
(const_string "*")))
(set_attr "mode" "SI")])
-(define_insn "*ashlhi3_1"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm")
- (ashift:HI (match_operand:HI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI")))
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" ""))))
(clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, HImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_ALU:
- gcc_assert (operands[2] == const1_rtx);
- return "add{w}\t%0, %0";
-
- default:
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "sal{w}\t%0";
- else
- return "sal{w}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
- (match_operand 0 "register_operand" ""))
- (match_operand 2 "const1_operand" ""))
- (const_string "alu")
- ]
- (const_string "ishift")))
- (set (attr "length_immediate")
- (if_then_else
- (ior (eq_attr "type" "alu")
- (and (eq_attr "type" "ishift")
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
- (const_string "0")
- (const_string "*")))
- (set_attr "mode" "HI")])
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (ashift:SI (match_dup 1) (match_dup 2))))]
+ "operands[2] = gen_lowpart (SImode, operands[2]);")
-(define_insn "*ashlhi3_1_lea"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r")
+(define_insn "*ashlhi3_1"
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,Yp")
(ashift:HI (match_operand:HI 1 "nonimmediate_operand" "0,l")
(match_operand:QI 2 "nonmemory_operand" "cI,M")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, HImode, operands)"
+ "ix86_binary_operator_ok (ASHIFT, HImode, operands)"
{
switch (get_attr_type (insn))
{
(const_string "*")))
(set_attr "mode" "HI,SI")])
+;; %%% Potential partial reg stall on alternative 1. What to do?
(define_insn "*ashlqi3_1"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r")
- (ashift:QI (match_operand:QI 1 "nonimmediate_operand" "0,0")
- (match_operand:QI 2 "nonmemory_operand" "cI,cI")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, QImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_ALU:
- gcc_assert (operands[2] == const1_rtx);
- if (REG_P (operands[1]) && !ANY_QI_REG_P (operands[1]))
- return "add{l}\t%k0, %k0";
- else
- return "add{b}\t%0, %0";
-
- default:
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- {
- if (get_attr_mode (insn) == MODE_SI)
- return "sal{l}\t%k0";
- else
- return "sal{b}\t%0";
- }
- else
- {
- if (get_attr_mode (insn) == MODE_SI)
- return "sal{l}\t{%2, %k0|%k0, %2}";
- else
- return "sal{b}\t{%2, %0|%0, %2}";
- }
- }
-}
- [(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
- (match_operand 0 "register_operand" ""))
- (match_operand 2 "const1_operand" ""))
- (const_string "alu")
- ]
- (const_string "ishift")))
- (set (attr "length_immediate")
- (if_then_else
- (ior (eq_attr "type" "alu")
- (and (eq_attr "type" "ishift")
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
- (const_string "0")
- (const_string "*")))
- (set_attr "mode" "QI,SI")])
-
-;; %%% Potential partial reg stall on alternative 2. What to do?
-(define_insn "*ashlqi3_1_lea"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r,r")
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r,Yp")
(ashift:QI (match_operand:QI 1 "nonimmediate_operand" "0,0,l")
(match_operand:QI 2 "nonmemory_operand" "cI,cI,M")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, QImode, operands)"
+ "ix86_binary_operator_ok (ASHIFT, QImode, operands)"
{
switch (get_attr_type (insn))
{
DONE;
})
+(define_insn "*bmi2_<shiftrt_insn><mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (any_shiftrt:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")))]
+ "TARGET_BMI2"
+ "<shiftrt>x\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "<MODE>")])
+
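+;; shrx and sarx are the logical and arithmetic right-shift counterparts
+;; of shlx: register count, count masked to the operand width, EFLAGS not
+;; written.  An illustrative C sketch (assumes <stdint.h> and an arithmetic
+;; >> on signed types; not taken from GCC sources):
+;;
+;;   uint64_t shrx64 (uint64_t x, uint64_t n) { return x >> (n & 63); }
+;;   int64_t  sarx64 (int64_t  x, uint64_t n) { return x >> (n & 63); }
+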
(define_insn "*<shiftrt_insn><mode>3_1"
- [(set (match_operand:SWI 0 "nonimmediate_operand" "=<r>m")
- (any_shiftrt:SWI (match_operand:SWI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
+ (any_shiftrt:SWI48
+ (match_operand:SWI48 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,r")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{<imodesuffix>}\t%0";
- else
- return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ISHIFTX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<shiftrt>{<imodesuffix>}\t%0";
+ else
+ return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ }
}
- [(set_attr "type" "ishift")
+ [(set_attr "isa" "base,bmi2")
+ (set_attr "type" "ishift,ishiftx")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
(const_string "*")))
(set_attr "mode" "<MODE>")])
-(define_insn "*<shiftrt_insn>si3_1_zext"
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (any_shiftrt:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (any_shiftrt:SWI48 (match_dup 1) (match_dup 2)))]
+ "operands[2] = gen_lowpart (<MODE>mode, operands[2]);")
+
+(define_insn "*bmi2_<shiftrt_insn>si3_1_zext"
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
- (any_shiftrt:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI"))))
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:SI 2 "register_operand" "r"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "<shiftrt>x\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "SI")])
+
+(define_insn "*<shiftrt_insn>si3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
+ (zero_extend:DI
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,r"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (<CODE>, SImode, operands)"
{
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ISHIFTX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<shiftrt>{l}\t%k0";
+ else
+ return "<shiftrt>{l}\t{%2, %k0|%k0, %2}";
+ }
+}
+ [(set_attr "isa" "base,bmi2")
+ (set_attr "type" "ishift,ishiftx")
+ (set (attr "length_immediate")
+ (if_then_else
+ (and (match_operand 2 "const1_operand" "")
+ (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
+ (const_int 0)))
+ (const_string "0")
+ (const_string "*")))
+ (set_attr "mode" "SI")])
+
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (any_shiftrt:SI (match_dup 1) (match_dup 2))))]
+ "operands[2] = gen_lowpart (SImode, operands[2]);")
+
+(define_insn "*<shiftrt_insn><mode>3_1"
+ [(set (match_operand:SWI12 0 "nonimmediate_operand" "=<r>m")
+ (any_shiftrt:SWI12
+ (match_operand:SWI12 1 "nonimmediate_operand" "0")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ (clobber (reg:CC FLAGS_REG))]
+ "ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
+{
if (operands[2] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{l}\t%k0";
+ return "<shiftrt>{<imodesuffix>}\t%0";
else
- return "<shiftrt>{l}\t{%2, %k0|%k0, %2}";
+ return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "ishift")
(set (attr "length_immediate")
(const_int 0)))
(const_string "0")
(const_string "*")))
- (set_attr "mode" "SI")])
+ (set_attr "mode" "<MODE>")])
(define_insn "*<shiftrt_insn>qi3_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm"))
split_double_mode (<DWI>mode, &operands[0], 1, &operands[4], &operands[5]);
})
+(define_insn "*bmi2_rorx<mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (rotatert:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 2 "immediate_operand" "<S>")))]
+ "TARGET_BMI2"
+ "rorx\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "rotatex")
+ (set_attr "mode" "<MODE>")])
+
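+;; rorx rotates right by an immediate count without writing EFLAGS.  An
+;; illustrative C sketch of the 64-bit form (assumes <stdint.h>; not taken
+;; from GCC sources):
+;;
+;;   uint64_t rorx64 (uint64_t x, unsigned n)
+;;   {
+;;     n &= 63;
+;;     return n ? (x >> n) | (x << (64 - n)) : x;
+;;   }
+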
(define_insn "*<rotate_insn><mode>3_1"
- [(set (match_operand:SWI 0 "nonimmediate_operand" "=<r>m")
- (any_rotate:SWI (match_operand:SWI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
+ (any_rotate:SWI48
+ (match_operand:SWI48 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,<S>")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<rotate>{<imodesuffix>}\t%0";
- else
- return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ROTATEX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{<imodesuffix>}\t%0";
+ else
+ return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ }
}
- [(set_attr "type" "rotate")
+ [(set_attr "isa" "base,bmi2")
+ (set_attr "type" "rotate,rotatex")
(set (attr "length_immediate")
(if_then_else
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (and (eq_attr "type" "rotate")
+ (and (match_operand 2 "const1_operand" "")
+ (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
+ (const_int 0))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
-(define_insn "*<rotate_insn>si3_1_zext"
+;; Convert rotate to the rotatex pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (rotate:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (rotatert:SWI48 (match_dup 1) (match_dup 2)))]
+{
+ operands[2]
+ = GEN_INT (GET_MODE_BITSIZE (<MODE>mode) - INTVAL (operands[2]));
+})
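+
+;; rorx only encodes a right rotate, so the split above rewrites a left
+;; rotate using the identity rol (x, n) == ror (x, w - n) for operand
+;; width w; e.g. for w == 64, rol (x, 13) == ror (x, 51).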
+
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (rotatert:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (rotatert:SWI48 (match_dup 1) (match_dup 2)))])
+
+(define_insn "*bmi2_rorxsi3_1_zext"
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
- (any_rotate:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI"))))
+ (rotatert:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 2 "immediate_operand" "I"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "rorx\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "rotatex")
+ (set_attr "mode" "SI")])
+
+(define_insn "*<rotate_insn>si3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
+ (zero_extend:DI
+ (any_rotate:SI (match_operand:SI 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,I"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (<CODE>, SImode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<rotate>{l}\t%k0";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ROTATEX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{l}\t%k0";
+ else
+ return "<rotate>{l}\t{%2, %k0|%k0, %2}";
+ }
+}
+ [(set_attr "isa" "base,bmi2")
+ (set_attr "type" "rotate,rotatex")
+ (set (attr "length_immediate")
+ (if_then_else
+ (and (eq_attr "type" "rotate")
+ (and (match_operand 2 "const1_operand" "")
+ (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
+ (const_int 0))))
+ (const_string "0")
+ (const_string "*")))
+ (set_attr "mode" "SI")])
+
+;; Convert rotate to the rotatex pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (rotate:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (rotatert:SI (match_dup 1) (match_dup 2))))]
+{
+ operands[2]
+ = GEN_INT (GET_MODE_BITSIZE (SImode) - INTVAL (operands[2]));
+})
+
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (rotatert:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (rotatert:SI (match_dup 1) (match_dup 2))))])
+
+(define_insn "*<rotate_insn><mode>3_1"
+ [(set (match_operand:SWI12 0 "nonimmediate_operand" "=<r>m")
+ (any_rotate:SWI12 (match_operand:SWI12 1 "nonimmediate_operand" "0")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ (clobber (reg:CC FLAGS_REG))]
+ "ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
+{
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{<imodesuffix>}\t%0";
else
- return "<rotate>{l}\t{%2, %k0|%k0, %2}";
+ return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "rotate")
(set (attr "length_immediate")
(const_int 0)))
(const_string "0")
(const_string "*")))
- (set_attr "mode" "SI")])
+ (set_attr "mode" "<MODE>")])
(define_insn "*<rotate_insn>qi3_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm"))
[(set_attr "type" "bitmanip")
(set_attr "mode" "<MODE>")])
+;; BMI2 instructions.
+(define_insn "bmi2_bzhi_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (and:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (lshiftrt:SWI48 (const_int -1)
+ (match_operand:SWI48 2 "register_operand" "r"))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2"
+ "bzhi\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
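+;; bzhi copies the first source and zeroes every bit at position n and
+;; above, where n is the low byte of the second source.  An illustrative C
+;; sketch of the 64-bit form (assumes <stdint.h>; not taken from GCC
+;; sources):
+;;
+;;   uint64_t bzhi64 (uint64_t x, uint64_t n)
+;;   {
+;;     n &= 0xff;
+;;     return n < 64 ? x & ((1ULL << n) - 1) : x;
+;;   }
+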
+(define_insn "bmi2_pdep_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (unspec:SWI48 [(match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")]
+ UNSPEC_PDEP))]
+ "TARGET_BMI2"
+ "pdep\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
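+;; pdep scatters the low bits of the first source to the bit positions
+;; selected by the mask in the second source.  An illustrative C sketch of
+;; the 64-bit form (assumes <stdint.h>; not taken from GCC sources):
+;;
+;;   uint64_t pdep64 (uint64_t src, uint64_t mask)
+;;   {
+;;     uint64_t dst = 0;
+;;     for (int i = 0, k = 0; i < 64; i++)
+;;       if (mask >> i & 1)
+;;         dst |= (src >> k++ & 1ULL) << i;
+;;     return dst;
+;;   }
+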
+(define_insn "bmi2_pext_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (unspec:SWI48 [(match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")]
+ UNSPEC_PEXT))]
+ "TARGET_BMI2"
+ "pext\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
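+;; pext is the inverse gather: the bits of the first source selected by
+;; the mask are packed into the low bits of the result.  An illustrative C
+;; sketch of the 64-bit form (assumes <stdint.h>; not taken from GCC
+;; sources):
+;;
+;;   uint64_t pext64 (uint64_t src, uint64_t mask)
+;;   {
+;;     uint64_t dst = 0;
+;;     for (int i = 0, k = 0; i < 64; i++)
+;;       if (mask >> i & 1)
+;;         dst |= (src >> i & 1ULL) << k++;
+;;     return dst;
+;;   }
+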
;; TBM instructions.
(define_insn "tbm_bextri_<mode>"
[(set (match_operand:SWI48 0 "register_operand" "=r")
"xor{b}\t{%h0, %b0|%b0, %h0}"
[(set_attr "length" "2")
(set_attr "mode" "HI")])
+
\f
;; Thread-local storage patterns for ELF.
;;