;; GCC machine description for IA-32 and x86-64.
;; Copyright (C) 1988, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
-;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
+;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
;; Free Software Foundation, Inc.
;; Mostly by William Schelter.
;; x86_64 support added by Jan Hubicka
;; d -- print duplicated register operand for AVX instruction.
;; D -- print condition for SSE cmp instruction.
;; P -- if PIC, print an @PLT suffix.
+;; p -- print raw symbol name.
;; X -- don't print any sort of PIC '@' suffix for a symbol.
;; & -- print some in-use local-dynamic symbol name.
;; H -- print a memory address offset by 8; used for sse high-parts
;; ; -- print a semicolon (after prefixes due to bug in older gas).
;; @ -- print a segment register of thread base pointer load
-;; UNSPEC usage:
-
+;; UNSPEC codes: annotations for otherwise-opaque RTL operations.
+;; NOTE(review): the deleted vector/fence/3DNow!/AES/AVX entries appear
+;; to be relocated to the SIMD machine descriptions (sse.md/mmx.md) by
+;; this patch -- confirm against the rest of the change.
(define_c_enum "unspec" [
;; Relocation specifiers
UNSPEC_GOT
;; Prologue support
UNSPEC_STACK_ALLOC
UNSPEC_SET_GOT
- UNSPEC_REG_SAVE
- UNSPEC_DEF_CFA
UNSPEC_SET_RIP
UNSPEC_SET_GOT_OFFSET
UNSPEC_MEMORY_BLOCKAGE
UNSPEC_LD_MPIC ; load_macho_picbase
UNSPEC_TRUNC_NOOP
UNSPEC_DIV_ALREADY_SPLIT
+ UNSPEC_MS_TO_SYSV_CALL
UNSPEC_CALL_NEEDS_VZEROUPPER
+ UNSPEC_PAUSE
;; For SSE/MMX support:
UNSPEC_FIX_NOTRUNC
UNSPEC_MASKMOV
UNSPEC_MOVMSK
- UNSPEC_MOVNT
- UNSPEC_MOVU
UNSPEC_RCP
UNSPEC_RSQRT
- UNSPEC_SFENCE
- UNSPEC_PFRCP
- UNSPEC_PFRCPIT1
- UNSPEC_PFRCPIT2
- UNSPEC_PFRSQRT
- UNSPEC_PFRSQIT1
- UNSPEC_MFENCE
- UNSPEC_LFENCE
UNSPEC_PSADBW
- UNSPEC_LDDQU
- UNSPEC_MS_TO_SYSV_CALL
;; Generic math support
UNSPEC_COPYSIGN
UNSPEC_SP_TLS_SET
UNSPEC_SP_TLS_TEST
- ;; SSSE3
- UNSPEC_PSHUFB
- UNSPEC_PSIGN
- UNSPEC_PALIGNR
-
- ;; For SSE4A support
- UNSPEC_EXTRQI
- UNSPEC_EXTRQ
- UNSPEC_INSERTQI
- UNSPEC_INSERTQ
-
- ;; For SSE4.1 support
- UNSPEC_BLENDV
- UNSPEC_INSERTPS
- UNSPEC_DP
- UNSPEC_MOVNTDQA
- UNSPEC_MPSADBW
- UNSPEC_PHMINPOSUW
- UNSPEC_PTEST
+ ;; For ROUND support
UNSPEC_ROUND
- ;; For SSE4.2 support
+ ;; For CRC32 support
UNSPEC_CRC32
- UNSPEC_PCMPESTR
- UNSPEC_PCMPISTR
-
- ;; For FMA4 support
- UNSPEC_FMADDSUB
- UNSPEC_XOP_UNSIGNED_CMP
- UNSPEC_XOP_TRUEFALSE
- UNSPEC_XOP_PERMUTE
- UNSPEC_FRCZ
-
- ;; For AES support
- UNSPEC_AESENC
- UNSPEC_AESENCLAST
- UNSPEC_AESDEC
- UNSPEC_AESDECLAST
- UNSPEC_AESIMC
- UNSPEC_AESKEYGENASSIST
-
- ;; For PCLMUL support
- UNSPEC_PCLMUL
-
- ;; For AVX support
- UNSPEC_PCMP
- UNSPEC_VPERMIL
- UNSPEC_VPERMIL2
- UNSPEC_VPERMIL2F128
- UNSPEC_CAST
- UNSPEC_VTESTP
- UNSPEC_VCVTPH2PS
- UNSPEC_VCVTPS2PH
;; For BMI support
UNSPEC_BEXTR
- ;; For RDRAND support
- UNSPEC_RDRAND
+ ;; For BMI2 support
+ UNSPEC_PDEP
+ UNSPEC_PEXT
])
+;; UNSPECV codes: volatile unspecs -- operations with side effects that
+;; the optimizers must not delete, duplicate, or reorder.
+;; NOTE(review): the removed MXCSR/fence/sync/monitor entries presumably
+;; move to other .md files in this same patch; RDRAND moves here from the
+;; plain unspec enum (it is non-deterministic, hence volatile) -- confirm.
(define_c_enum "unspecv" [
UNSPECV_BLOCKAGE
UNSPECV_STACK_PROBE
UNSPECV_PROBE_STACK_RANGE
- UNSPECV_EMMS
- UNSPECV_LDMXCSR
- UNSPECV_STMXCSR
- UNSPECV_FEMMS
- UNSPECV_CLFLUSH
UNSPECV_ALIGN
- UNSPECV_MONITOR
- UNSPECV_MWAIT
- UNSPECV_CMPXCHG
- UNSPECV_XCHG
- UNSPECV_LOCK
UNSPECV_PROLOGUE_USE
+ UNSPECV_SPLIT_STACK_RETURN
UNSPECV_CLD
UNSPECV_NOPS
- UNSPECV_VZEROALL
- UNSPECV_VZEROUPPER
UNSPECV_RDTSC
UNSPECV_RDTSCP
UNSPECV_RDPMC
UNSPECV_RDGSBASE
UNSPECV_WRFSBASE
UNSPECV_WRGSBASE
- UNSPECV_SPLIT_STACK_RETURN
+
+ ;; For RDRAND support
+ UNSPECV_RDRAND
])
;; Constants to represent rounding modes in the ROUND instruction
\f
;; Processor type.
+;; Scheduling model in effect, selected by -mtune via ix86_schedule;
+;; this revision adds the AMD bdver2 (second-generation Bulldozer) entry.
(define_attr "cpu" "none,pentium,pentiumpro,geode,k6,athlon,k8,core2,corei7,
- atom,generic64,amdfam10,bdver1,btver1"
+ atom,generic64,amdfam10,bdver1,bdver2,btver1"
(const (symbol_ref "ix86_schedule")))
;; A basic instruction type. Refinements due to arguments to be
(define_attr "type"
"other,multi,
alu,alu1,negnot,imov,imovx,lea,
- incdec,ishift,ishift1,rotate,rotate1,imul,idiv,
+ incdec,ishift,ishiftx,ishift1,rotate,rotatex,rotate1,imul,imulx,idiv,
icmp,test,ibr,setcc,icmov,
push,pop,call,callv,leave,
str,bitmanip,
;; The (bounding maximum) length of an instruction immediate.
(define_attr "length_immediate" ""
(cond [(eq_attr "type" "incdec,setcc,icmov,str,lea,other,multi,idiv,leave,
- bitmanip")
+ bitmanip,imulx")
(const_int 0)
(eq_attr "unit" "i387,sse,mmx")
(const_int 0)
- (eq_attr "type" "alu,alu1,negnot,imovx,ishift,rotate,ishift1,rotate1,
- imul,icmp,push,pop")
- (symbol_ref "ix86_attr_length_immediate_default(insn,1)")
+ (eq_attr "type" "alu,alu1,negnot,imovx,ishift,ishiftx,ishift1,
+ rotate,rotatex,rotate1,imul,icmp,push,pop")
+ (symbol_ref "ix86_attr_length_immediate_default (insn, true)")
(eq_attr "type" "imov,test")
- (symbol_ref "ix86_attr_length_immediate_default(insn,0)")
+ (symbol_ref "ix86_attr_length_immediate_default (insn, false)")
(eq_attr "type" "call")
(if_then_else (match_operand 0 "constant_call_address_operand" "")
(const_int 4)
;; Set when REX opcode prefix is used.
(define_attr "prefix_rex" ""
- (cond [(eq (symbol_ref "TARGET_64BIT") (const_int 0))
+ (cond [(not (match_test "TARGET_64BIT"))
(const_int 0)
(and (eq_attr "mode" "DI")
(and (eq_attr "type" "!push,pop,call,callv,leave,ibr")
(eq_attr "unit" "!mmx")))
(const_int 1)
(and (eq_attr "mode" "QI")
- (ne (symbol_ref "x86_extended_QIreg_mentioned_p (insn)")
- (const_int 0)))
+ (match_test "x86_extended_QIreg_mentioned_p (insn)"))
(const_int 1)
- (ne (symbol_ref "x86_extended_reg_mentioned_p (insn)")
- (const_int 0))
+ (match_test "x86_extended_reg_mentioned_p (insn)")
(const_int 1)
(and (eq_attr "type" "imovx")
(match_operand:QI 1 "ext_QIreg_operand" ""))
(if_then_else (and (eq_attr "prefix_0f" "1")
(eq_attr "prefix_extra" "0"))
(if_then_else (eq_attr "prefix_vex_w" "1")
- (symbol_ref "ix86_attr_length_vex_default (insn, 1, 1)")
- (symbol_ref "ix86_attr_length_vex_default (insn, 1, 0)"))
+ (symbol_ref "ix86_attr_length_vex_default (insn, true, true)")
+ (symbol_ref "ix86_attr_length_vex_default (insn, true, false)"))
(if_then_else (eq_attr "prefix_vex_w" "1")
- (symbol_ref "ix86_attr_length_vex_default (insn, 0, 1)")
- (symbol_ref "ix86_attr_length_vex_default (insn, 0, 0)"))))
+ (symbol_ref "ix86_attr_length_vex_default (insn, false, true)")
+ (symbol_ref "ix86_attr_length_vex_default (insn, false, false)"))))
;; Set when modrm byte is used.
(define_attr "modrm" ""
(eq_attr "unit" "i387")
(const_int 0)
(and (eq_attr "type" "incdec")
- (and (eq (symbol_ref "TARGET_64BIT") (const_int 0))
+ (and (not (match_test "TARGET_64BIT"))
(ior (match_operand:SI 1 "register_operand" "")
(match_operand:HI 1 "register_operand" ""))))
(const_int 0)
(attr "length_address")))
(ior (eq_attr "prefix" "vex")
(and (eq_attr "prefix" "maybe_vex")
- (ne (symbol_ref "TARGET_AVX") (const_int 0))))
+ (match_test "TARGET_AVX")))
(plus (attr "length_vex")
(plus (attr "length_immediate")
(plus (attr "modrm")
(and (match_operand 0 "memory_displacement_operand" "")
(match_operand 1 "immediate_operand" "")))
(const_string "true")
- (and (eq_attr "type" "alu,ishift,rotate,imul,idiv")
+ (and (eq_attr "type" "alu,ishift,ishiftx,rotate,rotatex,imul,idiv")
(and (match_operand 0 "memory_displacement_operand" "")
(match_operand 2 "immediate_operand" "")))
(const_string "true")
(define_attr "movu" "0,1" (const_string "0"))
;; Used to control the "enabled" attribute on a per-instruction basis.
-(define_attr "isa" "base,noavx,avx"
+(define_attr "isa" "base,sse2,sse2_noavx,sse3,sse4,sse4_noavx,noavx,avx,bmi2"
(const_string "base"))
+;; Map each "isa" value to the target-flag test that enables the matching
+;; instruction alternatives; an alternative whose requirement is not met is
+;; disabled.  The cond's fall-through value 1 means "base" alternatives are
+;; always enabled.
(define_attr "enabled" ""
- (cond [(eq_attr "isa" "noavx") (symbol_ref "!TARGET_AVX")
+ (cond [(eq_attr "isa" "sse2") (symbol_ref "TARGET_SSE2")
+ (eq_attr "isa" "sse2_noavx")
+ (symbol_ref "TARGET_SSE2 && !TARGET_AVX")
+ (eq_attr "isa" "sse3") (symbol_ref "TARGET_SSE3")
+ (eq_attr "isa" "sse4") (symbol_ref "TARGET_SSE4_1")
+ (eq_attr "isa" "sse4_noavx")
+ (symbol_ref "TARGET_SSE4_1 && !TARGET_AVX")
(eq_attr "isa" "avx") (symbol_ref "TARGET_AVX")
+ (eq_attr "isa" "noavx") (symbol_ref "!TARGET_AVX")
+ (eq_attr "isa" "bmi2") (symbol_ref "TARGET_BMI2")
]
(const_int 1)))
(define_code_attr comm [(plus "%") (ss_plus "%") (us_plus "%")
(minus "") (ss_minus "") (us_minus "")])
+;; Mapping of max and min
+(define_code_iterator maxmin [smax smin umax umin])
+
;; Mapping of signed max and min
(define_code_iterator smaxmin [smax smin])
;; Base name for insn mnemonic.
(define_code_attr logic [(and "and") (ior "or") (xor "xor")])
+;; Mapping of logic-shift operators
+(define_code_iterator any_lshift [ashift lshiftrt])
+
;; Mapping of shift-right operators
(define_code_iterator any_shiftrt [lshiftrt ashiftrt])
;; Base name for define_insn
+;; (shiftrt_insn is renamed shift_insn and extended to cover ashift).
-(define_code_attr shiftrt_insn [(lshiftrt "lshr") (ashiftrt "ashr")])
+(define_code_attr shift_insn
+ [(ashift "ashl") (lshiftrt "lshr") (ashiftrt "ashr")])
;; Base name for insn mnemonic.
-(define_code_attr shiftrt [(lshiftrt "shr") (ashiftrt "sar")])
+(define_code_attr shift [(ashift "sll") (lshiftrt "shr") (ashiftrt "sar")])
+;; NOTE(review): vshift supplies sll/srl/sra; per its name it feeds vector
+;; shift mnemonics -- confirm at use sites.
+(define_code_attr vshift [(ashift "sll") (lshiftrt "srl") (ashiftrt "sra")])
;; Mapping of rotate operators
(define_code_iterator any_rotate [rotate rotatert])
;; Used in signed and unsigned widening multiplications.
(define_code_iterator any_extend [sign_extend zero_extend])
+;; NOTE(review): any_div and the div/udiv entries of u/s/sgnprefix are
+;; dropped here -- confirm every former user is updated elsewhere in the
+;; patch.
-;; Various insn prefixes for signed and unsigned operations.
-(define_code_attr u [(sign_extend "") (zero_extend "u")
- (div "") (udiv "u")])
-(define_code_attr s [(sign_extend "s") (zero_extend "u")])
-
-;; Used in signed and unsigned divisions.
-(define_code_iterator any_div [div udiv])
+;; Prefix for insn mnemonic.
+(define_code_attr sgnprefix [(sign_extend "i") (zero_extend "")])
-;; Instruction prefix for signed and unsigned operations.
-(define_code_attr sgnprefix [(sign_extend "i") (zero_extend "")
- (div "i") (udiv "")])
+;; Prefix for define_insn
+(define_code_attr u [(sign_extend "") (zero_extend "u")])
+(define_code_attr s [(sign_extend "s") (zero_extend "u")])
+;; Integer mode iterators.  The unconditional x-suffixed variants always
+;; include DImode; the plain SWI* variants gate DImode on TARGET_64BIT.
-;; 64bit single word integer modes.
+;; All integer modes.
(define_mode_iterator SWI1248x [QI HI SI DI])
-;; 64bit single word integer modes without QImode and HImode.
-(define_mode_iterator SWI48x [SI DI])
+;; All integer modes without QImode.
+(define_mode_iterator SWI248x [HI SI DI])
-;; Single word integer modes.
-(define_mode_iterator SWI [QI HI SI (DI "TARGET_64BIT")])
+;; All integer modes without QImode and HImode.
+(define_mode_iterator SWI48x [SI DI])
-;; Single word integer modes without SImode and DImode.
+;; All integer modes without SImode and DImode.
(define_mode_iterator SWI12 [QI HI])
-;; Single word integer modes without DImode.
+;; All integer modes without DImode.
(define_mode_iterator SWI124 [QI HI SI])
-;; Single word integer modes without QImode and DImode.
+;; All integer modes without QImode and DImode.
(define_mode_iterator SWI24 [HI SI])
+;; Single word integer modes.
+(define_mode_iterator SWI [QI HI SI (DI "TARGET_64BIT")])
+
;; Single word integer modes without QImode.
(define_mode_iterator SWI248 [HI SI (DI "TARGET_64BIT")])
(HI "TARGET_HIMODE_MATH")
SI (DI "TARGET_64BIT")])
-;; Math-dependant single word integer modes without DImode.
+;; Math-dependent integer modes without DImode.
(define_mode_iterator SWIM124 [(QI "TARGET_QIMODE_MATH")
(HI "TARGET_HIMODE_MATH")
SI])
(define_mode_attr r [(QI "q") (HI "r") (SI "r") (DI "r")])
;; Immediate operand constraint for integer modes.
+;; NOTE(review): the SI entries below switch from the generic
+;; constraints/predicates ("i"/"g"/general_operand) to the x86_64_*
+;; variants, matching the DImode rows -- presumably so SImode pointers
+;; are handled uniformly (x32 support); confirm against the full patch.
-(define_mode_attr i [(QI "n") (HI "n") (SI "i") (DI "e")])
+(define_mode_attr i [(QI "n") (HI "n") (SI "e") (DI "e")])
;; General operand constraint for word modes.
-(define_mode_attr g [(QI "qmn") (HI "rmn") (SI "g") (DI "rme")])
+(define_mode_attr g [(QI "qmn") (HI "rmn") (SI "rme") (DI "rme")])
;; Immediate operand constraint for double integer modes.
-(define_mode_attr di [(SI "iF") (DI "e")])
+(define_mode_attr di [(SI "nF") (DI "e")])
;; Immediate operand constraint for shifts.
(define_mode_attr S [(QI "I") (HI "I") (SI "I") (DI "J") (TI "O")])
(define_mode_attr general_operand
[(QI "general_operand")
(HI "general_operand")
- (SI "general_operand")
+ (SI "x86_64_general_operand")
(DI "x86_64_general_operand")
(TI "x86_64_general_operand")])
(define_mode_attr general_szext_operand
[(QI "general_operand")
(HI "general_operand")
- (SI "general_operand")
+ (SI "x86_64_szext_general_operand")
(DI "x86_64_szext_general_operand")])
;; Immediate operand predicate for integer modes.
(define_mode_attr immediate_operand
[(QI "immediate_operand")
(HI "immediate_operand")
- (SI "immediate_operand")
+ (SI "x86_64_immediate_operand")
(DI "x86_64_immediate_operand")])
;; Nonmemory operand predicate for integer modes.
(define_mode_attr nonmemory_operand
[(QI "nonmemory_operand")
(HI "nonmemory_operand")
- (SI "nonmemory_operand")
+ (SI "x86_64_nonmemory_operand")
(DI "x86_64_nonmemory_operand")])
;; Operand predicate for shifts.
;; All x87 floating point modes
(define_mode_iterator X87MODEF [SF DF XF])
+;; NOTE(review): X87MODEI12 users now use SWI24 (see the ficom patterns
+;; later in this file); X87MODEI and SSEMODEI24 presumably map to
+;; SWI248x/SWI48x at their former use sites -- confirm.
-;; All integer modes handled by x87 fisttp operator.
-(define_mode_iterator X87MODEI [HI SI DI])
-
-;; All integer modes handled by integer x87 operators.
-(define_mode_iterator X87MODEI12 [HI SI])
-
-;; All integer modes handled by SSE cvtts?2si* operators.
-(define_mode_iterator SSEMODEI24 [SI DI])
-
;; SSE instruction suffix for various modes
+;; (extended with the 256-bit AVX integer vector modes).
(define_mode_attr ssemodesuffix
[(SF "ss") (DF "sd")
(V8SF "ps") (V4DF "pd")
(V4SF "ps") (V2DF "pd")
(V16QI "b") (V8HI "w") (V4SI "d") (V2DI "q")
- (V8SI "si")])
+ (V32QI "b") (V16HI "w") (V8SI "d") (V4DI "q")])
;; SSE vector suffix for floating point modes
(define_mode_attr ssevecmodesuffix [(SF "ps") (DF "pd")])
;; This mode iterator allows :P to be used for patterns that operate on
;; pointer-sized quantities. Exactly one of the two alternatives will match.
(define_mode_iterator P [(SI "Pmode == SImode") (DI "Pmode == DImode")])
+
+;; This mode iterator allows :PTR to be used for patterns that operate on
+;; ptr_mode sized quantities.
+(define_mode_iterator PTR
+ [(SI "ptr_mode == SImode") (DI "ptr_mode == DImode")])
\f
;; Scheduling descriptions
UNSPEC_FNSTSW))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
- "* return output_fp_compare (insn, operands, 0, 0);"
+ "* return output_fp_compare (insn, operands, false, false);"
[(set_attr "type" "multi")
(set_attr "unit" "i387")
(set (attr "mode")
(match_operand:XF 2 "register_operand" "f"))]
UNSPEC_FNSTSW))]
"TARGET_80387"
- "* return output_fp_compare (insn, operands, 0, 0);"
+ "* return output_fp_compare (insn, operands, false, false);"
[(set_attr "type" "multi")
(set_attr "unit" "i387")
(set_attr "mode" "XF")])
(match_operand:MODEF 2 "nonimmediate_operand" "fm"))]
UNSPEC_FNSTSW))]
"TARGET_80387"
- "* return output_fp_compare (insn, operands, 0, 0);"
+ "* return output_fp_compare (insn, operands, false, false);"
[(set_attr "type" "multi")
(set_attr "unit" "i387")
(set_attr "mode" "<MODE>")])
UNSPEC_FNSTSW))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
- "* return output_fp_compare (insn, operands, 0, 1);"
+ "* return output_fp_compare (insn, operands, false, true);"
[(set_attr "type" "multi")
(set_attr "unit" "i387")
(set (attr "mode")
[(compare:CCFP
(match_operand 1 "register_operand" "f")
(match_operator 3 "float_operator"
- [(match_operand:X87MODEI12 2 "memory_operand" "m")]))]
+ [(match_operand:SWI24 2 "memory_operand" "m")]))]
UNSPEC_FNSTSW))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))
&& (GET_MODE (operands [3]) == GET_MODE (operands[1]))"
- "* return output_fp_compare (insn, operands, 0, 0);"
+ "* return output_fp_compare (insn, operands, false, false);"
[(set_attr "type" "multi")
(set_attr "unit" "i387")
(set_attr "fp_int_src" "true")
(compare:CCFP
(match_operand 1 "register_operand" "f")
(match_operator 3 "float_operator"
- [(match_operand:X87MODEI12 2 "memory_operand" "m")])))
+ [(match_operand:SWI24 2 "memory_operand" "m")])))
(clobber (match_operand:HI 0 "register_operand" "=a"))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& TARGET_SAHF && !TARGET_CMOVE
(unspec:HI [(reg:CCFP FPSR_REG)] UNSPEC_FNSTSW))]
"TARGET_80387"
"fnstsw\t%0"
- [(set (attr "length") (symbol_ref "ix86_attr_length_address_default (insn) + 2"))
+ [(set (attr "length")
+ (symbol_ref "ix86_attr_length_address_default (insn) + 2"))
(set_attr "mode" "SI")
(set_attr "unit" "i387")])
(set_attr "mode" "SI")])
;; Pentium Pro can do steps 1 through 3 in one go.
-;; comi*, ucomi*, fcomi*, ficomi*,fucomi* (i387 instructions set condition codes)
+;; comi*, ucomi*, fcomi*, ficomi*, fucomi*
+;; (these i387 instructions set flags directly)
(define_insn "*cmpfp_i_mixed"
[(set (reg:CCFP FLAGS_REG)
(compare:CCFP (match_operand 0 "register_operand" "f,x")
"TARGET_MIX_SSE_I387
&& SSE_FLOAT_MODE_P (GET_MODE (operands[0]))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 0);"
+ "* return output_fp_compare (insn, operands, true, false);"
[(set_attr "type" "fcmp,ssecomi")
(set_attr "prefix" "orig,maybe_vex")
(set (attr "mode")
"TARGET_SSE_MATH
&& SSE_FLOAT_MODE_P (GET_MODE (operands[0]))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 0);"
+ "* return output_fp_compare (insn, operands, true, false);"
[(set_attr "type" "ssecomi")
(set_attr "prefix" "maybe_vex")
(set (attr "mode")
&& TARGET_CMOVE
&& !(SSE_FLOAT_MODE_P (GET_MODE (operands[0])) && TARGET_SSE_MATH)
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 0);"
+ "* return output_fp_compare (insn, operands, true, false);"
[(set_attr "type" "fcmp")
(set (attr "mode")
(cond [(match_operand:SF 1 "" "")
"TARGET_MIX_SSE_I387
&& SSE_FLOAT_MODE_P (GET_MODE (operands[0]))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 1);"
+ "* return output_fp_compare (insn, operands, true, true);"
[(set_attr "type" "fcmp,ssecomi")
(set_attr "prefix" "orig,maybe_vex")
(set (attr "mode")
"TARGET_SSE_MATH
&& SSE_FLOAT_MODE_P (GET_MODE (operands[0]))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 1);"
+ "* return output_fp_compare (insn, operands, true, true);"
[(set_attr "type" "ssecomi")
(set_attr "prefix" "maybe_vex")
(set (attr "mode")
&& TARGET_CMOVE
&& !(SSE_FLOAT_MODE_P (GET_MODE (operands[0])) && TARGET_SSE_MATH)
&& GET_MODE (operands[0]) == GET_MODE (operands[1])"
- "* return output_fp_compare (insn, operands, 1, 1);"
+ "* return output_fp_compare (insn, operands, true, true);"
[(set_attr "type" "fcmp")
(set (attr "mode")
(cond [(match_operand:SF 1 "" "")
+;; Double-word push: always emitted as "#" and split into two word
+;; pushes after reload.  NOTE(review): the memory constraint tightens
+;; from "*m" to "*o" (offsettable) because the splitter must be able to
+;; address the second word at an offset -- confirm against the splitter.
(define_insn "*push<mode>2"
[(set (match_operand:DWI 0 "push_operand" "=<")
- (match_operand:DWI 1 "general_no_elim_operand" "riF*m"))]
+ (match_operand:DWI 1 "general_no_elim_operand" "riF*o"))]
""
- "#")
+ "#"
+ [(set_attr "type" "multi")
+ (set_attr "mode" "<MODE>")])
(define_split
[(set (match_operand:TI 0 "push_operand" "")
switch (which_alternative)
{
case 0:
- return "vxorps\t%0, %0, %0";
+ return standard_sse_constant_opcode (insn, operands[1]);
case 1:
case 2:
if (misaligned_operand (operands[0], OImode)
case 1:
return "#";
case 2:
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
case 3:
case 4:
/* TDmode values are passed as TImode on the stack. Moving them
(set (attr "mode")
(cond [(eq_attr "alternative" "2,3")
(if_then_else
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
(const_string "TI"))
(eq_attr "alternative" "4")
(if_then_else
- (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
- (const_int 0))
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SSE_TYPELESS_STORES")
+ (match_test "optimize_function_for_size_p (cfun)"))
(const_string "V4SF")
(const_string "TI"))]
(const_string "DI")))])
switch (which_alternative)
{
case 0:
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
case 1:
case 2:
/* TDmode values are passed as TImode on the stack. Moving them
[(set_attr "type" "sselog1,ssemov,ssemov")
(set_attr "prefix" "maybe_vex")
(set (attr "mode")
- (cond [(ior (eq (symbol_ref "TARGET_SSE2") (const_int 0))
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (cond [(ior (not (match_test "TARGET_SSE2"))
+ (match_test "optimize_function_for_size_p (cfun)"))
(const_string "V4SF")
(and (eq_attr "alternative" "2")
- (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
- (const_int 0)))
+ (match_test "TARGET_SSE_TYPELESS_STORES"))
(const_string "V4SF")]
(const_string "TI")))])
(define_insn "*movdi_internal_rex64"
[(set (match_operand:DI 0 "nonimmediate_operand"
- "=r,r ,r,m ,!m,*y,*y,?r ,m ,?*Ym,?*y,*x,*x,?r ,m,?*Yi,*x,?*x,?*Ym")
+ "=r,r ,r,m ,!o,*y,m*y,?*y,?r ,?*Ym,*x,m ,*x,*x,?r ,?*Yi,?*x,?*Ym")
(match_operand:DI 1 "general_operand"
- "Z ,rem,i,re,n ,C ,*y,*Ym,*y,r ,m ,C ,*x,*Yi,*x,r ,m ,*Ym,*x"))]
+ "Z ,rem,i,re,n ,C ,*y ,m ,*Ym,r ,C ,*x,*x,m ,*Yi,r ,*Ym,*x"))]
"TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
return "movq\t{%1, %0|%0, %1}";
case TYPE_SSELOG1:
- return "%vpxor\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
case TYPE_MMX:
return "pxor\t%0, %0";
return "mov{l}\t{%k1, %k0|%k0, %k1}";
else if (which_alternative == 2)
return "movabs{q}\t{%1, %0|%0, %1}";
+ else if (ix86_use_lea_for_mov (insn, operands))
+ return "lea{q}\t{%a1, %0|%0, %a1}";
else
return "mov{q}\t{%1, %0|%0, %1}";
}
}
[(set (attr "type")
- (cond [(eq_attr "alternative" "5")
+ (cond [(eq_attr "alternative" "4")
+ (const_string "multi")
+ (eq_attr "alternative" "5")
(const_string "mmx")
- (eq_attr "alternative" "6,7,8,9,10")
+ (eq_attr "alternative" "6,7,8,9")
(const_string "mmxmov")
- (eq_attr "alternative" "11")
+ (eq_attr "alternative" "10")
(const_string "sselog1")
- (eq_attr "alternative" "12,13,14,15,16")
+ (eq_attr "alternative" "11,12,13,14,15")
(const_string "ssemov")
- (eq_attr "alternative" "17,18")
+ (eq_attr "alternative" "16,17")
(const_string "ssecvt")
- (eq_attr "alternative" "4")
- (const_string "multi")
- (match_operand:DI 1 "pic_32bit_operand" "")
+ (match_operand 1 "pic_32bit_operand" "")
(const_string "lea")
]
(const_string "imov")))
(and (eq_attr "alternative" "2") (eq_attr "type" "imov"))
(const_string "8")
(const_string "*")))
- (set_attr "prefix_rex" "*,*,*,*,*,*,*,1,*,1,*,*,*,*,*,*,*,*,*")
- (set_attr "prefix_data16" "*,*,*,*,*,*,*,*,*,*,*,*,*,*,*,1,*,*,*")
+ (set (attr "prefix_rex")
+ (if_then_else (eq_attr "alternative" "8,9")
+ (const_string "1")
+ (const_string "*")))
+ (set (attr "prefix_data16")
+ (if_then_else (eq_attr "alternative" "11")
+ (const_string "1")
+ (const_string "*")))
(set (attr "prefix")
- (if_then_else (eq_attr "alternative" "11,12,13,14,15,16")
+ (if_then_else (eq_attr "alternative" "10,11,12,13,14,15")
(const_string "maybe_vex")
(const_string "orig")))
- (set_attr "mode" "SI,DI,DI,DI,SI,DI,DI,DI,DI,DI,DI,TI,TI,DI,DI,DI,DI,DI,DI")])
+ (set_attr "mode" "SI,DI,DI,DI,SI,DI,DI,DI,DI,DI,TI,DI,TI,DI,DI,DI,DI,DI")])
+
+;; Reload patterns to support multi-word load/store
+;; with a non-offsettable address.  Operand 2 is a DImode scratch
+;; register ("=&r", earlyclobber: it must not overlap the source) into
+;; which the problematic address is copied so the memory operand can be
+;; rewritten to a plain register-indirect form.
+(define_expand "reload_noff_store"
+ [(parallel [(match_operand 0 "memory_operand" "=m")
+ (match_operand 1 "register_operand" "r")
+ (match_operand:DI 2 "register_operand" "=&r")])]
+ "TARGET_64BIT"
+{
+ rtx mem = operands[0];
+ rtx addr = XEXP (mem, 0);
+
+ /* Move the address into the scratch and store through it.  */
+ emit_move_insn (operands[2], addr);
+ mem = replace_equiv_address_nv (mem, operands[2]);
+
+ emit_insn (gen_rtx_SET (VOIDmode, mem, operands[1]));
+ DONE;
+})
+
+;; Counterpart of reload_noff_store for loads.  NOTE(review): here the
+;; scratch is "=r" (no earlyclobber) -- presumably because the address is
+;; fully consumed before the destination is written, so overlap with
+;; operand 0 is harmless; confirm.
+(define_expand "reload_noff_load"
+ [(parallel [(match_operand 0 "register_operand" "=r")
+ (match_operand 1 "memory_operand" "m")
+ (match_operand:DI 2 "register_operand" "=r")])]
+ "TARGET_64BIT"
+{
+ rtx mem = operands[1];
+ rtx addr = XEXP (mem, 0);
+
+ /* Move the address into the scratch and load through it.  */
+ emit_move_insn (operands[2], addr);
+ mem = replace_equiv_address_nv (mem, operands[2]);
+
+ emit_insn (gen_rtx_SET (VOIDmode, operands[0], mem));
+ DONE;
+})
;; Convert impossible stores of immediate to existing instructions.
;; First try to get scratch register and go through it. In case this
+;; 32-bit DImode move.  This revision replaces the fixed "@" template
+;; with C code so the all-zeros/all-ones constant alternatives can use
+;; standard_sse_constant_opcode, retires the *Y2 constraints in favor of
+;; *x with per-alternative "isa" gating (sse2/noavx), and adds the
+;; inter-unit MMX<->SSE movq2dq/movdq2q alternatives (13, 14).
(define_insn "*movdi_internal"
[(set (match_operand:DI 0 "nonimmediate_operand"
- "=r ,o ,*y,m*y,*y,*Y2,m ,*Y2,*Y2,*x,m ,*x,*x")
+ "=r ,o ,*y,m*y,*y,*x,m ,*x,*x,*x,m ,*x,*x,?*x,?*Ym")
(match_operand:DI 1 "general_operand"
- "riFo,riF,C ,*y ,m ,C ,*Y2,*Y2,m ,C ,*x,*x,m "))]
+ "riFo,riF,C ,*y ,m ,C ,*x,*x,m ,C ,*x,*x,m ,*Ym,*x"))]
"!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
- "@
- #
- #
- pxor\t%0, %0
- movq\t{%1, %0|%0, %1}
- movq\t{%1, %0|%0, %1}
- %vpxor\t%0, %d0
- %vmovq\t{%1, %0|%0, %1}
- %vmovdqa\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}
- xorps\t%0, %0
- movlps\t{%1, %0|%0, %1}
- movaps\t{%1, %0|%0, %1}
- movlps\t{%1, %0|%0, %1}"
+{
+ /* Select the mnemonic from the per-alternative "type" attribute
+ computed below; SSELOG1 covers the standard SSE constants.  */
+ switch (get_attr_type (insn))
+ {
+ case TYPE_SSECVT:
+ if (SSE_REG_P (operands[0]))
+ return "movq2dq\t{%1, %0|%0, %1}";
+ else
+ return "movdq2q\t{%1, %0|%0, %1}";
+
+ case TYPE_SSEMOV:
+ switch (get_attr_mode (insn))
+ {
+ case MODE_TI:
+ return "%vmovdqa\t{%1, %0|%0, %1}";
+ case MODE_DI:
+ return "%vmovq\t{%1, %0|%0, %1}";
+ case MODE_V4SF:
+ return "movaps\t{%1, %0|%0, %1}";
+ case MODE_V2SF:
+ return "movlps\t{%1, %0|%0, %1}";
+ default:
+ gcc_unreachable ();
+ }
+
+ case TYPE_MMXMOV:
+ return "movq\t{%1, %0|%0, %1}";
+
+ case TYPE_SSELOG1:
+ return standard_sse_constant_opcode (insn, operands[1]);
+
+ case TYPE_MMX:
+ return "pxor\t%0, %0";
+
+ case TYPE_MULTI:
+ return "#";
+
+ default:
+ gcc_unreachable ();
+ }
+}
[(set (attr "isa")
- (if_then_else (eq_attr "alternative" "9,10,11,12")
- (const_string "noavx")
- (const_string "base")))
- (set_attr "type" "*,*,mmx,mmxmov,mmxmov,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
+ (cond [(eq_attr "alternative" "5,6,7,8,13,14")
+ (const_string "sse2")
+ (eq_attr "alternative" "9,10,11,12")
+ (const_string "noavx")
+ ]
+ (const_string "*")))
+ (set (attr "type")
+ (cond [(eq_attr "alternative" "0,1")
+ (const_string "multi")
+ (eq_attr "alternative" "2")
+ (const_string "mmx")
+ (eq_attr "alternative" "3,4")
+ (const_string "mmxmov")
+ (eq_attr "alternative" "5,9")
+ (const_string "sselog1")
+ (eq_attr "alternative" "13,14")
+ (const_string "ssecvt")
+ ]
+ (const_string "ssemov")))
(set (attr "prefix")
(if_then_else (eq_attr "alternative" "5,6,7,8")
(const_string "maybe_vex")
(const_string "orig")))
- (set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF")
+ (set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF,DI,DI")])
(define_split
[(set (match_operand:DI 0 "nonimmediate_operand" "")
[(set (match_operand:SI 0 "nonimmediate_operand"
"=r,m ,*y,*y,?rm,?*y,*x,*x,?r ,m ,?*Yi,*x")
(match_operand:SI 1 "general_operand"
- "g ,ri,C ,*y,*y ,rm ,C ,*x,*Yi,*x,r ,m "))]
+ "g ,re,C ,*y,*y ,rm ,C ,*x,*Yi,*x,r ,m "))]
"!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
{
case TYPE_SSELOG1:
- if (get_attr_mode (insn) == MODE_TI)
- return "%vpxor\t%0, %d0";
- return "%vxorps\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
case TYPE_SSEMOV:
switch (get_attr_mode (insn))
default:
gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
- return "mov{l}\t{%1, %0|%0, %1}";
+ if (ix86_use_lea_for_mov (insn, operands))
+ return "lea{l}\t{%a1, %0|%0, %a1}";
+ else
+ return "mov{l}\t{%1, %0|%0, %1}";
}
}
[(set (attr "type")
(const_string "sselog1")
(eq_attr "alternative" "7,8,9,10,11")
(const_string "ssemov")
- (match_operand:DI 1 "pic_32bit_operand" "")
+ (match_operand 1 "pic_32bit_operand" "")
(const_string "lea")
]
(const_string "imov")))
(const_string "DI")
(eq_attr "alternative" "6,7")
(if_then_else
- (eq (symbol_ref "TARGET_SSE2") (const_int 0))
+ (not (match_test "TARGET_SSE2"))
(const_string "V4SF")
(const_string "TI"))
(and (eq_attr "alternative" "8,9,10,11")
- (eq (symbol_ref "TARGET_SSE2") (const_int 0)))
+ (not (match_test "TARGET_SSE2")))
(const_string "SF")
]
(const_string "SI")))])
}
}
[(set (attr "type")
- (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (cond [(match_test "optimize_function_for_size_p (cfun)")
(const_string "imov")
(and (eq_attr "alternative" "0")
- (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
- (const_int 0))
- (eq (symbol_ref "TARGET_HIMODE_MATH")
- (const_int 0))))
+ (ior (not (match_test "TARGET_PARTIAL_REG_STALL"))
+ (not (match_test "TARGET_HIMODE_MATH"))))
(const_string "imov")
(and (eq_attr "alternative" "1,2")
(match_operand:HI 1 "aligned_operand" ""))
(const_string "imov")
- (and (ne (symbol_ref "TARGET_MOVX")
- (const_int 0))
+ (and (match_test "TARGET_MOVX")
(eq_attr "alternative" "0,2"))
(const_string "imovx")
]
(match_operand:HI 1 "aligned_operand" ""))
(const_string "SI")
(and (eq_attr "alternative" "0")
- (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
- (const_int 0))
- (eq (symbol_ref "TARGET_HIMODE_MATH")
- (const_int 0))))
+ (ior (not (match_test "TARGET_PARTIAL_REG_STALL"))
+ (not (match_test "TARGET_HIMODE_MATH"))))
(const_string "SI")
]
(const_string "HI")))])
(cond [(and (eq_attr "alternative" "5")
(not (match_operand:QI 1 "aligned_operand" "")))
(const_string "imovx")
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (match_test "optimize_function_for_size_p (cfun)")
(const_string "imov")
(and (eq_attr "alternative" "3")
- (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
- (const_int 0))
- (eq (symbol_ref "TARGET_QIMODE_MATH")
- (const_int 0))))
+ (ior (not (match_test "TARGET_PARTIAL_REG_STALL"))
+ (not (match_test "TARGET_QIMODE_MATH"))))
(const_string "imov")
(eq_attr "alternative" "3,5")
(const_string "imovx")
- (and (ne (symbol_ref "TARGET_MOVX")
- (const_int 0))
+ (and (match_test "TARGET_MOVX")
(eq_attr "alternative" "2"))
(const_string "imovx")
]
(const_string "SI")
(and (eq_attr "type" "imov")
(and (eq_attr "alternative" "0,1")
- (and (ne (symbol_ref "TARGET_PARTIAL_REG_DEPENDENCY")
- (const_int 0))
- (and (eq (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
- (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
- (const_int 0))))))
+ (and (match_test "TARGET_PARTIAL_REG_DEPENDENCY")
+ (and (not (match_test "optimize_function_for_size_p (cfun)"))
+ (not (match_test "TARGET_PARTIAL_REG_STALL"))))))
(const_string "SI")
;; Avoid partial register stalls when not using QImode arithmetic
(and (eq_attr "type" "imov")
(and (eq_attr "alternative" "0,1")
- (and (ne (symbol_ref "TARGET_PARTIAL_REG_STALL")
- (const_int 0))
- (eq (symbol_ref "TARGET_QIMODE_MATH")
- (const_int 0)))))
+ (and (match_test "TARGET_PARTIAL_REG_STALL")
+ (not (match_test "TARGET_QIMODE_MATH")))))
(const_string "SI")
]
(const_string "QI")))])
;; into register when rax is not available
(define_insn "*movabs<mode>_1"
[(set (mem:SWI1248x (match_operand:DI 0 "x86_64_movabs_operand" "i,r"))
- (match_operand:SWI1248x 1 "nonmemory_operand" "a,er"))]
- "TARGET_64BIT && ix86_check_movabs (insn, 0)"
+ (match_operand:SWI1248x 1 "nonmemory_operand" "a,r<i>"))]
+ "TARGET_LP64 && ix86_check_movabs (insn, 0)"
"@
movabs{<imodesuffix>}\t{%1, %P0|%P0, %1}
mov{<imodesuffix>}\t{%1, %a0|%a0, %1}"
(define_insn "*movabs<mode>_2"
[(set (match_operand:SWI1248x 0 "register_operand" "=a,r")
(mem:SWI1248x (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
- "TARGET_64BIT && ix86_check_movabs (insn, 1)"
+ "TARGET_LP64 && ix86_check_movabs (insn, 1)"
"@
movabs{<imodesuffix>}\t{%P1, %0|%0, %P1}
mov{<imodesuffix>}\t{%a1, %0|%0, %a1}"
}
}
[(set (attr "type")
- (if_then_else (and (match_operand:QI 0 "register_operand" "")
- (ior (not (match_operand:QI 0 "q_regs_operand" ""))
- (ne (symbol_ref "TARGET_MOVX")
- (const_int 0))))
+ (if_then_else (ior (not (match_operand:QI 0 "QIreg_operand" ""))
+ (match_test "TARGET_MOVX"))
(const_string "imovx")
(const_string "imov")))
(set (attr "mode")
}
[(set (attr "type")
(if_then_else (and (match_operand:QI 0 "register_operand" "")
- (ior (not (match_operand:QI 0 "q_regs_operand" ""))
- (ne (symbol_ref "TARGET_MOVX")
- (const_int 0))))
+ (ior (not (match_operand:QI 0 "QIreg_operand" ""))
+ (match_test "TARGET_MOVX")))
(const_string "imovx")
(const_string "imov")))
(set (attr "mode")
}
}
[(set (attr "type")
- (if_then_else (ior (not (match_operand:QI 0 "q_regs_operand" ""))
- (ne (symbol_ref "TARGET_MOVX")
- (const_int 0)))
+ (if_then_else (ior (not (match_operand:QI 0 "QIreg_operand" ""))
+ (match_test "TARGET_MOVX"))
(const_string "imovx")
(const_string "imov")))
(set (attr "mode")
}
[(set (attr "type")
(if_then_else (and (match_operand:QI 0 "register_operand" "")
- (ior (not (match_operand:QI 0 "q_regs_operand" ""))
- (ne (symbol_ref "TARGET_MOVX")
- (const_int 0))))
+ (ior (not (match_operand:QI 0 "QIreg_operand" ""))
+ (match_test "TARGET_MOVX")))
(const_string "imovx")
(const_string "imov")))
(set (attr "mode")
(set_attr "unit" "sse,*,*")
(set_attr "mode" "TF,SI,SI")])
+;; %%% Kill this when call knows how to work this out.
(define_split
[(set (match_operand:TF 0 "push_operand" "")
(match_operand:TF 1 "sse_reg_operand" ""))]
[(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (const_int -16)))
(set (mem:TF (reg:P SP_REG)) (match_dup 1))])
-(define_split
- [(set (match_operand:TF 0 "push_operand" "")
- (match_operand:TF 1 "general_operand" ""))]
- "TARGET_SSE2 && reload_completed
- && !SSE_REG_P (operands[1])"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
(define_insn "*pushxf"
[(set (match_operand:XF 0 "push_operand" "=<,<")
(match_operand:XF 1 "general_no_elim_operand" "f,ro"))]
;; only once, but this ought to be handled elsewhere).
(define_insn "*pushxf_nointeger"
- [(set (match_operand:XF 0 "push_operand" "=X,X,X")
- (match_operand:XF 1 "general_no_elim_operand" "f,Fo,*r"))]
+ [(set (match_operand:XF 0 "push_operand" "=<,<")
+ (match_operand:XF 1 "general_no_elim_operand" "f,*rFo"))]
"optimize_function_for_size_p (cfun)"
{
/* This insn should be already split before reg-stack. */
gcc_unreachable ();
}
[(set_attr "type" "multi")
- (set_attr "unit" "i387,*,*")
- (set_attr "mode" "XF,SI,SI")])
+ (set_attr "unit" "i387,*")
+ (set_attr "mode" "XF,SI")])
+;; %%% Kill this when call knows how to work this out.
(define_split
[(set (match_operand:XF 0 "push_operand" "")
(match_operand:XF 1 "fp_register_operand" ""))]
(set (mem:XF (reg:P SP_REG)) (match_dup 1))]
"operands[2] = GEN_INT (-GET_MODE_SIZE (XFmode));")
-(define_split
- [(set (match_operand:XF 0 "push_operand" "")
- (match_operand:XF 1 "general_operand" ""))]
- "reload_completed
- && !FP_REG_P (operands[1])"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
-(define_insn "*pushdf"
+;; DFmode push for 64-bit targets.  Never emitted as-is: the body is
+;; gcc_unreachable because the insn is always split before reg-stack;
+;; the constraints exist only to guide register allocation.
+(define_insn "*pushdf_rex64"
   [(set (match_operand:DF 0 "push_operand" "=<,<,<")
-	(match_operand:DF 1 "general_no_elim_operand" "f,rFo,Y2"))]
-  "TARGET_64BIT || TARGET_INTEGER_DFMODE_MOVES"
+	(match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFm,x"))]
+  "TARGET_64BIT"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "i387,*,*")
-   (set_attr "mode" "DF,SI,DF")])
+   (set_attr "mode" "DF,DI,DF")])
;; Size of pushdf is 3 (for sub) + 2 (for fstp) + memory operand size.
;; Size of pushdf using integer instructions is 2+2*memory operand size
-;; On the average, pushdf using integers can be still shorter. Allow this
-;; pattern for optimize_size too.
+;; On the average, pushdf using integers can be still shorter.
-(define_insn "*pushdf_nointeger"
-  [(set (match_operand:DF 0 "push_operand" "=<,<,<,<")
-	(match_operand:DF 1 "general_no_elim_operand" "f,Fo,*r,Y2"))]
-  "!(TARGET_64BIT || TARGET_INTEGER_DFMODE_MOVES)"
+;; DFmode push for 32-bit targets.  The SSE alternative is gated by the
+;; "isa" attribute (sse2).  Never emitted as-is: always split before
+;; reg-stack (body is gcc_unreachable).
+(define_insn "*pushdf"
+  [(set (match_operand:DF 0 "push_operand" "=<,<,<")
+	(match_operand:DF 1 "general_no_elim_operand" "f,Yd*rFo,x"))]
+  "!TARGET_64BIT"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
-  [(set_attr "type" "multi")
-   (set_attr "unit" "i387,*,*,*")
-   (set_attr "mode" "DF,SI,SI,DF")])
+  [(set_attr "isa" "*,*,sse2")
+   (set_attr "type" "multi")
+   (set_attr "unit" "i387,*,*")
+   (set_attr "mode" "DF,DI,DF")])
;; %%% Kill this when call knows how to work this out.
(define_split
[(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (const_int -8)))
(set (mem:DF (reg:P SP_REG)) (match_dup 1))])
-(define_split
- [(set (match_operand:DF 0 "push_operand" "")
- (match_operand:DF 1 "general_operand" ""))]
- "reload_completed
- && !ANY_FP_REG_P (operands[1])"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
(define_insn "*pushsf_rex64"
[(set (match_operand:SF 0 "push_operand" "=X,X,X")
(match_operand:SF 1 "nonmemory_no_elim_operand" "f,rF,x"))]
(set_attr "unit" "i387,*,*")
(set_attr "mode" "SF,SI,SF")])
-(define_split
- [(set (match_operand:SF 0 "push_operand" "")
- (match_operand:SF 1 "memory_operand" ""))]
- "reload_completed
- && MEM_P (operands[1])
- && (operands[2] = find_constant_src (insn))"
- [(set (match_dup 0)
- (match_dup 2))])
-
;; %%% Kill this when call knows how to work this out.
(define_split
[(set (match_operand:SF 0 "push_operand" "")
"reload_completed"
[(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (match_dup 2)))
(set (mem:SF (reg:P SP_REG)) (match_dup 1))]
- "operands[2] = GEN_INT (-GET_MODE_SIZE (<MODE>mode));")
+ "operands[2] = GEN_INT (-GET_MODE_SIZE (<P:MODE>mode));")
+
+;; If an SFmode push reads memory whose contents are a known constant
+;; (find_constant_src succeeds), push the constant directly instead of
+;; loading it from memory first.
+(define_split
+  [(set (match_operand:SF 0 "push_operand" "")
+	(match_operand:SF 1 "memory_operand" ""))]
+  "reload_completed
+   && (operands[2] = find_constant_src (insn))"
+  [(set (match_dup 0) (match_dup 2))])
+
+;; Split a multi-word TF/XF/DFmode push whose source is not an FP
+;; register into word-sized integer moves via ix86_split_long_move.
+;; Replaces the three per-mode splitters removed above.
+(define_split
+  [(set (match_operand 0 "push_operand" "")
+	(match_operand 1 "general_operand" ""))]
+  "reload_completed
+   && (GET_MODE (operands[0]) == TFmode
+       || GET_MODE (operands[0]) == XFmode
+       || GET_MODE (operands[0]) == DFmode)
+   && !ANY_FP_REG_P (operands[1])"
+  [(const_int 0)]
+  "ix86_split_long_move (operands); DONE;")
\f
;; Floating point move instructions.
"ix86_expand_move (<MODE>mode, operands); DONE;")
(define_insn "*movtf_internal"
- [(set (match_operand:TF 0 "nonimmediate_operand" "=x,m,x,?r,?o")
- (match_operand:TF 1 "general_operand" "xm,x,C,roF,Fr"))]
+ [(set (match_operand:TF 0 "nonimmediate_operand" "=x,m,x,?*r ,!o")
+ (match_operand:TF 1 "general_operand" "xm,x,C,*roF,F*r"))]
"TARGET_SSE2
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))
+ && (!can_create_pseudo_p ()
+ || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
+ || GET_CODE (operands[1]) != CONST_DOUBLE
+ || (optimize_function_for_size_p (cfun)
+ && standard_sse_constant_p (operands[1])
+ && !memory_operand (operands[0], TFmode))
+ || (!TARGET_MEMORY_MISMATCH_STALL
+ && memory_operand (operands[0], TFmode)))"
{
switch (which_alternative)
{
case 0:
case 1:
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vmovaps\t{%1, %0|%0, %1}";
+ /* Handle misaligned load/store since we
+ don't have movmisaligntf pattern. */
+ if (misaligned_operand (operands[0], TFmode)
+ || misaligned_operand (operands[1], TFmode))
+ {
+ if (get_attr_mode (insn) == MODE_V4SF)
+ return "%vmovups\t{%1, %0|%0, %1}";
+ else
+ return "%vmovdqu\t{%1, %0|%0, %1}";
+ }
else
- return "%vmovdqa\t{%1, %0|%0, %1}";
+ {
+ if (get_attr_mode (insn) == MODE_V4SF)
+ return "%vmovaps\t{%1, %0|%0, %1}";
+ else
+ return "%vmovdqa\t{%1, %0|%0, %1}";
+ }
+
case 2:
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
+
case 3:
case 4:
return "#";
+
default:
gcc_unreachable ();
}
(set (attr "mode")
(cond [(eq_attr "alternative" "0,2")
(if_then_else
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
(const_string "TI"))
(eq_attr "alternative" "1")
(if_then_else
- (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
- (const_int 0))
- (ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SSE_TYPELESS_STORES")
+ (match_test "optimize_function_for_size_p (cfun)"))
(const_string "V4SF")
(const_string "TI"))]
(const_string "DI")))])
-(define_split
- [(set (match_operand:TF 0 "nonimmediate_operand" "")
- (match_operand:TF 1 "general_operand" ""))]
- "reload_completed
- && !(SSE_REG_P (operands[0]) || SSE_REG_P (operands[1]))"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
+;; Possible store forwarding (partial memory) stall in alternative 4.
(define_insn "*movxf_internal"
- [(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,r,o")
- (match_operand:XF 1 "general_operand" "fm,f,G,roF,Fr"))]
- "optimize_function_for_speed_p (cfun)
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && (reload_in_progress || reload_completed
+ [(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,?Yx*r ,!o")
+ (match_operand:XF 1 "general_operand" "fm,f,G,Yx*roF,FYx*r"))]
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
+ && (!can_create_pseudo_p ()
+ || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
|| GET_CODE (operands[1]) != CONST_DOUBLE
- || memory_operand (operands[0], XFmode))"
+ || (optimize_function_for_size_p (cfun)
+ && standard_80387_constant_p (operands[1]) > 0
+ && !memory_operand (operands[0], XFmode))
+ || (!TARGET_MEMORY_MISMATCH_STALL
+ && memory_operand (operands[0], XFmode)))"
{
switch (which_alternative)
{
case 2:
return standard_80387_constant_opcode (operands[1]);
- case 3: case 4:
+ case 3:
+ case 4:
return "#";
default:
[(set_attr "type" "fmov,fmov,fmov,multi,multi")
(set_attr "mode" "XF,XF,XF,SI,SI")])
-;; Do not use integer registers when optimizing for size
-(define_insn "*movxf_internal_nointeger"
- [(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,*r,o")
- (match_operand:XF 1 "general_operand" "fm,f,G,*roF,F*r"))]
- "optimize_function_for_size_p (cfun)
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && (reload_in_progress || reload_completed
- || standard_80387_constant_p (operands[1])
- || GET_CODE (operands[1]) != CONST_DOUBLE
- || memory_operand (operands[0], XFmode))"
-{
- switch (which_alternative)
- {
- case 0:
- case 1:
- return output_387_reg_move (insn, operands);
-
- case 2:
- return standard_80387_constant_opcode (operands[1]);
-
- case 3: case 4:
- return "#";
- default:
- gcc_unreachable ();
- }
-}
- [(set_attr "type" "fmov,fmov,fmov,multi,multi")
- (set_attr "mode" "XF,XF,XF,SI,SI")])
-
-(define_split
- [(set (match_operand:XF 0 "nonimmediate_operand" "")
- (match_operand:XF 1 "general_operand" ""))]
- "reload_completed
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && ! (FP_REG_P (operands[0]) ||
- (GET_CODE (operands[0]) == SUBREG
- && FP_REG_P (SUBREG_REG (operands[0]))))
- && ! (FP_REG_P (operands[1]) ||
- (GET_CODE (operands[1]) == SUBREG
- && FP_REG_P (SUBREG_REG (operands[1]))))"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
(define_insn "*movdf_internal_rex64"
[(set (match_operand:DF 0 "nonimmediate_operand"
- "=f,m,f,r ,m,!r,!m,Y2*x,Y2*x,Y2*x,m ,Yi,r ")
+ "=f,m,f,?r,?m,?r,!o,x,x,x,m,Yi,r ")
(match_operand:DF 1 "general_operand"
- "fm,f,G,rm,r,F ,F ,C ,Y2*x,m ,Y2*x,r ,Yi"))]
+ "fm,f,G,rm,r ,F ,F ,C,x,m,x,r ,Yi"))]
"TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && (reload_in_progress || reload_completed
+ && (!can_create_pseudo_p ()
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
- || (!(TARGET_SSE2 && TARGET_SSE_MATH)
- && optimize_function_for_size_p (cfun)
- && standard_80387_constant_p (operands[1]))
|| GET_CODE (operands[1]) != CONST_DOUBLE
+ || (optimize_function_for_size_p (cfun)
+ && ((!(TARGET_SSE2 && TARGET_SSE_MATH)
+ && standard_80387_constant_p (operands[1]) > 0)
+ || (TARGET_SSE2 && TARGET_SSE_MATH
+ && standard_sse_constant_p (operands[1]))))
|| memory_operand (operands[0], DFmode))"
{
switch (which_alternative)
return "#";
case 7:
- switch (get_attr_mode (insn))
- {
- case MODE_V4SF:
- return "%vxorps\t%0, %d0";
- case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vxorpd\t%0, %d0";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
- default:
- gcc_unreachable ();
- }
+ return standard_sse_constant_opcode (insn, operands[1]);
+
case 8:
case 9:
case 10:
switch (get_attr_mode (insn))
{
- case MODE_V4SF:
- return "%vmovaps\t{%1, %0|%0, %1}";
case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
+ if (!TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
return "%vmovapd\t{%1, %0|%0, %1}";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovdqa\t{%1, %0|%0, %1}";
+ case MODE_V4SF:
+ return "%vmovaps\t{%1, %0|%0, %1}";
+
case MODE_DI:
return "%vmovq\t{%1, %0|%0, %1}";
case MODE_DF:
if (TARGET_AVX && REG_P (operands[0]) && REG_P (operands[1]))
return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovsd\t{%1, %0|%0, %1}";
+ return "%vmovsd\t{%1, %0|%0, %1}";
case MODE_V1DF:
return "%vmovlpd\t{%1, %d0|%d0, %1}";
case MODE_V2SF:
gcc_unreachable();
}
}
- [(set_attr "type" "fmov,fmov,fmov,imov,imov,imov,multi,sselog1,ssemov,ssemov,ssemov,ssemov,ssemov")
- (set (attr "modrm")
- (if_then_else
- (and (eq_attr "alternative" "5") (eq_attr "type" "imov"))
- (const_string "0")
+ [(set (attr "type")
+ (cond [(eq_attr "alternative" "0,1,2")
+ (const_string "fmov")
+ (eq_attr "alternative" "3,4,5")
+ (const_string "imov")
+ (eq_attr "alternative" "6")
+ (const_string "multi")
+ (eq_attr "alternative" "7")
+ (const_string "sselog1")
+ ]
+ (const_string "ssemov")))
+ (set (attr "modrm")
+ (if_then_else
+ (and (eq_attr "alternative" "5") (eq_attr "type" "imov"))
+ (const_string "0")
(const_string "*")))
(set (attr "length_immediate")
(if_then_else
(eq_attr "alternative" "3,4,5,6,11,12")
(const_string "DI")
- /* For SSE1, we have many fewer alternatives. */
- (eq (symbol_ref "TARGET_SSE2") (const_int 0))
- (cond [(eq_attr "alternative" "7,8")
- (const_string "V4SF")
- ]
- (const_string "V2SF"))
-
/* xorps is one byte shorter. */
(eq_attr "alternative" "7")
- (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (cond [(match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
- (const_int 0))
+ (match_test "TARGET_SSE_LOAD0_BY_PXOR")
(const_string "TI")
]
(const_string "V2DF"))
movaps encodes one byte shorter. */
(eq_attr "alternative" "8")
(cond
- [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ [(match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
- (const_int 0))
+ (match_test "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
(const_string "V2DF")
]
(const_string "DF"))
of register. */
(eq_attr "alternative" "9")
(if_then_else
- (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
- (const_int 0))
+ (match_test "TARGET_SSE_SPLIT_REGS")
(const_string "V1DF")
(const_string "DF"))
]
(const_string "DF")))])
+;; Possible store forwarding (partial memory) stall in alternative 4.
(define_insn "*movdf_internal"
[(set (match_operand:DF 0 "nonimmediate_operand"
- "=f,m,f,r ,o ,Y2*x,Y2*x,Y2*x,m ")
+ "=f,m,f,?Yd*r ,!o ,x,x,x,m,*x,*x,*x,m")
(match_operand:DF 1 "general_operand"
- "fm,f,G,roF,Fr,C ,Y2*x,m ,Y2*x"))]
+ "fm,f,G,Yd*roF,FYd*r,C,x,m,x,C ,*x,m ,*x"))]
"!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && optimize_function_for_speed_p (cfun)
- && TARGET_INTEGER_DFMODE_MOVES
- && (reload_in_progress || reload_completed
+ && (!can_create_pseudo_p ()
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
- || (!(TARGET_SSE2 && TARGET_SSE_MATH)
- && optimize_function_for_size_p (cfun)
- && standard_80387_constant_p (operands[1]))
|| GET_CODE (operands[1]) != CONST_DOUBLE
- || memory_operand (operands[0], DFmode))"
+ || (optimize_function_for_size_p (cfun)
+ && ((!(TARGET_SSE2 && TARGET_SSE_MATH)
+ && standard_80387_constant_p (operands[1]) > 0)
+ || (TARGET_SSE2 && TARGET_SSE_MATH
+ && standard_sse_constant_p (operands[1])))
+ && !memory_operand (operands[0], DFmode))
+ || (!TARGET_MEMORY_MISMATCH_STALL
+ && memory_operand (operands[0], DFmode)))"
{
switch (which_alternative)
{
return "#";
case 5:
- switch (get_attr_mode (insn))
- {
- case MODE_V4SF:
- return "%vxorps\t%0, %d0";
- case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vxorpd\t%0, %d0";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
- default:
- gcc_unreachable ();
- }
+ case 9:
+ return standard_sse_constant_opcode (insn, operands[1]);
+
case 6:
case 7:
case 8:
+ case 10:
+ case 11:
+ case 12:
switch (get_attr_mode (insn))
{
- case MODE_V4SF:
- return "%vmovaps\t{%1, %0|%0, %1}";
case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
+ if (!TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
return "%vmovapd\t{%1, %0|%0, %1}";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovdqa\t{%1, %0|%0, %1}";
+ case MODE_V4SF:
+ return "%vmovaps\t{%1, %0|%0, %1}";
+
case MODE_DI:
return "%vmovq\t{%1, %0|%0, %1}";
case MODE_DF:
if (TARGET_AVX && REG_P (operands[0]) && REG_P (operands[1]))
return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovsd\t{%1, %0|%0, %1}";
+ return "%vmovsd\t{%1, %0|%0, %1}";
case MODE_V1DF:
- if (TARGET_AVX && REG_P (operands[0]))
- return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovlpd\t{%1, %0|%0, %1}";
+ return "%vmovlpd\t{%1, %d0|%d0, %1}";
case MODE_V2SF:
- if (TARGET_AVX && REG_P (operands[0]))
- return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovlps\t{%1, %0|%0, %1}";
+ return "%vmovlps\t{%1, %d0|%d0, %1}";
default:
gcc_unreachable ();
}
gcc_unreachable ();
}
}
- [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
- (set (attr "prefix")
- (if_then_else (eq_attr "alternative" "0,1,2,3,4")
- (const_string "orig")
- (const_string "maybe_vex")))
- (set (attr "prefix_data16")
- (if_then_else (eq_attr "mode" "V1DF")
- (const_string "1")
+ [(set (attr "isa")
+ (if_then_else (eq_attr "alternative" "5,6,7,8")
+ (const_string "sse2")
(const_string "*")))
- (set (attr "mode")
- (cond [(eq_attr "alternative" "0,1,2")
- (const_string "DF")
+ (set (attr "type")
+ (cond [(eq_attr "alternative" "0,1,2")
+ (const_string "fmov")
(eq_attr "alternative" "3,4")
- (const_string "SI")
-
- /* For SSE1, we have many fewer alternatives. */
- (eq (symbol_ref "TARGET_SSE2") (const_int 0))
- (cond [(eq_attr "alternative" "5,6")
- (const_string "V4SF")
- ]
- (const_string "V2SF"))
-
- /* xorps is one byte shorter. */
- (eq_attr "alternative" "5")
- (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
- (const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
- (const_int 0))
- (const_string "TI")
- ]
- (const_string "V2DF"))
-
- /* For architectures resolving dependencies on
- whole SSE registers use APD move to break dependency
- chains, otherwise use short move to avoid extra work.
-
- movaps encodes one byte shorter. */
- (eq_attr "alternative" "6")
- (cond
- [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
- (const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
- (const_int 0))
- (const_string "V2DF")
- ]
- (const_string "DF"))
- /* For architectures resolving dependencies on register
- parts we may avoid extra work to zero out upper part
- of register. */
- (eq_attr "alternative" "7")
- (if_then_else
- (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
- (const_int 0))
- (const_string "V1DF")
- (const_string "DF"))
+ (const_string "multi")
+ (eq_attr "alternative" "5,9")
+ (const_string "sselog1")
]
- (const_string "DF")))])
-
-;; Moving is usually shorter when only FP registers are used. This separate
-;; movdf pattern avoids the use of integer registers for FP operations
-;; when optimizing for size.
-
-(define_insn "*movdf_internal_nointeger"
- [(set (match_operand:DF 0 "nonimmediate_operand"
- "=f,m,f,*r ,o ,Y2*x,Y2*x,Y2*x ,m ")
- (match_operand:DF 1 "general_operand"
- "fm,f,G,*roF,F*r,C ,Y2*x,mY2*x,Y2*x"))]
- "!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && (optimize_function_for_size_p (cfun)
- || !TARGET_INTEGER_DFMODE_MOVES)
- && (reload_in_progress || reload_completed
- || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
- || (!(TARGET_SSE2 && TARGET_SSE_MATH)
- && optimize_function_for_size_p (cfun)
- && !memory_operand (operands[0], DFmode)
- && standard_80387_constant_p (operands[1]))
- || GET_CODE (operands[1]) != CONST_DOUBLE
- || ((optimize_function_for_size_p (cfun)
- || !TARGET_MEMORY_MISMATCH_STALL
- || reload_in_progress || reload_completed)
- && memory_operand (operands[0], DFmode)))"
-{
- switch (which_alternative)
- {
- case 0:
- case 1:
- return output_387_reg_move (insn, operands);
-
- case 2:
- return standard_80387_constant_opcode (operands[1]);
-
- case 3:
- case 4:
- return "#";
-
- case 5:
- switch (get_attr_mode (insn))
- {
- case MODE_V4SF:
- return "%vxorps\t%0, %d0";
- case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vxorpd\t%0, %d0";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vxorps\t%0, %d0";
- else
- return "%vpxor\t%0, %d0";
- default:
- gcc_unreachable ();
- }
- case 6:
- case 7:
- case 8:
- switch (get_attr_mode (insn))
- {
- case MODE_V4SF:
- return "%vmovaps\t{%1, %0|%0, %1}";
- case MODE_V2DF:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovapd\t{%1, %0|%0, %1}";
- case MODE_TI:
- if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovdqa\t{%1, %0|%0, %1}";
- case MODE_DI:
- return "%vmovq\t{%1, %0|%0, %1}";
- case MODE_DF:
- if (TARGET_AVX && REG_P (operands[0]) && REG_P (operands[1]))
- return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovsd\t{%1, %0|%0, %1}";
- case MODE_V1DF:
- if (TARGET_AVX && REG_P (operands[0]))
- return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovlpd\t{%1, %0|%0, %1}";
- case MODE_V2SF:
- if (TARGET_AVX && REG_P (operands[0]))
- return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovlps\t{%1, %0|%0, %1}";
- default:
- gcc_unreachable ();
- }
-
- default:
- gcc_unreachable ();
- }
-}
- [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
+ (const_string "ssemov")))
(set (attr "prefix")
(if_then_else (eq_attr "alternative" "0,1,2,3,4")
(const_string "orig")
(const_string "SI")
/* For SSE1, we have many fewer alternatives. */
- (eq (symbol_ref "TARGET_SSE2") (const_int 0))
- (cond [(eq_attr "alternative" "5,6")
- (const_string "V4SF")
- ]
+ (not (match_test "TARGET_SSE2"))
+ (if_then_else
+ (eq_attr "alternative" "5,6,9,10")
+ (const_string "V4SF")
(const_string "V2SF"))
/* xorps is one byte shorter. */
- (eq_attr "alternative" "5")
- (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ (eq_attr "alternative" "5,9")
+ (cond [(match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
- (const_int 0))
+ (match_test "TARGET_SSE_LOAD0_BY_PXOR")
(const_string "TI")
]
(const_string "V2DF"))
chains, otherwise use short move to avoid extra work.
movaps encodes one byte shorter. */
- (eq_attr "alternative" "6")
+ (eq_attr "alternative" "6,10")
(cond
- [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0))
+ [(match_test "optimize_function_for_size_p (cfun)")
(const_string "V4SF")
- (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
- (const_int 0))
+ (match_test "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
(const_string "V2DF")
]
(const_string "DF"))
/* For architectures resolving dependencies on register
parts we may avoid extra work to zero out upper part
of register. */
- (eq_attr "alternative" "7")
+ (eq_attr "alternative" "7,11")
(if_then_else
- (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
- (const_int 0))
+ (match_test "TARGET_SSE_SPLIT_REGS")
(const_string "V1DF")
(const_string "DF"))
]
(const_string "DF")))])
-(define_split
- [(set (match_operand:DF 0 "nonimmediate_operand" "")
- (match_operand:DF 1 "general_operand" ""))]
- "reload_completed
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))
- && ! (ANY_FP_REG_P (operands[0]) ||
- (GET_CODE (operands[0]) == SUBREG
- && ANY_FP_REG_P (SUBREG_REG (operands[0]))))
- && ! (ANY_FP_REG_P (operands[1]) ||
- (GET_CODE (operands[1]) == SUBREG
- && ANY_FP_REG_P (SUBREG_REG (operands[1]))))"
- [(const_int 0)]
- "ix86_split_long_move (operands); DONE;")
-
(define_insn "*movsf_internal"
[(set (match_operand:SF 0 "nonimmediate_operand"
- "=f,m,f,r ,m ,x,x,x ,m,!*y,!m,!*y,?Yi,?r,!*Ym,!r")
+ "=f,m,f,?r ,?m,x,x,x,m,!*y,!m,!*y,?Yi,?r,!*Ym,!r")
(match_operand:SF 1 "general_operand"
- "fm,f,G,rmF,Fr,C,x,xm,x,m ,*y,*y ,r ,Yi,r ,*Ym"))]
+ "fm,f,G,rmF,Fr,C,x,m,x,m ,*y,*y ,r ,Yi,r ,*Ym"))]
"!(MEM_P (operands[0]) && MEM_P (operands[1]))
- && (reload_in_progress || reload_completed
+ && (!can_create_pseudo_p ()
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
- || (!TARGET_SSE_MATH && optimize_function_for_size_p (cfun)
- && standard_80387_constant_p (operands[1]))
|| GET_CODE (operands[1]) != CONST_DOUBLE
+ || (optimize_function_for_size_p (cfun)
+ && ((!TARGET_SSE_MATH
+ && standard_80387_constant_p (operands[1]) > 0)
+ || (TARGET_SSE_MATH
+ && standard_sse_constant_p (operands[1]))))
|| memory_operand (operands[0], SFmode))"
{
switch (which_alternative)
case 3:
case 4:
return "mov{l}\t{%1, %0|%0, %1}";
+
case 5:
- if (get_attr_mode (insn) == MODE_TI)
- return "%vpxor\t%0, %d0";
- else
- return "%vxorps\t%0, %d0";
+ return standard_sse_constant_opcode (insn, operands[1]);
+
case 6:
if (get_attr_mode (insn) == MODE_V4SF)
return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovss\t{%1, %d0|%d0, %1}";
- case 7:
- if (TARGET_AVX && REG_P (operands[1]))
+ if (TARGET_AVX)
return "vmovss\t{%1, %0, %0|%0, %0, %1}";
- else
- return "%vmovss\t{%1, %0|%0, %1}";
+
+ case 7:
case 8:
return "%vmovss\t{%1, %0|%0, %1}";
- case 9: case 10: case 14: case 15:
+ case 9:
+ case 10:
+ case 14:
+ case 15:
return "movd\t{%1, %0|%0, %1}";
case 11:
return "movq\t{%1, %0|%0, %1}";
- case 12: case 13:
+ case 12:
+ case 13:
return "%vmovd\t{%1, %0|%0, %1}";
default:
gcc_unreachable ();
}
}
- [(set_attr "type" "fmov,fmov,fmov,imov,imov,sselog1,ssemov,ssemov,ssemov,mmxmov,mmxmov,mmxmov,ssemov,ssemov,mmxmov,mmxmov")
+ [(set (attr "type")
+ (cond [(eq_attr "alternative" "0,1,2")
+ (const_string "fmov")
+ (eq_attr "alternative" "3,4")
+ (const_string "multi")
+ (eq_attr "alternative" "5")
+ (const_string "sselog1")
+ (eq_attr "alternative" "9,10,11,14,15")
+ (const_string "mmxmov")
+ ]
+ (const_string "ssemov")))
(set (attr "prefix")
(if_then_else (eq_attr "alternative" "5,6,7,8,12,13")
(const_string "maybe_vex")
(const_string "SI")
(eq_attr "alternative" "5")
(if_then_else
- (and (and (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
- (const_int 0))
- (ne (symbol_ref "TARGET_SSE2")
- (const_int 0)))
- (eq (symbol_ref "optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (and (and (match_test "TARGET_SSE_LOAD0_BY_PXOR")
+ (match_test "TARGET_SSE2"))
+ (not (match_test "optimize_function_for_size_p (cfun)")))
(const_string "TI")
(const_string "V4SF"))
/* For architectures resolving dependencies on
to avoid problems on using packed logical operations. */
(eq_attr "alternative" "6")
(if_then_else
- (ior (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
- (const_int 0))
- (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
- (const_int 0)))
+ (ior (match_test "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
+ (match_test "TARGET_SSE_SPLIT_REGS"))
(const_string "V4SF")
(const_string "SF"))
(eq_attr "alternative" "11")
(const_string "SF")))])
(define_split
- [(set (match_operand 0 "register_operand" "")
+ [(set (match_operand 0 "any_fp_register_operand" "")
(match_operand 1 "memory_operand" ""))]
"reload_completed
- && MEM_P (operands[1])
&& (GET_MODE (operands[0]) == TFmode
|| GET_MODE (operands[0]) == XFmode
|| GET_MODE (operands[0]) == DFmode
[(set (match_dup 0) (match_dup 2))]
{
rtx c = operands[2];
- rtx r = operands[0];
+ int r = REGNO (operands[0]);
- if (GET_CODE (r) == SUBREG)
- r = SUBREG_REG (r);
-
- if (SSE_REG_P (r))
- {
- if (!standard_sse_constant_p (c))
- FAIL;
- }
- else if (FP_REG_P (r))
- {
- if (!standard_80387_constant_p (c))
- FAIL;
- }
- else if (MMX_REG_P (r))
+ if ((SSE_REGNO_P (r) && !standard_sse_constant_p (c))
+ || (FP_REGNO_P (r) && standard_80387_constant_p (c) < 1))
FAIL;
})
(define_split
- [(set (match_operand 0 "register_operand" "")
+ [(set (match_operand 0 "any_fp_register_operand" "")
(float_extend (match_operand 1 "memory_operand" "")))]
"reload_completed
- && MEM_P (operands[1])
&& (GET_MODE (operands[0]) == TFmode
|| GET_MODE (operands[0]) == XFmode
- || GET_MODE (operands[0]) == DFmode
- || GET_MODE (operands[0]) == SFmode)
+ || GET_MODE (operands[0]) == DFmode)
&& (operands[2] = find_constant_src (insn))"
[(set (match_dup 0) (match_dup 2))]
{
rtx c = operands[2];
- rtx r = operands[0];
+ int r = REGNO (operands[0]);
- if (GET_CODE (r) == SUBREG)
- r = SUBREG_REG (r);
-
- if (SSE_REG_P (r))
- {
- if (!standard_sse_constant_p (c))
- FAIL;
- }
- else if (FP_REG_P (r))
- {
- if (!standard_80387_constant_p (c))
- FAIL;
- }
- else if (MMX_REG_P (r))
+ if ((SSE_REGNO_P (r) && !standard_sse_constant_p (c))
+ || (FP_REGNO_P (r) && standard_80387_constant_p (c) < 1))
FAIL;
})
;; Split the load of -0.0 or -1.0 into fldz;fchs or fld1;fchs sequence
(define_split
- [(set (match_operand:X87MODEF 0 "register_operand" "")
+ [(set (match_operand:X87MODEF 0 "fp_register_operand" "")
(match_operand:X87MODEF 1 "immediate_operand" ""))]
- "reload_completed && FP_REGNO_P (REGNO (operands[0]))
+ "reload_completed
&& (standard_80387_constant_p (operands[1]) == 8
|| standard_80387_constant_p (operands[1]) == 9)"
[(set (match_dup 0)(match_dup 1))
operands[1] = CONST1_RTX (<MODE>mode);
})
+;; Split a multi-word TF/XF/DFmode move in which neither side is an FP
+;; register into word-sized integer moves via ix86_split_long_move.
+;; Consolidates the per-mode splitters removed earlier in this patch.
+(define_split
+  [(set (match_operand 0 "nonimmediate_operand" "")
+	(match_operand 1 "general_operand" ""))]
+  "reload_completed
+   && (GET_MODE (operands[0]) == TFmode
+       || GET_MODE (operands[0]) == XFmode
+       || GET_MODE (operands[0]) == DFmode)
+   && !(ANY_FP_REG_P (operands[0]) || ANY_FP_REG_P (operands[1]))"
+  [(const_int 0)]
+  "ix86_split_long_move (operands); DONE;")
+
(define_insn "swapxf"
[(set (match_operand:XF 0 "register_operand" "+f")
(match_operand:XF 1 "register_operand" "+f"))
})
(define_insn "*zero_extendsidi2_rex64"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,o,?*Ym,?*y,?*Yi,*Y2")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r,o,?*Ym,?*y,?*Yi,*x")
(zero_extend:DI
(match_operand:SI 1 "nonimmediate_operand" "rm,0,r ,m ,r ,m")))]
"TARGET_64BIT"
"@
- mov\t{%k1, %k0|%k0, %k1}
+ mov{l}\t{%1, %k0|%k0, %1}
#
movd\t{%1, %0|%0, %1}
movd\t{%1, %0|%0, %1}
;; %%% Kill me once multi-word ops are sane.
(define_insn "zero_extendsidi2_1"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,?r,?o,?*Ym,?*y,?*Yi,*Y2")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r,?r,?o,?*Ym,?*y,?*Yi,*x")
(zero_extend:DI
(match_operand:SI 1 "nonimmediate_operand" "0,rm,r ,r ,m ,r ,m")))
(clobber (reg:CC FLAGS_REG))]
movd\t{%1, %0|%0, %1}
%vmovd\t{%1, %0|%0, %1}
%vmovd\t{%1, %0|%0, %1}"
- [(set_attr "type" "multi,multi,multi,mmxmov,mmxmov,ssemov,ssemov")
+ [(set_attr "isa" "*,*,*,*,*,*,sse2")
+ (set_attr "type" "multi,multi,multi,mmxmov,mmxmov,ssemov,ssemov")
(set_attr "prefix" "*,*,*,orig,orig,maybe_vex,maybe_vex")
(set_attr "mode" "SI,SI,SI,DI,DI,TI,TI")])
(set_attr "mode" "SF")])
(define_insn "*truncdfsf_mixed"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=m,Y2 ,?f,?x,?*r")
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=m,x ,?f,?x,?*r")
(float_truncate:SF
- (match_operand:DF 1 "nonimmediate_operand" "f ,Y2m,f ,f ,f")))
- (clobber (match_operand:SF 2 "memory_operand" "=X,X ,m ,m ,m"))]
+ (match_operand:DF 1 "nonimmediate_operand" "f ,xm,f ,f ,f")))
+ (clobber (match_operand:SF 2 "memory_operand" "=X,X ,m ,m ,m"))]
"TARGET_MIX_SSE_I387"
{
switch (which_alternative)
return "#";
}
}
- [(set_attr "type" "fmov,ssecvt,multi,multi,multi")
+ [(set_attr "isa" "*,sse2,*,*,*")
+ (set_attr "type" "fmov,ssecvt,multi,multi,multi")
(set_attr "unit" "*,*,i387,i387,i387")
(set_attr "prefix" "orig,maybe_vex,orig,orig,orig")
(set_attr "mode" "SF")])
(set_attr "mode" "SF")])
(define_insn "*truncxfdf2_mixed"
- [(set (match_operand:DF 0 "nonimmediate_operand" "=m,?f,?Y2,?*r")
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=m,?f,?x,?*r")
(float_truncate:DF
(match_operand:XF 1 "register_operand" "f ,f ,f ,f")))
(clobber (match_operand:DF 2 "memory_operand" "=X,m ,m ,m"))]
gcc_assert (!which_alternative);
return output_387_reg_move (insn, operands);
}
- [(set_attr "type" "fmov,multi,multi,multi")
+ [(set_attr "isa" "*,*,sse2,*")
+ (set_attr "type" "fmov,multi,multi,multi")
(set_attr "unit" "*,i387,i387,i387")
(set_attr "mode" "DF")])
+;; When a value is loaded from memory only to be fix-converted, and the
+;; loaded register dies afterwards, convert straight from memory instead
+;; (TARGET_SHORTEN_X87_SSE; suppressed where that form vector-decodes).
(define_peephole2
[(set (match_operand:MODEF 0 "register_operand" "")
(match_operand:MODEF 1 "memory_operand" ""))
- (set (match_operand:SSEMODEI24 2 "register_operand" "")
- (fix:SSEMODEI24 (match_dup 0)))]
+ (set (match_operand:SWI48x 2 "register_operand" "")
+ (fix:SWI48x (match_dup 0)))]
"TARGET_SHORTEN_X87_SSE
&& !(TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ())
&& peep2_reg_dead_p (2, operands[0])"
- [(set (match_dup 2) (fix:SSEMODEI24 (match_dup 1)))])
+ [(set (match_dup 2) (fix:SWI48x (match_dup 1)))])
;; Avoid vector decoded forms of the instruction.
+;; Load the DFmode source into an SSE scratch first, then convert
+;; register-to-register.  The scratch constraint is now "x", so the
+;; condition gains an explicit TARGET_SSE2 test.
(define_peephole2
- [(match_scratch:DF 2 "Y2")
- (set (match_operand:SSEMODEI24 0 "register_operand" "")
- (fix:SSEMODEI24 (match_operand:DF 1 "memory_operand" "")))]
- "TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
+ [(match_scratch:DF 2 "x")
+ (set (match_operand:SWI48x 0 "register_operand" "")
+ (fix:SWI48x (match_operand:DF 1 "memory_operand" "")))]
+ "TARGET_SSE2 && TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
[(set (match_dup 2) (match_dup 1))
- (set (match_dup 0) (fix:SSEMODEI24 (match_dup 2)))])
+ (set (match_dup 0) (fix:SWI48x (match_dup 2)))])
+;; Likewise for SFmode sources: go through an SSE scratch register to
+;; avoid the vector-decoded memory-operand form of the conversion.
(define_peephole2
[(match_scratch:SF 2 "x")
- (set (match_operand:SSEMODEI24 0 "register_operand" "")
- (fix:SSEMODEI24 (match_operand:SF 1 "memory_operand" "")))]
+ (set (match_operand:SWI48x 0 "register_operand" "")
+ (fix:SWI48x (match_operand:SF 1 "memory_operand" "")))]
"TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
[(set (match_dup 2) (match_dup 1))
- (set (match_dup 0) (fix:SSEMODEI24 (match_dup 2)))])
+ (set (match_dup 0) (fix:SWI48x (match_dup 2)))])
(define_insn_and_split "fix_trunc<mode>_fisttp_i387_1"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (fix:X87MODEI (match_operand 1 "register_operand" "")))]
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (fix:SWI248x (match_operand 1 "register_operand" "")))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& TARGET_FISTTP
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
(set_attr "mode" "<MODE>")])
(define_insn "fix_trunc<mode>_i387_fisttp"
- [(set (match_operand:X87MODEI 0 "memory_operand" "=m")
- (fix:X87MODEI (match_operand 1 "register_operand" "f")))
+ [(set (match_operand:SWI248x 0 "memory_operand" "=m")
+ (fix:SWI248x (match_operand 1 "register_operand" "f")))
(clobber (match_scratch:XF 2 "=&1f"))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& TARGET_FISTTP
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
&& (TARGET_64BIT || <MODE>mode != DImode))
&& TARGET_SSE_MATH)"
- "* return output_fix_trunc (insn, operands, 1);"
+ "* return output_fix_trunc (insn, operands, true);"
[(set_attr "type" "fisttp")
(set_attr "mode" "<MODE>")])
(define_insn "fix_trunc<mode>_i387_fisttp_with_temp"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "=m,?r")
- (fix:X87MODEI (match_operand 1 "register_operand" "f,f")))
- (clobber (match_operand:X87MODEI 2 "memory_operand" "=X,m"))
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "=m,?r")
+ (fix:SWI248x (match_operand 1 "register_operand" "f,f")))
+ (clobber (match_operand:SWI248x 2 "memory_operand" "=X,m"))
(clobber (match_scratch:XF 3 "=&1f,&1f"))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& TARGET_FISTTP
(set_attr "mode" "<MODE>")])
+;; fisttp with temporary, register destination: perform the conversion
+;; into the memory temporary, then load the result into the register.
(define_split
- [(set (match_operand:X87MODEI 0 "register_operand" "")
- (fix:X87MODEI (match_operand 1 "register_operand" "")))
- (clobber (match_operand:X87MODEI 2 "memory_operand" ""))
+ [(set (match_operand:SWI248x 0 "register_operand" "")
+ (fix:SWI248x (match_operand 1 "register_operand" "")))
+ (clobber (match_operand:SWI248x 2 "memory_operand" ""))
(clobber (match_scratch 3 ""))]
"reload_completed"
- [(parallel [(set (match_dup 2) (fix:X87MODEI (match_dup 1)))
+ [(parallel [(set (match_dup 2) (fix:SWI248x (match_dup 1)))
(clobber (match_dup 3))])
(set (match_dup 0) (match_dup 2))])
+;; fisttp with temporary, memory destination: the temporary (operand 2)
+;; is unneeded, so convert directly into the destination.
(define_split
- [(set (match_operand:X87MODEI 0 "memory_operand" "")
- (fix:X87MODEI (match_operand 1 "register_operand" "")))
- (clobber (match_operand:X87MODEI 2 "memory_operand" ""))
+ [(set (match_operand:SWI248x 0 "memory_operand" "")
+ (fix:SWI248x (match_operand 1 "register_operand" "")))
+ (clobber (match_operand:SWI248x 2 "memory_operand" ""))
(clobber (match_scratch 3 ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (fix:X87MODEI (match_dup 1)))
+ [(parallel [(set (match_dup 0) (fix:SWI248x (match_dup 1)))
(clobber (match_dup 3))])])
;; See the comments in i386.h near OPTIMIZE_MODE_SWITCHING for the description
;; clobbering insns can be used. Look at emit_i387_cw_initialization ()
;; function in i386.c.
(define_insn_and_split "*fix_trunc<mode>_i387_1"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (fix:X87MODEI (match_operand 1 "register_operand" "")))
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (fix:SWI248x (match_operand 1 "register_operand" "")))
(clobber (reg:CC FLAGS_REG))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& !TARGET_FISTTP
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& !TARGET_FISTTP
&& !(TARGET_64BIT && SSE_FLOAT_MODE_P (GET_MODE (operands[1])))"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "trunc")
(set_attr "mode" "DI")])
(clobber (match_dup 5))])])
(define_insn "fix_trunc<mode>_i387"
- [(set (match_operand:X87MODEI12 0 "memory_operand" "=m")
- (fix:X87MODEI12 (match_operand 1 "register_operand" "f")))
+ [(set (match_operand:SWI24 0 "memory_operand" "=m")
+ (fix:SWI24 (match_operand 1 "register_operand" "f")))
(use (match_operand:HI 2 "memory_operand" "m"))
(use (match_operand:HI 3 "memory_operand" "m"))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& !TARGET_FISTTP
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1]))"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "trunc")
(set_attr "mode" "<MODE>")])
(define_insn "fix_trunc<mode>_i387_with_temp"
- [(set (match_operand:X87MODEI12 0 "nonimmediate_operand" "=m,?r")
- (fix:X87MODEI12 (match_operand 1 "register_operand" "f,f")))
+ [(set (match_operand:SWI24 0 "nonimmediate_operand" "=m,?r")
+ (fix:SWI24 (match_operand 1 "register_operand" "f,f")))
(use (match_operand:HI 2 "memory_operand" "m,m"))
(use (match_operand:HI 3 "memory_operand" "m,m"))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" "=X,m"))]
+ (clobber (match_operand:SWI24 4 "memory_operand" "=X,m"))]
"X87_FLOAT_MODE_P (GET_MODE (operands[1]))
&& !TARGET_FISTTP
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1]))"
(set_attr "mode" "<MODE>")])
+;; fist with control-word uses, register destination: convert into the
+;; memory temporary (operand 4), then load it into the register.
(define_split
- [(set (match_operand:X87MODEI12 0 "register_operand" "")
- (fix:X87MODEI12 (match_operand 1 "register_operand" "")))
+ [(set (match_operand:SWI24 0 "register_operand" "")
+ (fix:SWI24 (match_operand 1 "register_operand" "")))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 4) (fix:X87MODEI12 (match_dup 1)))
+ [(parallel [(set (match_dup 4) (fix:SWI24 (match_dup 1)))
(use (match_dup 2))
(use (match_dup 3))])
(set (match_dup 0) (match_dup 4))])
+;; fist with control-word uses, memory destination: the temporary
+;; (operand 4) is unused; store the converted value directly.
(define_split
- [(set (match_operand:X87MODEI12 0 "memory_operand" "")
- (fix:X87MODEI12 (match_operand 1 "register_operand" "")))
+ [(set (match_operand:SWI24 0 "memory_operand" "")
+ (fix:SWI24 (match_operand 1 "register_operand" "")))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (fix:X87MODEI12 (match_dup 1)))
+ [(parallel [(set (match_dup 0) (fix:SWI24 (match_dup 1)))
(use (match_dup 2))
(use (match_dup 3))])])
(define_insn "*floathi<mode>2_i387_with_temp"
[(set (match_operand:X87MODEF 0 "register_operand" "=f,f")
(float:X87MODEF (match_operand:HI 1 "nonimmediate_operand" "m,?r")))
- (clobber (match_operand:HI 2 "memory_operand" "=m,m"))]
+ (clobber (match_operand:HI 2 "memory_operand" "=X,m"))]
"TARGET_80387
&& (!(SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH)
|| TARGET_MIX_SSE_I387)"
&& reload_completed"
[(set (match_dup 0) (float:X87MODEF (match_dup 1)))])
-(define_expand "float<SSEMODEI24:mode><X87MODEF:mode>2"
+(define_expand "float<SWI48x:mode><X87MODEF:mode>2"
[(set (match_operand:X87MODEF 0 "register_operand" "")
(float:X87MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "")))]
+ (match_operand:SWI48x 1 "nonimmediate_operand" "")))]
"TARGET_80387
- || ((<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ || ((<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH)"
{
- if (!((<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ if (!((<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH)
- && !X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode))
+ && !X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode))
{
rtx reg = gen_reg_rtx (XFmode);
rtx (*insn)(rtx, rtx);
- emit_insn (gen_float<SSEMODEI24:mode>xf2 (reg, operands[1]));
+ emit_insn (gen_float<SWI48x:mode>xf2 (reg, operands[1]));
if (<X87MODEF:MODE>mode == SFmode)
insn = gen_truncxfsf2;
})
;; Pre-reload splitter to add memory clobber to the pattern.
-(define_insn_and_split "*float<SSEMODEI24:mode><X87MODEF:mode>2_1"
+(define_insn_and_split "*float<SWI48x:mode><X87MODEF:mode>2_1"
[(set (match_operand:X87MODEF 0 "register_operand" "")
- (float:X87MODEF (match_operand:SSEMODEI24 1 "register_operand" "")))]
+ (float:X87MODEF (match_operand:SWI48x 1 "register_operand" "")))]
"((TARGET_80387
- && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode)
- && (!((<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode)
+ && (!((<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH)
|| TARGET_MIX_SSE_I387))
- || ((<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ || ((<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH
- && ((<SSEMODEI24:MODE>mode == SImode
+ && ((<SWI48x:MODE>mode == SImode
&& TARGET_SSE2 && TARGET_USE_VECTOR_CONVERTS
&& optimize_function_for_speed_p (cfun)
&& flag_trapping_math)
[(parallel [(set (match_dup 0) (float:X87MODEF (match_dup 1)))
(clobber (match_dup 2))])]
{
- operands[2] = assign_386_stack_local (<SSEMODEI24:MODE>mode, SLOT_TEMP);
+ operands[2] = assign_386_stack_local (<SWI48x:MODE>mode, SLOT_TEMP);
/* Avoid store forwarding (partial memory) stall penalty
by passing DImode value through XMM registers. */
- if (<SSEMODEI24:MODE>mode == DImode && !TARGET_64BIT
+ if (<SWI48x:MODE>mode == DImode && !TARGET_64BIT
&& TARGET_80387 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
&& optimize_function_for_speed_p (cfun))
{
(set_attr "bdver1_decode" "*,direct")
(set_attr "fp_int_src" "true")])
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_mixed_with_temp"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_mixed_with_temp"
[(set (match_operand:MODEF 0 "register_operand" "=f,f,x,x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "m,?r,r,m")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" "=X,m,m,X"))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "nonimmediate_operand" "m,?r,r,m")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" "=X,m,m,X"))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387"
"#"
[(set_attr "type" "fmov,multi,sseicvt,sseicvt")
+;; Mixed x87/SSE float, destination landed in an SSE register and
+;; inter-unit conversions are OK: convert straight from the integer
+;; register, dropping the memory temporary.  Note the SUBREG case now
+;; correctly tests SUBREG_REG (operands[0]).
(define_split
[(set (match_operand:MODEF 0 "register_operand" "")
- (float:MODEF (match_operand:SSEMODEI24 1 "register_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (float:MODEF (match_operand:SWI48x 1 "register_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
&& TARGET_INTER_UNIT_CONVERSIONS
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(set (match_dup 0) (float:MODEF (match_dup 1)))])
+;; Same situation without inter-unit conversions: spill the integer to
+;; the memory temporary (operand 2) and convert from there.
(define_split
[(set (match_operand:MODEF 0 "register_operand" "")
- (float:MODEF (match_operand:SSEMODEI24 1 "register_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (float:MODEF (match_operand:SWI48x 1 "register_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
&& !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (float:MODEF (match_dup 2)))])
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_mixed_interunit"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_mixed_interunit"
[(set (match_operand:MODEF 0 "register_operand" "=f,x,x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "m,r,m")))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "nonimmediate_operand" "m,r,m")))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
&& (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
"@
fild%Z1\t%1
- %vcvtsi2<MODEF:ssemodesuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}
- %vcvtsi2<MODEF:ssemodesuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
+ %vcvtsi2<MODEF:ssemodesuffix><SWI48x:rex64suffix>\t{%1, %d0|%d0, %1}
+ %vcvtsi2<MODEF:ssemodesuffix><SWI48x:rex64suffix>\t{%1, %d0|%d0, %1}"
[(set_attr "type" "fmov,sseicvt,sseicvt")
(set_attr "prefix" "orig,maybe_vex,maybe_vex")
(set_attr "mode" "<MODEF:MODE>")
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "prefix" "maybe_vex")
- (ne (symbol_ref "<SSEMODEI24:MODE>mode == DImode") (const_int 0)))
+ (match_test "<SWI48x:MODE>mode == DImode"))
(const_string "1")
(const_string "*")))
(set_attr "unit" "i387,*,*")
(set_attr "bdver1_decode" "*,double,direct")
(set_attr "fp_int_src" "true")])
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_mixed_nointerunit"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_mixed_nointerunit"
[(set (match_operand:MODEF 0 "register_operand" "=f,x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "memory_operand" "m,m")))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "memory_operand" "m,m")))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
&& !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
"@
fild%Z1\t%1
- %vcvtsi2<MODEF:ssemodesuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
+ %vcvtsi2<MODEF:ssemodesuffix><SWI48x:rex64suffix>\t{%1, %d0|%d0, %1}"
[(set_attr "type" "fmov,sseicvt")
(set_attr "prefix" "orig,maybe_vex")
(set_attr "mode" "<MODEF:MODE>")
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "prefix" "maybe_vex")
- (ne (symbol_ref "<SSEMODEI24:MODE>mode == DImode") (const_int 0)))
+ (match_test "<SWI48x:MODE>mode == DImode"))
(const_string "1")
(const_string "*")))
(set_attr "athlon_decode" "*,direct")
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(const_int 0)]
{
rtx op1 = operands[1];
emit_insn (gen_sse2_loadld (operands[4],
CONST0_RTX (V4SImode), operands[2]));
}
- emit_insn
- (gen_sse2_cvtdq2<ssevecmodesuffix> (operands[3], operands[4]));
+ if (<ssevecmode>mode == V4SFmode)
+ emit_insn (gen_floatv4siv4sf2 (operands[3], operands[4]));
+ else
+ emit_insn (gen_sse2_cvtdq2pd (operands[3], operands[4]));
DONE;
})
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(const_int 0)]
{
operands[3] = simplify_gen_subreg (<ssevecmode>mode, operands[0],
emit_insn (gen_sse2_loadld (operands[4],
CONST0_RTX (V4SImode), operands[1]));
- emit_insn
- (gen_sse2_cvtdq2<ssevecmodesuffix> (operands[3], operands[4]));
+ if (<ssevecmode>mode == V4SFmode)
+ emit_insn (gen_floatv4siv4sf2 (operands[3], operands[4]));
+ else
+ emit_insn (gen_sse2_cvtdq2pd (operands[3], operands[4]));
DONE;
})
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(const_int 0)]
{
rtx op1 = operands[1];
if (GET_CODE (op1) == SUBREG)
op1 = SUBREG_REG (op1);
- if (GENERAL_REG_P (op1) && TARGET_INTER_UNIT_MOVES)
+ if (GENERAL_REG_P (op1))
{
operands[4] = simplify_gen_subreg (V4SImode, operands[0], <MODE>mode, 0);
- emit_insn (gen_sse2_loadld (operands[4],
- CONST0_RTX (V4SImode), operands[1]));
+ if (TARGET_INTER_UNIT_MOVES)
+ emit_insn (gen_sse2_loadld (operands[4],
+ CONST0_RTX (V4SImode), operands[1]));
+ else
+ {
+ operands[5] = ix86_force_to_memory (GET_MODE (operands[1]),
+ operands[1]);
+ emit_insn (gen_sse2_loadld (operands[4],
+ CONST0_RTX (V4SImode), operands[5]));
+ ix86_free_from_memory (GET_MODE (operands[1]));
+ }
}
/* We can ignore possible trapping value in the
high part of SSE register for non-trapping math. */
operands[4] = simplify_gen_subreg (V4SImode, operands[1], SImode, 0);
else
gcc_unreachable ();
- emit_insn
- (gen_sse2_cvtdq2<ssevecmodesuffix> (operands[3], operands[4]));
+ if (<ssevecmode>mode == V4SFmode)
+ emit_insn (gen_floatv4siv4sf2 (operands[3], operands[4]));
+ else
+ emit_insn (gen_sse2_cvtdq2pd (operands[3], operands[4]));
DONE;
})
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(const_int 0)]
{
operands[3] = simplify_gen_subreg (<ssevecmode>mode, operands[0],
emit_insn (gen_sse2_loadld (operands[4],
CONST0_RTX (V4SImode), operands[1]));
- emit_insn
- (gen_sse2_cvtdq2<ssevecmodesuffix> (operands[3], operands[4]));
+ if (<ssevecmode>mode == V4SFmode)
+ emit_insn (gen_floatv4siv4sf2 (operands[3], operands[4]));
+ else
+ emit_insn (gen_sse2_cvtdq2pd (operands[3], operands[4]));
DONE;
})
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_sse_with_temp"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_sse_with_temp"
[(set (match_operand:MODEF 0 "register_operand" "=x,x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "r,m")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" "=m,X"))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "nonimmediate_operand" "r,m")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" "=m,X"))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH"
"#"
[(set_attr "type" "sseicvt")
(set_attr "bdver1_decode" "double,direct")
(set_attr "fp_int_src" "true")])
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_sse_interunit"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_sse_interunit"
[(set (match_operand:MODEF 0 "register_operand" "=x,x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "r,m")))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "nonimmediate_operand" "r,m")))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
- "%vcvtsi2<MODEF:ssemodesuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
+ "%vcvtsi2<MODEF:ssemodesuffix><SWI48x:rex64suffix>\t{%1, %d0|%d0, %1}"
[(set_attr "type" "sseicvt")
(set_attr "prefix" "maybe_vex")
(set_attr "mode" "<MODEF:MODE>")
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "prefix" "maybe_vex")
- (ne (symbol_ref "<SSEMODEI24:MODE>mode == DImode") (const_int 0)))
+ (match_test "<SWI48x:MODE>mode == DImode"))
(const_string "1")
(const_string "*")))
(set_attr "athlon_decode" "double,direct")
+;; SSE math, inter-unit conversions allowed (or optimizing for size):
+;; cvtsi2s[sd] takes the integer operand directly, so the memory
+;; temporary can be dropped.
(define_split
[(set (match_operand:MODEF 0 "register_operand" "")
- (float:MODEF (match_operand:SSEMODEI24 1 "nonimmediate_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (float:MODEF (match_operand:SWI48x 1 "nonimmediate_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(set (match_dup 0) (float:MODEF (match_dup 1)))])
-(define_insn "*float<SSEMODEI24:mode><MODEF:mode>2_sse_nointerunit"
+(define_insn "*float<SWI48x:mode><MODEF:mode>2_sse_nointerunit"
[(set (match_operand:MODEF 0 "register_operand" "=x")
(float:MODEF
- (match_operand:SSEMODEI24 1 "memory_operand" "m")))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (match_operand:SWI48x 1 "memory_operand" "m")))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
- "%vcvtsi2<MODEF:ssemodesuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
+ "%vcvtsi2<MODEF:ssemodesuffix><SWI48x:rex64suffix>\t{%1, %d0|%d0, %1}"
[(set_attr "type" "sseicvt")
(set_attr "prefix" "maybe_vex")
(set_attr "mode" "<MODEF:MODE>")
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "prefix" "maybe_vex")
- (ne (symbol_ref "<SSEMODEI24:MODE>mode == DImode") (const_int 0)))
+ (match_test "<SWI48x:MODE>mode == DImode"))
(const_string "1")
(const_string "*")))
(set_attr "athlon_decode" "direct")
+;; SSE math without inter-unit conversions: move the integer through the
+;; memory temporary, then convert from memory.
(define_split
[(set (match_operand:MODEF 0 "register_operand" "")
- (float:MODEF (match_operand:SSEMODEI24 1 "register_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (float:MODEF (match_operand:SWI48x 1 "register_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (float:MODEF (match_dup 2)))])
+;; SSE math, source already in memory: convert directly; the temporary
+;; (operand 2) is unused.
(define_split
[(set (match_operand:MODEF 0 "register_operand" "")
- (float:MODEF (match_operand:SSEMODEI24 1 "memory_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
- "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
+ (float:MODEF (match_operand:SWI48x 1 "memory_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
+ "(<SWI48x:MODE>mode != DImode || TARGET_64BIT)
&& SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& reload_completed
&& (SSE_REG_P (operands[0])
|| (GET_CODE (operands[0]) == SUBREG
- && SSE_REG_P (operands[0])))"
+ && SSE_REG_P (SUBREG_REG (operands[0]))))"
[(set (match_dup 0) (float:MODEF (match_dup 1)))])
-(define_insn "*float<SSEMODEI24:mode><X87MODEF:mode>2_i387_with_temp"
+(define_insn "*float<SWI48x:mode><X87MODEF:mode>2_i387_with_temp"
[(set (match_operand:X87MODEF 0 "register_operand" "=f,f")
(float:X87MODEF
- (match_operand:SSEMODEI24 1 "nonimmediate_operand" "m,?r")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" "=X,m"))]
+ (match_operand:SWI48x 1 "nonimmediate_operand" "m,?r")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" "=X,m"))]
"TARGET_80387
- && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode)"
+ && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode)"
"@
fild%Z1\t%1
#"
(set_attr "unit" "*,i387")
(set_attr "fp_int_src" "true")])
+;; x87 int-to-float conversion: fild loads the integer straight from
+;; memory into an x87 register.
-(define_insn "*float<SSEMODEI24:mode><X87MODEF:mode>2_i387"
+(define_insn "*float<SWI48x:mode><X87MODEF:mode>2_i387"
[(set (match_operand:X87MODEF 0 "register_operand" "=f")
(float:X87MODEF
- (match_operand:SSEMODEI24 1 "memory_operand" "m")))]
+ (match_operand:SWI48x 1 "memory_operand" "m")))]
"TARGET_80387
- && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode)"
+ && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode)"
"fild%Z1\t%1"
[(set_attr "type" "fmov")
(set_attr "mode" "<X87MODEF:MODE>")
(set_attr "fp_int_src" "true")])
+;; Integer source in a register: store it to the memory temporary, then
+;; fild.  Moving the FP_REG_P test into the fp_register_operand
+;; predicate simplifies the split condition.
(define_split
- [(set (match_operand:X87MODEF 0 "register_operand" "")
- (float:X87MODEF (match_operand:SSEMODEI24 1 "register_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
+ [(set (match_operand:X87MODEF 0 "fp_register_operand" "")
+ (float:X87MODEF (match_operand:SWI48x 1 "register_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
"TARGET_80387
- && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode)
- && reload_completed
- && FP_REG_P (operands[0])"
+ && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode)
+ && reload_completed"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (float:X87MODEF (match_dup 2)))])
+;; Integer source already in memory: fild it directly; the temporary
+;; (operand 2) is unused.
(define_split
- [(set (match_operand:X87MODEF 0 "register_operand" "")
- (float:X87MODEF (match_operand:SSEMODEI24 1 "memory_operand" "")))
- (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
+ [(set (match_operand:X87MODEF 0 "fp_register_operand" "")
+ (float:X87MODEF (match_operand:SWI48x 1 "memory_operand" "")))
+ (clobber (match_operand:SWI48x 2 "memory_operand" ""))]
"TARGET_80387
- && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SSEMODEI24:MODE>mode)
- && reload_completed
- && FP_REG_P (operands[0])"
+ && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, <SWI48x:MODE>mode)
+ && reload_completed"
[(set (match_dup 0) (float:X87MODEF (match_dup 1)))])
;; Avoid store forwarding (partial memory) stall penalty
(set_attr "fp_int_src" "true")])
(define_split
- [(set (match_operand:X87MODEF 0 "register_operand" "")
+ [(set (match_operand:X87MODEF 0 "fp_register_operand" "")
(float:X87MODEF (match_operand:DI 1 "register_operand" "")))
(clobber (match_scratch:V4SI 3 ""))
(clobber (match_scratch:V4SI 4 ""))
"TARGET_80387 && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, DImode)
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
&& !TARGET_64BIT && optimize_function_for_speed_p (cfun)
- && reload_completed
- && FP_REG_P (operands[0])"
+ && reload_completed"
[(set (match_dup 2) (match_dup 3))
(set (match_dup 0) (float:X87MODEF (match_dup 2)))]
{
})
+;; DImode source already in memory: fild can load it directly, so the
+;; V4SI scratches go unused.  (NOTE(review): the pattern appears to
+;; have further operands elided by this hunk's context — verify against
+;; the full file before relying on operand numbering here.)
(define_split
- [(set (match_operand:X87MODEF 0 "register_operand" "")
+ [(set (match_operand:X87MODEF 0 "fp_register_operand" "")
(float:X87MODEF (match_operand:DI 1 "memory_operand" "")))
(clobber (match_scratch:V4SI 3 ""))
(clobber (match_scratch:V4SI 4 ""))
"TARGET_80387 && X87_ENABLE_FLOAT (<X87MODEF:MODE>mode, DImode)
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
&& !TARGET_64BIT && optimize_function_for_speed_p (cfun)
- && reload_completed
- && FP_REG_P (operands[0])"
+ && reload_completed"
[(set (match_dup 0) (float:X87MODEF (match_dup 1)))])
;; Avoid store forwarding (partial memory) stall penalty by extending
[(set_attr "type" "alu")
(set_attr "mode" "QI")])
-(define_insn "*lea_1"
- [(set (match_operand:P 0 "register_operand" "=r")
- (match_operand:P 1 "no_seg_address_operand" "p"))]
- ""
- "lea{<imodesuffix>}\t{%a1, %0|%0, %a1}"
- [(set_attr "type" "lea")
- (set_attr "mode" "<MODE>")])
-
-(define_insn "*lea_2"
+;; SImode lea whose address is the low part of a DImode expression.
+;; After reload, when ix86_avoid_lea_for_addr deems lea slow on the
+;; target, ix86_split_lea_for_addr rewrites it as arithmetic insns.
+(define_insn_and_split "*lea_1"
[(set (match_operand:SI 0 "register_operand" "=r")
- (subreg:SI (match_operand:DI 1 "no_seg_address_operand" "p") 0))]
+ (subreg:SI (match_operand:DI 1 "lea_address_operand" "p") 0))]
"TARGET_64BIT"
"lea{l}\t{%a1, %0|%0, %a1}"
+ "&& reload_completed && ix86_avoid_lea_for_addr (insn, operands)"
+ [(const_int 0)]
+{
+ ix86_split_lea_for_addr (operands, SImode);
+ DONE;
+}
[(set_attr "type" "lea")
(set_attr "mode" "SI")])
-(define_insn "*lea_2_zext"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (zero_extend:DI
- (subreg:SI (match_operand:DI 1 "no_seg_address_operand" "p") 0)))]
- "TARGET_64BIT"
- "lea{l}\t{%a1, %k0|%k0, %a1}"
- [(set_attr "type" "lea")
+;; Plain SImode/DImode lea (SWI48 iterator); likewise split into
+;; arithmetic after reload when lea is slow on the target CPU.
+(define_insn_and_split "*lea<mode>_2"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (match_operand:SWI48 1 "lea_address_operand" "p"))]
+ ""
+ "lea{<imodesuffix>}\t{%a1, %0|%0, %a1}"
+ "reload_completed && ix86_avoid_lea_for_addr (insn, operands)"
+ [(const_int 0)]
+{
+ ix86_split_lea_for_addr (operands, <MODE>mode);
+ DONE;
+}
+ [(set_attr "type" "lea")
+ (set_attr "mode" "<MODE>")])
+
+;; zero_extend of the SImode low part of a DImode address: lea{l}
+;; already zero-extends its 32-bit result into the full register.
+(define_insn "*lea_3_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI
+ (subreg:SI (match_operand:DI 1 "lea_address_operand" "j") 0)))]
+ "TARGET_64BIT"
+ "lea{l}\t{%a1, %k0|%k0, %a1}"
+ [(set_attr "type" "lea")
+ (set_attr "mode" "SI")])
+
+;; Same as above for a direct SImode address operand.
+(define_insn "*lea_4_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI
+ (match_operand:SI 1 "lea_address_operand" "j")))]
+ "TARGET_64BIT"
+ "lea{l}\t{%a1, %k0|%k0, %a1}"
+ [(set_attr "type" "lea")
+ (set_attr "mode" "SI")])
+
+;; AND with a 32-bit mask is equivalent to zero-extension, which lea{l}
+;; performs implicitly; here the source is a paradoxical DI subreg.
+(define_insn "*lea_5_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (and:DI
+ (subreg:DI (match_operand:SI 1 "lea_address_operand" "p") 0)
+ (match_operand:DI 2 "const_32bit_mask" "n")))]
+ "TARGET_64BIT"
+ "lea{l}\t{%a1, %k0|%k0, %a1}"
+ [(set_attr "type" "lea")
+ (set_attr "mode" "SI")])
+
+;; Same, with a DImode address operand masked down to 32 bits.
+(define_insn "*lea_6_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (and:DI
+ (match_operand:DI 1 "lea_address_operand" "p")
+ (match_operand:DI 2 "const_32bit_mask" "n")))]
+ "TARGET_64BIT"
+ "lea{l}\t{%a1, %k0|%k0, %a1}"
+ [(set_attr "type" "lea")
+ (set_attr "mode" "SI")])
(define_insn "*add<mode>_1"
[(set (match_operand:SWI48 0 "nonimmediate_operand" "=r,rm,r,r")
(plus:SWI48
(match_operand:SWI48 1 "nonimmediate_operand" "%0,0,r,r")
- (match_operand:SWI48 2 "<general_operand>" "<g>,r<i>,0,l<i>")))
+ (match_operand:SWI48 2 "x86_64_general_operand" "rme,re,0,le")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (PLUS, <MODE>mode, operands)"
{
;; operands so proper swapping will be done in reload. This allow
;; patterns constructed from addsi_1 to match.
-(define_insn "*addsi_1_zext"
+(define_insn "addsi_1_zext"
[(set (match_operand:DI 0 "register_operand" "=r,r,r")
(zero_extend:DI
(plus:SI (match_operand:SI 1 "nonimmediate_operand" "%0,r,r")
- (match_operand:SI 2 "general_operand" "g,0,li"))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme,0,le"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (PLUS, SImode, operands)"
{
(set_attr "mode" "SI")])
(define_insn "*addhi_1"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r")
- (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0")
- (match_operand:HI 2 "general_operand" "rn,rm")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, HImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_INCDEC:
- if (operands[2] == const1_rtx)
- return "inc{w}\t%0";
- else
- {
- gcc_assert (operands[2] == constm1_rtx);
- return "dec{w}\t%0";
- }
-
- default:
- if (x86_maybe_negate_const_int (&operands[2], HImode))
- return "sub{w}\t{%2, %0|%0, %2}";
-
- return "add{w}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (if_then_else (match_operand:HI 2 "incdec_operand" "")
- (const_string "incdec")
- (const_string "alu")))
- (set (attr "length_immediate")
- (if_then_else
- (and (eq_attr "type" "alu") (match_operand 2 "const128_operand" ""))
- (const_string "1")
- (const_string "*")))
- (set_attr "mode" "HI")])
-
-(define_insn "*addhi_1_lea"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,rm,r,r")
- (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0,r,r")
- (match_operand:HI 2 "general_operand" "rmn,rn,0,ln")))
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r,r,Yp")
+ (plus:HI (match_operand:HI 1 "nonimmediate_operand" "%0,0,r,Yp")
+ (match_operand:HI 2 "general_operand" "rn,rm,0,ln")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, HImode, operands)"
+ "ix86_binary_operator_ok (PLUS, HImode, operands)"
{
switch (get_attr_type (insn))
{
(const_string "*")))
(set_attr "mode" "HI,HI,HI,SI")])
-;; %%% Potential partial reg stall on alternative 2. What to do?
-(define_insn "*addqi_1"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,q,r")
- (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,0")
- (match_operand:QI 2 "general_operand" "qn,qmn,rn")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, QImode, operands)"
-{
- int widen = (which_alternative == 2);
- switch (get_attr_type (insn))
- {
- case TYPE_INCDEC:
- if (operands[2] == const1_rtx)
- return widen ? "inc{l}\t%k0" : "inc{b}\t%0";
- else
- {
- gcc_assert (operands[2] == constm1_rtx);
- return widen ? "dec{l}\t%k0" : "dec{b}\t%0";
- }
-
- default:
- if (x86_maybe_negate_const_int (&operands[2], QImode))
- {
- if (widen)
- return "sub{l}\t{%2, %k0|%k0, %2}";
- else
- return "sub{b}\t{%2, %0|%0, %2}";
- }
- if (widen)
- return "add{l}\t{%k2, %k0|%k0, %k2}";
- else
- return "add{b}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (if_then_else (match_operand:QI 2 "incdec_operand" "")
- (const_string "incdec")
- (const_string "alu")))
- (set (attr "length_immediate")
- (if_then_else
- (and (eq_attr "type" "alu") (match_operand 2 "const128_operand" ""))
- (const_string "1")
- (const_string "*")))
- (set_attr "mode" "QI,QI,SI")])
-
;; %%% Potential partial reg stall on alternatives 3 and 4. What to do?
-(define_insn "*addqi_1_lea"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=q,qm,q,r,r,r")
- (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,q,0,r,r")
- (match_operand:QI 2 "general_operand" "qmn,qn,0,rn,0,ln")))
+(define_insn "*addqi_1"
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,q,q,r,r,Yp")
+ (plus:QI (match_operand:QI 1 "nonimmediate_operand" "%0,0,q,0,r,Yp")
+ (match_operand:QI 2 "general_operand" "qn,qm,0,rn,0,ln")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (PLUS, QImode, operands)"
+ "ix86_binary_operator_ok (PLUS, QImode, operands)"
{
- int widen = (which_alternative == 3 || which_alternative == 4);
+ bool widen = (which_alternative == 3 || which_alternative == 4);
switch (get_attr_type (insn))
{
(define_insn "*addqi_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm,q"))
(plus:QI (match_dup 0)
- (match_operand:QI 1 "general_operand" "qn,qnm")))
+ (match_operand:QI 1 "general_operand" "qn,qm")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
(const_string "none")))
(set_attr "mode" "QI")])
-;; Convert lea to the lea pattern to avoid flags dependency.
+;; Split non-destructive adds if we cannot use lea.
+;; When ix86_avoid_lea_for_add reports that a lea would be a
+;; pessimization on the target CPU, rewrite reg0 = reg1 + op2 as an
+;; explicit register copy followed by the two-operand, flags-clobbering
+;; add.
(define_split
- [(set (match_operand 0 "register_operand" "")
- (plus (match_operand 1 "register_operand" "")
- (match_operand 2 "nonmemory_operand" "")))
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (plus:SWI48 (match_operand:SWI48 1 "register_operand" "")
+ (match_operand:SWI48 2 "nonmemory_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "reload_completed && ix86_avoid_lea_for_add (insn, operands)"
+ [(set (match_dup 0) (match_dup 1))
+ (parallel [(set (match_dup 0) (plus:<MODE> (match_dup 0) (match_dup 2)))
+ (clobber (reg:CC FLAGS_REG))])])
+
+;; Convert add to the lea pattern to avoid flags dependency.
+;; lea computes the sum without writing the flags, so it can break a
+;; dependency on an earlier flags-producing instruction.  QImode and
+;; HImode operands are first widened to SImode so the 32-bit lea
+;; patterns can match the result.
+(define_split
+ [(set (match_operand:SWI 0 "register_operand" "")
+ (plus:SWI (match_operand:SWI 1 "register_operand" "")
+ (match_operand:SWI 2 "<nonmemory_operand>" "")))
 (clobber (reg:CC FLAGS_REG))]
 "reload_completed && ix86_lea_for_add_ok (insn, operands)"
 [(const_int 0)]
{
+ enum machine_mode mode = <MODE>mode;
 rtx pat;
- enum machine_mode mode = GET_MODE (operands[0]);
-
- /* In -fPIC mode the constructs like (const (unspec [symbol_ref]))
- may confuse gen_lowpart. */
- if (mode != Pmode)
- {
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[2] = gen_lowpart (Pmode, operands[2]);
- }
-
- pat = gen_rtx_PLUS (Pmode, operands[1], operands[2]);
 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (SImode))
- operands[0] = gen_lowpart (SImode, operands[0]);
+ {
+ /* Narrow (QI/HI) adds are performed as SImode lea.  */
+ mode = SImode;
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ operands[2] = gen_lowpart (mode, operands[2]);
+ }
- if (TARGET_64BIT && mode != Pmode)
- pat = gen_rtx_SUBREG (SImode, pat, 0);
+ pat = gen_rtx_PLUS (mode, operands[1], operands[2]);
 emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
 DONE;
})
-;; Convert lea to the lea pattern to avoid flags dependency.
-;; ??? This pattern handles immediate operands that do not satisfy immediate
-;; operand predicate (TARGET_LEGITIMATE_CONSTANT_P) in the previous pattern.
-(define_split
- [(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 1 "register_operand" "")
- (match_operand:DI 2 "x86_64_immediate_operand" "")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_64BIT && reload_completed
- && true_regnum (operands[0]) != true_regnum (operands[1])"
- [(set (match_dup 0)
- (plus:DI (match_dup 1) (match_dup 2)))]
-
-;; Convert lea to the lea pattern to avoid flags dependency.
+;; Convert add to the lea pattern to avoid flags dependency.
+;; The plus is kept in SImode under the zero_extend so the result can
+;; match the zero-extending lea patterns directly, instead of going
+;; through a DImode subreg as the removed variant above did.
(define_split
 [(set (match_operand:DI 0 "register_operand" "")
 (zero_extend:DI
 (plus:SI (match_operand:SI 1 "register_operand" "")
- (match_operand:SI 2 "nonmemory_operand" ""))))
+ (match_operand:SI 2 "x86_64_nonmemory_operand" ""))))
 (clobber (reg:CC FLAGS_REG))]
- "TARGET_64BIT && reload_completed
- && ix86_lea_for_add_ok (insn, operands)"
+ "TARGET_64BIT && reload_completed && ix86_lea_for_add_ok (insn, operands)"
 [(set (match_dup 0)
- (zero_extend:DI (subreg:SI (plus:DI (match_dup 1) (match_dup 2)) 0)))]
-{
- operands[1] = gen_lowpart (DImode, operands[1]);
- operands[2] = gen_lowpart (DImode, operands[2]);
-})
+ (zero_extend:DI (plus:SI (match_dup 1) (match_dup 2))))])
(define_insn "*add<mode>_2"
[(set (reg FLAGS_REG)
(compare
(plus:SWI
- (match_operand:SWI 1 "nonimmediate_operand" "%0,0")
- (match_operand:SWI 2 "<general_operand>" "<g>,<r><i>"))
+ (match_operand:SWI 1 "nonimmediate_operand" "%0,0,<r>")
+ (match_operand:SWI 2 "<general_operand>" "<g>,<r><i>,0"))
(const_int 0)))
- (set (match_operand:SWI 0 "nonimmediate_operand" "=<r>,<r>m")
+ (set (match_operand:SWI 0 "nonimmediate_operand" "=<r>,<r>m,<r>")
(plus:SWI (match_dup 1) (match_dup 2)))]
"ix86_match_ccmode (insn, CCGOCmode)
&& ix86_binary_operator_ok (PLUS, <MODE>mode, operands)"
}
default:
+ if (which_alternative == 2)
+ {
+ rtx tmp;
+ tmp = operands[1], operands[1] = operands[2], operands[2] = tmp;
+ }
+
+ gcc_assert (rtx_equal_p (operands[0], operands[1]));
if (x86_maybe_negate_const_int (&operands[2], <MODE>mode))
return "sub{<imodesuffix>}\t{%2, %0|%0, %2}";
(define_insn "*addsi_2_zext"
[(set (reg FLAGS_REG)
(compare
- (plus:SI (match_operand:SI 1 "nonimmediate_operand" "%0")
- (match_operand:SI 2 "general_operand" "g"))
+ (plus:SI (match_operand:SI 1 "nonimmediate_operand" "%0,r")
+ (match_operand:SI 2 "x86_64_general_operand" "rme,0"))
(const_int 0)))
- (set (match_operand:DI 0 "register_operand" "=r")
+ (set (match_operand:DI 0 "register_operand" "=r,r")
(zero_extend:DI (plus:SI (match_dup 1) (match_dup 2))))]
"TARGET_64BIT && ix86_match_ccmode (insn, CCGOCmode)
&& ix86_binary_operator_ok (PLUS, SImode, operands)"
}
default:
+ if (which_alternative == 1)
+ {
+ rtx tmp;
+ tmp = operands[1], operands[1] = operands[2], operands[2] = tmp;
+ }
+
if (x86_maybe_negate_const_int (&operands[2], SImode))
return "sub{l}\t{%2, %k0|%k0, %2}";
(define_insn "*add<mode>_3"
[(set (reg FLAGS_REG)
(compare
- (neg:SWI (match_operand:SWI 2 "<general_operand>" "<g>"))
- (match_operand:SWI 1 "nonimmediate_operand" "%0")))
- (clobber (match_scratch:SWI 0 "=<r>"))]
+ (neg:SWI (match_operand:SWI 2 "<general_operand>" "<g>,0"))
+ (match_operand:SWI 1 "nonimmediate_operand" "%0,<r>")))
+ (clobber (match_scratch:SWI 0 "=<r>,<r>"))]
"ix86_match_ccmode (insn, CCZmode)
&& !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
}
default:
+ if (which_alternative == 1)
+ {
+ rtx tmp;
+ tmp = operands[1], operands[1] = operands[2], operands[2] = tmp;
+ }
+
+ gcc_assert (rtx_equal_p (operands[0], operands[1]));
if (x86_maybe_negate_const_int (&operands[2], <MODE>mode))
return "sub{<imodesuffix>}\t{%2, %0|%0, %2}";
(define_insn "*addsi_3_zext"
[(set (reg FLAGS_REG)
(compare
- (neg:SI (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 1 "nonimmediate_operand" "%0")))
- (set (match_operand:DI 0 "register_operand" "=r")
+ (neg:SI (match_operand:SI 2 "x86_64_general_operand" "rme,0"))
+ (match_operand:SI 1 "nonimmediate_operand" "%0,r")))
+ (set (match_operand:DI 0 "register_operand" "=r,r")
(zero_extend:DI (plus:SI (match_dup 1) (match_dup 2))))]
"TARGET_64BIT && ix86_match_ccmode (insn, CCZmode)
&& ix86_binary_operator_ok (PLUS, SImode, operands)"
}
default:
+ if (which_alternative == 1)
+ {
+ rtx tmp;
+ tmp = operands[1], operands[1] = operands[2], operands[2] = tmp;
+ }
+
if (x86_maybe_negate_const_int (&operands[2], SImode))
return "sub{l}\t{%2, %k0|%k0, %2}";
[(set (reg FLAGS_REG)
(compare
(plus:SWI
- (match_operand:SWI 1 "nonimmediate_operand" "%0")
- (match_operand:SWI 2 "<general_operand>" "<g>"))
+ (match_operand:SWI 1 "nonimmediate_operand" "%0,<r>")
+ (match_operand:SWI 2 "<general_operand>" "<g>,0"))
(const_int 0)))
- (clobber (match_scratch:SWI 0 "=<r>"))]
+ (clobber (match_scratch:SWI 0 "=<r>,<r>"))]
"ix86_match_ccmode (insn, CCGOCmode)
&& !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
}
default:
+ if (which_alternative == 1)
+ {
+ rtx tmp;
+ tmp = operands[1], operands[1] = operands[2], operands[2] = tmp;
+ }
+
+ gcc_assert (rtx_equal_p (operands[0], operands[1]));
if (x86_maybe_negate_const_int (&operands[2], <MODE>mode))
return "sub{<imodesuffix>}\t{%2, %0|%0, %2}";
[(set_attr "type" "alu")
(set_attr "mode" "QI")])
-;; The lea patterns for non-Pmodes needs to be matched by
+;; The lea patterns for modes less than 32 bits need to be matched by
;; several insns converted to real lea by splitters.
(define_insn_and_split "*lea_general_1"
(plus (plus (match_operand 1 "index_register_operand" "l")
(match_operand 2 "register_operand" "r"))
(match_operand 3 "immediate_operand" "i")))]
- "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
- || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
+ "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode)
&& (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])
&& GET_MODE (operands[0]) == GET_MODE (operands[2])
"&& reload_completed"
[(const_int 0)]
{
+ enum machine_mode mode = SImode;
rtx pat;
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[2] = gen_lowpart (Pmode, operands[2]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
- pat = gen_rtx_PLUS (Pmode, gen_rtx_PLUS (Pmode, operands[1], operands[2]),
+
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ operands[2] = gen_lowpart (mode, operands[2]);
+ operands[3] = gen_lowpart (mode, operands[3]);
+
+ pat = gen_rtx_PLUS (mode, gen_rtx_PLUS (mode, operands[1], operands[2]),
operands[3]);
- if (Pmode != SImode)
- pat = gen_rtx_SUBREG (SImode, pat, 0);
+
emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
DONE;
}
[(set_attr "type" "lea")
(set_attr "mode" "SI")])
-(define_insn_and_split "*lea_general_1_zext"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (zero_extend:DI
- (plus:SI (plus:SI
- (match_operand:SI 1 "index_register_operand" "l")
- (match_operand:SI 2 "register_operand" "r"))
- (match_operand:SI 3 "immediate_operand" "i"))))]
- "TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(set (match_dup 0)
- (zero_extend:DI (subreg:SI (plus:DI (plus:DI (match_dup 1)
- (match_dup 2))
- (match_dup 3)) 0)))]
-{
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[2] = gen_lowpart (Pmode, operands[2]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
-}
- [(set_attr "type" "lea")
- (set_attr "mode" "SI")])
-
(define_insn_and_split "*lea_general_2"
[(set (match_operand 0 "register_operand" "=r")
(plus (mult (match_operand 1 "index_register_operand" "l")
- (match_operand 2 "const248_operand" "i"))
+ (match_operand 2 "const248_operand" "n"))
(match_operand 3 "nonmemory_operand" "ri")))]
- "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
- || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
+ "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode)
&& (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])
&& (GET_MODE (operands[0]) == GET_MODE (operands[3])
"&& reload_completed"
[(const_int 0)]
{
+ enum machine_mode mode = SImode;
rtx pat;
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
- pat = gen_rtx_PLUS (Pmode, gen_rtx_MULT (Pmode, operands[1], operands[2]),
- operands[3]);
- if (Pmode != SImode)
- pat = gen_rtx_SUBREG (SImode, pat, 0);
+
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ operands[3] = gen_lowpart (mode, operands[3]);
+
+ pat = gen_rtx_PLUS (mode, gen_rtx_MULT (mode, operands[1], operands[2]),
+ operands[3]);
+
emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
DONE;
}
[(set_attr "type" "lea")
(set_attr "mode" "SI")])
-(define_insn_and_split "*lea_general_2_zext"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (zero_extend:DI
- (plus:SI (mult:SI
- (match_operand:SI 1 "index_register_operand" "l")
- (match_operand:SI 2 "const248_operand" "n"))
- (match_operand:SI 3 "nonmemory_operand" "ri"))))]
- "TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(set (match_dup 0)
- (zero_extend:DI (subreg:SI (plus:DI (mult:DI (match_dup 1)
- (match_dup 2))
- (match_dup 3)) 0)))]
-{
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
-}
- [(set_attr "type" "lea")
- (set_attr "mode" "SI")])
-
(define_insn_and_split "*lea_general_3"
[(set (match_operand 0 "register_operand" "=r")
(plus (plus (mult (match_operand 1 "index_register_operand" "l")
- (match_operand 2 "const248_operand" "i"))
+ (match_operand 2 "const248_operand" "n"))
(match_operand 3 "register_operand" "r"))
(match_operand 4 "immediate_operand" "i")))]
- "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
- || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
+ "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode)
&& (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
&& GET_MODE (operands[0]) == GET_MODE (operands[1])
&& GET_MODE (operands[0]) == GET_MODE (operands[3])"
"&& reload_completed"
[(const_int 0)]
{
+ enum machine_mode mode = SImode;
rtx pat;
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
- operands[4] = gen_lowpart (Pmode, operands[4]);
- pat = gen_rtx_PLUS (Pmode,
- gen_rtx_PLUS (Pmode, gen_rtx_MULT (Pmode, operands[1],
- operands[2]),
+
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ operands[3] = gen_lowpart (mode, operands[3]);
+ operands[4] = gen_lowpart (mode, operands[4]);
+
+ pat = gen_rtx_PLUS (mode,
+ gen_rtx_PLUS (mode,
+ gen_rtx_MULT (mode, operands[1],
+ operands[2]),
operands[3]),
operands[4]);
- if (Pmode != SImode)
- pat = gen_rtx_SUBREG (SImode, pat, 0);
+
emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
DONE;
}
[(set_attr "type" "lea")
(set_attr "mode" "SI")])
-(define_insn_and_split "*lea_general_3_zext"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (zero_extend:DI
- (plus:SI (plus:SI
- (mult:SI
- (match_operand:SI 1 "index_register_operand" "l")
- (match_operand:SI 2 "const248_operand" "n"))
- (match_operand:SI 3 "register_operand" "r"))
- (match_operand:SI 4 "immediate_operand" "i"))))]
- "TARGET_64BIT"
+;; (x << C2) | C3 and (x << C2) ^ C3 with C3 < (1 << C2) only touch bit
+;; positions that the shift left as zero, so the ior/xor is equivalent
+;; to an addition: the whole expression becomes one lea with scale
+;; (1 << C2) and displacement C3.  The condition restricts the shift
+;; count to 1..3 so the scale is a valid 2/4/8.
+(define_insn_and_split "*lea_general_4"
+ [(set (match_operand 0 "register_operand" "=r")
+ (any_or (ashift
+ (match_operand 1 "index_register_operand" "l")
+ (match_operand 2 "const_int_operand" "n"))
+ (match_operand 3 "const_int_operand" "n")))]
+ "(((GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode)
+ && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun)))
+ || GET_MODE (operands[0]) == SImode
+ || (TARGET_64BIT && GET_MODE (operands[0]) == DImode))
+ && GET_MODE (operands[0]) == GET_MODE (operands[1])
+ && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) - 1 < 3
+ && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
+ < ((unsigned HOST_WIDE_INT) 1 << INTVAL (operands[2])))"
 "#"
 "&& reload_completed"
- [(set (match_dup 0)
- (zero_extend:DI (subreg:SI (plus:DI (plus:DI (mult:DI (match_dup 1)
- (match_dup 2))
- (match_dup 3))
- (match_dup 4)) 0)))]
-{
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[3] = gen_lowpart (Pmode, operands[3]);
- operands[4] = gen_lowpart (Pmode, operands[4]);
+ [(const_int 0)]
+{
+ enum machine_mode mode = GET_MODE (operands[0]);
+ rtx pat;
+
+ if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (SImode))
+ {
+ /* Narrow (QI/HI) forms are emitted as a 32-bit lea.  */
+ mode = SImode;
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ }
+
+ /* Turn the shift count into the lea scale factor.  */
+ operands[2] = GEN_INT (1 << INTVAL (operands[2]));
+
+ pat = plus_constant (gen_rtx_MULT (mode, operands[1], operands[2]),
+ INTVAL (operands[3]));
+
+ emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
+ DONE;
}
 [(set_attr "type" "lea")
- (set_attr "mode" "SI")])
+ (set (attr "mode")
+ (if_then_else (match_operand:DI 0 "" "")
+ (const_string "DI")
+ (const_string "SI")))])
\f
;; Subtract instructions
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(minus:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:SI 2 "general_operand" "g"))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (MINUS, SImode, operands)"
"sub{l}\t{%2, %k0|%k0, %2}"
[(set (reg FLAGS_REG)
(compare
(minus:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:SI 2 "general_operand" "g"))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))
(const_int 0)))
(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(define_insn "*subsi_3_zext"
[(set (reg FLAGS_REG)
(compare (match_operand:SI 1 "register_operand" "0")
- (match_operand:SI 2 "general_operand" "g")))
+ (match_operand:SI 2 "x86_64_general_operand" "rme")))
(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(minus:SI (match_dup 1)
(plus:SI (match_operand:SI 1 "nonimmediate_operand" "%0")
(plus:SI (match_operator 3 "ix86_carry_flag_operator"
[(reg FLAGS_REG) (const_int 0)])
- (match_operand:SI 2 "general_operand" "g")))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme")))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (PLUS, SImode, operands)"
"adc{l}\t{%2, %k0|%k0, %2}"
(minus:SI (match_operand:SI 1 "register_operand" "0")
(plus:SI (match_operator 3 "ix86_carry_flag_operator"
[(reg FLAGS_REG) (const_int 0)])
- (match_operand:SI 2 "general_operand" "g")))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme")))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (MINUS, SImode, operands)"
"sbb{l}\t{%2, %k0|%k0, %2}"
(compare:CCC
(plusminus:SI
(match_operand:SI 1 "nonimmediate_operand" "<comm>0")
- (match_operand:SI 2 "general_operand" "g"))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))
(match_dup 1)))
(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI (plusminus:SI (match_dup 1) (match_dup 2))))]
[(set (match_operand:DI 0 "register_operand" "=r,r,r")
(zero_extend:DI
(mult:SI (match_operand:SI 1 "nonimmediate_operand" "%rm,rm,0")
- (match_operand:SI 2 "general_operand" "K,i,mr"))))
+ (match_operand:SI 2 "x86_64_general_operand" "K,e,mr"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
&& !(MEM_P (operands[1]) && MEM_P (operands[2]))"
(clobber (reg:CC FLAGS_REG))])]
"TARGET_QIMODE_MATH")
-(define_insn "*<u>mul<mode><dwi>3_1"
- [(set (match_operand:<DWI> 0 "register_operand" "=A")
+;; BMI2 mulx: unsigned 64x64->128 multiply.  The "%d" constraint pins
+;; one source to %rdx (mulx's implicit input); the low half goes to
+;; operand 0, the high half (the double-width product shifted right by
+;; 64 and truncated) to operand 1.  Unlike mul, mulx leaves the flags
+;; untouched.
+;; NOTE(review): operands[1] is a register-only output ("=r") and
+;; operands[2] is constrained to %rdx, so the two-memory check on
+;; operands[1]/operands[2] looks like it was meant for
+;; operands[2]/operands[3] -- confirm.
+(define_insn "*bmi2_umulditi3_1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (mult:DI
+ (match_operand:DI 2 "nonimmediate_operand" "%d")
+ (match_operand:DI 3 "nonimmediate_operand" "rm")))
+ (set (match_operand:DI 1 "register_operand" "=r")
+ (truncate:DI
+ (lshiftrt:TI
+ (mult:TI (zero_extend:TI (match_dup 2))
+ (zero_extend:TI (match_dup 3)))
+ (const_int 64))))]
+ "TARGET_64BIT && TARGET_BMI2
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
+ "mulx\t{%3, %0, %1|%1, %0, %3}"
+ [(set_attr "type" "imulx")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "DI")])
+
+;; 32-bit-mode BMI2 mulx: unsigned 32x32->64 multiply with the implicit
+;; source in %edx ("%d" constraint); low half to operand 0, high half to
+;; operand 1, flags unmodified.
+;; NOTE(review): as in the DImode variant, the two-memory check names
+;; operands[1]/operands[2] (an output and the %edx-pinned input) where
+;; operands[2]/operands[3] appear to be intended -- confirm.
+(define_insn "*bmi2_umulsidi3_1"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (mult:SI
+ (match_operand:SI 2 "nonimmediate_operand" "%d")
+ (match_operand:SI 3 "nonimmediate_operand" "rm")))
+ (set (match_operand:SI 1 "register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI (zero_extend:DI (match_dup 2))
+ (zero_extend:DI (match_dup 3)))
+ (const_int 32))))]
+ "!TARGET_64BIT && TARGET_BMI2
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
+ "mulx\t{%3, %0, %1|%1, %0, %3}"
+ [(set_attr "type" "imulx")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "SI")])
+
+;; Unsigned widening multiply producing a double-width result.
+;; Alternative 0 is the classic one-operand mul (inputs/outputs in the
+;; "A" register pair); alternative 1 is enabled only for BMI2, is
+;; emitted as "#", and relies on a later splitter to become mulx.
+(define_insn "*umul<mode><dwi>3_1"
+ [(set (match_operand:<DWI> 0 "register_operand" "=A,r")
 (mult:<DWI>
- (any_extend:<DWI>
- (match_operand:DWIH 1 "nonimmediate_operand" "%0"))
- (any_extend:<DWI>
- (match_operand:DWIH 2 "nonimmediate_operand" "rm"))))
+ (zero_extend:<DWI>
+ (match_operand:DWIH 1 "nonimmediate_operand" "%0,d"))
+ (zero_extend:<DWI>
+ (match_operand:DWIH 2 "nonimmediate_operand" "rm,rm"))))
 (clobber (reg:CC FLAGS_REG))]
 "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
- "<sgnprefix>mul{<imodesuffix>}\t%2"
- [(set_attr "type" "imul")
- (set_attr "length_immediate" "0")
+ "@
+ mul{<imodesuffix>}\t%2
+ #"
+ [(set_attr "isa" "*,bmi2")
+ (set_attr "type" "imul,imulx")
+ (set_attr "length_immediate" "0,*")
 (set (attr "athlon_decode")
- (if_then_else (eq_attr "cpu" "athlon")
+ (cond [(eq_attr "alternative" "0")
+ (if_then_else (eq_attr "cpu" "athlon")
+ (const_string "vector")
+ (const_string "double"))]
+ (const_string "*")))
+ (set_attr "amdfam10_decode" "double,*")
+ (set_attr "bdver1_decode" "direct,*")
+ (set_attr "prefix" "orig,vex")
+ (set_attr "mode" "<MODE>")])
+
+;; Convert mul to the mulx pattern to avoid flags dependency.
+;; Applies only when register allocation happened to place the first
+;; input in %rdx/%edx, mulx's implicit source.  The double-word
+;; destination is split into its low/high halves (operands 3 and 4) to
+;; match the two-set mulx pattern.
+(define_split
+ [(set (match_operand:<DWI> 0 "register_operand" "")
+ (mult:<DWI>
+ (zero_extend:<DWI>
+ (match_operand:DWIH 1 "register_operand" ""))
+ (zero_extend:<DWI>
+ (match_operand:DWIH 2 "nonimmediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed
+ && true_regnum (operands[1]) == DX_REG"
+ [(parallel [(set (match_dup 3)
+ (mult:DWIH (match_dup 1) (match_dup 2)))
+ (set (match_dup 4)
+ (truncate:DWIH
+ (lshiftrt:<DWI>
+ (mult:<DWI> (zero_extend:<DWI> (match_dup 1))
+ (zero_extend:<DWI> (match_dup 2)))
+ (match_dup 5))))])]
+{
+ split_double_mode (<DWI>mode, &operands[0], 1, &operands[3], &operands[4]);
+
+ /* Shift count = word width, selecting the high half of the product.  */
+ operands[5] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode));
+})
+
+(define_insn "*mul<mode><dwi>3_1"
+ [(set (match_operand:<DWI> 0 "register_operand" "=A")
+ (mult:<DWI>
+ (sign_extend:<DWI>
+ (match_operand:DWIH 1 "nonimmediate_operand" "%0"))
+ (sign_extend:<DWI>
+ (match_operand:DWIH 2 "nonimmediate_operand" "rm"))))
+ (clobber (reg:CC FLAGS_REG))]
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
+ "imul{<imodesuffix>}\t%2"
+ [(set_attr "type" "imul")
+ (set_attr "length_immediate" "0")
+ (set (attr "athlon_decode")
+ (if_then_else (eq_attr "cpu" "athlon")
(const_string "vector")
(const_string "double")))
(set_attr "amdfam10_decode" "double")
"(TARGET_80387 && X87_ENABLE_ARITH (SFmode))
|| TARGET_SSE_MATH"
{
- if (TARGET_SSE_MATH && TARGET_RECIP && optimize_insn_for_speed_p ()
+ if (TARGET_SSE_MATH
+ && TARGET_RECIP_DIV
+ && optimize_insn_for_speed_p ()
&& flag_finite_math_only && !flag_trapping_math
&& flag_unsafe_math_optimizations)
{
[(set (reg:CCNO FLAGS_REG)
(compare:CCNO
(and:SI (match_operand:SI 0 "nonimmediate_operand" "")
- (match_operand:SI 1 "nonmemory_operand" ""))
+ (match_operand:SI 1 "x86_64_nonmemory_operand" ""))
(const_int 0)))])
(define_expand "testqi_ccz_1"
(compare
(and:SWI124
(match_operand:SWI124 0 "nonimmediate_operand" "%!*a,<r>,<r>m")
- (match_operand:SWI124 1 "general_operand" "<i>,<i>,<r><i>"))
+ (match_operand:SWI124 1 "<general_operand>" "<i>,<i>,<r><i>"))
(const_int 0)))]
"ix86_match_ccmode (insn, CCNOmode)
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
[(and:SI (zero_extract:SI (match_dup 2) (const_int 8) (const_int 8))
(match_dup 3))
(const_int 0)]))]
- "operands[2] = gen_lowpart (SImode, operands[2]);
- operands[3] = gen_int_mode (INTVAL (operands[3]) >> 8, SImode);")
+{
+ operands[2] = gen_lowpart (SImode, operands[2]);
+ operands[3] = gen_int_mode (INTVAL (operands[3]) >> 8, SImode);
+})
(define_split
[(set (match_operand 0 "flags_reg_operand" "")
[(set (match_dup 0)
(match_op_dup 1 [(and:QI (match_dup 2) (match_dup 3))
(const_int 0)]))]
- "operands[2] = gen_lowpart (QImode, operands[2]);
- operands[3] = gen_lowpart (QImode, operands[3]);")
+{
+ operands[2] = gen_lowpart (QImode, operands[2]);
+ operands[3] = gen_lowpart (QImode, operands[3]);
+})
;; %%% This used to optimize known byte-wide and operations to memory,
;; and sometimes to QImode registers. If this is considered useful,
enum machine_mode mode;
gcc_assert (CONST_INT_P (operands[2]));
- if (INTVAL (operands[2]) == 0xff)
- mode = QImode;
+ if (INTVAL (operands[2]) == (HOST_WIDE_INT) 0xffffffff)
+ mode = SImode;
+ else if (INTVAL (operands[2]) == 0xffff)
+ mode = HImode;
else
{
- gcc_assert (INTVAL (operands[2]) == 0xffff);
- mode = HImode;
+ gcc_assert (INTVAL (operands[2]) == 0xff);
+ mode = QImode;
}
operands[1] = gen_lowpart (mode, operands[1]);
- if (mode == QImode)
- return "movz{bl|x}\t{%1, %k0|%k0, %1}";
- else
+ if (mode == SImode)
+ return "mov{l}\t{%1, %k0|%k0, %1}";
+ else if (mode == HImode)
return "movz{wl|x}\t{%1, %k0|%k0, %1}";
+ else
+ return "movz{bl|x}\t{%1, %k0|%k0, %1}";
}
default:
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "type" "imovx")
- (and (ne (symbol_ref "INTVAL (operands[2]) == 0xff") (const_int 0))
+ (and (match_test "INTVAL (operands[2]) == 0xff")
(match_operand 1 "ext_QIreg_operand" "")))
(const_string "1")
(const_string "*")))
(define_insn "*andsi_1"
[(set (match_operand:SI 0 "nonimmediate_operand" "=rm,r,r")
(and:SI (match_operand:SI 1 "nonimmediate_operand" "%0,0,qm")
- (match_operand:SI 2 "general_operand" "ri,rm,L")))
+ (match_operand:SI 2 "x86_64_general_operand" "re,rm,L")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (AND, SImode, operands)"
{
enum machine_mode mode;
gcc_assert (CONST_INT_P (operands[2]));
- if (INTVAL (operands[2]) == 0xff)
- mode = QImode;
+ if (INTVAL (operands[2]) == 0xffff)
+ mode = HImode;
else
{
- gcc_assert (INTVAL (operands[2]) == 0xffff);
- mode = HImode;
+ gcc_assert (INTVAL (operands[2]) == 0xff);
+ mode = QImode;
}
operands[1] = gen_lowpart (mode, operands[1]);
- if (mode == QImode)
- return "movz{bl|x}\t{%1, %0|%0, %1}";
- else
+ if (mode == HImode)
return "movz{wl|x}\t{%1, %0|%0, %1}";
+ else
+ return "movz{bl|x}\t{%1, %0|%0, %1}";
}
default:
(set (attr "prefix_rex")
(if_then_else
(and (eq_attr "type" "imovx")
- (and (ne (symbol_ref "INTVAL (operands[2]) == 0xff") (const_int 0))
+ (and (match_test "INTVAL (operands[2]) == 0xff")
(match_operand 1 "ext_QIreg_operand" "")))
(const_string "1")
(const_string "*")))
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(and:SI (match_operand:SI 1 "nonimmediate_operand" "%0")
- (match_operand:SI 2 "general_operand" "g"))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (AND, SImode, operands)"
"and{l}\t{%2, %k0|%k0, %2}"
[(set (reg FLAGS_REG)
(compare (and:SWI124
(match_operand:SWI124 1 "nonimmediate_operand" "%0,0")
- (match_operand:SWI124 2 "general_operand" "<g>,<r><i>"))
+ (match_operand:SWI124 2 "<general_operand>" "<g>,<r><i>"))
(const_int 0)))
(set (match_operand:SWI124 0 "nonimmediate_operand" "=<r>,<r>m")
(and:SWI124 (match_dup 1) (match_dup 2)))]
[(set (reg FLAGS_REG)
(compare (and:SI
(match_operand:SI 1 "nonimmediate_operand" "%0")
- (match_operand:SI 2 "general_operand" "g"))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))
(const_int 0)))
(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI (and:SI (match_dup 1) (match_dup 2))))]
(const_int 8) (const_int 8))
(match_dup 2)))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_int_mode ((INTVAL (operands[2]) >> 8) & 0xff, SImode);")
+{
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+ operands[2] = gen_int_mode ((INTVAL (operands[2]) >> 8) & 0xff, SImode);
+})
;; Since AND can be encoded with sign extended immediate, this is only
;; profitable when 7th bit is not set.
(and:QI (match_dup 1)
(match_dup 2)))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (QImode, operands[0]);
- operands[1] = gen_lowpart (QImode, operands[1]);
- operands[2] = gen_lowpart (QImode, operands[2]);")
+{
+ operands[0] = gen_lowpart (QImode, operands[0]);
+ operands[1] = gen_lowpart (QImode, operands[1]);
+ operands[2] = gen_lowpart (QImode, operands[2]);
+})
\f
;; Logical inclusive and exclusive OR instructions
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(any_or:SI (match_operand:SI 1 "nonimmediate_operand" "%0")
- (match_operand:SI 2 "general_operand" "g"))))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (<CODE>, SImode, operands)"
"<logic>{l}\t{%2, %k0|%k0, %2}"
(define_insn "*<code>si_2_zext"
[(set (reg FLAGS_REG)
(compare (any_or:SI (match_operand:SI 1 "nonimmediate_operand" "%0")
- (match_operand:SI 2 "general_operand" "g"))
+ (match_operand:SI 2 "x86_64_general_operand" "rme"))
(const_int 0)))
(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI (any_or:SI (match_dup 1) (match_dup 2))))]
(const_int 8) (const_int 8))
(match_dup 2)))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_int_mode ((INTVAL (operands[2]) >> 8) & 0xff, SImode);")
+{
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+ operands[2] = gen_int_mode ((INTVAL (operands[2]) >> 8) & 0xff, SImode);
+})
;; Since OR can be encoded with sign extended immediate, this is only
;; profitable when 7th bit is set.
(any_or:QI (match_dup 1)
(match_dup 2)))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (QImode, operands[0]);
- operands[1] = gen_lowpart (QImode, operands[1]);
- operands[2] = gen_lowpart (QImode, operands[2]);")
+{
+ operands[0] = gen_lowpart (QImode, operands[0]);
+ operands[1] = gen_lowpart (QImode, operands[1]);
+ operands[2] = gen_lowpart (QImode, operands[2]);
+})
(define_expand "xorqi_cc_ext_1"
[(parallel [
(match_dup 0)))
(set (match_dup 1)
(if_then_else:SWI48 (ne (reg:CCZ FLAGS_REG) (const_int 0))
- (match_operand:SWI48 3 "register_operand" "r")
+ (match_operand:SWI48 3 "register_operand" "")
(match_dup 1)))]
"TARGET_CMOVE"
"operands[4] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode));")
[(set_attr "type" "ishift")
(set_attr "mode" "<MODE>")])
+;; BMI2 SHLX: left shift whose count comes from a register, not %cl.
+;; Note there is no (clobber (reg:CC FLAGS_REG)) here -- SHLX leaves the
+;; flags untouched, which is what the conversion splits below exploit.
+(define_insn "*bmi2_ashl<mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")))]
+ "TARGET_BMI2"
+ "shlx\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "<MODE>")])
+
(define_insn "*ashl<mode>3_1"
- [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
- (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "0,l")
- (match_operand:QI 2 "nonmemory_operand" "c<S>,M")))
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r,r")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "0,l,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,M,r")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (ASHIFT, <MODE>mode, operands)"
{
switch (get_attr_type (insn))
{
case TYPE_LEA:
+ case TYPE_ISHIFTX:
return "#";
case TYPE_ALU:
return "sal{<imodesuffix>}\t{%2, %0|%0, %2}";
}
}
- [(set (attr "type")
+ [(set_attr "isa" "*,*,bmi2")
+ (set (attr "type")
(cond [(eq_attr "alternative" "1")
(const_string "lea")
- (and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (eq_attr "alternative" "2")
+ (const_string "ishiftx")
+ (and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 2 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+;; After reload, a register-count left shift (which clobbers FLAGS_REG)
+;; is rewritten into the clobber-free SHLX form; gen_lowpart widens the
+;; QImode count register to <MODE> as that pattern requires.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (ashift:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (ashift:SWI48 (match_dup 1) (match_dup 2)))]
+ "operands[2] = gen_lowpart (<MODE>mode, operands[2]);")
+
+;; SHLX in SImode with an implicitly zero-extended DImode result:
+;; writing the 32-bit register %k0 clears the upper 32 bits, matching
+;; the zero_extend:DI in the pattern.  No FLAGS_REG clobber.
+(define_insn "*bmi2_ashlsi3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:SI 2 "register_operand" "r"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "shlx\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "SI")])
+
(define_insn "*ashlsi3_1_zext"
- [(set (match_operand:DI 0 "register_operand" "=r,r")
+ [(set (match_operand:DI 0 "register_operand" "=r,r,r")
(zero_extend:DI
- (ashift:SI (match_operand:SI 1 "register_operand" "0,l")
- (match_operand:QI 2 "nonmemory_operand" "cI,M"))))
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "0,l,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,M,r"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (ASHIFT, SImode, operands)"
{
switch (get_attr_type (insn))
{
case TYPE_LEA:
+ case TYPE_ISHIFTX:
return "#";
case TYPE_ALU:
return "sal{l}\t{%2, %k0|%k0, %2}";
}
}
- [(set (attr "type")
+ [(set_attr "isa" "*,*,bmi2")
+ (set (attr "type")
(cond [(eq_attr "alternative" "1")
(const_string "lea")
- (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (eq_attr "alternative" "2")
+ (const_string "ishiftx")
+ (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 2 "const1_operand" ""))
(const_string "alu")
]
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "SI")])
-(define_insn "*ashlhi3_1"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm")
- (ashift:HI (match_operand:HI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI")))
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" ""))))
(clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, HImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_ALU:
- gcc_assert (operands[2] == const1_rtx);
- return "add{w}\t%0, %0";
-
- default:
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "sal{w}\t%0";
- else
- return "sal{w}\t{%2, %0|%0, %2}";
- }
-}
- [(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
- (match_operand 0 "register_operand" ""))
- (match_operand 2 "const1_operand" ""))
- (const_string "alu")
- ]
- (const_string "ishift")))
- (set (attr "length_immediate")
- (if_then_else
- (ior (eq_attr "type" "alu")
- (and (eq_attr "type" "ishift")
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
- (const_string "0")
- (const_string "*")))
- (set_attr "mode" "HI")])
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (ashift:SI (match_dup 1) (match_dup 2))))]
+ "operands[2] = gen_lowpart (SImode, operands[2]);")
-(define_insn "*ashlhi3_1_lea"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,r")
+(define_insn "*ashlhi3_1"
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=rm,Yp")
(ashift:HI (match_operand:HI 1 "nonimmediate_operand" "0,l")
(match_operand:QI 2 "nonmemory_operand" "cI,M")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, HImode, operands)"
+ "ix86_binary_operator_ok (ASHIFT, HImode, operands)"
{
switch (get_attr_type (insn))
{
[(set (attr "type")
(cond [(eq_attr "alternative" "1")
(const_string "lea")
- (and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 2 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "HI,SI")])
+;; %%% Potential partial reg stall on alternative 1. What to do?
(define_insn "*ashlqi3_1"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r")
- (ashift:QI (match_operand:QI 1 "nonimmediate_operand" "0,0")
- (match_operand:QI 2 "nonmemory_operand" "cI,cI")))
- (clobber (reg:CC FLAGS_REG))]
- "TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, QImode, operands)"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_ALU:
- gcc_assert (operands[2] == const1_rtx);
- if (REG_P (operands[1]) && !ANY_QI_REG_P (operands[1]))
- return "add{l}\t%k0, %k0";
- else
- return "add{b}\t%0, %0";
-
- default:
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- {
- if (get_attr_mode (insn) == MODE_SI)
- return "sal{l}\t%k0";
- else
- return "sal{b}\t%0";
- }
- else
- {
- if (get_attr_mode (insn) == MODE_SI)
- return "sal{l}\t{%2, %k0|%k0, %2}";
- else
- return "sal{b}\t{%2, %0|%0, %2}";
- }
- }
-}
- [(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
- (match_operand 0 "register_operand" ""))
- (match_operand 2 "const1_operand" ""))
- (const_string "alu")
- ]
- (const_string "ishift")))
- (set (attr "length_immediate")
- (if_then_else
- (ior (eq_attr "type" "alu")
- (and (eq_attr "type" "ishift")
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
- (const_string "0")
- (const_string "*")))
- (set_attr "mode" "QI,SI")])
-
-;; %%% Potential partial reg stall on alternative 2. What to do?
-(define_insn "*ashlqi3_1_lea"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r,r")
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,r,Yp")
(ashift:QI (match_operand:QI 1 "nonimmediate_operand" "0,0,l")
(match_operand:QI 2 "nonmemory_operand" "cI,cI,M")))
(clobber (reg:CC FLAGS_REG))]
- "!TARGET_PARTIAL_REG_STALL
- && ix86_binary_operator_ok (ASHIFT, QImode, operands)"
+ "ix86_binary_operator_ok (ASHIFT, QImode, operands)"
{
switch (get_attr_type (insn))
{
[(set (attr "type")
(cond [(eq_attr "alternative" "2")
(const_string "lea")
- (and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 2 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI,SI,SI")])
}
}
[(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (cond [(and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 1 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift1")
(and (match_operand 1 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
-;; Convert lea to the lea pattern to avoid flags dependency.
+;; Convert ashift to the lea pattern to avoid flags dependency.
(define_split
[(set (match_operand 0 "register_operand" "")
(ashift (match_operand 1 "index_register_operand" "")
(match_operand:QI 2 "const_int_operand" "")))
(clobber (reg:CC FLAGS_REG))]
- "reload_completed
+ "GET_MODE (operands[0]) == GET_MODE (operands[1])
+ && reload_completed
&& true_regnum (operands[0]) != true_regnum (operands[1])"
[(const_int 0)]
{
- rtx pat;
enum machine_mode mode = GET_MODE (operands[0]);
-
- if (mode != Pmode)
- operands[1] = gen_lowpart (Pmode, operands[1]);
- operands[2] = gen_int_mode (1 << INTVAL (operands[2]), Pmode);
-
- pat = gen_rtx_MULT (Pmode, operands[1], operands[2]);
+ rtx pat;
if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (SImode))
- operands[0] = gen_lowpart (SImode, operands[0]);
+ {
+ mode = SImode;
+ operands[0] = gen_lowpart (mode, operands[0]);
+ operands[1] = gen_lowpart (mode, operands[1]);
+ }
+
+ operands[2] = gen_int_mode (1 << INTVAL (operands[2]), mode);
- if (TARGET_64BIT && mode != Pmode)
- pat = gen_rtx_SUBREG (SImode, pat, 0);
+ pat = gen_rtx_MULT (mode, operands[1], operands[2]);
emit_insn (gen_rtx_SET (VOIDmode, operands[0], pat));
DONE;
})
-;; Convert lea to the lea pattern to avoid flags dependency.
+;; Convert ashift to the lea pattern to avoid flags dependency.
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(zero_extend:DI
}
}
[(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (cond [(and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 2 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
}
}
[(set (attr "type")
- (cond [(and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (cond [(and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 2 "const1_operand" ""))
(const_string "alu")
]
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "SI")])
}
}
[(set (attr "type")
- (cond [(and (and (ne (symbol_ref "TARGET_DOUBLE_WITH_ADD")
- (const_int 0))
+ (cond [(and (and (match_test "TARGET_DOUBLE_WITH_ADD")
(match_operand 0 "register_operand" ""))
(match_operand 2 "const1_operand" ""))
(const_string "alu")
(ior (eq_attr "type" "alu")
(and (eq_attr "type" "ishift")
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
;; See comment above `ashl<mode>3' about how this works.
-(define_expand "<shiftrt_insn><mode>3"
+(define_expand "<shift_insn><mode>3"
[(set (match_operand:SDWIM 0 "<shift_operand>" "")
(any_shiftrt:SDWIM (match_operand:SDWIM 1 "<shift_operand>" "")
(match_operand:QI 2 "nonmemory_operand" "")))]
"ix86_expand_binary_operator (<CODE>, <MODE>mode, operands); DONE;")
;; Avoid useless masking of count operand.
-(define_insn_and_split "*<shiftrt_insn><mode>3_mask"
+(define_insn_and_split "*<shift_insn><mode>3_mask"
[(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm")
(any_shiftrt:SWI48
(match_operand:SWI48 1 "nonimmediate_operand" "0")
[(set_attr "type" "ishift")
(set_attr "mode" "<MODE>")])
-(define_insn_and_split "*<shiftrt_insn><mode>3_doubleword"
+(define_insn_and_split "*<shift_insn><mode>3_doubleword"
[(set (match_operand:DWI 0 "register_operand" "=r")
(any_shiftrt:DWI (match_operand:DWI 1 "register_operand" "0")
(match_operand:QI 2 "nonmemory_operand" "<S>c")))
"#"
"(optimize && flag_peephole2) ? epilogue_completed : reload_completed"
[(const_int 0)]
- "ix86_split_<shiftrt_insn> (operands, NULL_RTX, <MODE>mode); DONE;"
+ "ix86_split_<shift_insn> (operands, NULL_RTX, <MODE>mode); DONE;"
[(set_attr "type" "multi")])
;; By default we don't ask for a scratch register, because when DWImode
(match_dup 3)]
"TARGET_CMOVE"
[(const_int 0)]
- "ix86_split_<shiftrt_insn> (operands, operands[3], <DWI>mode); DONE;")
+ "ix86_split_<shift_insn> (operands, operands[3], <DWI>mode); DONE;")
(define_insn "x86_64_shrd"
[(set (match_operand:DI 0 "nonimmediate_operand" "+r*m")
DONE;
})
-(define_insn "*<shiftrt_insn><mode>3_1"
- [(set (match_operand:SWI 0 "nonimmediate_operand" "=<r>m")
- (any_shiftrt:SWI (match_operand:SWI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+;; BMI2 SHRX/SARX: right shifts (any_shiftrt) with a register count.
+;; Like SHLX, these read the count from any register and do not write
+;; the flags, hence no FLAGS_REG clobber in the pattern.
+(define_insn "*bmi2_<shift_insn><mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (any_shiftrt:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:SWI48 2 "register_operand" "r")))]
+ "TARGET_BMI2"
+ "<shift>x\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "<MODE>")])
+
+(define_insn "*<shift_insn><mode>3_1"
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
+ (any_shiftrt:SWI48
+ (match_operand:SWI48 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,r")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{<imodesuffix>}\t%0";
- else
- return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ISHIFTX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<shift>{<imodesuffix>}\t%0";
+ else
+ return "<shift>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ }
}
- [(set_attr "type" "ishift")
+ [(set_attr "isa" "*,bmi2")
+ (set_attr "type" "ishift,ishiftx")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
-(define_insn "*<shiftrt_insn>si3_1_zext"
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+;; Rewrites a flags-clobbering right shift by a register count into the
+;; clobber-free SHRX/SARX form after reload; gen_lowpart widens the
+;; QImode count to <MODE> to match that pattern's operand.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (any_shiftrt:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (any_shiftrt:SWI48 (match_dup 1) (match_dup 2)))]
+ "operands[2] = gen_lowpart (<MODE>mode, operands[2]);")
+
+(define_insn "*bmi2_<shift_insn>si3_1_zext"
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
- (any_shiftrt:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI"))))
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:SI 2 "register_operand" "r"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "<shift>x\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "ishiftx")
+ (set_attr "mode" "SI")])
+
+(define_insn "*<shift_insn>si3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
+ (zero_extend:DI
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,r"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (<CODE>, SImode, operands)"
{
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ISHIFTX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<shift>{l}\t%k0";
+ else
+ return "<shift>{l}\t{%2, %k0|%k0, %2}";
+ }
+}
+ [(set_attr "isa" "*,bmi2")
+ (set_attr "type" "ishift,ishiftx")
+ (set (attr "length_immediate")
+ (if_then_else
+ (and (match_operand 2 "const1_operand" "")
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
+ (const_string "0")
+ (const_string "*")))
+ (set_attr "mode" "SI")])
+
+;; Convert shift to the shiftx pattern to avoid flags dependency.
+;; Zero-extended SImode variant of the split above: after reload the
+;; clobbering shift becomes the flag-free SHRX/SARX zext form, with the
+;; QImode count widened to SImode via gen_lowpart.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (any_shiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "register_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (any_shiftrt:SI (match_dup 1) (match_dup 2))))]
+ "operands[2] = gen_lowpart (SImode, operands[2]);")
+
+(define_insn "*<shift_insn><mode>3_1"
+ [(set (match_operand:SWI12 0 "nonimmediate_operand" "=<r>m")
+ (any_shiftrt:SWI12
+ (match_operand:SWI12 1 "nonimmediate_operand" "0")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ (clobber (reg:CC FLAGS_REG))]
+ "ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
+{
if (operands[2] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{l}\t%k0";
+ return "<shift>{<imodesuffix>}\t%0";
else
- return "<shiftrt>{l}\t{%2, %k0|%k0, %2}";
+ return "<shift>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "ishift")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
- (set_attr "mode" "SI")])
+ (set_attr "mode" "<MODE>")])
-(define_insn "*<shiftrt_insn>qi3_1_slp"
+(define_insn "*<shift_insn>qi3_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm"))
(any_shiftrt:QI (match_dup 0)
(match_operand:QI 1 "nonmemory_operand" "cI")))
{
if (operands[1] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{b}\t%0";
+ return "<shift>{b}\t%0";
else
- return "<shiftrt>{b}\t{%1, %0|%0, %1}";
+ return "<shift>{b}\t{%1, %0|%0, %1}";
}
[(set_attr "type" "ishift1")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 1 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
;; This pattern can't accept a variable shift count, since shifts by
;; zero don't affect the flags. We assume that shifts by constant
;; zero are optimized away.
-(define_insn "*<shiftrt_insn><mode>3_cmp"
+(define_insn "*<shift_insn><mode>3_cmp"
[(set (reg FLAGS_REG)
(compare
(any_shiftrt:SWI
{
if (operands[2] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{<imodesuffix>}\t%0";
+ return "<shift>{<imodesuffix>}\t%0";
else
- return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ return "<shift>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "ishift")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
-(define_insn "*<shiftrt_insn>si3_cmp_zext"
+(define_insn "*<shift_insn>si3_cmp_zext"
[(set (reg FLAGS_REG)
(compare
(any_shiftrt:SI (match_operand:SI 1 "register_operand" "0")
{
if (operands[2] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{l}\t%k0";
+ return "<shift>{l}\t%k0";
else
- return "<shiftrt>{l}\t{%2, %k0|%k0, %2}";
+ return "<shift>{l}\t{%2, %k0|%k0, %2}";
}
[(set_attr "type" "ishift")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "SI")])
-(define_insn "*<shiftrt_insn><mode>3_cconly"
+(define_insn "*<shift_insn><mode>3_cconly"
[(set (reg FLAGS_REG)
(compare
(any_shiftrt:SWI
{
if (operands[2] == const1_rtx
&& (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<shiftrt>{<imodesuffix>}\t%0";
+ return "<shift>{<imodesuffix>}\t%0";
else
- return "<shiftrt>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ return "<shift>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "ishift")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
split_double_mode (<DWI>mode, &operands[0], 1, &operands[4], &operands[5]);
})
+;; BMI2 RORX: rotate right by an immediate count only (operand 2 is an
+;; immediate_operand), writing a separate destination and leaving the
+;; flags alone -- hence no FLAGS_REG clobber in this pattern.
+(define_insn "*bmi2_rorx<mode>3_1"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (rotatert:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 2 "immediate_operand" "<S>")))]
+ "TARGET_BMI2"
+ "rorx\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "rotatex")
+ (set_attr "mode" "<MODE>")])
+
(define_insn "*<rotate_insn><mode>3_1"
- [(set (match_operand:SWI 0 "nonimmediate_operand" "=<r>m")
- (any_rotate:SWI (match_operand:SWI 1 "nonimmediate_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm,r")
+ (any_rotate:SWI48
+ (match_operand:SWI48 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>,<S>")))
(clobber (reg:CC FLAGS_REG))]
"ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<rotate>{<imodesuffix>}\t%0";
- else
- return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ROTATEX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{<imodesuffix>}\t%0";
+ else
+ return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
+ }
}
- [(set_attr "type" "rotate")
+ [(set_attr "isa" "*,bmi2")
+ (set_attr "type" "rotate,rotatex")
(set (attr "length_immediate")
(if_then_else
- (and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (and (eq_attr "type" "rotate")
+ (and (match_operand 2 "const1_operand" "")
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)"))))
(const_string "0")
(const_string "*")))
(set_attr "mode" "<MODE>")])
-(define_insn "*<rotate_insn>si3_1_zext"
+;; Convert rotate to the rotatex pattern to avoid flags dependency.
+;; RORX only rotates right, so a left rotate by N is re-expressed as a
+;; right rotate by (bitsize - N); the FLAGS_REG clobber is dropped so
+;; the result matches the clobber-free rorx insn.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (rotate:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (rotatert:SWI48 (match_dup 1) (match_dup 2)))]
+{
+ operands[2]
+ = GEN_INT (GET_MODE_BITSIZE (<MODE>mode) - INTVAL (operands[2]));
+})
+
+;; Right rotates need no count adjustment: this split only drops the
+;; FLAGS_REG clobber so the insn matches the rorx pattern above.
+(define_split
+ [(set (match_operand:SWI48 0 "register_operand" "")
+ (rotatert:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" "")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (rotatert:SWI48 (match_dup 1) (match_dup 2)))])
+
+(define_insn "*bmi2_rorxsi3_1_zext"
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
- (any_rotate:SI (match_operand:SI 1 "register_operand" "0")
- (match_operand:QI 2 "nonmemory_operand" "cI"))))
+ (rotatert:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 2 "immediate_operand" "I"))))]
+ "TARGET_64BIT && TARGET_BMI2"
+ "rorx\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "type" "rotatex")
+ (set_attr "mode" "SI")])
+
+;; SImode rotate whose result is zero-extended to DImode.  Alternative 0
+;; is the classic two-operand rotate; alternative 1 (isa "bmi2") is the
+;; rotatex form, emitted as "#" and later split to the flags-free insn.
+(define_insn "*<rotate_insn>si3_1_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
+ (zero_extend:DI
+ (any_rotate:SI (match_operand:SI 1 "nonimmediate_operand" "0,rm")
+ (match_operand:QI 2 "nonmemory_operand" "cI,I"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && ix86_binary_operator_ok (<CODE>, SImode, operands)"
{
- if (operands[2] == const1_rtx
- && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
- return "<rotate>{l}\t%k0";
+ switch (get_attr_type (insn))
+ {
+ case TYPE_ROTATEX:
+ return "#";
+
+ default:
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{l}\t%k0";
+ else
+ return "<rotate>{l}\t{%2, %k0|%k0, %2}";
+ }
+}
+ [(set_attr "isa" "*,bmi2")
+ (set_attr "type" "rotate,rotatex")
+ (set (attr "length_immediate")
+ (if_then_else
+ (and (eq_attr "type" "rotate")
+ (and (match_operand 2 "const1_operand" "")
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)"))))
+ (const_string "0")
+ (const_string "*")))
+ (set_attr "mode" "SI")])
+
+;; Convert rotate to the rotatex pattern to avoid flags dependency.
+;; Same rewrite as the SWI48 splits above, but for the zero-extended
+;; SImode form: a left rotate by N becomes a right rotate by (32 - N).
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (rotate:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (rotatert:SI (match_dup 1) (match_dup 2))))]
+{
+ operands[2]
+ = GEN_INT (GET_MODE_BITSIZE (SImode) - INTVAL (operands[2]));
+})
+
+;; Right-rotate form: only the FLAGS_REG clobber is removed.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (zero_extend:DI
+ (rotatert:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:QI 2 "immediate_operand" ""))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_64BIT && TARGET_BMI2 && reload_completed"
+ [(set (match_dup 0)
+ (zero_extend:DI (rotatert:SI (match_dup 1) (match_dup 2))))])
+
+;; HImode/QImode rotates.  The short no-immediate encoding is used for
+;; rotate-by-1 when TARGET_SHIFT1 or when optimizing for size.
+(define_insn "*<rotate_insn><mode>3_1"
+ [(set (match_operand:SWI12 0 "nonimmediate_operand" "=<r>m")
+ (any_rotate:SWI12 (match_operand:SWI12 1 "nonimmediate_operand" "0")
+ (match_operand:QI 2 "nonmemory_operand" "c<S>")))
+ (clobber (reg:CC FLAGS_REG))]
+ "ix86_binary_operator_ok (<CODE>, <MODE>mode, operands)"
+{
+ if (operands[2] == const1_rtx
+ && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
+ return "<rotate>{<imodesuffix>}\t%0";
else
- return "<rotate>{l}\t{%2, %k0|%k0, %2}";
+ return "<rotate>{<imodesuffix>}\t{%2, %0|%0, %2}";
}
[(set_attr "type" "rotate")
(set (attr "length_immediate")
(if_then_else
(and (match_operand 2 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
- (set_attr "mode" "SI")])
+ (set_attr "mode" "<MODE>")])
(define_insn "*<rotate_insn>qi3_1_slp"
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm"))
(set (attr "length_immediate")
(if_then_else
(and (match_operand 1 "const1_operand" "")
- (ne (symbol_ref "TARGET_SHIFT1 || optimize_function_for_size_p (cfun)")
- (const_int 0)))
+ (ior (match_test "TARGET_SHIFT1")
+ (match_test "optimize_function_for_size_p (cfun)")))
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
(zero_extract:SWI48
(match_operand:SWI48 0 "register_operand" "r")
(const_int 1)
- (match_operand:SWI48 1 "nonmemory_operand" "rN"))
+ (match_operand:SWI48 1 "x86_64_nonmemory_operand" "rN"))
(const_int 0)))]
"TARGET_USE_BT || optimize_function_for_size_p (cfun)"
"bt{<imodesuffix>}\t{%1, %0|%0, %1}"
(if_then_else
(match_operator 0 "ix86_swapped_fp_comparison_operator"
[(match_operator 1 "float_operator"
- [(match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r")])
+ [(match_operand:SWI24 2 "nonimmediate_operand" "m,?r")])
(match_operand 3 "register_operand" "f,f")])
(label_ref (match_operand 4 "" ""))
(pc)))
(if_then_else
(match_operator 0 "ix86_swapped_fp_comparison_operator"
[(match_operator 1 "float_operator"
- [(match_operand:X87MODEI12 2 "memory_operand" "")])
+ [(match_operand:SWI24 2 "memory_operand" "")])
(match_operand 3 "register_operand" "")])
(match_operand 4 "" "")
(match_operand 5 "" "")))
(if_then_else
(match_operator 0 "ix86_swapped_fp_comparison_operator"
[(match_operator 1 "float_operator"
- [(match_operand:X87MODEI12 2 "register_operand" "")])
+ [(match_operand:SWI24 2 "register_operand" "")])
(match_operand 3 "register_operand" "")])
(match_operand 4 "" "")
(match_operand 5 "" "")))
(set_attr "modrm" "0")])
+;; Indirect jump.  The target predicate is tightened from
+;; nonimmediate_operand to indirect_branch_operand, with the matching
+;; "rw" constraint on the insn below.
(define_expand "indirect_jump"
- [(set (pc) (match_operand 0 "nonimmediate_operand" ""))]
- ""
- "")
+ [(set (pc) (match_operand 0 "indirect_branch_operand" ""))])
(define_insn "*indirect_jump"
- [(set (pc) (match_operand:P 0 "nonimmediate_operand" "rm"))]
+ [(set (pc) (match_operand:P 0 "indirect_branch_operand" "rw"))]
""
"jmp\t%A0"
[(set_attr "type" "ibr")
(set_attr "length_immediate" "0")])
(define_expand "tablejump"
- [(parallel [(set (pc) (match_operand 0 "nonimmediate_operand" ""))
+ [(parallel [(set (pc) (match_operand 0 "indirect_branch_operand" ""))
(use (label_ref (match_operand 1 "" "")))])]
""
{
operands[0] = expand_simple_binop (Pmode, code, op0, op1, NULL_RTX, 0,
OPTAB_DIRECT);
}
+ else if (TARGET_X32)
+ operands[0] = convert_memory_address (Pmode, operands[0]);
})
(define_insn "*tablejump_1"
- [(set (pc) (match_operand:P 0 "nonimmediate_operand" "rm"))
+ [(set (pc) (match_operand:P 0 "indirect_branch_operand" "rw"))
(use (label_ref (match_operand 1 "" "")))]
""
"jmp\t%A0"
;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
-
-;; Call subroutine returning no value.
-
-(define_expand "call_pop"
- [(parallel [(call (match_operand:QI 0 "" "")
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "" "")))])]
- "!TARGET_64BIT"
-{
- ix86_expand_call (NULL, operands[0], operands[1],
- operands[2], operands[3], 0);
- DONE;
-})
-
-(define_insn_and_split "*call_pop_0_vzeroupper"
- [(parallel
- [(call (mem:QI (match_operand:SI 0 "constant_call_address_operand" ""))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "")))])
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "call")])
-
-(define_insn "*call_pop_0"
- [(call (mem:QI (match_operand:SI 0 "constant_call_address_operand" ""))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "")))]
- "!TARGET_64BIT"
-{
- if (SIBLING_CALL_P (insn))
- return "jmp\t%P0";
- else
- return "call\t%P0";
-}
- [(set_attr "type" "call")])
-
-(define_insn_and_split "*call_pop_1_vzeroupper"
- [(parallel
- [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "i")))])
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "call")])
-
-(define_insn "*call_pop_1"
- [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "i")))]
- "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
-{
- if (constant_call_address_operand (operands[0], Pmode))
- return "call\t%P0";
- return "call\t%A0";
-}
- [(set_attr "type" "call")])
-(define_insn_and_split "*sibcall_pop_1_vzeroupper"
- [(parallel
- [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "i,i")))])
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "call")])
+;; Register constraint for call instruction.
+;; <c> expands to the operand-0 constraint letter in the unified call
+;; patterns below ("<c>zw"): "l" for SImode calls, "r" for DImode.
+(define_mode_attr c [(SI "l") (DI "r")])
-(define_insn "*sibcall_pop_1"
- [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 1 "" ""))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 2 "immediate_operand" "i,i")))]
- "!TARGET_64BIT && SIBLING_CALL_P (insn)"
- "@
- jmp\t%P0
- jmp\t%A0"
- [(set_attr "type" "call")])
+;; Call subroutine returning no value.
(define_expand "call"
[(call (match_operand:QI 0 "" "")
(use (match_operand 2 "" ""))]
""
{
- ix86_expand_call (NULL, operands[0], operands[1], operands[2], NULL, 0);
+ ix86_expand_call (NULL, operands[0], operands[1],
+ operands[2], NULL, false);
DONE;
})
(use (match_operand 2 "" ""))]
""
{
- ix86_expand_call (NULL, operands[0], operands[1], operands[2], NULL, 1);
+ ix86_expand_call (NULL, operands[0], operands[1],
+ operands[2], NULL, true);
DONE;
})
-(define_insn_and_split "*call_0_vzeroupper"
- [(call (mem:QI (match_operand 0 "constant_call_address_operand" ""))
+;; Call that must be preceded by vzeroupper: kept as "#" until reload,
+;; then split by ix86_split_call_vzeroupper.
+(define_insn_and_split "*call_vzeroupper"
+ [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zw"))
(match_operand 1 "" ""))
(unspec [(match_operand 2 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER"
+ "TARGET_VZEROUPPER && !SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
"ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
[(set_attr "type" "call")])
-(define_insn "*call_0"
- [(call (mem:QI (match_operand 0 "constant_call_address_operand" ""))
+;; Plain non-sibling call; assembly produced by ix86_output_call_insn.
+(define_insn "*call"
+ [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zw"))
(match_operand 1 "" ""))]
- ""
- { return ix86_output_call_insn (insn, operands[0], 0); }
+ "!SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[0]);"
[(set_attr "type" "call")])
-(define_insn_and_split "*call_1_vzeroupper"
- [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
+;; 64-bit call from an ms_abi caller into a sysv_abi callee.  xmm6-15
+;; and rsi/rdi are callee-saved under the Microsoft ABI but volatile
+;; under SysV, so they are explicitly clobbered here.
+(define_insn_and_split "*call_rex64_ms_sysv_vzeroupper"
+ [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
(match_operand 1 "" ""))
+ (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+ (clobber (reg:TI XMM6_REG))
+ (clobber (reg:TI XMM7_REG))
+ (clobber (reg:TI XMM8_REG))
+ (clobber (reg:TI XMM9_REG))
+ (clobber (reg:TI XMM10_REG))
+ (clobber (reg:TI XMM11_REG))
+ (clobber (reg:TI XMM12_REG))
+ (clobber (reg:TI XMM13_REG))
+ (clobber (reg:TI XMM14_REG))
+ (clobber (reg:TI XMM15_REG))
+ (clobber (reg:DI SI_REG))
+ (clobber (reg:DI DI_REG))
(unspec [(match_operand 2 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
"ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
[(set_attr "type" "call")])
-(define_insn "*call_1"
- [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
- (match_operand 1 "" ""))]
- "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[0], 0); }
+;; Same ms->sysv call without the vzeroupper marker.
+(define_insn "*call_rex64_ms_sysv"
+ [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
+ (match_operand 1 "" ""))
+ (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+ (clobber (reg:TI XMM6_REG))
+ (clobber (reg:TI XMM7_REG))
+ (clobber (reg:TI XMM8_REG))
+ (clobber (reg:TI XMM9_REG))
+ (clobber (reg:TI XMM10_REG))
+ (clobber (reg:TI XMM11_REG))
+ (clobber (reg:TI XMM12_REG))
+ (clobber (reg:TI XMM13_REG))
+ (clobber (reg:TI XMM14_REG))
+ (clobber (reg:TI XMM15_REG))
+ (clobber (reg:DI SI_REG))
+ (clobber (reg:DI DI_REG))]
+ "TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[0]);"
[(set_attr "type" "call")])
-(define_insn_and_split "*sibcall_1_vzeroupper"
- [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
+;; Sibling (tail) call with vzeroupper; split after reload.
+(define_insn_and_split "*sibcall_vzeroupper"
+ [(call (mem:QI (match_operand:P 0 "sibcall_insn_operand" "Uz"))
(match_operand 1 "" ""))
(unspec [(match_operand 2 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
+ "TARGET_VZEROUPPER && SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
"ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
[(set_attr "type" "call")])
-(define_insn "*sibcall_1"
- [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
+;; Plain sibling call (emitted as a jmp by ix86_output_call_insn).
+(define_insn "*sibcall"
+ [(call (mem:QI (match_operand:P 0 "sibcall_insn_operand" "Uz"))
(match_operand 1 "" ""))]
- "!TARGET_64BIT && SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[0], 0); }
+ "SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[0]);"
[(set_attr "type" "call")])
-(define_insn_and_split "*call_1_rex64_vzeroupper"
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
- (match_operand 1 "" ""))
- (unspec [(match_operand 2 "const_int_operand" "")]
+;; 32-bit call that also pops the caller-pushed argument bytes
+;; (operand 3 is added to the stack pointer after the call).
+(define_expand "call_pop"
+ [(parallel [(call (match_operand:QI 0 "" "")
+ (match_operand:SI 1 "" ""))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 3 "" "")))])]
+ "!TARGET_64BIT"
+{
+ ix86_expand_call (NULL, operands[0], operands[1],
+ operands[2], operands[3], false);
+ DONE;
+})
+
+(define_insn_and_split "*call_pop_vzeroupper"
+ [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lzm"))
+ (match_operand:SI 1 "" ""))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 2 "immediate_operand" "i")))
+ (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)
- && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
+ "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
+ "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
[(set_attr "type" "call")])
-(define_insn "*call_1_rex64"
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
- (match_operand 1 "" ""))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)
- && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
- { return ix86_output_call_insn (insn, operands[0], 0); }
+(define_insn "*call_pop"
+ [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lzm"))
+ (match_operand 1 "" ""))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 2 "immediate_operand" "i")))]
+ "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[0]);"
[(set_attr "type" "call")])
-(define_insn_and_split "*call_1_rex64_ms_sysv_vzeroupper"
- [(parallel
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
- (match_operand 1 "" ""))
- (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
- (clobber (reg:TI XMM6_REG))
- (clobber (reg:TI XMM7_REG))
- (clobber (reg:TI XMM8_REG))
- (clobber (reg:TI XMM9_REG))
- (clobber (reg:TI XMM10_REG))
- (clobber (reg:TI XMM11_REG))
- (clobber (reg:TI XMM12_REG))
- (clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))])
- (unspec [(match_operand 2 "const_int_operand" "")]
+;; 32-bit sibling call that pops caller-pushed arguments (operand 2).
+(define_insn_and_split "*sibcall_pop_vzeroupper"
+ [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "Uz"))
+ (match_operand 1 "" ""))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 2 "immediate_operand" "i")))
+ (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
+ "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
[(set_attr "type" "call")])
-(define_insn "*call_1_rex64_ms_sysv"
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
+(define_insn "*sibcall_pop"
+ [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "Uz"))
(match_operand 1 "" ""))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 2 "immediate_operand" "i")))]
+ "!TARGET_64BIT && SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[0]);"
+ [(set_attr "type" "call")])
+
+;; Call subroutine, returning value in operand 0
+
+(define_expand "call_value"
+ [(set (match_operand 0 "" "")
+ (call (match_operand:QI 1 "" "")
+ (match_operand 2 "" "")))
+ (use (match_operand 3 "" ""))]
+ ""
+{
+ ix86_expand_call (operands[0], operands[1], operands[2],
+ operands[3], NULL, false);
+ DONE;
+})
+
+(define_expand "sibcall_value"
+ [(set (match_operand 0 "" "")
+ (call (match_operand:QI 1 "" "")
+ (match_operand 2 "" "")))
+ (use (match_operand 3 "" ""))]
+ ""
+{
+ ix86_expand_call (operands[0], operands[1], operands[2],
+ operands[3], NULL, true);
+ DONE;
+})
+
+;; Value-returning counterparts of *call_vzeroupper / *call: same
+;; constraints, but type "callv" and the target in operand 1.
+(define_insn_and_split "*call_value_vzeroupper"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zw"))
+ (match_operand 2 "" "")))
+ (unspec [(match_operand 3 "const_int_operand" "")]
+ UNSPEC_CALL_NEEDS_VZEROUPPER)]
+ "TARGET_VZEROUPPER && !SIBLING_CALL_P (insn)"
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
+ [(set_attr "type" "callv")])
+
+(define_insn "*call_value"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zw"))
+ (match_operand 2 "" "")))]
+ "!SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[1]);"
+ [(set_attr "type" "callv")])
+
+(define_insn_and_split "*sibcall_value_vzeroupper"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:P 1 "sibcall_insn_operand" "Uz"))
+ (match_operand 2 "" "")))
+ (unspec [(match_operand 3 "const_int_operand" "")]
+ UNSPEC_CALL_NEEDS_VZEROUPPER)]
+ "TARGET_VZEROUPPER && SIBLING_CALL_P (insn)"
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
+ [(set_attr "type" "callv")])
+
+(define_insn "*sibcall_value"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:P 1 "sibcall_insn_operand" "Uz"))
+ (match_operand 2 "" "")))]
+ "SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[1]);"
+ [(set_attr "type" "callv")])
+
+(define_insn_and_split "*call_value_rex64_ms_sysv_vzeroupper"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
+ (match_operand 2 "" "")))
(unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
(clobber (reg:TI XMM6_REG))
(clobber (reg:TI XMM7_REG))
(clobber (reg:TI XMM11_REG))
(clobber (reg:TI XMM12_REG))
(clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[0], 0); }
- [(set_attr "type" "call")])
-
-(define_insn_and_split "*call_1_rex64_large_vzeroupper"
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rm"))
- (match_operand 1 "" ""))
- (unspec [(match_operand 2 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
- [(set_attr "type" "call")])
-
-(define_insn "*call_1_rex64_large"
- [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rm"))
- (match_operand 1 "" ""))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[0], 0); }
- [(set_attr "type" "call")])
-
-(define_insn_and_split "*sibcall_1_rex64_vzeroupper"
- [(call (mem:QI (match_operand:DI 0 "sibcall_insn_operand" "s,U"))
- (match_operand 1 "" ""))
- (unspec [(match_operand 2 "const_int_operand" "")]
+ (clobber (reg:TI XMM14_REG))
+ (clobber (reg:TI XMM15_REG))
+ (clobber (reg:DI SI_REG))
+ (clobber (reg:DI DI_REG))
+ (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && SIBLING_CALL_P (insn)"
+ "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
"#"
"&& reload_completed"
[(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
- [(set_attr "type" "call")])
+ "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
+ [(set_attr "type" "callv")])
-(define_insn "*sibcall_1_rex64"
- [(call (mem:QI (match_operand:DI 0 "sibcall_insn_operand" "s,U"))
- (match_operand 1 "" ""))]
- "TARGET_64BIT && SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[0], 0); }
- [(set_attr "type" "call")])
+;; Value-returning ms_abi -> sysv_abi call; same SSE/rsi/rdi clobber
+;; list as *call_rex64_ms_sysv.
+(define_insn "*call_value_rex64_ms_sysv"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
+ (match_operand 2 "" "")))
+ (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+ (clobber (reg:TI XMM6_REG))
+ (clobber (reg:TI XMM7_REG))
+ (clobber (reg:TI XMM8_REG))
+ (clobber (reg:TI XMM9_REG))
+ (clobber (reg:TI XMM10_REG))
+ (clobber (reg:TI XMM11_REG))
+ (clobber (reg:TI XMM12_REG))
+ (clobber (reg:TI XMM13_REG))
+ (clobber (reg:TI XMM14_REG))
+ (clobber (reg:TI XMM15_REG))
+ (clobber (reg:DI SI_REG))
+ (clobber (reg:DI DI_REG))]
+ "TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[1]);"
+ [(set_attr "type" "callv")])
-;; Call subroutine, returning value in operand 0
(define_expand "call_value_pop"
[(parallel [(set (match_operand 0 "" "")
(call (match_operand:QI 1 "" "")
"!TARGET_64BIT"
{
ix86_expand_call (operands[0], operands[1], operands[2],
- operands[3], operands[4], 0);
+ operands[3], operands[4], false);
DONE;
})
-(define_expand "call_value"
+;; Value-returning 32-bit call that pops caller-pushed arguments
+;; (operand 3 is added to the stack pointer after the call).
+(define_insn_and_split "*call_value_pop_vzeroupper"
[(set (match_operand 0 "" "")
- (call (match_operand:QI 1 "" "")
- (match_operand:SI 2 "" "")))
- (use (match_operand:SI 3 "" ""))]
- ;; Operand 3 is not used on the i386.
- ""
-{
- ix86_expand_call (operands[0], operands[1], operands[2],
- operands[3], NULL, 0);
- DONE;
-})
+ (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lzm"))
+ (match_operand 2 "" "")))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 3 "immediate_operand" "i")))
+ (unspec [(match_operand 4 "const_int_operand" "")]
+ UNSPEC_CALL_NEEDS_VZEROUPPER)]
+ "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ "ix86_split_call_vzeroupper (curr_insn, operands[4]); DONE;"
+ [(set_attr "type" "callv")])
-(define_expand "sibcall_value"
+(define_insn "*call_value_pop"
[(set (match_operand 0 "" "")
- (call (match_operand:QI 1 "" "")
- (match_operand:SI 2 "" "")))
- (use (match_operand:SI 3 "" ""))]
- ;; Operand 3 is not used on the i386.
- ""
-{
- ix86_expand_call (operands[0], operands[1], operands[2],
- operands[3], NULL, 1);
- DONE;
-})
+ (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lzm"))
+ (match_operand 2 "" "")))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 3 "immediate_operand" "i")))]
+ "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[1]);"
+ [(set_attr "type" "callv")])
+
+;; Sibling-call variants of the popping value call above.
+(define_insn_and_split "*sibcall_value_pop_vzeroupper"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "Uz"))
+ (match_operand 2 "" "")))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 3 "immediate_operand" "i")))
+ (unspec [(match_operand 4 "const_int_operand" "")]
+ UNSPEC_CALL_NEEDS_VZEROUPPER)]
+ "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ "ix86_split_call_vzeroupper (curr_insn, operands[4]); DONE;"
+ [(set_attr "type" "callv")])
+
+(define_insn "*sibcall_value_pop"
+ [(set (match_operand 0 "" "")
+ (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "Uz"))
+ (match_operand 2 "" "")))
+ (set (reg:SI SP_REG)
+ (plus:SI (reg:SI SP_REG)
+ (match_operand:SI 3 "immediate_operand" "i")))]
+ "!TARGET_64BIT && SIBLING_CALL_P (insn)"
+ "* return ix86_output_call_insn (insn, operands[1]);"
+ [(set_attr "type" "callv")])
;; Call subroutine returning any type.
: X86_64_MS_SSE_REGPARM_MAX)
: X86_32_SSE_REGPARM_MAX)
- 1),
- NULL, 0);
+ NULL, false);
for (i = 0; i < XVECLEN (operands[2], 0); i++)
{
;; See comments for ix86_can_use_return_insn_p in i386.c.
+;; Both expanders emit vzeroupper if needed, then a popping return when
+;; the function pops its own incoming arguments (crtl->args.pops_args).
(define_expand "return"
- [(return)]
+ [(simple_return)]
"ix86_can_use_return_insn_p ()"
{
+ ix86_maybe_emit_epilogue_vzeroupper ();
+ if (crtl->args.pops_args)
+ {
+ rtx popc = GEN_INT (crtl->args.pops_args);
+ emit_jump_insn (gen_simple_return_pop_internal (popc));
+ DONE;
+ }
+})
+
+;; We need to disable this for TARGET_SEH, as otherwise
+;; shrink-wrapped prologue gets enabled too. This might exceed
+;; the maximum size of prologue in unwind information.
+
+(define_expand "simple_return"
+ [(simple_return)]
+ "!TARGET_SEH"
+{
+ ix86_maybe_emit_epilogue_vzeroupper ();
if (crtl->args.pops_args)
{
rtx popc = GEN_INT (crtl->args.pops_args);
- emit_jump_insn (gen_return_pop_internal (popc));
+ emit_jump_insn (gen_simple_return_pop_internal (popc));
DONE;
}
})
-(define_insn "return_internal"
- [(return)]
+(define_insn "simple_return_internal"
+ [(simple_return)]
"reload_completed"
"ret"
[(set_attr "length" "1")
;; Used by x86_machine_dependent_reorg to avoid penalty on single byte RET
;; instruction Athlon and K8 have.
-(define_insn "return_internal_long"
- [(return)
+(define_insn "simple_return_internal_long"
+ [(simple_return)
(unspec [(const_int 0)] UNSPEC_REP)]
"reload_completed"
"rep\;ret"
(set_attr "prefix_rep" "1")
(set_attr "modrm" "0")])
-(define_insn "return_pop_internal"
- [(return)
+(define_insn "simple_return_pop_internal"
+ [(simple_return)
(use (match_operand:SI 0 "const_int_operand" ""))]
"reload_completed"
"ret\t%0"
(set_attr "length_immediate" "2")
(set_attr "modrm" "0")])
-(define_insn "return_indirect_internal"
- [(return)
+(define_insn "simple_return_indirect_internal"
+ [(simple_return)
(use (match_operand:SI 0 "register_operand" "r"))]
"reload_completed"
"jmp\t%A0"
(unspec:DI
[(label_ref (match_operand 1 "" ""))]
UNSPEC_SET_GOT_OFFSET))]
- "TARGET_64BIT"
+ "TARGET_LP64"
"movabs{q}\t{$_GLOBAL_OFFSET_TABLE_-%l1, %0|%0, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_-%l1}"
[(set_attr "type" "imov")
(set_attr "length_immediate" "0")
(clobber (reg:CC FLAGS_REG))])]
""
{
- if (TARGET_ABM)
+ if (TARGET_LZCNT)
{
- emit_insn (gen_clz<mode>2_abm (operands[0], operands[1]));
+ emit_insn (gen_clz<mode>2_lzcnt (operands[0], operands[1]));
DONE;
}
operands[2] = GEN_INT (GET_MODE_BITSIZE (<MODE>mode)-1);
})
-(define_insn "clz<mode>2_abm"
+(define_insn "clz<mode>2_lzcnt"
[(set (match_operand:SWI248 0 "register_operand" "=r")
(clz:SWI248 (match_operand:SWI248 1 "nonimmediate_operand" "rm")))
(clobber (reg:CC FLAGS_REG))]
- "TARGET_ABM || TARGET_BMI"
+ "TARGET_LZCNT"
"lzcnt{<imodesuffix>}\t{%1, %0|%0, %1}"
[(set_attr "prefix_rep" "1")
(set_attr "type" "bitmanip")
(define_insn "bmi_bextr_<mode>"
[(set (match_operand:SWI48 0 "register_operand" "=r")
- (unspec:SWI48 [(match_operand:SWI48 1 "nonimmediate_operand" "rm")
- (match_operand:SWI48 2 "register_operand" "r")]
+ (unspec:SWI48 [(match_operand:SWI48 1 "register_operand" "r")
+ (match_operand:SWI48 2 "nonimmediate_operand" "rm")]
UNSPEC_BEXTR))
(clobber (reg:CC FLAGS_REG))]
"TARGET_BMI"
[(set_attr "type" "bitmanip")
(set_attr "mode" "<MODE>")])
+;; BMI2 instructions.
+;; NOTE(review): the bzhi RTL models dst = op1 & (-1 >>u op2), whereas
+;; the BZHI instruction takes a bit *index* in the low byte of its
+;; second source (clearing bits at and above that index).  Confirm that
+;; operand 2 here carries size - index, and check behavior when the
+;; index is >= the operand size (where -1 >>u n is not well defined).
+(define_insn "bmi2_bzhi_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (and:SWI48 (match_operand:SWI48 1 "register_operand" "r")
+ (lshiftrt:SWI48 (const_int -1)
+ (match_operand:SWI48 2 "nonimmediate_operand" "rm"))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_BMI2"
+ "bzhi\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
+;; Parallel bit deposit: scatter low bits of op1 per mask op2.
+(define_insn "bmi2_pdep_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (unspec:SWI48 [(match_operand:SWI48 1 "register_operand" "r")
+ (match_operand:SWI48 2 "nonimmediate_operand" "rm")]
+ UNSPEC_PDEP))]
+ "TARGET_BMI2"
+ "pdep\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
+;; Parallel bit extract: gather op1 bits selected by mask op2.
+(define_insn "bmi2_pext_<mode>3"
+ [(set (match_operand:SWI48 0 "register_operand" "=r")
+ (unspec:SWI48 [(match_operand:SWI48 1 "register_operand" "r")
+ (match_operand:SWI48 2 "nonimmediate_operand" "rm")]
+ UNSPEC_PEXT))]
+ "TARGET_BMI2"
+ "pext\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "type" "bitmanip")
+ (set_attr "prefix" "vex")
+ (set_attr "mode" "<MODE>")])
+
;; TBM instructions.
(define_insn "tbm_bextri_<mode>"
[(set (match_operand:SWI48 0 "register_operand" "=r")
"xor{b}\t{%h0, %b0|%b0, %h0}"
[(set_attr "length" "2")
(set_attr "mode" "HI")])
+
\f
;; Thread-local storage patterns for ELF.
;;
+;; 32-bit TLS global-dynamic sequence: @tlsgd lea followed by a call to
+;; the resolver.  Sun as uses @tlsgdplt (or a %p3@plt call when the
+;; assembler lacks that support) instead of the direct %P3 call.
(define_insn "*tls_global_dynamic_32_gnu"
[(set (match_operand:SI 0 "register_operand" "=a")
- (unspec:SI [(match_operand:SI 1 "register_operand" "b")
- (match_operand:SI 2 "tls_symbolic_operand" "")
- (match_operand:SI 3 "call_insn_operand" "")]
- UNSPEC_TLS_GD))
+ (unspec:SI
+ [(match_operand:SI 1 "register_operand" "b")
+ (match_operand:SI 2 "tls_symbolic_operand" "")
+ (match_operand:SI 3 "constant_call_address_operand" "z")]
+ UNSPEC_TLS_GD))
(clobber (match_scratch:SI 4 "=d"))
(clobber (match_scratch:SI 5 "=c"))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT && TARGET_GNU_TLS"
- "lea{l}\t{%a2@tlsgd(,%1,1), %0|%0, %a2@tlsgd[%1*1]}\;call\t%P3"
+{
+ output_asm_insn
+ ("lea{l}\t{%a2@tlsgd(,%1,1), %0|%0, %a2@tlsgd[%1*1]}", operands);
+ if (TARGET_SUN_TLS)
+#ifdef HAVE_AS_IX86_TLSGDPLT
+ return "call\t%a2@tlsgdplt";
+#else
+ return "call\t%p3@plt";
+#endif
+ return "call\t%P3";
+}
[(set_attr "type" "multi")
(set_attr "length" "12")])
(define_expand "tls_global_dynamic_32"
- [(parallel [(set (match_operand:SI 0 "register_operand" "")
- (unspec:SI
- [(match_operand:SI 2 "register_operand" "")
- (match_operand:SI 1 "tls_symbolic_operand" "")
- (match_operand:SI 3 "call_insn_operand" "")]
- UNSPEC_TLS_GD))
- (clobber (match_scratch:SI 4 ""))
- (clobber (match_scratch:SI 5 ""))
- (clobber (reg:CC FLAGS_REG))])])
+ [(parallel
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unspec:SI [(match_operand:SI 2 "register_operand" "")
+ (match_operand:SI 1 "tls_symbolic_operand" "")
+ (match_operand:SI 3 "constant_call_address_operand" "")]
+ UNSPEC_TLS_GD))
+ (clobber (match_scratch:SI 4 ""))
+ (clobber (match_scratch:SI 5 ""))
+ (clobber (reg:CC FLAGS_REG))])])
+;; 64-bit TLS global-dynamic.  The 0x66 / 0x6666 / rex64 padding keeps
+;; the sequence at the size the linker expects for TLS relaxation; x32
+;; omits the leading 0x66 byte, hence length 15 instead of 16.
(define_insn "*tls_global_dynamic_64"
[(set (match_operand:DI 0 "register_operand" "=a")
- (call:DI (mem:QI (match_operand:DI 2 "call_insn_operand" ""))
- (match_operand:DI 3 "" "")))
- (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ (call:DI
+ (mem:QI (match_operand:DI 2 "constant_call_address_operand" "z"))
+ (match_operand:DI 3 "" "")))
+ (unspec:DI [(match_operand 1 "tls_symbolic_operand" "")]
UNSPEC_TLS_GD)]
"TARGET_64BIT"
- { return ASM_BYTE "0x66\n\tlea{q}\t{%a1@tlsgd(%%rip), %%rdi|rdi, %a1@tlsgd[rip]}\n" ASM_SHORT "0x6666\n\trex64\n\tcall\t%P2"; }
+{
+ if (!TARGET_X32)
+ fputs (ASM_BYTE "0x66\n", asm_out_file);
+ output_asm_insn
+ ("lea{q}\t{%a1@tlsgd(%%rip), %%rdi|rdi, %a1@tlsgd[rip]}", operands);
+ fputs (ASM_SHORT "0x6666\n", asm_out_file);
+ fputs ("\trex64\n", asm_out_file);
+ if (TARGET_SUN_TLS)
+ return "call\t%p2@plt";
+ return "call\t%P2";
+}
[(set_attr "type" "multi")
- (set_attr "length" "16")])
+ (set (attr "length")
+ (symbol_ref "TARGET_X32 ? 15 : 16"))])
(define_expand "tls_global_dynamic_64"
- [(parallel [(set (match_operand:DI 0 "register_operand" "")
- (call:DI
- (mem:QI (match_operand:DI 2 "call_insn_operand" ""))
- (const_int 0)))
- (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
- UNSPEC_TLS_GD)])])
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (call:DI
+ (mem:QI (match_operand:DI 2 "constant_call_address_operand" ""))
+ (const_int 0)))
+ (unspec:DI [(match_operand 1 "tls_symbolic_operand" "")]
+ UNSPEC_TLS_GD)])])
+;; 32-bit local-dynamic base: lea of %&@tlsldm plus a call.  For Sun TLS,
+;; use the assembler's @tlsldmplt operator when available
+;; (HAVE_AS_IX86_TLSLDMPLT); otherwise emit an explicit @plt call via the
+;; raw-symbol "%p" modifier instead of the PIC-suffixed "%P".
(define_insn "*tls_local_dynamic_base_32_gnu"
[(set (match_operand:SI 0 "register_operand" "=a")
- (unspec:SI [(match_operand:SI 1 "register_operand" "b")
- (match_operand:SI 2 "call_insn_operand" "")]
- UNSPEC_TLS_LD_BASE))
+ (unspec:SI
+ [(match_operand:SI 1 "register_operand" "b")
+ (match_operand:SI 2 "constant_call_address_operand" "z")]
+ UNSPEC_TLS_LD_BASE))
(clobber (match_scratch:SI 3 "=d"))
(clobber (match_scratch:SI 4 "=c"))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT && TARGET_GNU_TLS"
- "lea{l}\t{%&@tlsldm(%1), %0|%0, %&@tlsldm[%1]}\;call\t%P2"
+{
+ output_asm_insn
+ ("lea{l}\t{%&@tlsldm(%1), %0|%0, %&@tlsldm[%1]}", operands);
+ if (TARGET_SUN_TLS)
+#ifdef HAVE_AS_IX86_TLSLDMPLT
+ return "call\t%&@tlsldmplt";
+#else
+ return "call\t%p2@plt";
+#endif
+ return "call\t%P2";
+}
[(set_attr "type" "multi")
(set_attr "length" "11")])
+;; Expander for the 32-bit local-dynamic base pattern; operand 2 (the
+;; call target) must now be a constant call address.
(define_expand "tls_local_dynamic_base_32"
- [(parallel [(set (match_operand:SI 0 "register_operand" "")
- (unspec:SI [(match_operand:SI 1 "register_operand" "")
- (match_operand:SI 2 "call_insn_operand" "")]
- UNSPEC_TLS_LD_BASE))
- (clobber (match_scratch:SI 3 ""))
- (clobber (match_scratch:SI 4 ""))
- (clobber (reg:CC FLAGS_REG))])])
+ [(parallel
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unspec:SI
+ [(match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "constant_call_address_operand" "")]
+ UNSPEC_TLS_LD_BASE))
+ (clobber (match_scratch:SI 3 ""))
+ (clobber (match_scratch:SI 4 ""))
+ (clobber (reg:CC FLAGS_REG))])])
+;; 64-bit local-dynamic base: RIP-relative lea of %&@tlsld into %rdi,
+;; then the call.  Sun TLS needs an explicit @plt on the raw symbol
+;; ("%p1") where GNU ld accepts the plain "%P1" form.
(define_insn "*tls_local_dynamic_base_64"
[(set (match_operand:DI 0 "register_operand" "=a")
- (call:DI (mem:QI (match_operand:DI 1 "call_insn_operand" ""))
- (match_operand:DI 2 "" "")))
+ (call:DI
+ (mem:QI (match_operand:DI 1 "constant_call_address_operand" "z"))
+ (match_operand:DI 2 "" "")))
(unspec:DI [(const_int 0)] UNSPEC_TLS_LD_BASE)]
"TARGET_64BIT"
- "lea{q}\t{%&@tlsld(%%rip), %%rdi|rdi, %&@tlsld[rip]}\;call\t%P1"
+{
+ output_asm_insn
+ ("lea{q}\t{%&@tlsld(%%rip), %%rdi|rdi, %&@tlsld[rip]}", operands);
+ if (TARGET_SUN_TLS)
+ return "call\t%p1@plt";
+ return "call\t%P1";
+}
[(set_attr "type" "multi")
(set_attr "length" "12")])
+;; Expander for the 64-bit local-dynamic base pattern; the call target
+;; (operand 1) must be a constant call address.
(define_expand "tls_local_dynamic_base_64"
- [(parallel [(set (match_operand:DI 0 "register_operand" "")
- (call:DI
- (mem:QI (match_operand:DI 1 "call_insn_operand" ""))
- (const_int 0)))
- (unspec:DI [(const_int 0)] UNSPEC_TLS_LD_BASE)])])
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (call:DI
+ (mem:QI (match_operand:DI 1 "constant_call_address_operand" ""))
+ (const_int 0)))
+ (unspec:DI [(const_int 0)] UNSPEC_TLS_LD_BASE)])])
;; Local dynamic of a single variable is a lose. Show combine how
;; to convert that back to global dynamic.
+;; Combine splitter: LD base + DTPOFF of one symbol is folded back into
+;; a single UNSPEC_TLS_GD parallel (note the operand order in the
+;; replacement: base reg 1, symbol 3, call target 2).
(define_insn_and_split "*tls_local_dynamic_32_once"
[(set (match_operand:SI 0 "register_operand" "=a")
- (plus:SI (unspec:SI [(match_operand:SI 1 "register_operand" "b")
- (match_operand:SI 2 "call_insn_operand" "")]
- UNSPEC_TLS_LD_BASE)
- (const:SI (unspec:SI
- [(match_operand:SI 3 "tls_symbolic_operand" "")]
- UNSPEC_DTPOFF))))
+ (plus:SI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "b")
+ (match_operand:SI 2 "constant_call_address_operand" "z")]
+ UNSPEC_TLS_LD_BASE)
+ (const:SI (unspec:SI
+ [(match_operand:SI 3 "tls_symbolic_operand" "")]
+ UNSPEC_DTPOFF))))
(clobber (match_scratch:SI 4 "=d"))
(clobber (match_scratch:SI 5 "=c"))
(clobber (reg:CC FLAGS_REG))]
""
"#"
""
- [(parallel [(set (match_dup 0)
- (unspec:SI [(match_dup 1) (match_dup 3) (match_dup 2)]
- UNSPEC_TLS_GD))
- (clobber (match_dup 4))
- (clobber (match_dup 5))
- (clobber (reg:CC FLAGS_REG))])])
+ [(parallel
+ [(set (match_dup 0)
+ (unspec:SI [(match_dup 1) (match_dup 3) (match_dup 2)]
+ UNSPEC_TLS_GD))
+ (clobber (match_dup 4))
+ (clobber (match_dup 5))
+ (clobber (reg:CC FLAGS_REG))])])
;; Segment register for the thread base ptr load
+;; (SImode TLS uses %gs, DImode uses %fs.)
(define_mode_attr tp_seg [(SI "gs") (DI "fs")])
-;; Load and add the thread base pointer from %gs:0.
+;; Load and add the thread base pointer from %<tp_seg>:0.
+;; x32: the thread pointer is read from %fs:0 with a plain 32-bit move,
+;; since pointers are 32 bits wide under TARGET_X32.
+(define_insn "*load_tp_x32"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(const_int 0)] UNSPEC_TP))]
+ "TARGET_X32"
+ "mov{l}\t{%%fs:0, %0|%0, DWORD PTR fs:0}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0")
+ (set_attr "length" "7")
+ (set_attr "memory" "load")
+ (set_attr "imm_disp" "false")])
+
+;; x32 TP load with explicit zero-extension to DImode: writing the
+;; 32-bit register (%k0) clears the upper 32 bits architecturally.
+(define_insn "*load_tp_x32_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI (unspec:SI [(const_int 0)] UNSPEC_TP)))]
+ "TARGET_X32"
+ "mov{l}\t{%%fs:0, %k0|%k0, DWORD PTR fs:0}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0")
+ (set_attr "length" "7")
+ (set_attr "memory" "load")
+ (set_attr "imm_disp" "false")])
+
+;; Pmode-wide TP load (%gs:0 for SI, %fs:0 for DI); now restricted to
+;; !TARGET_X32 so the 32-bit x32 variants take precedence.
(define_insn "*load_tp_<mode>"
[(set (match_operand:P 0 "register_operand" "=r")
(unspec:P [(const_int 0)] UNSPEC_TP))]
- ""
+ "!TARGET_X32"
"mov{<imodesuffix>}\t{%%<tp_seg>:0, %0|%0, <iptrsize> PTR <tp_seg>:0}"
[(set_attr "type" "imov")
(set_attr "modrm" "0")
(set_attr "memory" "load")
(set_attr "imm_disp" "false")])
+;; x32: fold the TP load into an add from %fs:0 (clobbers flags).
+(define_insn "*add_tp_x32"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (plus:SI (unspec:SI [(const_int 0)] UNSPEC_TP)
+ (match_operand:SI 1 "register_operand" "0")))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_X32"
+ "add{l}\t{%%fs:0, %0|%0, DWORD PTR fs:0}"
+ [(set_attr "type" "alu")
+ (set_attr "modrm" "0")
+ (set_attr "length" "7")
+ (set_attr "memory" "load")
+ (set_attr "imm_disp" "false")])
+
+;; x32 TP add with zero-extension to DImode; the 32-bit add on %k0
+;; clears the upper half of the destination.
+(define_insn "*add_tp_x32_zext"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (zero_extend:DI
+ (plus:SI (unspec:SI [(const_int 0)] UNSPEC_TP)
+ (match_operand:SI 1 "register_operand" "0"))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_X32"
+ "add{l}\t{%%fs:0, %k0|%k0, DWORD PTR fs:0}"
+ [(set_attr "type" "alu")
+ (set_attr "modrm" "0")
+ (set_attr "length" "7")
+ (set_attr "memory" "load")
+ (set_attr "imm_disp" "false")])
+
(define_insn "*add_tp_<mode>"
[(set (match_operand:P 0 "register_operand" "=r")
(plus:P (unspec:P [(const_int 0)] UNSPEC_TP)
(match_operand:P 1 "register_operand" "0")))
(clobber (reg:CC FLAGS_REG))]
- ""
+ "!TARGET_X32"
"add{<imodesuffix>}\t{%%<tp_seg>:0, %0|%0, <iptrsize> PTR <tp_seg>:0}"
[(set_attr "type" "alu")
(set_attr "modrm" "0")
UNSPEC_TLS_IE_SUN))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && TARGET_SUN_TLS"
- "mov{q}\t{%%fs:0, %0|%0, QWORD PTR fs:0}\n\tadd{q}\t{%a1@gottpoff(%%rip), %0|%0, %a1@gottpoff[rip]}"
+{
+ output_asm_insn
+ ("mov{q}\t{%%fs:0, %0|%0, QWORD PTR fs:0}", operands);
+ return "add{q}\t{%a1@gottpoff(%%rip), %0|%0, %a1@gottpoff[rip]}";
+}
[(set_attr "type" "multi")])
;; GNU2 TLS patterns can be split.
ix86_tls_descriptor_calls_expanded_in_cfun = true;
})
-(define_insn "*tls_dynamic_lea_32"
+(define_insn "*tls_dynamic_gnu2_lea_32"
[(set (match_operand:SI 0 "register_operand" "=r")
(plus:SI (match_operand:SI 1 "register_operand" "b")
(const:SI
(set_attr "length" "6")
(set_attr "length_address" "4")])
-(define_insn "*tls_dynamic_call_32"
+(define_insn "*tls_dynamic_gnu2_call_32"
[(set (match_operand:SI 0 "register_operand" "=a")
(unspec:SI [(match_operand:SI 1 "tls_symbolic_operand" "")
(match_operand:SI 2 "register_operand" "0")
(define_expand "tls_dynamic_gnu2_64"
[(set (match_dup 2)
- (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ (unspec:DI [(match_operand 1 "tls_symbolic_operand" "")]
UNSPEC_TLSDESC))
(parallel
[(set (match_operand:DI 0 "register_operand" "")
ix86_tls_descriptor_calls_expanded_in_cfun = true;
})
-(define_insn "*tls_dynamic_lea_64"
+(define_insn "*tls_dynamic_gnu2_lea_64"
[(set (match_operand:DI 0 "register_operand" "=r")
- (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ (unspec:DI [(match_operand 1 "tls_symbolic_operand" "")]
UNSPEC_TLSDESC))]
"TARGET_64BIT && TARGET_GNU2_TLS"
"lea{q}\t{%a1@TLSDESC(%%rip), %0|%0, %a1@TLSDESC[rip]}"
(set_attr "length" "7")
(set_attr "length_address" "4")])
-(define_insn "*tls_dynamic_call_64"
+(define_insn "*tls_dynamic_gnu2_call_64"
[(set (match_operand:DI 0 "register_operand" "=a")
- (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")
+ (unspec:DI [(match_operand 1 "tls_symbolic_operand" "")
(match_operand:DI 2 "register_operand" "0")
(reg:DI SP_REG)]
UNSPEC_TLSDESC))
(reg:DI SP_REG)]
UNSPEC_TLSDESC)
(const:DI (unspec:DI
- [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ [(match_operand 1 "tls_symbolic_operand" "")]
UNSPEC_DTPOFF))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT && TARGET_GNU2_TLS"
(if_then_else (match_operand:MODEF 3 "mult_operator" "")
(const_string "fmul")
(const_string "fop"))))
- (set_attr "isa" "base,noavx,avx")
+ (set_attr "isa" "*,noavx,avx")
(set_attr "prefix" "orig,orig,vex")
(set_attr "mode" "<MODE>")])
(const_string "fdiv")
]
(const_string "fop")))
- (set_attr "isa" "base,base,noavx,avx")
+ (set_attr "isa" "*,*,noavx,avx")
(set_attr "prefix" "orig,orig,orig,vex")
(set_attr "mode" "<MODE>")])
[(set (match_operand:MODEF 0 "register_operand" "=f,f")
(match_operator:MODEF 3 "binary_fp_operator"
[(float:MODEF
- (match_operand:X87MODEI12 1 "nonimmediate_operand" "m,?r"))
+ (match_operand:SWI24 1 "nonimmediate_operand" "m,?r"))
(match_operand:MODEF 2 "register_operand" "0,0")]))]
- "TARGET_80387 && X87_ENABLE_FLOAT (<MODEF:MODE>mode, <X87MODEI12:MODE>mode)
+ "TARGET_80387 && X87_ENABLE_FLOAT (<MODEF:MODE>mode, <SWI24:MODE>mode)
&& !(SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH)
- && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
+ && (TARGET_USE_<SWI24:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
"* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(match_operand:MODEF 3 "mult_operator" "")
]
(const_string "fop")))
(set_attr "fp_int_src" "true")
- (set_attr "mode" "<X87MODEI12:MODE>")])
+ (set_attr "mode" "<SWI24:MODE>")])
(define_insn "*fop_<MODEF:mode>_3_i387"
[(set (match_operand:MODEF 0 "register_operand" "=f,f")
(match_operator:MODEF 3 "binary_fp_operator"
[(match_operand:MODEF 1 "register_operand" "0,0")
(float:MODEF
- (match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r"))]))]
- "TARGET_80387 && X87_ENABLE_FLOAT (<MODEF:MODE>mode, <X87MODEI12:MODE>mode)
+ (match_operand:SWI24 2 "nonimmediate_operand" "m,?r"))]))]
+ "TARGET_80387 && X87_ENABLE_FLOAT (<MODEF:MODE>mode, <SWI24:MODE>mode)
&& !(SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH)
- && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
+ && (TARGET_USE_<SWI24:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
"* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(match_operand:MODEF 3 "mult_operator" "")
[(set (match_operand:XF 0 "register_operand" "=f,f")
(match_operator:XF 3 "binary_fp_operator"
[(float:XF
- (match_operand:X87MODEI12 1 "nonimmediate_operand" "m,?r"))
+ (match_operand:SWI24 1 "nonimmediate_operand" "m,?r"))
(match_operand:XF 2 "register_operand" "0,0")]))]
"TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
"* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
(match_operator:XF 3 "binary_fp_operator"
[(match_operand:XF 1 "register_operand" "0,0")
(float:XF
- (match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r"))]))]
+ (match_operand:SWI24 2 "nonimmediate_operand" "m,?r"))]))]
"TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
"* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
[(set (attr "type")
(define_split
[(set (match_operand 0 "register_operand" "")
(match_operator 3 "binary_fp_operator"
- [(float (match_operand:X87MODEI12 1 "register_operand" ""))
+ [(float (match_operand:SWI24 1 "register_operand" ""))
(match_operand 2 "register_operand" "")]))]
"reload_completed
&& X87_FLOAT_MODE_P (GET_MODE (operands[0]))
[(set (match_operand 0 "register_operand" "")
(match_operator 3 "binary_fp_operator"
[(match_operand 1 "register_operand" "")
- (float (match_operand:X87MODEI12 2 "register_operand" ""))]))]
+ (float (match_operand:SWI24 2 "register_operand" ""))]))]
"reload_completed
&& X87_FLOAT_MODE_P (GET_MODE (operands[0]))
&& X87_ENABLE_FLOAT (GET_MODE (operands[0]), GET_MODE (operands[2]))"
|| (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH)"
{
if (<MODE>mode == SFmode
- && TARGET_SSE_MATH && TARGET_RECIP && !optimize_function_for_size_p (cfun)
+ && TARGET_SSE_MATH
+ && TARGET_RECIP_SQRT
+ && !optimize_function_for_size_p (cfun)
&& flag_finite_math_only && !flag_trapping_math
&& flag_unsafe_math_optimizations)
{
(set (match_operand:XF 1 "register_operand" "")
(unspec:XF [(match_dup 2)] UNSPEC_SINCOS_SIN))]
"find_regno_note (insn, REG_UNUSED, REGNO (operands[0]))
- && !(reload_completed || reload_in_progress)"
+ && can_create_pseudo_p ()"
[(set (match_dup 1) (unspec:XF [(match_dup 2)] UNSPEC_SIN))])
(define_split
(set (match_operand:XF 1 "register_operand" "")
(unspec:XF [(match_dup 2)] UNSPEC_SINCOS_SIN))]
"find_regno_note (insn, REG_UNUSED, REGNO (operands[1]))
- && !(reload_completed || reload_in_progress)"
+ && can_create_pseudo_p ()"
[(set (match_dup 0) (unspec:XF [(match_dup 2)] UNSPEC_COS))])
(define_insn "sincos_extend<mode>xf3_i387"
(set (match_operand:XF 1 "register_operand" "")
(unspec:XF [(float_extend:XF (match_dup 2))] UNSPEC_SINCOS_SIN))]
"find_regno_note (insn, REG_UNUSED, REGNO (operands[0]))
- && !(reload_completed || reload_in_progress)"
+ && can_create_pseudo_p ()"
[(set (match_dup 1)
(unspec:XF [(float_extend:XF (match_dup 2))] UNSPEC_SIN))])
(set (match_operand:XF 1 "register_operand" "")
(unspec:XF [(float_extend:XF (match_dup 2))] UNSPEC_SINCOS_SIN))]
"find_regno_note (insn, REG_UNUSED, REGNO (operands[1]))
- && !(reload_completed || reload_in_progress)"
+ && can_create_pseudo_p ()"
[(set (match_dup 0)
(unspec:XF [(float_extend:XF (match_dup 2))] UNSPEC_COS))])
if (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
&& !flag_trapping_math)
{
- if (!TARGET_ROUND && optimize_insn_for_size_p ())
- FAIL;
if (TARGET_ROUND)
emit_insn (gen_sse4_1_round<mode>2
(operands[0], operands[1], GEN_INT (ROUND_MXCSR)));
+ else if (optimize_insn_for_size_p ())
+ FAIL;
else
- ix86_expand_rint (operand0, operand1);
+ ix86_expand_rint (operands[0], operands[1]);
}
else
{
})
+;; round<mode>2 is widened from MODEF to X87MODEF: besides the SSE fast
+;; path (SSE4.1 ROUND, or the expand_round software sequences), an x87
+;; fallback via ix86_emit_i387_round is now available under
+;; -funsafe-math-optimizations.
(define_expand "round<mode>2"
- [(match_operand:MODEF 0 "register_operand" "")
- (match_operand:MODEF 1 "nonimmediate_operand" "")]
- "SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
- && !flag_trapping_math && !flag_rounding_math"
+ [(match_operand:X87MODEF 0 "register_operand" "")
+ (match_operand:X87MODEF 1 "nonimmediate_operand" "")]
+ "(TARGET_USE_FANCY_MATH_387
+ && (!(SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH)
+ || TARGET_MIX_SSE_I387)
+ && flag_unsafe_math_optimizations)
+ || (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
+ && !flag_trapping_math && !flag_rounding_math)"
{
if (optimize_insn_for_size_p ())
FAIL;
-  if (TARGET_64BIT || (<MODE>mode != DFmode))
-    ix86_expand_round (operand0, operand1);
+
+ if (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
+ && !flag_trapping_math && !flag_rounding_math)
+ {
+ if (TARGET_ROUND)
+ {
+ operands[1] = force_reg (<MODE>mode, operands[1]);
+ ix86_expand_round_sse4 (operands[0], operands[1]);
+ }
+ else if (TARGET_64BIT || (<MODE>mode != DFmode))
+ ix86_expand_round (operands[0], operands[1]);
+ else
+ ix86_expand_rounddf_32 (operands[0], operands[1]);
+ }
else
- ix86_expand_rounddf_32 (operand0, operand1);
+ {
+ operands[1] = force_reg (<MODE>mode, operands[1]);
+ ix86_emit_i387_round (operands[0], operands[1]);
+ }
DONE;
})
UNSPEC_FIST))
(clobber (match_scratch:XF 2 "=&1f"))]
"TARGET_USE_FANCY_MATH_387"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fpspc")
(set_attr "mode" "DI")])
(clobber (match_dup 3))])])
(define_insn_and_split "*fist<mode>2_1"
- [(set (match_operand:X87MODEI12 0 "register_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST))]
+ [(set (match_operand:SWI24 0 "register_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST))]
"TARGET_USE_FANCY_MATH_387
&& can_create_pseudo_p ()"
"#"
(set_attr "mode" "<MODE>")])
+;; Mode-iterator rename X87MODEI12 -> SWI24 (HI/SI); the third argument
+;; of output_fix_trunc is a bool, so pass false instead of 0.
(define_insn "fist<mode>2"
- [(set (match_operand:X87MODEI12 0 "memory_operand" "=m")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST))]
+ [(set (match_operand:SWI24 0 "memory_operand" "=m")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f")]
+ UNSPEC_FIST))]
"TARGET_USE_FANCY_MATH_387"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fpspc")
(set_attr "mode" "<MODE>")])
+;; Register-destination variant: goes through a memory temporary
+;; (operand 2) and is split after reload by the splitters below.
(define_insn "fist<mode>2_with_temp"
- [(set (match_operand:X87MODEI12 0 "register_operand" "=r")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST))
- (clobber (match_operand:X87MODEI12 2 "memory_operand" "=m"))]
+ [(set (match_operand:SWI24 0 "register_operand" "=r")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f")]
+ UNSPEC_FIST))
+ (clobber (match_operand:SWI24 2 "memory_operand" "=m"))]
"TARGET_USE_FANCY_MATH_387"
"#"
[(set_attr "type" "fpspc")
(set_attr "mode" "<MODE>")])
+;; Split the register-destination fist: store to the memory temp, then
+;; load the result into the register (mode iterator renamed to SWI24).
(define_split
- [(set (match_operand:X87MODEI12 0 "register_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST))
- (clobber (match_operand:X87MODEI12 2 "memory_operand" ""))]
+ [(set (match_operand:SWI24 0 "register_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST))
+ (clobber (match_operand:SWI24 2 "memory_operand" ""))]
"reload_completed"
- [(set (match_dup 2) (unspec:X87MODEI12 [(match_dup 1)] UNSPEC_FIST))
+ [(set (match_dup 2) (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST))
(set (match_dup 0) (match_dup 2))])
+;; Memory-destination case: the temp is unused, drop it and store direct.
(define_split
- [(set (match_operand:X87MODEI12 0 "memory_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST))
- (clobber (match_operand:X87MODEI12 2 "memory_operand" ""))]
+ [(set (match_operand:SWI24 0 "memory_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST))
+ (clobber (match_operand:SWI24 2 "memory_operand" ""))]
"reload_completed"
- [(set (match_dup 0) (unspec:X87MODEI12 [(match_dup 1)] UNSPEC_FIST))])
+ [(set (match_dup 0) (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST))])
+;; x87 lrint: iterator renamed X87MODEI -> SWI248x (HI/SI/DI results).
(define_expand "lrintxf<mode>2"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (unspec:X87MODEI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST))]
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (unspec:SWI248x [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST))]
"TARGET_USE_FANCY_MATH_387")
+;; SSE lrint (cvtss2si/cvtsd2si paths): iterator renamed SSEMODEI24 ->
+;; SWI48x; DImode results still require TARGET_64BIT.
-(define_expand "lrint<MODEF:mode><SSEMODEI24:mode>2"
- [(set (match_operand:SSEMODEI24 0 "nonimmediate_operand" "")
- (unspec:SSEMODEI24 [(match_operand:MODEF 1 "register_operand" "")]
+(define_expand "lrint<MODEF:mode><SWI48x:mode>2"
+ [(set (match_operand:SWI48x 0 "nonimmediate_operand" "")
+ (unspec:SWI48x [(match_operand:MODEF 1 "register_operand" "")]
UNSPEC_FIX_NOTRUNC))]
"SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
- && ((<SSEMODEI24:MODE>mode != DImode) || TARGET_64BIT)")
+ && ((<SWI48x:MODE>mode != DImode) || TARGET_64BIT)")
+;; lround is widened to X87MODEF sources and SWI248x results: the SSE
+;; path (ix86_expand_lround) keeps its old restrictions (no HImode, DI
+;; only on 64-bit, no trapping/rounding math); everything else now goes
+;; through the x87 ix86_emit_i387_round fallback.
-(define_expand "lround<MODEF:mode><SSEMODEI24:mode>2"
- [(match_operand:SSEMODEI24 0 "nonimmediate_operand" "")
- (match_operand:MODEF 1 "register_operand" "")]
- "SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
- && ((<SSEMODEI24:MODE>mode != DImode) || TARGET_64BIT)
- && !flag_trapping_math && !flag_rounding_math"
+(define_expand "lround<X87MODEF:mode><SWI248x:mode>2"
+ [(match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (match_operand:X87MODEF 1 "register_operand" "")]
+ "(TARGET_USE_FANCY_MATH_387
+ && (!(SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH)
+ || TARGET_MIX_SSE_I387)
+ && flag_unsafe_math_optimizations)
+ || (SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH
+ && <SWI248x:MODE>mode != HImode
+ && ((<SWI248x:MODE>mode != DImode) || TARGET_64BIT)
+ && !flag_trapping_math && !flag_rounding_math)"
{
if (optimize_insn_for_size_p ())
FAIL;
- ix86_expand_lround (operand0, operand1);
+
+ if (SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH
+ && <SWI248x:MODE>mode != HImode
+ && ((<SWI248x:MODE>mode != DImode) || TARGET_64BIT)
+ && !flag_trapping_math && !flag_rounding_math)
+ ix86_expand_lround (operands[0], operands[1]);
+ else
+ ix86_emit_i387_round (operands[0], operands[1]);
DONE;
})
&& !flag_trapping_math)"
{
if (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
- && !flag_trapping_math
- && (TARGET_ROUND || optimize_insn_for_speed_p ()))
+ && !flag_trapping_math)
{
- if (!TARGET_ROUND && optimize_insn_for_size_p ())
- FAIL;
if (TARGET_ROUND)
emit_insn (gen_sse4_1_round<mode>2
(operands[0], operands[1], GEN_INT (ROUND_FLOOR)));
+ else if (optimize_insn_for_size_p ())
+ FAIL;
else if (TARGET_64BIT || (<MODE>mode != DFmode))
- ix86_expand_floorceil (operand0, operand1, true);
+ ix86_expand_floorceil (operands[0], operands[1], true);
else
- ix86_expand_floorceildf_32 (operand0, operand1, true);
+ ix86_expand_floorceildf_32 (operands[0], operands[1], true);
}
else
{
})
(define_insn_and_split "*fist<mode>2_floor_1"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (unspec:X87MODEI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (unspec:SWI248x [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_FLOOR))
(clobber (reg:CC FLAGS_REG))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations
+;; fistp with the FPU rounding mode forced to floor (see the i387_cw
+;; attribute); operands 2/3 are presumably the saved and modified
+;; control words -- confirm against the i387 CW machinery.  The
+;; output_fix_trunc flag argument is bool-ified (0 -> false).
(define_insn "fistdi2_floor"
[(set (match_operand:DI 0 "memory_operand" "=m")
(unspec:DI [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST_FLOOR))
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" "m"))
(use (match_operand:HI 3 "memory_operand" "m"))
(clobber (match_scratch:XF 4 "=&1f"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "floor")
(set_attr "mode" "DI")])
(define_insn "fistdi2_floor_with_temp"
[(set (match_operand:DI 0 "nonimmediate_operand" "=m,?r")
(unspec:DI [(match_operand:XF 1 "register_operand" "f,f")]
- UNSPEC_FIST_FLOOR))
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" "m,m"))
(use (match_operand:HI 3 "memory_operand" "m,m"))
(clobber (match_operand:DI 4 "memory_operand" "=X,m"))
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(unspec:DI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
(clobber (match_operand:DI 4 "memory_operand" ""))
(clobber (match_scratch 5 ""))]
"reload_completed"
- [(parallel [(set (match_dup 4) (unspec:DI [(match_dup 1)] UNSPEC_FIST_FLOOR))
+ [(parallel [(set (match_dup 4)
+ (unspec:DI [(match_dup 1)] UNSPEC_FIST_FLOOR))
(use (match_dup 2))
(use (match_dup 3))
(clobber (match_dup 5))])
+;; Memory-destination split for DImode floor-fist; formatting-only
+;; reflow of the replacement parallel.
(define_split
[(set (match_operand:DI 0 "memory_operand" "")
(unspec:DI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
(clobber (match_operand:DI 4 "memory_operand" ""))
(clobber (match_scratch 5 ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (unspec:DI [(match_dup 1)] UNSPEC_FIST_FLOOR))
+ [(parallel [(set (match_dup 0)
+ (unspec:DI [(match_dup 1)] UNSPEC_FIST_FLOOR))
(use (match_dup 2))
(use (match_dup 3))
(clobber (match_dup 5))])])
+;; HI/SI floor-fist (SWI24 replaces X87MODEI12); output_fix_trunc flag
+;; bool-ified.
(define_insn "fist<mode>2_floor"
- [(set (match_operand:X87MODEI12 0 "memory_operand" "=m")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST_FLOOR))
+ [(set (match_operand:SWI24 0 "memory_operand" "=m")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f")]
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" "m"))
(use (match_operand:HI 3 "memory_operand" "m"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "floor")
(set_attr "mode" "<MODE>")])
(define_insn "fist<mode>2_floor_with_temp"
- [(set (match_operand:X87MODEI12 0 "nonimmediate_operand" "=m,?r")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f,f")]
- UNSPEC_FIST_FLOOR))
+ [(set (match_operand:SWI24 0 "nonimmediate_operand" "=m,?r")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f,f")]
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" "m,m"))
(use (match_operand:HI 3 "memory_operand" "m,m"))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" "=X,m"))]
+ (clobber (match_operand:SWI24 4 "memory_operand" "=X,m"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
"#"
(set_attr "mode" "<MODE>")])
+;; Register-destination floor-fist split: store via the memory temp
+;; (operand 4), then load into the register.
(define_split
- [(set (match_operand:X87MODEI12 0 "register_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ [(set (match_operand:SWI24 0 "register_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 4) (unspec:X87MODEI12 [(match_dup 1)]
- UNSPEC_FIST_FLOOR))
+ [(parallel [(set (match_dup 4)
+ (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST_FLOOR))
(use (match_dup 2))
(use (match_dup 3))])
(set (match_dup 0) (match_dup 4))])
+;; Memory-destination floor-fist split: temp unused, store directly.
(define_split
- [(set (match_operand:X87MODEI12 0 "memory_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ [(set (match_operand:SWI24 0 "memory_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_FLOOR))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (unspec:X87MODEI12 [(match_dup 1)]
- UNSPEC_FIST_FLOOR))
+ [(parallel [(set (match_dup 0)
+ (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST_FLOOR))
(use (match_dup 2))
(use (match_dup 3))])])
(define_expand "lfloorxf<mode>2"
- [(parallel [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (unspec:X87MODEI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_FLOOR))
+ [(parallel [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (unspec:SWI248x [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_FLOOR))
(clobber (reg:CC FLAGS_REG))])]
"TARGET_USE_FANCY_MATH_387
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387)
{
if (TARGET_64BIT && optimize_insn_for_size_p ())
FAIL;
- ix86_expand_lfloorceil (operand0, operand1, true);
+ ix86_expand_lfloorceil (operands[0], operands[1], true);
DONE;
})
&& !flag_trapping_math)"
{
if (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
- && !flag_trapping_math
- && (TARGET_ROUND || optimize_insn_for_speed_p ()))
+ && !flag_trapping_math)
{
if (TARGET_ROUND)
emit_insn (gen_sse4_1_round<mode>2
else if (optimize_insn_for_size_p ())
FAIL;
else if (TARGET_64BIT || (<MODE>mode != DFmode))
- ix86_expand_floorceil (operand0, operand1, false);
+ ix86_expand_floorceil (operands[0], operands[1], false);
else
- ix86_expand_floorceildf_32 (operand0, operand1, false);
+ ix86_expand_floorceildf_32 (operands[0], operands[1], false);
}
else
{
})
(define_insn_and_split "*fist<mode>2_ceil_1"
- [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (unspec:X87MODEI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (unspec:SWI248x [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_CEIL))
(clobber (reg:CC FLAGS_REG))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations
+;; fistp with the rounding mode forced to ceil (i387_cw "ceil");
+;; output_fix_trunc flag bool-ified (0 -> false).
(define_insn "fistdi2_ceil"
[(set (match_operand:DI 0 "memory_operand" "=m")
(unspec:DI [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST_CEIL))
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" "m"))
(use (match_operand:HI 3 "memory_operand" "m"))
(clobber (match_scratch:XF 4 "=&1f"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "ceil")
(set_attr "mode" "DI")])
(define_insn "fistdi2_ceil_with_temp"
[(set (match_operand:DI 0 "nonimmediate_operand" "=m,?r")
(unspec:DI [(match_operand:XF 1 "register_operand" "f,f")]
- UNSPEC_FIST_CEIL))
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" "m,m"))
(use (match_operand:HI 3 "memory_operand" "m,m"))
(clobber (match_operand:DI 4 "memory_operand" "=X,m"))
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(unspec:DI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
(clobber (match_operand:DI 4 "memory_operand" ""))
(clobber (match_scratch 5 ""))]
"reload_completed"
- [(parallel [(set (match_dup 4) (unspec:DI [(match_dup 1)] UNSPEC_FIST_CEIL))
+ [(parallel [(set (match_dup 4)
+ (unspec:DI [(match_dup 1)] UNSPEC_FIST_CEIL))
(use (match_dup 2))
(use (match_dup 3))
(clobber (match_dup 5))])
+;; Memory-destination split for DImode ceil-fist; formatting-only
+;; reflow of the replacement parallel.
(define_split
[(set (match_operand:DI 0 "memory_operand" "")
(unspec:DI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
(clobber (match_operand:DI 4 "memory_operand" ""))
(clobber (match_scratch 5 ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (unspec:DI [(match_dup 1)] UNSPEC_FIST_CEIL))
+ [(parallel [(set (match_dup 0)
+ (unspec:DI [(match_dup 1)] UNSPEC_FIST_CEIL))
(use (match_dup 2))
(use (match_dup 3))
(clobber (match_dup 5))])])
+;; HI/SI ceil-fist (SWI24 replaces X87MODEI12); output_fix_trunc flag
+;; bool-ified.
(define_insn "fist<mode>2_ceil"
- [(set (match_operand:X87MODEI12 0 "memory_operand" "=m")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f")]
- UNSPEC_FIST_CEIL))
+ [(set (match_operand:SWI24 0 "memory_operand" "=m")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f")]
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" "m"))
(use (match_operand:HI 3 "memory_operand" "m"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
- "* return output_fix_trunc (insn, operands, 0);"
+ "* return output_fix_trunc (insn, operands, false);"
[(set_attr "type" "fistp")
(set_attr "i387_cw" "ceil")
(set_attr "mode" "<MODE>")])
(define_insn "fist<mode>2_ceil_with_temp"
- [(set (match_operand:X87MODEI12 0 "nonimmediate_operand" "=m,?r")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "f,f")]
- UNSPEC_FIST_CEIL))
+ [(set (match_operand:SWI24 0 "nonimmediate_operand" "=m,?r")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "f,f")]
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" "m,m"))
(use (match_operand:HI 3 "memory_operand" "m,m"))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" "=X,m"))]
+ (clobber (match_operand:SWI24 4 "memory_operand" "=X,m"))]
"TARGET_USE_FANCY_MATH_387
&& flag_unsafe_math_optimizations"
"#"
(set_attr "mode" "<MODE>")])
+;; Register-destination ceil-fist split: store via the memory temp
+;; (operand 4), then load into the register.
(define_split
- [(set (match_operand:X87MODEI12 0 "register_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ [(set (match_operand:SWI24 0 "register_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 4) (unspec:X87MODEI12 [(match_dup 1)]
- UNSPEC_FIST_CEIL))
+ [(parallel [(set (match_dup 4)
+ (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST_CEIL))
(use (match_dup 2))
(use (match_dup 3))])
(set (match_dup 0) (match_dup 4))])
;; After reload, split the *_with_temp form when the destination is
;; already memory: emit the fistp store directly to operand 0; the
;; scratch memory slot (operand 4) is simply dropped.
(define_split
- [(set (match_operand:X87MODEI12 0 "memory_operand" "")
- (unspec:X87MODEI12 [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ [(set (match_operand:SWI24 0 "memory_operand" "")
+ (unspec:SWI24 [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_CEIL))
(use (match_operand:HI 2 "memory_operand" ""))
(use (match_operand:HI 3 "memory_operand" ""))
- (clobber (match_operand:X87MODEI12 4 "memory_operand" ""))]
+ (clobber (match_operand:SWI24 4 "memory_operand" ""))]
"reload_completed"
- [(parallel [(set (match_dup 0) (unspec:X87MODEI12 [(match_dup 1)]
- UNSPEC_FIST_CEIL))
+ [(parallel [(set (match_dup 0)
+ (unspec:SWI24 [(match_dup 1)] UNSPEC_FIST_CEIL))
(use (match_dup 2))
(use (match_dup 3))])])
(define_expand "lceilxf<mode>2"
- [(parallel [(set (match_operand:X87MODEI 0 "nonimmediate_operand" "")
- (unspec:X87MODEI [(match_operand:XF 1 "register_operand" "")]
- UNSPEC_FIST_CEIL))
+ [(parallel [(set (match_operand:SWI248x 0 "nonimmediate_operand" "")
+ (unspec:SWI248x [(match_operand:XF 1 "register_operand" "")]
+ UNSPEC_FIST_CEIL))
(clobber (reg:CC FLAGS_REG))])]
"TARGET_USE_FANCY_MATH_387
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387)
"SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
&& !flag_trapping_math"
{
- ix86_expand_lfloorceil (operand0, operand1, false);
+ ix86_expand_lfloorceil (operands[0], operands[1], false);
DONE;
})
&& !flag_trapping_math)"
{
if (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH
- && !flag_trapping_math
- && (TARGET_ROUND || optimize_insn_for_speed_p ()))
+ && !flag_trapping_math)
{
if (TARGET_ROUND)
emit_insn (gen_sse4_1_round<mode>2
else if (optimize_insn_for_size_p ())
FAIL;
else if (TARGET_64BIT || (<MODE>mode != DFmode))
- ix86_expand_trunc (operand0, operand1);
+ ix86_expand_trunc (operands[0], operands[1]);
else
- ix86_expand_truncdf_32 (operand0, operand1);
+ ix86_expand_truncdf_32 (operands[0], operands[1]);
}
else
{
(set (match_operand:DI 1 "register_operand" "=S")
(plus:DI (match_dup 3)
(const_int 8)))]
- "TARGET_64BIT"
+ "TARGET_64BIT
+ && !(fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"movsq"
[(set_attr "type" "str")
(set_attr "memory" "both")
(set (match_operand:P 1 "register_operand" "=S")
(plus:P (match_dup 3)
(const_int 4)))]
- ""
+ "!(fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"movs{l|d}"
[(set_attr "type" "str")
(set_attr "memory" "both")
(set (match_operand:P 1 "register_operand" "=S")
(plus:P (match_dup 3)
(const_int 2)))]
- ""
+ "!(fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"movsw"
[(set_attr "type" "str")
(set_attr "memory" "both")
(set (match_operand:P 1 "register_operand" "=S")
(plus:P (match_dup 3)
(const_int 1)))]
- ""
+ "!(fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"movsb"
[(set_attr "type" "str")
(set_attr "memory" "both")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
(set (mem:BLK (match_dup 3))
(mem:BLK (match_dup 4)))
(use (match_dup 5))]
- "TARGET_64BIT"
+ "TARGET_64BIT
+ && !(fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"rep{%;} movsq"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
(set (mem:BLK (match_dup 3))
(mem:BLK (match_dup 4)))
(use (match_dup 5))]
- ""
+ "!(fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"rep{%;} movs{l|d}"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
(set (mem:BLK (match_dup 3))
(mem:BLK (match_dup 4)))
(use (match_dup 5))]
- ""
+ "!(fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"rep{%;} movsb"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
operands[3] = gen_rtx_PLUS (Pmode, operands[0],
GEN_INT (GET_MODE_SIZE (GET_MODE
(operands[2]))));
- if (TARGET_SINGLE_STRINGOP || optimize_insn_for_size_p ())
+ /* Can't use this if the user has appropriated eax or edi. */
+ if ((TARGET_SINGLE_STRINGOP || optimize_insn_for_size_p ())
+ && !(fixed_regs[AX_REG] || fixed_regs[DI_REG]))
{
emit_insn (gen_strset_singleop (operands[0], operands[1], operands[2],
operands[3]));
(set (match_operand:DI 0 "register_operand" "=D")
(plus:DI (match_dup 1)
(const_int 8)))]
- "TARGET_64BIT"
+ "TARGET_64BIT
+ && !(fixed_regs[AX_REG] || fixed_regs[DI_REG])"
"stosq"
[(set_attr "type" "str")
(set_attr "memory" "store")
(set (match_operand:P 0 "register_operand" "=D")
(plus:P (match_dup 1)
(const_int 4)))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[DI_REG])"
"stos{l|d}"
[(set_attr "type" "str")
(set_attr "memory" "store")
(set (match_operand:P 0 "register_operand" "=D")
(plus:P (match_dup 1)
(const_int 2)))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[DI_REG])"
"stosw"
[(set_attr "type" "str")
(set_attr "memory" "store")
(set (match_operand:P 0 "register_operand" "=D")
(plus:P (match_dup 1)
(const_int 1)))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[DI_REG])"
"stosb"
[(set_attr "type" "str")
(set_attr "memory" "store")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
(const_int 0))
(use (match_operand:DI 2 "register_operand" "a"))
(use (match_dup 4))]
- "TARGET_64BIT"
+ "TARGET_64BIT
+ && !(fixed_regs[AX_REG] || fixed_regs[CX_REG] || fixed_regs[DI_REG])"
"rep{%;} stosq"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
(const_int 0))
(use (match_operand:SI 2 "register_operand" "a"))
(use (match_dup 4))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[CX_REG] || fixed_regs[DI_REG])"
"rep{%;} stos{l|d}"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
(const_int 0))
(use (match_operand:QI 2 "register_operand" "a"))
(use (match_dup 4))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[CX_REG] || fixed_regs[DI_REG])"
"rep{%;} stosb"
[(set_attr "type" "str")
(set_attr "prefix_rep" "1")
(set_attr "memory" "store")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "mode" "QI")])
if (optimize_insn_for_size_p () && !TARGET_INLINE_ALL_STRINGOPS)
FAIL;
- /* Can't use this if the user has appropriated esi or edi. */
- if (fixed_regs[SI_REG] || fixed_regs[DI_REG])
+ /* Can't use this if the user has appropriated ecx, esi or edi. */
+ if (fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])
FAIL;
out = operands[0];
(clobber (match_operand:P 0 "register_operand" "=S"))
(clobber (match_operand:P 1 "register_operand" "=D"))
(clobber (match_operand:P 2 "register_operand" "=c"))]
- ""
+ "!(fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"repz{%;} cmpsb"
[(set_attr "type" "str")
(set_attr "mode" "QI")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "prefix_rep" "1")])
(clobber (match_operand:P 0 "register_operand" "=S"))
(clobber (match_operand:P 1 "register_operand" "=D"))
(clobber (match_operand:P 2 "register_operand" "=c"))]
- ""
+ "!(fixed_regs[CX_REG] || fixed_regs[SI_REG] || fixed_regs[DI_REG])"
"repz{%;} cmpsb"
[(set_attr "type" "str")
(set_attr "mode" "QI")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "prefix_rep" "1")])
(define_expand "strlen<mode>"
- [(set (match_operand:SWI48x 0 "register_operand" "")
- (unspec:SWI48x [(match_operand:BLK 1 "general_operand" "")
- (match_operand:QI 2 "immediate_operand" "")
- (match_operand 3 "immediate_operand" "")]
- UNSPEC_SCAS))]
+ [(set (match_operand:P 0 "register_operand" "")
+ (unspec:P [(match_operand:BLK 1 "general_operand" "")
+ (match_operand:QI 2 "immediate_operand" "")
+ (match_operand 3 "immediate_operand" "")]
+ UNSPEC_SCAS))]
""
{
if (ix86_expand_strlen (operands[0], operands[1], operands[2], operands[3]))
(match_operand:P 4 "register_operand" "0")] UNSPEC_SCAS))
(clobber (match_operand:P 1 "register_operand" "=D"))
(clobber (reg:CC FLAGS_REG))]
- ""
+ "!(fixed_regs[AX_REG] || fixed_regs[CX_REG] || fixed_regs[DI_REG])"
"repnz{%;} scasb"
[(set_attr "type" "str")
(set_attr "mode" "QI")
(set (attr "prefix_rex")
(if_then_else
- (ne (symbol_ref "<P:MODE>mode == DImode") (const_int 0))
+ (match_test "<P:MODE>mode == DImode")
(const_string "0")
(const_string "*")))
(set_attr "prefix_rep" "1")])
;; Integer conditional move expander.  All of the real work is done by
;; ix86_expand_int_movcc (cmov, sbb-based, or branch sequences); the
;; expander FAILs if no profitable form can be generated.
(define_expand "mov<mode>cc"
[(set (match_operand:SWIM 0 "register_operand" "")
(if_then_else:SWIM (match_operand 1 "ordered_comparison_operator" "")
- (match_operand:SWIM 2 "general_operand" "")
- (match_operand:SWIM 3 "general_operand" "")))]
+ (match_operand:SWIM 2 "<general_operand>" "")
+ (match_operand:SWIM 3 "<general_operand>" "")))]
""
"if (ix86_expand_int_movcc (operands)) DONE; else FAIL;")
(define_insn "*x86_mov<mode>cc_0_m1_neg"
[(set (match_operand:SWI48 0 "register_operand" "=r")
(neg:SWI48 (match_operator 1 "ix86_carry_flag_operator"
- [(reg FLAGS_REG) (const_int 0)])))]
+ [(reg FLAGS_REG) (const_int 0)])))
+ (clobber (reg:CC FLAGS_REG))]
""
"sbb{<imodesuffix>}\t%0, %0"
[(set_attr "type" "alu")
[(set_attr "type" "icmov")
(set_attr "mode" "<MODE>")])
;; QImode conditional move between registers, for targets with CMOV and
;; no partial-register stall.  Emits "#" only; the actual widening to a
;; SImode cmov is performed by a separate post-reload define_split (the
;; generic QI/HI cmov split elsewhere in this file).
-(define_insn_and_split "*movqicc_noc"
+(define_insn "*movqicc_noc"
[(set (match_operand:QI 0 "register_operand" "=r,r")
(if_then_else:QI (match_operator 1 "ix86_comparison_operator"
- [(match_operand 4 "flags_reg_operand" "")
- (const_int 0)])
+ [(reg FLAGS_REG) (const_int 0)])
(match_operand:QI 2 "register_operand" "r,0")
(match_operand:QI 3 "register_operand" "0,r")))]
"TARGET_CMOVE && !TARGET_PARTIAL_REG_STALL"
"#"
- "&& reload_completed"
- [(set (match_dup 0)
- (if_then_else:SI (match_op_dup 1 [(match_dup 4) (const_int 0)])
- (match_dup 2)
- (match_dup 3)))]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- operands[3] = gen_lowpart (SImode, operands[3]);"
[(set_attr "type" "icmov")
- (set_attr "mode" "SI")])
+ (set_attr "mode" "QI")])
+
+;; After reload, widen a QImode or HImode register-only conditional
+;; move to a SImode cmov: there is no narrow cmov instruction worth
+;; using, so take the SImode lowpart views of all three registers.
+(define_split
+ [(set (match_operand 0 "register_operand")
+ (if_then_else (match_operator 1 "ix86_comparison_operator"
+ [(reg FLAGS_REG) (const_int 0)])
+ (match_operand 2 "register_operand")
+ (match_operand 3 "register_operand")))]
+ "TARGET_CMOVE && !TARGET_PARTIAL_REG_STALL
+ && (GET_MODE (operands[0]) == QImode
+ || GET_MODE (operands[0]) == HImode)
+ && reload_completed"
+ [(set (match_dup 0)
+ (if_then_else:SI (match_dup 1) (match_dup 2) (match_dup 3)))]
+{
+ ;; Reinterpret the hard registers as SImode; safe post-reload.
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[2] = gen_lowpart (SImode, operands[2]);
+ operands[3] = gen_lowpart (SImode, operands[3]);
+})
(define_expand "mov<mode>cc"
[(set (match_operand:X87MODEF 0 "register_operand" "")
(set_attr "mode" "DF,DF,DI,DI")])
;; On 32-bit targets, split a DFmode conditional move whose destination
;; lives in integer (not x87/SSE) registers into two SImode cmovs, one
;; for each 32-bit half.  split_double_mode produces the low/high half
;; operands used by the two new sets.
(define_split
- [(set (match_operand:DF 0 "register_and_not_any_fp_reg_operand" "")
+ [(set (match_operand:DF 0 "register_and_not_any_fp_reg_operand")
(if_then_else:DF (match_operator 1 "fcmov_comparison_operator"
- [(match_operand 4 "flags_reg_operand" "")
- (const_int 0)])
- (match_operand:DF 2 "nonimmediate_operand" "")
- (match_operand:DF 3 "nonimmediate_operand" "")))]
+ [(reg FLAGS_REG) (const_int 0)])
+ (match_operand:DF 2 "nonimmediate_operand")
+ (match_operand:DF 3 "nonimmediate_operand")))]
"!TARGET_64BIT && reload_completed"
[(set (match_dup 2)
- (if_then_else:SI (match_op_dup 1 [(match_dup 4) (const_int 0)])
- (match_dup 5)
- (match_dup 6)))
+ (if_then_else:SI (match_dup 1) (match_dup 4) (match_dup 5)))
(set (match_dup 3)
- (if_then_else:SI (match_op_dup 1 [(match_dup 4) (const_int 0)])
- (match_dup 7)
- (match_dup 8)))]
+ (if_then_else:SI (match_dup 1) (match_dup 6) (match_dup 7)))]
{
;; Split the two DF sources into SI halves, then the DF destination.
- split_double_mode (DImode, &operands[2], 2, &operands[5], &operands[7]);
+ split_double_mode (DImode, &operands[2], 2, &operands[4], &operands[6]);
split_double_mode (DImode, &operands[0], 1, &operands[2], &operands[3]);
})
;; The % modifier is not operational anymore in peephole2's, so we have to
;; swap the operands manually in the case of addition and multiplication.
- "if (COMMUTATIVE_ARITH_P (operands[2]))
- operands[4] = gen_rtx_fmt_ee (GET_CODE (operands[2]),
- GET_MODE (operands[2]),
- operands[0], operands[1]);
- else
- operands[4] = gen_rtx_fmt_ee (GET_CODE (operands[2]),
- GET_MODE (operands[2]),
- operands[1], operands[0]);")
+{
+ rtx op0, op1;
+
+ if (COMMUTATIVE_ARITH_P (operands[2]))
+ op0 = operands[0], op1 = operands[1];
+ else
+ op0 = operands[1], op1 = operands[0];
+
+ operands[4] = gen_rtx_fmt_ee (GET_CODE (operands[2]),
+ GET_MODE (operands[2]),
+ op0, op1);
+})
;; Conditional addition patterns
(define_expand "add<mode>cc"
}
[(set (attr "type")
(cond [(and (eq_attr "alternative" "0")
- (eq (symbol_ref "TARGET_OPT_AGU") (const_int 0)))
+ (not (match_test "TARGET_OPT_AGU")))
(const_string "alu")
(match_operand:<MODE> 2 "const0_operand" "")
(const_string "imov")
[(parallel [(set (match_dup 0)
(match_op_dup 3 [(match_dup 1) (match_dup 2)]))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- if (GET_CODE (operands[3]) != ASHIFT)
- operands[2] = gen_lowpart (SImode, operands[2]);
- PUT_MODE (operands[3], SImode);")
+{
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+ if (GET_CODE (operands[3]) != ASHIFT)
+ operands[2] = gen_lowpart (SImode, operands[2]);
+ PUT_MODE (operands[3], SImode);
+})
; Promote the QImode tests, as i386 has encoding of the AND
; instruction with 32-bit sign-extended immediate and thus the
[(parallel [(set (match_dup 0)
(neg:SI (match_dup 1)))
(clobber (reg:CC FLAGS_REG))])]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);")
+{
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+})
(define_split
[(set (match_operand 0 "register_operand" "")
|| optimize_insn_for_size_p ())))"
[(set (match_dup 0)
(not:SI (match_dup 1)))]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);")
-
-(define_split
- [(set (match_operand 0 "register_operand" "")
- (if_then_else (match_operator 1 "ordered_comparison_operator"
- [(reg FLAGS_REG) (const_int 0)])
- (match_operand 2 "register_operand" "")
- (match_operand 3 "register_operand" "")))]
- "! TARGET_PARTIAL_REG_STALL && TARGET_CMOVE
- && (GET_MODE (operands[0]) == HImode
- || (GET_MODE (operands[0]) == QImode
- && (TARGET_PROMOTE_QImode
- || optimize_insn_for_size_p ())))"
- [(set (match_dup 0)
- (if_then_else:SI (match_dup 1) (match_dup 2) (match_dup 3)))]
- "operands[0] = gen_lowpart (SImode, operands[0]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- operands[3] = gen_lowpart (SImode, operands[3]);")
+{
+ operands[0] = gen_lowpart (SImode, operands[0]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+})
\f
;; RTL Peephole optimizations, run before sched2. These primarily look to
;; transform a complex memory operation into two memory to register operations.
[(set (match_operand:SWI 0 "push_operand" "")
(match_operand:SWI 1 "memory_operand" ""))
(match_scratch:SWI 2 "<r>")]
- "optimize_insn_for_speed_p () && !TARGET_PUSH_MEMORY
+ "!(TARGET_PUSH_MEMORY || optimize_insn_for_size_p ())
&& !RTX_FRAME_RELATED_P (peep2_next_insn (0))"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (match_dup 2))])
[(set (match_operand:SF 0 "push_operand" "")
(match_operand:SF 1 "memory_operand" ""))
(match_scratch:SF 2 "r")]
- "optimize_insn_for_speed_p () && !TARGET_PUSH_MEMORY
+ "!(TARGET_PUSH_MEMORY || optimize_insn_for_size_p ())
&& !RTX_FRAME_RELATED_P (peep2_next_insn (0))"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (match_dup 2))])
[(match_dup 0)
(match_operand:SI 1 "memory_operand" "")]))
(clobber (reg:CC FLAGS_REG))])]
- "optimize_insn_for_speed_p () && ! TARGET_READ_MODIFY"
+ "!(TARGET_READ_MODIFY || optimize_insn_for_size_p ())"
[(set (match_dup 2) (match_dup 1))
(parallel [(set (match_dup 0)
(match_op_dup 3 [(match_dup 0) (match_dup 2)]))
[(match_operand:SI 1 "memory_operand" "")
(match_dup 0)]))
(clobber (reg:CC FLAGS_REG))])]
- "optimize_insn_for_speed_p () && ! TARGET_READ_MODIFY"
+ "!(TARGET_READ_MODIFY || optimize_insn_for_size_p ())"
[(set (match_dup 2) (match_dup 1))
(parallel [(set (match_dup 0)
(match_op_dup 3 [(match_dup 2) (match_dup 0)]))
[(match_dup 0)
(match_operand:SI 1 "nonmemory_operand" "")]))
(clobber (reg:CC FLAGS_REG))])]
- "optimize_insn_for_speed_p () && ! TARGET_READ_MODIFY_WRITE
+ "!(TARGET_READ_MODIFY_WRITE || optimize_insn_for_size_p ())
/* Do not split stack checking probes. */
&& GET_CODE (operands[3]) != IOR && operands[1] != const0_rtx"
[(set (match_dup 2) (match_dup 0))
[(match_operand:SI 1 "nonmemory_operand" "")
(match_dup 0)]))
(clobber (reg:CC FLAGS_REG))])]
- "optimize_insn_for_speed_p () && ! TARGET_READ_MODIFY_WRITE
+ "!(TARGET_READ_MODIFY_WRITE || optimize_insn_for_size_p ())
/* Do not split stack checking probes. */
&& GET_CODE (operands[3]) != IOR && operands[1] != const0_rtx"
[(set (match_dup 2) (match_dup 0))
(clobber (reg:CC FLAGS_REG))])
(set (match_dup 0) (match_dup 2))])
+;; Attempt to combine a load / arithmetic-or-logical op / store /
+;; flags-test sequence into a single memory-destination operation that
+;; also sets the flags register.
+;; Combine the four-insn sequence
+;;   reg = mem;  reg = reg OP operand2;  mem = reg;  flags = reg <=> 0
+;; into a single memory-destination OP that also sets the flags.
+;; Requires reg to be dead after the compare and not to overlap the
+;; memory address.  CCGOCmode suffices for PLUS/MINUS results, CCNOmode
+;; for the logical operators (per ix86_match_ccmode).
+(define_peephole2
+ [(set (match_operand:SWI 0 "register_operand" "")
+ (match_operand:SWI 1 "memory_operand" ""))
+ (parallel [(set (match_dup 0)
+ (match_operator:SWI 3 "plusminuslogic_operator"
+ [(match_dup 0)
+ (match_operand:SWI 2 "<nonmemory_operand>" "")]))
+ (clobber (reg:CC FLAGS_REG))])
+ (set (match_dup 1) (match_dup 0))
+ (set (reg FLAGS_REG) (compare (match_dup 0) (const_int 0)))]
+ "(TARGET_READ_MODIFY_WRITE || optimize_insn_for_size_p ())
+ && peep2_reg_dead_p (4, operands[0])
+ && !reg_overlap_mentioned_p (operands[0], operands[1])
+ && (<MODE>mode != QImode
+ || immediate_operand (operands[2], QImode)
+ || q_regs_operand (operands[2], QImode))
+ && ix86_match_ccmode (peep2_next_insn (3),
+ (GET_CODE (operands[3]) == PLUS
+ || GET_CODE (operands[3]) == MINUS)
+ ? CCGOCmode : CCNOmode)"
+ [(parallel [(set (match_dup 4) (match_dup 5))
+ (set (match_dup 1) (match_op_dup 3 [(match_dup 1)
+ (match_dup 2)]))])]
+{
+ ;; operand 4 = the flags destination of the original compare insn;
+ ;; operand 5 = COMPARE of the recomputed result against zero.
+ operands[4] = SET_DEST (PATTERN (peep2_next_insn (3)));
+ operands[5] = gen_rtx_fmt_ee (GET_CODE (operands[3]), <MODE>mode,
+ copy_rtx (operands[1]),
+ copy_rtx (operands[2]));
+ operands[5] = gen_rtx_COMPARE (GET_MODE (operands[4]),
+ operands[5], const0_rtx);
+})
+
+;; As above, but for  reg = reg OP mem;  mem = reg;  flags = reg <=> 0.
+;; MINUS is excluded because rewriting it as a memory-destination op
+;; would compute mem - reg instead of reg - mem.
+(define_peephole2
+ [(parallel [(set (match_operand:SWI 0 "register_operand" "")
+ (match_operator:SWI 2 "plusminuslogic_operator"
+ [(match_dup 0)
+ (match_operand:SWI 1 "memory_operand" "")]))
+ (clobber (reg:CC FLAGS_REG))])
+ (set (match_dup 1) (match_dup 0))
+ (set (reg FLAGS_REG) (compare (match_dup 0) (const_int 0)))]
+ "(TARGET_READ_MODIFY_WRITE || optimize_insn_for_size_p ())
+ && GET_CODE (operands[2]) != MINUS
+ && peep2_reg_dead_p (3, operands[0])
+ && !reg_overlap_mentioned_p (operands[0], operands[1])
+ && ix86_match_ccmode (peep2_next_insn (2),
+ GET_CODE (operands[2]) == PLUS
+ ? CCGOCmode : CCNOmode)"
+ [(parallel [(set (match_dup 3) (match_dup 4))
+ (set (match_dup 1) (match_op_dup 2 [(match_dup 1)
+ (match_dup 0)]))])]
+{
+ ;; operand 3 = flags destination of the compare; operand 4 = COMPARE
+ ;; of the recomputed (mem OP reg) result against zero.
+ operands[3] = SET_DEST (PATTERN (peep2_next_insn (2)));
+ operands[4] = gen_rtx_fmt_ee (GET_CODE (operands[2]), <MODE>mode,
+ copy_rtx (operands[1]),
+ copy_rtx (operands[0]));
+ operands[4] = gen_rtx_COMPARE (GET_MODE (operands[3]),
+ operands[4], const0_rtx);
+})
+
+;; Variant for a narrow (QI/HI) load whose arithmetic is performed in
+;; SImode on the same hard register:
+;;   reg:SWI12 = mem;  reg:SI = reg:SI OP op2;  mem = reg:SWI12;
+;;   flags = reg:SWI12 <=> 0
+;; becomes a narrow memory-destination OP plus flags set.  Operand 2 is
+;; narrowed to the memory mode via gen_lowpart in the preparation code.
+(define_peephole2
+ [(set (match_operand:SWI12 0 "register_operand" "")
+ (match_operand:SWI12 1 "memory_operand" ""))
+ (parallel [(set (match_operand:SI 4 "register_operand" "")
+ (match_operator:SI 3 "plusminuslogic_operator"
+ [(match_dup 4)
+ (match_operand:SI 2 "nonmemory_operand" "")]))
+ (clobber (reg:CC FLAGS_REG))])
+ (set (match_dup 1) (match_dup 0))
+ (set (reg FLAGS_REG) (compare (match_dup 0) (const_int 0)))]
+ "(TARGET_READ_MODIFY_WRITE || optimize_insn_for_size_p ())
+ && REG_P (operands[0]) && REG_P (operands[4])
+ && REGNO (operands[0]) == REGNO (operands[4])
+ && peep2_reg_dead_p (4, operands[0])
+ && (<MODE>mode != QImode
+ || immediate_operand (operands[2], SImode)
+ || q_regs_operand (operands[2], SImode))
+ && !reg_overlap_mentioned_p (operands[0], operands[1])
+ && ix86_match_ccmode (peep2_next_insn (3),
+ (GET_CODE (operands[3]) == PLUS
+ || GET_CODE (operands[3]) == MINUS)
+ ? CCGOCmode : CCNOmode)"
+ [(parallel [(set (match_dup 4) (match_dup 5))
+ (set (match_dup 1) (match_dup 6))])]
+{
+ ;; operand 4 = flags destination; operand 5 = COMPARE of the narrow
+ ;; result against zero; operand 6 = the narrow (mem OP op2) RMW set.
+ operands[2] = gen_lowpart (<MODE>mode, operands[2]);
+ operands[4] = SET_DEST (PATTERN (peep2_next_insn (3)));
+ operands[5] = gen_rtx_fmt_ee (GET_CODE (operands[3]), <MODE>mode,
+ copy_rtx (operands[1]), operands[2]);
+ operands[5] = gen_rtx_COMPARE (GET_MODE (operands[4]),
+ operands[5], const0_rtx);
+ operands[6] = gen_rtx_fmt_ee (GET_CODE (operands[3]), <MODE>mode,
+ copy_rtx (operands[1]),
+ copy_rtx (operands[2]));
+})
+
;; Attempt to always use XOR for zeroing registers.
(define_peephole2
[(set (match_operand 0 "register_operand" "")
[(parallel
[(set (match_operand:SWI48 0 "register_operand" "")
(mult:SWI48 (match_operand:SWI48 1 "register_operand" "")
- (match_operand:SWI48 2 "const_int_operand" "")))
+ (match_operand:SWI48 2 "const359_operand" "")))
(clobber (reg:CC FLAGS_REG))])]
- "INTVAL (operands[2]) == 3
- || INTVAL (operands[2]) == 5
- || INTVAL (operands[2]) == 9"
+ "!TARGET_PARTIAL_REG_STALL
+ || <MODE>mode == SImode
+ || optimize_function_for_size_p (cfun)"
[(set (match_dup 0)
(plus:SWI48 (mult:SWI48 (match_dup 1) (match_dup 2))
(match_dup 1)))]
[(parallel
[(set (match_operand:SWI48 0 "register_operand" "")
(mult:SWI48 (match_operand:SWI48 1 "nonimmediate_operand" "")
- (match_operand:SWI48 2 "const_int_operand" "")))
+ (match_operand:SWI48 2 "const359_operand" "")))
(clobber (reg:CC FLAGS_REG))])]
"optimize_insn_for_speed_p ()
- && (INTVAL (operands[2]) == 3
- || INTVAL (operands[2]) == 5
- || INTVAL (operands[2]) == 9)"
+ && (!TARGET_PARTIAL_REG_STALL || <MODE>mode == SImode)"
[(set (match_dup 0) (match_dup 1))
(set (match_dup 0)
(plus:SWI48 (mult:SWI48 (match_dup 0) (match_dup 2))
operands[0] = dest;
})
\f
-;; Call-value patterns last so that the wildcard operand does not
-;; disrupt insn-recog's switch tables.
-
-(define_insn_and_split "*call_value_pop_0_vzeroupper"
- [(parallel
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "constant_call_address_operand" ""))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "")))])
- (unspec [(match_operand 4 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[4]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_pop_0"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "constant_call_address_operand" ""))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "")))]
- "!TARGET_64BIT"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_pop_1_vzeroupper"
- [(parallel
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lsm"))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "i")))])
- (unspec [(match_operand 4 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[4]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_pop_1"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lsm"))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "i")))]
- "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*sibcall_value_pop_1_vzeroupper"
- [(parallel
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "i,i")))])
- (unspec [(match_operand 4 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[4]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*sibcall_value_pop_1"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 2 "" "")))
- (set (reg:SI SP_REG)
- (plus:SI (reg:SI SP_REG)
- (match_operand:SI 3 "immediate_operand" "i,i")))]
- "!TARGET_64BIT && SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_0_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "constant_call_address_operand" ""))
- (match_operand:SI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_0"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "constant_call_address_operand" ""))
- (match_operand:SI 2 "" "")))]
- "!TARGET_64BIT"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_0_rex64_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "constant_call_address_operand" ""))
- (match_operand:DI 2 "const_int_operand" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_0_rex64"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "constant_call_address_operand" ""))
- (match_operand:DI 2 "const_int_operand" "")))]
- "TARGET_64BIT"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_0_rex64_ms_sysv_vzeroupper"
- [(parallel
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "constant_call_address_operand" ""))
- (match_operand:DI 2 "const_int_operand" "")))
- (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
- (clobber (reg:TI XMM6_REG))
- (clobber (reg:TI XMM7_REG))
- (clobber (reg:TI XMM8_REG))
- (clobber (reg:TI XMM9_REG))
- (clobber (reg:TI XMM10_REG))
- (clobber (reg:TI XMM11_REG))
- (clobber (reg:TI XMM12_REG))
- (clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))])
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_0_rex64_ms_sysv"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "constant_call_address_operand" ""))
- (match_operand:DI 2 "const_int_operand" "")))
- (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
- (clobber (reg:TI XMM6_REG))
- (clobber (reg:TI XMM7_REG))
- (clobber (reg:TI XMM8_REG))
- (clobber (reg:TI XMM9_REG))
- (clobber (reg:TI XMM10_REG))
- (clobber (reg:TI XMM11_REG))
- (clobber (reg:TI XMM12_REG))
- (clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_1_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lsm"))
- (match_operand:SI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_1"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lsm"))
- (match_operand:SI 2 "" "")))]
- "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*sibcall_value_1_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*sibcall_value_1"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:SI 2 "" "")))]
- "!TARGET_64BIT && SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_1_rex64_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rsm"))
- (match_operand:DI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)
- && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_1_rex64"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rsm"))
- (match_operand:DI 2 "" "")))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)
- && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_1_rex64_ms_sysv_vzeroupper"
- [(parallel
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rsm"))
- (match_operand:DI 2 "" "")))
- (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
- (clobber (reg:TI XMM6_REG))
- (clobber (reg:TI XMM7_REG))
- (clobber (reg:TI XMM8_REG))
- (clobber (reg:TI XMM9_REG))
- (clobber (reg:TI XMM10_REG))
- (clobber (reg:TI XMM11_REG))
- (clobber (reg:TI XMM12_REG))
- (clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))])
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_1_rex64_ms_sysv"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rsm"))
- (match_operand:DI 2 "" "")))
- (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
- (clobber (reg:TI XMM6_REG))
- (clobber (reg:TI XMM7_REG))
- (clobber (reg:TI XMM8_REG))
- (clobber (reg:TI XMM9_REG))
- (clobber (reg:TI XMM10_REG))
- (clobber (reg:TI XMM11_REG))
- (clobber (reg:TI XMM12_REG))
- (clobber (reg:TI XMM13_REG))
- (clobber (reg:TI XMM14_REG))
- (clobber (reg:TI XMM15_REG))
- (clobber (reg:DI SI_REG))
- (clobber (reg:DI DI_REG))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*call_value_1_rex64_large_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rm"))
- (match_operand:DI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*call_value_1_rex64_large"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rm"))
- (match_operand:DI 2 "" "")))]
- "TARGET_64BIT && !SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-
-(define_insn_and_split "*sibcall_value_1_rex64_vzeroupper"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:DI 2 "" "")))
- (unspec [(match_operand 3 "const_int_operand" "")]
- UNSPEC_CALL_NEEDS_VZEROUPPER)]
- "TARGET_VZEROUPPER && TARGET_64BIT && SIBLING_CALL_P (insn)"
- "#"
- "&& reload_completed"
- [(const_int 0)]
- "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
- [(set_attr "type" "callv")])
-
-(define_insn "*sibcall_value_1_rex64"
- [(set (match_operand 0 "" "")
- (call (mem:QI (match_operand:DI 1 "sibcall_insn_operand" "s,U"))
- (match_operand:DI 2 "" "")))]
- "TARGET_64BIT && SIBLING_CALL_P (insn)"
- { return ix86_output_call_insn (insn, operands[1], 1); }
- [(set_attr "type" "callv")])
-\f
;; We used to use "int $5", in honor of #BR which maps to interrupt vector 5.
;; That, however, is usually mapped by the OS to SIGSEGV, which is often
;; caught for use by garbage collectors and the like. Using an insn that
#ifdef TARGET_THREAD_SSP_OFFSET
operands[1] = GEN_INT (TARGET_THREAD_SSP_OFFSET);
- insn = (TARGET_64BIT
+ insn = (TARGET_LP64
? gen_stack_tls_protect_set_di
: gen_stack_tls_protect_set_si);
#else
- insn = (TARGET_64BIT
+ insn = (TARGET_LP64
? gen_stack_protect_set_di
: gen_stack_protect_set_si);
#endif
})
(define_insn "stack_protect_set_<mode>"
- [(set (match_operand:P 0 "memory_operand" "=m")
- (unspec:P [(match_operand:P 1 "memory_operand" "m")] UNSPEC_SP_SET))
- (set (match_scratch:P 2 "=&r") (const_int 0))
+ [(set (match_operand:PTR 0 "memory_operand" "=m")
+ (unspec:PTR [(match_operand:PTR 1 "memory_operand" "m")]
+ UNSPEC_SP_SET))
+ (set (match_scratch:PTR 2 "=&r") (const_int 0))
(clobber (reg:CC FLAGS_REG))]
""
"mov{<imodesuffix>}\t{%1, %2|%2, %1}\;mov{<imodesuffix>}\t{%2, %0|%0, %2}\;xor{l}\t%k2, %k2"
[(set_attr "type" "multi")])
(define_insn "stack_tls_protect_set_<mode>"
- [(set (match_operand:P 0 "memory_operand" "=m")
- (unspec:P [(match_operand:P 1 "const_int_operand" "i")]
- UNSPEC_SP_TLS_SET))
- (set (match_scratch:P 2 "=&r") (const_int 0))
+ [(set (match_operand:PTR 0 "memory_operand" "=m")
+ (unspec:PTR [(match_operand:PTR 1 "const_int_operand" "i")]
+ UNSPEC_SP_TLS_SET))
+ (set (match_scratch:PTR 2 "=&r") (const_int 0))
(clobber (reg:CC FLAGS_REG))]
""
"mov{<imodesuffix>}\t{%@:%P1, %2|%2, <iptrsize> PTR %@:%P1}\;mov{<imodesuffix>}\t{%2, %0|%0, %2}\;xor{l}\t%k2, %k2"
#ifdef TARGET_THREAD_SSP_OFFSET
operands[1] = GEN_INT (TARGET_THREAD_SSP_OFFSET);
- insn = (TARGET_64BIT
+ insn = (TARGET_LP64
? gen_stack_tls_protect_test_di
: gen_stack_tls_protect_test_si);
#else
- insn = (TARGET_64BIT
+ insn = (TARGET_LP64
? gen_stack_protect_test_di
: gen_stack_protect_test_si);
#endif
(define_insn "stack_protect_test_<mode>"
[(set (match_operand:CCZ 0 "flags_reg_operand" "")
- (unspec:CCZ [(match_operand:P 1 "memory_operand" "m")
- (match_operand:P 2 "memory_operand" "m")]
+ (unspec:CCZ [(match_operand:PTR 1 "memory_operand" "m")
+ (match_operand:PTR 2 "memory_operand" "m")]
UNSPEC_SP_TEST))
- (clobber (match_scratch:P 3 "=&r"))]
+ (clobber (match_scratch:PTR 3 "=&r"))]
""
"mov{<imodesuffix>}\t{%1, %3|%3, %1}\;xor{<imodesuffix>}\t{%2, %3|%3, %2}"
[(set_attr "type" "multi")])
(define_insn "stack_tls_protect_test_<mode>"
[(set (match_operand:CCZ 0 "flags_reg_operand" "")
- (unspec:CCZ [(match_operand:P 1 "memory_operand" "m")
- (match_operand:P 2 "const_int_operand" "i")]
+ (unspec:CCZ [(match_operand:PTR 1 "memory_operand" "m")
+ (match_operand:PTR 2 "const_int_operand" "i")]
UNSPEC_SP_TLS_TEST))
- (clobber (match_scratch:P 3 "=r"))]
+ (clobber (match_scratch:PTR 3 "=r"))]
""
"mov{<imodesuffix>}\t{%1, %3|%3, %1}\;xor{<imodesuffix>}\t{%@:%P2, %3|%3, <iptrsize> PTR %@:%P2}"
[(set_attr "type" "multi")])
(match_operand:SI 3 "const_int_operand" "i")]
UNSPECV_LWPVAL_INTRINSIC)]
"TARGET_LWP"
- "/* Avoid unused variable warning. */
- (void) operand0;")
+ ;; Avoid unused variable warning.
+ "(void) operands[0];")
(define_insn "*lwp_lwpval<mode>3_1"
[(unspec_volatile [(match_operand:SWI48 0 "register_operand" "r")
(define_insn "rdrand<mode>_1"
[(set (match_operand:SWI248 0 "register_operand" "=r")
- (unspec:SWI248 [(const_int 0)] UNSPEC_RDRAND))
+ (unspec_volatile:SWI248 [(const_int 0)] UNSPECV_RDRAND))
(set (reg:CCC FLAGS_REG)
- (unspec:CCC [(const_int 0)] UNSPEC_RDRAND))]
+ (unspec_volatile:CCC [(const_int 0)] UNSPECV_RDRAND))]
"TARGET_RDRND"
"rdrand\t%0"
[(set_attr "type" "other")
(set_attr "prefix_extra" "1")])
+(define_expand "pause"
+ [(set (match_dup 0)
+ (unspec:BLK [(match_dup 0)] UNSPEC_PAUSE))]
+ ""
+{
+ operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
+ MEM_VOLATILE_P (operands[0]) = 1;
+})
+
+;; Use "rep; nop" instead of "pause" to support older assemblers;
+;; both spellings assemble to the same encoding (F3 90).
+(define_insn "*pause"
+ [(set (match_operand:BLK 0 "" "")
+ (unspec:BLK [(match_dup 0)] UNSPEC_PAUSE))]
+ ""
+ "rep; nop"
+ [(set_attr "length" "2")
+ (set_attr "memory" "unknown")])
+
(include "mmx.md")
(include "sse.md")
(include "sync.md")