(define_mode_attr mptrsize [(SI "si")
(DI "di")])
-(define_mode_attr ptrload [(SI "{l|lwz}")
+(define_mode_attr ptrload [(SI "lwz")
(DI "ld")])
(define_mode_attr rreg [(SF "f")
""
"@
lbz%U1%X1 %0,%1
- {rlinm|rlwinm} %0,%1,0,0xff"
+ rlwinm %0,%1,0,0xff"
[(set_attr "type" "load,*")])
(define_insn ""
(clobber (match_scratch:SI 2 "=r,r"))]
""
"@
- {andil.|andi.} %2,%1,0xff
+ andi. %2,%1,0xff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
(zero_extend:SI (match_dup 1)))]
""
"@
- {andil.|andi.} %0,%1,0xff
+ andi. %0,%1,0xff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
""
"@
lbz%U1%X1 %0,%1
- {rlinm|rlwinm} %0,%1,0,0xff"
+ rlwinm %0,%1,0,0xff"
[(set_attr "type" "load,*")])
(define_insn ""
(clobber (match_scratch:HI 2 "=r,r"))]
""
"@
- {andil.|andi.} %2,%1,0xff
+ andi. %2,%1,0xff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
(zero_extend:HI (match_dup 1)))]
""
"@
- {andil.|andi.} %0,%1,0xff
+ andi. %0,%1,0xff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
""
"@
lhz%U1%X1 %0,%1
- {rlinm|rlwinm} %0,%1,0,0xffff"
+ rlwinm %0,%1,0,0xffff"
[(set_attr "type" "load,*")])
(define_insn ""
(clobber (match_scratch:SI 2 "=r,r"))]
""
"@
- {andil.|andi.} %2,%1,0xffff
+ andi. %2,%1,0xffff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
(zero_extend:SI (match_dup 1)))]
""
"@
- {andil.|andi.} %0,%1,0xffff
+ andi. %0,%1,0xffff
#"
[(set_attr "type" "fast_compare,compare")
(set_attr "length" "4,8")])
"rs6000_gen_cell_microcode"
"@
lha%U1%X1 %0,%1
- {exts|extsh} %0,%1"
+ extsh %0,%1"
[(set_attr "type" "load_ext,exts")])
;; Sign-extend HImode to SImode, register-to-register only (extsh).
;; This variant is used under !rs6000_gen_cell_microcode; the companion
;; pattern (visible above) additionally offers the lha load form.
(define_insn ""
[(set (match_operand:SI 0 "gpc_reg_operand" "=r")
(sign_extend:SI (match_operand:HI 1 "gpc_reg_operand" "r")))]
"!rs6000_gen_cell_microcode"
- "{exts|extsh} %0,%1"
+ "extsh %0,%1"
[(set_attr "type" "exts")])
(define_insn ""
(clobber (match_scratch:SI 2 "=r,r"))]
""
"@
- {exts.|extsh.} %2,%1
+ extsh. %2,%1
#"
[(set_attr "type" "compare")
(set_attr "length" "4,8")])
(sign_extend:SI (match_dup 1)))]
""
"@
- {exts.|extsh.} %0,%1
+ extsh. %0,%1
#"
[(set_attr "type" "compare")
(set_attr "length" "4,8")])
(match_operand:GPR 2 "add_operand" "r,I,I,L")))]
"!DECIMAL_FLOAT_MODE_P (GET_MODE (operands[0])) && !DECIMAL_FLOAT_MODE_P (GET_MODE (operands[1]))"
"@
- {cax|add} %0,%1,%2
- {cal %0,%2(%1)|addi %0,%1,%2}
- {ai|addic} %0,%1,%2
- {cau|addis} %0,%1,%v2"
+ add %0,%1,%2
+ addi %0,%1,%2
+ addic %0,%1,%2
+ addis %0,%1,%v2"
[(set_attr "length" "4,4,4,4")])
(define_insn "addsi3_high"
(plus:SI (match_operand:SI 1 "gpc_reg_operand" "b")
(high:SI (match_operand 2 "" ""))))]
"TARGET_MACHO && !TARGET_64BIT"
- "{cau|addis} %0,%1,ha16(%2)"
+ "addis %0,%1,ha16(%2)"
[(set_attr "length" "4")])
(define_insn "*add<mode>3_internal2"
(clobber (match_scratch:P 3 "=r,r,r,r"))]
""
"@
- {cax.|add.} %3,%1,%2
- {ai.|addic.} %3,%1,%2
+ add. %3,%1,%2
+ addic. %3,%1,%2
#
#"
[(set_attr "type" "fast_compare,compare,compare,compare")
(match_dup 2)))]
""
"@
- {cax.|add.} %0,%1,%2
- {ai.|addic.} %0,%1,%2
+ add. %0,%1,%2
+ addic. %0,%1,%2
#
#"
[(set_attr "type" "fast_compare,compare,compare,compare")
[(set (match_operand:GPR 0 "gpc_reg_operand" "=r")
(clz:GPR (match_operand:GPR 1 "gpc_reg_operand" "r")))]
""
- "{cntlz|cntlz<wd>} %0,%1"
+ "cntlz<wd> %0,%1"
[(set_attr "type" "cntlz")])
(define_expand "ctz<mode>2"
(match_operand:SI 1 "reg_or_mem_operand" "Z,r,r")))]
""
"@
- {lbrx|lwbrx} %0,%y1
- {stbrx|stwbrx} %1,%y0
+ lwbrx %0,%y1
+ stwbrx %1,%y0
#"
[(set_attr "length" "4,4,12")
(set_attr "type" "load,store,*")])
(match_operand:SI 2 "reg_or_short_operand" "r,I")))]
""
"@
- {muls|mullw} %0,%1,%2
- {muli|mulli} %0,%1,%2"
+ mullw %0,%1,%2
+ mulli %0,%1,%2"
[(set (attr "type")
(cond [(match_operand:SI 2 "s8bit_cint_operand" "")
(const_string "imul3")
(clobber (match_scratch:SI 3 "=r,r"))]
""
"@
- {muls.|mullw.} %3,%1,%2
+ mullw. %3,%1,%2
#"
[(set_attr "type" "imul_compare")
(set_attr "length" "4,8")])
(mult:SI (match_dup 1) (match_dup 2)))]
""
"@
- {muls.|mullw.} %0,%1,%2
+ mullw. %0,%1,%2
#"
[(set_attr "type" "imul_compare")
(set_attr "length" "4,8")])
(div:GPR (match_operand:GPR 1 "gpc_reg_operand" "r")
(match_operand:GPR 2 "exact_log2_cint_operand" "N")))]
""
- "{srai|sra<wd>i} %0,%1,%p2\;{aze|addze} %0,%0"
+ "sra<wd>i %0,%1,%p2\;addze %0,%0"
[(set_attr "type" "two")
(set_attr "length" "8")])
(clobber (match_scratch:P 3 "=r,r"))]
""
"@
- {srai|sra<wd>i} %3,%1,%p2\;{aze.|addze.} %3,%3
+ sra<wd>i %3,%1,%p2\;addze. %3,%3
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")
(div:P (match_dup 1) (match_dup 2)))]
""
"@
- {srai|sra<wd>i} %0,%1,%p2\;{aze.|addze.} %0,%0
+ sra<wd>i %0,%1,%p2\;addze. %0,%0
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")
"rs6000_gen_cell_microcode"
"@
and %0,%1,%2
- {rlinm|rlwinm} %0,%1,0,%m2,%M2
- {andil.|andi.} %0,%1,%b2
- {andiu.|andis.} %0,%1,%u2"
+ rlwinm %0,%1,0,%m2,%M2
+ andi. %0,%1,%b2
+ andis. %0,%1,%u2"
[(set_attr "type" "*,*,fast_compare,fast_compare")])
(define_insn "andsi3_nomc"
"!rs6000_gen_cell_microcode"
"@
and %0,%1,%2
- {rlinm|rlwinm} %0,%1,0,%m2,%M2")
+ rlwinm %0,%1,0,%m2,%M2")
(define_insn "andsi3_internal0_nomc"
[(set (match_operand:SI 0 "gpc_reg_operand" "=r,r")
"!rs6000_gen_cell_microcode"
"@
and %0,%1,%2
- {rlinm|rlwinm} %0,%1,0,%m2,%M2")
+ rlwinm %0,%1,0,%m2,%M2")
;; Note to set cr's other than cr0 we do the and immediate and then
"TARGET_32BIT && rs6000_gen_cell_microcode"
"@
and. %3,%1,%2
- {andil.|andi.} %3,%1,%b2
- {andiu.|andis.} %3,%1,%u2
- {rlinm.|rlwinm.} %3,%1,0,%m2,%M2
+ andi. %3,%1,%b2
+ andis. %3,%1,%u2
+ rlwinm. %3,%1,0,%m2,%M2
#
#
#
"TARGET_64BIT && rs6000_gen_cell_microcode"
"@
#
- {andil.|andi.} %3,%1,%b2
- {andiu.|andis.} %3,%1,%u2
- {rlinm.|rlwinm.} %3,%1,0,%m2,%M2
+ andi. %3,%1,%b2
+ andis. %3,%1,%u2
+ rlwinm. %3,%1,0,%m2,%M2
#
#
#
"TARGET_32BIT && rs6000_gen_cell_microcode"
"@
and. %0,%1,%2
- {andil.|andi.} %0,%1,%b2
- {andiu.|andis.} %0,%1,%u2
- {rlinm.|rlwinm.} %0,%1,0,%m2,%M2
+ andi. %0,%1,%b2
+ andis. %0,%1,%u2
+ rlwinm. %0,%1,0,%m2,%M2
#
#
#
"TARGET_64BIT && rs6000_gen_cell_microcode"
"@
#
- {andil.|andi.} %0,%1,%b2
- {andiu.|andis.} %0,%1,%u2
- {rlinm.|rlwinm.} %0,%1,0,%m2,%M2
+ andi. %0,%1,%b2
+ andis. %0,%1,%u2
+ rlwinm. %0,%1,0,%m2,%M2
#
#
#
""
"@
%q3 %0,%1,%2
- {%q3il|%q3i} %0,%1,%b2
- {%q3iu|%q3is} %0,%1,%u2")
+ %q3i %0,%1,%b2
+ %q3is %0,%1,%u2")
(define_insn "*boolsi3_internal2"
[(set (match_operand:CC 0 "cc_reg_operand" "=x,?y")
operands[4] = GEN_INT (32 - start - size);
operands[1] = GEN_INT (start + size - 1);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[4] = GEN_INT (shift - start - size);
operands[1] = GEN_INT (start + size - 1);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[4] = GEN_INT (32 - shift - start - size);
operands[1] = GEN_INT (start + size - 1);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[4] = GEN_INT (32 - shift - start - size);
operands[1] = GEN_INT (start + size - 1);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
/* Align extract field with insert field */
operands[5] = GEN_INT (extract_start + extract_size - insert_start - insert_size);
operands[1] = GEN_INT (insert_start + insert_size - 1);
- return \"{rlimi|rlwimi} %0,%3,%h5,%h2,%h1\";
+ return \"rlwimi %0,%3,%h5,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[4] = GEN_INT(32 - INTVAL(operands[2]));
operands[2] = GEN_INT(mb);
operands[1] = GEN_INT(me);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[4] = GEN_INT(32 - INTVAL(operands[2]));
operands[2] = GEN_INT(mb);
operands[1] = GEN_INT(me);
- return \"{rlimi|rlwimi} %0,%3,%h4,%h2,%h1\";
+ return \"rlwimi %0,%3,%h4,%h2,%h1\";
}"
[(set_attr "type" "insert_word")])
operands[3] = const0_rtx;
else
operands[3] = GEN_INT (start + size);
- return \"{rlinm|rlwinm} %0,%1,%3,%s2,31\";
+ return \"rlwinm %0,%1,%3,%s2,31\";
}")
(define_insn "*extzvsi_internal1"
operands[3] = GEN_INT (((1 << (16 - (start & 15)))
- (1 << (16 - (start & 15) - size))));
if (start < 16)
- return \"{andiu.|andis.} %4,%1,%3\";
+ return \"andis. %4,%1,%3\";
else
- return \"{andil.|andi.} %4,%1,%3\";
+ return \"andi. %4,%1,%3\";
}
if (start + size >= 32)
operands[3] = const0_rtx;
else
operands[3] = GEN_INT (start + size);
- return \"{rlinm.|rlwinm.} %4,%1,%3,%s2,31\";
+ return \"rlwinm. %4,%1,%3,%s2,31\";
}"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
if (start >= 16 && start + size == 32)
{
operands[3] = GEN_INT ((1 << size) - 1);
- return \"{andil.|andi.} %0,%1,%3\";
+ return \"andi. %0,%1,%3\";
}
if (start + size >= 32)
operands[3] = const0_rtx;
else
operands[3] = GEN_INT (start + size);
- return \"{rlinm.|rlwinm.} %0,%1,%3,%s2,31\";
+ return \"rlwinm. %0,%1,%3,%s2,31\";
}"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(match_operand:SI 2 "reg_or_cint_operand" "r,i")))]
""
"@
- {rlnm|rlwnm} %0,%1,%2,0xffffffff
- {rlinm|rlwinm} %0,%1,%h2,0xffffffff"
+ rlwnm %0,%1,%2,0xffffffff
+ rlwinm %0,%1,%h2,0xffffffff"
[(set_attr "type" "var_shift_rotate,integer")])
(define_insn "*rotlsi3_64"
(match_operand:SI 2 "reg_or_cint_operand" "r,i"))))]
"TARGET_64BIT"
"@
- {rlnm|rlwnm} %0,%1,%2,0xffffffff
- {rlinm|rlwinm} %0,%1,%h2,0xffffffff"
+ rlwnm %0,%1,%2,0xffffffff
+ rlwinm %0,%1,%h2,0xffffffff"
[(set_attr "type" "var_shift_rotate,integer")])
(define_insn "*rotlsi3_internal2"
(clobber (match_scratch:SI 3 "=r,r,r,r"))]
""
"@
- {rlnm.|rlwnm.} %3,%1,%2,0xffffffff
- {rlinm.|rlwinm.} %3,%1,%h2,0xffffffff
+ rlwnm. %3,%1,%2,0xffffffff
+ rlwinm. %3,%1,%h2,0xffffffff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(rotate:SI (match_dup 1) (match_dup 2)))]
""
"@
- {rlnm.|rlwnm.} %0,%1,%2,0xffffffff
- {rlinm.|rlwinm.} %0,%1,%h2,0xffffffff
+ rlwnm. %0,%1,%2,0xffffffff
+ rlwinm. %0,%1,%h2,0xffffffff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(match_operand:SI 3 "mask_operand" "n,n")))]
""
"@
- {rlnm|rlwnm} %0,%1,%2,%m3,%M3
- {rlinm|rlwinm} %0,%1,%h2,%m3,%M3"
+ rlwnm %0,%1,%2,%m3,%M3
+ rlwinm %0,%1,%h2,%m3,%M3"
[(set_attr "type" "var_shift_rotate,integer")])
(define_insn "*rotlsi3_internal5"
(clobber (match_scratch:SI 4 "=r,r,r,r"))]
""
"@
- {rlnm.|rlwnm.} %4,%1,%2,%m3,%M3
- {rlinm.|rlwinm.} %4,%1,%h2,%m3,%M3
+ rlwnm. %4,%1,%2,%m3,%M3
+ rlwinm. %4,%1,%h2,%m3,%M3
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(and:SI (rotate:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
""
"@
- {rlnm.|rlwnm.} %0,%1,%2,%m3,%M3
- {rlinm.|rlwinm.} %0,%1,%h2,%m3,%M3
+ rlwnm. %0,%1,%2,%m3,%M3
+ rlwinm. %0,%1,%h2,%m3,%M3
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(rotate:SI (match_operand:SI 1 "gpc_reg_operand" "r")
(match_operand:SI 2 "reg_or_cint_operand" "ri")) 0)))]
""
- "{rl%I2nm|rlw%I2nm} %0,%1,%h2,0xff"
+ "rlw%I2nm %0,%1,%h2,0xff"
[(set (attr "cell_micro")
(if_then_else (match_operand:SI 2 "const_int_operand" "")
(const_string "not")
(clobber (match_scratch:SI 3 "=r,r,r,r"))]
""
"@
- {rlnm.|rlwnm.} %3,%1,%2,0xff
- {rlinm.|rlwinm.} %3,%1,%h2,0xff
+ rlwnm. %3,%1,%2,0xff
+ rlwinm. %3,%1,%h2,0xff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(zero_extend:SI (subreg:QI (rotate:SI (match_dup 1) (match_dup 2)) 0)))]
""
"@
- {rlnm.|rlwnm.} %0,%1,%2,0xff
- {rlinm.|rlwinm.} %0,%1,%h2,0xff
+ rlwnm. %0,%1,%2,0xff
+ rlwinm. %0,%1,%h2,0xff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(match_operand:SI 2 "reg_or_cint_operand" "r,i")) 0)))]
""
"@
- {rlnm|rlwnm} %0,%1,%2,0xffff
- {rlinm|rlwinm} %0,%1,%h2,0xffff"
+ rlwnm %0,%1,%2,0xffff
+ rlwinm %0,%1,%h2,0xffff"
[(set_attr "type" "var_shift_rotate,integer")])
(clobber (match_scratch:SI 3 "=r,r,r,r"))]
""
"@
- {rlnm.|rlwnm.} %3,%1,%2,0xffff
- {rlinm.|rlwinm.} %3,%1,%h2,0xffff
+ rlwnm. %3,%1,%2,0xffff
+ rlwinm. %3,%1,%h2,0xffff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(zero_extend:SI (subreg:HI (rotate:SI (match_dup 1) (match_dup 2)) 0)))]
""
"@
- {rlnm.|rlwnm.} %0,%1,%2,0xffff
- {rlinm.|rlwinm.} %0,%1,%h2,0xffff
+ rlwnm. %0,%1,%2,0xffff
+ rlwinm. %0,%1,%h2,0xffff
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(match_operand:SI 2 "reg_or_cint_operand" "r,i")))]
""
"@
- {sl|slw} %0,%1,%2
- {sli|slwi} %0,%1,%h2"
+ slw %0,%1,%2
+ slwi %0,%1,%h2"
[(set_attr "type" "var_shift_rotate,shift")])
(define_insn "*ashlsi3_64"
(match_operand:SI 2 "reg_or_cint_operand" "r,i"))))]
"TARGET_POWERPC64"
"@
- {sl|slw} %0,%1,%2
- {sli|slwi} %0,%1,%h2"
+ slw %0,%1,%2
+ slwi %0,%1,%h2"
[(set_attr "type" "var_shift_rotate,shift")])
(define_insn ""
(clobber (match_scratch:SI 3 "=r,r,r,r"))]
"TARGET_32BIT"
"@
- {sl.|slw.} %3,%1,%2
- {sli.|slwi.} %3,%1,%h2
+ slw. %3,%1,%2
+ slwi. %3,%1,%h2
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(ashift:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
- {sl.|slw.} %0,%1,%2
- {sli.|slwi.} %0,%1,%h2
+ slw. %0,%1,%2
+ slwi. %0,%1,%h2
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(match_operand:SI 2 "const_int_operand" "i"))
(match_operand:SI 3 "mask_operand" "n")))]
"includes_lshift_p (operands[2], operands[3])"
- "{rlinm|rlwinm} %0,%1,%h2,%m3,%M3")
+ "rlwinm %0,%1,%h2,%m3,%M3")
(define_insn ""
[(set (match_operand:CC 0 "cc_reg_operand" "=x,?y")
(clobber (match_scratch:SI 4 "=r,r"))]
"includes_lshift_p (operands[2], operands[3])"
"@
- {rlinm.|rlwinm.} %4,%1,%h2,%m3,%M3
+ rlwinm. %4,%1,%h2,%m3,%M3
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(and:SI (ashift:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
"includes_lshift_p (operands[2], operands[3])"
"@
- {rlinm.|rlwinm.} %0,%1,%h2,%m3,%M3
+ rlwinm. %0,%1,%h2,%m3,%M3
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
""
"@
mr %0,%1
- {sr|srw} %0,%1,%2
- {sri|srwi} %0,%1,%h2"
+ srw %0,%1,%2
+ srwi %0,%1,%h2"
[(set_attr "type" "integer,var_shift_rotate,shift")])
(define_insn "*lshrsi3_64"
(match_operand:SI 2 "reg_or_cint_operand" "r,i"))))]
"TARGET_POWERPC64"
"@
- {sr|srw} %0,%1,%2
- {sri|srwi} %0,%1,%h2"
+ srw %0,%1,%2
+ srwi %0,%1,%h2"
[(set_attr "type" "var_shift_rotate,shift")])
(define_insn ""
"TARGET_32BIT"
"@
mr. %1,%1
- {sr.|srw.} %3,%1,%2
- {sri.|srwi.} %3,%1,%h2
+ srw. %3,%1,%2
+ srwi. %3,%1,%h2
#
#
#"
"TARGET_32BIT"
"@
mr. %0,%1
- {sr.|srw.} %0,%1,%2
- {sri.|srwi.} %0,%1,%h2
+ srw. %0,%1,%2
+ srwi. %0,%1,%h2
#
#
#"
(match_operand:SI 2 "const_int_operand" "i"))
(match_operand:SI 3 "mask_operand" "n")))]
"includes_rshift_p (operands[2], operands[3])"
- "{rlinm|rlwinm} %0,%1,%s2,%m3,%M3")
+ "rlwinm %0,%1,%s2,%m3,%M3")
(define_insn ""
[(set (match_operand:CC 0 "cc_reg_operand" "=x,?y")
(clobber (match_scratch:SI 4 "=r,r"))]
"includes_rshift_p (operands[2], operands[3])"
"@
- {rlinm.|rlwinm.} %4,%1,%s2,%m3,%M3
+ rlwinm. %4,%1,%s2,%m3,%M3
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(and:SI (lshiftrt:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
"includes_rshift_p (operands[2], operands[3])"
"@
- {rlinm.|rlwinm.} %0,%1,%s2,%m3,%M3
+ rlwinm. %0,%1,%s2,%m3,%M3
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(lshiftrt:SI (match_operand:SI 1 "gpc_reg_operand" "r")
(match_operand:SI 2 "const_int_operand" "i")) 0)))]
"includes_rshift_p (operands[2], GEN_INT (255))"
- "{rlinm|rlwinm} %0,%1,%s2,0xff")
+ "rlwinm %0,%1,%s2,0xff")
(define_insn ""
[(set (match_operand:CC 0 "cc_reg_operand" "=x,?y")
(clobber (match_scratch:SI 3 "=r,r"))]
"includes_rshift_p (operands[2], GEN_INT (255))"
"@
- {rlinm.|rlwinm.} %3,%1,%s2,0xff
+ rlwinm. %3,%1,%s2,0xff
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(zero_extend:SI (subreg:QI (lshiftrt:SI (match_dup 1) (match_dup 2)) 0)))]
"includes_rshift_p (operands[2], GEN_INT (255))"
"@
- {rlinm.|rlwinm.} %0,%1,%s2,0xff
+ rlwinm. %0,%1,%s2,0xff
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(lshiftrt:SI (match_operand:SI 1 "gpc_reg_operand" "r")
(match_operand:SI 2 "const_int_operand" "i")) 0)))]
"includes_rshift_p (operands[2], GEN_INT (65535))"
- "{rlinm|rlwinm} %0,%1,%s2,0xffff")
+ "rlwinm %0,%1,%s2,0xffff")
(define_insn ""
[(set (match_operand:CC 0 "cc_reg_operand" "=x,?y")
(clobber (match_scratch:SI 3 "=r,r"))]
"includes_rshift_p (operands[2], GEN_INT (65535))"
"@
- {rlinm.|rlwinm.} %3,%1,%s2,0xffff
+ rlwinm. %3,%1,%s2,0xffff
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(zero_extend:SI (subreg:HI (lshiftrt:SI (match_dup 1) (match_dup 2)) 0)))]
"includes_rshift_p (operands[2], GEN_INT (65535))"
"@
- {rlinm.|rlwinm.} %0,%1,%s2,0xffff
+ rlwinm. %0,%1,%s2,0xffff
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "4,8")])
(match_operand:SI 2 "reg_or_cint_operand" "r,i")))]
""
"@
- {sra|sraw} %0,%1,%2
- {srai|srawi} %0,%1,%h2"
+ sraw %0,%1,%2
+ srawi %0,%1,%h2"
[(set_attr "type" "var_shift_rotate,shift")])
(define_insn "*ashrsi3_64"
(match_operand:SI 2 "reg_or_cint_operand" "r,i"))))]
"TARGET_POWERPC64"
"@
- {sra|sraw} %0,%1,%2
- {srai|srawi} %0,%1,%h2"
+ sraw %0,%1,%2
+ srawi %0,%1,%h2"
[(set_attr "type" "var_shift_rotate,shift")])
(define_insn ""
(clobber (match_scratch:SI 3 "=r,r,r,r"))]
""
"@
- {sra.|sraw.} %3,%1,%2
- {srai.|srawi.} %3,%1,%h2
+ sraw. %3,%1,%2
+ srawi. %3,%1,%h2
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(ashiftrt:SI (match_dup 1) (match_dup 2)))]
""
"@
- {sra.|sraw.} %0,%1,%2
- {srai.|srawi.} %0,%1,%h2
+ sraw. %0,%1,%2
+ srawi. %0,%1,%h2
#
#"
[(set_attr "type" "var_delayed_compare,delayed_compare,var_delayed_compare,delayed_compare")
(match_operand:DF 2 "gpc_reg_operand" "d")))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& !VECTOR_UNIT_VSX_P (DFmode)"
- "{fa|fadd} %0,%1,%2"
+ "fadd %0,%1,%2"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_addsub_d")])
(match_operand:DF 2 "gpc_reg_operand" "d")))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& !VECTOR_UNIT_VSX_P (DFmode)"
- "{fs|fsub} %0,%1,%2"
+ "fsub %0,%1,%2"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_addsub_d")])
(match_operand:DF 2 "gpc_reg_operand" "d")))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& !VECTOR_UNIT_VSX_P (DFmode)"
- "{fm|fmul} %0,%1,%2"
+ "fmul %0,%1,%2"
[(set_attr "type" "dmul")
(set_attr "fp_type" "fp_mul_d")])
(match_operand:DF 2 "gpc_reg_operand" "d")))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT && !TARGET_SIMPLE_FPU
&& !VECTOR_UNIT_VSX_P (DFmode)"
- "{fd|fdiv} %0,%1,%2"
+ "fdiv %0,%1,%2"
[(set_attr "type" "ddiv")])
(define_insn "*fred_fpr"
(match_operand:DF 3 "gpc_reg_operand" "f")))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& VECTOR_UNIT_NONE_P (DFmode)"
- "{fma|fmadd} %0,%1,%2,%3"
+ "fmadd %0,%1,%2,%3"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_maddsub_d")])
(neg:DF (match_operand:DF 3 "gpc_reg_operand" "f"))))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& VECTOR_UNIT_NONE_P (DFmode)"
- "{fms|fmsub} %0,%1,%2,%3"
+ "fmsub %0,%1,%2,%3"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_maddsub_d")])
(match_operand:DF 3 "gpc_reg_operand" "f"))))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& VECTOR_UNIT_NONE_P (DFmode)"
- "{fnma|fnmadd} %0,%1,%2,%3"
+ "fnmadd %0,%1,%2,%3"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_maddsub_d")])
(neg:DF (match_operand:DF 3 "gpc_reg_operand" "f")))))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
&& VECTOR_UNIT_NONE_P (DFmode)"
- "{fnms|fnmsub} %0,%1,%2,%3"
+ "fnmsub %0,%1,%2,%3"
[(set_attr "type" "fp")
(set_attr "fp_type" "fp_maddsub_d")])
(unspec:DI [(fix:SI (match_operand:SFDF 1 "gpc_reg_operand" "d"))]
UNSPEC_FCTIWZ))]
"TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT"
- "{fcirz|fctiwz} %0,%1"
+ "fctiwz %0,%1"
[(set_attr "type" "fp")])
(define_insn "fctiwuz_<mode>"
{
if (WORDS_BIG_ENDIAN)
return (GET_CODE (operands[2])) != CONST_INT
- ? \"{a|addc} %L0,%L1,%L2\;{ae|adde} %0,%1,%2\"
- : \"{ai|addic} %L0,%L1,%2\;{a%G2e|add%G2e} %0,%1\";
+ ? \"addc %L0,%L1,%L2\;adde %0,%1,%2\"
+ : \"addic %L0,%L1,%2\;add%G2e %0,%1\";
else
return (GET_CODE (operands[2])) != CONST_INT
- ? \"{a|addc} %0,%1,%2\;{ae|adde} %L0,%L1,%L2\"
- : \"{ai|addic} %0,%1,%2\;{a%G2e|add%G2e} %L0,%L1\";
+ ? \"addc %0,%1,%2\;adde %L0,%L1,%L2\"
+ : \"addic %0,%1,%2\;add%G2e %L0,%L1\";
}"
[(set_attr "type" "two")
(set_attr "length" "8")])
{
if (WORDS_BIG_ENDIAN)
return (GET_CODE (operands[1]) != CONST_INT)
- ? \"{sf|subfc} %L0,%L2,%L1\;{sfe|subfe} %0,%2,%1\"
- : \"{sfi|subfic} %L0,%L2,%1\;{sf%G1e|subf%G1e} %0,%2\";
+ ? \"subfc %L0,%L2,%L1\;subfe %0,%2,%1\"
+ : \"subfic %L0,%L2,%1\;subf%G1e %0,%2\";
else
return (GET_CODE (operands[1]) != CONST_INT)
- ? \"{sf|subfc} %0,%2,%1\;{sfe|subfe} %L0,%L2,%L1\"
- : \"{sfi|subfic} %0,%2,%1\;{sf%G1e|subf%G1e} %L0,%L2\";
+ ? \"subfc %0,%2,%1\;subfe %L0,%L2,%L1\"
+ : \"subfic %0,%2,%1\;subf%G1e %L0,%L2\";
}"
[(set_attr "type" "two")
(set_attr "length" "8")])
"*
{
return (WORDS_BIG_ENDIAN)
- ? \"{sfi|subfic} %L0,%L1,0\;{sfze|subfze} %0,%1\"
- : \"{sfi|subfic} %0,%1,0\;{sfze|subfze} %L0,%L1\";
+ ? \"subfic %L0,%L1,0\;subfze %0,%1\"
+ : \"subfic %0,%1,0\;subfze %L0,%L1\";
}"
[(set_attr "type" "two")
(set_attr "length" "8")])
(match_operand:SI 2 "const_int_operand" "M,i")))]
"TARGET_32BIT && !TARGET_POWERPC64 && WORDS_BIG_ENDIAN"
"@
- {srai|srawi} %0,%1,31\;{srai|srawi} %L0,%1,%h2
- {sri|srwi} %L0,%L1,%h2\;insrwi %L0,%1,%h2,0\;{srai|srawi} %0,%1,%h2"
+ srawi %0,%1,31\;srawi %L0,%1,%h2
+ srwi %L0,%L1,%h2\;insrwi %L0,%1,%h2,0\;srawi %0,%1,%h2"
[(set_attr "type" "two,three")
(set_attr "length" "8,12")])
(match_operand:SI 2 "gpc_reg_operand" "b")]
UNSPEC_MOVSI_GOT))]
"DEFAULT_ABI == ABI_V4 && flag_pic == 1"
- "{l|lwz} %0,%a1@got(%2)"
+ "lwz %0,%a1@got(%2)"
[(set_attr "type" "load")])
;; Used by sched, shorten_branches and final when the GOT pseudo reg
(mem:SI (lo_sum:SI (match_operand:SI 1 "gpc_reg_operand" "b")
(match_operand 2 "" ""))))]
"TARGET_MACHO && ! TARGET_64BIT"
- "{l|lwz} %0,lo16(%2)(%1)"
+ "lwz %0,lo16(%2)(%1)"
[(set_attr "type" "load")
(set_attr "length" "4")])
(gpc_reg_operand (operands[0], SImode) || gpc_reg_operand (operands[1], SImode))"
"@
mr %0,%1
- {cal|la} %0,%a1
- {l%U1%X1|lwz%U1%X1} %0,%1
- {st%U0%X0|stw%U0%X0} %1,%0
- {lil|li} %0,%1
- {liu|lis} %0,%v1
+ la %0,%a1
+ lwz%U1%X1 %0,%1
+ stw%U0%X0 %1,%0
+ li %0,%1
+ lis %0,%v1
#
mf%1 %0
mt%0 %1
mt%0 %1
- {cror 0,0,0|nop}"
+ nop"
[(set_attr "type" "*,*,load,store,*,*,*,mfjmpr,mtjmpr,*,*")
(set_attr "length" "4,4,4,4,4,4,8,4,4,4,4")])
(gpc_reg_operand (operands[0], SImode) || gpc_reg_operand (operands[1], SImode))"
"@
mr %0,%1
- {cal|la} %0,%a1
- {l%U1%X1|lwz%U1%X1} %0,%1
- {st%U0%X0|stw%U0%X0} %1,%0
- {lil|li} %0,%1
- {liu|lis} %0,%v1
+ la %0,%a1
+ lwz%U1%X1 %0,%1
+ stw%U0%X0 %1,%0
+ li %0,%1
+ lis %0,%v1
#
mf%1 %0
mt%0 %1
mt%0 %1
- {cror 0,0,0|nop}
+ nop
stfs%U0%X0 %1, %0
lfs%U1%X1 %0, %1"
[(set_attr "type" "*,*,load,store,*,*,*,mfjmpr,mtjmpr,*,*,*,*")
(set (match_operand:P 0 "gpc_reg_operand" "=r,r,r") (match_dup 1))]
""
"@
- {cmpi|cmp<wd>i} %2,%0,0
+ cmp<wd>i %2,%0,0
mr. %0,%1
#"
[(set_attr "type" "cmp,compare,cmp")
mr %0,%1
lhz%U1%X1 %0,%1
sth%U0%X0 %1,%0
- {lil|li} %0,%w1
+ li %0,%w1
mf%1 %0
mt%0 %1
- {cror 0,0,0|nop}"
+ nop"
[(set_attr "type" "*,load,store,*,mfjmpr,mtjmpr,*")])
(define_expand "mov<mode>"
mr %0,%1
lbz%U1%X1 %0,%1
stb%U0%X0 %1,%0
- {lil|li} %0,%1
+ li %0,%1
mf%1 %0
mt%0 %1
- {cror 0,0,0|nop}"
+ nop"
[(set_attr "type" "*,load,store,*,mfjmpr,mtjmpr,*")])
\f
;; Here is how to move condition codes around. When we store CC data in
"@
mcrf %0,%1
mtcrf 128,%1
- {rlinm|rlwinm} %1,%1,%F0,0xffffffff\;mtcrf %R0,%1\;{rlinm|rlwinm} %1,%1,%f0,0xffffffff
+ rlwinm %1,%1,%F0,0xffffffff\;mtcrf %R0,%1\;rlwinm %1,%1,%f0,0xffffffff
crxor %0,%0,%0
mfcr %0%Q1
- mfcr %0%Q1\;{rlinm|rlwinm} %0,%0,%f1,0xf0000000
+ mfcr %0%Q1\;rlwinm %0,%0,%f1,0xf0000000
mr %0,%1
- {lil|li} %0,%1
+ li %0,%1
mf%1 %0
mt%0 %1
- {l%U1%X1|lwz%U1%X1} %0,%1
- {st%U0%U1|stw%U0%U1} %1,%0"
+ lwz%U1%X1 %0,%1
+ stw%U0%U1 %1,%0"
[(set (attr "type")
(cond [(eq_attr "alternative" "0,3")
(const_string "cr_logical")
&& (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT)"
"@
mr %0,%1
- {l%U1%X1|lwz%U1%X1} %0,%1
- {st%U0%X0|stw%U0%X0} %1,%0
+ lwz%U1%X1 %0,%1
+ stw%U0%X0 %1,%0
fmr %0,%1
lfs%U1%X1 %0,%1
stfs%U0%X0 %1,%0
mt%0 %1
mf%1 %0
- {cror 0,0,0|nop}
+ nop
#
#"
[(set_attr "type" "*,load,store,fp,fpload,fpstore,mtjmpr,mfjmpr,*,*,*")
mr %0,%1
mt%0 %1
mf%1 %0
- {l%U1%X1|lwz%U1%X1} %0,%1
- {st%U0%X0|stw%U0%X0} %1,%0
- {lil|li} %0,%1
- {liu|lis} %0,%v1
+ lwz%U1%X1 %0,%1
+ stw%U0%X0 %1,%0
+ li %0,%1
+ lis %0,%v1
#
#
- {cror 0,0,0|nop}"
+ nop"
[(set_attr "type" "*,mtjmpr,mfjmpr,load,store,*,*,*,*,*")
(set_attr "length" "4,4,4,4,4,4,4,4,8,4")])
xxlxor %x0,%x0,%x0
mt%0 %1
mf%1 %0
- {cror 0,0,0|nop}
+ nop
#
#
#
xxlxor %x0,%x0,%x0
mt%0 %1
mf%1 %0
- {cror 0,0,0|nop}
+ nop
#
#
#"
#
#
#
- {cror 0,0,0|nop}"
+ nop"
[(set_attr "type" "store,load,*,mtjmpr,mfjmpr,*,*,*,*")
(set_attr "length" "4,4,4,4,4,8,12,16,4")])
\f
fmr %0,%1
mf%1 %0
mt%0 %1
- {cror 0,0,0|nop}
+ nop
mftgpr %0,%1
mffgpr %0,%1"
[(set_attr "type" "store,load,*,*,*,*,fpstore,fpload,fp,mfjmpr,mtjmpr,*,mftgpr,mffgpr")
fmr %0,%1
mf%1 %0
mt%0 %1
- {cror 0,0,0|nop}
+ nop
xxlxor %x0,%x0,%x0"
[(set_attr "type" "store,load,*,*,*,*,fpstore,fpload,fp,mfjmpr,mtjmpr,*,vecsimple")
(set_attr "length" "4,4,4,4,4,20,4,4,4,4,4,4,4")])
gcc_unreachable ();
case 0:
if (TARGET_STRING)
- return \"{stsi|stswi} %1,%P0,16\";
+ return \"stswi %1,%P0,16\";
case 1:
return \"#\";
case 2:
fall through to generating four loads. */
if (TARGET_STRING
&& ! reg_overlap_mentioned_p (operands[0], operands[1]))
- return \"{lsi|lswi} %0,%P1,16\";
+ return \"lswi %0,%P1,16\";
/* ... fall through ... */
case 3:
case 4:
(set (mem:SI (plus:SI (match_dup 1) (const_int 28)))
(match_operand:SI 10 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 9"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
(set (mem:SI (plus:SI (match_dup 1) (const_int 24)))
(match_operand:SI 9 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 8"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
(set (mem:SI (plus:SI (match_dup 1) (const_int 20)))
(match_operand:SI 8 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 7"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
(set (mem:SI (plus:SI (match_dup 1) (const_int 16)))
(match_operand:SI 7 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 6"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
(set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
(match_operand:SI 6 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 5"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
(set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
(match_operand:SI 5 "gpc_reg_operand" "r"))])]
"TARGET_STRING && XVECLEN (operands[0], 0) == 4"
- "{stsi|stswi} %2,%1,%O0"
+ "stswi %2,%1,%O0"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")])
\f
&& (REGNO (operands[0]) < 5 || REGNO (operands[0]) > 12)
&& (REGNO (operands[1]) < 5 || REGNO (operands[1]) > 12)
&& REGNO (operands[4]) == 5"
- "{lsi|lswi} %4,%1,%2\;{stsi|stswi} %4,%0,%2"
+ "lswi %4,%1,%2\;stswi %4,%0,%2"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")
(set_attr "length" "8")])
&& (REGNO (operands[0]) < 5 || REGNO (operands[0]) > 10)
&& (REGNO (operands[1]) < 5 || REGNO (operands[1]) > 10)
&& REGNO (operands[4]) == 5"
- "{lsi|lswi} %4,%1,%2\;{stsi|stswi} %4,%0,%2"
+ "lswi %4,%1,%2\;stswi %4,%0,%2"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")
(set_attr "length" "8")])
&& (REGNO (operands[0]) < 5 || REGNO (operands[0]) > 8)
&& (REGNO (operands[1]) < 5 || REGNO (operands[1]) > 8)
&& REGNO (operands[4]) == 5"
- "{lsi|lswi} %4,%1,%2\;{stsi|stswi} %4,%0,%2"
+ "lswi %4,%1,%2\;stswi %4,%0,%2"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")
(set_attr "length" "8")])
(clobber (match_scratch:SI 5 "=X"))]
"TARGET_STRING && ! TARGET_POWERPC64
&& INTVAL (operands[2]) > 4 && INTVAL (operands[2]) <= 8"
- "{lsi|lswi} %4,%1,%2\;{stsi|stswi} %4,%0,%2"
+ "lswi %4,%1,%2\;stswi %4,%0,%2"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")
(set_attr "length" "8")])
(clobber (match_scratch:SI 4 "=&r"))
(clobber (match_scratch:SI 5 "=X"))]
"TARGET_STRING && INTVAL (operands[2]) > 0 && INTVAL (operands[2]) <= 4"
- "{lsi|lswi} %4,%1,%2\;{stsi|stswi} %4,%0,%2"
+ "lswi %4,%1,%2\;stswi %4,%0,%2"
[(set_attr "type" "store_ux")
(set_attr "cell_micro" "always")
(set_attr "length" "8")])
&& (!avoiding_indexed_address_p (SImode)
|| !gpc_reg_operand (operands[2], SImode))"
"@
- {lux|lwzux} %3,%0,%2
- {lu|lwzu} %3,%2(%0)"
+ lwzux %3,%0,%2
+ lwzu %3,%2(%0)"
[(set_attr "type" "load_ux,load_u")])
(define_insn "*movsi_update2"
|| (REG_P (operands[0])
&& REGNO (operands[0]) == STACK_POINTER_REGNUM))"
"@
- {stux|stwux} %3,%0,%2
- {stu|stwu} %3,%2(%0)"
+ stwux %3,%0,%2
+ stwu %3,%2(%0)"
[(set_attr "type" "store_ux,store_u")])
;; This is an unconditional pattern; needed for stack allocation, even
(plus:SI (match_dup 1) (match_dup 2)))]
""
"@
- {stux|stwux} %3,%0,%2
- {stu|stwu} %3,%2(%0)"
+ stwux %3,%0,%2
+ stwu %3,%2(%0)"
[(set_attr "type" "store_ux,store_u")])
(define_insn "*movhi_update1"
&& (!avoiding_indexed_address_p (SImode)
|| !gpc_reg_operand (operands[2], SImode))"
"@
- {lux|lwzux} %3,%0,%2
- {lu|lwzu} %3,%2(%0)"
+ lwzux %3,%0,%2
+ lwzu %3,%2(%0)"
[(set_attr "type" "load_ux,load_u")])
(define_insn "*movsf_update4"
&& (!avoiding_indexed_address_p (SImode)
|| !gpc_reg_operand (operands[2], SImode))"
"@
- {stux|stwux} %3,%0,%2
- {stu|stwu} %3,%2(%0)"
+ stwux %3,%0,%2
+ stwu %3,%2(%0)"
[(set_attr "type" "store_ux,store_u")])
(define_insn "*movdf_update1"
ASM_GENERATE_INTERNAL_LABEL (buf, \"LCTOC\", 1);
operands[1] = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
operands[2] = gen_rtx_REG (Pmode, 2);
- return \"{l|lwz} %0,%1(%2)\";
+ return \"lwz %0,%1(%2)\";
}"
[(set_attr "type" "load")])
(minus:SI (match_operand:SI 2 "immediate_operand" "s")
(match_operand:SI 3 "immediate_operand" "s")))))]
"TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2"
- "{l|lwz} %0,%2-%3(%1)"
+ "lwz %0,%2-%3(%1)"
[(set_attr "type" "load")])
(define_insn "load_toc_v4_PIC_3b"
(minus:SI (match_operand:SI 2 "symbol_ref_operand" "s")
(match_operand:SI 3 "symbol_ref_operand" "s")))))]
"TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic"
- "{cau|addis} %0,%1,%2-%3@ha")
+ "addis %0,%1,%2-%3@ha")
(define_insn "load_toc_v4_PIC_3c"
[(set (match_operand:SI 0 "gpc_reg_operand" "=r")
(minus:SI (match_operand:SI 2 "symbol_ref_operand" "s")
(match_operand:SI 3 "symbol_ref_operand" "s"))))]
"TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic"
- "{cal %0,%2-%3@l(%1)|addi %0,%1,%2-%3@l}")
+ "addi %0,%1,%2-%3@l")
;; If the TOC is shared over a translation unit, as happens with all
;; the kinds of PIC that we support, we need to restore the TOC
(match_operand:DI 2 "gpc_reg_operand" "b")]
UNSPEC_TOCREL)))]
"TARGET_ELF && TARGET_CMODEL != CMODEL_SMALL"
- "{cau|addis} %0,%2,%1@toc@ha")
+ "addis %0,%2,%1@toc@ha")
(define_insn "*largetoc_high_plus"
[(set (match_operand:DI 0 "gpc_reg_operand" "=b*r")
UNSPEC_TOCREL)
(match_operand 3 "const_int_operand" "n"))))]
"TARGET_ELF && TARGET_CMODEL != CMODEL_SMALL"
- "{cau|addis} %0,%2,%1+%3@toc@ha")
+ "addis %0,%2,%1+%3@toc@ha")
(define_insn "*largetoc_low"
[(set (match_operand:DI 0 "gpc_reg_operand" "=r,r")
(match_operand:DI 2 "" "")))]
"TARGET_ELF && TARGET_CMODEL != CMODEL_SMALL"
"@
- {cal %0,%2@l(%1)|addi %0,%1,%2@l}
- {ai|addic} %0,%1,%2@l")
+ addi %0,%1,%2@l
+ addic %0,%1,%2@l")
(define_insn_and_split "*tocref<mode>"
[(set (match_operand:P 0 "gpc_reg_operand" "=b*r")
(match_operand:P 1 "small_toc_ref" "R"))]
"TARGET_TOC"
- "{cal|la} %0,%a1"
+ "la %0,%a1"
"&& TARGET_ELF && TARGET_CMODEL != CMODEL_SMALL && reload_completed"
[(set (match_dup 0) (high:P (match_dup 1)))
(set (match_dup 0) (lo_sum:P (match_dup 0) (match_dup 1)))])
[(set (match_operand:SI 0 "gpc_reg_operand" "=b*r")
(high:SI (match_operand 1 "" "")))]
"TARGET_ELF && ! TARGET_64BIT"
- "{liu|lis} %0,%1@ha")
+ "lis %0,%1@ha")
(define_insn "elf_low"
[(set (match_operand:SI 0 "gpc_reg_operand" "=r,r")
(match_operand 2 "" "")))]
"TARGET_ELF && ! TARGET_64BIT"
"@
- {cal|la} %0,%2@l(%1)
- {ai|addic} %0,%1,%K2")
+ la %0,%2@l(%1)
+ addic %0,%1,%K2")
\f
;; Call and call_value insns
(define_expand "call"
"*
{
operands[1] = gen_rtx_REG (Pmode, 0);
- return \"{st%U0%X0|stw%U0%X0} %1,%0\";
+ return \"stw%U0%X0 %1,%0\";
}"
[(set_attr "type" "store")
(set_attr "length" "4")])
(unspec:SI [(match_operand:SI 1 "memory_operand" "m")] UNSPEC_SP_SET))
(set (match_scratch:SI 2 "=&r") (const_int 0))]
"TARGET_32BIT"
- "{l%U1%X1|lwz%U1%X1} %2,%1\;{st%U0%X0|stw%U0%X0} %2,%0\;{lil|li} %2,0"
+ "lwz%U1%X1 %2,%1\;stw%U0%X0 %2,%0\;li %2,0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(unspec:DI [(match_operand:DI 1 "memory_operand" "Y")] UNSPEC_SP_SET))
(set (match_scratch:DI 2 "=&r") (const_int 0))]
"TARGET_64BIT"
- "ld%U1%X1 %2,%1\;std%U0%X0 %2,%0\;{lil|li} %2,0"
+ "ld%U1%X1 %2,%1\;std%U0%X0 %2,%0\;li %2,0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(clobber (match_scratch:SI 3 "=&r,&r"))]
"TARGET_32BIT"
"@
- {l%U1%X1|lwz%U1%X1} %3,%1\;{l%U2%X2|lwz%U2%X2} %4,%2\;xor. %3,%3,%4\;{lil|li} %4,0
- {l%U1%X1|lwz%U1%X1} %3,%1\;{l%U2%X2|lwz%U2%X2} %4,%2\;{cmpl|cmplw} %0,%3,%4\;{lil|li} %3,0\;{lil|li} %4,0"
+ lwz%U1%X1 %3,%1\;lwz%U2%X2 %4,%2\;xor. %3,%3,%4\;li %4,0
+ lwz%U1%X1 %3,%1\;lwz%U2%X2 %4,%2\;cmplw %0,%3,%4\;li %3,0\;li %4,0"
[(set_attr "length" "16,20")])
(define_insn "stack_protect_testdi"
(clobber (match_scratch:DI 3 "=&r,&r"))]
"TARGET_64BIT"
"@
- ld%U1%X1 %3,%1\;ld%U2%X2 %4,%2\;xor. %3,%3,%4\;{lil|li} %4,0
- ld%U1%X1 %3,%1\;ld%U2%X2 %4,%2\;cmpld %0,%3,%4\;{lil|li} %3,0\;{lil|li} %4,0"
+ ld%U1%X1 %3,%1\;ld%U2%X2 %4,%2\;xor. %3,%3,%4\;li %4,0
+ ld%U1%X1 %3,%1\;ld%U2%X2 %4,%2\;cmpld %0,%3,%4\;li %3,0\;li %4,0"
[(set_attr "length" "16,20")])
\f
(compare:CC (match_operand:GPR 1 "gpc_reg_operand" "r")
(match_operand:GPR 2 "reg_or_short_operand" "rI")))]
""
- "{cmp%I2|cmp<wd>%I2} %0,%1,%2"
+ "cmp<wd>%I2 %0,%1,%2"
[(set_attr "type" "cmp")])
;; If we are comparing a register for equality with a large constant,
(compare:CCUNS (match_operand:SI 1 "gpc_reg_operand" "r")
(match_operand:SI 2 "reg_or_u_short_operand" "rK")))]
""
- "{cmpl%I2|cmplw%I2} %0,%1,%b2"
+ "cmplw%I2 %0,%1,%b2"
[(set_attr "type" "cmp")])
(define_insn "*cmpdi_internal2"
[(match_operand 2 "cc_reg_operand" "y")
(const_int 0)]))]
""
- "mfcr %0%Q2\;{rlinm|rlwinm} %0,%0,%J1,1"
+ "mfcr %0%Q2\;rlwinm %0,%0,%J1,1"
[(set (attr "type")
(cond [(match_test "TARGET_MFCRF")
(const_string "mfcrf")
[(set (match_operand:SI 0 "gpc_reg_operand" "=r")
(unspec:SI [(match_operand 1 "cc_reg_operand" "y")] UNSPEC_MV_CR_GT))]
"TARGET_HARD_FLOAT && !TARGET_FPRS"
- "mfcr %0\;{rlinm|rlwinm} %0,%0,%D1,31,31"
+ "mfcr %0\;rlwinm %0,%0,%D1,31,31"
[(set_attr "type" "mfcr")
(set_attr "length" "8")])
[(set (match_operand:SI 0 "gpc_reg_operand" "=r")
(unspec:SI [(match_operand 1 "cc_reg_operand" "y")] UNSPEC_MV_CR_OV))]
"TARGET_ISEL"
- "mfcr %0\;{rlinm|rlwinm} %0,%0,%t1,1"
+ "mfcr %0\;rlwinm %0,%0,%t1,1"
[(set_attr "type" "mfcr")
(set_attr "length" "8")])
[(match_operand 2 "cc_reg_operand" "y")
(const_int 0)]))]
"TARGET_POWERPC64"
- "mfcr %0%Q2\;{rlinm|rlwinm} %0,%0,%J1,1"
+ "mfcr %0%Q2\;rlwinm %0,%0,%J1,1"
[(set (attr "type")
(cond [(match_test "TARGET_MFCRF")
(const_string "mfcrf")
(match_op_dup 1 [(match_dup 2) (const_int 0)]))]
"TARGET_32BIT"
"@
- mfcr %3%Q2\;{rlinm.|rlwinm.} %3,%3,%J1,1
+ mfcr %3%Q2\;rlwinm. %3,%3,%J1,1
#"
[(set_attr "type" "delayed_compare")
(set_attr "length" "8,16")])
operands[4] = GEN_INT (count);
operands[5] = GEN_INT (put_bit);
- return \"mfcr %0%Q2\;{rlinm|rlwinm} %0,%0,%4,%5,%5\";
+ return \"mfcr %0%Q2\;rlwinm %0,%0,%4,%5,%5\";
}"
[(set (attr "type")
(cond [(match_test "TARGET_MFCRF")
operands[5] = GEN_INT (count);
operands[6] = GEN_INT (put_bit);
- return \"mfcr %4%Q2\;{rlinm.|rlwinm.} %4,%4,%5,%6,%6\";
+ return \"mfcr %4%Q2\;rlwinm. %4,%4,%5,%6,%6\";
}"
[(set_attr "type" "delayed_compare")
(set_attr "length" "8,16")])
[(match_operand 5 "cc_reg_operand" "y")
(const_int 0)]))]
"REGNO (operands[2]) != REGNO (operands[5])"
- "mfcr %3\;{rlinm|rlwinm} %0,%3,%J1,1\;{rlinm|rlwinm} %3,%3,%J4,1"
+ "mfcr %3\;rlwinm %0,%3,%J1,1\;rlwinm %3,%3,%J4,1"
[(set_attr "type" "mfcr")
(set_attr "length" "12")])
[(match_operand 5 "cc_reg_operand" "y")
(const_int 0)]))]
"TARGET_POWERPC64 && REGNO (operands[2]) != REGNO (operands[5])"
- "mfcr %3\;{rlinm|rlwinm} %0,%3,%J1,1\;{rlinm|rlwinm} %3,%3,%J4,1"
+ "mfcr %3\;rlwinm %0,%3,%J1,1\;rlwinm %3,%3,%J4,1"
[(set_attr "type" "mfcr")
(set_attr "length" "12")])
(match_operand:SI 3 "gpc_reg_operand" "r,r,r,r,r")))]
"TARGET_32BIT"
"@
- xor %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3
- {sfi|subfic} %0,%1,0\;{aze|addze} %0,%3
- {xoril|xori} %0,%1,%b2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3
- {xoriu|xoris} %0,%1,%u2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3
- {sfi|subfic} %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3"
+ xor %0,%1,%2\;subfic %0,%0,0\;addze %0,%3
+ subfic %0,%1,0\;addze %0,%3
+ xori %0,%1,%b2\;subfic %0,%0,0\;addze %0,%3
+ xoris %0,%1,%u2\;subfic %0,%0,0\;addze %0,%3
+ subfic %0,%1,%2\;subfic %0,%0,0\;addze %0,%3"
[(set_attr "type" "three,two,three,three,three")
(set_attr "length" "12,8,12,12,12")])
(clobber (match_scratch:SI 4 "=&r,&r,&r,&r,&r,&r,&r,&r,&r,&r"))]
"TARGET_32BIT && optimize_size"
"@
- xor %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %4,%3
- {sfi|subfic} %4,%1,0\;{aze.|addze.} %4,%3
- {xoril|xori} %4,%1,%b2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %4,%3
- {xoriu|xoris} %4,%1,%u2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %4,%3
- {sfi|subfic} %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %4,%3
+ xor %4,%1,%2\;subfic %4,%4,0\;addze. %4,%3
+ subfic %4,%1,0\;addze. %4,%3
+ xori %4,%1,%b2\;subfic %4,%4,0\;addze. %4,%3
+ xoris %4,%1,%u2\;subfic %4,%4,0\;addze. %4,%3
+ subfic %4,%1,%2\;subfic %4,%4,0\;addze. %4,%3
#
#
#
(plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
"TARGET_32BIT && optimize_size"
"@
- xor %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3
- {sfi|subfic} %0,%1,0\;{aze.|addze.} %0,%3
- {xoril|xori} %0,%1,%b2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3
- {xoriu|xoris} %0,%1,%u2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3
- {sfi|subfic} %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3
+ xor %0,%1,%2\;subfic %0,%0,0\;addze. %0,%3
+ subfic %0,%1,0\;addze. %0,%3
+ xori %0,%1,%b2\;subfic %0,%0,0\;addze. %0,%3
+ xoris %0,%1,%u2\;subfic %0,%0,0\;addze. %0,%3
+ subfic %0,%1,%2\;subfic %0,%0,0\;addze. %0,%3
#
#
#
(neg:P (eq:P (match_operand:P 1 "gpc_reg_operand" "r")
(const_int 0))))]
""
- "{ai|addic} %0,%1,-1\;{sfe|subfe} %0,%0,%0"
+ "addic %0,%1,-1\;subfe %0,%0,%0"
[(set_attr "type" "two")
(set_attr "length" "8")])
(const_int 31)))
(clobber (match_scratch:SI 2 "=&r"))]
"TARGET_32BIT && !TARGET_ISEL"
- "{ai|addic} %2,%1,-1\;{sfe|subfe} %0,%2,%1"
+ "addic %2,%1,-1\;subfe %0,%2,%1"
[(set_attr "type" "two")
(set_attr "length" "8")])
(match_operand:SI 2 "gpc_reg_operand" "r")))
(clobber (match_scratch:SI 3 "=&r"))]
"TARGET_32BIT"
- "{ai|addic} %3,%1,-1\;{aze|addze} %0,%2"
+ "addic %3,%1,-1\;addze %0,%2"
[(set_attr "type" "two")
(set_attr "length" "8")])
(clobber (match_scratch:SI 4 "=X,&r"))]
"TARGET_32BIT"
"@
- {ai|addic} %3,%1,-1\;{aze.|addze.} %3,%2
+ addic %3,%1,-1\;addze. %3,%2
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")])
(clobber (match_scratch:SI 3 "=&r,&r"))]
"TARGET_32BIT"
"@
- {ai|addic} %3,%1,-1\;{aze.|addze.} %0,%2
+ addic %3,%1,-1\;addze. %0,%2
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")])
(leu:P (match_operand:P 1 "gpc_reg_operand" "r")
(match_operand:P 2 "reg_or_short_operand" "rI")))]
""
- "{sf%I2|subf%I2c} %0,%1,%2\;{cal %0,0(0)|li %0,0}\;{ae|adde} %0,%0,%0"
+ "subf%I2c %0,%1,%2\;li %0,0\;adde %0,%0,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(leu:P (match_dup 1) (match_dup 2)))]
""
"@
- {sf%I2|subf%I2c} %0,%1,%2\;{cal %0,0(0)|li %0,0}\;{ae.|adde.} %0,%0,%0
+ subf%I2c %0,%1,%2\;li %0,0\;adde. %0,%0,%0
#"
[(set_attr "type" "compare")
(set_attr "length" "12,16")])
(match_operand:P 2 "reg_or_short_operand" "rI"))
(match_operand:P 3 "gpc_reg_operand" "r")))]
""
- "{sf%I2|subf%I2c} %0,%1,%2\;{aze|addze} %0,%3"
+ "subf%I2c %0,%1,%2\;addze %0,%3"
[(set_attr "type" "two")
(set_attr "length" "8")])
(clobber (match_scratch:SI 4 "=&r,&r"))]
"TARGET_32BIT"
"@
- {sf%I2|subf%I2c} %4,%1,%2\;{aze.|addze.} %4,%3
+ subf%I2c %4,%1,%2\;addze. %4,%3
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")])
(plus:SI (leu:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
"TARGET_32BIT"
"@
- {sf%I2|subf%I2c} %0,%1,%2\;{aze.|addze.} %0,%3
+ subf%I2c %0,%1,%2\;addze. %0,%3
#"
[(set_attr "type" "compare")
(set_attr "length" "8,12")])
(neg:P (leu:P (match_operand:P 1 "gpc_reg_operand" "r")
(match_operand:P 2 "reg_or_short_operand" "rI"))))]
""
- "{sf%I2|subf%I2c} %0,%1,%2\;{sfe|subfe} %0,%0,%0\;nand %0,%0,%0"
+ "subf%I2c %0,%1,%2\;subfe %0,%0,%0\;nand %0,%0,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(match_operand:P 2 "reg_or_short_operand" "rI")))
(match_operand:P 3 "gpc_reg_operand" "r")))]
""
- "{sf%I2|subf%I2c} %0,%1,%2\;{sfe|subfe} %0,%0,%0\;andc %0,%3,%0"
+ "subf%I2c %0,%1,%2\;subfe %0,%0,%0\;andc %0,%3,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(clobber (match_scratch:SI 4 "=&r,&r"))]
"TARGET_32BIT"
"@
- {sf%I2|subf%I2c} %4,%1,%2\;{sfe|subfe} %4,%4,%4\;andc. %4,%3,%4
+ subf%I2c %4,%1,%2\;subfe %4,%4,%4\;andc. %4,%3,%4
#"
[(set_attr "type" "compare")
(set_attr "length" "12,16")])
(and:SI (neg:SI (leu:SI (match_dup 1) (match_dup 2))) (match_dup 3)))]
"TARGET_32BIT"
"@
- {sf%I2|subf%I2c} %0,%1,%2\;{sfe|subfe} %0,%0,%0\;andc. %0,%3,%0
+ subf%I2c %0,%1,%2\;subfe %0,%0,%0\;andc. %0,%3,%0
#"
[(set_attr "type" "compare")
(set_attr "length" "12,16")])
(match_operand:P 2 "reg_or_neg_short_operand" "r,P"))))]
""
"@
- {sf|subfc} %0,%2,%1\;{sfe|subfe} %0,%0,%0
- {ai|addic} %0,%1,%n2\;{sfe|subfe} %0,%0,%0"
+ subfc %0,%2,%1\;subfe %0,%0,%0
+ addic %0,%1,%n2\;subfe %0,%0,%0"
[(set_attr "type" "two")
(set_attr "length" "8")])
(match_operand:P 2 "reg_or_neg_short_operand" "r,P")))]
""
"@
- {sf|subfc} %0,%2,%1\;{cal %0,0(0)|li %0,0}\;{ae|adde} %0,%0,%0
- {ai|addic} %0,%1,%n2\;{cal %0,0(0)|li %0,0}\;{ae|adde} %0,%0,%0"
+ subfc %0,%2,%1\;li %0,0\;adde %0,%0,%0
+ addic %0,%1,%n2\;li %0,0\;adde %0,%0,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(geu:P (match_dup 1) (match_dup 2)))]
""
"@
- {sf|subfc} %0,%2,%1\;{cal %0,0(0)|li %0,0}\;{ae.|adde.} %0,%0,%0
- {ai|addic} %0,%1,%n2\;{cal %0,0(0)|li %0,0}\;{ae.|adde.} %0,%0,%0
+ subfc %0,%2,%1\;li %0,0\;adde. %0,%0,%0
+ addic %0,%1,%n2\;li %0,0\;adde. %0,%0,%0
#
#"
[(set_attr "type" "compare")
(match_operand:P 3 "gpc_reg_operand" "r,r")))]
""
"@
- {sf|subfc} %0,%2,%1\;{aze|addze} %0,%3
- {ai|addic} %0,%1,%n2\;{aze|addze} %0,%3"
+ subfc %0,%2,%1\;addze %0,%3
+ addic %0,%1,%n2\;addze %0,%3"
[(set_attr "type" "two")
(set_attr "length" "8")])
(clobber (match_scratch:SI 4 "=&r,&r,&r,&r"))]
"TARGET_32BIT"
"@
- {sf|subfc} %4,%2,%1\;{aze.|addze.} %4,%3
- {ai|addic} %4,%1,%n2\;{aze.|addze.} %4,%3
+ subfc %4,%2,%1\;addze. %4,%3
+ addic %4,%1,%n2\;addze. %4,%3
#
#"
[(set_attr "type" "compare")
(plus:SI (geu:SI (match_dup 1) (match_dup 2)) (match_dup 3)))]
"TARGET_32BIT"
"@
- {sf|subfc} %0,%2,%1\;{aze.|addze.} %0,%3
- {ai|addic} %0,%1,%n2\;{aze.|addze.} %0,%3
+ subfc %0,%2,%1\;addze. %0,%3
+ addic %0,%1,%n2\;addze. %0,%3
#
#"
[(set_attr "type" "compare")
(match_operand:P 2 "reg_or_short_operand" "r,I"))))]
""
"@
- {sf|subfc} %0,%2,%1\;{sfe|subfe} %0,%0,%0\;nand %0,%0,%0
- {sfi|subfic} %0,%1,-1\;{a%I2|add%I2c} %0,%0,%2\;{sfe|subfe} %0,%0,%0"
+ subfc %0,%2,%1\;subfe %0,%0,%0\;nand %0,%0,%0
+ subfic %0,%1,-1\;add%I2c %0,%0,%2\;subfe %0,%0,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(match_operand:P 3 "gpc_reg_operand" "r,r")))]
""
"@
- {sf|subfc} %0,%2,%1\;{sfe|subfe} %0,%0,%0\;andc %0,%3,%0
- {ai|addic} %0,%1,%n2\;{sfe|subfe} %0,%0,%0\;andc %0,%3,%0"
+ subfc %0,%2,%1\;subfe %0,%0,%0\;andc %0,%3,%0
+ addic %0,%1,%n2\;subfe %0,%0,%0\;andc %0,%3,%0"
[(set_attr "type" "three")
(set_attr "length" "12")])
(clobber (match_scratch:SI 4 "=&r,&r,&r,&r"))]
"TARGET_32BIT"
"@
- {sf|subfc} %4,%2,%1\;{sfe|subfe} %4,%4,%4\;andc. %4,%3,%4
- {ai|addic} %4,%1,%n2\;{sfe|subfe} %4,%4,%4\;andc. %4,%3,%4
+ subfc %4,%2,%1\;subfe %4,%4,%4\;andc. %4,%3,%4
+ addic %4,%1,%n2\;subfe %4,%4,%4\;andc. %4,%3,%4
#
#"
[(set_attr "type" "compare")
(and:SI (neg:SI (geu:SI (match_dup 1) (match_dup 2))) (match_dup 3)))]
"TARGET_32BIT"
"@
- {sf|subfc} %0,%2,%1\;{sfe|subfe} %0,%0,%0\;andc. %0,%3,%0
- {ai|addic} %0,%1,%n2\;{sfe|subfe} %0,%0,%0\;andc. %0,%3,%0
+ subfc %0,%2,%1\;subfe %0,%0,%0\;andc. %0,%3,%0
+ addic %0,%1,%n2\;subfe %0,%0,%0\;andc. %0,%3,%0
#
#"
[(set_attr "type" "compare")
(const_int 0))
(match_operand:P 2 "gpc_reg_operand" "r")))]
""
- "{a|addc} %0,%1,%1\;{sfe|subfe} %0,%1,%0\;{aze|addze} %0,%2"
+ "addc %0,%1,%1\;subfe %0,%1,%0\;addze %0,%2"
[(set_attr "type" "three")
(set_attr "length" "12")])
(clobber (match_scratch:SI 3 "=&r,&r"))]
"TARGET_32BIT"
"@
- {a|addc} %3,%1,%1\;{sfe|subfe} %3,%1,%3\;{aze.|addze.} %3,%2
+ addc %3,%1,%1\;subfe %3,%1,%3\;addze. %3,%2
#"
[(set_attr "type" "compare")
(set_attr "length" "12,16")])
(plus:SI (gt:SI (match_dup 1) (const_int 0)) (match_dup 2)))]
"TARGET_32BIT"
"@
- {a|addc} %0,%1,%1\;{sfe|subfe} %0,%1,%0\;{aze.|addze.} %0,%2
+ addc %0,%1,%1\;subfe %0,%1,%0\;addze. %0,%2
#"
[(set_attr "type" "compare")
(set_attr "length" "12,16")])
(neg:P (gtu:P (match_operand:P 1 "gpc_reg_operand" "r")
(match_operand:P 2 "reg_or_short_operand" "rI"))))]
""
- "{sf%I2|subf%I2c} %0,%1,%2\;{sfe|subfe} %0,%0,%0"
+ "subf%I2c %0,%1,%2\;subfe %0,%0,%0"
[(set_attr "type" "two")
(set_attr "length" "8")])
(const_int 0)])
(const_int 0)))]
""
- "{crnor %E0,%j1,%j1|crnot %E0,%j1}"
+ "crnot %E0,%j1"
[(set_attr "type" "cr_logical,delayed_cr")])
;; If we are comparing the result of two comparisons, this can be done
(define_insn "<return_str>return"
[(any_return)]
"<return_pred>"
- "{br|blr}"
+ "blr"
[(set_attr "type" "jmpreg")])
(define_expand "indirect_jump"
""
"@
bctr
- {br|blr}"
+ blr"
[(set_attr "type" "jmpreg")])
;; Table jump for switch statements:
""
"@
bctr
- {br|blr}"
+ blr"
[(set_attr "type" "jmpreg")])
(define_insn "nop"
[(const_int 0)]
""
- "{cror 0,0,0|nop}")
+ "nop")
(define_insn "group_ending_nop"
[(unspec [(const_int 0)] UNSPEC_GRP_END_NOP)]
if (which_alternative != 0)
return \"#\";
else if (get_attr_length (insn) == 4)
- return \"{bdn|bdnz} %l0\";
+ return \"bdnz %l0\";
else
return \"bdz $+8\;b %l0\";
}"
else if (get_attr_length (insn) == 4)
return \"bdz %l0\";
else
- return \"{bdn|bdnz} $+8\;b %l0\";
+ return \"bdnz $+8\;b %l0\";
}"
[(set_attr "type" "branch")
(set_attr "length" "*,12,16,16")])
else if (get_attr_length (insn) == 4)
return \"bdz %l0\";
else
- return \"{bdn|bdnz} $+8\;b %l0\";
+ return \"bdnz $+8\;b %l0\";
}"
[(set_attr "type" "branch")
(set_attr "length" "*,12,16,16")])
if (which_alternative != 0)
return \"#\";
else if (get_attr_length (insn) == 4)
- return \"{bdn|bdnz} %l0\";
+ return \"bdnz %l0\";
else
return \"bdz $+8\;b %l0\";
}"
(define_insn "trap"
[(trap_if (const_int 1) (const_int 0))]
""
- "{t 31,0,0|trap}"
+ "trap"
[(set_attr "type" "trap")])
(define_expand "ctrap<mode>4"
(match_operand:GPR 2 "reg_or_short_operand" "rI")])
(const_int 0))]
""
- "{t|t<wd>}%V0%I2 %1,%2"
+ "t<wd>%V0%I2 %1,%2"
[(set_attr "type" "trap")])
\f
;; Insns related to generating the function prologue and epilogue.
[(set (match_operand:SI 1 "memory_operand" "=m")
(match_operand:SI 2 "gpc_reg_operand" "r"))])]
"TARGET_MULTIPLE"
- "{stm|stmw} %2,%1"
+ "stmw %2,%1"
[(set_attr "type" "store_ux")])
; The following comment applies to:
[(set (match_operand:SI 1 "gpc_reg_operand" "=r")
(match_operand:SI 2 "memory_operand" "m"))])]
"TARGET_MULTIPLE"
- "{lm|lmw} %1,%2"
+ "lmw %1,%2"
[(set_attr "type" "load_ux")
(set_attr "cell_micro" "always")])