1 ;;- Machine description for ARM for GNU compiler
;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
[(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
; operand 0 is the result,
; operand 1 the parameter.
(UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
; operand 0 is the result,
; operand 1 the parameter.
(UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
; operand 0 is the first register,
; subsequent registers are in parallel (use ...)
(UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
; usage, that is, we will add the pic_register
; value to it before trying to dereference it.
(UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
; The last operand is the number of a PIC_LABEL
; that points at the containing instruction.
(UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
; being scheduled before the stack adjustment insn.
(UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
; this unspec is used to prevent the deletion of
; instructions setting registers for EH handling
; and stack frame generation. Operand 0 is the
(UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
(UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
(UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
(UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
(UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
(UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
(UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
(UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
(UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
(UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
(UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
(UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
(UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
(UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
(UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
(UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
; generate correct unwind information.
(UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
; correctly for PIC usage.
(UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
; a given symbolic address.
(UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
(UNSPEC_RBIT 26) ; rbit operation.
(UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
; another symbolic address.
109 ;; UNSPEC_VOLATILE Usage:
[(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
(VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
; instruction epilogue sequence that isn't expanded
; into normal RTL. Used for both normal and sibcall
(VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
; for inlined constants.
(VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
(VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
(VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
(VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
(VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
(VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
(VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
(VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
(VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
(VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
(VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
(VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
(VUNSPEC_EH_RETURN 20); Use to override the return address for exception
143 ;;---------------------------------------------------------------------------
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
152 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
154 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
155 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
157 ;; Operand number of an input operand that is shifted. Zero if the
158 ;; given instruction does not shift one of its input operands.
159 (define_attr "shift" "" (const_int 0))
161 ; Floating Point Unit. If we only have floating point emulation, then there
162 ; is no point in scheduling the floating point insns. (Well, for best
163 ; performance we should try and group them together).
164 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
165 (const (symbol_ref "arm_fpu_attr")))
167 ; LENGTH of an instruction (in bytes)
168 (define_attr "length" "" (const_int 4))
170 ; The architecture which supports the instruction (or alternative).
171 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
172 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
173 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
174 ; arm_arch6. This attribute is used to compute attribute "enabled",
175 ; use type "any" to enable an alternative in all cases.
176 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
177 (const_string "any"))
179 (define_attr "arch_enabled" "no,yes"
180 (cond [(eq_attr "arch" "any")
183 (and (eq_attr "arch" "a")
184 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
187 (and (eq_attr "arch" "t")
188 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
191 (and (eq_attr "arch" "t1")
192 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
195 (and (eq_attr "arch" "t2")
196 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
199 (and (eq_attr "arch" "32")
200 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
203 (and (eq_attr "arch" "v6")
204 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
207 (and (eq_attr "arch" "nov6")
208 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
209 (const_string "yes")]
210 (const_string "no")))
212 ; Allows an insn to disable certain alternatives for reasons other than
214 (define_attr "insn_enabled" "no,yes"
215 (const_string "yes"))
217 ; Enable all alternatives that are both arch_enabled and insn_enabled.
218 (define_attr "enabled" "no,yes"
219 (if_then_else (eq_attr "insn_enabled" "yes")
220 (attr "arch_enabled")
221 (const_string "no")))
223 ; POOL_RANGE is how far away from a constant pool entry that this insn
224 ; can be placed. If the distance is zero, then this insn will never
225 ; reference the pool.
226 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
227 ; before its address.
228 (define_attr "arm_pool_range" "" (const_int 0))
229 (define_attr "thumb2_pool_range" "" (const_int 0))
230 (define_attr "arm_neg_pool_range" "" (const_int 0))
231 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
233 (define_attr "pool_range" ""
234 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
235 (attr "arm_pool_range")))
236 (define_attr "neg_pool_range" ""
237 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
238 (attr "arm_neg_pool_range")))
240 ; An assembler sequence may clobber the condition codes without us knowing.
241 ; If such an insn references the pool, then we have no way of knowing how,
242 ; so use the most conservative value for pool_range.
243 (define_asm_attributes
244 [(set_attr "conds" "clob")
245 (set_attr "length" "4")
246 (set_attr "pool_range" "250")])
248 ;; The instruction used to implement a particular pattern. This
249 ;; information is used by pipeline descriptions to provide accurate
250 ;; scheduling information.
253 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
254 (const_string "other"))
256 ; TYPE attribute is used to detect floating point instructions which, if
257 ; running on a co-processor can run in parallel with other, basic instructions
258 ; If write-buffer scheduling is enabled then it can also be used in the
259 ; scheduling of writes.
261 ; Classification of each insn
262 ; Note: vfp.md has different meanings for some of these, and some further
263 ; types as well. See that file for details.
264 ; alu any alu instruction that doesn't hit memory or fp
265 ; regs or have a shifted source operand
266 ; alu_shift any data instruction that doesn't hit memory or fp
267 ; regs, but has a source operand shifted by a constant
268 ; alu_shift_reg any data instruction that doesn't hit memory or fp
269 ; regs, but has a source operand shifted by a register value
270 ; mult a multiply instruction
271 ; block blockage insn, this blocks all functional units
272 ; float a floating point arithmetic operation (subject to expansion)
273 ; fdivd DFmode floating point division
274 ; fdivs SFmode floating point division
275 ; fmul Floating point multiply
276 ; ffmul Fast floating point multiply
277 ; farith Floating point arithmetic (4 cycle)
278 ; ffarith Fast floating point arithmetic (2 cycle)
279 ; float_em a floating point arithmetic operation that is normally emulated
280 ; even on a machine with an fpa.
281 ; f_load a floating point load from memory
282 ; f_store a floating point store to memory
283 ; f_load[sd] single/double load from memory
284 ; f_store[sd] single/double store to memory
285 ; f_flag a transfer of co-processor flags to the CPSR
286 ; f_mem_r a transfer of a floating point register to a real reg via mem
287 ; r_mem_f the reverse of f_mem_r
288 ; f_2_r fast transfer float to arm (no memory needed)
289 ; r_2_f fast transfer arm to float
290 ; f_cvt convert floating<->integral
292 ; call a subroutine call
293 ; load_byte load byte(s) from memory to arm registers
294 ; load1 load 1 word from memory to arm registers
295 ; load2 load 2 words from memory to arm registers
296 ; load3 load 3 words from memory to arm registers
297 ; load4 load 4 words from memory to arm registers
298 ; store store 1 word to memory from arm registers
299 ; store2 store 2 words
300 ; store3 store 3 words
301 ; store4 store 4 (or more) words
302 ; Additions for Cirrus Maverick co-processor:
303 ; mav_farith Floating point arithmetic (4 cycle)
304 ; mav_dmult Double multiplies (7 cycle)
308 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
310 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
311 (const_string "mult")
312 (const_string "alu")))
314 ; Load scheduling, set from the arm_ld_sched variable
315 ; initialized by arm_override_options()
316 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
318 ;; Classification of NEON instructions for scheduling purposes.
319 ;; Do not set this attribute and the "type" attribute together in
320 ;; any one instruction pattern.
321 (define_attr "neon_type"
332 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
333 neon_mul_qqq_8_16_32_ddd_32,\
334 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
335 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
337 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
338 neon_mla_qqq_32_qqd_32_scalar,\
339 neon_mul_ddd_16_scalar_32_16_long_scalar,\
340 neon_mul_qqd_32_scalar,\
341 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
346 neon_vqshl_vrshl_vqrshl_qqq,\
348 neon_fp_vadd_ddd_vabs_dd,\
349 neon_fp_vadd_qqq_vabs_qq,\
355 neon_fp_vmla_ddd_scalar,\
356 neon_fp_vmla_qqq_scalar,\
357 neon_fp_vrecps_vrsqrts_ddd,\
358 neon_fp_vrecps_vrsqrts_qqq,\
366 neon_vld2_2_regs_vld1_vld2_all_lanes,\
369 neon_vst1_1_2_regs_vst2_2_regs,\
371 neon_vst2_4_regs_vst3_vst4,\
373 neon_vld1_vld2_lane,\
374 neon_vld3_vld4_lane,\
375 neon_vst1_vst2_lane,\
376 neon_vst3_vst4_lane,\
377 neon_vld3_vld4_all_lanes,\
385 (const_string "none"))
387 ; condition codes: this one is used by final_prescan_insn to speed up
388 ; conditionalizing instructions. It saves having to scan the rtl to see if
389 ; it uses or alters the condition codes.
391 ; USE means that the condition codes are used by the insn in the process of
392 ; outputting code, this means (at present) that we can't use the insn in
395 ; SET means that the purpose of the insn is to set the condition codes in a
396 ; well defined manner.
398 ; CLOB means that the condition codes are altered in an undefined manner, if
399 ; they are altered at all
; UNCONDITIONAL means the instruction cannot be conditionally executed.
403 ; NOCOND means that the condition codes are neither altered nor affect the
404 ; output of this insn
406 (define_attr "conds" "use,set,clob,unconditional,nocond"
408 (ior (eq_attr "is_thumb1" "yes")
409 (eq_attr "type" "call"))
410 (const_string "clob")
411 (if_then_else (eq_attr "neon_type" "none")
412 (const_string "nocond")
413 (const_string "unconditional"))))
415 ; Predicable means that the insn can be conditionally executed based on
416 ; an automatically added predicate (additional patterns are generated by
417 ; gen...). We default to 'no' because no Thumb patterns match this rule
418 ; and not all ARM patterns do.
419 (define_attr "predicable" "no,yes" (const_string "no"))
421 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
422 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
423 ; suffer blockages enough to warrant modelling this (and it can adversely
424 ; affect the schedule).
425 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
427 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
428 ; to stall the processor. Used with model_wbuf above.
429 (define_attr "write_conflict" "no,yes"
430 (if_then_else (eq_attr "type"
431 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
433 (const_string "no")))
435 ; Classify the insns into those that take one cycle and those that take more
436 ; than one on the main cpu execution unit.
437 (define_attr "core_cycles" "single,multi"
438 (if_then_else (eq_attr "type"
439 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
440 (const_string "single")
441 (const_string "multi")))
443 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
444 ;; distant label. Only applicable to Thumb code.
445 (define_attr "far_jump" "yes,no" (const_string "no"))
448 ;; The number of machine instructions this pattern expands to.
449 ;; Used for Thumb-2 conditional execution.
450 (define_attr "ce_count" "" (const_int 1))
452 ;;---------------------------------------------------------------------------
455 (include "iterators.md")
457 ;;---------------------------------------------------------------------------
460 (include "predicates.md")
461 (include "constraints.md")
463 ;;---------------------------------------------------------------------------
464 ;; Pipeline descriptions
466 ;; Processor type. This is created automatically from arm-cores.def.
467 (include "arm-tune.md")
469 (define_attr "tune_cortexr4" "yes,no"
471 (eq_attr "tune" "cortexr4,cortexr4f")
473 (const_string "no"))))
475 ;; True if the generic scheduling description should be used.
477 (define_attr "generic_sched" "yes,no"
479 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
480 (eq_attr "tune_cortexr4" "yes"))
482 (const_string "yes"))))
484 (define_attr "generic_vfp" "yes,no"
486 (and (eq_attr "fpu" "vfp")
487 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
488 (eq_attr "tune_cortexr4" "no"))
490 (const_string "no"))))
492 (include "arm-generic.md")
493 (include "arm926ejs.md")
494 (include "arm1020e.md")
495 (include "arm1026ejs.md")
496 (include "arm1136jfs.md")
497 (include "cortex-a8.md")
498 (include "cortex-a9.md")
499 (include "cortex-r4.md")
500 (include "cortex-r4f.md")
504 ;;---------------------------------------------------------------------------
509 ;; Note: For DImode insns, there is normally no reason why operands should
510 ;; not be in the same register, what we don't want is for something being
511 ;; written to partially overlap something that is an input.
;; Cirrus 64-bit additions should not be split because we have a native
;; 64-bit addition instruction.
515 (define_expand "adddi3"
517 [(set (match_operand:DI 0 "s_register_operand" "")
518 (plus:DI (match_operand:DI 1 "s_register_operand" "")
519 (match_operand:DI 2 "s_register_operand" "")))
520 (clobber (reg:CC CC_REGNUM))])]
523 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
525 if (!cirrus_fp_register (operands[0], DImode))
526 operands[0] = force_reg (DImode, operands[0]);
527 if (!cirrus_fp_register (operands[1], DImode))
528 operands[1] = force_reg (DImode, operands[1]);
529 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
535 if (GET_CODE (operands[1]) != REG)
536 operands[1] = force_reg (DImode, operands[1]);
537 if (GET_CODE (operands[2]) != REG)
538 operands[2] = force_reg (DImode, operands[2]);
543 (define_insn "*thumb1_adddi3"
544 [(set (match_operand:DI 0 "register_operand" "=l")
545 (plus:DI (match_operand:DI 1 "register_operand" "%0")
546 (match_operand:DI 2 "register_operand" "l")))
547 (clobber (reg:CC CC_REGNUM))
550 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
551 [(set_attr "length" "4")]
554 (define_insn_and_split "*arm_adddi3"
555 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
556 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
557 (match_operand:DI 2 "s_register_operand" "r, 0")))
558 (clobber (reg:CC CC_REGNUM))]
559 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
561 "TARGET_32BIT && reload_completed
562 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
563 [(parallel [(set (reg:CC_C CC_REGNUM)
564 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
566 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
567 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
568 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
571 operands[3] = gen_highpart (SImode, operands[0]);
572 operands[0] = gen_lowpart (SImode, operands[0]);
573 operands[4] = gen_highpart (SImode, operands[1]);
574 operands[1] = gen_lowpart (SImode, operands[1]);
575 operands[5] = gen_highpart (SImode, operands[2]);
576 operands[2] = gen_lowpart (SImode, operands[2]);
578 [(set_attr "conds" "clob")
579 (set_attr "length" "8")]
582 (define_insn_and_split "*adddi_sesidi_di"
583 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
584 (plus:DI (sign_extend:DI
585 (match_operand:SI 2 "s_register_operand" "r,r"))
586 (match_operand:DI 1 "s_register_operand" "0,r")))
587 (clobber (reg:CC CC_REGNUM))]
588 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
590 "TARGET_32BIT && reload_completed"
591 [(parallel [(set (reg:CC_C CC_REGNUM)
592 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
594 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
595 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
598 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
601 operands[3] = gen_highpart (SImode, operands[0]);
602 operands[0] = gen_lowpart (SImode, operands[0]);
603 operands[4] = gen_highpart (SImode, operands[1]);
604 operands[1] = gen_lowpart (SImode, operands[1]);
605 operands[2] = gen_lowpart (SImode, operands[2]);
607 [(set_attr "conds" "clob")
608 (set_attr "length" "8")]
611 (define_insn_and_split "*adddi_zesidi_di"
612 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
613 (plus:DI (zero_extend:DI
614 (match_operand:SI 2 "s_register_operand" "r,r"))
615 (match_operand:DI 1 "s_register_operand" "0,r")))
616 (clobber (reg:CC CC_REGNUM))]
617 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
619 "TARGET_32BIT && reload_completed"
620 [(parallel [(set (reg:CC_C CC_REGNUM)
621 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
623 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
624 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
625 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
628 operands[3] = gen_highpart (SImode, operands[0]);
629 operands[0] = gen_lowpart (SImode, operands[0]);
630 operands[4] = gen_highpart (SImode, operands[1]);
631 operands[1] = gen_lowpart (SImode, operands[1]);
632 operands[2] = gen_lowpart (SImode, operands[2]);
634 [(set_attr "conds" "clob")
635 (set_attr "length" "8")]
638 (define_expand "addsi3"
639 [(set (match_operand:SI 0 "s_register_operand" "")
640 (plus:SI (match_operand:SI 1 "s_register_operand" "")
641 (match_operand:SI 2 "reg_or_int_operand" "")))]
644 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
646 arm_split_constant (PLUS, SImode, NULL_RTX,
647 INTVAL (operands[2]), operands[0], operands[1],
648 optimize && can_create_pseudo_p ());
654 ; If there is a scratch available, this will be faster than synthesizing the
657 [(match_scratch:SI 3 "r")
658 (set (match_operand:SI 0 "arm_general_register_operand" "")
659 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
660 (match_operand:SI 2 "const_int_operand" "")))]
662 !(const_ok_for_arm (INTVAL (operands[2]))
663 || const_ok_for_arm (-INTVAL (operands[2])))
664 && const_ok_for_arm (~INTVAL (operands[2]))"
665 [(set (match_dup 3) (match_dup 2))
666 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
670 ;; The r/r/k alternative is required when reloading the address
671 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
672 ;; put the duplicated register first, and not try the commutative version.
673 (define_insn_and_split "*arm_addsi3"
674 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
675 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
676 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
686 && GET_CODE (operands[2]) == CONST_INT
687 && !(const_ok_for_arm (INTVAL (operands[2]))
688 || const_ok_for_arm (-INTVAL (operands[2])))
689 && (reload_completed || !arm_eliminable_register (operands[1]))"
690 [(clobber (const_int 0))]
692 arm_split_constant (PLUS, SImode, curr_insn,
693 INTVAL (operands[2]), operands[0],
697 [(set_attr "length" "4,4,4,4,4,16")
698 (set_attr "predicable" "yes")]
701 (define_insn_and_split "*thumb1_addsi3"
702 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
703 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
704 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
707 static const char * const asms[] =
709 \"add\\t%0, %0, %2\",
710 \"sub\\t%0, %0, #%n2\",
711 \"add\\t%0, %1, %2\",
712 \"add\\t%0, %0, %2\",
713 \"add\\t%0, %0, %2\",
714 \"add\\t%0, %1, %2\",
715 \"add\\t%0, %1, %2\",
720 if ((which_alternative == 2 || which_alternative == 6)
721 && GET_CODE (operands[2]) == CONST_INT
722 && INTVAL (operands[2]) < 0)
723 return \"sub\\t%0, %1, #%n2\";
724 return asms[which_alternative];
726 "&& reload_completed && CONST_INT_P (operands[2])
727 && ((operands[1] != stack_pointer_rtx
728 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
729 || (operands[1] == stack_pointer_rtx
730 && INTVAL (operands[2]) > 1020))"
731 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
732 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
734 HOST_WIDE_INT offset = INTVAL (operands[2]);
735 if (operands[1] == stack_pointer_rtx)
741 else if (offset < -255)
744 operands[3] = GEN_INT (offset);
745 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
747 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
750 ;; Reloading and elimination of the frame pointer can
751 ;; sometimes cause this optimization to be missed.
753 [(set (match_operand:SI 0 "arm_general_register_operand" "")
754 (match_operand:SI 1 "const_int_operand" ""))
756 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
758 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
759 && (INTVAL (operands[1]) & 3) == 0"
760 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
764 (define_insn "*addsi3_compare0"
765 [(set (reg:CC_NOOV CC_REGNUM)
767 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
768 (match_operand:SI 2 "arm_add_operand" "rI,L"))
770 (set (match_operand:SI 0 "s_register_operand" "=r,r")
771 (plus:SI (match_dup 1) (match_dup 2)))]
775 sub%.\\t%0, %1, #%n2"
776 [(set_attr "conds" "set")]
779 (define_insn "*addsi3_compare0_scratch"
780 [(set (reg:CC_NOOV CC_REGNUM)
782 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
783 (match_operand:SI 1 "arm_add_operand" "rI,L"))
789 [(set_attr "conds" "set")]
792 (define_insn "*compare_negsi_si"
793 [(set (reg:CC_Z CC_REGNUM)
795 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
796 (match_operand:SI 1 "s_register_operand" "r")))]
799 [(set_attr "conds" "set")]
802 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
803 ;; addend is a constant.
804 (define_insn "*cmpsi2_addneg"
805 [(set (reg:CC CC_REGNUM)
807 (match_operand:SI 1 "s_register_operand" "r,r")
808 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
809 (set (match_operand:SI 0 "s_register_operand" "=r,r")
810 (plus:SI (match_dup 1)
811 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
812 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
815 sub%.\\t%0, %1, #%n3"
816 [(set_attr "conds" "set")]
819 ;; Convert the sequence
821 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
825 ;; bcs dest ((unsigned)rn >= 1)
826 ;; similarly for the beq variant using bcc.
827 ;; This is a common looping idiom (while (n--))
829 [(set (match_operand:SI 0 "arm_general_register_operand" "")
830 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
832 (set (match_operand 2 "cc_register" "")
833 (compare (match_dup 0) (const_int -1)))
835 (if_then_else (match_operator 3 "equality_operator"
836 [(match_dup 2) (const_int 0)])
837 (match_operand 4 "" "")
838 (match_operand 5 "" "")))]
839 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
843 (match_dup 1) (const_int 1)))
844 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
846 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
849 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
850 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
853 operands[2], const0_rtx);"
856 ;; The next four insns work because they compare the result with one of
857 ;; the operands, and we know that the use of the condition code is
858 ;; either GEU or LTU, so we can use the carry flag from the addition
859 ;; instead of doing the compare a second time.
860 (define_insn "*addsi3_compare_op1"
861 [(set (reg:CC_C CC_REGNUM)
863 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
864 (match_operand:SI 2 "arm_add_operand" "rI,L"))
866 (set (match_operand:SI 0 "s_register_operand" "=r,r")
867 (plus:SI (match_dup 1) (match_dup 2)))]
871 sub%.\\t%0, %1, #%n2"
872 [(set_attr "conds" "set")]
875 (define_insn "*addsi3_compare_op2"
876 [(set (reg:CC_C CC_REGNUM)
878 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
879 (match_operand:SI 2 "arm_add_operand" "rI,L"))
881 (set (match_operand:SI 0 "s_register_operand" "=r,r")
882 (plus:SI (match_dup 1) (match_dup 2)))]
886 sub%.\\t%0, %1, #%n2"
887 [(set_attr "conds" "set")]
890 (define_insn "*compare_addsi2_op0"
891 [(set (reg:CC_C CC_REGNUM)
893 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
894 (match_operand:SI 1 "arm_add_operand" "rI,L"))
900 [(set_attr "conds" "set")]
903 (define_insn "*compare_addsi2_op1"
904 [(set (reg:CC_C CC_REGNUM)
906 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
907 (match_operand:SI 1 "arm_add_operand" "rI,L"))
913 [(set_attr "conds" "set")]
916 (define_insn "*addsi3_carryin_<optab>"
917 [(set (match_operand:SI 0 "s_register_operand" "=r")
918 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
919 (match_operand:SI 2 "arm_rhs_operand" "rI"))
920 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
923 [(set_attr "conds" "use")]
926 (define_insn "*addsi3_carryin_alt2_<optab>"
927 [(set (match_operand:SI 0 "s_register_operand" "=r")
928 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
929 (match_operand:SI 1 "s_register_operand" "%r"))
930 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
933 [(set_attr "conds" "use")]
936 (define_insn "*addsi3_carryin_shift_<optab>"
937 [(set (match_operand:SI 0 "s_register_operand" "=r")
939 (match_operator:SI 2 "shift_operator"
940 [(match_operand:SI 3 "s_register_operand" "r")
941 (match_operand:SI 4 "reg_or_int_operand" "rM")])
942 (match_operand:SI 1 "s_register_operand" "r"))
943 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
945 "adc%?\\t%0, %1, %3%S2"
946 [(set_attr "conds" "use")
947 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
948 (const_string "alu_shift")
949 (const_string "alu_shift_reg")))]
952 (define_expand "incscc"
953 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
954 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
955 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
956 (match_operand:SI 1 "s_register_operand" "0,?r")))]
961 (define_insn "*arm_incscc"
962 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
963 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
964 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
965 (match_operand:SI 1 "s_register_operand" "0,?r")))]
969 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
970 [(set_attr "conds" "use")
971 (set_attr "length" "4,8")]
974 ; Transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
976 [(set (match_operand:SI 0 "s_register_operand" "")
977 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
978 (match_operand:SI 2 "s_register_operand" ""))
980 (clobber (match_operand:SI 3 "s_register_operand" ""))]
982 [(set (match_dup 3) (match_dup 1))
983 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
985 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
988 (define_expand "addsf3"
989 [(set (match_operand:SF 0 "s_register_operand" "")
990 (plus:SF (match_operand:SF 1 "s_register_operand" "")
991 (match_operand:SF 2 "arm_float_add_operand" "")))]
992 "TARGET_32BIT && TARGET_HARD_FLOAT"
995 && !cirrus_fp_register (operands[2], SFmode))
996 operands[2] = force_reg (SFmode, operands[2]);
999 (define_expand "adddf3"
1000 [(set (match_operand:DF 0 "s_register_operand" "")
1001 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1002 (match_operand:DF 2 "arm_float_add_operand" "")))]
1003 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1006 && !cirrus_fp_register (operands[2], DFmode))
1007 operands[2] = force_reg (DFmode, operands[2]);
1010 (define_expand "subdi3"
1012 [(set (match_operand:DI 0 "s_register_operand" "")
1013 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1014 (match_operand:DI 2 "s_register_operand" "")))
1015 (clobber (reg:CC CC_REGNUM))])]
1018 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1020 && cirrus_fp_register (operands[0], DImode)
1021 && cirrus_fp_register (operands[1], DImode))
1023 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1029 if (GET_CODE (operands[1]) != REG)
1030 operands[1] = force_reg (DImode, operands[1]);
1031 if (GET_CODE (operands[2]) != REG)
1032 operands[2] = force_reg (DImode, operands[2]);
;; 64-bit subtraction as a two-instruction sequence: SUBS subtracts the
;; low words and sets the carry (borrow) flag, which SBC then consumes
;; when subtracting the high words.  The earlyclobber (&) on operand 0
;; prevents the partially written destination from overlapping an input
;; register that the second instruction still needs; the condition
;; codes are clobbered, as declared below and by conds "clob".
1037 (define_insn "*arm_subdi3"
1038 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1039 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1040 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1041 (clobber (reg:CC CC_REGNUM))]
1042 "TARGET_32BIT && !TARGET_NEON"
1043 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1044 [(set_attr "conds" "clob")
1045 (set_attr "length" "8")]
1048 (define_insn "*thumb_subdi3"
1049 [(set (match_operand:DI 0 "register_operand" "=l")
1050 (minus:DI (match_operand:DI 1 "register_operand" "0")
1051 (match_operand:DI 2 "register_operand" "l")))
1052 (clobber (reg:CC CC_REGNUM))]
1054 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1055 [(set_attr "length" "4")]
1058 (define_insn "*subdi_di_zesidi"
1059 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1060 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1062 (match_operand:SI 2 "s_register_operand" "r,r"))))
1063 (clobber (reg:CC CC_REGNUM))]
1065 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1066 [(set_attr "conds" "clob")
1067 (set_attr "length" "8")]
1070 (define_insn "*subdi_di_sesidi"
1071 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1072 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1074 (match_operand:SI 2 "s_register_operand" "r,r"))))
1075 (clobber (reg:CC CC_REGNUM))]
1077 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1078 [(set_attr "conds" "clob")
1079 (set_attr "length" "8")]
1082 (define_insn "*subdi_zesidi_di"
1083 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1084 (minus:DI (zero_extend:DI
1085 (match_operand:SI 2 "s_register_operand" "r,r"))
1086 (match_operand:DI 1 "s_register_operand" "0,r")))
1087 (clobber (reg:CC CC_REGNUM))]
1089 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1090 [(set_attr "conds" "clob")
1091 (set_attr "length" "8")]
1094 (define_insn "*subdi_sesidi_di"
1095 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1096 (minus:DI (sign_extend:DI
1097 (match_operand:SI 2 "s_register_operand" "r,r"))
1098 (match_operand:DI 1 "s_register_operand" "0,r")))
1099 (clobber (reg:CC CC_REGNUM))]
1101 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1102 [(set_attr "conds" "clob")
1103 (set_attr "length" "8")]
1106 (define_insn "*subdi_zesidi_zesidi"
1107 [(set (match_operand:DI 0 "s_register_operand" "=r")
1108 (minus:DI (zero_extend:DI
1109 (match_operand:SI 1 "s_register_operand" "r"))
1111 (match_operand:SI 2 "s_register_operand" "r"))))
1112 (clobber (reg:CC CC_REGNUM))]
1114 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1115 [(set_attr "conds" "clob")
1116 (set_attr "length" "8")]
1119 (define_expand "subsi3"
1120 [(set (match_operand:SI 0 "s_register_operand" "")
1121 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1122 (match_operand:SI 2 "s_register_operand" "")))]
1125 if (GET_CODE (operands[1]) == CONST_INT)
1129 arm_split_constant (MINUS, SImode, NULL_RTX,
1130 INTVAL (operands[1]), operands[0],
1131 operands[2], optimize && can_create_pseudo_p ());
1134 else /* TARGET_THUMB1 */
1135 operands[1] = force_reg (SImode, operands[1]);
1140 (define_insn "thumb1_subsi3_insn"
1141 [(set (match_operand:SI 0 "register_operand" "=l")
1142 (minus:SI (match_operand:SI 1 "register_operand" "l")
1143 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1146 [(set_attr "length" "2")
1147 (set_attr "conds" "set")])
1149 ; ??? Check Thumb-2 split length
1150 (define_insn_and_split "*arm_subsi3_insn"
1151 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1152 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1153 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1161 "&& ((GET_CODE (operands[1]) == CONST_INT
1162 && !const_ok_for_arm (INTVAL (operands[1])))
1163 || (GET_CODE (operands[2]) == CONST_INT
1164 && !const_ok_for_arm (INTVAL (operands[2]))))"
1165 [(clobber (const_int 0))]
1167 arm_split_constant (MINUS, SImode, curr_insn,
1168 INTVAL (operands[1]), operands[0], operands[2], 0);
1171 [(set_attr "length" "4,4,4,16,16")
1172 (set_attr "predicable" "yes")]
1176 [(match_scratch:SI 3 "r")
1177 (set (match_operand:SI 0 "arm_general_register_operand" "")
1178 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1179 (match_operand:SI 2 "arm_general_register_operand" "")))]
1181 && !const_ok_for_arm (INTVAL (operands[1]))
1182 && const_ok_for_arm (~INTVAL (operands[1]))"
1183 [(set (match_dup 3) (match_dup 1))
1184 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1188 (define_insn "*subsi3_compare0"
1189 [(set (reg:CC_NOOV CC_REGNUM)
1191 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1192 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1194 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1195 (minus:SI (match_dup 1) (match_dup 2)))]
1200 [(set_attr "conds" "set")]
1203 (define_insn "*subsi3_compare"
1204 [(set (reg:CC CC_REGNUM)
1205 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1206 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1207 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1208 (minus:SI (match_dup 1) (match_dup 2)))]
1213 [(set_attr "conds" "set")]
1216 (define_expand "decscc"
1217 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1218 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1219 (match_operator:SI 2 "arm_comparison_operator"
1220 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1225 (define_insn "*arm_decscc"
1226 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1227 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1228 (match_operator:SI 2 "arm_comparison_operator"
1229 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1233 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1234 [(set_attr "conds" "use")
1235 (set_attr "length" "*,8")]
1238 (define_expand "subsf3"
1239 [(set (match_operand:SF 0 "s_register_operand" "")
1240 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1241 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1242 "TARGET_32BIT && TARGET_HARD_FLOAT"
1244 if (TARGET_MAVERICK)
1246 if (!cirrus_fp_register (operands[1], SFmode))
1247 operands[1] = force_reg (SFmode, operands[1]);
1248 if (!cirrus_fp_register (operands[2], SFmode))
1249 operands[2] = force_reg (SFmode, operands[2]);
1253 (define_expand "subdf3"
1254 [(set (match_operand:DF 0 "s_register_operand" "")
1255 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1256 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1257 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1259 if (TARGET_MAVERICK)
1261 if (!cirrus_fp_register (operands[1], DFmode))
1262 operands[1] = force_reg (DFmode, operands[1]);
1263 if (!cirrus_fp_register (operands[2], DFmode))
1264 operands[2] = force_reg (DFmode, operands[2]);
1269 ;; Multiplication insns
1271 (define_expand "mulsi3"
1272 [(set (match_operand:SI 0 "s_register_operand" "")
1273 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1274 (match_operand:SI 1 "s_register_operand" "")))]
1279 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
;; 32-bit multiply for pre-v6 cores.  The earlyclobber on operand 0
;; together with the "%0" tie on operand 1 keeps the destination
;; distinct from operand 1 (presumably the pre-v6 Rd != Rm multiplier
;; restriction — see the note above; confirm against the ARM ARM).
1280 (define_insn "*arm_mulsi3"
1281 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1282 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1283 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1284 "TARGET_32BIT && !arm_arch6"
1285 "mul%?\\t%0, %2, %1"
1286 [(set_attr "insn" "mul")
1287 (set_attr "predicable" "yes")]
;; 32-bit multiply for v6 and later.  Unlike the pre-v6 pattern above,
;; no earlyclobber is needed: the constraints allow the destination to
;; overlap either source operand.
1290 (define_insn "*arm_mulsi3_v6"
1291 [(set (match_operand:SI 0 "s_register_operand" "=r")
1292 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1293 (match_operand:SI 2 "s_register_operand" "r")))]
1294 "TARGET_32BIT && arm_arch6"
1295 "mul%?\\t%0, %1, %2"
1296 [(set_attr "insn" "mul")
1297 (set_attr "predicable" "yes")]
1300 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1301 ; 1 and 2 are the same, because reload will make operand 0 match
1302 ; operand 1 without realizing that this conflicts with operand 2. We fix
1303 ; this by adding another alternative to match this case, and then `reload'
1304 ; it ourselves. This alternative must come first.
1305 (define_insn "*thumb_mulsi3"
1306 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1307 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1308 (match_operand:SI 2 "register_operand" "l,l,l")))]
1309 "TARGET_THUMB1 && !arm_arch6"
1311 if (which_alternative < 2)
1312 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1314 return \"mul\\t%0, %2\";
1316 [(set_attr "length" "4,4,2")
1317 (set_attr "insn" "mul")]
1320 (define_insn "*thumb_mulsi3_v6"
1321 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1322 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1323 (match_operand:SI 2 "register_operand" "l,0,0")))]
1324 "TARGET_THUMB1 && arm_arch6"
1329 [(set_attr "length" "2")
1330 (set_attr "insn" "mul")]
1333 (define_insn "*mulsi3_compare0"
1334 [(set (reg:CC_NOOV CC_REGNUM)
1335 (compare:CC_NOOV (mult:SI
1336 (match_operand:SI 2 "s_register_operand" "r,r")
1337 (match_operand:SI 1 "s_register_operand" "%0,r"))
1339 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1340 (mult:SI (match_dup 2) (match_dup 1)))]
1341 "TARGET_ARM && !arm_arch6"
1342 "mul%.\\t%0, %2, %1"
1343 [(set_attr "conds" "set")
1344 (set_attr "insn" "muls")]
1347 (define_insn "*mulsi3_compare0_v6"
1348 [(set (reg:CC_NOOV CC_REGNUM)
1349 (compare:CC_NOOV (mult:SI
1350 (match_operand:SI 2 "s_register_operand" "r")
1351 (match_operand:SI 1 "s_register_operand" "r"))
1353 (set (match_operand:SI 0 "s_register_operand" "=r")
1354 (mult:SI (match_dup 2) (match_dup 1)))]
1355 "TARGET_ARM && arm_arch6 && optimize_size"
1356 "mul%.\\t%0, %2, %1"
1357 [(set_attr "conds" "set")
1358 (set_attr "insn" "muls")]
1361 (define_insn "*mulsi_compare0_scratch"
1362 [(set (reg:CC_NOOV CC_REGNUM)
1363 (compare:CC_NOOV (mult:SI
1364 (match_operand:SI 2 "s_register_operand" "r,r")
1365 (match_operand:SI 1 "s_register_operand" "%0,r"))
1367 (clobber (match_scratch:SI 0 "=&r,&r"))]
1368 "TARGET_ARM && !arm_arch6"
1369 "mul%.\\t%0, %2, %1"
1370 [(set_attr "conds" "set")
1371 (set_attr "insn" "muls")]
1374 (define_insn "*mulsi_compare0_scratch_v6"
1375 [(set (reg:CC_NOOV CC_REGNUM)
1376 (compare:CC_NOOV (mult:SI
1377 (match_operand:SI 2 "s_register_operand" "r")
1378 (match_operand:SI 1 "s_register_operand" "r"))
1380 (clobber (match_scratch:SI 0 "=r"))]
1381 "TARGET_ARM && arm_arch6 && optimize_size"
1382 "mul%.\\t%0, %2, %1"
1383 [(set_attr "conds" "set")
1384 (set_attr "insn" "muls")]
1387 ;; Unnamed templates to match MLA instruction.
1389 (define_insn "*mulsi3addsi"
1390 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1392 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1393 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1394 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1395 "TARGET_32BIT && !arm_arch6"
1396 "mla%?\\t%0, %2, %1, %3"
1397 [(set_attr "insn" "mla")
1398 (set_attr "predicable" "yes")]
1401 (define_insn "*mulsi3addsi_v6"
1402 [(set (match_operand:SI 0 "s_register_operand" "=r")
1404 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1405 (match_operand:SI 1 "s_register_operand" "r"))
1406 (match_operand:SI 3 "s_register_operand" "r")))]
1407 "TARGET_32BIT && arm_arch6"
1408 "mla%?\\t%0, %2, %1, %3"
1409 [(set_attr "insn" "mla")
1410 (set_attr "predicable" "yes")]
1413 (define_insn "*mulsi3addsi_compare0"
1414 [(set (reg:CC_NOOV CC_REGNUM)
1417 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1418 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1419 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1421 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1422 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1424 "TARGET_ARM && arm_arch6"
1425 "mla%.\\t%0, %2, %1, %3"
1426 [(set_attr "conds" "set")
1427 (set_attr "insn" "mlas")]
1430 (define_insn "*mulsi3addsi_compare0_v6"
1431 [(set (reg:CC_NOOV CC_REGNUM)
1434 (match_operand:SI 2 "s_register_operand" "r")
1435 (match_operand:SI 1 "s_register_operand" "r"))
1436 (match_operand:SI 3 "s_register_operand" "r"))
1438 (set (match_operand:SI 0 "s_register_operand" "=r")
1439 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1441 "TARGET_ARM && arm_arch6 && optimize_size"
1442 "mla%.\\t%0, %2, %1, %3"
1443 [(set_attr "conds" "set")
1444 (set_attr "insn" "mlas")]
1447 (define_insn "*mulsi3addsi_compare0_scratch"
1448 [(set (reg:CC_NOOV CC_REGNUM)
1451 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1452 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1453 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1455 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1456 "TARGET_ARM && !arm_arch6"
1457 "mla%.\\t%0, %2, %1, %3"
1458 [(set_attr "conds" "set")
1459 (set_attr "insn" "mlas")]
1462 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1463 [(set (reg:CC_NOOV CC_REGNUM)
1466 (match_operand:SI 2 "s_register_operand" "r")
1467 (match_operand:SI 1 "s_register_operand" "r"))
1468 (match_operand:SI 3 "s_register_operand" "r"))
1470 (clobber (match_scratch:SI 0 "=r"))]
1471 "TARGET_ARM && arm_arch6 && optimize_size"
1472 "mla%.\\t%0, %2, %1, %3"
1473 [(set_attr "conds" "set")
1474 (set_attr "insn" "mlas")]
1477 (define_insn "*mulsi3subsi"
1478 [(set (match_operand:SI 0 "s_register_operand" "=r")
1480 (match_operand:SI 3 "s_register_operand" "r")
1481 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1482 (match_operand:SI 1 "s_register_operand" "r"))))]
1483 "TARGET_32BIT && arm_arch_thumb2"
1484 "mls%?\\t%0, %2, %1, %3"
1485 [(set_attr "insn" "mla")
1486 (set_attr "predicable" "yes")]
1489 (define_expand "maddsidi4"
1490 [(set (match_operand:DI 0 "s_register_operand" "")
1493 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1494 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1495 (match_operand:DI 3 "s_register_operand" "")))]
1496 "TARGET_32BIT && arm_arch3m"
1499 (define_insn "*mulsidi3adddi"
1500 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1503 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1504 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1505 (match_operand:DI 1 "s_register_operand" "0")))]
1506 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1507 "smlal%?\\t%Q0, %R0, %3, %2"
1508 [(set_attr "insn" "smlal")
1509 (set_attr "predicable" "yes")]
1512 (define_insn "*mulsidi3adddi_v6"
1513 [(set (match_operand:DI 0 "s_register_operand" "=r")
1516 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1517 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1518 (match_operand:DI 1 "s_register_operand" "0")))]
1519 "TARGET_32BIT && arm_arch6"
1520 "smlal%?\\t%Q0, %R0, %3, %2"
1521 [(set_attr "insn" "smlal")
1522 (set_attr "predicable" "yes")]
1525 ;; 32x32->64 widening multiply.
1526 ;; As with mulsi3, the only difference between the v3-5 and v6+
1527 ;; versions of these patterns is the requirement that the output not
1528 ;; overlap the inputs, but that still means we have to have a named
1529 ;; expander and two different starred insns.
1531 (define_expand "mulsidi3"
1532 [(set (match_operand:DI 0 "s_register_operand" "")
1534 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1535 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1536 "TARGET_32BIT && arm_arch3m"
1540 (define_insn "*mulsidi3_nov6"
1541 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1543 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1544 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1545 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1546 "smull%?\\t%Q0, %R0, %1, %2"
1547 [(set_attr "insn" "smull")
1548 (set_attr "predicable" "yes")]
1551 (define_insn "*mulsidi3_v6"
1552 [(set (match_operand:DI 0 "s_register_operand" "=r")
1554 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1555 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1556 "TARGET_32BIT && arm_arch6"
1557 "smull%?\\t%Q0, %R0, %1, %2"
1558 [(set_attr "insn" "smull")
1559 (set_attr "predicable" "yes")]
1562 (define_expand "umulsidi3"
1563 [(set (match_operand:DI 0 "s_register_operand" "")
1565 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1566 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1567 "TARGET_32BIT && arm_arch3m"
1571 (define_insn "*umulsidi3_nov6"
1572 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1574 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1575 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1576 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1577 "umull%?\\t%Q0, %R0, %1, %2"
1578 [(set_attr "insn" "umull")
1579 (set_attr "predicable" "yes")]
1582 (define_insn "*umulsidi3_v6"
1583 [(set (match_operand:DI 0 "s_register_operand" "=r")
1585 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1586 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1587 "TARGET_32BIT && arm_arch6"
1588 "umull%?\\t%Q0, %R0, %1, %2"
1589 [(set_attr "insn" "umull")
1590 (set_attr "predicable" "yes")]
1593 (define_expand "umaddsidi4"
1594 [(set (match_operand:DI 0 "s_register_operand" "")
1597 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1598 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1599 (match_operand:DI 3 "s_register_operand" "")))]
1600 "TARGET_32BIT && arm_arch3m"
1603 (define_insn "*umulsidi3adddi"
1604 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1607 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1608 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1609 (match_operand:DI 1 "s_register_operand" "0")))]
1610 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1611 "umlal%?\\t%Q0, %R0, %3, %2"
1612 [(set_attr "insn" "umlal")
1613 (set_attr "predicable" "yes")]
1616 (define_insn "*umulsidi3adddi_v6"
1617 [(set (match_operand:DI 0 "s_register_operand" "=r")
1620 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1621 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1622 (match_operand:DI 1 "s_register_operand" "0")))]
1623 "TARGET_32BIT && arm_arch6"
1624 "umlal%?\\t%Q0, %R0, %3, %2"
1625 [(set_attr "insn" "umlal")
1626 (set_attr "predicable" "yes")]
1629 (define_expand "smulsi3_highpart"
1631 [(set (match_operand:SI 0 "s_register_operand" "")
1635 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1636 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1638 (clobber (match_scratch:SI 3 ""))])]
1639 "TARGET_32BIT && arm_arch3m"
1643 (define_insn "*smulsi3_highpart_nov6"
1644 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1648 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1649 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1651 (clobber (match_scratch:SI 3 "=&r,&r"))]
1652 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1653 "smull%?\\t%3, %0, %2, %1"
1654 [(set_attr "insn" "smull")
1655 (set_attr "predicable" "yes")]
1658 (define_insn "*smulsi3_highpart_v6"
1659 [(set (match_operand:SI 0 "s_register_operand" "=r")
1663 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1664 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1666 (clobber (match_scratch:SI 3 "=r"))]
1667 "TARGET_32BIT && arm_arch6"
1668 "smull%?\\t%3, %0, %2, %1"
1669 [(set_attr "insn" "smull")
1670 (set_attr "predicable" "yes")]
1673 (define_expand "umulsi3_highpart"
1675 [(set (match_operand:SI 0 "s_register_operand" "")
1679 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1680 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1682 (clobber (match_scratch:SI 3 ""))])]
1683 "TARGET_32BIT && arm_arch3m"
1687 (define_insn "*umulsi3_highpart_nov6"
1688 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1692 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1693 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1695 (clobber (match_scratch:SI 3 "=&r,&r"))]
1696 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1697 "umull%?\\t%3, %0, %2, %1"
1698 [(set_attr "insn" "umull")
1699 (set_attr "predicable" "yes")]
1702 (define_insn "*umulsi3_highpart_v6"
1703 [(set (match_operand:SI 0 "s_register_operand" "=r")
1707 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1708 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1710 (clobber (match_scratch:SI 3 "=r"))]
1711 "TARGET_32BIT && arm_arch6"
1712 "umull%?\\t%3, %0, %2, %1"
1713 [(set_attr "insn" "umull")
1714 (set_attr "predicable" "yes")]
1717 (define_insn "mulhisi3"
1718 [(set (match_operand:SI 0 "s_register_operand" "=r")
1719 (mult:SI (sign_extend:SI
1720 (match_operand:HI 1 "s_register_operand" "%r"))
1722 (match_operand:HI 2 "s_register_operand" "r"))))]
1723 "TARGET_DSP_MULTIPLY"
1724 "smulbb%?\\t%0, %1, %2"
1725 [(set_attr "insn" "smulxy")
1726 (set_attr "predicable" "yes")]
1729 (define_insn "*mulhisi3tb"
1730 [(set (match_operand:SI 0 "s_register_operand" "=r")
1731 (mult:SI (ashiftrt:SI
1732 (match_operand:SI 1 "s_register_operand" "r")
1735 (match_operand:HI 2 "s_register_operand" "r"))))]
1736 "TARGET_DSP_MULTIPLY"
1737 "smultb%?\\t%0, %1, %2"
1738 [(set_attr "insn" "smulxy")
1739 (set_attr "predicable" "yes")]
1742 (define_insn "*mulhisi3bt"
1743 [(set (match_operand:SI 0 "s_register_operand" "=r")
1744 (mult:SI (sign_extend:SI
1745 (match_operand:HI 1 "s_register_operand" "r"))
1747 (match_operand:SI 2 "s_register_operand" "r")
1749 "TARGET_DSP_MULTIPLY"
1750 "smulbt%?\\t%0, %1, %2"
1751 [(set_attr "insn" "smulxy")
1752 (set_attr "predicable" "yes")]
1755 (define_insn "*mulhisi3tt"
1756 [(set (match_operand:SI 0 "s_register_operand" "=r")
1757 (mult:SI (ashiftrt:SI
1758 (match_operand:SI 1 "s_register_operand" "r")
1761 (match_operand:SI 2 "s_register_operand" "r")
1763 "TARGET_DSP_MULTIPLY"
1764 "smultt%?\\t%0, %1, %2"
1765 [(set_attr "insn" "smulxy")
1766 (set_attr "predicable" "yes")]
1769 (define_insn "maddhisi4"
1770 [(set (match_operand:SI 0 "s_register_operand" "=r")
1771 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1772 (mult:SI (sign_extend:SI
1773 (match_operand:HI 1 "s_register_operand" "%r"))
1775 (match_operand:HI 2 "s_register_operand" "r")))))]
1776 "TARGET_DSP_MULTIPLY"
1777 "smlabb%?\\t%0, %1, %2, %3"
1778 [(set_attr "insn" "smlaxy")
1779 (set_attr "predicable" "yes")]
1782 (define_insn "*maddhidi4"
1783 [(set (match_operand:DI 0 "s_register_operand" "=r")
1785 (match_operand:DI 3 "s_register_operand" "0")
1786 (mult:DI (sign_extend:DI
1787 (match_operand:HI 1 "s_register_operand" "%r"))
1789 (match_operand:HI 2 "s_register_operand" "r")))))]
1790 "TARGET_DSP_MULTIPLY"
1791 "smlalbb%?\\t%Q0, %R0, %1, %2"
1792 [(set_attr "insn" "smlalxy")
1793 (set_attr "predicable" "yes")])
1795 (define_expand "mulsf3"
1796 [(set (match_operand:SF 0 "s_register_operand" "")
1797 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1798 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1799 "TARGET_32BIT && TARGET_HARD_FLOAT"
1802 && !cirrus_fp_register (operands[2], SFmode))
1803 operands[2] = force_reg (SFmode, operands[2]);
1806 (define_expand "muldf3"
1807 [(set (match_operand:DF 0 "s_register_operand" "")
1808 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1809 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1810 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1813 && !cirrus_fp_register (operands[2], DFmode))
1814 operands[2] = force_reg (DFmode, operands[2]);
;; Single-precision floating-point division.  Restricted to the FPA and
;; VFP coprocessors, per the condition string below; other hard-float
;; models provide no divide instruction here.
1819 (define_expand "divsf3"
1820 [(set (match_operand:SF 0 "s_register_operand" "")
1821 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1822 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1823 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; Double-precision floating-point division.  Requires FPA or a VFP
;; with double-precision support (TARGET_VFP_DOUBLE), as the condition
;; string states.
1826 (define_expand "divdf3"
1827 [(set (match_operand:DF 0 "s_register_operand" "")
1828 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1829 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1830 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; Single-precision floating-point remainder.  FPA only, per the
;; condition string; no VFP variant exists for this operation here.
1835 (define_expand "modsf3"
1836 [(set (match_operand:SF 0 "s_register_operand" "")
1837 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1838 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1839 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
;; Double-precision floating-point remainder.  FPA only, matching the
;; single-precision modsf3 pattern above.
1842 (define_expand "moddf3"
1843 [(set (match_operand:DF 0 "s_register_operand" "")
1844 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1845 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1846 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1849 ;; Boolean and,ior,xor insns
1851 ;; Split up double word logical operations
1853 ;; Split up simple DImode logical operations. Simply perform the logical
1854 ;; operation on the upper and lower halves of the registers.
1856 [(set (match_operand:DI 0 "s_register_operand" "")
1857 (match_operator:DI 6 "logical_binary_operator"
1858 [(match_operand:DI 1 "s_register_operand" "")
1859 (match_operand:DI 2 "s_register_operand" "")]))]
1860 "TARGET_32BIT && reload_completed
1861 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1862 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1863 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1864 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1867 operands[3] = gen_highpart (SImode, operands[0]);
1868 operands[0] = gen_lowpart (SImode, operands[0]);
1869 operands[4] = gen_highpart (SImode, operands[1]);
1870 operands[1] = gen_lowpart (SImode, operands[1]);
1871 operands[5] = gen_highpart (SImode, operands[2]);
1872 operands[2] = gen_lowpart (SImode, operands[2]);
1877 [(set (match_operand:DI 0 "s_register_operand" "")
1878 (match_operator:DI 6 "logical_binary_operator"
1879 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1880 (match_operand:DI 1 "s_register_operand" "")]))]
1881 "TARGET_32BIT && reload_completed"
1882 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1883 (set (match_dup 3) (match_op_dup:SI 6
1884 [(ashiftrt:SI (match_dup 2) (const_int 31))
1888 operands[3] = gen_highpart (SImode, operands[0]);
1889 operands[0] = gen_lowpart (SImode, operands[0]);
1890 operands[4] = gen_highpart (SImode, operands[1]);
1891 operands[1] = gen_lowpart (SImode, operands[1]);
1892 operands[5] = gen_highpart (SImode, operands[2]);
1893 operands[2] = gen_lowpart (SImode, operands[2]);
1897 ;; The zero extend of operand 2 means we can just copy the high part of
1898 ;; operand 1 into operand 0.
1900 [(set (match_operand:DI 0 "s_register_operand" "")
1902 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1903 (match_operand:DI 1 "s_register_operand" "")))]
1904 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1905 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1906 (set (match_dup 3) (match_dup 4))]
1909 operands[4] = gen_highpart (SImode, operands[1]);
1910 operands[3] = gen_highpart (SImode, operands[0]);
1911 operands[0] = gen_lowpart (SImode, operands[0]);
1912 operands[1] = gen_lowpart (SImode, operands[1]);
1916 ;; The zero extend of operand 2 means we can just copy the high part of
1917 ;; operand 1 into operand 0.
1919 [(set (match_operand:DI 0 "s_register_operand" "")
1921 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1922 (match_operand:DI 1 "s_register_operand" "")))]
1923 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1924 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1925 (set (match_dup 3) (match_dup 4))]
1928 operands[4] = gen_highpart (SImode, operands[1]);
1929 operands[3] = gen_highpart (SImode, operands[0]);
1930 operands[0] = gen_lowpart (SImode, operands[0]);
1931 operands[1] = gen_lowpart (SImode, operands[1]);
1935 (define_expand "anddi3"
1936 [(set (match_operand:DI 0 "s_register_operand" "")
1937 (and:DI (match_operand:DI 1 "s_register_operand" "")
1938 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1943 (define_insn "*anddi3_insn"
1944 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1945 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1946 (match_operand:DI 2 "s_register_operand" "r,r")))]
1947 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1949 [(set_attr "length" "8")]
1952 (define_insn_and_split "*anddi_zesidi_di"
1953 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1954 (and:DI (zero_extend:DI
1955 (match_operand:SI 2 "s_register_operand" "r,r"))
1956 (match_operand:DI 1 "s_register_operand" "0,r")))]
1959 "TARGET_32BIT && reload_completed"
1960 ; The zero extend of operand 2 clears the high word of the output
1962 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1963 (set (match_dup 3) (const_int 0))]
1966 operands[3] = gen_highpart (SImode, operands[0]);
1967 operands[0] = gen_lowpart (SImode, operands[0]);
1968 operands[1] = gen_lowpart (SImode, operands[1]);
1970 [(set_attr "length" "8")]
1973 (define_insn "*anddi_sesdi_di"
1974 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1975 (and:DI (sign_extend:DI
1976 (match_operand:SI 2 "s_register_operand" "r,r"))
1977 (match_operand:DI 1 "s_register_operand" "0,r")))]
1980 [(set_attr "length" "8")]
1983 (define_expand "andsi3"
1984 [(set (match_operand:SI 0 "s_register_operand" "")
1985 (and:SI (match_operand:SI 1 "s_register_operand" "")
1986 (match_operand:SI 2 "reg_or_int_operand" "")))]
1991 if (GET_CODE (operands[2]) == CONST_INT)
1993 if (INTVAL (operands[2]) == 255 && arm_arch6)
1995 operands[1] = convert_to_mode (QImode, operands[1], 1);
1996 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2000 arm_split_constant (AND, SImode, NULL_RTX,
2001 INTVAL (operands[2]), operands[0],
2003 optimize && can_create_pseudo_p ());
2008 else /* TARGET_THUMB1 */
2010 if (GET_CODE (operands[2]) != CONST_INT)
2012 rtx tmp = force_reg (SImode, operands[2]);
2013 if (rtx_equal_p (operands[0], operands[1]))
2017 operands[2] = operands[1];
2025 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2027 operands[2] = force_reg (SImode,
2028 GEN_INT (~INTVAL (operands[2])));
2030 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2035 for (i = 9; i <= 31; i++)
2037 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2039 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2043 else if ((((HOST_WIDE_INT) 1) << i) - 1
2044 == ~INTVAL (operands[2]))
2046 rtx shift = GEN_INT (i);
2047 rtx reg = gen_reg_rtx (SImode);
2049 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2050 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2056 operands[2] = force_reg (SImode, operands[2]);
2062 ; ??? Check split length for Thumb-2
2063 (define_insn_and_split "*arm_andsi3_insn"
2064 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2065 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2066 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2070 bic%?\\t%0, %1, #%B2
2073 && GET_CODE (operands[2]) == CONST_INT
2074 && !(const_ok_for_arm (INTVAL (operands[2]))
2075 || const_ok_for_arm (~INTVAL (operands[2])))"
2076 [(clobber (const_int 0))]
2078 arm_split_constant (AND, SImode, curr_insn,
2079 INTVAL (operands[2]), operands[0], operands[1], 0);
2082 [(set_attr "length" "4,4,16")
2083 (set_attr "predicable" "yes")]
;; Thumb-1 SImode AND.  Two-address form: operand 1 is constrained to
;; match the destination ("%0"), as required by the 16-bit Thumb-1
;; encoding; the insn sets the condition codes (conds "set").
2086 (define_insn "*thumb1_andsi3_insn"
2087 [(set (match_operand:SI 0 "register_operand" "=l")
2088 (and:SI (match_operand:SI 1 "register_operand" "%0")
2089 (match_operand:SI 2 "register_operand" "l")))]
2092 [(set_attr "length" "2")
2093 (set_attr "conds" "set")])
;; SImode AND that also sets the condition codes from the result
;; (compare against zero, overflow ignored: CC_NOOV).  The second
;; alternative handles constants whose complement is a valid immediate
;; by emitting BIC with the inverted constant (%B2).
2095 (define_insn "*andsi3_compare0"
2096 [(set (reg:CC_NOOV CC_REGNUM)
2098 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2099 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2101 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2102 (and:SI (match_dup 1) (match_dup 2)))]
2106 bic%.\\t%0, %1, #%B2"
2107 [(set_attr "conds" "set")]
;; As *andsi3_compare0, but only the condition codes are wanted; the
;; AND result itself is discarded.  A scratch register is clobbered in
;; the BIC alternative ("=X,r"), since BIC needs somewhere to write.
2110 (define_insn "*andsi3_compare0_scratch"
2111 [(set (reg:CC_NOOV CC_REGNUM)
2113 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2114 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2116 (clobber (match_scratch:SI 2 "=X,r"))]
2120 bic%.\\t%2, %0, #%B1"
2121 [(set_attr "conds" "set")]
2124 (define_insn "*zeroextractsi_compare0_scratch"
2125 [(set (reg:CC_NOOV CC_REGNUM)
2126 (compare:CC_NOOV (zero_extract:SI
2127 (match_operand:SI 0 "s_register_operand" "r")
2128 (match_operand 1 "const_int_operand" "n")
2129 (match_operand 2 "const_int_operand" "n"))
2132 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2133 && INTVAL (operands[1]) > 0
2134 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2135 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2137 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2138 << INTVAL (operands[2]));
2139 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2142 [(set_attr "conds" "set")]
2145 (define_insn_and_split "*ne_zeroextractsi"
2146 [(set (match_operand:SI 0 "s_register_operand" "=r")
2147 (ne:SI (zero_extract:SI
2148 (match_operand:SI 1 "s_register_operand" "r")
2149 (match_operand:SI 2 "const_int_operand" "n")
2150 (match_operand:SI 3 "const_int_operand" "n"))
2152 (clobber (reg:CC CC_REGNUM))]
2154 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2155 && INTVAL (operands[2]) > 0
2156 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2157 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2160 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2161 && INTVAL (operands[2]) > 0
2162 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2163 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2164 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2165 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2167 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2169 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2170 (match_dup 0) (const_int 1)))]
2172 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2173 << INTVAL (operands[3]));
2175 [(set_attr "conds" "clob")
2176 (set (attr "length")
2177 (if_then_else (eq_attr "is_thumb" "yes")
2182 (define_insn_and_split "*ne_zeroextractsi_shifted"
2183 [(set (match_operand:SI 0 "s_register_operand" "=r")
2184 (ne:SI (zero_extract:SI
2185 (match_operand:SI 1 "s_register_operand" "r")
2186 (match_operand:SI 2 "const_int_operand" "n")
2189 (clobber (reg:CC CC_REGNUM))]
2193 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2194 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2196 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2198 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2199 (match_dup 0) (const_int 1)))]
2201 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2203 [(set_attr "conds" "clob")
2204 (set_attr "length" "8")]
2207 (define_insn_and_split "*ite_ne_zeroextractsi"
2208 [(set (match_operand:SI 0 "s_register_operand" "=r")
2209 (if_then_else:SI (ne (zero_extract:SI
2210 (match_operand:SI 1 "s_register_operand" "r")
2211 (match_operand:SI 2 "const_int_operand" "n")
2212 (match_operand:SI 3 "const_int_operand" "n"))
2214 (match_operand:SI 4 "arm_not_operand" "rIK")
2216 (clobber (reg:CC CC_REGNUM))]
2218 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2219 && INTVAL (operands[2]) > 0
2220 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2221 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2222 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2225 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2226 && INTVAL (operands[2]) > 0
2227 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2228 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2229 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2230 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2231 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2233 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2235 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2236 (match_dup 0) (match_dup 4)))]
2238 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2239 << INTVAL (operands[3]));
2241 [(set_attr "conds" "clob")
2242 (set_attr "length" "8")]
2245 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2246 [(set (match_operand:SI 0 "s_register_operand" "=r")
2247 (if_then_else:SI (ne (zero_extract:SI
2248 (match_operand:SI 1 "s_register_operand" "r")
2249 (match_operand:SI 2 "const_int_operand" "n")
2252 (match_operand:SI 3 "arm_not_operand" "rIK")
2254 (clobber (reg:CC CC_REGNUM))]
2255 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2257 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2258 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2259 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2261 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2263 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2264 (match_dup 0) (match_dup 3)))]
2266 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2268 [(set_attr "conds" "clob")
2269 (set_attr "length" "8")]
2273 [(set (match_operand:SI 0 "s_register_operand" "")
2274 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2275 (match_operand:SI 2 "const_int_operand" "")
2276 (match_operand:SI 3 "const_int_operand" "")))
2277 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2279 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2280 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2282 HOST_WIDE_INT temp = INTVAL (operands[2]);
2284 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2285 operands[3] = GEN_INT (32 - temp);
2289 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
2291 [(set (match_operand:SI 0 "s_register_operand" "")
2292 (match_operator:SI 1 "shiftable_operator"
2293 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2294 (match_operand:SI 3 "const_int_operand" "")
2295 (match_operand:SI 4 "const_int_operand" ""))
2296 (match_operand:SI 5 "s_register_operand" "")]))
2297 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2299 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2302 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2305 HOST_WIDE_INT temp = INTVAL (operands[3]);
2307 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2308 operands[4] = GEN_INT (32 - temp);
2313 [(set (match_operand:SI 0 "s_register_operand" "")
2314 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2315 (match_operand:SI 2 "const_int_operand" "")
2316 (match_operand:SI 3 "const_int_operand" "")))]
2318 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2319 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2321 HOST_WIDE_INT temp = INTVAL (operands[2]);
2323 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2324 operands[3] = GEN_INT (32 - temp);
2329 [(set (match_operand:SI 0 "s_register_operand" "")
2330 (match_operator:SI 1 "shiftable_operator"
2331 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2332 (match_operand:SI 3 "const_int_operand" "")
2333 (match_operand:SI 4 "const_int_operand" ""))
2334 (match_operand:SI 5 "s_register_operand" "")]))
2335 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2337 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2340 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2343 HOST_WIDE_INT temp = INTVAL (operands[3]);
2345 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2346 operands[4] = GEN_INT (32 - temp);
2350 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2351 ;;; represented by the bitfield, then this will produce incorrect results.
2352 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2353 ;;; which have a real bit-field insert instruction, the truncation happens
2354 ;;; in the bit-field insert instruction itself. Since arm does not have a
2355 ;;; bit-field insert instruction, we would have to emit code here to truncate
2356 ;;; the value before we insert. This loses some of the advantage of having
2357 ;;; this insv pattern, so this pattern needs to be reevaluated.
2359 (define_expand "insv"
2360 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2361 (match_operand:SI 1 "general_operand" "")
2362 (match_operand:SI 2 "general_operand" ""))
2363 (match_operand:SI 3 "reg_or_int_operand" ""))]
2364 "TARGET_ARM || arm_arch_thumb2"
2367 int start_bit = INTVAL (operands[2]);
2368 int width = INTVAL (operands[1]);
2369 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2370 rtx target, subtarget;
2372 if (arm_arch_thumb2)
2374 bool use_bfi = TRUE;
2376 if (GET_CODE (operands[3]) == CONST_INT)
2378 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2382 emit_insn (gen_insv_zero (operands[0], operands[1],
2387 /* See if the set can be done with a single orr instruction. */
2388 if (val == mask && const_ok_for_arm (val << start_bit))
2394 if (GET_CODE (operands[3]) != REG)
2395 operands[3] = force_reg (SImode, operands[3]);
2397 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2403 target = copy_rtx (operands[0]);
2404 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2405 subreg as the final target. */
2406 if (GET_CODE (target) == SUBREG)
2408 subtarget = gen_reg_rtx (SImode);
2409 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2410 < GET_MODE_SIZE (SImode))
2411 target = SUBREG_REG (target);
2416 if (GET_CODE (operands[3]) == CONST_INT)
2418 /* Since we are inserting a known constant, we may be able to
2419 reduce the number of bits that we have to clear so that
2420 the mask becomes simple. */
2421 /* ??? This code does not check to see if the new mask is actually
2422 simpler. It may not be. */
2423 rtx op1 = gen_reg_rtx (SImode);
2424 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2425 start of this pattern. */
2426 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2427 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2429 emit_insn (gen_andsi3 (op1, operands[0],
2430 gen_int_mode (~mask2, SImode)));
2431 emit_insn (gen_iorsi3 (subtarget, op1,
2432 gen_int_mode (op3_value << start_bit, SImode)));
2434 else if (start_bit == 0
2435 && !(const_ok_for_arm (mask)
2436 || const_ok_for_arm (~mask)))
2438 /* A Trick, since we are setting the bottom bits in the word,
2439 we can shift operand[3] up, operand[0] down, OR them together
2440 and rotate the result back again. This takes 3 insns, and
2441 the third might be mergeable into another op. */
2442 /* The shift up copes with the possibility that operand[3] is
2443 wider than the bitfield. */
2444 rtx op0 = gen_reg_rtx (SImode);
2445 rtx op1 = gen_reg_rtx (SImode);
2447 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2448 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2449 emit_insn (gen_iorsi3 (op1, op1, op0));
2450 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2452 else if ((width + start_bit == 32)
2453 && !(const_ok_for_arm (mask)
2454 || const_ok_for_arm (~mask)))
2456 /* Similar trick, but slightly less efficient. */
2458 rtx op0 = gen_reg_rtx (SImode);
2459 rtx op1 = gen_reg_rtx (SImode);
2461 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2462 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2463 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2464 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2468 rtx op0 = gen_int_mode (mask, SImode);
2469 rtx op1 = gen_reg_rtx (SImode);
2470 rtx op2 = gen_reg_rtx (SImode);
2472 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2474 rtx tmp = gen_reg_rtx (SImode);
2476 emit_insn (gen_movsi (tmp, op0));
2480 /* Mask out any bits in operand[3] that are not needed. */
2481 emit_insn (gen_andsi3 (op1, operands[3], op0));
2483 if (GET_CODE (op0) == CONST_INT
2484 && (const_ok_for_arm (mask << start_bit)
2485 || const_ok_for_arm (~(mask << start_bit))))
2487 op0 = gen_int_mode (~(mask << start_bit), SImode);
2488 emit_insn (gen_andsi3 (op2, operands[0], op0));
2492 if (GET_CODE (op0) == CONST_INT)
2494 rtx tmp = gen_reg_rtx (SImode);
2496 emit_insn (gen_movsi (tmp, op0));
2501 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2503 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2507 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2509 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2512 if (subtarget != target)
2514 /* If TARGET is still a SUBREG, then it must be wider than a word,
2515 so we must be careful only to set the subword we were asked to. */
2516 if (GET_CODE (target) == SUBREG)
2517 emit_move_insn (target, subtarget);
2519 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield in place: zero_extract of operand 0 (width operand 1,
;; starting at bit operand 2) is set to zero.  Both field parameters are
;; "M"-constrained const_ints.  Single 4-byte, predicable insn
;; (Thumb-2 BFC-style operation).
2526 (define_insn "insv_zero"
2527 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2528 (match_operand:SI 1 "const_int_operand" "M")
2529 (match_operand:SI 2 "const_int_operand" "M"))
2533 [(set_attr "length" "4")
2534 (set_attr "predicable" "yes")]
;; Thumb-2 bitfield insert: BFI writes the low bits of register operand 3
;; into the field of operand 0 selected by lsb (operand 2) and width
;; (operand 1).  Used by the "insv" expander when arm_arch_thumb2.
2537 (define_insn "insv_t2"
2538 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2539 (match_operand:SI 1 "const_int_operand" "M")
2540 (match_operand:SI 2 "const_int_operand" "M"))
2541 (match_operand:SI 3 "s_register_operand" "r"))]
2543 "bfi%?\t%0, %3, %2, %1"
2544 [(set_attr "length" "4")
2545 (set_attr "predicable" "yes")]
2548 ; constants for op 2 will never be given to these patterns.
2549 (define_insn_and_split "*anddi_notdi_di"
2550 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2551 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2552 (match_operand:DI 2 "s_register_operand" "r,0")))]
2555 "TARGET_32BIT && reload_completed
2556 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2557 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2558 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2559 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2562 operands[3] = gen_highpart (SImode, operands[0]);
2563 operands[0] = gen_lowpart (SImode, operands[0]);
2564 operands[4] = gen_highpart (SImode, operands[1]);
2565 operands[1] = gen_lowpart (SImode, operands[1]);
2566 operands[5] = gen_highpart (SImode, operands[2]);
2567 operands[2] = gen_lowpart (SImode, operands[2]);
2569 [(set_attr "length" "8")
2570 (set_attr "predicable" "yes")]
2573 (define_insn_and_split "*anddi_notzesidi_di"
2574 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2575 (and:DI (not:DI (zero_extend:DI
2576 (match_operand:SI 2 "s_register_operand" "r,r")))
2577 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2580 bic%?\\t%Q0, %Q1, %2
2582 ; (not (zero_extend ...)) allows us to just copy the high word from
2583 ; operand1 to operand0.
2586 && operands[0] != operands[1]"
2587 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2588 (set (match_dup 3) (match_dup 4))]
2591 operands[3] = gen_highpart (SImode, operands[0]);
2592 operands[0] = gen_lowpart (SImode, operands[0]);
2593 operands[4] = gen_highpart (SImode, operands[1]);
2594 operands[1] = gen_lowpart (SImode, operands[1]);
2596 [(set_attr "length" "4,8")
2597 (set_attr "predicable" "yes")]
2600 (define_insn_and_split "*anddi_notsesidi_di"
2601 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2602 (and:DI (not:DI (sign_extend:DI
2603 (match_operand:SI 2 "s_register_operand" "r,r")))
2604 (match_operand:DI 1 "s_register_operand" "0,r")))]
2607 "TARGET_32BIT && reload_completed"
2608 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2609 (set (match_dup 3) (and:SI (not:SI
2610 (ashiftrt:SI (match_dup 2) (const_int 31)))
2614 operands[3] = gen_highpart (SImode, operands[0]);
2615 operands[0] = gen_lowpart (SImode, operands[0]);
2616 operands[4] = gen_highpart (SImode, operands[1]);
2617 operands[1] = gen_lowpart (SImode, operands[1]);
2619 [(set_attr "length" "8")
2620 (set_attr "predicable" "yes")]
;; AND with complemented operand: %0 = %1 & ~%2, implemented directly by
;; the BIC (bit clear) instruction.  Named (no leading '*') so it can be
;; generated explicitly, e.g. via gen_andsi_notsi_si in the insv expander.
2623 (define_insn "andsi_notsi_si"
2624 [(set (match_operand:SI 0 "s_register_operand" "=r")
2625 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2626 (match_operand:SI 1 "s_register_operand" "r")))]
2628 "bic%?\\t%0, %1, %2"
2629 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC: %0 = %2 & ~%1.  Note the operand roles are swapped
;; relative to andsi_notsi_si — operand 2 (the uncomplemented source)
;; must match the destination, per the 16-bit two-address encoding.
;; Sets the condition codes.
2632 (define_insn "thumb1_bicsi3"
2633 [(set (match_operand:SI 0 "register_operand" "=l")
2634 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2635 (match_operand:SI 2 "register_operand" "0")))]
2638 [(set_attr "length" "2")
2639 (set_attr "conds" "set")])
;; BIC with a shifted second operand: %0 = %1 & ~(%2 SHIFT %3), using the
;; shifter operand (%S4 prints the shift).  The "type" attribute
;; distinguishes shift-by-constant (alu_shift) from shift-by-register
;; (alu_shift_reg) for scheduling.
2641 (define_insn "andsi_not_shiftsi_si"
2642 [(set (match_operand:SI 0 "s_register_operand" "=r")
2643 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2644 [(match_operand:SI 2 "s_register_operand" "r")
2645 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2646 (match_operand:SI 1 "s_register_operand" "r")))]
2648 "bic%?\\t%0, %1, %2%S4"
2649 [(set_attr "predicable" "yes")
2650 (set_attr "shift" "2")
2651 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2652 (const_string "alu_shift")
2653 (const_string "alu_shift_reg")))]
2656 (define_insn "*andsi_notsi_si_compare0"
2657 [(set (reg:CC_NOOV CC_REGNUM)
2659 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2660 (match_operand:SI 1 "s_register_operand" "r"))
2662 (set (match_operand:SI 0 "s_register_operand" "=r")
2663 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2665 "bic%.\\t%0, %1, %2"
2666 [(set_attr "conds" "set")]
2669 (define_insn "*andsi_notsi_si_compare0_scratch"
2670 [(set (reg:CC_NOOV CC_REGNUM)
2672 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2673 (match_operand:SI 1 "s_register_operand" "r"))
2675 (clobber (match_scratch:SI 0 "=r"))]
2677 "bic%.\\t%0, %1, %2"
2678 [(set_attr "conds" "set")]
2681 (define_expand "iordi3"
2682 [(set (match_operand:DI 0 "s_register_operand" "")
2683 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2684 (match_operand:DI 2 "neon_logic_op2" "")))]
;; DImode inclusive-OR in core registers: two SImode ORRs (length 8).
;; Excluded when iWMMXt or Neon is available, since those provide their
;; own 64-bit logical patterns.
2689 (define_insn "*iordi3_insn"
2690 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2691 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2692 (match_operand:DI 2 "s_register_operand" "r,r")))]
2693 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2695 [(set_attr "length" "8")
2696 (set_attr "predicable" "yes")]
2699 (define_insn "*iordi_zesidi_di"
2700 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2701 (ior:DI (zero_extend:DI
2702 (match_operand:SI 2 "s_register_operand" "r,r"))
2703 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2706 orr%?\\t%Q0, %Q1, %2
2708 [(set_attr "length" "4,8")
2709 (set_attr "predicable" "yes")]
2712 (define_insn "*iordi_sesidi_di"
2713 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2714 (ior:DI (sign_extend:DI
2715 (match_operand:SI 2 "s_register_operand" "r,r"))
2716 (match_operand:DI 1 "s_register_operand" "0,r")))]
2719 [(set_attr "length" "8")
2720 (set_attr "predicable" "yes")]
2723 (define_expand "iorsi3"
2724 [(set (match_operand:SI 0 "s_register_operand" "")
2725 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2726 (match_operand:SI 2 "reg_or_int_operand" "")))]
2729 if (GET_CODE (operands[2]) == CONST_INT)
2733 arm_split_constant (IOR, SImode, NULL_RTX,
2734 INTVAL (operands[2]), operands[0], operands[1],
2735 optimize && can_create_pseudo_p ());
2738 else /* TARGET_THUMB1 */
2740 rtx tmp = force_reg (SImode, operands[2]);
2741 if (rtx_equal_p (operands[0], operands[1]))
2745 operands[2] = operands[1];
2753 (define_insn_and_split "*iorsi3_insn"
2754 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2755 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2756 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2760 orn%?\\t%0, %1, #%B2
2763 && GET_CODE (operands[2]) == CONST_INT
2764 && !(const_ok_for_arm (INTVAL (operands[2]))
2765 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2766 [(clobber (const_int 0))]
2768 arm_split_constant (IOR, SImode, curr_insn,
2769 INTVAL (operands[2]), operands[0], operands[1], 0);
2772 [(set_attr "length" "4,4,16")
2773 (set_attr "arch" "32,t2,32")
2774 (set_attr "predicable" "yes")])
2776 (define_insn "*thumb1_iorsi3_insn"
2777 [(set (match_operand:SI 0 "register_operand" "=l")
2778 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2779 (match_operand:SI 2 "register_operand" "l")))]
2782 [(set_attr "length" "2")
2783 (set_attr "conds" "set")])
2786 [(match_scratch:SI 3 "r")
2787 (set (match_operand:SI 0 "arm_general_register_operand" "")
2788 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2789 (match_operand:SI 2 "const_int_operand" "")))]
2791 && !const_ok_for_arm (INTVAL (operands[2]))
2792 && const_ok_for_arm (~INTVAL (operands[2]))"
2793 [(set (match_dup 3) (match_dup 2))
2794 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2798 (define_insn "*iorsi3_compare0"
2799 [(set (reg:CC_NOOV CC_REGNUM)
2800 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2801 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2803 (set (match_operand:SI 0 "s_register_operand" "=r")
2804 (ior:SI (match_dup 1) (match_dup 2)))]
2806 "orr%.\\t%0, %1, %2"
2807 [(set_attr "conds" "set")]
2810 (define_insn "*iorsi3_compare0_scratch"
2811 [(set (reg:CC_NOOV CC_REGNUM)
2812 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2813 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2815 (clobber (match_scratch:SI 0 "=r"))]
2817 "orr%.\\t%0, %1, %2"
2818 [(set_attr "conds" "set")]
2821 (define_expand "xordi3"
2822 [(set (match_operand:DI 0 "s_register_operand" "")
2823 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2824 (match_operand:DI 2 "s_register_operand" "")))]
2829 (define_insn "*xordi3_insn"
2830 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2831 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2832 (match_operand:DI 2 "s_register_operand" "r,r")))]
2833 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2835 [(set_attr "length" "8")
2836 (set_attr "predicable" "yes")]
2839 (define_insn "*xordi_zesidi_di"
2840 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2841 (xor:DI (zero_extend:DI
2842 (match_operand:SI 2 "s_register_operand" "r,r"))
2843 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2846 eor%?\\t%Q0, %Q1, %2
2848 [(set_attr "length" "4,8")
2849 (set_attr "predicable" "yes")]
2852 (define_insn "*xordi_sesidi_di"
2853 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2854 (xor:DI (sign_extend:DI
2855 (match_operand:SI 2 "s_register_operand" "r,r"))
2856 (match_operand:DI 1 "s_register_operand" "0,r")))]
2859 [(set_attr "length" "8")
2860 (set_attr "predicable" "yes")]
2863 (define_expand "xorsi3"
2864 [(set (match_operand:SI 0 "s_register_operand" "")
2865 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2866 (match_operand:SI 2 "reg_or_int_operand" "")))]
2868 "if (GET_CODE (operands[2]) == CONST_INT)
2872 arm_split_constant (XOR, SImode, NULL_RTX,
2873 INTVAL (operands[2]), operands[0], operands[1],
2874 optimize && can_create_pseudo_p ());
2877 else /* TARGET_THUMB1 */
2879 rtx tmp = force_reg (SImode, operands[2]);
2880 if (rtx_equal_p (operands[0], operands[1]))
2884 operands[2] = operands[1];
;; SImode exclusive-OR for ARM/Thumb-2: single predicable EOR with a
;; register or "I"-constraint immediate second operand.
2891 (define_insn "*arm_xorsi3"
2892 [(set (match_operand:SI 0 "s_register_operand" "=r")
2893 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2894 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2896 "eor%?\\t%0, %1, %2"
2897 [(set_attr "predicable" "yes")]
;; Thumb-1 SImode exclusive-OR.  Two-address 16-bit encoding (operand 1
;; matches the destination) and sets the condition codes.
2900 (define_insn "*thumb1_xorsi3_insn"
2901 [(set (match_operand:SI 0 "register_operand" "=l")
2902 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2903 (match_operand:SI 2 "register_operand" "l")))]
2906 [(set_attr "length" "2")
2907 (set_attr "conds" "set")])
2909 (define_insn "*xorsi3_compare0"
2910 [(set (reg:CC_NOOV CC_REGNUM)
2911 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2912 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2914 (set (match_operand:SI 0 "s_register_operand" "=r")
2915 (xor:SI (match_dup 1) (match_dup 2)))]
2917 "eor%.\\t%0, %1, %2"
2918 [(set_attr "conds" "set")]
2921 (define_insn "*xorsi3_compare0_scratch"
2922 [(set (reg:CC_NOOV CC_REGNUM)
2923 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2924 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2928 [(set_attr "conds" "set")]
2931 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2932 ; (NOT D) we can sometimes merge the final NOT into one of the following
2936 [(set (match_operand:SI 0 "s_register_operand" "")
2937 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2938 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2939 (match_operand:SI 3 "arm_rhs_operand" "")))
2940 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2942 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2943 (not:SI (match_dup 3))))
2944 (set (match_dup 0) (not:SI (match_dup 4)))]
2948 (define_insn "*andsi_iorsi3_notsi"
2949 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2950 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2951 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2952 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2954 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2955 [(set_attr "length" "8")
2956 (set_attr "ce_count" "2")
2957 (set_attr "predicable" "yes")]
2960 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2961 ; insns are available?
2963 [(set (match_operand:SI 0 "s_register_operand" "")
2964 (match_operator:SI 1 "logical_binary_operator"
2965 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2966 (match_operand:SI 3 "const_int_operand" "")
2967 (match_operand:SI 4 "const_int_operand" ""))
2968 (match_operator:SI 9 "logical_binary_operator"
2969 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2970 (match_operand:SI 6 "const_int_operand" ""))
2971 (match_operand:SI 7 "s_register_operand" "")])]))
2972 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2974 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2975 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2978 [(ashift:SI (match_dup 2) (match_dup 4))
2982 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2985 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2989 [(set (match_operand:SI 0 "s_register_operand" "")
2990 (match_operator:SI 1 "logical_binary_operator"
2991 [(match_operator:SI 9 "logical_binary_operator"
2992 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2993 (match_operand:SI 6 "const_int_operand" ""))
2994 (match_operand:SI 7 "s_register_operand" "")])
2995 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2996 (match_operand:SI 3 "const_int_operand" "")
2997 (match_operand:SI 4 "const_int_operand" ""))]))
2998 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3000 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3001 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3004 [(ashift:SI (match_dup 2) (match_dup 4))
3008 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3011 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3015 [(set (match_operand:SI 0 "s_register_operand" "")
3016 (match_operator:SI 1 "logical_binary_operator"
3017 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3018 (match_operand:SI 3 "const_int_operand" "")
3019 (match_operand:SI 4 "const_int_operand" ""))
3020 (match_operator:SI 9 "logical_binary_operator"
3021 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3022 (match_operand:SI 6 "const_int_operand" ""))
3023 (match_operand:SI 7 "s_register_operand" "")])]))
3024 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3026 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3027 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3030 [(ashift:SI (match_dup 2) (match_dup 4))
3034 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3037 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3041 [(set (match_operand:SI 0 "s_register_operand" "")
3042 (match_operator:SI 1 "logical_binary_operator"
3043 [(match_operator:SI 9 "logical_binary_operator"
3044 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3045 (match_operand:SI 6 "const_int_operand" ""))
3046 (match_operand:SI 7 "s_register_operand" "")])
3047 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3048 (match_operand:SI 3 "const_int_operand" "")
3049 (match_operand:SI 4 "const_int_operand" ""))]))
3050 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3052 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3053 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3056 [(ashift:SI (match_dup 2) (match_dup 4))
3060 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3063 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3067 ;; Minimum and maximum insns
3069 (define_expand "smaxsi3"
3071 (set (match_operand:SI 0 "s_register_operand" "")
3072 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3073 (match_operand:SI 2 "arm_rhs_operand" "")))
3074 (clobber (reg:CC CC_REGNUM))])]
3077 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3079 /* No need for a clobber of the condition code register here. */
3080 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3081 gen_rtx_SMAX (SImode, operands[1],
;; smax (x, 0) in one insn: "bic x, x, x asr #31" clears all bits when x
;; is negative (asr #31 yields all-ones) and leaves x unchanged when it
;; is non-negative (asr #31 yields zero).  No condition codes needed.
3087 (define_insn "*smax_0"
3088 [(set (match_operand:SI 0 "s_register_operand" "=r")
3089 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3092 "bic%?\\t%0, %1, %1, asr #31"
3093 [(set_attr "predicable" "yes")]
;; smax (x, -1) in one insn: "orr x, x, x asr #31" sets all bits (-1)
;; when x is negative and leaves x unchanged when it is non-negative.
3096 (define_insn "*smax_m1"
3097 [(set (match_operand:SI 0 "s_register_operand" "=r")
3098 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3101 "orr%?\\t%0, %1, %1, asr #31"
3102 [(set_attr "predicable" "yes")]
3105 (define_insn "*arm_smax_insn"
3106 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3107 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3108 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3109 (clobber (reg:CC CC_REGNUM))]
3112 cmp\\t%1, %2\;movlt\\t%0, %2
3113 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3114 [(set_attr "conds" "clob")
3115 (set_attr "length" "8,12")]
3118 (define_expand "sminsi3"
3120 (set (match_operand:SI 0 "s_register_operand" "")
3121 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3122 (match_operand:SI 2 "arm_rhs_operand" "")))
3123 (clobber (reg:CC CC_REGNUM))])]
3126 if (operands[2] == const0_rtx)
3128 /* No need for a clobber of the condition code register here. */
3129 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3130 gen_rtx_SMIN (SImode, operands[1],
3136 (define_insn "*smin_0"
3137 [(set (match_operand:SI 0 "s_register_operand" "=r")
3138 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3141 "and%?\\t%0, %1, %1, asr #31"
3142 [(set_attr "predicable" "yes")]
3145 (define_insn "*arm_smin_insn"
3146 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3147 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3148 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3149 (clobber (reg:CC CC_REGNUM))]
3152 cmp\\t%1, %2\;movge\\t%0, %2
3153 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3154 [(set_attr "conds" "clob")
3155 (set_attr "length" "8,12")]
3158 (define_expand "umaxsi3"
3160 (set (match_operand:SI 0 "s_register_operand" "")
3161 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3162 (match_operand:SI 2 "arm_rhs_operand" "")))
3163 (clobber (reg:CC CC_REGNUM))])]
3168 (define_insn "*arm_umaxsi3"
3169 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3170 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3171 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3172 (clobber (reg:CC CC_REGNUM))]
3175 cmp\\t%1, %2\;movcc\\t%0, %2
3176 cmp\\t%1, %2\;movcs\\t%0, %1
3177 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3178 [(set_attr "conds" "clob")
3179 (set_attr "length" "8,8,12")]
3182 (define_expand "uminsi3"
3184 (set (match_operand:SI 0 "s_register_operand" "")
3185 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3186 (match_operand:SI 2 "arm_rhs_operand" "")))
3187 (clobber (reg:CC CC_REGNUM))])]
3192 (define_insn "*arm_uminsi3"
3193 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3194 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3195 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3196 (clobber (reg:CC CC_REGNUM))]
3199 cmp\\t%1, %2\;movcs\\t%0, %2
3200 cmp\\t%1, %2\;movcc\\t%0, %1
3201 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3202 [(set_attr "conds" "clob")
3203 (set_attr "length" "8,8,12")]
3206 (define_insn "*store_minmaxsi"
3207 [(set (match_operand:SI 0 "memory_operand" "=m")
3208 (match_operator:SI 3 "minmax_operator"
3209 [(match_operand:SI 1 "s_register_operand" "r")
3210 (match_operand:SI 2 "s_register_operand" "r")]))
3211 (clobber (reg:CC CC_REGNUM))]
3214 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3215 operands[1], operands[2]);
3216 output_asm_insn (\"cmp\\t%1, %2\", operands);
3218 output_asm_insn (\"ite\t%d3\", operands);
3219 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3220 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3223 [(set_attr "conds" "clob")
3224 (set (attr "length")
3225 (if_then_else (eq_attr "is_thumb" "yes")
3228 (set_attr "type" "store1")]
3231 ; Reject the frame pointer in operand[1], since reloading this after
3232 ; it has been eliminated can cause carnage.
3233 (define_insn "*minmax_arithsi"
3234 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3235 (match_operator:SI 4 "shiftable_operator"
3236 [(match_operator:SI 5 "minmax_operator"
3237 [(match_operand:SI 2 "s_register_operand" "r,r")
3238 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3239 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3240 (clobber (reg:CC CC_REGNUM))]
3241 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3244 enum rtx_code code = GET_CODE (operands[4]);
3247 if (which_alternative != 0 || operands[3] != const0_rtx
3248 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3253 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3254 operands[2], operands[3]);
3255 output_asm_insn (\"cmp\\t%2, %3\", operands);
3259 output_asm_insn (\"ite\\t%d5\", operands);
3261 output_asm_insn (\"it\\t%d5\", operands);
3263 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3265 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3268 [(set_attr "conds" "clob")
3269 (set (attr "length")
3270 (if_then_else (eq_attr "is_thumb" "yes")
3276 ;; Shift and rotation insns
;; NOTE(review): interior lines of these patterns appear to have been
;; dropped by extraction; comments are limited to what is visible.

;; DImode left shift: only the by-1 case has a dedicated 2-insn pattern;
;; other amounts fall through to library/alternate expansion unless
;; iWMMXt (or Maverick FP) provides native DI shifts.
3278 (define_expand "ashldi3"
3279 [(set (match_operand:DI 0 "s_register_operand" "")
3280 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3281 (match_operand:SI 2 "reg_or_int_operand" "")))]
3284 if (GET_CODE (operands[2]) == CONST_INT)
3286 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3288 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3291 /* Ideally we shouldn't fail here if we could know that operands[1]
3292 ends up already living in an iwmmxt register. Otherwise it's
3293 cheaper to have the alternate code being generated than moving
3294 values to iwmmxt regs and back. */
3297 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift low word setting carry, then add-with-carry the high
;; word to itself (doubling it and pulling in the carried-out bit).
3302 (define_insn "arm_ashldi3_1bit"
3303 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3304 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3306 (clobber (reg:CC CC_REGNUM))]
3308 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3309 [(set_attr "conds" "clob")
3310 (set_attr "length" "8")]
;; SI left shift: shifts of more than 31 are folded to a constant 0 move.
3313 (define_expand "ashlsi3"
3314 [(set (match_operand:SI 0 "s_register_operand" "")
3315 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3316 (match_operand:SI 2 "arm_rhs_operand" "")))]
3319 if (GET_CODE (operands[2]) == CONST_INT
3320 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3322 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 LSL (immediate or register amount); flag-setting, 16-bit encoding.
3328 (define_insn "*thumb1_ashlsi3"
3329 [(set (match_operand:SI 0 "register_operand" "=l,l")
3330 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3331 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3334 [(set_attr "length" "2")
3335 (set_attr "conds" "set")])
;; DImode arithmetic right shift; structure parallels ashldi3 above.
3337 (define_expand "ashrdi3"
3338 [(set (match_operand:DI 0 "s_register_operand" "")
3339 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3340 (match_operand:SI 2 "reg_or_int_operand" "")))]
3343 if (GET_CODE (operands[2]) == CONST_INT)
3345 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3347 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3350 /* Ideally we shouldn't fail here if we could know that operands[1]
3351 ends up already living in an iwmmxt register. Otherwise it's
3352 cheaper to have the alternate code being generated than moving
3353 values to iwmmxt regs and back. */
3356 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): shift high word setting carry, rotate the
;; carried-out bit into the low word with RRX.
3361 (define_insn "arm_ashrdi3_1bit"
3362 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3363 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3365 (clobber (reg:CC CC_REGNUM))]
3367 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3368 [(set_attr "conds" "clob")
3369 (set_attr "length" "8")]
;; SI arithmetic right shift: amounts > 31 are clamped to 31 (result is
;; then all sign bits, which is the defined behavior for ASR).
3372 (define_expand "ashrsi3"
3373 [(set (match_operand:SI 0 "s_register_operand" "")
3374 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3375 (match_operand:SI 2 "arm_rhs_operand" "")))]
3378 if (GET_CODE (operands[2]) == CONST_INT
3379 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3380 operands[2] = GEN_INT (31);
;; Thumb-1 ASR.
3384 (define_insn "*thumb1_ashrsi3"
3385 [(set (match_operand:SI 0 "register_operand" "=l,l")
3386 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3387 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3390 [(set_attr "length" "2")
3391 (set_attr "conds" "set")])
;; DImode logical right shift; same shape as ashrdi3.
3393 (define_expand "lshrdi3"
3394 [(set (match_operand:DI 0 "s_register_operand" "")
3395 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3396 (match_operand:SI 2 "reg_or_int_operand" "")))]
3399 if (GET_CODE (operands[2]) == CONST_INT)
3401 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3403 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3406 /* Ideally we shouldn't fail here if we could know that operands[1]
3407 ends up already living in an iwmmxt register. Otherwise it's
3408 cheaper to have the alternate code being generated than moving
3409 values to iwmmxt regs and back. */
3412 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (logical): like the arithmetic variant but LSR on the high word.
3417 (define_insn "arm_lshrdi3_1bit"
3418 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3419 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3421 (clobber (reg:CC CC_REGNUM))]
3423 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3424 [(set_attr "conds" "clob")
3425 (set_attr "length" "8")]
;; SI logical right shift: amounts > 31 give constant 0.
3428 (define_expand "lshrsi3"
3429 [(set (match_operand:SI 0 "s_register_operand" "")
3430 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3431 (match_operand:SI 2 "arm_rhs_operand" "")))]
3434 if (GET_CODE (operands[2]) == CONST_INT
3435 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3437 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 LSR.
3443 (define_insn "*thumb1_lshrsi3"
3444 [(set (match_operand:SI 0 "register_operand" "=l,l")
3445 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3446 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3449 [(set_attr "length" "2")
3450 (set_attr "conds" "set")])
;; Rotate left is implemented as rotate right by (32 - n) mod 32, since
;; ARM only has ROR; a register amount needs a runtime 32-n subtraction.
3452 (define_expand "rotlsi3"
3453 [(set (match_operand:SI 0 "s_register_operand" "")
3454 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3455 (match_operand:SI 2 "reg_or_int_operand" "")))]
3458 if (GET_CODE (operands[2]) == CONST_INT)
3459 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3462 rtx reg = gen_reg_rtx (SImode);
3463 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant amounts reduced mod 32; Thumb-1 requires the
;; amount in a register.
3469 (define_expand "rotrsi3"
3470 [(set (match_operand:SI 0 "s_register_operand" "")
3471 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3472 (match_operand:SI 2 "arm_rhs_operand" "")))]
3477 if (GET_CODE (operands[2]) == CONST_INT
3478 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3479 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3481 else /* TARGET_THUMB1 */
3483 if (GET_CODE (operands [2]) == CONST_INT)
3484 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 ROR (register amount only; destination ties to source 1).
3489 (define_insn "*thumb1_rotrsi3"
3490 [(set (match_operand:SI 0 "register_operand" "=l")
3491 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3492 (match_operand:SI 2 "register_operand" "l")))]
3495 [(set_attr "length" "2")]
;; Generic 32-bit shift insn; assembly produced by arm_output_shift.
;; Type attribute distinguishes immediate vs register shift amounts.
3498 (define_insn "*arm_shiftsi3"
3499 [(set (match_operand:SI 0 "s_register_operand" "=r")
3500 (match_operator:SI 3 "shift_operator"
3501 [(match_operand:SI 1 "s_register_operand" "r")
3502 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3504 "* return arm_output_shift(operands, 0);"
3505 [(set_attr "predicable" "yes")
3506 (set_attr "shift" "1")
3507 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3508 (const_string "alu_shift")
3509 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (flag-setting variant).
3512 (define_insn "*shiftsi3_compare0"
3513 [(set (reg:CC_NOOV CC_REGNUM)
3514 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3515 [(match_operand:SI 1 "s_register_operand" "r")
3516 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3518 (set (match_operand:SI 0 "s_register_operand" "=r")
3519 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3521 "* return arm_output_shift(operands, 1);"
3522 [(set_attr "conds" "set")
3523 (set_attr "shift" "1")
3524 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3525 (const_string "alu_shift")
3526 (const_string "alu_shift_reg")))]
;; Flag-setting shift whose shifted result itself is discarded.
3529 (define_insn "*shiftsi3_compare0_scratch"
3530 [(set (reg:CC_NOOV CC_REGNUM)
3531 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3532 [(match_operand:SI 1 "s_register_operand" "r")
3533 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3535 (clobber (match_scratch:SI 0 "=r"))]
3537 "* return arm_output_shift(operands, 1);"
3538 [(set_attr "conds" "set")
3539 (set_attr "shift" "1")]
;; MVN with shifted operand (NOT of a shift, single instruction).
3542 (define_insn "*not_shiftsi"
3543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3544 (not:SI (match_operator:SI 3 "shift_operator"
3545 [(match_operand:SI 1 "s_register_operand" "r,r")
3546 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3549 [(set_attr "predicable" "yes")
3550 (set_attr "shift" "1")
3551 (set_attr "arch" "32,a")
3552 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVN-with-shift, flag-setting, result kept.
3554 (define_insn "*not_shiftsi_compare0"
3555 [(set (reg:CC_NOOV CC_REGNUM)
3557 (not:SI (match_operator:SI 3 "shift_operator"
3558 [(match_operand:SI 1 "s_register_operand" "r,r")
3559 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3561 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3562 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3565 [(set_attr "conds" "set")
3566 (set_attr "shift" "1")
3567 (set_attr "arch" "32,a")
3568 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVN-with-shift, flag-setting, result discarded.
3570 (define_insn "*not_shiftsi_compare0_scratch"
3571 [(set (reg:CC_NOOV CC_REGNUM)
3573 (not:SI (match_operator:SI 3 "shift_operator"
3574 [(match_operand:SI 1 "s_register_operand" "r,r")
3575 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3577 (clobber (match_scratch:SI 0 "=r,r"))]
3580 [(set_attr "conds" "set")
3581 (set_attr "shift" "1")
3582 (set_attr "arch" "32,a")
3583 (set_attr "type" "alu_shift,alu_shift_reg")])
3585 ;; We don't really have extzv, but defining this using shifts helps
3586 ;; to reduce register pressure later on.
;; extzv via left-shift then logical-right-shift; on Thumb-2 it instead
;; delegates to the native UBFX pattern (extzv_t2 below).
3588 (define_expand "extzv"
3590 (ashift:SI (match_operand:SI 1 "register_operand" "")
3591 (match_operand:SI 2 "const_int_operand" "")))
3592 (set (match_operand:SI 0 "register_operand" "")
3593 (lshiftrt:SI (match_dup 4)
3594 (match_operand:SI 3 "const_int_operand" "")))]
3595 "TARGET_THUMB1 || arm_arch_thumb2"
3598 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3599 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3601 if (arm_arch_thumb2)
3603 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3608 operands[3] = GEN_INT (rshift);
3612 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3616 operands[2] = GEN_INT (lshift);
3617 operands[4] = gen_reg_rtx (SImode);
;; Signed bitfield extract — SBFX (header line of this define appears to
;; be missing from the extraction).
3622 [(set (match_operand:SI 0 "s_register_operand" "=r")
3623 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3624 (match_operand:SI 2 "const_int_operand" "M")
3625 (match_operand:SI 3 "const_int_operand" "M")))]
3627 "sbfx%?\t%0, %1, %3, %2"
3628 [(set_attr "length" "4")
3629 (set_attr "predicable" "yes")]
;; Unsigned bitfield extract — UBFX (Thumb-2 / ARMv6T2+).
3632 (define_insn "extzv_t2"
3633 [(set (match_operand:SI 0 "s_register_operand" "=r")
3634 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3635 (match_operand:SI 2 "const_int_operand" "M")
3636 (match_operand:SI 3 "const_int_operand" "M")))]
3638 "ubfx%?\t%0, %1, %3, %2"
3639 [(set_attr "length" "4")
3640 (set_attr "predicable" "yes")]
3644 ;; Unary arithmetic insns
;; NOTE(review): interior lines of these patterns appear to be missing
;; from the extraction; comments cover only the visible structure.

;; DImode negate; clobbers CC (done with a rsbs/rsc pair below).
3646 (define_expand "negdi2"
3648 [(set (match_operand:DI 0 "s_register_operand" "")
3649 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3650 (clobber (reg:CC CC_REGNUM))])]
3655 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3656 ;; The first alternative allows the common case of a *full* overlap.
3657 (define_insn "*arm_negdi2"
3658 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3659 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3660 (clobber (reg:CC CC_REGNUM))]
3662 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3663 [(set_attr "conds" "clob")
3664 (set_attr "length" "8")]
;; Thumb-1 DImode negate: 0 - value via neg + subtract-with-carry.
3667 (define_insn "*thumb1_negdi2"
3668 [(set (match_operand:DI 0 "register_operand" "=&l")
3669 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3670 (clobber (reg:CC CC_REGNUM))]
3672 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3673 [(set_attr "length" "6")]
;; SImode negate expander.
3676 (define_expand "negsi2"
3677 [(set (match_operand:SI 0 "s_register_operand" "")
3678 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; ARM negate: reverse-subtract from zero.
3683 (define_insn "*arm_negsi2"
3684 [(set (match_operand:SI 0 "s_register_operand" "=r")
3685 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3687 "rsb%?\\t%0, %1, #0"
3688 [(set_attr "predicable" "yes")]
;; Thumb-1 negate.
3691 (define_insn "*thumb1_negsi2"
3692 [(set (match_operand:SI 0 "register_operand" "=l")
3693 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3696 [(set_attr "length" "2")]
;; FP negate expanders; gated on FPA/VFP availability.
3699 (define_expand "negsf2"
3700 [(set (match_operand:SF 0 "s_register_operand" "")
3701 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3702 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3706 (define_expand "negdf2"
3707 [(set (match_operand:DF 0 "s_register_operand" "")
3708 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3709 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3712 ;; abssi2 doesn't really clobber the condition codes if a different register
3713 ;; is being set. To keep things simple, assume during rtl manipulations that
3714 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: operand 2 is either a scratch or the CC reg,
;; chosen by the visible preparation statements.
3717 (define_expand "abssi2"
3719 [(set (match_operand:SI 0 "s_register_operand" "")
3720 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3721 (clobber (match_dup 2))])]
3725 operands[2] = gen_rtx_SCRATCH (SImode);
3727 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs: alt 0 compares and conditionally negates (clobbers CC);
;; alt 1 is the branchless eor/sub-with-sign-mask form (CC untouched).
3730 (define_insn "*arm_abssi2"
3731 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3732 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3733 (clobber (reg:CC CC_REGNUM))]
3736 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3737 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3738 [(set_attr "conds" "clob,*")
3739 (set_attr "shift" "1")
3740 ;; predicable can't be set based on the variant, so left as no
3741 (set_attr "length" "8")]
;; Thumb-1 abs, split after reload into asr/add/xor (branchless).
3744 (define_insn_and_split "*thumb1_abssi2"
3745 [(set (match_operand:SI 0 "s_register_operand" "=l")
3746 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3747 (clobber (match_scratch:SI 2 "=&l"))]
3750 "TARGET_THUMB1 && reload_completed"
3751 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3752 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3753 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3755 [(set_attr "length" "6")]
;; -abs(x): mirror of *arm_abssi2 with the conditions/ops inverted.
3758 (define_insn "*arm_neg_abssi2"
3759 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3760 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3761 (clobber (reg:CC CC_REGNUM))]
3764 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3765 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3766 [(set_attr "conds" "clob,*")
3767 (set_attr "shift" "1")
3768 ;; predicable can't be set based on the variant, so left as no
3769 (set_attr "length" "8")]
;; Thumb-1 -abs(x), split after reload into asr/minus/xor.
3772 (define_insn_and_split "*thumb1_neg_abssi2"
3773 [(set (match_operand:SI 0 "s_register_operand" "=l")
3774 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3775 (clobber (match_scratch:SI 2 "=&l"))]
3778 "TARGET_THUMB1 && reload_completed"
3779 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3780 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3781 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3783 [(set_attr "length" "6")]
;; FP abs / sqrt expanders, gated on the available FP unit.
3786 (define_expand "abssf2"
3787 [(set (match_operand:SF 0 "s_register_operand" "")
3788 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3789 "TARGET_32BIT && TARGET_HARD_FLOAT"
3792 (define_expand "absdf2"
3793 [(set (match_operand:DF 0 "s_register_operand" "")
3794 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3795 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3798 (define_expand "sqrtsf2"
3799 [(set (match_operand:SF 0 "s_register_operand" "")
3800 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3801 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3804 (define_expand "sqrtdf2"
3805 [(set (match_operand:DF 0 "s_register_operand" "")
3806 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3807 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; DImode one's complement: split after reload into two SImode MVNs
;; on the low and high halves.
3810 (define_insn_and_split "one_cmpldi2"
3811 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3812 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3815 "TARGET_32BIT && reload_completed"
3816 [(set (match_dup 0) (not:SI (match_dup 1)))
3817 (set (match_dup 2) (not:SI (match_dup 3)))]
3820 operands[2] = gen_highpart (SImode, operands[0]);
3821 operands[0] = gen_lowpart (SImode, operands[0]);
3822 operands[3] = gen_highpart (SImode, operands[1]);
3823 operands[1] = gen_lowpart (SImode, operands[1]);
3825 [(set_attr "length" "8")
3826 (set_attr "predicable" "yes")]
;; SImode one's complement expander and per-ISA insns (MVN).
3829 (define_expand "one_cmplsi2"
3830 [(set (match_operand:SI 0 "s_register_operand" "")
3831 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3836 (define_insn "*arm_one_cmplsi2"
3837 [(set (match_operand:SI 0 "s_register_operand" "=r")
3838 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3841 [(set_attr "predicable" "yes")]
3844 (define_insn "*thumb1_one_cmplsi2"
3845 [(set (match_operand:SI 0 "register_operand" "=l")
3846 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3849 [(set_attr "length" "2")]
;; Flag-setting MVN, result kept.
3852 (define_insn "*notsi_compare0"
3853 [(set (reg:CC_NOOV CC_REGNUM)
3854 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3856 (set (match_operand:SI 0 "s_register_operand" "=r")
3857 (not:SI (match_dup 1)))]
3860 [(set_attr "conds" "set")]
;; Flag-setting MVN, result discarded.
3863 (define_insn "*notsi_compare0_scratch"
3864 [(set (reg:CC_NOOV CC_REGNUM)
3865 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3867 (clobber (match_scratch:SI 0 "=r"))]
3870 [(set_attr "conds" "set")]
3873 ;; Fixed <--> Floating conversion insns
;; int -> half-float goes via SFmode (no direct SI->HF conversion).
3875 (define_expand "floatsihf2"
3876 [(set (match_operand:HF 0 "general_operand" "")
3877 (float:HF (match_operand:SI 1 "general_operand" "")))]
3881 rtx op1 = gen_reg_rtx (SFmode);
3882 expand_float (op1, operands[1], 0);
3883 op1 = convert_to_mode (HFmode, op1, 0);
3884 emit_move_insn (operands[0], op1);
;; DImode -> half-float, likewise via SFmode.
3889 (define_expand "floatdihf2"
3890 [(set (match_operand:HF 0 "general_operand" "")
3891 (float:HF (match_operand:DI 1 "general_operand" "")))]
3895 rtx op1 = gen_reg_rtx (SFmode);
3896 expand_float (op1, operands[1], 0);
3897 op1 = convert_to_mode (HFmode, op1, 0);
3898 emit_move_insn (operands[0], op1);
;; int -> float: Maverick (Cirrus) has its own conversion pattern.
3903 (define_expand "floatsisf2"
3904 [(set (match_operand:SF 0 "s_register_operand" "")
3905 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3906 "TARGET_32BIT && TARGET_HARD_FLOAT"
3908 if (TARGET_MAVERICK)
3910 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3915 (define_expand "floatsidf2"
3916 [(set (match_operand:DF 0 "s_register_operand" "")
3917 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3918 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3920 if (TARGET_MAVERICK)
3922 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; half-float -> int: widen to SFmode first, then truncate to integer.
3927 (define_expand "fix_trunchfsi2"
3928 [(set (match_operand:SI 0 "general_operand" "")
3929 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3933 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3934 expand_fix (operands[0], op1, 0);
3939 (define_expand "fix_trunchfdi2"
3940 [(set (match_operand:DI 0 "general_operand" "")
3941 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3945 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3946 expand_fix (operands[0], op1, 0);
;; float -> int truncation.  For Maverick (Cirrus) targets both operands
;; must live in Cirrus-accessible registers before gen_cirrus_truncsfsi2.
3951 (define_expand "fix_truncsfsi2"
3952 [(set (match_operand:SI 0 "s_register_operand" "")
3953 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3954 "TARGET_32BIT && TARGET_HARD_FLOAT"
3956 if (TARGET_MAVERICK)
3958 if (!cirrus_fp_register (operands[0], SImode))
3959 operands[0] = force_reg (SImode, operands[0]);
3960 if (!cirrus_fp_register (operands[1], SFmode))
;; Fixed copy-paste bug: previously forced operands[0] (the SImode
;; destination) into an SFmode register here; the SFmode *source*
;; operands[1] is what must be legitimized.
3961 operands[1] = force_reg (SFmode, operands[1]);
3962 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; double -> int truncation.  For Maverick (Cirrus) targets the DFmode
;; source must live in a Cirrus-accessible register before the call.
3967 (define_expand "fix_truncdfsi2"
3968 [(set (match_operand:SI 0 "s_register_operand" "")
3969 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3970 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3972 if (TARGET_MAVERICK)
3974 if (!cirrus_fp_register (operands[1], DFmode))
;; Fixed copy-paste bug: previously forced operands[0] (the SImode
;; destination) into a DFmode register; the DFmode *source*
;; operands[1] is what must be legitimized.
3975 operands[1] = force_reg (DFmode, operands[1]);
3976 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3983 (define_expand "truncdfsf2"
3984 [(set (match_operand:SF 0 "s_register_operand" "")
3986 (match_operand:DF 1 "s_register_operand" "")))]
3987 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3991 /* DFmode -> HFmode conversions have to go through SFmode. */
3992 (define_expand "truncdfhf2"
3993 [(set (match_operand:HF 0 "general_operand" "")
3995 (match_operand:DF 1 "general_operand" "")))]
4000 op1 = convert_to_mode (SFmode, operands[1], 0);
4001 op1 = convert_to_mode (HFmode, op1, 0);
4002 emit_move_insn (operands[0], op1);
4007 ;; Zero and sign extension instructions.
4009 (define_expand "zero_extendsidi2"
4010 [(set (match_operand:DI 0 "s_register_operand" "")
4011 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
4016 (define_insn "*arm_zero_extendsidi2"
4017 [(set (match_operand:DI 0 "s_register_operand" "=r")
4018 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
4021 if (REGNO (operands[1])
4022 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
4023 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4024 return \"mov%?\\t%R0, #0\";
4026 [(set_attr "length" "8")
4027 (set_attr "predicable" "yes")]
4030 (define_expand "zero_extendqidi2"
4031 [(set (match_operand:DI 0 "s_register_operand" "")
4032 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
4037 (define_insn "*arm_zero_extendqidi2"
4038 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
4039 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4042 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
4043 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
4044 [(set_attr "length" "8")
4045 (set_attr "predicable" "yes")
4046 (set_attr "type" "*,load_byte")
4047 (set_attr "pool_range" "*,4092")
4048 (set_attr "neg_pool_range" "*,4084")]
4051 (define_expand "extendsidi2"
4052 [(set (match_operand:DI 0 "s_register_operand" "")
4053 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
4058 (define_insn "*arm_extendsidi2"
4059 [(set (match_operand:DI 0 "s_register_operand" "=r")
4060 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
4063 if (REGNO (operands[1])
4064 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
4065 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4066 return \"mov%?\\t%R0, %Q0, asr #31\";
4068 [(set_attr "length" "8")
4069 (set_attr "shift" "1")
4070 (set_attr "predicable" "yes")]
4073 (define_expand "zero_extendhisi2"
4074 [(set (match_operand:SI 0 "s_register_operand" "")
4075 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4078 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4080 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4083 if (!arm_arch6 && !MEM_P (operands[1]))
4085 rtx t = gen_lowpart (SImode, operands[1]);
4086 rtx tmp = gen_reg_rtx (SImode);
4087 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4088 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4094 [(set (match_operand:SI 0 "register_operand" "")
4095 (zero_extend:SI (match_operand:HI 1 "register_operand" "")))]
4096 "!TARGET_THUMB2 && !arm_arch6"
4097 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4098 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4100 operands[2] = gen_lowpart (SImode, operands[1]);
4103 (define_insn "*thumb1_zero_extendhisi2"
4104 [(set (match_operand:SI 0 "register_operand" "=l,l")
4105 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4110 if (which_alternative == 0 && arm_arch6)
4111 return \"uxth\\t%0, %1\";
4112 if (which_alternative == 0)
4115 mem = XEXP (operands[1], 0);
4117 if (GET_CODE (mem) == CONST)
4118 mem = XEXP (mem, 0);
4120 if (GET_CODE (mem) == LABEL_REF)
4121 return \"ldr\\t%0, %1\";
4123 if (GET_CODE (mem) == PLUS)
4125 rtx a = XEXP (mem, 0);
4126 rtx b = XEXP (mem, 1);
4128 /* This can happen due to bugs in reload. */
4129 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4132 ops[0] = operands[0];
4135 output_asm_insn (\"mov %0, %1\", ops);
4137 XEXP (mem, 0) = operands[0];
4140 else if ( GET_CODE (a) == LABEL_REF
4141 && GET_CODE (b) == CONST_INT)
4142 return \"ldr\\t%0, %1\";
4145 return \"ldrh\\t%0, %1\";
4147 [(set_attr_alternative "length"
4148 [(if_then_else (eq_attr "is_arch6" "yes")
4149 (const_int 2) (const_int 4))
4151 (set_attr "type" "alu_shift,load_byte")
4152 (set_attr "pool_range" "*,60")]
4155 (define_insn "*arm_zero_extendhisi2"
4156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4157 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4158 "TARGET_ARM && arm_arch4 && !arm_arch6"
4162 [(set_attr "type" "alu_shift,load_byte")
4163 (set_attr "predicable" "yes")
4164 (set_attr "pool_range" "*,256")
4165 (set_attr "neg_pool_range" "*,244")]
4168 (define_insn "*arm_zero_extendhisi2_v6"
4169 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4170 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4171 "TARGET_ARM && arm_arch6"
4175 [(set_attr "type" "alu_shift,load_byte")
4176 (set_attr "predicable" "yes")
4177 (set_attr "pool_range" "*,256")
4178 (set_attr "neg_pool_range" "*,244")]
4181 (define_insn "*arm_zero_extendhisi2addsi"
4182 [(set (match_operand:SI 0 "s_register_operand" "=r")
4183 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4184 (match_operand:SI 2 "s_register_operand" "r")))]
4186 "uxtah%?\\t%0, %2, %1"
4187 [(set_attr "type" "alu_shift")
4188 (set_attr "predicable" "yes")]
4191 (define_expand "zero_extendqisi2"
4192 [(set (match_operand:SI 0 "s_register_operand" "")
4193 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4196 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4198 emit_insn (gen_andsi3 (operands[0],
4199 gen_lowpart (SImode, operands[1]),
4203 if (!arm_arch6 && !MEM_P (operands[1]))
4205 rtx t = gen_lowpart (SImode, operands[1]);
4206 rtx tmp = gen_reg_rtx (SImode);
4207 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4208 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4214 [(set (match_operand:SI 0 "register_operand" "")
4215 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4217 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4218 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4220 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4223 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4228 (define_insn "*thumb1_zero_extendqisi2"
4229 [(set (match_operand:SI 0 "register_operand" "=l,l")
4230 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4231 "TARGET_THUMB1 && !arm_arch6"
4235 [(set_attr "length" "4,2")
4236 (set_attr "type" "alu_shift,load_byte")
4237 (set_attr "pool_range" "*,32")]
4240 (define_insn "*thumb1_zero_extendqisi2_v6"
4241 [(set (match_operand:SI 0 "register_operand" "=l,l")
4242 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4243 "TARGET_THUMB1 && arm_arch6"
4247 [(set_attr "length" "2,2")
4248 (set_attr "type" "alu_shift,load_byte")
4249 (set_attr "pool_range" "*,32")]
;; ARM-state zero-extension of QImode to SImode.  Pre-v6: register
;; alternative is 8 bytes (shift pair -- template line missing from this
;; extract); memory alternative is a predicable ldrb with 4096-byte pool
;; range.  The v6 variant drops the explicit lengths (uxtb is one insn).
4252 (define_insn "*arm_zero_extendqisi2"
4253 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4254 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4255 "TARGET_ARM && !arm_arch6"
4258 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4259 [(set_attr "length" "8,4")
4260 (set_attr "type" "alu_shift,load_byte")
4261 (set_attr "predicable" "yes")
4262 (set_attr "pool_range" "*,4096")
4263 (set_attr "neg_pool_range" "*,4084")]
4266 (define_insn "*arm_zero_extendqisi2_v6"
4267 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4268 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4269 "TARGET_ARM && arm_arch6"
4272 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4273 [(set_attr "type" "alu_shift,load_byte")
4274 (set_attr "predicable" "yes")
4275 (set_attr "pool_range" "*,4096")
4276 (set_attr "neg_pool_range" "*,4084")]
;; Fused zero-extend-and-add: uxtab computes %0 = %2 + zero_extend(%1).
4279 (define_insn "*arm_zero_extendqisi2addsi"
4280 [(set (match_operand:SI 0 "s_register_operand" "=r")
4281 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4282 (match_operand:SI 2 "s_register_operand" "r")))]
4284 "uxtab%?\\t%0, %2, %1"
4285 [(set_attr "predicable" "yes")
4286 (set_attr "insn" "xtab")
4287 (set_attr "type" "alu_shift")]
;; Two splits (their define_split headers are missing from this extract)
;; that rewrite zero_extend of the low byte of an SI register as a copy
;; followed by "and #255".  Subreg byte 0 is the LSB on little-endian;
;; byte 3 is the LSB on big-endian, hence the paired BYTES_BIG_ENDIAN
;; conditions.  Both require a non-MEM source.
4291 [(set (match_operand:SI 0 "s_register_operand" "")
4292 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4293 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4294 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4295 [(set (match_dup 2) (match_dup 1))
4296 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian twin of the split above.
4301 [(set (match_operand:SI 0 "s_register_operand" "")
4302 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4303 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4304 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4305 [(set (match_dup 2) (match_dup 1))
4306 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split (header missing from this extract) for IOR/XOR of a shifted
;; value masked by operand 3 with a lowpart subreg.  The condition
;; checks that the mask equals exactly the bits of the subreg's mode
;; that survive the shift, so the AND can be dropped and the second
;; operand expressed as a zero_extend of a lowpart of operand 0.
4312 [(set (match_operand:SI 0 "s_register_operand" "")
4313 (ior_xor:SI (and:SI (ashift:SI
4314 (match_operand:SI 1 "s_register_operand" "")
4315 (match_operand:SI 2 "const_int_operand" ""))
4316 (match_operand:SI 3 "const_int_operand" ""))
4318 (match_operator 5 "subreg_lowpart_operator"
4319 [(match_operand:SI 4 "s_register_operand" "")]))))]
4321 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4322 == (GET_MODE_MASK (GET_MODE (operands[5]))
4323 & (GET_MODE_MASK (GET_MODE (operands[5]))
4324 << (INTVAL (operands[2])))))"
4325 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4327 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4328 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Zmode).  Output template missing from this extract.
4331 (define_insn "*compareqi_eq0"
4332 [(set (reg:CC_Z CC_REGNUM)
4333 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4337 [(set_attr "conds" "set")]
;; HImode -> SImode sign extension expander.  Dispatches: Thumb-1 goes
;; via gen_thumb1_extendhisi2; pre-v4 ARM loads from memory via the
;; extendhisi2_mem helper; pre-v6 register sources use an explicit
;; shift-left-16 / arithmetic-shift-right-16 pair.  Several control-flow
;; lines (braces, DONE statements) are missing from this extract.
4340 (define_expand "extendhisi2"
4341 [(set (match_operand:SI 0 "s_register_operand" "")
4342 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4347 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4350 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4352 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4356 if (!arm_arch6 && !MEM_P (operands[1]))
4358 rtx t = gen_lowpart (SImode, operands[1]);
4359 rtx tmp = gen_reg_rtx (SImode);
4360 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4361 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (header missing): reduce a register sign-extend with a dead
;; scratch clobber to the shl-16 / asr-16 pair on the SImode lowpart.
4368 [(set (match_operand:SI 0 "register_operand" "")
4369 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4370 (clobber (match_scratch:SI 2 ""))])]
4372 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4373 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4375 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4378 ;; We used to have an early-clobber on the scratch register here.
4379 ;; However, there's a bug somewhere in reload which means that this
4380 ;; can be partially ignored during spill allocation if the memory
4381 ;; address also needs reloading; this causes us to die later on when
4382 ;; we try to verify the operands. Fortunately, we don't really need
4383 ;; the early-clobber: we can always use operand 0 if operand 2
4384 ;; overlaps the address.
;; Thumb-1 sign-extend HI->SI.  Alternative 0 is register-to-register
;; (sxth on v6; otherwise a shift pair -- see the comment above about
;; why the scratch has no early-clobber).  Alternative 1 loads from
;; memory, hand-classifying the address: constant-pool references use a
;; plain ldr (pool entries are emitted sign-extended); reg+reg uses
;; ldrsh directly; other forms materialize an index in the scratch (or
;; in %0 when the scratch overlaps the address) and then ldrsh.
;; Many lines of the C output body are missing from this extract.
4385 (define_insn "thumb1_extendhisi2"
4386 [(set (match_operand:SI 0 "register_operand" "=l,l")
4387 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4388 (clobber (match_scratch:SI 2 "=X,l"))]
4395 if (which_alternative == 0 && !arm_arch6)
4397 if (which_alternative == 0)
4398 return \"sxth\\t%0, %1\";
4400 mem = XEXP (operands[1], 0);
4402 /* This code used to try to use 'V', and fix the address only if it was
4403 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4404 range of QImode offsets, and offsettable_address_p does a QImode
4407 if (GET_CODE (mem) == CONST)
4408 mem = XEXP (mem, 0);
4410 if (GET_CODE (mem) == LABEL_REF)
4411 return \"ldr\\t%0, %1\";
4413 if (GET_CODE (mem) == PLUS)
4415 rtx a = XEXP (mem, 0);
4416 rtx b = XEXP (mem, 1);
4418 if (GET_CODE (a) == LABEL_REF
4419 && GET_CODE (b) == CONST_INT)
4420 return \"ldr\\t%0, %1\";
4422 if (GET_CODE (b) == REG)
4423 return \"ldrsh\\t%0, %1\";
4431 ops[2] = const0_rtx;
4434 gcc_assert (GET_CODE (ops[1]) == REG);
4436 ops[0] = operands[0];
4437 if (reg_mentioned_p (operands[2], ops[1]))
4440 ops[3] = operands[2];
4441 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4444 [(set_attr_alternative "length"
4445 [(if_then_else (eq_attr "is_arch6" "yes")
4446 (const_int 2) (const_int 4))
4448 (set_attr "type" "alu_shift,load_byte")
4449 (set_attr "pool_range" "*,1020")]
4452 ;; This pattern will only be used when ldsh is not available
;; Synthesize a sign-extending halfword load from two byte loads for
;; pre-v4 ARM (no ldrsh): load both bytes zero-extended, shift the
;; sign-carrying byte left 24 then arithmetic-shift right 16, and OR in
;; the other byte.  operands[4]/[5] select which temporary holds the
;; high/low byte depending on endianness.  Some pattern and brace lines
;; are missing from this extract.
4453 (define_expand "extendhisi2_mem"
4454 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4456 (zero_extend:SI (match_dup 7)))
4457 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4458 (set (match_operand:SI 0 "" "")
4459 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4464 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4466 mem1 = change_address (operands[1], QImode, addr);
4467 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4468 operands[0] = gen_lowpart (SImode, operands[0]);
4470 operands[2] = gen_reg_rtx (SImode);
4471 operands[3] = gen_reg_rtx (SImode);
4472 operands[6] = gen_reg_rtx (SImode);
4475 if (BYTES_BIG_ENDIAN)
4477 operands[4] = operands[2];
4478 operands[5] = operands[3];
4482 operands[4] = operands[3];
4483 operands[5] = operands[2];
;; Split (header missing): register HI sign-extend as shl-16 / asr-16.
4489 [(set (match_operand:SI 0 "register_operand" "")
4490 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4492 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4493 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4495 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM sign-extend HI->SI insns.  Pre-v6 (v4+): 8-byte shift pair for
;; registers, 4-byte ldrsh for memory (output templates missing from
;; this extract).  The v6 variant covers all 32-bit targets.  ldrsh has
;; only a 256-byte literal-pool range (addressing mode 3).
4498 (define_insn "*arm_extendhisi2"
4499 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4500 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4501 "TARGET_ARM && arm_arch4 && !arm_arch6"
4505 [(set_attr "length" "8,4")
4506 (set_attr "type" "alu_shift,load_byte")
4507 (set_attr "predicable" "yes")
4508 (set_attr "pool_range" "*,256")
4509 (set_attr "neg_pool_range" "*,244")]
4512 ;; ??? Check Thumb-2 pool range
4513 (define_insn "*arm_extendhisi2_v6"
4514 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4515 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4516 "TARGET_32BIT && arm_arch6"
4520 [(set_attr "type" "alu_shift,load_byte")
4521 (set_attr "predicable" "yes")
4522 (set_attr "pool_range" "*,256")
4523 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtah computes %0 = %2 + sign_extend(%1).
4526 (define_insn "*arm_extendhisi2addsi"
4527 [(set (match_operand:SI 0 "s_register_operand" "=r")
4528 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4529 (match_operand:SI 2 "s_register_operand" "r")))]
4531 "sxtah%?\\t%0, %2, %1"
;; QImode -> HImode sign extension.  With v4 and a memory source, emit
;; a direct HImode sign-extend (matched by *arm_extendqihi_insn below);
;; otherwise force the source to a register and fall through to the
;; shl-24 / asr-24 shift pair on SImode lowparts.  Several brace/DONE
;; lines are missing from this extract.
4534 (define_expand "extendqihi2"
4536 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4538 (set (match_operand:HI 0 "s_register_operand" "")
4539 (ashiftrt:SI (match_dup 2)
4544 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4546 emit_insn (gen_rtx_SET (VOIDmode,
4548 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4551 if (!s_register_operand (operands[1], QImode))
4552 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4553 operands[0] = gen_lowpart (SImode, operands[0]);
4554 operands[1] = gen_lowpart (SImode, operands[1]);
4555 operands[2] = gen_reg_rtx (SImode);
;; ldrsb from memory; 'Uq' restricts to addresses valid for ldrsb
;; (ARM addressing mode 3, hence the short 256-byte pool range).
4559 (define_insn "*arm_extendqihi_insn"
4560 [(set (match_operand:HI 0 "s_register_operand" "=r")
4561 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4562 "TARGET_ARM && arm_arch4"
4563 "ldr%(sb%)\\t%0, %1"
4564 [(set_attr "type" "load_byte")
4565 (set_attr "predicable" "yes")
4566 (set_attr "pool_range" "256")
4567 (set_attr "neg_pool_range" "244")]
;; QImode -> SImode sign extension expander.  Pre-v4 cannot ldrsb, so
;; memory sources are forced into a register first; pre-v6 register
;; sources expand to the shl-24 / asr-24 pair.  v6+ falls through to
;; the sxtb-capable insns below.
4570 (define_expand "extendqisi2"
4571 [(set (match_operand:SI 0 "s_register_operand" "")
4572 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4575 if (!arm_arch4 && MEM_P (operands[1]))
4576 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4578 if (!arm_arch6 && !MEM_P (operands[1]))
4580 rtx t = gen_lowpart (SImode, operands[1]);
4581 rtx tmp = gen_reg_rtx (SImode);
4582 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4583 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (header missing): register QI sign-extend as shl-24 / asr-24.
4589 [(set (match_operand:SI 0 "register_operand" "")
4590 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4592 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4593 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4595 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM sign-extend QI->SI insns (output templates missing from this
;; extract).  Pre-v6: 8-byte shift pair for registers, 4-byte ldrsb for
;; memory; v6 drops the lengths (sxtb handles registers in one insn).
4598 (define_insn "*arm_extendqisi"
4599 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4600 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4601 "TARGET_ARM && arm_arch4 && !arm_arch6"
4605 [(set_attr "length" "8,4")
4606 (set_attr "type" "alu_shift,load_byte")
4607 (set_attr "predicable" "yes")
4608 (set_attr "pool_range" "*,256")
4609 (set_attr "neg_pool_range" "*,244")]
4612 (define_insn "*arm_extendqisi_v6"
4613 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4615 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4616 "TARGET_ARM && arm_arch6"
4620 [(set_attr "type" "alu_shift,load_byte")
4621 (set_attr "predicable" "yes")
4622 (set_attr "pool_range" "*,256")
4623 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtab computes %0 = %2 + sign_extend(%1).
4626 (define_insn "*arm_extendqisi2addsi"
4627 [(set (match_operand:SI 0 "s_register_operand" "=r")
4628 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4629 (match_operand:SI 2 "s_register_operand" "r")))]
4631 "sxtab%?\\t%0, %2, %1"
4632 [(set_attr "type" "alu_shift")
4633 (set_attr "insn" "xtab")
4634 (set_attr "predicable" "yes")]
;; Split (header missing from this extract) for Thumb-1 sign-extending
;; byte loads after reload.  Thumb-1 ldrsb only supports reg+reg
;; addressing, so other address shapes are rewritten: one address term
;; is moved into %0 (or, when %0 overlaps the address, the byte is
;; moved via the lowpart of %0 and re-extended), then the load uses a
;; reg+reg address.  operands[2] gets the moved term, operands[3] the
;; rebuilt QImode memory reference.
4638 [(set (match_operand:SI 0 "register_operand" "")
4639 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4640 "TARGET_THUMB1 && reload_completed"
4641 [(set (match_dup 0) (match_dup 2))
4642 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4644 rtx addr = XEXP (operands[1], 0);
4646 if (GET_CODE (addr) == CONST)
4647 addr = XEXP (addr, 0);
4649 if (GET_CODE (addr) == PLUS
4650 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4651 /* No split necessary. */
4654 if (GET_CODE (addr) == PLUS
4655 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4658 if (reg_overlap_mentioned_p (operands[0], addr))
4660 rtx t = gen_lowpart (QImode, operands[0]);
4661 emit_move_insn (t, operands[1]);
4662 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4668 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4669 operands[2] = const0_rtx;
4671 else if (GET_CODE (addr) != PLUS)
4673 else if (REG_P (XEXP (addr, 0)))
4675 operands[2] = XEXP (addr, 1);
4676 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4680 operands[2] = XEXP (addr, 0);
4681 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4684 operands[3] = change_address (operands[1], QImode, addr);
;; Peephole2 (header missing): collapse "add %0,const; mov %2,#0;
;; ldrsb %3,[%0,%2]" back into "mov %2,const; ldrsb %3,[%0,%2]" when
;; the scratch registers are dead or equal to the destination.
4688 [(set (match_operand:SI 0 "register_operand" "")
4689 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4690 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4691 (set (match_operand:SI 3 "register_operand" "")
4692 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4694 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4695 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4696 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4697 && (peep2_reg_dead_p (3, operands[0])
4698 || rtx_equal_p (operands[0], operands[3]))
4699 && (peep2_reg_dead_p (3, operands[2])
4700 || rtx_equal_p (operands[2], operands[3]))"
4701 [(set (match_dup 2) (match_dup 1))
4702 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4704 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4705 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb-1 sign-extend QI->SI.  Alternative 0: register (sxtb on v6,
;; otherwise a shift pair -- lines missing from this extract);
;; alternative 1 ('V'): ldrsb with a reg+reg address; alternative 2:
;; other memory, handled by the longer sequence whose body is missing
;; here.  Lengths vary with arch (see set_attr_alternative).
4708 (define_insn "thumb1_extendqisi2"
4709 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4710 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4715 if (which_alternative == 0 && arm_arch6)
4716 return "sxtb\\t%0, %1";
4717 if (which_alternative == 0)
4720 addr = XEXP (operands[1], 0);
4721 if (GET_CODE (addr) == PLUS
4722 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4723 return "ldrsb\\t%0, %1";
4727 [(set_attr_alternative "length"
4728 [(if_then_else (eq_attr "is_arch6" "yes")
4729 (const_int 2) (const_int 4))
4731 (if_then_else (eq_attr "is_arch6" "yes")
4732 (const_int 4) (const_int 6))])
4733 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; SFmode -> DFmode float extension for hard-float targets with
;; double-precision support (excluded for single-precision-only VFP).
4736 (define_expand "extendsfdf2"
4737 [(set (match_operand:DF 0 "s_register_operand" "")
4738 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4739 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4743 /* HFmode -> DFmode conversions have to go through SFmode. */
4744 (define_expand "extendhfdf2"
4745 [(set (match_operand:DF 0 "general_operand" "")
4746 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
;; Widen HF to SF first, then SF to DF, and emit the final DF move.
4751 op1 = convert_to_mode (SFmode, operands[1], 0);
4752 op1 = convert_to_mode (DFmode, op1, 0);
4753 emit_insn (gen_movdf (operands[0], op1));
4758 ;; Move insns (including loads and stores)
4760 ;; XXX Just some ideas about movti.
4761 ;; I don't think these are a good idea on the arm, there just aren't enough
4763 ;;(define_expand "loadti"
4764 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4765 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4768 ;;(define_expand "storeti"
4769 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4770 ;; (match_operand:TI 1 "s_register_operand" ""))]
4773 ;;(define_expand "movti"
4774 ;; [(set (match_operand:TI 0 "general_operand" "")
4775 ;; (match_operand:TI 1 "general_operand" ""))]
4781 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4782 ;; operands[1] = copy_to_reg (operands[1]);
4783 ;; if (GET_CODE (operands[0]) == MEM)
4784 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4785 ;; else if (GET_CODE (operands[1]) == MEM)
4786 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4790 ;; emit_insn (insn);
4794 ;; Recognize garbage generated above.
4797 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4798 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4802 ;; register mem = (which_alternative < 3);
4803 ;; register const char *template;
4805 ;; operands[mem] = XEXP (operands[mem], 0);
4806 ;; switch (which_alternative)
4808 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4809 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4810 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4811 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4812 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4813 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4815 ;; output_asm_insn (template, operands);
4819 (define_expand "movdi"
4820 [(set (match_operand:DI 0 "general_operand" "")
4821 (match_operand:DI 1 "general_operand" ""))]
4824 if (can_create_pseudo_p ())
4826 if (GET_CODE (operands[0]) != REG)
4827 operands[1] = force_reg (DImode, operands[1]);
4832 (define_insn "*arm_movdi"
4833 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4834 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4836 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4838 && ( register_operand (operands[0], DImode)
4839 || register_operand (operands[1], DImode))"
4841 switch (which_alternative)
4848 return output_move_double (operands);
4851 [(set_attr "length" "8,12,16,8,8")
4852 (set_attr "type" "*,*,*,load2,store2")
4853 (set_attr "arm_pool_range" "*,*,*,1020,*")
4854 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4855 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4856 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split (header missing from this extract): expand a cheap 64-bit
;; constant inline as two arm_split_constant calls, one for each
;; SImode half, when the inline cost is at most 3 (size/ld-sched) or 4.
4860 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4861 (match_operand:ANY64 1 "const_double_operand" ""))]
4864 && (arm_const_double_inline_cost (operands[1])
4865 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4868 arm_split_constant (SET, SImode, curr_insn,
4869 INTVAL (gen_lowpart (SImode, operands[1])),
4870 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4871 arm_split_constant (SET, SImode, curr_insn,
4872 INTVAL (gen_highpart_mode (SImode,
4873 GET_MODE (operands[0]),
4875 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4880 ; If optimizing for size, or if we have load delay slots, then
4881 ; we want to split the constant into two separate operations.
4882 ; In both cases this may split a trivial part into a single data op
4883 ; leaving a single complex constant to load. We can also get longer
4884 ; offsets in a LDR which means we get better chances of sharing the pool
4885 ; entries. Finally, we can normally do a better job of scheduling
4886 ; LDR instructions than we can with LDM.
4887 ; This pattern will only match if the one above did not.
;; Split (header missing): 64-bit constant as two independent SImode
;; sets, low half then high half.
4889 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4890 (match_operand:ANY64 1 "const_double_operand" ""))]
4891 "TARGET_ARM && reload_completed
4892 && arm_const_double_by_parts (operands[1])"
4893 [(set (match_dup 0) (match_dup 1))
4894 (set (match_dup 2) (match_dup 3))]
4896 operands[2] = gen_highpart (SImode, operands[0]);
4897 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4899 operands[0] = gen_lowpart (SImode, operands[0]);
4900 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split (header missing): 64-bit register-to-register move as two
;; SImode moves, swapping the order when the halves partially overlap
;; so the first move does not clobber the second move's source.
4905 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4906 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4907 "TARGET_EITHER && reload_completed"
4908 [(set (match_dup 0) (match_dup 1))
4909 (set (match_dup 2) (match_dup 3))]
4911 operands[2] = gen_highpart (SImode, operands[0]);
4912 operands[3] = gen_highpart (SImode, operands[1]);
4913 operands[0] = gen_lowpart (SImode, operands[0]);
4914 operands[1] = gen_lowpart (SImode, operands[1]);
4916 /* Handle a partial overlap. */
4917 if (rtx_equal_p (operands[0], operands[3]))
4919 rtx tmp0 = operands[0];
4920 rtx tmp1 = operands[1];
4922 operands[0] = operands[2];
4923 operands[1] = operands[3];
4930 ;; We can't actually do base+index doubleword loads if the index and
4931 ;; destination overlap. Split here so that we at least have chance to
;; Split (header missing from this extract): compute base+index into
;; the first destination register (operands[4] = low reg of %0), then
;; do a register-offset DImode load from it.
4934 [(set (match_operand:DI 0 "s_register_operand" "")
4935 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4936 (match_operand:SI 2 "s_register_operand" ""))))]
4938 && reg_overlap_mentioned_p (operands[0], operands[1])
4939 && reg_overlap_mentioned_p (operands[0], operands[2])"
4941 (plus:SI (match_dup 1)
4944 (mem:DI (match_dup 4)))]
4946 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4950 ;;; ??? This should have alternatives for constants.
4951 ;;; ??? This was originally identical to the movdf_insn pattern.
4952 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4953 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move.  Register pairs move via two add/mov insns,
;; ordered so an overlapping destination half is written last; small
;; positive constants ('I') zero the high word; 'J' (negative) builds
;; the value with mov/neg/asr; ldmia/stmia handle memory and
;; auto-increment forms.  Case labels are missing from this extract.
4954 (define_insn "*thumb1_movdi_insn"
4955 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4956 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4958 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4959 && ( register_operand (operands[0], DImode)
4960 || register_operand (operands[1], DImode))"
4963 switch (which_alternative)
4967 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4968 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4969 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4971 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4973 operands[1] = GEN_INT (- INTVAL (operands[1]));
4974 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4976 return \"ldmia\\t%1, {%0, %H0}\";
4978 return \"stmia\\t%0, {%1, %H1}\";
4980 return thumb_load_double_from_address (operands);
4982 operands[2] = gen_rtx_MEM (SImode,
4983 plus_constant (XEXP (operands[0], 0), 4));
4984 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4987 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4988 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4989 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4992 [(set_attr "length" "4,4,6,2,2,6,4,4")
4993 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4994 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles, in order: forcing register sources
;; for stores; splitting constants that no single mov/mvn can encode;
;; movw/movt pairs for non-PIC, non-TLS symbols; the Thumb-1
;; register-destination rule; section-anchor offsets that escape their
;; block; TLS address legitimization; and PIC legitimization of
;; constant/symbolic sources.  Many brace/DONE lines are missing from
;; this extract.
4997 (define_expand "movsi"
4998 [(set (match_operand:SI 0 "general_operand" "")
4999 (match_operand:SI 1 "general_operand" ""))]
5003 rtx base, offset, tmp;
5007 /* Everything except mem = const or mem = mem can be done easily. */
5008 if (GET_CODE (operands[0]) == MEM)
5009 operands[1] = force_reg (SImode, operands[1]);
5010 if (arm_general_register_operand (operands[0], SImode)
5011 && GET_CODE (operands[1]) == CONST_INT
5012 && !(const_ok_for_arm (INTVAL (operands[1]))
5013 || const_ok_for_arm (~INTVAL (operands[1]))))
5015 arm_split_constant (SET, SImode, NULL_RTX,
5016 INTVAL (operands[1]), operands[0], NULL_RTX,
5017 optimize && can_create_pseudo_p ());
5021 if (TARGET_USE_MOVT && !target_word_relocations
5022 && GET_CODE (operands[1]) == SYMBOL_REF
5023 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5025 arm_emit_movpair (operands[0], operands[1]);
5029 else /* TARGET_THUMB1... */
5031 if (can_create_pseudo_p ())
5033 if (GET_CODE (operands[0]) != REG)
5034 operands[1] = force_reg (SImode, operands[1]);
5038 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5040 split_const (operands[1], &base, &offset);
5041 if (GET_CODE (base) == SYMBOL_REF
5042 && !offset_within_block_p (base, INTVAL (offset)))
5044 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5045 emit_move_insn (tmp, base);
5046 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5051 /* Recognize the case where operand[1] is a reference to thread-local
5052 data and load its address to a register. */
5053 if (arm_tls_referenced_p (operands[1]))
5055 rtx tmp = operands[1];
5058 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5060 addend = XEXP (XEXP (tmp, 0), 1);
5061 tmp = XEXP (XEXP (tmp, 0), 0);
5064 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5065 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5067 tmp = legitimize_tls_address (tmp,
5068 !can_create_pseudo_p () ? operands[0] : 0);
5071 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5072 tmp = force_operand (tmp, operands[0]);
5077 && (CONSTANT_P (operands[1])
5078 || symbol_mentioned_p (operands[1])
5079 || label_mentioned_p (operands[1])))
5080 operands[1] = legitimize_pic_address (operands[1], SImode,
5081 (!can_create_pseudo_p ()
5088 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5089 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5090 ;; so this does not matter.
;; movt writes the upper 16 bits of %0 (tied to %1) from the symbol's
;; #:upper16: relocation; pairs with a movw emitted elsewhere.
5091 (define_insn "*arm_movt"
5092 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5093 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5094 (match_operand:SI 2 "general_operand" "i")))]
5096 "movt%?\t%0, #:upper16:%c2"
5097 [(set_attr "predicable" "yes")
5098 (set_attr "length" "4")]
;; Basic ARM SImode move (no iWMMXt, no VFP): mov/mvn/movw immediates,
;; ldr/str for memory.  Output templates are missing from this extract.
5101 (define_insn "*arm_movsi_insn"
5102 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5103 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5104 "TARGET_ARM && ! TARGET_IWMMXT
5105 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5106 && ( register_operand (operands[0], SImode)
5107 || register_operand (operands[1], SImode))"
5115 [(set_attr "type" "*,*,*,*,load1,store1")
5116 (set_attr "predicable" "yes")
5117 (set_attr "pool_range" "*,*,*,*,4096,*")
5118 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split (header missing from this extract): after reload, break an
;; SImode constant that no single mov/mvn encodes into a minimal
;; instruction sequence via arm_split_constant.  The [(clobber
;; (const_int 0))] replacement is a placeholder; the C body emits the
;; real insns.
5122 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5123 (match_operand:SI 1 "const_int_operand" ""))]
5125 && (!(const_ok_for_arm (INTVAL (operands[1]))
5126 || const_ok_for_arm (~INTVAL (operands[1]))))"
5127 [(clobber (const_int 0))]
5129 arm_split_constant (SET, SImode, NULL_RTX,
5130 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move.  'J'/'K' constants are handled by the two
;; splits that follow; high registers go through the *lhk alternatives.
;; Output templates are missing from this extract.
5135 (define_insn "*thumb1_movsi_insn"
5136 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5137 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5139 && ( register_operand (operands[0], SImode)
5140 || register_operand (operands[1], SImode))"
5151 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5152 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5153 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5154 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Split (header missing from this extract): a 'J' constant (negative
;; immediate) becomes "mov tmp, #-value; neg %0, tmp".
5157 [(set (match_operand:SI 0 "register_operand" "")
5158 (match_operand:SI 1 "const_int_operand" ""))]
5159 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5160 [(set (match_dup 2) (match_dup 1))
5161 (set (match_dup 0) (neg:SI (match_dup 2)))]
5164 operands[1] = GEN_INT (- INTVAL (operands[1]));
5165 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Split (header missing): a 'K' constant (8-bit value shifted left)
;; becomes "mov tmp, #byte; lsl %0, tmp, #i".  The loop finds the
;; smallest shift i such that val is an 8-bit value shifted by i.
5170 [(set (match_operand:SI 0 "register_operand" "")
5171 (match_operand:SI 1 "const_int_operand" ""))]
5172 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5173 [(set (match_dup 2) (match_dup 1))
5174 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5177 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5178 unsigned HOST_WIDE_INT mask = 0xff;
5181 for (i = 0; i < 25; i++)
5182 if ((val & (mask << i)) == val)
5185 /* Don't split if the shift is zero. */
5189 operands[1] = GEN_INT (val >> i);
5190 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5191 operands[3] = GEN_INT (i);
5195 ;; When generating pic, we need to load the symbol offset into a register.
5196 ;; So that the optimizer does not confuse this with a normal symbol load
5197 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5198 ;; since that is the only type of relocation we can use.
5200 ;; Wrap calculation of the whole PIC address in a single pattern for the
5201 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5202 ;; a PIC address involves two loads from memory, so we want to CSE it
5203 ;; as often as possible.
5204 ;; This pattern will be split into one of the pic_load_addr_* patterns
5205 ;; and a move after GCSE optimizations.
5207 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; Whole PIC address computation as one pattern (see the comment block
;; above): a GOT-relative load through the PIC base register.  Kept
;; fused so GCSE/PRE can CSE the two-load sequence; split later.
5208 (define_expand "calculate_pic_address"
5209 [(set (match_operand:SI 0 "register_operand" "")
5210 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5211 (unspec:SI [(match_operand:SI 2 "" "")]
5216 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5218 [(set (match_operand:SI 0 "register_operand" "")
5219 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5220 (unspec:SI [(match_operand:SI 2 "" "")]
5223 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5224 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5225 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5228 ;; The rather odd constraints on the following are to force reload to leave
5229 ;; the insn alone, and to force the minipool generation pass to then move
5230 ;; the GOT symbol to memory.
;; Load the PIC symbol offset from the literal pool (see the constraint
;; note above: 'mX' forces the minipool pass to place the value in
;; memory).  32-bit variant; neg_pool_range depends on ARM vs Thumb-2.
5232 (define_insn "pic_load_addr_32bit"
5233 [(set (match_operand:SI 0 "s_register_operand" "=r")
5234 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5235 "TARGET_32BIT && flag_pic"
5237 [(set_attr "type" "load1")
5238 (set_attr "pool_range" "4096")
5239 (set (attr "neg_pool_range")
5240 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: low registers only, 1024-byte pool range.
5245 (define_insn "pic_load_addr_thumb1"
5246 [(set (match_operand:SI 0 "s_register_operand" "=l")
5247 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5248 "TARGET_THUMB1 && flag_pic"
5250 [(set_attr "type" "load1")
5251 (set (attr "pool_range") (const_int 1024))]
;; Add the current PC to the PIC offset in %0 (Thumb form: pc reads as
;; dot+4).  A local "LPICn" label is emitted so the assembler/linker
;; can resolve the pc-relative offset; operand 2 is the label number.
5254 (define_insn "pic_add_dot_plus_four"
5255 [(set (match_operand:SI 0 "register_operand" "=r")
5256 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5258 (match_operand 2 "" "")]
5262 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5263 INTVAL (operands[2]));
5264 return \"add\\t%0, %|pc\";
5266 [(set_attr "length" "2")]
;; ARM form: pc reads as dot+8; predicable three-operand add.
5269 (define_insn "pic_add_dot_plus_eight"
5270 [(set (match_operand:SI 0 "register_operand" "=r")
5271 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5273 (match_operand 2 "" "")]
5277 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5278 INTVAL (operands[2]));
5279 return \"add%?\\t%0, %|pc, %1\";
5281 [(set_attr "predicable" "yes")]
;; Fused pc-relative load used for TLS: ldr %0, [pc, %1] with the same
;; LPIC label convention; produced by the peephole below.
5284 (define_insn "tls_load_dot_plus_eight"
5285 [(set (match_operand:SI 0 "register_operand" "=r")
5286 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5288 (match_operand 2 "" "")]
5292 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5293 INTVAL (operands[2]));
5294 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5296 [(set_attr "predicable" "yes")]
5299 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5300 ;; followed by a load. These sequences can be crunched down to
5301 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole2 (header missing from this extract): requires the
;; intermediate address register %0 to be dead after the load.
5304 [(set (match_operand:SI 0 "register_operand" "")
5305 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5307 (match_operand 1 "" "")]
5309 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5310 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5312 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load a GOT entry via the PIC base plus an
;; UNSPEC_PIC_OFFSET immediate.
5319 (define_insn "pic_offset_arm"
5320 [(set (match_operand:SI 0 "register_operand" "=r")
5321 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5322 (unspec:SI [(match_operand:SI 2 "" "X")]
5323 UNSPEC_PIC_OFFSET))))]
5324 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5325 "ldr%?\\t%0, [%1,%2]"
5326 [(set_attr "type" "load1")]
;; Reload the PIC base register at a builtin-setjmp receiver label,
;; using r3 (clobbered anyway by set/longjmp) as the scratch.
5329 (define_expand "builtin_setjmp_receiver"
5330 [(label_ref (match_operand 0 "" ""))]
5334 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5336 if (arm_pic_register != INVALID_REGNUM)
5337 arm_load_pic_register (1UL << 3);
5341 ;; If copying one reg to another we can set the condition codes according to
5342 ;; its value. Such a move is common after a return from subroutine and the
5343 ;; result is being tested against zero.
;; Combined move-and-compare-with-zero (flag-setting variant of the
;; SI move); output templates missing from this extract.
5345 (define_insn "*movsi_compare0"
5346 [(set (reg:CC CC_REGNUM)
5347 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5349 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5355 [(set_attr "conds" "set")]
5358 ;; Subroutine to store a half word from a register into memory.
5359 ;; Operand 0 is the source register (HImode)
5360 ;; Operand 1 is the destination address in a register (SImode)
5362 ;; In both this routine and the next, we must be careful not to spill
5363 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5364 ;; can generate unrecognizable rtl.
;; Little-endian order: low byte at offset 0, then shift right 8 and
;; store the high byte at offset 1.  Non-constant reg+X addresses are
;; forced into a register first (see the spill warning above).
5366 (define_expand "storehi"
5367 [;; store the low byte
5368 (set (match_operand 1 "" "") (match_dup 3))
5369 ;; extract the high byte
5371 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5372 ;; store the high byte
5373 (set (match_dup 4) (match_dup 5))]
5377 rtx op1 = operands[1];
5378 rtx addr = XEXP (op1, 0);
5379 enum rtx_code code = GET_CODE (addr);
5381 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5383 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5385 operands[4] = adjust_address (op1, QImode, 1);
5386 operands[1] = adjust_address (operands[1], QImode, 0);
5387 operands[3] = gen_lowpart (QImode, operands[0]);
5388 operands[0] = gen_lowpart (SImode, operands[0]);
5389 operands[2] = gen_reg_rtx (SImode);
5390 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant: byte stores happen in the opposite order.
5394 (define_expand "storehi_bigend"
5395 [(set (match_dup 4) (match_dup 3))
5397 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5398 (set (match_operand 1 "" "") (match_dup 5))]
5402 rtx op1 = operands[1];
5403 rtx addr = XEXP (op1, 0);
5404 enum rtx_code code = GET_CODE (addr);
5406 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5408 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5410 operands[4] = adjust_address (op1, QImode, 1);
5411 operands[1] = adjust_address (operands[1], QImode, 0);
5412 operands[3] = gen_lowpart (QImode, operands[0]);
5413 operands[0] = gen_lowpart (SImode, operands[0]);
5414 operands[2] = gen_reg_rtx (SImode);
5415 operands[5] = gen_lowpart (QImode, operands[2]);
5419 ;; Subroutine to store a half word integer constant into memory.
;; NOTE(review): lossy extract -- interior lines are missing; verify
;; against the full arm.md before editing this definition.
;; Stores an HImode constant as two byte stores, ordered per
;; BYTES_BIG_ENDIAN; if both bytes of the constant are equal, one
;; register is reused for both stores.
5420 (define_expand "storeinthi"
5421 [(set (match_operand 0 "" "")
5422 (match_operand 1 "" ""))
5423 (set (match_dup 3) (match_dup 2))]
5427 HOST_WIDE_INT value = INTVAL (operands[1]);
5428 rtx addr = XEXP (operands[0], 0);
5429 rtx op0 = operands[0];
5430 enum rtx_code code = GET_CODE (addr);
5432 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5434 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5436 operands[1] = gen_reg_rtx (SImode);
5437 if (BYTES_BIG_ENDIAN)
5439 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5440 if ((value & 255) == ((value >> 8) & 255))
5441 operands[2] = operands[1];
5444 operands[2] = gen_reg_rtx (SImode);
5445 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5450 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5451 if ((value & 255) == ((value >> 8) & 255))
5452 operands[2] = operands[1];
5455 operands[2] = gen_reg_rtx (SImode);
5456 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5460 operands[3] = adjust_address (op0, QImode, 1);
5461 operands[0] = adjust_address (operands[0], QImode, 0);
5462 operands[2] = gen_lowpart (QImode, operands[2]);
5463 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction HImode store (requires ARMv4 strh); the source is
;; forced into a register first if it is not already one.
;; NOTE(review): extract may be missing lines here; verify upstream.
5467 (define_expand "storehi_single_op"
5468 [(set (match_operand:HI 0 "memory_operand" "")
5469 (match_operand:HI 1 "general_operand" ""))]
5470 "TARGET_32BIT && arm_arch4"
5472 if (!s_register_operand (operands[1], HImode))
5473 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; NOTE(review): lossy extract -- many interior lines (closing braces,
;; else-arms, condition strings) are missing from this copy; do not edit
;; from here, verify against the full arm.md.
;; General HImode move expander.  Visible structure: for pseudos it
;; routes mem destinations through storehi_single_op / storeinthi /
;; storehi(_bigend); sign-extends CONST_INT sources into an SImode reg;
;; pre-ARMv4 loads go via aligned SImode loads or gen_movhi_bytes;
;; Thumb-2 and Thumb-1 have their own simpler legitimization paths, and
;; reload-time large constants are moved through an SImode subreg.
5477 (define_expand "movhi"
5478 [(set (match_operand:HI 0 "general_operand" "")
5479 (match_operand:HI 1 "general_operand" ""))]
5484 if (can_create_pseudo_p ())
5486 if (GET_CODE (operands[0]) == MEM)
5490 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5493 if (GET_CODE (operands[1]) == CONST_INT)
5494 emit_insn (gen_storeinthi (operands[0], operands[1]));
5497 if (GET_CODE (operands[1]) == MEM)
5498 operands[1] = force_reg (HImode, operands[1]);
5499 if (BYTES_BIG_ENDIAN)
5500 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5502 emit_insn (gen_storehi (operands[1], operands[0]));
5506 /* Sign extend a constant, and keep it in an SImode reg. */
5507 else if (GET_CODE (operands[1]) == CONST_INT)
5509 rtx reg = gen_reg_rtx (SImode);
5510 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5512 /* If the constant is already valid, leave it alone. */
5513 if (!const_ok_for_arm (val))
5515 /* If setting all the top bits will make the constant
5516 loadable in a single instruction, then set them.
5517 Otherwise, sign extend the number. */
5519 if (const_ok_for_arm (~(val | ~0xffff)))
5521 else if (val & 0x8000)
5525 emit_insn (gen_movsi (reg, GEN_INT (val)));
5526 operands[1] = gen_lowpart (HImode, reg);
5528 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5529 && GET_CODE (operands[1]) == MEM)
5531 rtx reg = gen_reg_rtx (SImode);
5533 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5534 operands[1] = gen_lowpart (HImode, reg);
5536 else if (!arm_arch4)
5538 if (GET_CODE (operands[1]) == MEM)
5541 rtx offset = const0_rtx;
5542 rtx reg = gen_reg_rtx (SImode);
5544 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5545 || (GET_CODE (base) == PLUS
5546 && (GET_CODE (offset = XEXP (base, 1))
5548 && ((INTVAL(offset) & 1) != 1)
5549 && GET_CODE (base = XEXP (base, 0)) == REG))
5550 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5554 new_rtx = widen_memory_access (operands[1], SImode,
5555 ((INTVAL (offset) & ~3)
5556 - INTVAL (offset)));
5557 emit_insn (gen_movsi (reg, new_rtx));
5558 if (((INTVAL (offset) & 2) != 0)
5559 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5561 rtx reg2 = gen_reg_rtx (SImode);
5563 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5568 emit_insn (gen_movhi_bytes (reg, operands[1]));
5570 operands[1] = gen_lowpart (HImode, reg);
5574 /* Handle loading a large integer during reload. */
5575 else if (GET_CODE (operands[1]) == CONST_INT
5576 && !const_ok_for_arm (INTVAL (operands[1]))
5577 && !const_ok_for_arm (~INTVAL (operands[1])))
5579 /* Writing a constant to memory needs a scratch, which should
5580 be handled with SECONDARY_RELOADs. */
5581 gcc_assert (GET_CODE (operands[0]) == REG);
5583 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5584 emit_insn (gen_movsi (operands[0], operands[1]));
5588 else if (TARGET_THUMB2)
5590 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5591 if (can_create_pseudo_p ())
5593 if (GET_CODE (operands[0]) != REG)
5594 operands[1] = force_reg (HImode, operands[1]);
5595 /* Zero extend a constant, and keep it in an SImode reg. */
5596 else if (GET_CODE (operands[1]) == CONST_INT)
5598 rtx reg = gen_reg_rtx (SImode);
5599 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5601 emit_insn (gen_movsi (reg, GEN_INT (val)));
5602 operands[1] = gen_lowpart (HImode, reg);
5606 else /* TARGET_THUMB1 */
5608 if (can_create_pseudo_p ())
5610 if (GET_CODE (operands[1]) == CONST_INT)
5612 rtx reg = gen_reg_rtx (SImode);
5614 emit_insn (gen_movsi (reg, operands[1]));
5615 operands[1] = gen_lowpart (HImode, reg);
5618 /* ??? We shouldn't really get invalid addresses here, but this can
5619 happen if we are passed a SP (never OK for HImode/QImode) or
5620 virtual register (also rejected as illegitimate for HImode/QImode)
5621 relative address. */
5622 /* ??? This should perhaps be fixed elsewhere, for instance, in
5623 fixup_stack_1, by checking for other kinds of invalid addresses,
5624 e.g. a bare reference to a virtual register. This may confuse the
5625 alpha though, which must handle this case differently. */
5626 if (GET_CODE (operands[0]) == MEM
5627 && !memory_address_p (GET_MODE (operands[0]),
5628 XEXP (operands[0], 0)))
5630 = replace_equiv_address (operands[0],
5631 copy_to_reg (XEXP (operands[0], 0)));
5633 if (GET_CODE (operands[1]) == MEM
5634 && !memory_address_p (GET_MODE (operands[1]),
5635 XEXP (operands[1], 0)))
5637 = replace_equiv_address (operands[1],
5638 copy_to_reg (XEXP (operands[1], 0)));
5640 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5642 rtx reg = gen_reg_rtx (SImode);
5644 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5645 operands[1] = gen_lowpart (HImode, reg);
5648 if (GET_CODE (operands[0]) == MEM)
5649 operands[1] = force_reg (HImode, operands[1]);
5651 else if (GET_CODE (operands[1]) == CONST_INT
5652 && !satisfies_constraint_I (operands[1]))
5654 /* Handle loading a large integer during reload. */
5656 /* Writing a constant to memory needs a scratch, which should
5657 be handled with SECONDARY_RELOADs. */
5658 gcc_assert (GET_CODE (operands[0]) == REG);
5660 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5661 emit_insn (gen_movsi (operands[0], operands[1]));
;; NOTE(review): lossy extract -- interior lines (incl. the case-1 load
;; path's opening) are missing; verify against the full arm.md.
;; Thumb-1 HImode move insn.  The ldrh alternative special-cases SP used
;; as an index register by first copying SP into the destination.
5668 (define_insn "*thumb1_movhi_insn"
5669 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5670 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5672 && ( register_operand (operands[0], HImode)
5673 || register_operand (operands[1], HImode))"
5675 switch (which_alternative)
5677 case 0: return \"add %0, %1, #0\";
5678 case 2: return \"strh %1, %0\";
5679 case 3: return \"mov %0, %1\";
5680 case 4: return \"mov %0, %1\";
5681 case 5: return \"mov %0, %1\";
5682 default: gcc_unreachable ();
5684 /* The stack pointer can end up being taken as an index register.
5685 Catch this case here and deal with it. */
5686 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5687 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5688 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5691 ops[0] = operands[0];
5692 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5694 output_asm_insn (\"mov %0, %1\", ops);
5696 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5699 return \"ldrh %0, %1\";
5701 [(set_attr "length" "2,4,2,2,2,2")
5702 (set_attr "type" "*,load1,store1,*,*,*")
5703 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; Loads an HImode value as two zero-extended QImode loads and an
;; or-with-shift; operands 4/5 are swapped for BYTES_BIG_ENDIAN.
5706 (define_expand "movhi_bytes"
5707 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5709 (zero_extend:SI (match_dup 6)))
5710 (set (match_operand:SI 0 "" "")
5711 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5716 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5718 mem1 = change_address (operands[1], QImode, addr);
5719 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5720 operands[0] = gen_lowpart (SImode, operands[0]);
5722 operands[2] = gen_reg_rtx (SImode);
5723 operands[3] = gen_reg_rtx (SImode);
5726 if (BYTES_BIG_ENDIAN)
5728 operands[4] = operands[2];
5729 operands[5] = operands[3];
5733 operands[4] = operands[3];
5734 operands[5] = operands[2];
;; NOTE(review): lossy extract -- several RTL lines of this expander are
;; missing; verify against the full arm.md.
;; Big-endian HImode load via an SImode rotate + arithmetic shift.
5739 (define_expand "movhi_bigend"
5741 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5744 (ashiftrt:SI (match_dup 2) (const_int 16)))
5745 (set (match_operand:HI 0 "s_register_operand" "")
5749 operands[2] = gen_reg_rtx (SImode);
5750 operands[3] = gen_reg_rtx (SImode);
5751 operands[4] = gen_lowpart (HImode, operands[3]);
5755 ;; Pattern to recognize insn generated default case above
;; ARMv4+ HImode move: mov/mvn for valid (or bitwise-inverted-valid)
;; constants, strh/ldrh for memory.
;; NOTE(review): extract may be missing lines here; verify upstream.
5756 (define_insn "*movhi_insn_arch4"
5757 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5758 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5761 && (GET_CODE (operands[1]) != CONST_INT
5762 || const_ok_for_arm (INTVAL (operands[1]))
5763 || const_ok_for_arm (~INTVAL (operands[1])))"
5765 mov%?\\t%0, %1\\t%@ movhi
5766 mvn%?\\t%0, #%B1\\t%@ movhi
5767 str%(h%)\\t%1, %0\\t%@ movhi
5768 ldr%(h%)\\t%0, %1\\t%@ movhi"
5769 [(set_attr "type" "*,*,store1,load1")
5770 (set_attr "predicable" "yes")
5771 (set_attr "pool_range" "*,*,*,256")
5772 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate-only HImode move (mov or mvn of inverted constant).
;; NOTE(review): extract may be missing lines here; verify upstream.
5775 (define_insn "*movhi_bytes"
5776 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5777 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5780 mov%?\\t%0, %1\\t%@ movhi
5781 mvn%?\\t%0, #%B1\\t%@ movhi"
5782 [(set_attr "predicable" "yes")]
;; HImode store with a DImode scratch clobber; falls through to a plain
;; gen_movhi when the address is already strict and the source is a low
;; register.  The remaining cases are unhandled (see XXX below).
;; NOTE(review): extract may be missing lines here; verify upstream.
5785 (define_expand "thumb_movhi_clobber"
5786 [(set (match_operand:HI 0 "memory_operand" "")
5787 (match_operand:HI 1 "register_operand" ""))
5788 (clobber (match_operand:DI 2 "register_operand" ""))]
5791 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5792 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5794 emit_insn (gen_movhi (operands[0], operands[1]));
5797 /* XXX Fixme, need to handle other cases here as well. */
5802 ;; We use a DImode scratch because we may occasionally need an additional
5803 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5804 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; Reload helper for storing HI to a non-offsettable address; delegates
;; to arm_reload_out_hi / thumb_reload_out_hi (branch condition lines
;; are missing from this extract -- verify upstream).
5805 (define_expand "reload_outhi"
5806 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5807 (match_operand:HI 1 "s_register_operand" "r")
5808 (match_operand:DI 2 "s_register_operand" "=&l")])]
5811 arm_reload_out_hi (operands);
5813 thumb_reload_out_hi (operands);
;; Reload helper for loading HI from a non-offsettable address.  The
;; Thumb path intentionally reuses thumb_reload_out_hi (it handles both
;; directions upstream).  Branch-condition lines are missing from this
;; extract -- verify upstream.
5818 (define_expand "reload_inhi"
5819 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5820 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5821 (match_operand:DI 2 "s_register_operand" "=&r")])]
5825 arm_reload_in_hi (operands);
5827 thumb_reload_out_hi (operands);
;; NOTE(review): lossy extract -- interior lines (braces, else-arms,
;; condition strings) are missing; verify against the full arm.md.
;; General QImode move expander: constants are masked to 8 bits for
;; Thumb (to enable movs) and staged through an SImode reg; invalid
;; addresses are legitimized; mem sources are zero-extended when
;; optimizing; large reload-time constants go via an SImode subreg.
5831 (define_expand "movqi"
5832 [(set (match_operand:QI 0 "general_operand" "")
5833 (match_operand:QI 1 "general_operand" ""))]
5836 /* Everything except mem = const or mem = mem can be done easily */
5838 if (can_create_pseudo_p ())
5840 if (GET_CODE (operands[1]) == CONST_INT)
5842 rtx reg = gen_reg_rtx (SImode);
5844 /* For thumb we want an unsigned immediate, then we are more likely
5845 to be able to use a movs insn. */
5847 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5849 emit_insn (gen_movsi (reg, operands[1]));
5850 operands[1] = gen_lowpart (QImode, reg);
5855 /* ??? We shouldn't really get invalid addresses here, but this can
5856 happen if we are passed a SP (never OK for HImode/QImode) or
5857 virtual register (also rejected as illegitimate for HImode/QImode)
5858 relative address. */
5859 /* ??? This should perhaps be fixed elsewhere, for instance, in
5860 fixup_stack_1, by checking for other kinds of invalid addresses,
5861 e.g. a bare reference to a virtual register. This may confuse the
5862 alpha though, which must handle this case differently. */
5863 if (GET_CODE (operands[0]) == MEM
5864 && !memory_address_p (GET_MODE (operands[0]),
5865 XEXP (operands[0], 0)))
5867 = replace_equiv_address (operands[0],
5868 copy_to_reg (XEXP (operands[0], 0)));
5869 if (GET_CODE (operands[1]) == MEM
5870 && !memory_address_p (GET_MODE (operands[1]),
5871 XEXP (operands[1], 0)))
5873 = replace_equiv_address (operands[1],
5874 copy_to_reg (XEXP (operands[1], 0)));
5877 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5879 rtx reg = gen_reg_rtx (SImode);
5881 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5882 operands[1] = gen_lowpart (QImode, reg);
5885 if (GET_CODE (operands[0]) == MEM)
5886 operands[1] = force_reg (QImode, operands[1]);
5888 else if (TARGET_THUMB
5889 && GET_CODE (operands[1]) == CONST_INT
5890 && !satisfies_constraint_I (operands[1]))
5892 /* Handle loading a large integer during reload. */
5894 /* Writing a constant to memory needs a scratch, which should
5895 be handled with SECONDARY_RELOADs. */
5896 gcc_assert (GET_CODE (operands[0]) == REG);
5898 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5899 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM QImode move insn; the output templates (lines 5912-5916) are
;; missing from this extract -- verify against the full arm.md.
5906 (define_insn "*arm_movqi_insn"
5907 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5908 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5910 && ( register_operand (operands[0], QImode)
5911 || register_operand (operands[1], QImode))"
5917 [(set_attr "type" "*,*,load1,store1")
5918 (set_attr "predicable" "yes")]
;; Thumb-1 QImode move insn; the output templates (lines 5927-5933) are
;; missing from this extract -- verify against the full arm.md.
5921 (define_insn "*thumb1_movqi_insn"
5922 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5923 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5925 && ( register_operand (operands[0], QImode)
5926 || register_operand (operands[1], QImode))"
5934 [(set_attr "length" "2")
5935 (set_attr "type" "*,load1,store1,*,*,*")
5936 (set_attr "pool_range" "*,32,*,*,*,*")
5937 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (half-float) move expander; forces the source into a register
;; for mem destinations.  Condition strings and braces are missing from
;; this extract -- verify against the full arm.md.
5940 (define_expand "movhf"
5941 [(set (match_operand:HF 0 "general_operand" "")
5942 (match_operand:HF 1 "general_operand" ""))]
5947 if (GET_CODE (operands[0]) == MEM)
5948 operands[1] = force_reg (HFmode, operands[1]);
5950 else /* TARGET_THUMB1 */
5952 if (can_create_pseudo_p ())
5954 if (GET_CODE (operands[0]) != REG)
5955 operands[1] = force_reg (HFmode, operands[1]);
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; __fp16 move for 32-bit cores without hardware FP16: ldrh/strh for
;; memory, mov for reg-reg; constants are materialized as their raw
;; 16-bit representation via movw (Thumb-2 capable) or mov+orr.
5961 (define_insn "*arm32_movhf"
5962 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5963 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5964 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5965 && ( s_register_operand (operands[0], HFmode)
5966 || s_register_operand (operands[1], HFmode))"
5968 switch (which_alternative)
5970 case 0: /* ARM register from memory */
5971 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5972 case 1: /* memory from ARM register */
5973 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5974 case 2: /* ARM register from ARM register */
5975 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5976 case 3: /* ARM register from constant */
5982 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5983 bits = real_to_target (NULL, &r, HFmode);
5984 ops[0] = operands[0];
5985 ops[1] = GEN_INT (bits);
5986 ops[2] = GEN_INT (bits & 0xff00);
5987 ops[3] = GEN_INT (bits & 0x00ff);
5989 if (arm_arch_thumb2)
5990 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5992 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5999 [(set_attr "conds" "unconditional")
6000 (set_attr "type" "load1,store1,*,*")
6001 (set_attr "length" "4,4,4,8")
6002 (set_attr "predicable" "yes")
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; Thumb-1 __fp16 move; a label-relative address means a constant-pool
;; entry, loaded with a full-word ldr instead of ldrh.
6006 (define_insn "*thumb1_movhf"
6007 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6008 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6010 && ( s_register_operand (operands[0], HFmode)
6011 || s_register_operand (operands[1], HFmode))"
6013 switch (which_alternative)
6018 gcc_assert (GET_CODE(operands[1]) == MEM);
6019 addr = XEXP (operands[1], 0);
6020 if (GET_CODE (addr) == LABEL_REF
6021 || (GET_CODE (addr) == CONST
6022 && GET_CODE (XEXP (addr, 0)) == PLUS
6023 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6024 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6026 /* Constant pool entry. */
6027 return \"ldr\\t%0, %1\";
6029 return \"ldrh\\t%0, %1\";
6031 case 2: return \"strh\\t%1, %0\";
6032 default: return \"mov\\t%0, %1\";
6035 [(set_attr "length" "2")
6036 (set_attr "type" "*,load1,store1,*,*")
6037 (set_attr "pool_range" "*,1020,*,*,*")
6038 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander; forces the source into a register for mem
;; destinations.  Condition strings/braces are missing from this
;; extract -- verify against the full arm.md.
6040 (define_expand "movsf"
6041 [(set (match_operand:SF 0 "general_operand" "")
6042 (match_operand:SF 1 "general_operand" ""))]
6047 if (GET_CODE (operands[0]) == MEM)
6048 operands[1] = force_reg (SFmode, operands[1]);
6050 else /* TARGET_THUMB1 */
6052 if (can_create_pseudo_p ())
6054 if (GET_CODE (operands[0]) != REG)
6055 operands[1] = force_reg (SFmode, operands[1]);
6061 ;; Transform a floating-point move of a constant into a core register into
6062 ;; an SImode operation.
;; NOTE(review): the "(define_split" header line (orig. 6063) is missing
;; from this extract, as are other interior lines -- verify upstream.
;; Splits an SF-constant-to-core-register move into an SImode move of
;; the same bit pattern.
6064 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6065 (match_operand:SF 1 "immediate_operand" ""))]
6068 && GET_CODE (operands[1]) == CONST_DOUBLE"
6069 [(set (match_dup 2) (match_dup 3))]
6071 operands[2] = gen_lowpart (SImode, operands[0]);
6072 operands[3] = gen_lowpart (SImode, operands[1]);
6073 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move through core registers (mov/ldr/str).  The
;; mov template line is missing from this extract -- verify upstream.
6078 (define_insn "*arm_movsf_soft_insn"
6079 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6080 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6082 && TARGET_SOFT_FLOAT
6083 && (GET_CODE (operands[0]) != MEM
6084 || register_operand (operands[1], SFmode))"
6087 ldr%?\\t%0, %1\\t%@ float
6088 str%?\\t%1, %0\\t%@ float"
6089 [(set_attr "predicable" "yes")
6090 (set_attr "type" "*,load1,store1")
6091 (set_attr "pool_range" "*,4096,*")
6092 (set_attr "arm_neg_pool_range" "*,4084,*")
6093 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6096 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move; the output templates (orig. 6103-6110) are
;; missing from this extract -- verify against the full arm.md.
6097 (define_insn "*thumb1_movsf_insn"
6098 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6099 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6101 && ( register_operand (operands[0], SFmode)
6102 || register_operand (operands[1], SFmode))"
6111 [(set_attr "length" "2")
6112 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6113 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6114 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander; forces the source into a register for mem
;; destinations.  Condition strings/braces are missing from this
;; extract -- verify against the full arm.md.
6117 (define_expand "movdf"
6118 [(set (match_operand:DF 0 "general_operand" "")
6119 (match_operand:DF 1 "general_operand" ""))]
6124 if (GET_CODE (operands[0]) == MEM)
6125 operands[1] = force_reg (DFmode, operands[1]);
6127 else /* TARGET_THUMB */
6129 if (can_create_pseudo_p ())
6131 if (GET_CODE (operands[0]) != REG)
6132 operands[1] = force_reg (DFmode, operands[1]);
6138 ;; Reloading a df mode value stored in integer regs to memory can require a
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; Reload helper for storing a DF held in core regs: handles REG,
;; POST_INC/PRE_DEC (via movdi), PRE_INC (pre-bump by 8), POST_DEC, and
;; computed addresses via the SImode scratch in operand 2.
6140 (define_expand "reload_outdf"
6141 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6142 (match_operand:DF 1 "s_register_operand" "r")
6143 (match_operand:SI 2 "s_register_operand" "=&r")]
6147 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6150 operands[2] = XEXP (operands[0], 0);
6151 else if (code == POST_INC || code == PRE_DEC)
6153 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6154 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6155 emit_insn (gen_movdi (operands[0], operands[1]));
6158 else if (code == PRE_INC)
6160 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6162 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6165 else if (code == POST_DEC)
6166 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6168 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6169 XEXP (XEXP (operands[0], 0), 1)));
6171 emit_insn (gen_rtx_SET (VOIDmode,
6172 replace_equiv_address (operands[0], operands[2]),
6175 if (code == POST_DEC)
6176 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move; the switch cases other than the
;; output_move_double fall-through are missing from this extract --
;; verify against the full arm.md.
6182 (define_insn "*movdf_soft_insn"
6183 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6184 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6185 "TARGET_32BIT && TARGET_SOFT_FLOAT
6186 && ( register_operand (operands[0], DFmode)
6187 || register_operand (operands[1], DFmode))"
6189 switch (which_alternative)
6196 return output_move_double (operands);
6199 [(set_attr "length" "8,12,16,8,8")
6200 (set_attr "type" "*,*,*,load2,store2")
6201 (set_attr "pool_range" "*,*,*,1020,*")
6202 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6203 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6206 ;;; ??? This should have alternatives for constants.
6207 ;;; ??? This was originally identical to the movdi_insn pattern.
6208 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6209 ;;; thumb_reorg with a memory reference.
;; NOTE(review): lossy extract -- case labels/braces missing; verify
;; against the full arm.md.
;; Thumb-1 DFmode move: register pairs via add/mov pairs (ordered so an
;; overlapping destination is not clobbered first), ldmia/stmia for
;; auto-increment memory, and a two-str sequence for plain stores.
6210 (define_insn "*thumb_movdf_insn"
6211 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6212 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6214 && ( register_operand (operands[0], DFmode)
6215 || register_operand (operands[1], DFmode))"
6217 switch (which_alternative)
6221 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6222 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6223 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6225 return \"ldmia\\t%1, {%0, %H0}\";
6227 return \"stmia\\t%0, {%1, %H1}\";
6229 return thumb_load_double_from_address (operands);
6231 operands[2] = gen_rtx_MEM (SImode,
6232 plus_constant (XEXP (operands[0], 0), 4));
6233 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6236 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6237 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6238 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6241 [(set_attr "length" "4,2,2,6,4,4")
6242 (set_attr "type" "*,load2,store2,load2,store2,*")
6243 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (FPA extended-precision) move expander; mem destinations get
;; the source forced into a register.
;; NOTE(review): extract may be missing lines here; verify upstream.
6246 (define_expand "movxf"
6247 [(set (match_operand:XF 0 "general_operand" "")
6248 (match_operand:XF 1 "general_operand" ""))]
6249 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6251 if (GET_CODE (operands[0]) == MEM)
6252 operands[1] = force_reg (XFmode, operands[1]);
6258 ;; load- and store-multiple insns
6259 ;; The arm can load/store any set of registers, provided that they are in
6260 ;; ascending order, but these expanders assume a contiguous set.
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; ldm expander: bails out (FAIL path is in the missing lines) unless
;; the request is a contiguous run of 2..14 core registers that stays
;; within LAST_ARM_REGNUM; otherwise builds the parallel via
;; arm_gen_load_multiple.
6262 (define_expand "load_multiple"
6263 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6264 (match_operand:SI 1 "" ""))
6265 (use (match_operand:SI 2 "" ""))])]
6268 HOST_WIDE_INT offset = 0;
6270 /* Support only fixed point registers. */
6271 if (GET_CODE (operands[2]) != CONST_INT
6272 || INTVAL (operands[2]) > 14
6273 || INTVAL (operands[2]) < 2
6274 || GET_CODE (operands[1]) != MEM
6275 || GET_CODE (operands[0]) != REG
6276 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6277 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6281 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6282 INTVAL (operands[2]),
6283 force_reg (SImode, XEXP (operands[1], 0)),
6284 FALSE, operands[1], &offset);
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; stm expander: mirror image of "load_multiple" (same 2..14 contiguous
;; core-register restriction), built via arm_gen_store_multiple.
6287 (define_expand "store_multiple"
6288 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6289 (match_operand:SI 1 "" ""))
6290 (use (match_operand:SI 2 "" ""))])]
6293 HOST_WIDE_INT offset = 0;
6295 /* Support only fixed point registers. */
6296 if (GET_CODE (operands[2]) != CONST_INT
6297 || INTVAL (operands[2]) > 14
6298 || INTVAL (operands[2]) < 2
6299 || GET_CODE (operands[1]) != REG
6300 || GET_CODE (operands[0]) != MEM
6301 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6302 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6306 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6307 INTVAL (operands[2]),
6308 force_reg (SImode, XEXP (operands[0], 0)),
6309 FALSE, operands[0], &offset);
6313 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6314 ;; We could let this apply for blocks of less than this, but it clobbers so
6315 ;; many registers that there is then probably a better way.
;; Block-move expander: 32-bit targets try arm_gen_movmemqi; Thumb-1
;; only handles word-aligned blocks (operand 3 == 4) of at most 48
;; bytes.  FAIL/DONE lines are missing from this extract -- verify
;; against the full arm.md.
6317 (define_expand "movmemqi"
6318 [(match_operand:BLK 0 "general_operand" "")
6319 (match_operand:BLK 1 "general_operand" "")
6320 (match_operand:SI 2 "const_int_operand" "")
6321 (match_operand:SI 3 "const_int_operand" "")]
6326 if (arm_gen_movmemqi (operands))
6330 else /* TARGET_THUMB1 */
6332 if ( INTVAL (operands[3]) != 4
6333 || INTVAL (operands[2]) > 48)
6336 thumb_expand_movmemqi (operands);
6342 ;; Thumb block-move insns
;; Thumb block move of three words with post-incremented source and
;; destination pointers; needs three low-reg scratches.
;; NOTE(review): extract may be missing lines here; verify upstream.
6344 (define_insn "movmem12b"
6345 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6346 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6347 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6348 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6349 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6350 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6351 (set (match_operand:SI 0 "register_operand" "=l")
6352 (plus:SI (match_dup 2) (const_int 12)))
6353 (set (match_operand:SI 1 "register_operand" "=l")
6354 (plus:SI (match_dup 3) (const_int 12)))
6355 (clobber (match_scratch:SI 4 "=&l"))
6356 (clobber (match_scratch:SI 5 "=&l"))
6357 (clobber (match_scratch:SI 6 "=&l"))]
6359 "* return thumb_output_move_mem_multiple (3, operands);"
6360 [(set_attr "length" "4")
6361 ; This isn't entirely accurate... It loads as well, but in terms of
6362 ; scheduling the following insn it is better to consider it as a store
6363 (set_attr "type" "store3")]
;; Thumb block move of two words; two-word sibling of "movmem12b".
;; NOTE(review): extract may be missing lines here; verify upstream.
6366 (define_insn "movmem8b"
6367 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6368 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6369 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6370 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6371 (set (match_operand:SI 0 "register_operand" "=l")
6372 (plus:SI (match_dup 2) (const_int 8)))
6373 (set (match_operand:SI 1 "register_operand" "=l")
6374 (plus:SI (match_dup 3) (const_int 8)))
6375 (clobber (match_scratch:SI 4 "=&l"))
6376 (clobber (match_scratch:SI 5 "=&l"))]
6378 "* return thumb_output_move_mem_multiple (2, operands);"
6379 [(set_attr "length" "4")
6380 ; This isn't entirely accurate... It loads as well, but in terms of
6381 ; scheduling the following insn it is better to consider it as a store
6382 (set_attr "type" "store2")]
6387 ;; Compare & branch insns
6388 ;; The range calculations are based as follows:
6389 ;; For forward branches, the address calculation returns the address of
6390 ;; the next instruction. This is 2 beyond the branch instruction.
6391 ;; For backward branches, the address calculation returns the address of
6392 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6393 ;; instruction for the shortest sequence, and 4 before the branch instruction
6394 ;; if we have to jump around an unconditional branch.
6395 ;; To the basic branch range the PC offset must be added (this is +4).
6396 ;; So for forward branches we have
6397 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6398 ;; And for backward branches we have
6399 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6401 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6402 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; NOTE(review): lossy extract -- the TARGET_32BIT/THUMB1 branch
;; structure lines are missing; verify against the full arm.md.
;; SImode compare-and-branch expander: 32-bit targets legitimize the
;; second operand and emit cbranch_cc; Thumb-1 prefers the negated-
;; compare scratch pattern when the constant allows it.
6404 (define_expand "cbranchsi4"
6405 [(set (pc) (if_then_else
6406 (match_operator 0 "arm_comparison_operator"
6407 [(match_operand:SI 1 "s_register_operand" "")
6408 (match_operand:SI 2 "nonmemory_operand" "")])
6409 (label_ref (match_operand 3 "" ""))
6411 "TARGET_THUMB1 || TARGET_32BIT"
6415 if (!arm_add_operand (operands[2], SImode))
6416 operands[2] = force_reg (SImode, operands[2]);
6417 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6421 if (thumb1_cmpneg_operand (operands[2], SImode))
6423 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6424 operands[3], operands[0]))
6427 if (!thumb1_cmp_operand (operands[2], SImode))
6428 operands[2] = force_reg (SImode, operands[2]);
6431 ;; A pattern to recognize a special situation and optimize for it.
6432 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6433 ;; due to the available addressing modes. Hence, convert a signed comparison
6434 ;; with zero into an unsigned comparison with 127 if possible.
;; Converts a signed QImode compare with zero into an unsigned SImode
;; compare with 127 after a zero-extending load (GE -> LEU, LT -> GTU).
;; NOTE(review): extract may be missing lines here; verify upstream.
6435 (define_expand "cbranchqi4"
6436 [(set (pc) (if_then_else
6437 (match_operator 0 "lt_ge_comparison_operator"
6438 [(match_operand:QI 1 "memory_operand" "")
6439 (match_operand:QI 2 "const0_operand" "")])
6440 (label_ref (match_operand 3 "" ""))
6445 xops[1] = gen_reg_rtx (SImode);
6446 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6447 xops[2] = GEN_INT (127);
6448 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6449 VOIDmode, xops[1], xops[2]);
6450 xops[3] = operands[3];
6451 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch: delegates directly to cbranch_cc.
;; NOTE(review): extract may be missing lines here; verify upstream.
6455 (define_expand "cbranchsf4"
6456 [(set (pc) (if_then_else
6457 (match_operator 0 "arm_comparison_operator"
6458 [(match_operand:SF 1 "s_register_operand" "")
6459 (match_operand:SF 2 "arm_float_compare_operand" "")])
6460 (label_ref (match_operand 3 "" ""))
6462 "TARGET_32BIT && TARGET_HARD_FLOAT"
6463 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6464 operands[3])); DONE;"
;; DFmode compare-and-branch (double-precision FP hardware required);
;; delegates directly to cbranch_cc.
;; NOTE(review): extract may be missing lines here; verify upstream.
6467 (define_expand "cbranchdf4"
6468 [(set (pc) (if_then_else
6469 (match_operator 0 "arm_comparison_operator"
6470 [(match_operand:DF 1 "s_register_operand" "")
6471 (match_operand:DF 2 "arm_float_compare_operand" "")])
6472 (label_ref (match_operand 3 "" ""))
6474 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6475 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6476 operands[3])); DONE;"
;; NOTE(review): lossy extract -- the switch's case labels are missing;
;; verify against the full arm.md.
;; DImode compare-and-branch: comparisons arm_gen_compare_reg cannot
;; handle directly are flipped (operands swapped, condition reversed)
;; before emitting cbranch_cc.
6479 (define_expand "cbranchdi4"
6480 [(set (pc) (if_then_else
6481 (match_operator 0 "arm_comparison_operator"
6482 [(match_operand:DI 1 "cmpdi_operand" "")
6483 (match_operand:DI 2 "cmpdi_operand" "")])
6484 (label_ref (match_operand 3 "" ""))
6488 rtx swap = NULL_RTX;
6489 enum rtx_code code = GET_CODE (operands[0]);
6491 /* We should not have two constants. */
6492 gcc_assert (GET_MODE (operands[1]) == DImode
6493 || GET_MODE (operands[2]) == DImode);
6495 /* Flip unimplemented DImode comparisons to a form that
6496 arm_gen_compare_reg can handle. */
6500 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6502 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6504 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6506 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6511 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6514 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; NOTE(review): lossy extract -- interior lines missing; verify upstream.
;; Thumb-1 compare-and-branch insn.  Tracks the last cmp via
;; cfun->machine->thumb1_cc_* so a redundant cmp can be elided when the
;; flags already reflect operands 1/2; length attribute selects short
;; branch, branch-around-branch, or far jump via bl.
6520 (define_insn "cbranchsi4_insn"
6521 [(set (pc) (if_then_else
6522 (match_operator 0 "arm_comparison_operator"
6523 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6524 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6525 (label_ref (match_operand 3 "" ""))
6529 rtx t = cfun->machine->thumb1_cc_insn;
6532 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6533 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6535 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6537 if (!noov_comparison_operator (operands[0], VOIDmode))
6540 else if (cfun->machine->thumb1_cc_mode != CCmode)
6545 output_asm_insn ("cmp\t%1, %2", operands);
6546 cfun->machine->thumb1_cc_insn = insn;
6547 cfun->machine->thumb1_cc_op0 = operands[1];
6548 cfun->machine->thumb1_cc_op1 = operands[2];
6549 cfun->machine->thumb1_cc_mode = CCmode;
6552 /* Ensure we emit the right type of condition code on the jump. */
6553 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6556 switch (get_attr_length (insn))
6558 case 4: return \"b%d0\\t%l3\";
6559 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6560 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6563 [(set (attr "far_jump")
6565 (eq_attr "length" "8")
6566 (const_string "yes")
6567 (const_string "no")))
6568 (set (attr "length")
6570 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6571 (le (minus (match_dup 3) (pc)) (const_int 256)))
6574 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6575 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6580 (define_insn "cbranchsi4_scratch"
6581 [(set (pc) (if_then_else
6582 (match_operator 4 "arm_comparison_operator"
6583 [(match_operand:SI 1 "s_register_operand" "l,0")
6584 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6585 (label_ref (match_operand 3 "" ""))
6587 (clobber (match_scratch:SI 0 "=l,l"))]
6590 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6592 switch (get_attr_length (insn))
6594 case 4: return \"b%d4\\t%l3\";
6595 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6596 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6599 [(set (attr "far_jump")
6601 (eq_attr "length" "8")
6602 (const_string "yes")
6603 (const_string "no")))
6604 (set (attr "length")
6606 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6607 (le (minus (match_dup 3) (pc)) (const_int 256)))
6610 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6611 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6616 ;; Two peepholes to generate subtract of 0 instead of a move if the
6617 ;; condition codes will be useful.
;; NOTE(review): the opening (define_peephole2 line of each of the two
;; peepholes below is not visible in this chunk; the bodies match a
;; low-register move followed by a branch that compares against zero, and
;; rewrite the move as "minus 0" so the flags become available.
6619 [(set (match_operand:SI 0 "low_register_operand" "")
6620 (match_operand:SI 1 "low_register_operand" ""))
6622 (if_then_else (match_operator 2 "arm_comparison_operator"
6623 [(match_dup 1) (const_int 0)])
6624 (label_ref (match_operand 3 "" ""))
6627 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6629 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6630 (label_ref (match_dup 3))
6634 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6635 ;; merge cases like this because the op1 is a hard register in
6636 ;; CLASS_LIKELY_SPILLED_P.
6638 [(set (match_operand:SI 0 "low_register_operand" "")
6639 (match_operand:SI 1 "low_register_operand" ""))
6641 (if_then_else (match_operator 2 "arm_comparison_operator"
6642 [(match_dup 0) (const_int 0)])
6643 (label_ref (match_operand 3 "" ""))
6646 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6648 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6649 (label_ref (match_dup 3))
;; Equality branch comparing a register with a negated register: emit
;; cmn (compare-negative) instead of negating first.
6653 (define_insn "*negated_cbranchsi4"
6656 (match_operator 0 "equality_operator"
6657 [(match_operand:SI 1 "s_register_operand" "l")
6658 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6659 (label_ref (match_operand 3 "" ""))
6663 output_asm_insn (\"cmn\\t%1, %2\", operands);
6664 switch (get_attr_length (insn))
6666 case 4: return \"b%d0\\t%l3\";
6667 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6668 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6671 [(set (attr "far_jump")
6673 (eq_attr "length" "8")
6674 (const_string "yes")
6675 (const_string "no")))
6676 (set (attr "length")
6678 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6679 (le (minus (match_dup 3) (pc)) (const_int 256)))
6682 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6683 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single extracted bit: shift it into the sign position with
;; lsl (shift amount 31 - bitpos) into a scratch, then branch on the sign.
6688 (define_insn "*tbit_cbranch"
6691 (match_operator 0 "equality_operator"
6692 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6694 (match_operand:SI 2 "const_int_operand" "i"))
6696 (label_ref (match_operand 3 "" ""))
6698 (clobber (match_scratch:SI 4 "=l"))]
6703 op[0] = operands[4];
6704 op[1] = operands[1];
6705 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6707 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6708 switch (get_attr_length (insn))
6710 case 4: return \"b%d0\\t%l3\";
6711 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6712 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6715 [(set (attr "far_jump")
6717 (eq_attr "length" "8")
6718 (const_string "yes")
6719 (const_string "no")))
6720 (set (attr "length")
6722 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6723 (le (minus (match_dup 3) (pc)) (const_int 256)))
6726 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6727 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low bits of a register: shift the tested field out with
;; lsl by (32 - width) into a scratch and branch on the result.
6732 (define_insn "*tlobits_cbranch"
6735 (match_operator 0 "equality_operator"
6736 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6737 (match_operand:SI 2 "const_int_operand" "i")
6740 (label_ref (match_operand 3 "" ""))
6742 (clobber (match_scratch:SI 4 "=l"))]
6747 op[0] = operands[4];
6748 op[1] = operands[1];
6749 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6751 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6752 switch (get_attr_length (insn))
6754 case 4: return \"b%d0\\t%l3\";
6755 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6756 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6759 [(set (attr "far_jump")
6761 (eq_attr "length" "8")
6762 (const_string "yes")
6763 (const_string "no")))
6764 (set (attr "length")
6766 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6767 (le (minus (match_dup 3) (pc)) (const_int 256)))
6770 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6771 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (op0 AND op1) ==/!= 0 using tst, which sets the flags
;; without needing a destination register.
6776 (define_insn "*tstsi3_cbranch"
6779 (match_operator 3 "equality_operator"
6780 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6781 (match_operand:SI 1 "s_register_operand" "l"))
6783 (label_ref (match_operand 2 "" ""))
6788 output_asm_insn (\"tst\\t%0, %1\", operands);
6789 switch (get_attr_length (insn))
6791 case 4: return \"b%d3\\t%l2\";
6792 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6793 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6796 [(set (attr "far_jump")
6798 (eq_attr "length" "8")
6799 (const_string "yes")
6800 (const_string "no")))
6801 (set (attr "length")
6803 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6804 (le (minus (match_dup 2) (pc)) (const_int 256)))
6807 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6808 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Combined decrement-and-branch: op0 = op2 - 1 plus a branch on the
;; equality test.  Alternatives cover a low-reg destination (plain sub),
;; a high-reg destination (sub into the scratch, then mov — the comment
;; in the body explains why reload cannot handle an output reload here),
;; and a memory destination (sub into the scratch, then str).
6813 (define_insn "*cbranchne_decr1"
6815 (if_then_else (match_operator 3 "equality_operator"
6816 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6818 (label_ref (match_operand 4 "" ""))
6820 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6821 (plus:SI (match_dup 2) (const_int -1)))
6822 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6827 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6829 VOIDmode, operands[2], const1_rtx);
6830 cond[1] = operands[4];
6832 if (which_alternative == 0)
6833 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6834 else if (which_alternative == 1)
6836 /* We must provide an alternative for a hi reg because reload
6837 cannot handle output reloads on a jump instruction, but we
6838 can't subtract into that. Fortunately a mov from lo to hi
6839 does not clobber the condition codes. */
6840 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6841 output_asm_insn (\"mov\\t%0, %1\", operands);
6845 /* Similarly, but the target is memory. */
6846 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6847 output_asm_insn (\"str\\t%1, %0\", operands);
6850 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6853 output_asm_insn (\"b%d0\\t%l1\", cond);
6856 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6857 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6859 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6860 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6864 [(set (attr "far_jump")
6866 (ior (and (eq (symbol_ref ("which_alternative"))
6868 (eq_attr "length" "8"))
6869 (eq_attr "length" "10"))
6870 (const_string "yes")
6871 (const_string "no")))
6872 (set_attr_alternative "length"
6876 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6877 (le (minus (match_dup 4) (pc)) (const_int 256)))
6880 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6881 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6886 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6887 (le (minus (match_dup 4) (pc)) (const_int 256)))
6890 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6891 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6896 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6897 (le (minus (match_dup 4) (pc)) (const_int 256)))
6900 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6901 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6906 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6907 (le (minus (match_dup 4) (pc)) (const_int 256)))
6910 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6911 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Combined add-and-branch: op0 = op2 + op3 and branch on the comparison.
;; Restricted to EQ/NE/GE/LT (the codes the flags from add/sub support
;; here).  Negative constant addends are emitted as sub with the negated
;; immediate.  Later alternatives route the sum through the scratch and
;; then mov (high-reg destination) or str (memory destination).
6916 (define_insn "*addsi3_cbranch"
6919 (match_operator 4 "arm_comparison_operator"
6921 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6922 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6924 (label_ref (match_operand 5 "" ""))
6927 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6928 (plus:SI (match_dup 2) (match_dup 3)))
6929 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6931 && (GET_CODE (operands[4]) == EQ
6932 || GET_CODE (operands[4]) == NE
6933 || GET_CODE (operands[4]) == GE
6934 || GET_CODE (operands[4]) == LT)"
6939 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6940 cond[1] = operands[2];
6941 cond[2] = operands[3];
6943 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6944 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6946 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6948 if (which_alternative >= 2
6949 && which_alternative < 4)
6950 output_asm_insn (\"mov\\t%0, %1\", operands);
6951 else if (which_alternative >= 4)
6952 output_asm_insn (\"str\\t%1, %0\", operands);
6954 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
6957 return \"b%d4\\t%l5\";
6959 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6961 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6965 [(set (attr "far_jump")
6967 (ior (and (lt (symbol_ref ("which_alternative"))
6969 (eq_attr "length" "8"))
6970 (eq_attr "length" "10"))
6971 (const_string "yes")
6972 (const_string "no")))
6973 (set (attr "length")
6975 (lt (symbol_ref ("which_alternative"))
6978 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
6979 (le (minus (match_dup 5) (pc)) (const_int 256)))
6982 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
6983 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6987 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
6988 (le (minus (match_dup 5) (pc)) (const_int 256)))
6991 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
6992 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; As above but the sum itself is not needed (scratch only); cheap
;; alternatives use cmp with the negated immediate or cmn with a register
;; instead of materialising the addition.
6997 (define_insn "*addsi3_cbranch_scratch"
7000 (match_operator 3 "arm_comparison_operator"
7002 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7003 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7005 (label_ref (match_operand 4 "" ""))
7007 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7009 && (GET_CODE (operands[3]) == EQ
7010 || GET_CODE (operands[3]) == NE
7011 || GET_CODE (operands[3]) == GE
7012 || GET_CODE (operands[3]) == LT)"
7015 switch (which_alternative)
7018 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7021 output_asm_insn (\"cmn\t%1, %2\", operands);
7024 if (INTVAL (operands[2]) < 0)
7025 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7027 output_asm_insn (\"add\t%0, %1, %2\", operands);
7030 if (INTVAL (operands[2]) < 0)
7031 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7033 output_asm_insn (\"add\t%0, %0, %2\", operands);
7037 switch (get_attr_length (insn))
7040 return \"b%d3\\t%l4\";
7042 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7044 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7048 [(set (attr "far_jump")
7050 (eq_attr "length" "8")
7051 (const_string "yes")
7052 (const_string "no")))
7053 (set (attr "length")
7055 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7056 (le (minus (match_dup 4) (pc)) (const_int 256)))
7059 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7060 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7066 ;; Comparison and test insns
;; Basic SImode compare setting the full CC register.
7068 (define_insn "*arm_cmpsi_insn"
7069 [(set (reg:CC CC_REGNUM)
7070 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7071 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7076 [(set_attr "conds" "set")]
;; Compare a register against a shifted register operand.
7079 (define_insn "*cmpsi_shiftsi"
7080 [(set (reg:CC CC_REGNUM)
7081 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7082 (match_operator:SI 3 "shift_operator"
7083 [(match_operand:SI 1 "s_register_operand" "r,r")
7084 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7087 [(set_attr "conds" "set")
7088 (set_attr "shift" "1")
7089 (set_attr "arch" "32,a")
7090 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Same comparison with the operands in the opposite order; uses the
;; swapped-condition CC_SWP mode.
7092 (define_insn "*cmpsi_shiftsi_swp"
7093 [(set (reg:CC_SWP CC_REGNUM)
7094 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7095 [(match_operand:SI 1 "s_register_operand" "r,r")
7096 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7097 (match_operand:SI 0 "s_register_operand" "r,r")))]
7100 [(set_attr "conds" "set")
7101 (set_attr "shift" "1")
7102 (set_attr "arch" "32,a")
7103 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Zero-flag-only compare of a register against a negated shifted
;; register.
7105 (define_insn "*arm_cmpsi_negshiftsi_si"
7106 [(set (reg:CC_Z CC_REGNUM)
7108 (neg:SI (match_operator:SI 1 "shift_operator"
7109 [(match_operand:SI 2 "s_register_operand" "r")
7110 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7111 (match_operand:SI 0 "s_register_operand" "r")))]
7114 [(set_attr "conds" "set")
7115 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7116 (const_string "alu_shift")
7117 (const_string "alu_shift_reg")))]
7120 ;; DImode comparisons. The generic code generates branches that
7121 ;; if-conversion cannot reduce to a conditional compare, so we do
;; Signed DImode compare: cmp on the low words, then sbcs into a scratch
;; for the high words (CC_NCV mode).
7124 (define_insn "*arm_cmpdi_insn"
7125 [(set (reg:CC_NCV CC_REGNUM)
7126 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7127 (match_operand:DI 1 "arm_di_operand" "rDi")))
7128 (clobber (match_scratch:SI 2 "=r"))]
7129 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7130 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7131 [(set_attr "conds" "set")
7132 (set_attr "length" "8")]
;; Unsigned DImode compare: high words first, then conditionally the low
;; words (CC_CZ mode); no scratch needed.
7135 (define_insn "*arm_cmpdi_unsigned"
7136 [(set (reg:CC_CZ CC_REGNUM)
7137 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7138 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7140 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7141 [(set_attr "conds" "set")
7142 (set_attr "length" "8")]
;; DImode compare against zero: OR the two halves together into a
;; scratch, setting the Z flag.
7145 (define_insn "*arm_cmpdi_zero"
7146 [(set (reg:CC_Z CC_REGNUM)
7147 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7149 (clobber (match_scratch:SI 1 "=r"))]
7151 "orr%.\\t%1, %Q0, %R0"
7152 [(set_attr "conds" "set")]
;; Thumb variant of the zero compare above (low registers only).
7155 (define_insn "*thumb_cmpdi_zero"
7156 [(set (reg:CC_Z CC_REGNUM)
7157 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7159 (clobber (match_scratch:SI 1 "=l"))]
7161 "orr\\t%1, %Q0, %R0"
7162 [(set_attr "conds" "set")
7163 (set_attr "length" "2")]
7166 ;; Cirrus SF compare instruction
7167 (define_insn "*cirrus_cmpsf"
7168 [(set (reg:CCFP CC_REGNUM)
7169 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7170 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7171 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7172 "cfcmps%?\\tr15, %V0, %V1"
7173 [(set_attr "type" "mav_farith")
7174 (set_attr "cirrus" "compare")]
7177 ;; Cirrus DF compare instruction
7178 (define_insn "*cirrus_cmpdf"
7179 [(set (reg:CCFP CC_REGNUM)
7180 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7181 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7182 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7183 "cfcmpd%?\\tr15, %V0, %V1"
7184 [(set_attr "type" "mav_farith")
7185 (set_attr "cirrus" "compare")]
;; Cirrus DImode (64-bit integer) compare instruction.
7188 (define_insn "*cirrus_cmpdi"
7189 [(set (reg:CC CC_REGNUM)
7190 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7191 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7192 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7193 "cfcmp64%?\\tr15, %V0, %V1"
7194 [(set_attr "type" "mav_farith")
7195 (set_attr "cirrus" "compare")]
7198 ; This insn allows redundant compares to be removed by cse, nothing should
7199 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7200 ; is deleted later on. The match_dup will match the mode here, so that
7201 ; mode changes of the condition codes aren't lost by this even though we don't
7202 ; specify what they are.
7204 (define_insn "*deleted_compare"
7205 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7207 "\\t%@ deleted compare"
7208 [(set_attr "conds" "set")
7209 (set_attr "length" "0")]
7213 ;; Conditional branch insns
;; Internal expander: materialise the comparison into the CC register via
;; arm_gen_compare_reg, then branch on CC against zero.
7215 (define_expand "cbranch_cc"
7217 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7218 (match_operand 2 "" "")])
7219 (label_ref (match_operand 3 "" ""))
7222 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7223 operands[1], operands[2]);
7224 operands[2] = const0_rtx;"
7228 ;; Patterns to match conditional branch insns.
;; Branch when the condition holds.  The arm_ccfsm_state test handles the
;; conditional-execution state machine (branch may be elided when the
;; ccfsm has already conditionalised the following insns).
7231 (define_insn "*arm_cond_branch"
7233 (if_then_else (match_operator 1 "arm_comparison_operator"
7234 [(match_operand 2 "cc_register" "") (const_int 0)])
7235 (label_ref (match_operand 0 "" ""))
7239 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7241 arm_ccfsm_state += 2;
7244 return \"b%d1\\t%l0\";
7246 [(set_attr "conds" "use")
7247 (set_attr "type" "branch")]
;; Same, but the branch target is on the else arm, so the condition is
;; emitted inverted (%D1).
7250 (define_insn "*arm_cond_branch_reversed"
7252 (if_then_else (match_operator 1 "arm_comparison_operator"
7253 [(match_operand 2 "cc_register" "") (const_int 0)])
7255 (label_ref (match_operand 0 "" ""))))]
7258 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7260 arm_ccfsm_state += 2;
7263 return \"b%D1\\t%l0\";
7265 [(set_attr "conds" "use")
7266 (set_attr "type" "branch")]
;; Internal expander: store the result of a comparison (0/1) into a
;; register; mirrors cbranch_cc above.
7273 (define_expand "cstore_cc"
7274 [(set (match_operand:SI 0 "s_register_operand" "")
7275 (match_operator:SI 1 "" [(match_operand 2 "" "")
7276 (match_operand 3 "" "")]))]
7278 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7279 operands[2], operands[3]);
7280 operands[3] = const0_rtx;"
;; scc: conditional 0/1 via a pair of conditional moves.
7283 (define_insn "*mov_scc"
7284 [(set (match_operand:SI 0 "s_register_operand" "=r")
7285 (match_operator:SI 1 "arm_comparison_operator"
7286 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7288 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7289 [(set_attr "conds" "use")
7290 (set_attr "length" "8")]
;; Negated scc: 0 / -1.
7293 (define_insn "*mov_negscc"
7294 [(set (match_operand:SI 0 "s_register_operand" "=r")
7295 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7296 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7298 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7299 [(set_attr "conds" "use")
7300 (set_attr "length" "8")]
;; Complemented scc: 0 / ~1.
7303 (define_insn "*mov_notscc"
7304 [(set (match_operand:SI 0 "s_register_operand" "=r")
7305 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7306 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7308 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7309 [(set_attr "conds" "use")
7310 (set_attr "length" "8")]
;; cstoresi4: store an SImode comparison result in a register.  For
;; TARGET_32BIT this just defers to cstore_cc; the bulk of the body is
;; the Thumb-1 path, which open-codes each comparison with shift/add/
;; subtract sequences (expand_binop) and the thumb1 helper patterns,
;; since Thumb-1 has no conditional-set instruction.
;; NOTE(review): interior lines of this expander (case labels, FAIL arms)
;; are missing from this chunk.
7313 (define_expand "cstoresi4"
7314 [(set (match_operand:SI 0 "s_register_operand" "")
7315 (match_operator:SI 1 "arm_comparison_operator"
7316 [(match_operand:SI 2 "s_register_operand" "")
7317 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7318 "TARGET_32BIT || TARGET_THUMB1"
7320 rtx op3, scratch, scratch2;
7324 if (!arm_add_operand (operands[3], SImode))
7325 operands[3] = force_reg (SImode, operands[3]);
7326 emit_insn (gen_cstore_cc (operands[0], operands[1],
7327 operands[2], operands[3]));
7331 if (operands[3] == const0_rtx)
7333 switch (GET_CODE (operands[1]))
7336 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7340 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7344 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7345 NULL_RTX, 0, OPTAB_WIDEN);
7346 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7347 NULL_RTX, 0, OPTAB_WIDEN);
7348 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7349 operands[0], 1, OPTAB_WIDEN);
7353 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7355 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7356 NULL_RTX, 1, OPTAB_WIDEN);
7360 scratch = expand_binop (SImode, ashr_optab, operands[2],
7361 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7362 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7363 NULL_RTX, 0, OPTAB_WIDEN);
7364 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7368 /* LT is handled by generic code. No need for unsigned with 0. */
7375 switch (GET_CODE (operands[1]))
7378 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7379 NULL_RTX, 0, OPTAB_WIDEN);
7380 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7384 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7385 NULL_RTX, 0, OPTAB_WIDEN);
7386 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7390 op3 = force_reg (SImode, operands[3]);
7392 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7393 NULL_RTX, 1, OPTAB_WIDEN);
7394 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7395 NULL_RTX, 0, OPTAB_WIDEN);
7396 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7402 if (!thumb1_cmp_operand (op3, SImode))
7403 op3 = force_reg (SImode, op3);
7404 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7405 NULL_RTX, 0, OPTAB_WIDEN);
7406 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7407 NULL_RTX, 1, OPTAB_WIDEN);
7408 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7413 op3 = force_reg (SImode, operands[3]);
7414 scratch = force_reg (SImode, const0_rtx);
7415 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7421 if (!thumb1_cmp_operand (op3, SImode))
7422 op3 = force_reg (SImode, op3);
7423 scratch = force_reg (SImode, const0_rtx);
7424 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7430 if (!thumb1_cmp_operand (op3, SImode))
7431 op3 = force_reg (SImode, op3);
7432 scratch = gen_reg_rtx (SImode);
7433 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7437 op3 = force_reg (SImode, operands[3]);
7438 scratch = gen_reg_rtx (SImode);
7439 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7442 /* No good sequences for GT, LT. */
;; SFmode compare-and-store: defer to cstore_cc.
7449 (define_expand "cstoresf4"
7450 [(set (match_operand:SI 0 "s_register_operand" "")
7451 (match_operator:SI 1 "arm_comparison_operator"
7452 [(match_operand:SF 2 "s_register_operand" "")
7453 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7454 "TARGET_32BIT && TARGET_HARD_FLOAT"
7455 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7456 operands[2], operands[3])); DONE;"
;; DFmode compare-and-store: defer to cstore_cc (not for single-precision
;; -only VFP).
7459 (define_expand "cstoredf4"
7460 [(set (match_operand:SI 0 "s_register_operand" "")
7461 (match_operator:SI 1 "arm_comparison_operator"
7462 [(match_operand:DF 2 "s_register_operand" "")
7463 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7464 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7465 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7466 operands[2], operands[3])); DONE;"
;; DImode compare-and-store.  As in cbranchdi4, unimplemented comparison
;; codes are handled by swapping the operands before calling cstore_cc.
7469 (define_expand "cstoredi4"
7470 [(set (match_operand:SI 0 "s_register_operand" "")
7471 (match_operator:SI 1 "arm_comparison_operator"
7472 [(match_operand:DI 2 "cmpdi_operand" "")
7473 (match_operand:DI 3 "cmpdi_operand" "")]))]
7476 rtx swap = NULL_RTX;
7477 enum rtx_code code = GET_CODE (operands[1]);
7479 /* We should not have two constants. */
7480 gcc_assert (GET_MODE (operands[2]) == DImode
7481 || GET_MODE (operands[3]) == DImode);
7483 /* Flip unimplemented DImode comparisons to a form that
7484 arm_gen_compare_reg can handle. */
7488 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7490 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7492 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7494 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7499 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7502 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Thumb-1 helper: op0 = (op1 == 0), allocating the scratch the insn
;; below clobbers.
7508 (define_expand "cstoresi_eq0_thumb1"
7510 [(set (match_operand:SI 0 "s_register_operand" "")
7511 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7513 (clobber (match_dup:SI 2))])]
7515 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 helper: op0 = (op1 != 0).
7518 (define_expand "cstoresi_ne0_thumb1"
7520 [(set (match_operand:SI 0 "s_register_operand" "")
7521 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7523 (clobber (match_dup:SI 2))])]
7525 "operands[2] = gen_reg_rtx (SImode);"
;; op0 = (op1 == 0) via neg/adc carry trickery; second alternative uses
;; the scratch when op0 and op1 are tied.
7528 (define_insn "*cstoresi_eq0_thumb1_insn"
7529 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7530 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7532 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7535 neg\\t%0, %1\;adc\\t%0, %0, %1
7536 neg\\t%2, %1\;adc\\t%0, %1, %2"
7537 [(set_attr "length" "4")]
;; op0 = (op1 != 0) via sub #1 / sbc borrow trickery.
7540 (define_insn "*cstoresi_ne0_thumb1_insn"
7541 [(set (match_operand:SI 0 "s_register_operand" "=l")
7542 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7544 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7546 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7547 [(set_attr "length" "4")]
7550 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; op0 = -(op1 <u op2) computed directly from the carry flag with sbc.
7551 (define_insn "cstoresi_nltu_thumb1"
7552 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7553 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7554 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7556 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7557 [(set_attr "length" "4")]
;; op0 = (op1 <u op2): split into the negated form above followed by a
;; negation into the final destination.
7560 (define_insn_and_split "cstoresi_ltu_thumb1"
7561 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7562 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7563 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7568 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7569 (set (match_dup 0) (neg:SI (match_dup 3)))]
7570 "operands[3] = gen_reg_rtx (SImode);"
7571 [(set_attr "length" "4")]
7574 ;; Used as part of the expansion of thumb les sequence.
;; op0 = op1 + op2 + (op3 >=u op4), implemented as cmp then adc.
7575 (define_insn "thumb1_addsi3_addgeu"
7576 [(set (match_operand:SI 0 "s_register_operand" "=l")
7577 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7578 (match_operand:SI 2 "s_register_operand" "l"))
7579 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7580 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7582 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7583 [(set_attr "length" "4")]
7587 ;; Conditional move insns
;; movsicc: SImode conditional move.  Materialises the comparison into
;; the CC register and rewrites operand 1 as CC-against-zero.  UNEQ and
;; LTGT comparisons are rejected (code visible at the guard below).
7589 (define_expand "movsicc"
7590 [(set (match_operand:SI 0 "s_register_operand" "")
7591 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7592 (match_operand:SI 2 "arm_not_operand" "")
7593 (match_operand:SI 3 "arm_not_operand" "")))]
7597 enum rtx_code code = GET_CODE (operands[1]);
7600 if (code == UNEQ || code == LTGT)
7603 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7604 XEXP (operands[1], 1));
7605 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movsfcc: SFmode conditional move; forces operand 3 into a register
;; when it is not a valid FPA add operand (or when soft-float).
7609 (define_expand "movsfcc"
7610 [(set (match_operand:SF 0 "s_register_operand" "")
7611 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7612 (match_operand:SF 2 "s_register_operand" "")
7613 (match_operand:SF 3 "nonmemory_operand" "")))]
7614 "TARGET_32BIT && TARGET_HARD_FLOAT"
7617 enum rtx_code code = GET_CODE (operands[1]);
7620 if (code == UNEQ || code == LTGT)
7623 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7624 Otherwise, ensure it is a valid FP add operand */
7625 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7626 || (!arm_float_add_operand (operands[3], SFmode)))
7627 operands[3] = force_reg (SFmode, operands[3]);
7629 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7630 XEXP (operands[1], 1));
7631 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movdfcc: DFmode conditional move (FPA or double-precision VFP only).
7635 (define_expand "movdfcc"
7636 [(set (match_operand:DF 0 "s_register_operand" "")
7637 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7638 (match_operand:DF 2 "s_register_operand" "")
7639 (match_operand:DF 3 "arm_float_add_operand" "")))]
7640 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7643 enum rtx_code code = GET_CODE (operands[1]);
7646 if (code == UNEQ || code == LTGT)
7649 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7650 XEXP (operands[1], 1));
7651 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SImode conditional move using predicated mov/mvn pairs; alternatives
;; cover register/immediate/inverted-immediate sources on each arm.
7655 (define_insn "*movsicc_insn"
7656 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7658 (match_operator 3 "arm_comparison_operator"
7659 [(match_operand 4 "cc_register" "") (const_int 0)])
7660 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7661 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7668 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7669 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7670 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7671 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7672 [(set_attr "length" "4,4,4,4,8,8,8,8")
7673 (set_attr "conds" "use")]
;; Soft-float SFmode conditional move: the value lives in core registers,
;; so a predicated core mov suffices.
7676 (define_insn "*movsfcc_soft_insn"
7677 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7678 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7679 [(match_operand 4 "cc_register" "") (const_int 0)])
7680 (match_operand:SF 1 "s_register_operand" "0,r")
7681 (match_operand:SF 2 "s_register_operand" "r,0")))]
7682 "TARGET_ARM && TARGET_SOFT_FLOAT"
7686 [(set_attr "conds" "use")]
7690 ;; Jump and linkage insns
;; Generic unconditional jump expander.
7692 (define_expand "jump"
7694 (label_ref (match_operand 0 "" "")))]
;; ARM unconditional branch; interacts with the ccfsm conditional-
;; execution state machine (see *arm_cond_branch).
7699 (define_insn "*arm_jump"
7701 (label_ref (match_operand 0 "" "")))]
7705 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7707 arm_ccfsm_state += 2;
7710 return \"b%?\\t%l0\";
7713 [(set_attr "predicable" "yes")]
;; Thumb unconditional branch; falls back to bl for out-of-range (far)
;; jumps, selected via the length attribute.
7716 (define_insn "*thumb_jump"
7718 (label_ref (match_operand 0 "" "")))]
7721 if (get_attr_length (insn) == 2)
7723 return \"bl\\t%l0\\t%@ far jump\";
7725 [(set (attr "far_jump")
7727 (eq_attr "length" "4")
7728 (const_string "yes")
7729 (const_string "no")))
7730 (set (attr "length")
7732 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7733 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; call expander: decides whether to force the callee address into a
;; register (long calls) before emitting call_internal via
;; arm_emit_call_insn.
7738 (define_expand "call"
7739 [(parallel [(call (match_operand 0 "memory_operand" "")
7740 (match_operand 1 "general_operand" ""))
7741 (use (match_operand 2 "" ""))
7742 (clobber (reg:SI LR_REGNUM))])]
7748 /* In an untyped call, we can get NULL for operand 2. */
7749 if (operands[2] == NULL_RTX)
7750 operands[2] = const0_rtx;
7752 /* Decide if we should generate indirect calls by loading the
7753 32-bit address of the callee into a register before performing the
7755 callee = XEXP (operands[0], 0);
7756 if (GET_CODE (callee) == SYMBOL_REF
7757 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7759 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7761 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7762 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7767 (define_expand "call_internal"
7768 [(parallel [(call (match_operand 0 "memory_operand" "")
7769 (match_operand 1 "general_operand" ""))
7770 (use (match_operand 2 "" ""))
7771 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call through a register, ARMv5+ (blx available).
7773 (define_insn "*call_reg_armv5"
7774 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7775 (match_operand 1 "" ""))
7776 (use (match_operand 2 "" ""))
7777 (clobber (reg:SI LR_REGNUM))]
7778 "TARGET_ARM && arm_arch5"
7780 [(set_attr "type" "call")]
;; Indirect call through a register, pre-ARMv5: output_call emits a
;; mov lr, pc / branch sequence.
7783 (define_insn "*call_reg_arm"
7784 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7785 (match_operand 1 "" ""))
7786 (use (match_operand 2 "" ""))
7787 (clobber (reg:SI LR_REGNUM))]
7788 "TARGET_ARM && !arm_arch5"
7790 return output_call (operands);
7792 ;; length is worst case, normally it is only two
7793 [(set_attr "length" "12")
7794 (set_attr "type" "call")]
7798 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7799 ;; considered a function call by the branch predictor of some cores (PR40887).
7800 ;; Falls back to blx rN (*call_reg_armv5).
7802 (define_insn "*call_mem"
7803 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7804 (match_operand 1 "" ""))
7805 (use (match_operand 2 "" ""))
7806 (clobber (reg:SI LR_REGNUM))]
7807 "TARGET_ARM && !arm_arch5"
7809 return output_call_mem (operands);
7811 [(set_attr "length" "12")
7812 (set_attr "type" "call")]
;; Thumb-1 indirect call, ARMv5+ (blx).
7815 (define_insn "*call_reg_thumb1_v5"
7816 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7817 (match_operand 1 "" ""))
7818 (use (match_operand 2 "" ""))
7819 (clobber (reg:SI LR_REGNUM))]
7820 "TARGET_THUMB1 && arm_arch5"
7822 [(set_attr "length" "2")
7823 (set_attr "type" "call")]
;; Thumb-1 indirect call, pre-ARMv5: goes through the per-register
;; call-via or interworking helper veneers.
7826 (define_insn "*call_reg_thumb1"
7827 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7828 (match_operand 1 "" ""))
7829 (use (match_operand 2 "" ""))
7830 (clobber (reg:SI LR_REGNUM))]
7831 "TARGET_THUMB1 && !arm_arch5"
7834 if (!TARGET_CALLER_INTERWORKING)
7835 return thumb_call_via_reg (operands[0]);
7836 else if (operands[1] == const0_rtx)
7837 return \"bl\\t%__interwork_call_via_%0\";
7838 else if (frame_pointer_needed)
7839 return \"bl\\t%__interwork_r7_call_via_%0\";
7841 return \"bl\\t%__interwork_r11_call_via_%0\";
7843 [(set_attr "type" "call")]
;; Expand a subroutine call that returns a value.  Mirrors the "call"
;; expander above: long calls go through a register, LR is clobbered.
7846 (define_expand "call_value"
7847 [(parallel [(set (match_operand 0 "" "")
7848 (call (match_operand 1 "memory_operand" "")
7849 (match_operand 2 "general_operand" "")))
7850 (use (match_operand 3 "" ""))
7851 (clobber (reg:SI LR_REGNUM))])]
7857 /* In an untyped call, we can get NULL for operand 3. */
7858 if (operands[3] == 0)
7859 operands[3] = const0_rtx;
7861 /* Decide if we should generate indirect calls by loading the
7862 32-bit address of the callee into a register before performing the
7864 callee = XEXP (operands[1], 0);
7865 if (GET_CODE (callee) == SYMBOL_REF
7866 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7868 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7870 pat = gen_call_value_internal (operands[0], operands[1],
7871 operands[2], operands[3]);
7872 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Internal form of "call_value", matched by the *call_value_* insns below.
7877 (define_expand "call_value_internal"
7878 [(parallel [(set (match_operand 0 "" "")
7879 (call (match_operand 1 "memory_operand" "")
7880 (match_operand 2 "general_operand" "")))
7881 (use (match_operand 3 "" ""))
7882 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning counterparts of the register/memory call insns above.
;; ARMv5+ register-indirect call returning a value.
7884 (define_insn "*call_value_reg_armv5"
7885 [(set (match_operand 0 "" "")
7886 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7887 (match_operand 2 "" "")))
7888 (use (match_operand 3 "" ""))
7889 (clobber (reg:SI LR_REGNUM))]
7890 "TARGET_ARM && arm_arch5"
7892 [(set_attr "type" "call")]
;; Pre-ARMv5 register-indirect call returning a value; note the operand
;; array is passed starting at operands[1] so output_call sees the
;; callee as its operand 0.
7895 (define_insn "*call_value_reg_arm"
7896 [(set (match_operand 0 "" "")
7897 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7898 (match_operand 2 "" "")))
7899 (use (match_operand 3 "" ""))
7900 (clobber (reg:SI LR_REGNUM))]
7901 "TARGET_ARM && !arm_arch5"
7903 return output_call (&operands[1]);
7905 [(set_attr "length" "12")
7906 (set_attr "type" "call")]
7909 ;; Note: see *call_mem
7911 (define_insn "*call_value_mem"
7912 [(set (match_operand 0 "" "")
7913 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7914 (match_operand 2 "" "")))
7915 (use (match_operand 3 "" ""))
7916 (clobber (reg:SI LR_REGNUM))]
7917 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7919 return output_call_mem (&operands[1]);
7921 [(set_attr "length" "12")
7922 (set_attr "type" "call")]
;; Thumb-1 on ARMv5+: register-indirect call returning a value.
7925 (define_insn "*call_value_reg_thumb1_v5"
7926 [(set (match_operand 0 "" "")
7927 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7928 (match_operand 2 "" "")))
7929 (use (match_operand 3 "" ""))
7930 (clobber (reg:SI LR_REGNUM))]
7931 "TARGET_THUMB1 && arm_arch5"
7933 [(set_attr "length" "2")
7934 (set_attr "type" "call")]
;; Thumb-1 without v5: value-returning call via interworking helper
;; thunks when caller interworking is enabled (cf. *call_reg_thumb1).
7937 (define_insn "*call_value_reg_thumb1"
7938 [(set (match_operand 0 "" "")
7939 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7940 (match_operand 2 "" "")))
7941 (use (match_operand 3 "" ""))
7942 (clobber (reg:SI LR_REGNUM))]
7943 "TARGET_THUMB1 && !arm_arch5"
7946 if (!TARGET_CALLER_INTERWORKING)
7947 return thumb_call_via_reg (operands[1]);
7948 else if (operands[2] == const0_rtx)
7949 return \"bl\\t%__interwork_call_via_%1\";
7950 else if (frame_pointer_needed)
7951 return \"bl\\t%__interwork_r7_call_via_%1\";
7953 return \"bl\\t%__interwork_r11_call_via_%1\";
7955 [(set_attr "type" "call")]
7958 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7959 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct "bl" to a symbol; uses a (PLT) reloc when calling through the PLT.
;; Only for symbols that do not require the long-call sequence.
7961 (define_insn "*call_symbol"
7962 [(call (mem:SI (match_operand:SI 0 "" ""))
7963 (match_operand 1 "" ""))
7964 (use (match_operand 2 "" ""))
7965 (clobber (reg:SI LR_REGNUM))]
7967 && (GET_CODE (operands[0]) == SYMBOL_REF)
7968 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7971 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7973 [(set_attr "type" "call")]
;; Value-returning direct call to a symbol.
7976 (define_insn "*call_value_symbol"
7977 [(set (match_operand 0 "" "")
7978 (call (mem:SI (match_operand:SI 1 "" ""))
7979 (match_operand:SI 2 "" "")))
7980 (use (match_operand 3 "" ""))
7981 (clobber (reg:SI LR_REGNUM))]
7983 && (GET_CODE (operands[1]) == SYMBOL_REF)
7984 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7987 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7989 [(set_attr "type" "call")]
;; Direct symbol call variant (4-byte encoding); the condition string is
;; partially elided in this extract.
7992 (define_insn "*call_insn"
7993 [(call (mem:SI (match_operand:SI 0 "" ""))
7994 (match_operand:SI 1 "" ""))
7995 (use (match_operand 2 "" ""))
7996 (clobber (reg:SI LR_REGNUM))]
7998 && GET_CODE (operands[0]) == SYMBOL_REF
7999 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8001 [(set_attr "length" "4")
8002 (set_attr "type" "call")]
;; Value-returning variant of *call_insn.
8005 (define_insn "*call_value_insn"
8006 [(set (match_operand 0 "" "")
8007 (call (mem:SI (match_operand 1 "" ""))
8008 (match_operand 2 "" "")))
8009 (use (match_operand 3 "" ""))
8010 (clobber (reg:SI LR_REGNUM))]
8012 && GET_CODE (operands[1]) == SYMBOL_REF
8013 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8015 [(set_attr "length" "4")
8016 (set_attr "type" "call")]
8019 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Sibling (tail) call: the call replaces the current frame, so LR is
;; not clobbered here.  Operand 2 may be NULL (untyped call).
8020 (define_expand "sibcall"
8021 [(parallel [(call (match_operand 0 "memory_operand" "")
8022 (match_operand 1 "general_operand" ""))
8024 (use (match_operand 2 "" ""))])]
8028 if (operands[2] == NULL_RTX)
8029 operands[2] = const0_rtx;
;; Value-returning sibling call.
8033 (define_expand "sibcall_value"
8034 [(parallel [(set (match_operand 0 "" "")
8035 (call (match_operand 1 "memory_operand" "")
8036 (match_operand 2 "general_operand" "")))
8038 (use (match_operand 3 "" ""))])]
8042 if (operands[3] == NULL_RTX)
8043 operands[3] = const0_rtx;
;; Sibling call to a symbol: emitted as a plain branch "b" (not "bl"),
;; with a (PLT) reloc when required.
8047 (define_insn "*sibcall_insn"
8048 [(call (mem:SI (match_operand:SI 0 "" "X"))
8049 (match_operand 1 "" ""))
8051 (use (match_operand 2 "" ""))]
8052 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8054 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8056 [(set_attr "type" "call")]
;; Value-returning sibling call to a symbol.
8059 (define_insn "*sibcall_value_insn"
8060 [(set (match_operand 0 "" "")
8061 (call (mem:SI (match_operand:SI 1 "" "X"))
8062 (match_operand 2 "" "")))
8064 (use (match_operand 3 "" ""))]
8065 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8067 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8069 [(set_attr "type" "call")]
;; Function-return expander; only valid when a single-instruction
;; return is possible (USE_RETURN_INSN).
8072 (define_expand "return"
8074 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8077 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM-state return.  ccfsm state 2 means the return is
;; being folded into conditional execution, hence the state bump.
8078 (define_insn "*arm_return"
8080 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8083 if (arm_ccfsm_state == 2)
8085 arm_ccfsm_state += 2;
8088 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8090 [(set_attr "type" "load1")
8091 (set_attr "length" "12")
8092 (set_attr "predicable" "yes")]
;; Conditional return, taken when the comparison in operand 0 holds.
8095 (define_insn "*cond_return"
8097 (if_then_else (match_operator 0 "arm_comparison_operator"
8098 [(match_operand 1 "cc_register" "") (const_int 0)])
8101 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8104 if (arm_ccfsm_state == 2)
8106 arm_ccfsm_state += 2;
8109 return output_return_instruction (operands[0], TRUE, FALSE);
8111 [(set_attr "conds" "use")
8112 (set_attr "length" "12")
8113 (set_attr "type" "load1")]
;; As *cond_return but with the condition inverted (third argument of
;; output_return_instruction is TRUE).
8116 (define_insn "*cond_return_inverted"
8118 (if_then_else (match_operator 0 "arm_comparison_operator"
8119 [(match_operand 1 "cc_register" "") (const_int 0)])
8122 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8125 if (arm_ccfsm_state == 2)
8127 arm_ccfsm_state += 2;
8130 return output_return_instruction (operands[0], TRUE, TRUE);
8132 [(set_attr "conds" "use")
8133 (set_attr "length" "12")
8134 (set_attr "type" "load1")]
8137 ;; Generate a sequence of instructions to determine if the processor is
8138 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Sets operand 0 to the mask to apply to a return address:
;; 0x03fffffc strips the PSR bits in 26-bit mode.
8141 (define_expand "return_addr_mask"
8143 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8145 (set (match_operand:SI 0 "s_register_operand" "")
8146 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8148 (const_int 67108860)))] ; 0x03fffffc
8151 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM)
;; Architecture probe used by return_addr_mask: the second TEQ compares
;; pc against itself, whose flag result distinguishes 26/32-bit mode.
8154 (define_insn "*check_arch2"
8155 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8156 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8159 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8160 [(set_attr "length" "8")
8161 (set_attr "conds" "set")]
8164 ;; Call subroutine returning any type.
;; __builtin_apply support: call operand 0, then store every possible
;; result register (described by the parallel in operand 2) into the
;; result block operand 1.  r0 is widened to TImode so that up to four
;; core registers can be captured with one store-multiple.
8166 (define_expand "untyped_call"
8167 [(parallel [(call (match_operand 0 "" "")
8169 (match_operand 1 "" "")
8170 (match_operand 2 "" "")])]
8175 rtx par = gen_rtx_PARALLEL (VOIDmode,
8176 rtvec_alloc (XVECLEN (operands[2], 0)));
8177 rtx addr = gen_reg_rtx (Pmode);
8181 emit_move_insn (addr, XEXP (operands[1], 0));
8182 mem = change_address (operands[1], BLKmode, addr);
8184 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8186 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8188 /* Default code only uses r0 as a return value, but we could
8189 be using anything up to 4 registers. */
8190 if (REGNO (src) == R0_REGNUM)
8191 src = gen_rtx_REG (TImode, R0_REGNUM);
8193 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8195 size += GET_MODE_SIZE (GET_MODE (src));
8198 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8203 for (i = 0; i < XVECLEN (par, 0); i++)
8205 HOST_WIDE_INT offset = 0;
8206 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8209 emit_move_insn (addr, plus_constant (addr, size));
8211 mem = change_address (mem, GET_MODE (reg), NULL);
8212 if (REGNO (reg) == R0_REGNUM)
8214 /* On thumb we have to use a write-back instruction. */
8215 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8216 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8217 size = TARGET_ARM ? 16 : 0;
8221 emit_move_insn (mem, reg);
8222 size = GET_MODE_SIZE (GET_MODE (reg));
8226 /* The optimizer does not know that the call sets the function value
8227 registers we stored in the result block. We avoid problems by
8228 claiming that all hard registers are used and clobbered at this
8230 emit_insn (gen_blockage ());
;; __builtin_return support: reload every possible result register from
;; the result block (operand 0) as described by operand 1, emit USEs so
;; the values stay live, then perform a naked return.
8236 (define_expand "untyped_return"
8237 [(match_operand:BLK 0 "memory_operand" "")
8238 (match_operand 1 "" "")]
8243 rtx addr = gen_reg_rtx (Pmode);
8247 emit_move_insn (addr, XEXP (operands[0], 0));
8248 mem = change_address (operands[0], BLKmode, addr);
8250 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8252 HOST_WIDE_INT offset = 0;
8253 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8256 emit_move_insn (addr, plus_constant (addr, size));
8258 mem = change_address (mem, GET_MODE (reg), NULL);
8259 if (REGNO (reg) == R0_REGNUM)
8261 /* On thumb we have to use a write-back instruction. */
8262 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8263 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8264 size = TARGET_ARM ? 16 : 0;
8268 emit_move_insn (reg, mem);
8269 size = GET_MODE_SIZE (GET_MODE (reg));
8273 /* Emit USE insns before the return. */
8274 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8275 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8277 /* Construct the return. */
8278 expand_naked_return ();
8284 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8285 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no code.
8287 (define_insn "blockage"
8288 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8291 [(set_attr "length" "0")
8292 (set_attr "type" "block")]
;; Expand a switch jump table.  The index is first biased by the lower
;; bound, then dispatched through the per-(sub)target casesi insn.
8295 (define_expand "casesi"
8296 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8297 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8298 (match_operand:SI 2 "const_int_operand" "") ; total range
8299 (match_operand:SI 3 "" "") ; table label
8300 (match_operand:SI 4 "" "")] ; Out of range label
8301 "TARGET_32BIT || optimize_size || flag_pic"
8304 enum insn_code code;
8305 if (operands[1] != const0_rtx)
8307 rtx reg = gen_reg_rtx (SImode);
8309 emit_insn (gen_addsi3 (reg, operands[0],
8310 GEN_INT (-INTVAL (operands[1]))))
8315 code = CODE_FOR_arm_casesi_internal;
8316 else if (TARGET_THUMB1)
8317 code = CODE_FOR_thumb1_casesi_internal_pic;
8319 code = CODE_FOR_thumb2_casesi_internal_pic;
8321 code = CODE_FOR_thumb2_casesi_internal;
8323 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8324 operands[2] = force_reg (SImode, operands[2]);
8326 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8327 operands[3], operands[4]));
8332 ;; The USE in this pattern is needed to tell flow analysis that this is
8333 ;; a CASESI insn. It has no other purpose.
;; ARM-state dispatch: compare the index against the range; in range,
;; jump via a pc-relative table of 4-byte entries, else fall through to
;; the default label (operand 3).
8334 (define_insn "arm_casesi_internal"
8335 [(parallel [(set (pc)
8337 (leu (match_operand:SI 0 "s_register_operand" "r")
8338 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8339 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8340 (label_ref (match_operand 2 "" ""))))
8341 (label_ref (match_operand 3 "" ""))))
8342 (clobber (reg:CC CC_REGNUM))
8343 (use (label_ref (match_dup 2)))])]
8347 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8348 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8350 [(set_attr "conds" "clob")
8351 (set_attr "length" "12")]
;; Thumb-1 PIC dispatch: branch to the default label on out-of-range,
;; copy the index into r0 and hand off to thumb1_casesi_dispatch.
8354 (define_expand "thumb1_casesi_internal_pic"
8355 [(match_operand:SI 0 "s_register_operand" "")
8356 (match_operand:SI 1 "thumb1_cmp_operand" "")
8357 (match_operand 2 "" "")
8358 (match_operand 3 "" "")]
8362 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8363 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8365 reg0 = gen_rtx_REG (SImode, 0);
8366 emit_move_insn (reg0, operands[0]);
8367 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; The actual Thumb-1 table dispatch; index is in r0, ip and lr are
;; clobbered by the generated sequence (see thumb1_output_casesi).
8372 (define_insn "thumb1_casesi_dispatch"
8373 [(parallel [(set (pc) (unspec [(reg:SI 0)
8374 (label_ref (match_operand 0 "" ""))
8375 ;; (label_ref (match_operand 1 "" ""))
8377 UNSPEC_THUMB1_CASESI))
8378 (clobber (reg:SI IP_REGNUM))
8379 (clobber (reg:SI LR_REGNUM))])]
8381 "* return thumb1_output_casesi(operands);"
8382 [(set_attr "length" "4")]
;; Jump through a register.  For Thumb-2 the low (Thumb) bit of the
;; target address is set explicitly and BX is used instead.
8385 (define_expand "indirect_jump"
8387 (match_operand:SI 0 "s_register_operand" ""))]
8390 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8391 address and use bx. */
8395 tmp = gen_reg_rtx (SImode);
8396 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8402 ;; NB Never uses BX.
8403 (define_insn "*arm_indirect_jump"
8405 (match_operand:SI 0 "s_register_operand" "r"))]
8407 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8408 [(set_attr "predicable" "yes")]
;; Indirect jump loading the target straight from memory into pc.
8411 (define_insn "*load_indirect_jump"
8413 (match_operand:SI 0 "memory_operand" "m"))]
8415 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8416 [(set_attr "type" "load1")
8417 (set_attr "pool_range" "4096")
8418 (set_attr "neg_pool_range" "4084")
8419 (set_attr "predicable" "yes")]
8422 ;; NB Never uses BX.
8423 (define_insn "*thumb1_indirect_jump"
8425 (match_operand:SI 0 "register_operand" "l*r"))]
8428 [(set_attr "conds" "clob")
8429 (set_attr "length" "2")]
8439 if (TARGET_UNIFIED_ASM)
8442 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8443 return \"mov\\tr8, r8\";
8445 [(set (attr "length")
8446 (if_then_else (eq_attr "is_thumb" "yes")
8452 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; ALU op combined with a shifted operand: op0 = op2 <i1> (op4 shift op5).
8454 (define_insn "*arith_shiftsi"
8455 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8456 (match_operator:SI 1 "shiftable_operator"
8457 [(match_operator:SI 3 "shift_operator"
8458 [(match_operand:SI 4 "s_register_operand" "r,r")
8459 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8460 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8462 "%i1%?\\t%0, %2, %4%S3"
8463 [(set_attr "predicable" "yes")
8464 (set_attr "shift" "4")
8465 (set_attr "arch" "32,a")
8466 ;; We have to make sure to disable the second alternative if
8467 ;; the shift_operator is MULT, since otherwise the insn will
8468 ;; also match a multiply_accumulate pattern and validate_change
8469 ;; will allow a replacement of the constant with a register
8470 ;; despite the checks done in shift_operator.
8471 (set_attr_alternative "insn_enabled"
8472 [(const_string "yes")
8474 (match_operand:SI 3 "mult_operator" "")
8475 (const_string "no") (const_string "yes"))])
8476 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Split a doubly-nested arith+shift expression into two insns using
;; the clobbered scratch (operand 8) for the intermediate result.
8479 [(set (match_operand:SI 0 "s_register_operand" "")
8480 (match_operator:SI 1 "shiftable_operator"
8481 [(match_operator:SI 2 "shiftable_operator"
8482 [(match_operator:SI 3 "shift_operator"
8483 [(match_operand:SI 4 "s_register_operand" "")
8484 (match_operand:SI 5 "reg_or_int_operand" "")])
8485 (match_operand:SI 6 "s_register_operand" "")])
8486 (match_operand:SI 7 "arm_rhs_operand" "")]))
8487 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8490 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8493 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi, but also setting the condition codes (CC_NOOV).
8496 (define_insn "*arith_shiftsi_compare0"
8497 [(set (reg:CC_NOOV CC_REGNUM)
8499 (match_operator:SI 1 "shiftable_operator"
8500 [(match_operator:SI 3 "shift_operator"
8501 [(match_operand:SI 4 "s_register_operand" "r,r")
8502 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8503 (match_operand:SI 2 "s_register_operand" "r,r")])
8505 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8506 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8509 "%i1%.\\t%0, %2, %4%S3"
8510 [(set_attr "conds" "set")
8511 (set_attr "shift" "4")
8512 (set_attr "arch" "32,a")
8513 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting form whose arithmetic result is discarded (scratch dest).
8515 (define_insn "*arith_shiftsi_compare0_scratch"
8516 [(set (reg:CC_NOOV CC_REGNUM)
8518 (match_operator:SI 1 "shiftable_operator"
8519 [(match_operator:SI 3 "shift_operator"
8520 [(match_operand:SI 4 "s_register_operand" "r,r")
8521 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8522 (match_operand:SI 2 "s_register_operand" "r,r")])
8524 (clobber (match_scratch:SI 0 "=r,r"))]
8526 "%i1%.\\t%0, %2, %4%S3"
8527 [(set_attr "conds" "set")
8528 (set_attr "shift" "4")
8529 (set_attr "arch" "32,a")
8530 (set_attr "type" "alu_shift,alu_shift_reg")])
;; sub with a shifted subtrahend: op0 = op1 - (op3 shift op4).
8532 (define_insn "*sub_shiftsi"
8533 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8534 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8535 (match_operator:SI 2 "shift_operator"
8536 [(match_operand:SI 3 "s_register_operand" "r,r")
8537 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8539 "sub%?\\t%0, %1, %3%S2"
8540 [(set_attr "predicable" "yes")
8541 (set_attr "shift" "3")
8542 (set_attr "arch" "32,a")
8543 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting variant of *sub_shiftsi.
8545 (define_insn "*sub_shiftsi_compare0"
8546 [(set (reg:CC_NOOV CC_REGNUM)
8548 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8549 (match_operator:SI 2 "shift_operator"
8550 [(match_operand:SI 3 "s_register_operand" "r,r")
8551 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8553 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8554 (minus:SI (match_dup 1)
8555 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8557 "sub%.\\t%0, %1, %3%S2"
8558 [(set_attr "conds" "set")
8559 (set_attr "shift" "3")
8560 (set_attr "arch" "32,a")
8561 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting sub+shift whose result is discarded (scratch dest).
8563 (define_insn "*sub_shiftsi_compare0_scratch"
8564 [(set (reg:CC_NOOV CC_REGNUM)
8566 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8567 (match_operator:SI 2 "shift_operator"
8568 [(match_operand:SI 3 "s_register_operand" "r,r")
8569 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8571 (clobber (match_scratch:SI 0 "=r,r"))]
8573 "sub%.\\t%0, %1, %3%S2"
8574 [(set_attr "conds" "set")
8575 (set_attr "shift" "3")
8576 (set_attr "arch" "32,a")
8577 (set_attr "type" "alu_shift,alu_shift_reg")])
;; AND of a stored condition with a register: conditional moves build
;; 0 or (op2 & 1) depending on the comparison in operand 1.
8580 (define_insn "*and_scc"
8581 [(set (match_operand:SI 0 "s_register_operand" "=r")
8582 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8583 [(match_operand 3 "cc_register" "") (const_int 0)])
8584 (match_operand:SI 2 "s_register_operand" "r")))]
8586 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8587 [(set_attr "conds" "use")
8588 (set_attr "length" "8")]
;; IOR of a stored condition with a register: or in bit 0 when the
;; comparison holds; second alternative needs an extra mov first.
8591 (define_insn "*ior_scc"
8592 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8593 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8594 [(match_operand 3 "cc_register" "") (const_int 0)])
8595 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8599 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8600 [(set_attr "conds" "use")
8601 (set_attr "length" "4,8")]
8604 ; A series of splitters for the compare_scc pattern below. Note that
8605 ; order is important.
; x < 0 -> logical shift right by 31 (sign bit).
8607 [(set (match_operand:SI 0 "s_register_operand" "")
8608 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8610 (clobber (reg:CC CC_REGNUM))]
8611 "TARGET_32BIT && reload_completed"
8612 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
; x >= 0 -> invert then shift the sign bit down.
8615 [(set (match_operand:SI 0 "s_register_operand" "")
8616 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8618 (clobber (reg:CC CC_REGNUM))]
8619 "TARGET_32BIT && reload_completed"
8620 [(set (match_dup 0) (not:SI (match_dup 1)))
8621 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
; eq against a constant via reverse subtraction and conditional clear.
8624 [(set (match_operand:SI 0 "s_register_operand" "")
8625 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8627 (clobber (reg:CC CC_REGNUM))]
8628 "TARGET_32BIT && reload_completed"
8630 [(set (reg:CC CC_REGNUM)
8631 (compare:CC (const_int 1) (match_dup 1)))
8633 (minus:SI (const_int 1) (match_dup 1)))])
8634 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8635 (set (match_dup 0) (const_int 0)))])
; ne against a constant: bias by -op2, then force 1 when nonzero.
8638 [(set (match_operand:SI 0 "s_register_operand" "")
8639 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8640 (match_operand:SI 2 "const_int_operand" "")))
8641 (clobber (reg:CC CC_REGNUM))]
8642 "TARGET_32BIT && reload_completed"
8644 [(set (reg:CC CC_REGNUM)
8645 (compare:CC (match_dup 1) (match_dup 2)))
8646 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8647 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8648 (set (match_dup 0) (const_int 1)))]
8650 operands[3] = GEN_INT (-INTVAL (operands[2]));
; ne against a general addend: flag-setting subtract, then force 1.
8654 [(set (match_operand:SI 0 "s_register_operand" "")
8655 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8656 (match_operand:SI 2 "arm_add_operand" "")))
8657 (clobber (reg:CC CC_REGNUM))]
8658 "TARGET_32BIT && reload_completed"
8660 [(set (reg:CC_NOOV CC_REGNUM)
8661 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8663 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8664 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8665 (set (match_dup 0) (const_int 1)))])
;; Generic store-condition: op0 = (op2 <op1> op3).  After reload it is
;; split into a compare plus two conditional moves (0 then 1) using the
;; condition and its reverse, computed in the preparation code below.
8667 (define_insn_and_split "*compare_scc"
8668 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8669 (match_operator:SI 1 "arm_comparison_operator"
8670 [(match_operand:SI 2 "s_register_operand" "r,r")
8671 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8672 (clobber (reg:CC CC_REGNUM))]
8675 "&& reload_completed"
8676 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8677 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8678 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8681 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8682 operands[2], operands[3]);
8683 enum rtx_code rc = GET_CODE (operands[1]);
8685 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8687 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8688 if (mode == CCFPmode || mode == CCFPEmode)
8689 rc = reverse_condition_maybe_unordered (rc);
8691 rc = reverse_condition (rc);
8692 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8695 ;; Attempt to improve the sequence generated by the compare_scc splitters
8696 ;; not to use conditional execution.
;; Peephole replacement: rewrite compare + two cond_exec moves into a
;; subtract / reverse-subtract-with-carry sequence via scratch op3.
8698 [(set (reg:CC CC_REGNUM)
8699 (compare:CC (match_operand:SI 1 "register_operand" "")
8700 (match_operand:SI 2 "arm_rhs_operand" "")))
8701 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8702 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8703 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8704 (set (match_dup 0) (const_int 1)))
8705 (match_scratch:SI 3 "r")]
8707 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8709 [(set (reg:CC CC_REGNUM)
8710 (compare:CC (const_int 0) (match_dup 3)))
8711 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8713 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8714 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
;; Conditional move selecting op1 or op2 on a stored CC comparison; the
;; NE/EQ wrapper in operand 3 decides which arm takes the true case.
8716 (define_insn "*cond_move"
8717 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8718 (if_then_else:SI (match_operator 3 "equality_operator"
8719 [(match_operator 4 "arm_comparison_operator"
8720 [(match_operand 5 "cc_register" "") (const_int 0)])
8722 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8723 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8726 if (GET_CODE (operands[3]) == NE)
8728 if (which_alternative != 1)
8729 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8730 if (which_alternative != 0)
8731 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8734 if (which_alternative != 0)
8735 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8736 if (which_alternative != 1)
8737 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8740 [(set_attr "conds" "use")
8741 (set_attr "length" "4,4,8")]
;; ALU op whose first input is a comparison result; special-cases
;; "x < 0" as a plain lsr #31 and AND/MINUS forms before the generic
;; cmp + conditional-op sequence.
8744 (define_insn "*cond_arith"
8745 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8746 (match_operator:SI 5 "shiftable_operator"
8747 [(match_operator:SI 4 "arm_comparison_operator"
8748 [(match_operand:SI 2 "s_register_operand" "r,r")
8749 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8750 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8751 (clobber (reg:CC CC_REGNUM))]
8754 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8755 return \"%i5\\t%0, %1, %2, lsr #31\";
8757 output_asm_insn (\"cmp\\t%2, %3\", operands);
8758 if (GET_CODE (operands[5]) == AND)
8759 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8760 else if (GET_CODE (operands[5]) == MINUS)
8761 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8762 else if (which_alternative != 0)
8763 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8764 return \"%i5%d4\\t%0, %1, #1\";
8766 [(set_attr "conds" "clob")
8767 (set_attr "length" "12")]
;; op0 = op1 - (comparison result): cmp then conditional sub of 1.
8770 (define_insn "*cond_sub"
8771 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8772 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8773 (match_operator:SI 4 "arm_comparison_operator"
8774 [(match_operand:SI 2 "s_register_operand" "r,r")
8775 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8776 (clobber (reg:CC CC_REGNUM))]
8779 output_asm_insn (\"cmp\\t%2, %3\", operands);
8780 if (which_alternative != 0)
8781 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8782 return \"sub%d4\\t%0, %1, #1\";
8784 [(set_attr "conds" "clob")
8785 (set_attr "length" "8,12")]
8788 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; Combine two comparisons into one dominance CC-mode result.  The
;; opcodes table is indexed by [alternative][swap], where swap says
;; whether the second comparison dominates the first (see
;; comparison_dominates_p); cmn forms handle negated immediates (L).
8789 (define_insn "*cmp_ite0"
8790 [(set (match_operand 6 "dominant_cc_register" "")
8793 (match_operator 4 "arm_comparison_operator"
8794 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8795 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8796 (match_operator:SI 5 "arm_comparison_operator"
8797 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8798 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8804 static const char * const opcodes[4][2] =
8806 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8807 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8808 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8809 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8810 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8811 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8812 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8813 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8816 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8818 return opcodes[which_alternative][swap];
8820 [(set_attr "conds" "set")
8821 (set_attr "length" "8")]
;; As *cmp_ite0 but with the else-arm comparison: the swap test uses
;; the REVERSE of the first comparison's code.
8824 (define_insn "*cmp_ite1"
8825 [(set (match_operand 6 "dominant_cc_register" "")
8828 (match_operator 4 "arm_comparison_operator"
8829 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8830 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8831 (match_operator:SI 5 "arm_comparison_operator"
8832 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8833 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8839 static const char * const opcodes[4][2] =
8841 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8842 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8843 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8844 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8845 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8846 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8847 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8848 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8851 comparison_dominates_p (GET_CODE (operands[5]),
8852 reverse_condition (GET_CODE (operands[4])));
8854 return opcodes[which_alternative][swap];
8856 [(set_attr "conds" "set")
8857 (set_attr "length" "8")]
;; AND of two comparisons as a dominance CC result; same opcode table
;; layout as *cmp_ite0.
8860 (define_insn "*cmp_and"
8861 [(set (match_operand 6 "dominant_cc_register" "")
8864 (match_operator 4 "arm_comparison_operator"
8865 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8866 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8867 (match_operator:SI 5 "arm_comparison_operator"
8868 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8869 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8874 static const char *const opcodes[4][2] =
8876 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8877 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8878 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8879 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8880 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8881 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8882 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8883 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8886 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8888 return opcodes[which_alternative][swap];
8890 [(set_attr "conds" "set")
8891 (set_attr "predicable" "no")
8892 (set_attr "length" "8")]
;; IOR of two comparisons as a dominance CC result; second comparison
;; in each pair is executed under the REVERSED (%D) condition.
8895 (define_insn "*cmp_ior"
8896 [(set (match_operand 6 "dominant_cc_register" "")
8899 (match_operator 4 "arm_comparison_operator"
8900 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8901 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8902 (match_operator:SI 5 "arm_comparison_operator"
8903 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8904 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8909 static const char *const opcodes[4][2] =
8911 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8912 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8913 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8914 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8915 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8916 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8917 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8918 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8921 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8923 return opcodes[which_alternative][swap];
8926 [(set_attr "conds" "set")
8927 (set_attr "length" "8")]
;; (scc OP scc) via IOR: match the OR of two store-condition results,
;; then split after reload into a dominant-CC compare (operand 7, a
;; fresh CC reg in the mode chosen by arm_select_dominance_cc_mode with
;; DOM_CC_X_OR_Y) followed by a single ne:SI store of the combined
;; condition.  Interior lines (condition, split preamble) are missing
;; from this extract.
8930 (define_insn_and_split "*ior_scc_scc"
8931 [(set (match_operand:SI 0 "s_register_operand" "=r")
8932 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8933 [(match_operand:SI 1 "s_register_operand" "r")
8934 (match_operand:SI 2 "arm_add_operand" "rIL")])
8935 (match_operator:SI 6 "arm_comparison_operator"
8936 [(match_operand:SI 4 "s_register_operand" "r")
8937 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8938 (clobber (reg:CC CC_REGNUM))]
8940 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8943 "TARGET_ARM && reload_completed"
8947 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8948 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8950 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8952 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8955 [(set_attr "conds" "clob")
8956 (set_attr "length" "16")])
8958 ; If the above pattern is followed by a CMP insn, then the compare is
8959 ; redundant, since we can rework the conditional instruction that follows.
;; Fused form: the OR-of-sccs result is itself compared against zero.
;; The split reuses operand 0 (the dominant CC register already matched
;; in the pattern) and still materialises the SI result in operand 7.
8960 (define_insn_and_split "*ior_scc_scc_cmp"
8961 [(set (match_operand 0 "dominant_cc_register" "")
8962 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8963 [(match_operand:SI 1 "s_register_operand" "r")
8964 (match_operand:SI 2 "arm_add_operand" "rIL")])
8965 (match_operator:SI 6 "arm_comparison_operator"
8966 [(match_operand:SI 4 "s_register_operand" "r")
8967 (match_operand:SI 5 "arm_add_operand" "rIL")]))
8969 (set (match_operand:SI 7 "s_register_operand" "=r")
8970 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8971 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8974 "TARGET_ARM && reload_completed"
8978 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8979 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8981 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8983 [(set_attr "conds" "set")
8984 (set_attr "length" "16")])
;; (scc AND scc): AND counterpart of "*ior_scc_scc", using
;; DOM_CC_X_AND_Y to select the dominant CC mode.  Split after reload
;; into one combined compare plus a single ne:SI store.  Interior
;; lines are missing from this extract.
8986 (define_insn_and_split "*and_scc_scc"
8987 [(set (match_operand:SI 0 "s_register_operand" "=r")
8988 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8989 [(match_operand:SI 1 "s_register_operand" "r")
8990 (match_operand:SI 2 "arm_add_operand" "rIL")])
8991 (match_operator:SI 6 "arm_comparison_operator"
8992 [(match_operand:SI 4 "s_register_operand" "r")
8993 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8994 (clobber (reg:CC CC_REGNUM))]
8996 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8999 "TARGET_ARM && reload_completed
9000 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9005 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9006 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9008 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9010 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9013 [(set_attr "conds" "clob")
9014 (set_attr "length" "16")])
9016 ; If the above pattern is followed by a CMP insn, then the compare is
9017 ; redundant, since we can rework the conditional instruction that follows.
;; Fused form: the AND-of-sccs result is itself compared against zero;
;; operand 0 is the dominant CC register, operand 7 the SI result.
9018 (define_insn_and_split "*and_scc_scc_cmp"
9019 [(set (match_operand 0 "dominant_cc_register" "")
9020 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9021 [(match_operand:SI 1 "s_register_operand" "r")
9022 (match_operand:SI 2 "arm_add_operand" "rIL")])
9023 (match_operator:SI 6 "arm_comparison_operator"
9024 [(match_operand:SI 4 "s_register_operand" "r")
9025 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9027 (set (match_operand:SI 7 "s_register_operand" "=r")
9028 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9029 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9032 "TARGET_ARM && reload_completed"
9036 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9037 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9039 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9041 [(set_attr "conds" "set")
9042 (set_attr "length" "16")])
9044 ;; If there is no dominance in the comparison, then we can still save an
9045 ;; instruction in the AND case, since we can know that the second compare
9046 ;; need only zero the value if false (if true, then the value is already
;; No-dominance AND case: materialise the first scc into operand 0
;; (earlyclobber, three register alternatives), then do the second
;; compare in a CC mode from SELECT_CC_MODE (operand 7) and
;; conditionally zero the result.  Operand 8 is built in the split
;; preamble as the COMPARE rtx for the second test.
9048 (define_insn_and_split "*and_scc_scc_nodom"
9049 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9050 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9051 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9052 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9053 (match_operator:SI 6 "arm_comparison_operator"
9054 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9055 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9056 (clobber (reg:CC CC_REGNUM))]
9058 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9061 "TARGET_ARM && reload_completed"
9062 [(parallel [(set (match_dup 0)
9063 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9064 (clobber (reg:CC CC_REGNUM))])
9065 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9067 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9070 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9071 operands[4], operands[5]),
9073 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9075 [(set_attr "conds" "clob")
9076 (set_attr "length" "20")])
;; Two anonymous splits for CC_NOOV compares of (ior (and reg ...)
;; (comparison ...)) against zero — one for each commutation of the
;; ior arms.  Each split computes the ior into scratch operand 4 and
;; then tests bit 0 of the scratch.
;; NOTE(review): the "(define_split" header lines themselves are
;; missing from this extract (numbering gaps at 9078/9097) — do not
;; edit these fragments without the full file.
9079 [(set (reg:CC_NOOV CC_REGNUM)
9080 (compare:CC_NOOV (ior:SI
9081 (and:SI (match_operand:SI 0 "s_register_operand" "")
9083 (match_operator:SI 1 "arm_comparison_operator"
9084 [(match_operand:SI 2 "s_register_operand" "")
9085 (match_operand:SI 3 "arm_add_operand" "")]))
9087 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9090 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9092 (set (reg:CC_NOOV CC_REGNUM)
9093 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Mirror split: comparison operator first, (and reg ...) second.
9098 [(set (reg:CC_NOOV CC_REGNUM)
9099 (compare:CC_NOOV (ior:SI
9100 (match_operator:SI 1 "arm_comparison_operator"
9101 [(match_operand:SI 2 "s_register_operand" "")
9102 (match_operand:SI 3 "arm_add_operand" "")])
9103 (and:SI (match_operand:SI 0 "s_register_operand" "")
9106 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9109 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9111 (set (reg:CC_NOOV CC_REGNUM)
9112 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9115 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated store-condition: 0/-1 result of a comparison.  Fast paths:
;; LT against zero is a single "mov rd, rn, asr #31" (sign spread);
;; NE uses subs + mvnne.  General case: cmp, then mov/mvn predicated
;; on the inverse/true condition (three instructions, length 12).
9117 (define_insn "*negscc"
9118 [(set (match_operand:SI 0 "s_register_operand" "=r")
9119 (neg:SI (match_operator 3 "arm_comparison_operator"
9120 [(match_operand:SI 1 "s_register_operand" "r")
9121 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9122 (clobber (reg:CC CC_REGNUM))]
9125 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9126 return \"mov\\t%0, %1, asr #31\";
9128 if (GET_CODE (operands[3]) == NE)
9129 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9131 output_asm_insn (\"cmp\\t%1, %2\", operands);
9132 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9133 return \"mvn%d3\\t%0, #0\";
9135 [(set_attr "conds" "clob")
9136 (set_attr "length" "12")]
;; Conditional move between two rhs operands selected by a comparison.
;; Special-cases LT/GE against zero with register sources, using
;; asr #31 / asr #32 sign masks (and/ands/bic/bics) to avoid a cmp;
;; otherwise emits cmp (or cmn for a negatable out-of-range constant)
;; followed by up to two predicated movs.  Alternatives tie operand 1
;; or 2 to the destination to drop one mov.
9139 (define_insn "movcond"
9140 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9142 (match_operator 5 "arm_comparison_operator"
9143 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9144 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9145 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9146 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9147 (clobber (reg:CC CC_REGNUM))]
9150 if (GET_CODE (operands[5]) == LT
9151 && (operands[4] == const0_rtx))
9153 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9155 if (operands[2] == const0_rtx)
9156 return \"and\\t%0, %1, %3, asr #31\";
9157 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9159 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9161 if (operands[1] == const0_rtx)
9162 return \"bic\\t%0, %2, %3, asr #31\";
9163 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9165 /* The only case that falls through to here is when both ops 1 & 2
9169 if (GET_CODE (operands[5]) == GE
9170 && (operands[4] == const0_rtx))
9172 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9174 if (operands[2] == const0_rtx)
9175 return \"bic\\t%0, %1, %3, asr #31\";
9176 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9178 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9180 if (operands[1] == const0_rtx)
9181 return \"and\\t%0, %2, %3, asr #31\";
9182 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9184 /* The only case that falls through to here is when both ops 1 & 2
9187 if (GET_CODE (operands[4]) == CONST_INT
9188 && !const_ok_for_arm (INTVAL (operands[4])))
9189 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9191 output_asm_insn (\"cmp\\t%3, %4\", operands);
9192 if (which_alternative != 0)
9193 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9194 if (which_alternative != 1)
9195 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9198 [(set_attr "conds" "clob")
9199 (set_attr "length" "8,8,12")]
9202 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if-then-else of (plus reg, addend) vs. a plain value, with the
;; compare still to be done (clobbers CC); length 8 or 12 depending
;; on whether the false arm is tied to the destination.
9204 (define_insn "*ifcompare_plus_move"
9205 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9206 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9207 [(match_operand:SI 4 "s_register_operand" "r,r")
9208 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9210 (match_operand:SI 2 "s_register_operand" "r,r")
9211 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9212 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9213 (clobber (reg:CC CC_REGNUM))]
9216 [(set_attr "conds" "clob")
9217 (set_attr "length" "8,12")]
;; Same selection but the condition codes are already set (operand 5
;; is a cc_register): predicated add/sub, plus a predicated mov for
;; the false arm when it is not tied to the destination.  "L"
;; alternatives negate the constant and use sub.
9220 (define_insn "*if_plus_move"
9221 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9223 (match_operator 4 "arm_comparison_operator"
9224 [(match_operand 5 "cc_register" "") (const_int 0)])
9226 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9227 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9228 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9232 sub%d4\\t%0, %2, #%n3
9233 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9234 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9235 [(set_attr "conds" "use")
9236 (set_attr "length" "4,4,8,8")
9237 (set_attr "type" "*,*,*,*")]
;; Mirror of the plus/move pair: the plus expression is the FALSE arm,
;; so the predicated add/sub uses the inverse condition (%D4) and the
;; mov the true condition (%d4).
9240 (define_insn "*ifcompare_move_plus"
9241 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9242 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9243 [(match_operand:SI 4 "s_register_operand" "r,r")
9244 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9245 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9247 (match_operand:SI 2 "s_register_operand" "r,r")
9248 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9249 (clobber (reg:CC CC_REGNUM))]
9252 [(set_attr "conds" "clob")
9253 (set_attr "length" "8,12")]
9256 (define_insn "*if_move_plus"
9257 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9259 (match_operator 4 "arm_comparison_operator"
9260 [(match_operand 5 "cc_register" "") (const_int 0)])
9261 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9263 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9264 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9268 sub%D4\\t%0, %2, #%n3
9269 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9270 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9271 [(set_attr "conds" "use")
9272 (set_attr "length" "4,4,8,8")
9273 (set_attr "type" "*,*,*,*")]
;; Select between two shiftable-operator results.  The ifcompare form
;; still performs the compare (CC clobbered, length 12); the if form
;; consumes an existing CC and emits two predicated data ops, using
;; %I to print each operator's mnemonic.
9276 (define_insn "*ifcompare_arith_arith"
9277 [(set (match_operand:SI 0 "s_register_operand" "=r")
9278 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9279 [(match_operand:SI 5 "s_register_operand" "r")
9280 (match_operand:SI 6 "arm_add_operand" "rIL")])
9281 (match_operator:SI 8 "shiftable_operator"
9282 [(match_operand:SI 1 "s_register_operand" "r")
9283 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9284 (match_operator:SI 7 "shiftable_operator"
9285 [(match_operand:SI 3 "s_register_operand" "r")
9286 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9287 (clobber (reg:CC CC_REGNUM))]
9290 [(set_attr "conds" "clob")
9291 (set_attr "length" "12")]
9294 (define_insn "*if_arith_arith"
9295 [(set (match_operand:SI 0 "s_register_operand" "=r")
9296 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9297 [(match_operand 8 "cc_register" "") (const_int 0)])
9298 (match_operator:SI 6 "shiftable_operator"
9299 [(match_operand:SI 1 "s_register_operand" "r")
9300 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9301 (match_operator:SI 7 "shiftable_operator"
9302 [(match_operand:SI 3 "s_register_operand" "r")
9303 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9305 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9306 [(set_attr "conds" "use")
9307 (set_attr "length" "8")]
;; Select between a shiftable-operator result (true arm) and a plain
;; value.  Special-cases LT/GE against zero when the false value
;; already lives in the op's first source register and differs from
;; the destination: a sign mask (asr #31) merged with the operation
;; does it in two insns without a cmp.
9310 (define_insn "*ifcompare_arith_move"
9311 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9312 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9313 [(match_operand:SI 2 "s_register_operand" "r,r")
9314 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9315 (match_operator:SI 7 "shiftable_operator"
9316 [(match_operand:SI 4 "s_register_operand" "r,r")
9317 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9318 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9319 (clobber (reg:CC CC_REGNUM))]
9322 /* If we have an operation where (op x 0) is the identity operation and
9323 the conditional operator is LT or GE and we are comparing against zero and
9324 everything is in registers then we can do this in two instructions. */
9325 if (operands[3] == const0_rtx
9326 && GET_CODE (operands[7]) != AND
9327 && GET_CODE (operands[5]) == REG
9328 && GET_CODE (operands[1]) == REG
9329 && REGNO (operands[1]) == REGNO (operands[4])
9330 && REGNO (operands[4]) != REGNO (operands[0]))
9332 if (GET_CODE (operands[6]) == LT)
9333 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9334 else if (GET_CODE (operands[6]) == GE)
9335 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9337 if (GET_CODE (operands[3]) == CONST_INT
9338 && !const_ok_for_arm (INTVAL (operands[3])))
9339 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9341 output_asm_insn (\"cmp\\t%2, %3\", operands);
9342 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9343 if (which_alternative != 0)
9344 return \"mov%D6\\t%0, %1\";
9347 [(set_attr "conds" "clob")
9348 (set_attr "length" "8,12")]
;; CC-consuming form: one predicated data op, plus a predicated mov
;; for the untied false-arm alternative.
9351 (define_insn "*if_arith_move"
9352 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9353 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9354 [(match_operand 6 "cc_register" "") (const_int 0)])
9355 (match_operator:SI 5 "shiftable_operator"
9356 [(match_operand:SI 2 "s_register_operand" "r,r")
9357 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9358 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9362 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9363 [(set_attr "conds" "use")
9364 (set_attr "length" "4,8")
9365 (set_attr "type" "*,*")]
;; Mirror of the arith/move pair: the shiftable operation is the FALSE
;; arm.  The two-instruction LT/GE fast path therefore swaps the
;; and/bic mask senses relative to "*ifcompare_arith_move".
9368 (define_insn "*ifcompare_move_arith"
9369 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9370 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9371 [(match_operand:SI 4 "s_register_operand" "r,r")
9372 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9373 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9374 (match_operator:SI 7 "shiftable_operator"
9375 [(match_operand:SI 2 "s_register_operand" "r,r")
9376 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9377 (clobber (reg:CC CC_REGNUM))]
9380 /* If we have an operation where (op x 0) is the identity operation and
9381 the conditional operator is LT or GE and we are comparing against zero and
9382 everything is in registers then we can do this in two instructions */
9383 if (operands[5] == const0_rtx
9384 && GET_CODE (operands[7]) != AND
9385 && GET_CODE (operands[3]) == REG
9386 && GET_CODE (operands[1]) == REG
9387 && REGNO (operands[1]) == REGNO (operands[2])
9388 && REGNO (operands[2]) != REGNO (operands[0]))
9390 if (GET_CODE (operands[6]) == GE)
9391 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9392 else if (GET_CODE (operands[6]) == LT)
9393 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9396 if (GET_CODE (operands[5]) == CONST_INT
9397 && !const_ok_for_arm (INTVAL (operands[5])))
9398 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9400 output_asm_insn (\"cmp\\t%4, %5\", operands);
9402 if (which_alternative != 0)
9403 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9404 return \"%I7%D6\\t%0, %2, %3\";
9406 [(set_attr "conds" "clob")
9407 (set_attr "length" "8,12")]
9410 (define_insn "*if_move_arith"
9411 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9413 (match_operator 4 "arm_comparison_operator"
9414 [(match_operand 6 "cc_register" "") (const_int 0)])
9415 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9416 (match_operator:SI 5 "shiftable_operator"
9417 [(match_operand:SI 2 "s_register_operand" "r,r")
9418 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9422 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9423 [(set_attr "conds" "use")
9424 (set_attr "length" "4,8")
9425 (set_attr "type" "*,*")]
;; Select between a plain value (true arm) and a bitwise NOT.  The
;; "K" alternative handles a constant whose complement is encodable
;; (mvn with %B1).
9428 (define_insn "*ifcompare_move_not"
9429 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9431 (match_operator 5 "arm_comparison_operator"
9432 [(match_operand:SI 3 "s_register_operand" "r,r")
9433 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9434 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9436 (match_operand:SI 2 "s_register_operand" "r,r"))))
9437 (clobber (reg:CC CC_REGNUM))]
9440 [(set_attr "conds" "clob")
9441 (set_attr "length" "8,12")]
9444 (define_insn "*if_move_not"
9445 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9447 (match_operator 4 "arm_comparison_operator"
9448 [(match_operand 3 "cc_register" "") (const_int 0)])
9449 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9450 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9454 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9455 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9456 [(set_attr "conds" "use")
9457 (set_attr "length" "4,8,8")]
;; Mirror: the bitwise NOT is the TRUE arm, so the mvn takes the
;; direct condition (%d4) and the mov/mvn of the plain value the
;; inverse (%D4).
9460 (define_insn "*ifcompare_not_move"
9461 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9463 (match_operator 5 "arm_comparison_operator"
9464 [(match_operand:SI 3 "s_register_operand" "r,r")
9465 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9467 (match_operand:SI 2 "s_register_operand" "r,r"))
9468 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9469 (clobber (reg:CC CC_REGNUM))]
9472 [(set_attr "conds" "clob")
9473 (set_attr "length" "8,12")]
9476 (define_insn "*if_not_move"
9477 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9479 (match_operator 4 "arm_comparison_operator"
9480 [(match_operand 3 "cc_register" "") (const_int 0)])
9481 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9482 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9486 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9487 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9488 [(set_attr "conds" "use")
9489 (set_attr "length" "4,8,8")]
;; Select between a shifted register (true arm) and a plain value.
;; The CC-consuming form prints the shift via %S4; the "type" attr
;; distinguishes immediate-shift from register-shift timing.
9492 (define_insn "*ifcompare_shift_move"
9493 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9495 (match_operator 6 "arm_comparison_operator"
9496 [(match_operand:SI 4 "s_register_operand" "r,r")
9497 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9498 (match_operator:SI 7 "shift_operator"
9499 [(match_operand:SI 2 "s_register_operand" "r,r")
9500 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9501 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9502 (clobber (reg:CC CC_REGNUM))]
9505 [(set_attr "conds" "clob")
9506 (set_attr "length" "8,12")]
9509 (define_insn "*if_shift_move"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9512 (match_operator 5 "arm_comparison_operator"
9513 [(match_operand 6 "cc_register" "") (const_int 0)])
9514 (match_operator:SI 4 "shift_operator"
9515 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9516 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9517 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9521 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9522 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9523 [(set_attr "conds" "use")
9524 (set_attr "shift" "2")
9525 (set_attr "length" "4,8,8")
9526 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9527 (const_string "alu_shift")
9528 (const_string "alu_shift_reg")))]
;; Mirror: the shifted register is the FALSE arm; predication senses
;; are swapped relative to "*if_shift_move".
9531 (define_insn "*ifcompare_move_shift"
9532 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9534 (match_operator 6 "arm_comparison_operator"
9535 [(match_operand:SI 4 "s_register_operand" "r,r")
9536 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9537 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9538 (match_operator:SI 7 "shift_operator"
9539 [(match_operand:SI 2 "s_register_operand" "r,r")
9540 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9541 (clobber (reg:CC CC_REGNUM))]
9544 [(set_attr "conds" "clob")
9545 (set_attr "length" "8,12")]
9548 (define_insn "*if_move_shift"
9549 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9551 (match_operator 5 "arm_comparison_operator"
9552 [(match_operand 6 "cc_register" "") (const_int 0)])
9553 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9554 (match_operator:SI 4 "shift_operator"
9555 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9556 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9560 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9561 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9562 [(set_attr "conds" "use")
9563 (set_attr "shift" "2")
9564 (set_attr "length" "4,8,8")
9565 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9566 (const_string "alu_shift")
9567 (const_string "alu_shift_reg")))]
;; Select between two shifted registers; the CC-consuming form is two
;; predicated movs with per-arm shifts (%S6/%S7).  "type" is
;; alu_shift only when BOTH shift amounts are immediates.
9570 (define_insn "*ifcompare_shift_shift"
9571 [(set (match_operand:SI 0 "s_register_operand" "=r")
9573 (match_operator 7 "arm_comparison_operator"
9574 [(match_operand:SI 5 "s_register_operand" "r")
9575 (match_operand:SI 6 "arm_add_operand" "rIL")])
9576 (match_operator:SI 8 "shift_operator"
9577 [(match_operand:SI 1 "s_register_operand" "r")
9578 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9579 (match_operator:SI 9 "shift_operator"
9580 [(match_operand:SI 3 "s_register_operand" "r")
9581 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9582 (clobber (reg:CC CC_REGNUM))]
9585 [(set_attr "conds" "clob")
9586 (set_attr "length" "12")]
9589 (define_insn "*if_shift_shift"
9590 [(set (match_operand:SI 0 "s_register_operand" "=r")
9592 (match_operator 5 "arm_comparison_operator"
9593 [(match_operand 8 "cc_register" "") (const_int 0)])
9594 (match_operator:SI 6 "shift_operator"
9595 [(match_operand:SI 1 "s_register_operand" "r")
9596 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9597 (match_operator:SI 7 "shift_operator"
9598 [(match_operand:SI 3 "s_register_operand" "r")
9599 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9601 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9602 [(set_attr "conds" "use")
9603 (set_attr "shift" "1")
9604 (set_attr "length" "8")
9605 (set (attr "type") (if_then_else
9606 (and (match_operand 2 "const_int_operand" "")
9607 (match_operand 4 "const_int_operand" ""))
9608 (const_string "alu_shift")
9609 (const_string "alu_shift_reg")))]
;; Select between a bitwise NOT (true arm) and a shiftable-operator
;; result: mvn predicated true, data op predicated false.
9612 (define_insn "*ifcompare_not_arith"
9613 [(set (match_operand:SI 0 "s_register_operand" "=r")
9615 (match_operator 6 "arm_comparison_operator"
9616 [(match_operand:SI 4 "s_register_operand" "r")
9617 (match_operand:SI 5 "arm_add_operand" "rIL")])
9618 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9619 (match_operator:SI 7 "shiftable_operator"
9620 [(match_operand:SI 2 "s_register_operand" "r")
9621 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9622 (clobber (reg:CC CC_REGNUM))]
9625 [(set_attr "conds" "clob")
9626 (set_attr "length" "12")]
9629 (define_insn "*if_not_arith"
9630 [(set (match_operand:SI 0 "s_register_operand" "=r")
9632 (match_operator 5 "arm_comparison_operator"
9633 [(match_operand 4 "cc_register" "") (const_int 0)])
9634 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9635 (match_operator:SI 6 "shiftable_operator"
9636 [(match_operand:SI 2 "s_register_operand" "r")
9637 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9639 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9640 [(set_attr "conds" "use")
9641 (set_attr "length" "8")]
;; Mirror: shiftable-operator result is the TRUE arm, NOT the false
;; arm; predication senses swap accordingly.
9644 (define_insn "*ifcompare_arith_not"
9645 [(set (match_operand:SI 0 "s_register_operand" "=r")
9647 (match_operator 6 "arm_comparison_operator"
9648 [(match_operand:SI 4 "s_register_operand" "r")
9649 (match_operand:SI 5 "arm_add_operand" "rIL")])
9650 (match_operator:SI 7 "shiftable_operator"
9651 [(match_operand:SI 2 "s_register_operand" "r")
9652 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9653 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9654 (clobber (reg:CC CC_REGNUM))]
9657 [(set_attr "conds" "clob")
9658 (set_attr "length" "12")]
9661 (define_insn "*if_arith_not"
9662 [(set (match_operand:SI 0 "s_register_operand" "=r")
9664 (match_operator 5 "arm_comparison_operator"
9665 [(match_operand 4 "cc_register" "") (const_int 0)])
9666 (match_operator:SI 6 "shiftable_operator"
9667 [(match_operand:SI 2 "s_register_operand" "r")
9668 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9669 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9671 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9672 [(set_attr "conds" "use")
9673 (set_attr "length" "8")]
;; Select between a negation (true arm) and a plain value; negation
;; is emitted as "rsb rd, rm, #0" predicated on the true condition.
9676 (define_insn "*ifcompare_neg_move"
9677 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9679 (match_operator 5 "arm_comparison_operator"
9680 [(match_operand:SI 3 "s_register_operand" "r,r")
9681 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9682 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9683 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9684 (clobber (reg:CC CC_REGNUM))]
9687 [(set_attr "conds" "clob")
9688 (set_attr "length" "8,12")]
9691 (define_insn "*if_neg_move"
9692 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9694 (match_operator 4 "arm_comparison_operator"
9695 [(match_operand 3 "cc_register" "") (const_int 0)])
9696 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9697 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9701 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9702 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9703 [(set_attr "conds" "use")
9704 (set_attr "length" "4,8,8")]
;; Mirror: the negation is the FALSE arm; rsb is predicated on the
;; inverse condition (%D4).
9707 (define_insn "*ifcompare_move_neg"
9708 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9710 (match_operator 5 "arm_comparison_operator"
9711 [(match_operand:SI 3 "s_register_operand" "r,r")
9712 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9713 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9714 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9715 (clobber (reg:CC CC_REGNUM))]
9718 [(set_attr "conds" "clob")
9719 (set_attr "length" "8,12")]
9722 (define_insn "*if_move_neg"
9723 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9725 (match_operator 4 "arm_comparison_operator"
9726 [(match_operand 3 "cc_register" "") (const_int 0)])
9727 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9728 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9732 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9733 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9734 [(set_attr "conds" "use")
9735 (set_attr "length" "4,8,8")]
;; Arithmetic on two adjacent memory words: load both with a single
;; ldm (choosing ia/ib/da addressing from the common base offset,
;; ordering the destination registers by register number as ldm
;; requires), falling back to an add-immediate plus ldmia, or to two
;; separate ldr when the offset is not encodable; then apply the
;; operator (%I3).  Scratch operand 4 holds the second loaded word.
;; NOTE(review): several interior lines (offset normalisation between
;; original lines 9775-9789) are missing from this extract.
9738 (define_insn "*arith_adjacentmem"
9739 [(set (match_operand:SI 0 "s_register_operand" "=r")
9740 (match_operator:SI 1 "shiftable_operator"
9741 [(match_operand:SI 2 "memory_operand" "m")
9742 (match_operand:SI 3 "memory_operand" "m")]))
9743 (clobber (match_scratch:SI 4 "=r"))]
9744 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9750 HOST_WIDE_INT val1 = 0, val2 = 0;
9752 if (REGNO (operands[0]) > REGNO (operands[4]))
9754 ldm[1] = operands[4];
9755 ldm[2] = operands[0];
9759 ldm[1] = operands[0];
9760 ldm[2] = operands[4];
9763 base_reg = XEXP (operands[2], 0);
9765 if (!REG_P (base_reg))
9767 val1 = INTVAL (XEXP (base_reg, 1));
9768 base_reg = XEXP (base_reg, 0);
9771 if (!REG_P (XEXP (operands[3], 0)))
9772 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9774 arith[0] = operands[0];
9775 arith[3] = operands[1];
9789 if (val1 !=0 && val2 != 0)
9793 if (val1 == 4 || val2 == 4)
9794 /* Other val must be 8, since we know they are adjacent and neither
9796 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9797 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9799 ldm[0] = ops[0] = operands[4];
9801 ops[2] = GEN_INT (val1);
9802 output_add_immediate (ops);
9804 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9806 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9810 /* Offset is out of range for a single add, so use two ldr. */
9813 ops[2] = GEN_INT (val1);
9814 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9816 ops[2] = GEN_INT (val2);
9817 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9823 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9825 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9830 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9832 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9834 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9837 [(set_attr "length" "12")
9838 (set_attr "predicable" "yes")
9839 (set_attr "type" "load1")]
9842 ; This pattern is never tried by combine, so do it as a peephole
;; Peephole: fuse a register copy followed by a compare of the source
;; against zero into a single parallel (the movs form, which sets the
;; flags as a side effect of the copy).
9845 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9846 (match_operand:SI 1 "arm_general_register_operand" ""))
9847 (set (reg:CC CC_REGNUM)
9848 (compare:CC (match_dup 1) (const_int 0)))]
9850 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9851 (set (match_dup 0) (match_dup 1))])]
;; Split (ge x 0) AND'ed with a negated comparison: compute
;; NOT(x >> 31) into scratch operand 5, then AND it with the scc of
;; operator 2.  The "(define_split" header line is missing from this
;; extract (numbering gap before 9856).
9856 [(set (match_operand:SI 0 "s_register_operand" "")
9857 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9859 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9860 [(match_operand:SI 3 "s_register_operand" "")
9861 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9862 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9864 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9865 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9870 ;; This split can be used because CC_Z mode implies that the following
9871 ;; branch will be an equality, or an unsigned inequality, so the sign
9872 ;; extension is not needed.
;; Split a CC_Z compare of a left-shifted QImode memory byte against a
;; constant whose low 24 bits are zero: zero-extend the byte into the
;; scratch and compare against the constant shifted right by 24.
9875 [(set (reg:CC_Z CC_REGNUM)
9877 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9879 (match_operand 1 "const_int_operand" "")))
9880 (clobber (match_scratch:SI 2 ""))]
9882 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9883 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9884 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9885 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9887 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9890 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Expand the function prologue: dispatches to the ARM or Thumb-1
;; expander.  (The TARGET_32BIT/TARGET_THUMB1 selection lines are
;; missing from this extract.)
9892 (define_expand "prologue"
9893 [(clobber (const_int 0))]
9896 arm_expand_prologue ();
9898 thumb1_expand_prologue ();
;; Expand the function epilogue.  For eh_return, keep r2 (the stack
;; adjustment) live via prologue_use.  Thumb-1 has its own expander;
;; otherwise either a simple return insn or the VUNSPEC_EPILOGUE
;; volatile jump is emitted.
9903 (define_expand "epilogue"
9904 [(clobber (const_int 0))]
9907 if (crtl->calls_eh_return)
9908 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9910 thumb1_expand_epilogue ();
9911 else if (USE_RETURN_INSN (FALSE))
9913 emit_jump_insn (gen_return ());
9916 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9918 gen_rtx_RETURN (VOIDmode)),
9924 ;; Note - although unspec_volatile's USE all hard registers,
9925 ;; USEs are ignored after reload has completed. Thus we need
9926 ;; to add an unspec of the link register to ensure that flow
9927 ;; does not think that it is unused by the sibcall branch that
9928 ;; will replace the standard function epilogue.
9929 (define_insn "sibcall_epilogue"
9930 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9931 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9934 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9935 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9936 return arm_output_epilogue (next_nonnote_insn (insn));
9938 ;; Length is absolute worst case
9939 [(set_attr "length" "44")
9940 (set_attr "type" "block")
9941 ;; We don't clobber the conditions, but the potential length of this
9942 ;; operation is sufficient to make conditionalizing the sequence
9943 ;; unlikely to be profitable.
9944 (set_attr "conds" "clob")]
;; Output the expanded epilogue body (ARM/Thumb-2 path via
;; arm_output_epilogue, Thumb-1 via thumb_unexpanded_epilogue).
9947 (define_insn "*epilogue_insns"
9948 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
9952 return arm_output_epilogue (NULL);
9953 else /* TARGET_THUMB1 */
9954 return thumb_unexpanded_epilogue ();
9956 ;; Length is absolute worst case
9957 [(set_attr "length" "44")
9958 (set_attr "type" "block")
9959 ;; We don't clobber the conditions, but the potential length of this
9960 ;; operation is sufficient to make conditionalizing the sequence
9961 ;; unlikely to be profitable.
9962 (set_attr "conds" "clob")]
;; Expand the exception-handling epilogue: operand 1 is the stack
;; adjustment, operand 2 the handler address (forced into r2 if needed).
9965 (define_expand "eh_epilogue"
9966 [(use (match_operand:SI 0 "register_operand" ""))
9967 (use (match_operand:SI 1 "register_operand" ""))
9968 (use (match_operand:SI 2 "register_operand" ""))]
9972 cfun->machine->eh_epilogue_sp_ofs = operands[1];
9973 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
9975 rtx ra = gen_rtx_REG (Pmode, 2);
9977 emit_move_insn (ra, operands[2]);
9980 /* This is a hack -- we may have crystallized the function type too
9982 cfun->machine->func_type = 0;
9986 ;; This split is only used during output to reduce the number of patterns
9987 ;; that need assembler instructions adding to them. We allowed the setting
9988 ;; of the conditions to be implicit during rtl generation so that
9989 ;; the conditional compare patterns would work. However this conflicts to
9990 ;; some extent with the conditional data operations, so we have to split them
9993 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
9994 ;; conditional execution sufficient?
;; Post-reload split: turn an if_then_else move into an explicit compare
;; followed by a conditional (cond_exec) move.  Operand 7 is the reversed
;; branch condition; FP modes use the unordered-aware reversal.
9997 [(set (match_operand:SI 0 "s_register_operand" "")
9998 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
9999 [(match_operand 2 "" "") (match_operand 3 "" "")])
10001 (match_operand 4 "" "")))
10002 (clobber (reg:CC CC_REGNUM))]
10003 "TARGET_ARM && reload_completed"
10004 [(set (match_dup 5) (match_dup 6))
10005 (cond_exec (match_dup 7)
10006 (set (match_dup 0) (match_dup 4)))]
10009 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10010 operands[2], operands[3]);
10011 enum rtx_code rc = GET_CODE (operands[1]);
10013 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10014 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10015 if (mode == CCFPmode || mode == CCFPEmode)
10016 rc = reverse_condition_maybe_unordered (rc);
10018 rc = reverse_condition (rc);
10020 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; As the split above, but the conditional move uses the original
;; (unreversed) condition directly via match_op_dup.
10025 [(set (match_operand:SI 0 "s_register_operand" "")
10026 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10027 [(match_operand 2 "" "") (match_operand 3 "" "")])
10028 (match_operand 4 "" "")
10030 (clobber (reg:CC CC_REGNUM))]
10031 "TARGET_ARM && reload_completed"
10032 [(set (match_dup 5) (match_dup 6))
10033 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10034 (set (match_dup 0) (match_dup 4)))]
10037 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10038 operands[2], operands[3]);
10040 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10041 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Two-armed variant: compare once, then one cond_exec move per arm —
;; operand 4 under the original condition, operand 5 under its reverse
;; (operand 8, unordered-aware for FP compare modes).
10046 [(set (match_operand:SI 0 "s_register_operand" "")
10047 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10048 [(match_operand 2 "" "") (match_operand 3 "" "")])
10049 (match_operand 4 "" "")
10050 (match_operand 5 "" "")))
10051 (clobber (reg:CC CC_REGNUM))]
10052 "TARGET_ARM && reload_completed"
10053 [(set (match_dup 6) (match_dup 7))
10054 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10055 (set (match_dup 0) (match_dup 4)))
10056 (cond_exec (match_dup 8)
10057 (set (match_dup 0) (match_dup 5)))]
10060 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10061 operands[2], operands[3]);
10062 enum rtx_code rc = GET_CODE (operands[1]);
10064 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10065 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10066 if (mode == CCFPmode || mode == CCFPEmode)
10067 rc = reverse_condition_maybe_unordered (rc);
10069 rc = reverse_condition (rc);
10071 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; As the two-armed split above, but the else arm stores the bitwise
;; complement of operand 5 (mvn on the reversed condition).
10076 [(set (match_operand:SI 0 "s_register_operand" "")
10077 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10078 [(match_operand:SI 2 "s_register_operand" "")
10079 (match_operand:SI 3 "arm_add_operand" "")])
10080 (match_operand:SI 4 "arm_rhs_operand" "")
10082 (match_operand:SI 5 "s_register_operand" ""))))
10083 (clobber (reg:CC CC_REGNUM))]
10084 "TARGET_ARM && reload_completed"
10085 [(set (match_dup 6) (match_dup 7))
10086 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10087 (set (match_dup 0) (match_dup 4)))
10088 (cond_exec (match_dup 8)
10089 (set (match_dup 0) (not:SI (match_dup 5))))]
10092 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10093 operands[2], operands[3]);
10094 enum rtx_code rc = GET_CODE (operands[1]);
10096 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10097 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10098 if (mode == CCFPmode || mode == CCFPEmode)
10099 rc = reverse_condition_maybe_unordered (rc);
10101 rc = reverse_condition (rc);
10103 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional "move or move-complement": keeps %0 on one condition,
;; mvn's %2 into it on the other (second alternative also mov's %1 first).
10107 (define_insn "*cond_move_not"
10108 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10109 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10110 [(match_operand 3 "cc_register" "") (const_int 0)])
10111 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10113 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10117 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10118 [(set_attr "conds" "use")
10119 (set_attr "length" "4,8")]
10122 ;; The next two patterns occur when an AND operation is followed by a
10123 ;; scc insn sequence
;; Extract a single bit, sign-extended: ands with a one-bit mask, then
;; mvnne to produce -1 when the bit was set (0 otherwise).
10125 (define_insn "*sign_extract_onebit"
10126 [(set (match_operand:SI 0 "s_register_operand" "=r")
10127 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10129 (match_operand:SI 2 "const_int_operand" "n")))
10130 (clobber (reg:CC CC_REGNUM))]
;; Build the single-bit mask from the bit position.
10133 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10134 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10135 return \"mvnne\\t%0, #0\";
10137 [(set_attr "conds" "clob")
10138 (set_attr "length" "8")]
;; Complement of a one-bit sign extract: tst the bit, write -1 when it
;; is clear (mvneq) and 0 when set (movne).
10141 (define_insn "*not_signextract_onebit"
10142 [(set (match_operand:SI 0 "s_register_operand" "=r")
10144 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10146 (match_operand:SI 2 "const_int_operand" "n"))))
10147 (clobber (reg:CC CC_REGNUM))]
;; Build the single-bit mask from the bit position.
10150 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10151 output_asm_insn (\"tst\\t%1, %2\", operands);
10152 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10153 return \"movne\\t%0, #0\";
10155 [(set_attr "conds" "clob")
10156 (set_attr "length" "12")]
10158 ;; ??? The above patterns need auditing for Thumb-2
10160 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10161 ;; expressions. For simplicity, the first register is also in the unspec
;; Store-multiple push of core registers.  Builds the stmfd/push register
;; list textually; a single ARM register uses a plain pre-decrement str.
10163 (define_insn "*push_multi"
10164 [(match_parallel 2 "multi_register_push"
10165 [(set (match_operand:BLK 0 "memory_operand" "=m")
10166 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10167 UNSPEC_PUSH_MULT))])]
10171 int num_saves = XVECLEN (operands[2], 0);
10173 /* For the StrongARM at least it is faster to
10174 use STR to store only a single register.
10175 In Thumb mode always use push, and the assembler will pick
10176 something appropriate. */
10177 if (num_saves == 1 && TARGET_ARM)
10178 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10185 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10187 strcpy (pattern, \"push\\t{%1\");
;; Append the remaining registers from the parallel's USE vector.
10189 for (i = 1; i < num_saves; i++)
10191 strcat (pattern, \", %|\");
10193 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10196 strcat (pattern, \"}\");
10197 output_asm_insn (pattern, operands);
10202 [(set_attr "type" "store4")]
;; Zero-length scheduling barrier tying two stack-related registers to a
;; blockage memory clobber; emits no code (length 0).
10205 (define_insn "stack_tie"
10206 [(set (mem:BLK (scratch))
10207 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10208 (match_operand:SI 1 "s_register_operand" "rk")]
10212 [(set_attr "length" "0")]
10215 ;; Similarly for the floating point registers
;; FPA analogue of *push_multi: sfmfd stores the whole block of
;; floating-point registers with pre-decrement writeback.
10216 (define_insn "*push_fp_multi"
10217 [(match_parallel 2 "multi_register_push"
10218 [(set (match_operand:BLK 0 "memory_operand" "=m")
10219 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10220 UNSPEC_PUSH_MULT))])]
10221 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10226 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10227 output_asm_insn (pattern, operands);
10230 [(set_attr "type" "f_store")]
10233 ;; Special patterns for dealing with the constant pool
;; Align the constant pool to a 32-bit boundary.
10235 (define_insn "align_4"
10236 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10239 assemble_align (32);
;; Align the constant pool to a 64-bit boundary.
10244 (define_insn "align_8"
10245 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10248 assemble_align (64);
;; Marker ending a constant-pool region; just clears making_const_table.
10253 (define_insn "consttable_end"
10254 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10257 making_const_table = FALSE;
;; Emit a 1-byte pool entry, padded with 3 zero bytes to a word.
10262 (define_insn "consttable_1"
10263 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10266 making_const_table = TRUE;
10267 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10268 assemble_zeros (3);
10271 [(set_attr "length" "4")]
;; Emit a 2-byte pool entry (integers only), padded with 2 zero bytes.
10274 (define_insn "consttable_2"
10275 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10278 making_const_table = TRUE;
10279 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10280 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10281 assemble_zeros (2);
10284 [(set_attr "length" "4")]
;; Emit a 4-byte pool entry: HFmode via arm_emit_fp16_const, other FP via
;; assemble_real, everything else as a 4-byte integer (stripping HIGH).
10287 (define_insn "consttable_4"
10288 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10292 rtx x = operands[0];
10293 making_const_table = TRUE;
10294 switch (GET_MODE_CLASS (GET_MODE (x)))
10297 if (GET_MODE (x) == HFmode)
10298 arm_emit_fp16_const (x);
10302 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10303 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10307 /* XXX: Sometimes gcc does something really dumb and ends up with
10308 a HIGH in a constant pool entry, usually because it's trying to
10309 load into a VFP register. We know this will always be used in
10310 combination with a LO_SUM which ignores the high bits, so just
10311 strip off the HIGH. */
10312 if (GET_CODE (x) == HIGH)
10314 assemble_integer (x, 4, BITS_PER_WORD, 1);
10315 mark_symbol_refs_as_used (x);
10320 [(set_attr "length" "4")]
;; Emit an 8-byte pool entry: FP via assemble_real, else 8-byte integer.
10323 (define_insn "consttable_8"
10324 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10328 making_const_table = TRUE;
10329 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10334 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10335 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10339 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10344 [(set_attr "length" "8")]
;; Emit a 16-byte pool entry: FP via assemble_real, else 16-byte integer.
10347 (define_insn "consttable_16"
10348 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10352 making_const_table = TRUE;
10353 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10358 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10359 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10363 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10368 [(set_attr "length" "16")]
10371 ;; Miscellaneous Thumb patterns
;; Expand a jump through a dispatch table: the visible path materializes
;; the table label and adds it to the index before the indirect jump.
10373 (define_expand "tablejump"
10374 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10375 (use (label_ref (match_operand 1 "" "")))])]
10380 /* Hopefully, CSE will eliminate this copy. */
10381 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10382 rtx reg2 = gen_reg_rtx (SImode);
10384 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10385 operands[0] = reg2;
10390 ;; NB never uses BX.
;; Thumb-1 indirect table jump through a register (2-byte encoding).
10391 (define_insn "*thumb1_tablejump"
10392 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10393 (use (label_ref (match_operand 1 "" "")))]
10396 [(set_attr "length" "2")]
10399 ;; V5 Instructions.
;; Count leading zeros; requires ARMv5 (clz instruction).
10401 (define_insn "clzsi2"
10402 [(set (match_operand:SI 0 "s_register_operand" "=r")
10403 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10404 "TARGET_32BIT && arm_arch5"
10406 [(set_attr "predicable" "yes")
10407 (set_attr "insn" "clz")])
;; Bit-reverse (rbit); Thumb-2-era instruction, classified with clz for
;; scheduling purposes.
10409 (define_insn "rbitsi2"
10410 [(set (match_operand:SI 0 "s_register_operand" "=r")
10411 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10412 "TARGET_32BIT && arm_arch_thumb2"
10414 [(set_attr "predicable" "yes")
10415 (set_attr "insn" "clz")])
;; Count trailing zeros synthesized as rbit followed by clz.
10417 (define_expand "ctzsi2"
10418 [(set (match_operand:SI 0 "s_register_operand" "")
10419 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10420 "TARGET_32BIT && arm_arch_thumb2"
10423 rtx tmp = gen_reg_rtx (SImode);
10424 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10425 emit_insn (gen_clzsi2 (operands[0], tmp));
10431 ;; V5E instructions.
;; Data prefetch (pld); requires ARMv5E.  Operands 1/2 are the standard
;; rw/locality hints of the prefetch RTL.
10433 (define_insn "prefetch"
10434 [(prefetch (match_operand:SI 0 "address_operand" "p")
10435 (match_operand:SI 1 "" "")
10436 (match_operand:SI 2 "" ""))]
10437 "TARGET_32BIT && arm_arch5e"
10440 ;; General predication pattern
;; NOTE(review): the cond_exec/define_cond_exec body following this
;; comparison-operator match is elided in this extraction — consult the
;; full file before modifying.
10443 [(match_operator 0 "arm_comparison_operator"
10444 [(match_operand 1 "cc_register" "")
;; Zero-length marker keeping a register live through the prologue
;; (emits only an assembler comment).
10450 (define_insn "prologue_use"
10451 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10453 "%@ %0 needed for prologue"
10454 [(set_attr "length" "0")]
10458 ;; Patterns for exception handling
;; Expand __builtin_eh_return: dispatch to the ARM or Thumb setter of the
;; saved return address.
10460 (define_expand "eh_return"
10461 [(use (match_operand 0 "general_operand" ""))]
10466 emit_insn (gen_arm_eh_return (operands[0]));
10468 emit_insn (gen_thumb_eh_return (operands[0]));
10473 ;; We can't expand this before we know where the link register is stored.
;; ARM-mode eh_return: after reload, rewrite the saved return address via
;; arm_set_return_address using the scratch in operand 1.
10474 (define_insn_and_split "arm_eh_return"
10475 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10477 (clobber (match_scratch:SI 1 "=&r"))]
10480 "&& reload_completed"
10484 arm_set_return_address (operands[0], operands[1]);
;; Thumb-mode counterpart of arm_eh_return (low-register constraints).
10489 (define_insn_and_split "thumb_eh_return"
10490 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10492 (clobber (match_scratch:SI 1 "=&l"))]
10495 "&& reload_completed"
10499 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (TPIDRURO, c13/c0/3).
10507 (define_insn "load_tp_hard"
10508 [(set (match_operand:SI 0 "register_operand" "=r")
10509 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10511 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10512 [(set_attr "predicable" "yes")]
10515 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software thread-pointer read via the EABI helper __aeabi_read_tp;
;; result lands in r0, clobbering lr, ip and the condition codes.
10516 (define_insn "load_tp_soft"
10517 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10518 (clobber (reg:SI LR_REGNUM))
10519 (clobber (reg:SI IP_REGNUM))
10520 (clobber (reg:CC CC_REGNUM))]
10522 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10523 [(set_attr "conds" "clob")]
;; movt-style insert of a constant into the upper half of a register
;; (zero_extract destination); single predicable 4-byte instruction.
10526 (define_insn "*arm_movtas_ze"
10527 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10530 (match_operand:SI 1 "const_int_operand" ""))]
10533 [(set_attr "predicable" "yes")
10534 (set_attr "length" "4")]
;; 32-bit byte swap (rev); requires ARMv6.
10537 (define_insn "*arm_rev"
10538 [(set (match_operand:SI 0 "s_register_operand" "=r")
10539 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10540 "TARGET_32BIT && arm_arch6"
10542 [(set_attr "predicable" "yes")
10543 (set_attr "length" "4")]
;; Thumb-1 byte swap (rev), low registers only, 2-byte encoding.
10546 (define_insn "*thumb1_rev"
10547 [(set (match_operand:SI 0 "s_register_operand" "=l")
10548 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10549 "TARGET_THUMB1 && arm_arch6"
10551 [(set_attr "length" "2")]
;; Pre-ARMv6 byte swap built from eor/rotate/shift/bic steps using two
;; scratch registers (operands 2 and 3).
10554 (define_expand "arm_legacy_rev"
10555 [(set (match_operand:SI 2 "s_register_operand" "")
10556 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10560 (lshiftrt:SI (match_dup 2)
10562 (set (match_operand:SI 3 "s_register_operand" "")
10563 (rotatert:SI (match_dup 1)
;; Mask out bits 8-15 (0xFFFF00FF) before combining.
10566 (and:SI (match_dup 2)
10567 (const_int -65281)))
10568 (set (match_operand:SI 0 "s_register_operand" "")
10569 (xor:SI (match_dup 3)
10575 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte swap without rev: shift/or/rotate sequence through four
;; scratch registers (operands 2-5).
10576 (define_expand "thumb_legacy_rev"
10577 [(set (match_operand:SI 2 "s_register_operand" "")
10578 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10580 (set (match_operand:SI 3 "s_register_operand" "")
10581 (lshiftrt:SI (match_dup 1)
10584 (ior:SI (match_dup 3)
10586 (set (match_operand:SI 4 "s_register_operand" "")
10588 (set (match_operand:SI 5 "s_register_operand" "")
10589 (rotatert:SI (match_dup 1)
10592 (ashift:SI (match_dup 5)
10595 (lshiftrt:SI (match_dup 5)
10598 (ior:SI (match_dup 5)
10601 (rotatert:SI (match_dup 5)
10603 (set (match_operand:SI 0 "s_register_operand" "")
10604 (ior:SI (match_dup 5)
;; bswapsi2 entry point: when optimizing for speed without rev available,
;; fall back to the legacy ARM/Thumb multi-insn sequences above.
10610 (define_expand "bswapsi2"
10611 [(set (match_operand:SI 0 "s_register_operand" "=r")
10612 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10617 if (!optimize_size)
10619 rtx op2 = gen_reg_rtx (SImode);
10620 rtx op3 = gen_reg_rtx (SImode);
10624 rtx op4 = gen_reg_rtx (SImode);
10625 rtx op5 = gen_reg_rtx (SImode);
10627 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10628 op2, op3, op4, op5));
10632 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10644 ;; Load the load/store multiple patterns
10645 (include "ldmstm.md")
10646 ;; Load the FPA co-processor patterns
10648 ;; Load the Maverick co-processor patterns
10649 (include "cirrus.md")
10650 ;; Vector bits common to IWMMXT and Neon
10651 (include "vec-common.md")
10652 ;; Load the Intel Wireless Multimedia Extension patterns
10653 (include "iwmmxt.md")
10654 ;; Load the VFP co-processor patterns
10656 ;; Thumb-2 patterns
10657 (include "thumb2.md")
10659 (include "neon.md")