1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
45 ;; 3rd operand to select_dominance_cc_mode
54 ;; Note: sin and cos are no longer used.
55 ;; Unspec enumerators for Neon are defined in neon.md.
57 (define_c_enum "unspec" [
58 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
59 ; operand 0 is the result,
60 ; operand 1 the parameter.
61 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
62 ; operand 0 is the result,
63 ; operand 1 the parameter.
64 UNSPEC_PUSH_MULT ; `push multiple' operation:
65 ; operand 0 is the first register,
66 ; subsequent registers are in parallel (use ...)
68 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
69 ; usage, that is, we will add the pic_register
70 ; value to it before trying to dereference it.
71 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
72 ; The last operand is the number of a PIC_LABEL
73 ; that points at the containing instruction.
74 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
75 ; being scheduled before the stack adjustment insn.
76 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
77 ; this unspec is used to prevent the deletion of
78 ; instructions setting registers for EH handling
79 ; and stack frame generation. Operand 0 is the
81 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
82 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
83 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
84 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
85 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
86 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
87 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
88 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
89 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
90 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
91 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
92 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
93 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
94 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
95 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
97 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
98 ; correctly for PIC usage.
99 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
100 ; given symbolic address.
101 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
102 UNSPEC_RBIT ; rbit operation.
103 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
104 ; another symbolic address.
105 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
108 ;; UNSPEC_VOLATILE Usage:
110 (define_c_enum "unspecv" [
111 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
113 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
114 ; instruction epilogue sequence that isn't expanded
115 ; into normal RTL. Used for both normal and sibcall
117 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
118 ; modes from arm to thumb.
119 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
120 ; for inlined constants.
121 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
123 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
125 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
127 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
129 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
131 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
133 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
134 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
135 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
136 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
137 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
138 VUNSPEC_WCMP_GT ; Used by the iWMMXt WCMPGT instructions
139 VUNSPEC_EH_RETURN ; Use to override the return address for exception
141 VUNSPEC_SYNC_COMPARE_AND_SWAP ; Represent an atomic compare swap.
142 VUNSPEC_SYNC_LOCK ; Represent a sync_lock_test_and_set.
143 VUNSPEC_SYNC_OP ; Represent a sync_<op>
144 VUNSPEC_SYNC_NEW_OP ; Represent a sync_new_<op>
145 VUNSPEC_SYNC_OLD_OP ; Represent a sync_old_<op>
148 ;;---------------------------------------------------------------------------
151 ;; Processor type. This is created automatically from arm-cores.def.
152 (include "arm-tune.md")
154 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
155 ; generating ARM code. This is used to control the length of some insn
156 ; patterns that share the same RTL in both ARM and Thumb code.
157 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
159 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
160 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
162 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
163 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
165 ;; Operand number of an input operand that is shifted. Zero if the
166 ;; given instruction does not shift one of its input operands.
167 (define_attr "shift" "" (const_int 0))
169 ; Floating Point Unit. If we only have floating point emulation, then there
170 ; is no point in scheduling the floating point insns. (Well, for best
171 ; performance we should try and group them together).
172 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
173 (const (symbol_ref "arm_fpu_attr")))
; Attributes for the synchronization (atomic) insn patterns.  Each value is
; either "none" or a small integer; NOTE(review): presumably an operand
; index identifying which operand plays the given role when the sync loop
; is emitted (cf. arm_sync_loop_insns used by the "length" attribute) --
; confirm against arm.c.
175 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
180 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
181 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
182 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
183 (const_string "none"))
185 ; LENGTH of an instruction (in bytes)
186 (define_attr "length" ""
187 (cond [(not (eq_attr "sync_memory" "none"))
188 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
191 ; The architecture which supports the instruction (or alternative).
192 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
193 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
194 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
195 ; arm_arch6. This attribute is used to compute attribute "enabled",
196 ; use type "any" to enable an alternative in all cases.
197 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
198 (const_string "any"))
200 (define_attr "arch_enabled" "no,yes"
201 (cond [(eq_attr "arch" "any")
204 (and (eq_attr "arch" "a")
205 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
208 (and (eq_attr "arch" "t")
209 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
212 (and (eq_attr "arch" "t1")
213 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
216 (and (eq_attr "arch" "t2")
217 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
220 (and (eq_attr "arch" "32")
221 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
224 (and (eq_attr "arch" "v6")
225 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
228 (and (eq_attr "arch" "nov6")
229 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
232 (and (eq_attr "arch" "onlya8")
233 (eq_attr "tune" "cortexa8"))
236 (and (eq_attr "arch" "nota8")
237 (not (eq_attr "tune" "cortexa8")))
238 (const_string "yes")]
239 (const_string "no")))
241 ; Allows an insn to disable certain alternatives for reasons other than
243 (define_attr "insn_enabled" "no,yes"
244 (const_string "yes"))
246 ; Enable all alternatives that are both arch_enabled and insn_enabled.
247 (define_attr "enabled" "no,yes"
248 (if_then_else (eq_attr "insn_enabled" "yes")
249 (if_then_else (eq_attr "arch_enabled" "yes")
252 (const_string "no")))
254 ; POOL_RANGE is how far away from a constant pool entry that this insn
255 ; can be placed. If the distance is zero, then this insn will never
256 ; reference the pool.
257 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
258 ; before its address.
259 (define_attr "arm_pool_range" "" (const_int 0))
260 (define_attr "thumb2_pool_range" "" (const_int 0))
261 (define_attr "arm_neg_pool_range" "" (const_int 0))
262 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Resolve the generic pool attributes to the per-architecture variants:
; the thumb2_* value is selected whenever is_thumb is "yes" (note that
; this test also covers Thumb-1 code), otherwise the arm_* value is used.
264 (define_attr "pool_range" ""
265 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
266 (attr "arm_pool_range")))
267 (define_attr "neg_pool_range" ""
268 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
269 (attr "arm_neg_pool_range")))
271 ; An assembler sequence may clobber the condition codes without us knowing.
272 ; If such an insn references the pool, then we have no way of knowing how,
273 ; so use the most conservative value for pool_range.
274 (define_asm_attributes
275 [(set_attr "conds" "clob")
276 (set_attr "length" "4")
277 (set_attr "pool_range" "250")])
279 ;; The instruction used to implement a particular pattern. This
280 ;; information is used by pipeline descriptions to provide accurate
281 ;; scheduling information.
284 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
285 (const_string "other"))
287 ; TYPE attribute is used to detect floating point instructions which, if
288 ; running on a co-processor can run in parallel with other, basic instructions
289 ; If write-buffer scheduling is enabled then it can also be used in the
290 ; scheduling of writes.
292 ; Classification of each insn
293 ; Note: vfp.md has different meanings for some of these, and some further
294 ; types as well. See that file for details.
295 ; alu any alu instruction that doesn't hit memory or fp
296 ; regs or have a shifted source operand
297 ; alu_shift any data instruction that doesn't hit memory or fp
298 ; regs, but has a source operand shifted by a constant
299 ; alu_shift_reg any data instruction that doesn't hit memory or fp
300 ; regs, but has a source operand shifted by a register value
301 ; mult a multiply instruction
302 ; block blockage insn, this blocks all functional units
303 ; float a floating point arithmetic operation (subject to expansion)
304 ; fdivd DFmode floating point division
305 ; fdivs SFmode floating point division
306 ; fmul Floating point multiply
307 ; ffmul Fast floating point multiply
308 ; farith Floating point arithmetic (4 cycle)
309 ; ffarith Fast floating point arithmetic (2 cycle)
310 ; float_em a floating point arithmetic operation that is normally emulated
311 ; even on a machine with an fpa.
312 ; f_fpa_load a floating point load from memory. Only for the FPA.
313 ; f_fpa_store a floating point store to memory. Only for the FPA.
314 ; f_load[sd] A single/double load from memory. Used for VFP unit.
315 ; f_store[sd] A single/double store to memory. Used for VFP unit.
316 ; f_flag a transfer of co-processor flags to the CPSR
317 ; f_mem_r a transfer of a floating point register to a real reg via mem
318 ; r_mem_f the reverse of f_mem_r
319 ; f_2_r fast transfer float to arm (no memory needed)
320 ; r_2_f fast transfer arm to float
321 ; f_cvt convert floating<->integral
323 ; call a subroutine call
324 ; load_byte load byte(s) from memory to arm registers
325 ; load1 load 1 word from memory to arm registers
326 ; load2 load 2 words from memory to arm registers
327 ; load3 load 3 words from memory to arm registers
328 ; load4 load 4 words from memory to arm registers
329 ; store store 1 word to memory from arm registers
330 ; store2 store 2 words
331 ; store3 store 3 words
332 ; store4 store 4 (or more) words
333 ; Additions for Cirrus Maverick co-processor:
334 ; mav_farith Floating point arithmetic (4 cycle)
335 ; mav_dmult Double multiplies (7 cycle)
339 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
341 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
342 (const_string "mult")
343 (const_string "alu")))
345 ; Load scheduling, set from the arm_ld_sched variable
346 ; initialized by arm_option_override()
347 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
349 ;; Classification of NEON instructions for scheduling purposes.
350 ;; Do not set this attribute and the "type" attribute together in
351 ;; any one instruction pattern.
352 (define_attr "neon_type"
363 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
364 neon_mul_qqq_8_16_32_ddd_32,\
365 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
366 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
368 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
369 neon_mla_qqq_32_qqd_32_scalar,\
370 neon_mul_ddd_16_scalar_32_16_long_scalar,\
371 neon_mul_qqd_32_scalar,\
372 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
377 neon_vqshl_vrshl_vqrshl_qqq,\
379 neon_fp_vadd_ddd_vabs_dd,\
380 neon_fp_vadd_qqq_vabs_qq,\
386 neon_fp_vmla_ddd_scalar,\
387 neon_fp_vmla_qqq_scalar,\
388 neon_fp_vrecps_vrsqrts_ddd,\
389 neon_fp_vrecps_vrsqrts_qqq,\
397 neon_vld2_2_regs_vld1_vld2_all_lanes,\
400 neon_vst1_1_2_regs_vst2_2_regs,\
402 neon_vst2_4_regs_vst3_vst4,\
404 neon_vld1_vld2_lane,\
405 neon_vld3_vld4_lane,\
406 neon_vst1_vst2_lane,\
407 neon_vst3_vst4_lane,\
408 neon_vld3_vld4_all_lanes,\
416 (const_string "none"))
418 ; condition codes: this one is used by final_prescan_insn to speed up
419 ; conditionalizing instructions. It saves having to scan the rtl to see if
420 ; it uses or alters the condition codes.
422 ; USE means that the condition codes are used by the insn in the process of
423 ; outputting code, this means (at present) that we can't use the insn in
426 ; SET means that the purpose of the insn is to set the condition codes in a
427 ; well defined manner.
429 ; CLOB means that the condition codes are altered in an undefined manner, if
430 ; they are altered at all
432 ; UNCONDITIONAL means the instruction can not be conditionally executed and
433 ; that the instruction does not use or alter the condition codes.
435 ; NOCOND means that the instruction does not use or alter the condition
436 ; codes but can be converted into a conditionally executed instruction.
438 (define_attr "conds" "use,set,clob,unconditional,nocond"
440 (ior (eq_attr "is_thumb1" "yes")
441 (eq_attr "type" "call"))
442 (const_string "clob")
443 (if_then_else (eq_attr "neon_type" "none")
444 (const_string "nocond")
445 (const_string "unconditional"))))
447 ; Predicable means that the insn can be conditionally executed based on
448 ; an automatically added predicate (additional patterns are generated by
449 ; gen...). We default to 'no' because no Thumb patterns match this rule
450 ; and not all ARM patterns do.
451 (define_attr "predicable" "no,yes" (const_string "no"))
453 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
454 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
455 ; suffer blockages enough to warrant modelling this (and it can adversely
456 ; affect the schedule).
457 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
459 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
460 ; to stall the processor. Used with model_wbuf above.
461 (define_attr "write_conflict" "no,yes"
462 (if_then_else (eq_attr "type"
463 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
465 (const_string "no")))
467 ; Classify the insns into those that take one cycle and those that take more
468 ; than one on the main cpu execution unit.
469 (define_attr "core_cycles" "single,multi"
470 (if_then_else (eq_attr "type"
471 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
472 (const_string "single")
473 (const_string "multi")))
475 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
476 ;; distant label. Only applicable to Thumb code.
477 (define_attr "far_jump" "yes,no" (const_string "no"))
480 ;; The number of machine instructions this pattern expands to.
481 ;; Used for Thumb-2 conditional execution.
482 (define_attr "ce_count" "" (const_int 1))
484 ;;---------------------------------------------------------------------------
487 (include "iterators.md")
489 ;;---------------------------------------------------------------------------
492 (include "predicates.md")
493 (include "constraints.md")
495 ;;---------------------------------------------------------------------------
496 ;; Pipeline descriptions
498 (define_attr "tune_cortexr4" "yes,no"
500 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
502 (const_string "no"))))
504 ;; True if the generic scheduling description should be used.
506 (define_attr "generic_sched" "yes,no"
508 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
509 (eq_attr "tune_cortexr4" "yes"))
511 (const_string "yes"))))
513 (define_attr "generic_vfp" "yes,no"
515 (and (eq_attr "fpu" "vfp")
516 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
517 (eq_attr "tune_cortexr4" "no"))
519 (const_string "no"))))
521 (include "arm-generic.md")
522 (include "arm926ejs.md")
523 (include "arm1020e.md")
524 (include "arm1026ejs.md")
525 (include "arm1136jfs.md")
527 (include "fa606te.md")
528 (include "fa626te.md")
529 (include "fmp626.md")
530 (include "fa726te.md")
531 (include "cortex-a5.md")
532 (include "cortex-a8.md")
533 (include "cortex-a9.md")
534 (include "cortex-r4.md")
535 (include "cortex-r4f.md")
536 (include "cortex-m4.md")
537 (include "cortex-m4-fpu.md")
541 ;;---------------------------------------------------------------------------
546 ;; Note: For DImode insns, there is normally no reason why operands should
547 ;; not be in the same register, what we don't want is for something being
548 ;; written to partially overlap something that is an input.
549 ;; Cirrus 64bit additions should not be split because we have a native
550 ;; 64bit addition instruction.
552 (define_expand "adddi3"
554 [(set (match_operand:DI 0 "s_register_operand" "")
555 (plus:DI (match_operand:DI 1 "s_register_operand" "")
556 (match_operand:DI 2 "s_register_operand" "")))
557 (clobber (reg:CC CC_REGNUM))])]
560 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
562 if (!cirrus_fp_register (operands[0], DImode))
563 operands[0] = force_reg (DImode, operands[0]);
564 if (!cirrus_fp_register (operands[1], DImode))
565 operands[1] = force_reg (DImode, operands[1]);
566 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
572 if (GET_CODE (operands[1]) != REG)
573 operands[1] = force_reg (DImode, operands[1]);
574 if (GET_CODE (operands[2]) != REG)
575 operands[2] = force_reg (DImode, operands[2]);
580 (define_insn "*thumb1_adddi3"
581 [(set (match_operand:DI 0 "register_operand" "=l")
582 (plus:DI (match_operand:DI 1 "register_operand" "%0")
583 (match_operand:DI 2 "register_operand" "l")))
584 (clobber (reg:CC CC_REGNUM))
587 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
588 [(set_attr "length" "4")]
591 (define_insn_and_split "*arm_adddi3"
592 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
593 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
594 (match_operand:DI 2 "s_register_operand" "r, 0")))
595 (clobber (reg:CC CC_REGNUM))]
596 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
598 "TARGET_32BIT && reload_completed
599 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
600 [(parallel [(set (reg:CC_C CC_REGNUM)
601 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
603 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
604 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
605 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
608 operands[3] = gen_highpart (SImode, operands[0]);
609 operands[0] = gen_lowpart (SImode, operands[0]);
610 operands[4] = gen_highpart (SImode, operands[1]);
611 operands[1] = gen_lowpart (SImode, operands[1]);
612 operands[5] = gen_highpart (SImode, operands[2]);
613 operands[2] = gen_lowpart (SImode, operands[2]);
615 [(set_attr "conds" "clob")
616 (set_attr "length" "8")]
619 (define_insn_and_split "*adddi_sesidi_di"
620 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
621 (plus:DI (sign_extend:DI
622 (match_operand:SI 2 "s_register_operand" "r,r"))
623 (match_operand:DI 1 "s_register_operand" "0,r")))
624 (clobber (reg:CC CC_REGNUM))]
625 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
627 "TARGET_32BIT && reload_completed"
628 [(parallel [(set (reg:CC_C CC_REGNUM)
629 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
631 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
632 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
635 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
638 operands[3] = gen_highpart (SImode, operands[0]);
639 operands[0] = gen_lowpart (SImode, operands[0]);
640 operands[4] = gen_highpart (SImode, operands[1]);
641 operands[1] = gen_lowpart (SImode, operands[1]);
642 operands[2] = gen_lowpart (SImode, operands[2]);
644 [(set_attr "conds" "clob")
645 (set_attr "length" "8")]
648 (define_insn_and_split "*adddi_zesidi_di"
649 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
650 (plus:DI (zero_extend:DI
651 (match_operand:SI 2 "s_register_operand" "r,r"))
652 (match_operand:DI 1 "s_register_operand" "0,r")))
653 (clobber (reg:CC CC_REGNUM))]
654 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
656 "TARGET_32BIT && reload_completed"
657 [(parallel [(set (reg:CC_C CC_REGNUM)
658 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
660 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
661 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
662 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
665 operands[3] = gen_highpart (SImode, operands[0]);
666 operands[0] = gen_lowpart (SImode, operands[0]);
667 operands[4] = gen_highpart (SImode, operands[1]);
668 operands[1] = gen_lowpart (SImode, operands[1]);
669 operands[2] = gen_lowpart (SImode, operands[2]);
671 [(set_attr "conds" "clob")
672 (set_attr "length" "8")]
675 (define_expand "addsi3"
676 [(set (match_operand:SI 0 "s_register_operand" "")
677 (plus:SI (match_operand:SI 1 "s_register_operand" "")
678 (match_operand:SI 2 "reg_or_int_operand" "")))]
681 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
683 arm_split_constant (PLUS, SImode, NULL_RTX,
684 INTVAL (operands[2]), operands[0], operands[1],
685 optimize && can_create_pseudo_p ());
691 ; If there is a scratch available, this will be faster than synthesizing the
694 [(match_scratch:SI 3 "r")
695 (set (match_operand:SI 0 "arm_general_register_operand" "")
696 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
697 (match_operand:SI 2 "const_int_operand" "")))]
699 !(const_ok_for_arm (INTVAL (operands[2]))
700 || const_ok_for_arm (-INTVAL (operands[2])))
701 && const_ok_for_arm (~INTVAL (operands[2]))"
702 [(set (match_dup 3) (match_dup 2))
703 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
707 ;; The r/r/k alternative is required when reloading the address
708 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
709 ;; put the duplicated register first, and not try the commutative version.
710 (define_insn_and_split "*arm_addsi3"
711 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
712 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
713 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
723 && GET_CODE (operands[2]) == CONST_INT
724 && !(const_ok_for_arm (INTVAL (operands[2]))
725 || const_ok_for_arm (-INTVAL (operands[2])))
726 && (reload_completed || !arm_eliminable_register (operands[1]))"
727 [(clobber (const_int 0))]
729 arm_split_constant (PLUS, SImode, curr_insn,
730 INTVAL (operands[2]), operands[0],
734 [(set_attr "length" "4,4,4,4,4,16")
735 (set_attr "predicable" "yes")]
738 (define_insn_and_split "*thumb1_addsi3"
739 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
740 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
741 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
744 static const char * const asms[] =
746 \"add\\t%0, %0, %2\",
747 \"sub\\t%0, %0, #%n2\",
748 \"add\\t%0, %1, %2\",
749 \"add\\t%0, %0, %2\",
750 \"add\\t%0, %0, %2\",
751 \"add\\t%0, %1, %2\",
752 \"add\\t%0, %1, %2\",
757 if ((which_alternative == 2 || which_alternative == 6)
758 && GET_CODE (operands[2]) == CONST_INT
759 && INTVAL (operands[2]) < 0)
760 return \"sub\\t%0, %1, #%n2\";
761 return asms[which_alternative];
763 "&& reload_completed && CONST_INT_P (operands[2])
764 && ((operands[1] != stack_pointer_rtx
765 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
766 || (operands[1] == stack_pointer_rtx
767 && INTVAL (operands[2]) > 1020))"
768 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
769 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
771 HOST_WIDE_INT offset = INTVAL (operands[2]);
772 if (operands[1] == stack_pointer_rtx)
778 else if (offset < -255)
781 operands[3] = GEN_INT (offset);
782 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
784 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
787 ;; Reloading and elimination of the frame pointer can
788 ;; sometimes cause this optimization to be missed.
790 [(set (match_operand:SI 0 "arm_general_register_operand" "")
791 (match_operand:SI 1 "const_int_operand" ""))
793 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
795 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
796 && (INTVAL (operands[1]) & 3) == 0"
797 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
801 (define_insn "addsi3_compare0"
802 [(set (reg:CC_NOOV CC_REGNUM)
804 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
805 (match_operand:SI 2 "arm_add_operand" "rI,L"))
807 (set (match_operand:SI 0 "s_register_operand" "=r,r")
808 (plus:SI (match_dup 1) (match_dup 2)))]
812 sub%.\\t%0, %1, #%n2"
813 [(set_attr "conds" "set")]
816 (define_insn "*addsi3_compare0_scratch"
817 [(set (reg:CC_NOOV CC_REGNUM)
819 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
820 (match_operand:SI 1 "arm_add_operand" "rI,L"))
826 [(set_attr "conds" "set")]
829 (define_insn "*compare_negsi_si"
830 [(set (reg:CC_Z CC_REGNUM)
832 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
833 (match_operand:SI 1 "s_register_operand" "r")))]
836 [(set_attr "conds" "set")]
839 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
840 ;; addend is a constant.
841 (define_insn "*cmpsi2_addneg"
842 [(set (reg:CC CC_REGNUM)
844 (match_operand:SI 1 "s_register_operand" "r,r")
845 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
846 (set (match_operand:SI 0 "s_register_operand" "=r,r")
847 (plus:SI (match_dup 1)
848 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
849 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
852 sub%.\\t%0, %1, #%n3"
853 [(set_attr "conds" "set")]
856 ;; Convert the sequence
858 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
862 ;; bcs dest ((unsigned)rn >= 1)
863 ;; similarly for the beq variant using bcc.
864 ;; This is a common looping idiom (while (n--))
866 [(set (match_operand:SI 0 "arm_general_register_operand" "")
867 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
869 (set (match_operand 2 "cc_register" "")
870 (compare (match_dup 0) (const_int -1)))
872 (if_then_else (match_operator 3 "equality_operator"
873 [(match_dup 2) (const_int 0)])
874 (match_operand 4 "" "")
875 (match_operand 5 "" "")))]
876 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
880 (match_dup 1) (const_int 1)))
881 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
883 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
886 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
887 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
890 operands[2], const0_rtx);"
893 ;; The next four insns work because they compare the result with one of
894 ;; the operands, and we know that the use of the condition code is
895 ;; either GEU or LTU, so we can use the carry flag from the addition
896 ;; instead of doing the compare a second time.
897 (define_insn "*addsi3_compare_op1"
898 [(set (reg:CC_C CC_REGNUM)
900 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
901 (match_operand:SI 2 "arm_add_operand" "rI,L"))
903 (set (match_operand:SI 0 "s_register_operand" "=r,r")
904 (plus:SI (match_dup 1) (match_dup 2)))]
908 sub%.\\t%0, %1, #%n2"
909 [(set_attr "conds" "set")]
912 (define_insn "*addsi3_compare_op2"
913 [(set (reg:CC_C CC_REGNUM)
915 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
916 (match_operand:SI 2 "arm_add_operand" "rI,L"))
918 (set (match_operand:SI 0 "s_register_operand" "=r,r")
919 (plus:SI (match_dup 1) (match_dup 2)))]
923 sub%.\\t%0, %1, #%n2"
924 [(set_attr "conds" "set")]
927 (define_insn "*compare_addsi2_op0"
928 [(set (reg:CC_C CC_REGNUM)
930 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
931 (match_operand:SI 1 "arm_add_operand" "rI,L"))
937 [(set_attr "conds" "set")]
940 (define_insn "*compare_addsi2_op1"
941 [(set (reg:CC_C CC_REGNUM)
943 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
944 (match_operand:SI 1 "arm_add_operand" "rI,L"))
950 [(set_attr "conds" "set")]
953 (define_insn "*addsi3_carryin_<optab>"
954 [(set (match_operand:SI 0 "s_register_operand" "=r")
955 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
956 (match_operand:SI 2 "arm_rhs_operand" "rI"))
957 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
960 [(set_attr "conds" "use")]
963 (define_insn "*addsi3_carryin_alt2_<optab>"
964 [(set (match_operand:SI 0 "s_register_operand" "=r")
965 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
966 (match_operand:SI 1 "s_register_operand" "%r"))
967 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
970 [(set_attr "conds" "use")]
973 (define_insn "*addsi3_carryin_shift_<optab>"
974 [(set (match_operand:SI 0 "s_register_operand" "=r")
976 (match_operator:SI 2 "shift_operator"
977 [(match_operand:SI 3 "s_register_operand" "r")
978 (match_operand:SI 4 "reg_or_int_operand" "rM")])
979 (match_operand:SI 1 "s_register_operand" "r"))
980 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
982 "adc%?\\t%0, %1, %3%S2"
983 [(set_attr "conds" "use")
984 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
985 (const_string "alu_shift")
986 (const_string "alu_shift_reg")))]
989 (define_insn "*addsi3_carryin_clobercc_<optab>"
990 [(set (match_operand:SI 0 "s_register_operand" "=r")
991 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
992 (match_operand:SI 2 "arm_rhs_operand" "rI"))
993 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
994 (clobber (reg:CC CC_REGNUM))]
997 [(set_attr "conds" "set")]
1000 (define_expand "incscc"
1001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1002 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1003 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1004 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1009 (define_insn "*arm_incscc"
1010 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1011 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1012 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1013 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1017 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1018 [(set_attr "conds" "use")
1019 (set_attr "length" "4,8")]
1022 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
1024 [(set (match_operand:SI 0 "s_register_operand" "")
1025 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1026 (match_operand:SI 2 "s_register_operand" ""))
1028 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1030 [(set (match_dup 3) (match_dup 1))
1031 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1033 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1036 (define_expand "addsf3"
1037 [(set (match_operand:SF 0 "s_register_operand" "")
1038 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1039 (match_operand:SF 2 "arm_float_add_operand" "")))]
1040 "TARGET_32BIT && TARGET_HARD_FLOAT"
1043 && !cirrus_fp_register (operands[2], SFmode))
1044 operands[2] = force_reg (SFmode, operands[2]);
1047 (define_expand "adddf3"
1048 [(set (match_operand:DF 0 "s_register_operand" "")
1049 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1050 (match_operand:DF 2 "arm_float_add_operand" "")))]
1051 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1054 && !cirrus_fp_register (operands[2], DFmode))
1055 operands[2] = force_reg (DFmode, operands[2]);
1058 (define_expand "subdi3"
1060 [(set (match_operand:DI 0 "s_register_operand" "")
1061 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1062 (match_operand:DI 2 "s_register_operand" "")))
1063 (clobber (reg:CC CC_REGNUM))])]
1066 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1068 && cirrus_fp_register (operands[0], DImode)
1069 && cirrus_fp_register (operands[1], DImode))
1071 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1077 if (GET_CODE (operands[1]) != REG)
1078 operands[1] = force_reg (DImode, operands[1]);
1079 if (GET_CODE (operands[2]) != REG)
1080 operands[2] = force_reg (DImode, operands[2]);
1085 (define_insn "*arm_subdi3"
1086 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1087 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1088 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1089 (clobber (reg:CC CC_REGNUM))]
1090 "TARGET_32BIT && !TARGET_NEON"
1091 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1092 [(set_attr "conds" "clob")
1093 (set_attr "length" "8")]
1096 (define_insn "*thumb_subdi3"
1097 [(set (match_operand:DI 0 "register_operand" "=l")
1098 (minus:DI (match_operand:DI 1 "register_operand" "0")
1099 (match_operand:DI 2 "register_operand" "l")))
1100 (clobber (reg:CC CC_REGNUM))]
1102 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1103 [(set_attr "length" "4")]
1106 (define_insn "*subdi_di_zesidi"
1107 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1108 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1110 (match_operand:SI 2 "s_register_operand" "r,r"))))
1111 (clobber (reg:CC CC_REGNUM))]
1113 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1114 [(set_attr "conds" "clob")
1115 (set_attr "length" "8")]
1118 (define_insn "*subdi_di_sesidi"
1119 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1120 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1122 (match_operand:SI 2 "s_register_operand" "r,r"))))
1123 (clobber (reg:CC CC_REGNUM))]
1125 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1126 [(set_attr "conds" "clob")
1127 (set_attr "length" "8")]
1130 (define_insn "*subdi_zesidi_di"
1131 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1132 (minus:DI (zero_extend:DI
1133 (match_operand:SI 2 "s_register_operand" "r,r"))
1134 (match_operand:DI 1 "s_register_operand" "0,r")))
1135 (clobber (reg:CC CC_REGNUM))]
1137 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1138 [(set_attr "conds" "clob")
1139 (set_attr "length" "8")]
1142 (define_insn "*subdi_sesidi_di"
1143 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1144 (minus:DI (sign_extend:DI
1145 (match_operand:SI 2 "s_register_operand" "r,r"))
1146 (match_operand:DI 1 "s_register_operand" "0,r")))
1147 (clobber (reg:CC CC_REGNUM))]
1149 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1150 [(set_attr "conds" "clob")
1151 (set_attr "length" "8")]
1154 (define_insn "*subdi_zesidi_zesidi"
1155 [(set (match_operand:DI 0 "s_register_operand" "=r")
1156 (minus:DI (zero_extend:DI
1157 (match_operand:SI 1 "s_register_operand" "r"))
1159 (match_operand:SI 2 "s_register_operand" "r"))))
1160 (clobber (reg:CC CC_REGNUM))]
1162 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1163 [(set_attr "conds" "clob")
1164 (set_attr "length" "8")]
1167 (define_expand "subsi3"
1168 [(set (match_operand:SI 0 "s_register_operand" "")
1169 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1170 (match_operand:SI 2 "s_register_operand" "")))]
1173 if (GET_CODE (operands[1]) == CONST_INT)
1177 arm_split_constant (MINUS, SImode, NULL_RTX,
1178 INTVAL (operands[1]), operands[0],
1179 operands[2], optimize && can_create_pseudo_p ());
1182 else /* TARGET_THUMB1 */
1183 operands[1] = force_reg (SImode, operands[1]);
1188 (define_insn "thumb1_subsi3_insn"
1189 [(set (match_operand:SI 0 "register_operand" "=l")
1190 (minus:SI (match_operand:SI 1 "register_operand" "l")
1191 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1194 [(set_attr "length" "2")
1195 (set_attr "conds" "set")])
1197 ; ??? Check Thumb-2 split length
1198 (define_insn_and_split "*arm_subsi3_insn"
1199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1200 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1201 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1209 "&& ((GET_CODE (operands[1]) == CONST_INT
1210 && !const_ok_for_arm (INTVAL (operands[1])))
1211 || (GET_CODE (operands[2]) == CONST_INT
1212 && !const_ok_for_arm (INTVAL (operands[2]))))"
1213 [(clobber (const_int 0))]
1215 arm_split_constant (MINUS, SImode, curr_insn,
1216 INTVAL (operands[1]), operands[0], operands[2], 0);
1219 [(set_attr "length" "4,4,4,16,16")
1220 (set_attr "predicable" "yes")]
1224 [(match_scratch:SI 3 "r")
1225 (set (match_operand:SI 0 "arm_general_register_operand" "")
1226 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1227 (match_operand:SI 2 "arm_general_register_operand" "")))]
1229 && !const_ok_for_arm (INTVAL (operands[1]))
1230 && const_ok_for_arm (~INTVAL (operands[1]))"
1231 [(set (match_dup 3) (match_dup 1))
1232 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1236 (define_insn "*subsi3_compare0"
1237 [(set (reg:CC_NOOV CC_REGNUM)
1239 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1240 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1242 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1243 (minus:SI (match_dup 1) (match_dup 2)))]
1248 [(set_attr "conds" "set")]
1251 (define_insn "*subsi3_compare"
1252 [(set (reg:CC CC_REGNUM)
1253 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1254 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1255 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1256 (minus:SI (match_dup 1) (match_dup 2)))]
1261 [(set_attr "conds" "set")]
1264 (define_expand "decscc"
1265 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1266 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1267 (match_operator:SI 2 "arm_comparison_operator"
1268 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1273 (define_insn "*arm_decscc"
1274 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1275 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1276 (match_operator:SI 2 "arm_comparison_operator"
1277 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1281 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1282 [(set_attr "conds" "use")
1283 (set_attr "length" "*,8")]
1286 (define_expand "subsf3"
1287 [(set (match_operand:SF 0 "s_register_operand" "")
1288 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1289 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1290 "TARGET_32BIT && TARGET_HARD_FLOAT"
1292 if (TARGET_MAVERICK)
1294 if (!cirrus_fp_register (operands[1], SFmode))
1295 operands[1] = force_reg (SFmode, operands[1]);
1296 if (!cirrus_fp_register (operands[2], SFmode))
1297 operands[2] = force_reg (SFmode, operands[2]);
1301 (define_expand "subdf3"
1302 [(set (match_operand:DF 0 "s_register_operand" "")
1303 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1304 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1305 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1307 if (TARGET_MAVERICK)
1309 if (!cirrus_fp_register (operands[1], DFmode))
1310 operands[1] = force_reg (DFmode, operands[1]);
1311 if (!cirrus_fp_register (operands[2], DFmode))
1312 operands[2] = force_reg (DFmode, operands[2]);
1317 ;; Multiplication insns
1319 (define_expand "mulsi3"
1320 [(set (match_operand:SI 0 "s_register_operand" "")
1321 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1322 (match_operand:SI 1 "s_register_operand" "")))]
1327 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1328 (define_insn "*arm_mulsi3"
1329 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1330 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1331 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1332 "TARGET_32BIT && !arm_arch6"
1333 "mul%?\\t%0, %2, %1"
1334 [(set_attr "insn" "mul")
1335 (set_attr "predicable" "yes")]
1338 (define_insn "*arm_mulsi3_v6"
1339 [(set (match_operand:SI 0 "s_register_operand" "=r")
1340 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1341 (match_operand:SI 2 "s_register_operand" "r")))]
1342 "TARGET_32BIT && arm_arch6"
1343 "mul%?\\t%0, %1, %2"
1344 [(set_attr "insn" "mul")
1345 (set_attr "predicable" "yes")]
1348 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1349 ; 1 and 2 are the same, because reload will make operand 0 match
1350 ; operand 1 without realizing that this conflicts with operand 2. We fix
1351 ; this by adding another alternative to match this case, and then `reload'
1352 ; it ourselves. This alternative must come first.
1353 (define_insn "*thumb_mulsi3"
1354 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1355 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1356 (match_operand:SI 2 "register_operand" "l,l,l")))]
1357 "TARGET_THUMB1 && !arm_arch6"
1359 if (which_alternative < 2)
1360 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1362 return \"mul\\t%0, %2\";
1364 [(set_attr "length" "4,4,2")
1365 (set_attr "insn" "mul")]
1368 (define_insn "*thumb_mulsi3_v6"
1369 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1370 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1371 (match_operand:SI 2 "register_operand" "l,0,0")))]
1372 "TARGET_THUMB1 && arm_arch6"
1377 [(set_attr "length" "2")
1378 (set_attr "insn" "mul")]
1381 (define_insn "*mulsi3_compare0"
1382 [(set (reg:CC_NOOV CC_REGNUM)
1383 (compare:CC_NOOV (mult:SI
1384 (match_operand:SI 2 "s_register_operand" "r,r")
1385 (match_operand:SI 1 "s_register_operand" "%0,r"))
1387 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1388 (mult:SI (match_dup 2) (match_dup 1)))]
1389 "TARGET_ARM && !arm_arch6"
1390 "mul%.\\t%0, %2, %1"
1391 [(set_attr "conds" "set")
1392 (set_attr "insn" "muls")]
1395 (define_insn "*mulsi3_compare0_v6"
1396 [(set (reg:CC_NOOV CC_REGNUM)
1397 (compare:CC_NOOV (mult:SI
1398 (match_operand:SI 2 "s_register_operand" "r")
1399 (match_operand:SI 1 "s_register_operand" "r"))
1401 (set (match_operand:SI 0 "s_register_operand" "=r")
1402 (mult:SI (match_dup 2) (match_dup 1)))]
1403 "TARGET_ARM && arm_arch6 && optimize_size"
1404 "mul%.\\t%0, %2, %1"
1405 [(set_attr "conds" "set")
1406 (set_attr "insn" "muls")]
1409 (define_insn "*mulsi_compare0_scratch"
1410 [(set (reg:CC_NOOV CC_REGNUM)
1411 (compare:CC_NOOV (mult:SI
1412 (match_operand:SI 2 "s_register_operand" "r,r")
1413 (match_operand:SI 1 "s_register_operand" "%0,r"))
1415 (clobber (match_scratch:SI 0 "=&r,&r"))]
1416 "TARGET_ARM && !arm_arch6"
1417 "mul%.\\t%0, %2, %1"
1418 [(set_attr "conds" "set")
1419 (set_attr "insn" "muls")]
1422 (define_insn "*mulsi_compare0_scratch_v6"
1423 [(set (reg:CC_NOOV CC_REGNUM)
1424 (compare:CC_NOOV (mult:SI
1425 (match_operand:SI 2 "s_register_operand" "r")
1426 (match_operand:SI 1 "s_register_operand" "r"))
1428 (clobber (match_scratch:SI 0 "=r"))]
1429 "TARGET_ARM && arm_arch6 && optimize_size"
1430 "mul%.\\t%0, %2, %1"
1431 [(set_attr "conds" "set")
1432 (set_attr "insn" "muls")]
1435 ;; Unnamed templates to match MLA instruction.
1437 (define_insn "*mulsi3addsi"
1438 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1440 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1441 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1442 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1443 "TARGET_32BIT && !arm_arch6"
1444 "mla%?\\t%0, %2, %1, %3"
1445 [(set_attr "insn" "mla")
1446 (set_attr "predicable" "yes")]
1449 (define_insn "*mulsi3addsi_v6"
1450 [(set (match_operand:SI 0 "s_register_operand" "=r")
1452 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1453 (match_operand:SI 1 "s_register_operand" "r"))
1454 (match_operand:SI 3 "s_register_operand" "r")))]
1455 "TARGET_32BIT && arm_arch6"
1456 "mla%?\\t%0, %2, %1, %3"
1457 [(set_attr "insn" "mla")
1458 (set_attr "predicable" "yes")]
1461 (define_insn "*mulsi3addsi_compare0"
1462 [(set (reg:CC_NOOV CC_REGNUM)
1465 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1466 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1467 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1469 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1470 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1472 "TARGET_ARM && arm_arch6"
1473 "mla%.\\t%0, %2, %1, %3"
1474 [(set_attr "conds" "set")
1475 (set_attr "insn" "mlas")]
1478 (define_insn "*mulsi3addsi_compare0_v6"
1479 [(set (reg:CC_NOOV CC_REGNUM)
1482 (match_operand:SI 2 "s_register_operand" "r")
1483 (match_operand:SI 1 "s_register_operand" "r"))
1484 (match_operand:SI 3 "s_register_operand" "r"))
1486 (set (match_operand:SI 0 "s_register_operand" "=r")
1487 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1489 "TARGET_ARM && arm_arch6 && optimize_size"
1490 "mla%.\\t%0, %2, %1, %3"
1491 [(set_attr "conds" "set")
1492 (set_attr "insn" "mlas")]
1495 (define_insn "*mulsi3addsi_compare0_scratch"
1496 [(set (reg:CC_NOOV CC_REGNUM)
1499 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1500 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1501 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1503 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1504 "TARGET_ARM && !arm_arch6"
1505 "mla%.\\t%0, %2, %1, %3"
1506 [(set_attr "conds" "set")
1507 (set_attr "insn" "mlas")]
1510 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1511 [(set (reg:CC_NOOV CC_REGNUM)
1514 (match_operand:SI 2 "s_register_operand" "r")
1515 (match_operand:SI 1 "s_register_operand" "r"))
1516 (match_operand:SI 3 "s_register_operand" "r"))
1518 (clobber (match_scratch:SI 0 "=r"))]
1519 "TARGET_ARM && arm_arch6 && optimize_size"
1520 "mla%.\\t%0, %2, %1, %3"
1521 [(set_attr "conds" "set")
1522 (set_attr "insn" "mlas")]
1525 (define_insn "*mulsi3subsi"
1526 [(set (match_operand:SI 0 "s_register_operand" "=r")
1528 (match_operand:SI 3 "s_register_operand" "r")
1529 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1530 (match_operand:SI 1 "s_register_operand" "r"))))]
1531 "TARGET_32BIT && arm_arch_thumb2"
1532 "mls%?\\t%0, %2, %1, %3"
1533 [(set_attr "insn" "mla")
1534 (set_attr "predicable" "yes")]
1537 (define_expand "maddsidi4"
1538 [(set (match_operand:DI 0 "s_register_operand" "")
1541 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1542 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1543 (match_operand:DI 3 "s_register_operand" "")))]
1544 "TARGET_32BIT && arm_arch3m"
1547 (define_insn "*mulsidi3adddi"
1548 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1551 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1552 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1553 (match_operand:DI 1 "s_register_operand" "0")))]
1554 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1555 "smlal%?\\t%Q0, %R0, %3, %2"
1556 [(set_attr "insn" "smlal")
1557 (set_attr "predicable" "yes")]
1560 (define_insn "*mulsidi3adddi_v6"
1561 [(set (match_operand:DI 0 "s_register_operand" "=r")
1564 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1565 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1566 (match_operand:DI 1 "s_register_operand" "0")))]
1567 "TARGET_32BIT && arm_arch6"
1568 "smlal%?\\t%Q0, %R0, %3, %2"
1569 [(set_attr "insn" "smlal")
1570 (set_attr "predicable" "yes")]
1573 ;; 32x32->64 widening multiply.
1574 ;; As with mulsi3, the only difference between the v3-5 and v6+
1575 ;; versions of these patterns is the requirement that the output not
1576 ;; overlap the inputs, but that still means we have to have a named
1577 ;; expander and two different starred insns.
1579 (define_expand "mulsidi3"
1580 [(set (match_operand:DI 0 "s_register_operand" "")
1582 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1583 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1584 "TARGET_32BIT && arm_arch3m"
1588 (define_insn "*mulsidi3_nov6"
1589 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1591 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1592 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1593 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1594 "smull%?\\t%Q0, %R0, %1, %2"
1595 [(set_attr "insn" "smull")
1596 (set_attr "predicable" "yes")]
1599 (define_insn "*mulsidi3_v6"
1600 [(set (match_operand:DI 0 "s_register_operand" "=r")
1602 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1603 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1604 "TARGET_32BIT && arm_arch6"
1605 "smull%?\\t%Q0, %R0, %1, %2"
1606 [(set_attr "insn" "smull")
1607 (set_attr "predicable" "yes")]
1610 (define_expand "umulsidi3"
1611 [(set (match_operand:DI 0 "s_register_operand" "")
1613 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1614 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1615 "TARGET_32BIT && arm_arch3m"
1619 (define_insn "*umulsidi3_nov6"
1620 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1622 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1623 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1624 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1625 "umull%?\\t%Q0, %R0, %1, %2"
1626 [(set_attr "insn" "umull")
1627 (set_attr "predicable" "yes")]
1630 (define_insn "*umulsidi3_v6"
1631 [(set (match_operand:DI 0 "s_register_operand" "=r")
1633 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1634 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1635 "TARGET_32BIT && arm_arch6"
1636 "umull%?\\t%Q0, %R0, %1, %2"
1637 [(set_attr "insn" "umull")
1638 (set_attr "predicable" "yes")]
1641 (define_expand "umaddsidi4"
1642 [(set (match_operand:DI 0 "s_register_operand" "")
1645 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1646 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1647 (match_operand:DI 3 "s_register_operand" "")))]
1648 "TARGET_32BIT && arm_arch3m"
1651 (define_insn "*umulsidi3adddi"
1652 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1655 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1656 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1657 (match_operand:DI 1 "s_register_operand" "0")))]
1658 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1659 "umlal%?\\t%Q0, %R0, %3, %2"
1660 [(set_attr "insn" "umlal")
1661 (set_attr "predicable" "yes")]
1664 (define_insn "*umulsidi3adddi_v6"
1665 [(set (match_operand:DI 0 "s_register_operand" "=r")
1668 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1669 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1670 (match_operand:DI 1 "s_register_operand" "0")))]
1671 "TARGET_32BIT && arm_arch6"
1672 "umlal%?\\t%Q0, %R0, %3, %2"
1673 [(set_attr "insn" "umlal")
1674 (set_attr "predicable" "yes")]
1677 (define_expand "smulsi3_highpart"
1679 [(set (match_operand:SI 0 "s_register_operand" "")
1683 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1684 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1686 (clobber (match_scratch:SI 3 ""))])]
1687 "TARGET_32BIT && arm_arch3m"
1691 (define_insn "*smulsi3_highpart_nov6"
1692 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1696 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1697 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1699 (clobber (match_scratch:SI 3 "=&r,&r"))]
1700 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1701 "smull%?\\t%3, %0, %2, %1"
1702 [(set_attr "insn" "smull")
1703 (set_attr "predicable" "yes")]
1706 (define_insn "*smulsi3_highpart_v6"
1707 [(set (match_operand:SI 0 "s_register_operand" "=r")
1711 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1712 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1714 (clobber (match_scratch:SI 3 "=r"))]
1715 "TARGET_32BIT && arm_arch6"
1716 "smull%?\\t%3, %0, %2, %1"
1717 [(set_attr "insn" "smull")
1718 (set_attr "predicable" "yes")]
1721 (define_expand "umulsi3_highpart"
1723 [(set (match_operand:SI 0 "s_register_operand" "")
1727 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1728 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1730 (clobber (match_scratch:SI 3 ""))])]
1731 "TARGET_32BIT && arm_arch3m"
1735 (define_insn "*umulsi3_highpart_nov6"
1736 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1740 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1741 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1743 (clobber (match_scratch:SI 3 "=&r,&r"))]
1744 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1745 "umull%?\\t%3, %0, %2, %1"
1746 [(set_attr "insn" "umull")
1747 (set_attr "predicable" "yes")]
1750 (define_insn "*umulsi3_highpart_v6"
1751 [(set (match_operand:SI 0 "s_register_operand" "=r")
1755 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1756 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1758 (clobber (match_scratch:SI 3 "=r"))]
1759 "TARGET_32BIT && arm_arch6"
1760 "umull%?\\t%3, %0, %2, %1"
1761 [(set_attr "insn" "umull")
1762 (set_attr "predicable" "yes")]
1765 (define_insn "mulhisi3"
1766 [(set (match_operand:SI 0 "s_register_operand" "=r")
1767 (mult:SI (sign_extend:SI
1768 (match_operand:HI 1 "s_register_operand" "%r"))
1770 (match_operand:HI 2 "s_register_operand" "r"))))]
1771 "TARGET_DSP_MULTIPLY"
1772 "smulbb%?\\t%0, %1, %2"
1773 [(set_attr "insn" "smulxy")
1774 (set_attr "predicable" "yes")]
1777 (define_insn "*mulhisi3tb"
1778 [(set (match_operand:SI 0 "s_register_operand" "=r")
1779 (mult:SI (ashiftrt:SI
1780 (match_operand:SI 1 "s_register_operand" "r")
1783 (match_operand:HI 2 "s_register_operand" "r"))))]
1784 "TARGET_DSP_MULTIPLY"
1785 "smultb%?\\t%0, %1, %2"
1786 [(set_attr "insn" "smulxy")
1787 (set_attr "predicable" "yes")]
1790 (define_insn "*mulhisi3bt"
1791 [(set (match_operand:SI 0 "s_register_operand" "=r")
1792 (mult:SI (sign_extend:SI
1793 (match_operand:HI 1 "s_register_operand" "r"))
1795 (match_operand:SI 2 "s_register_operand" "r")
1797 "TARGET_DSP_MULTIPLY"
1798 "smulbt%?\\t%0, %1, %2"
1799 [(set_attr "insn" "smulxy")
1800 (set_attr "predicable" "yes")]
1803 (define_insn "*mulhisi3tt"
1804 [(set (match_operand:SI 0 "s_register_operand" "=r")
1805 (mult:SI (ashiftrt:SI
1806 (match_operand:SI 1 "s_register_operand" "r")
1809 (match_operand:SI 2 "s_register_operand" "r")
1811 "TARGET_DSP_MULTIPLY"
1812 "smultt%?\\t%0, %1, %2"
1813 [(set_attr "insn" "smulxy")
1814 (set_attr "predicable" "yes")]
1817 (define_insn "maddhisi4"
1818 [(set (match_operand:SI 0 "s_register_operand" "=r")
1819 (plus:SI (mult:SI (sign_extend:SI
1820 (match_operand:HI 1 "s_register_operand" "r"))
1822 (match_operand:HI 2 "s_register_operand" "r")))
1823 (match_operand:SI 3 "s_register_operand" "r")))]
1824 "TARGET_DSP_MULTIPLY"
1825 "smlabb%?\\t%0, %1, %2, %3"
1826 [(set_attr "insn" "smlaxy")
1827 (set_attr "predicable" "yes")]
1830 ;; Note: there is no maddhisi4ibt because this one is canonical form
1831 (define_insn "*maddhisi4tb"
1832 [(set (match_operand:SI 0 "s_register_operand" "=r")
1833 (plus:SI (mult:SI (ashiftrt:SI
1834 (match_operand:SI 1 "s_register_operand" "r")
1837 (match_operand:HI 2 "s_register_operand" "r")))
1838 (match_operand:SI 3 "s_register_operand" "r")))]
1839 "TARGET_DSP_MULTIPLY"
1840 "smlatb%?\\t%0, %1, %2, %3"
1841 [(set_attr "insn" "smlaxy")
1842 (set_attr "predicable" "yes")]
1845 (define_insn "*maddhisi4tt"
1846 [(set (match_operand:SI 0 "s_register_operand" "=r")
1847 (plus:SI (mult:SI (ashiftrt:SI
1848 (match_operand:SI 1 "s_register_operand" "r")
1851 (match_operand:SI 2 "s_register_operand" "r")
1853 (match_operand:SI 3 "s_register_operand" "r")))]
1854 "TARGET_DSP_MULTIPLY"
1855 "smlatt%?\\t%0, %1, %2, %3"
1856 [(set_attr "insn" "smlaxy")
1857 (set_attr "predicable" "yes")]
1860 (define_insn "*maddhidi4"
1861 [(set (match_operand:DI 0 "s_register_operand" "=r")
1863 (mult:DI (sign_extend:DI
1864 (match_operand:HI 1 "s_register_operand" "r"))
1866 (match_operand:HI 2 "s_register_operand" "r")))
1867 (match_operand:DI 3 "s_register_operand" "0")))]
1868 "TARGET_DSP_MULTIPLY"
1869 "smlalbb%?\\t%Q0, %R0, %1, %2"
1870 [(set_attr "insn" "smlalxy")
1871 (set_attr "predicable" "yes")])
1873 ;; Note: there is no maddhidi4ibt because this one is canonical form
1874 (define_insn "*maddhidi4tb"
1875 [(set (match_operand:DI 0 "s_register_operand" "=r")
1877 (mult:DI (sign_extend:DI
1879 (match_operand:SI 1 "s_register_operand" "r")
1882 (match_operand:HI 2 "s_register_operand" "r")))
1883 (match_operand:DI 3 "s_register_operand" "0")))]
1884 "TARGET_DSP_MULTIPLY"
1885 "smlaltb%?\\t%Q0, %R0, %1, %2"
1886 [(set_attr "insn" "smlalxy")
1887 (set_attr "predicable" "yes")])
1889 (define_insn "*maddhidi4tt"
1890 [(set (match_operand:DI 0 "s_register_operand" "=r")
1892 (mult:DI (sign_extend:DI
1894 (match_operand:SI 1 "s_register_operand" "r")
1898 (match_operand:SI 2 "s_register_operand" "r")
1900 (match_operand:DI 3 "s_register_operand" "0")))]
1901 "TARGET_DSP_MULTIPLY"
1902 "smlaltt%?\\t%Q0, %R0, %1, %2"
1903 [(set_attr "insn" "smlalxy")
1904 (set_attr "predicable" "yes")])
1906 (define_expand "mulsf3"
1907 [(set (match_operand:SF 0 "s_register_operand" "")
1908 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1909 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1910 "TARGET_32BIT && TARGET_HARD_FLOAT"
1913 && !cirrus_fp_register (operands[2], SFmode))
1914 operands[2] = force_reg (SFmode, operands[2]);
1917 (define_expand "muldf3"
1918 [(set (match_operand:DF 0 "s_register_operand" "")
1919 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1920 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1921 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1924 && !cirrus_fp_register (operands[2], DFmode))
1925 operands[2] = force_reg (DFmode, operands[2]);
1930 (define_expand "divsf3"
1931 [(set (match_operand:SF 0 "s_register_operand" "")
1932 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1933 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1934 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1937 (define_expand "divdf3"
1938 [(set (match_operand:DF 0 "s_register_operand" "")
1939 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1940 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1941 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1946 (define_expand "modsf3"
1947 [(set (match_operand:SF 0 "s_register_operand" "")
1948 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1949 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1950 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1953 (define_expand "moddf3"
1954 [(set (match_operand:DF 0 "s_register_operand" "")
1955 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1956 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1957 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1960 ;; Boolean and,ior,xor insns
1962 ;; Split up double word logical operations
1964 ;; Split up simple DImode logical operations. Simply perform the logical
1965 ;; operation on the upper and lower halves of the registers.
1967 [(set (match_operand:DI 0 "s_register_operand" "")
1968 (match_operator:DI 6 "logical_binary_operator"
1969 [(match_operand:DI 1 "s_register_operand" "")
1970 (match_operand:DI 2 "s_register_operand" "")]))]
1971 "TARGET_32BIT && reload_completed
1972 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1973 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1974 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1975 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1978 operands[3] = gen_highpart (SImode, operands[0]);
1979 operands[0] = gen_lowpart (SImode, operands[0]);
1980 operands[4] = gen_highpart (SImode, operands[1]);
1981 operands[1] = gen_lowpart (SImode, operands[1]);
1982 operands[5] = gen_highpart (SImode, operands[2]);
1983 operands[2] = gen_lowpart (SImode, operands[2]);
1988 [(set (match_operand:DI 0 "s_register_operand" "")
1989 (match_operator:DI 6 "logical_binary_operator"
1990 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1991 (match_operand:DI 1 "s_register_operand" "")]))]
1992 "TARGET_32BIT && reload_completed"
1993 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1994 (set (match_dup 3) (match_op_dup:SI 6
1995 [(ashiftrt:SI (match_dup 2) (const_int 31))
1999 operands[3] = gen_highpart (SImode, operands[0]);
2000 operands[0] = gen_lowpart (SImode, operands[0]);
2001 operands[4] = gen_highpart (SImode, operands[1]);
2002 operands[1] = gen_lowpart (SImode, operands[1]);
2003 operands[5] = gen_highpart (SImode, operands[2]);
2004 operands[2] = gen_lowpart (SImode, operands[2]);
2008 ;; The zero extend of operand 2 means we can just copy the high part of
2009 ;; operand1 into operand0.
2011 [(set (match_operand:DI 0 "s_register_operand" "")
2013 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2014 (match_operand:DI 1 "s_register_operand" "")))]
2015 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2016 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2017 (set (match_dup 3) (match_dup 4))]
2020 operands[4] = gen_highpart (SImode, operands[1]);
2021 operands[3] = gen_highpart (SImode, operands[0]);
2022 operands[0] = gen_lowpart (SImode, operands[0]);
2023 operands[1] = gen_lowpart (SImode, operands[1]);
2027 ;; The zero extend of operand 2 means we can just copy the high part of
2028 ;; operand1 into operand0.
2030 [(set (match_operand:DI 0 "s_register_operand" "")
2032 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2033 (match_operand:DI 1 "s_register_operand" "")))]
2034 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2035 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2036 (set (match_dup 3) (match_dup 4))]
2039 operands[4] = gen_highpart (SImode, operands[1]);
2040 operands[3] = gen_highpart (SImode, operands[0]);
2041 operands[0] = gen_lowpart (SImode, operands[0]);
2042 operands[1] = gen_lowpart (SImode, operands[1]);
2046 (define_expand "anddi3"
2047 [(set (match_operand:DI 0 "s_register_operand" "")
2048 (and:DI (match_operand:DI 1 "s_register_operand" "")
2049 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2054 (define_insn "*anddi3_insn"
2055 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2056 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2057 (match_operand:DI 2 "s_register_operand" "r,r")))]
2058 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2060 [(set_attr "length" "8")]
2063 (define_insn_and_split "*anddi_zesidi_di"
2064 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2065 (and:DI (zero_extend:DI
2066 (match_operand:SI 2 "s_register_operand" "r,r"))
2067 (match_operand:DI 1 "s_register_operand" "0,r")))]
2070 "TARGET_32BIT && reload_completed"
2071 ; The zero extend of operand 2 clears the high word of the output
2073 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2074 (set (match_dup 3) (const_int 0))]
2077 operands[3] = gen_highpart (SImode, operands[0]);
2078 operands[0] = gen_lowpart (SImode, operands[0]);
2079 operands[1] = gen_lowpart (SImode, operands[1]);
2081 [(set_attr "length" "8")]
2084 (define_insn "*anddi_sesdi_di"
2085 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2086 (and:DI (sign_extend:DI
2087 (match_operand:SI 2 "s_register_operand" "r,r"))
2088 (match_operand:DI 1 "s_register_operand" "0,r")))]
2091 [(set_attr "length" "8")]
2094 (define_expand "andsi3"
2095 [(set (match_operand:SI 0 "s_register_operand" "")
2096 (and:SI (match_operand:SI 1 "s_register_operand" "")
2097 (match_operand:SI 2 "reg_or_int_operand" "")))]
2102 if (GET_CODE (operands[2]) == CONST_INT)
2104 if (INTVAL (operands[2]) == 255 && arm_arch6)
2106 operands[1] = convert_to_mode (QImode, operands[1], 1);
2107 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2111 arm_split_constant (AND, SImode, NULL_RTX,
2112 INTVAL (operands[2]), operands[0],
2114 optimize && can_create_pseudo_p ());
2119 else /* TARGET_THUMB1 */
2121 if (GET_CODE (operands[2]) != CONST_INT)
2123 rtx tmp = force_reg (SImode, operands[2]);
2124 if (rtx_equal_p (operands[0], operands[1]))
2128 operands[2] = operands[1];
2136 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2138 operands[2] = force_reg (SImode,
2139 GEN_INT (~INTVAL (operands[2])));
2141 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2146 for (i = 9; i <= 31; i++)
2148 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2150 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2154 else if ((((HOST_WIDE_INT) 1) << i) - 1
2155 == ~INTVAL (operands[2]))
2157 rtx shift = GEN_INT (i);
2158 rtx reg = gen_reg_rtx (SImode);
2160 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2161 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2167 operands[2] = force_reg (SImode, operands[2]);
2173 ; ??? Check split length for Thumb-2
2174 (define_insn_and_split "*arm_andsi3_insn"
2175 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2176 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2177 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2181 bic%?\\t%0, %1, #%B2
2184 && GET_CODE (operands[2]) == CONST_INT
2185 && !(const_ok_for_arm (INTVAL (operands[2]))
2186 || const_ok_for_arm (~INTVAL (operands[2])))"
2187 [(clobber (const_int 0))]
2189 arm_split_constant (AND, SImode, curr_insn,
2190 INTVAL (operands[2]), operands[0], operands[1], 0);
2193 [(set_attr "length" "4,4,16")
2194 (set_attr "predicable" "yes")]
2197 (define_insn "*thumb1_andsi3_insn"
2198 [(set (match_operand:SI 0 "register_operand" "=l")
2199 (and:SI (match_operand:SI 1 "register_operand" "%0")
2200 (match_operand:SI 2 "register_operand" "l")))]
2203 [(set_attr "length" "2")
2204 (set_attr "conds" "set")])
2206 (define_insn "*andsi3_compare0"
2207 [(set (reg:CC_NOOV CC_REGNUM)
2209 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2210 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2212 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2213 (and:SI (match_dup 1) (match_dup 2)))]
2217 bic%.\\t%0, %1, #%B2"
2218 [(set_attr "conds" "set")]
2221 (define_insn "*andsi3_compare0_scratch"
2222 [(set (reg:CC_NOOV CC_REGNUM)
2224 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2225 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2227 (clobber (match_scratch:SI 2 "=X,r"))]
2231 bic%.\\t%2, %0, #%B1"
2232 [(set_attr "conds" "set")]
2235 (define_insn "*zeroextractsi_compare0_scratch"
2236 [(set (reg:CC_NOOV CC_REGNUM)
2237 (compare:CC_NOOV (zero_extract:SI
2238 (match_operand:SI 0 "s_register_operand" "r")
2239 (match_operand 1 "const_int_operand" "n")
2240 (match_operand 2 "const_int_operand" "n"))
2243 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2244 && INTVAL (operands[1]) > 0
2245 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2246 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2248 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2249 << INTVAL (operands[2]));
2250 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2253 [(set_attr "conds" "set")]
2256 (define_insn_and_split "*ne_zeroextractsi"
2257 [(set (match_operand:SI 0 "s_register_operand" "=r")
2258 (ne:SI (zero_extract:SI
2259 (match_operand:SI 1 "s_register_operand" "r")
2260 (match_operand:SI 2 "const_int_operand" "n")
2261 (match_operand:SI 3 "const_int_operand" "n"))
2263 (clobber (reg:CC CC_REGNUM))]
2265 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2266 && INTVAL (operands[2]) > 0
2267 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2268 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2271 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2272 && INTVAL (operands[2]) > 0
2273 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2274 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2275 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2276 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2278 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2280 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2281 (match_dup 0) (const_int 1)))]
2283 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2284 << INTVAL (operands[3]));
2286 [(set_attr "conds" "clob")
2287 (set (attr "length")
2288 (if_then_else (eq_attr "is_thumb" "yes")
2293 (define_insn_and_split "*ne_zeroextractsi_shifted"
2294 [(set (match_operand:SI 0 "s_register_operand" "=r")
2295 (ne:SI (zero_extract:SI
2296 (match_operand:SI 1 "s_register_operand" "r")
2297 (match_operand:SI 2 "const_int_operand" "n")
2300 (clobber (reg:CC CC_REGNUM))]
2304 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2305 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2307 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2309 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2310 (match_dup 0) (const_int 1)))]
2312 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2314 [(set_attr "conds" "clob")
2315 (set_attr "length" "8")]
2318 (define_insn_and_split "*ite_ne_zeroextractsi"
2319 [(set (match_operand:SI 0 "s_register_operand" "=r")
2320 (if_then_else:SI (ne (zero_extract:SI
2321 (match_operand:SI 1 "s_register_operand" "r")
2322 (match_operand:SI 2 "const_int_operand" "n")
2323 (match_operand:SI 3 "const_int_operand" "n"))
2325 (match_operand:SI 4 "arm_not_operand" "rIK")
2327 (clobber (reg:CC CC_REGNUM))]
2329 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2330 && INTVAL (operands[2]) > 0
2331 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2332 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2333 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2336 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2337 && INTVAL (operands[2]) > 0
2338 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2339 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2340 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2341 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2342 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2344 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2346 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2347 (match_dup 0) (match_dup 4)))]
2349 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2350 << INTVAL (operands[3]));
2352 [(set_attr "conds" "clob")
2353 (set_attr "length" "8")]
2356 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2357 [(set (match_operand:SI 0 "s_register_operand" "=r")
2358 (if_then_else:SI (ne (zero_extract:SI
2359 (match_operand:SI 1 "s_register_operand" "r")
2360 (match_operand:SI 2 "const_int_operand" "n")
2363 (match_operand:SI 3 "arm_not_operand" "rIK")
2365 (clobber (reg:CC CC_REGNUM))]
2366 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2368 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2369 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2370 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2372 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2374 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2375 (match_dup 0) (match_dup 3)))]
2377 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2379 [(set_attr "conds" "clob")
2380 (set_attr "length" "8")]
2384 [(set (match_operand:SI 0 "s_register_operand" "")
2385 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2386 (match_operand:SI 2 "const_int_operand" "")
2387 (match_operand:SI 3 "const_int_operand" "")))
2388 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2390 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2391 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2393 HOST_WIDE_INT temp = INTVAL (operands[2]);
2395 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2396 operands[3] = GEN_INT (32 - temp);
2400 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
2402 [(set (match_operand:SI 0 "s_register_operand" "")
2403 (match_operator:SI 1 "shiftable_operator"
2404 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2405 (match_operand:SI 3 "const_int_operand" "")
2406 (match_operand:SI 4 "const_int_operand" ""))
2407 (match_operand:SI 5 "s_register_operand" "")]))
2408 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2410 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2413 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2416 HOST_WIDE_INT temp = INTVAL (operands[3]);
2418 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2419 operands[4] = GEN_INT (32 - temp);
2424 [(set (match_operand:SI 0 "s_register_operand" "")
2425 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2426 (match_operand:SI 2 "const_int_operand" "")
2427 (match_operand:SI 3 "const_int_operand" "")))]
2429 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2430 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2432 HOST_WIDE_INT temp = INTVAL (operands[2]);
2434 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2435 operands[3] = GEN_INT (32 - temp);
2440 [(set (match_operand:SI 0 "s_register_operand" "")
2441 (match_operator:SI 1 "shiftable_operator"
2442 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2443 (match_operand:SI 3 "const_int_operand" "")
2444 (match_operand:SI 4 "const_int_operand" ""))
2445 (match_operand:SI 5 "s_register_operand" "")]))
2446 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2448 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2451 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2454 HOST_WIDE_INT temp = INTVAL (operands[3]);
2456 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2457 operands[4] = GEN_INT (32 - temp);
2461 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2462 ;;; represented by the bitfield, then this will produce incorrect results.
2463 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2464 ;;; which have a real bit-field insert instruction, the truncation happens
2465 ;;; in the bit-field insert instruction itself. Since arm does not have a
2466 ;;; bit-field insert instruction, we would have to emit code here to truncate
2467 ;;; the value before we insert. This loses some of the advantage of having
2468 ;;; this insv pattern, so this pattern needs to be reevaluated.
2470 (define_expand "insv"
2471 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2472 (match_operand:SI 1 "general_operand" "")
2473 (match_operand:SI 2 "general_operand" ""))
2474 (match_operand:SI 3 "reg_or_int_operand" ""))]
2475 "TARGET_ARM || arm_arch_thumb2"
2478 int start_bit = INTVAL (operands[2]);
2479 int width = INTVAL (operands[1]);
2480 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2481 rtx target, subtarget;
2483 if (arm_arch_thumb2)
2485 bool use_bfi = TRUE;
2487 if (GET_CODE (operands[3]) == CONST_INT)
2489 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2493 emit_insn (gen_insv_zero (operands[0], operands[1],
2498 /* See if the set can be done with a single orr instruction. */
2499 if (val == mask && const_ok_for_arm (val << start_bit))
2505 if (GET_CODE (operands[3]) != REG)
2506 operands[3] = force_reg (SImode, operands[3]);
2508 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2514 target = copy_rtx (operands[0]);
2515 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2516 subreg as the final target. */
2517 if (GET_CODE (target) == SUBREG)
2519 subtarget = gen_reg_rtx (SImode);
2520 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2521 < GET_MODE_SIZE (SImode))
2522 target = SUBREG_REG (target);
2527 if (GET_CODE (operands[3]) == CONST_INT)
2529 /* Since we are inserting a known constant, we may be able to
2530 reduce the number of bits that we have to clear so that
2531 the mask becomes simple. */
2532 /* ??? This code does not check to see if the new mask is actually
2533 simpler. It may not be. */
2534 rtx op1 = gen_reg_rtx (SImode);
2535 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2536 start of this pattern. */
2537 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2538 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2540 emit_insn (gen_andsi3 (op1, operands[0],
2541 gen_int_mode (~mask2, SImode)));
2542 emit_insn (gen_iorsi3 (subtarget, op1,
2543 gen_int_mode (op3_value << start_bit, SImode)));
2545 else if (start_bit == 0
2546 && !(const_ok_for_arm (mask)
2547 || const_ok_for_arm (~mask)))
2549 /* A Trick, since we are setting the bottom bits in the word,
2550 we can shift operand[3] up, operand[0] down, OR them together
2551 and rotate the result back again. This takes 3 insns, and
2552 the third might be mergeable into another op. */
2553 /* The shift up copes with the possibility that operand[3] is
2554 wider than the bitfield. */
2555 rtx op0 = gen_reg_rtx (SImode);
2556 rtx op1 = gen_reg_rtx (SImode);
2558 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2559 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2560 emit_insn (gen_iorsi3 (op1, op1, op0));
2561 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2563 else if ((width + start_bit == 32)
2564 && !(const_ok_for_arm (mask)
2565 || const_ok_for_arm (~mask)))
2567 /* Similar trick, but slightly less efficient. */
2569 rtx op0 = gen_reg_rtx (SImode);
2570 rtx op1 = gen_reg_rtx (SImode);
2572 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2573 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2574 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2575 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2579 rtx op0 = gen_int_mode (mask, SImode);
2580 rtx op1 = gen_reg_rtx (SImode);
2581 rtx op2 = gen_reg_rtx (SImode);
2583 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2585 rtx tmp = gen_reg_rtx (SImode);
2587 emit_insn (gen_movsi (tmp, op0));
2591 /* Mask out any bits in operand[3] that are not needed. */
2592 emit_insn (gen_andsi3 (op1, operands[3], op0));
2594 if (GET_CODE (op0) == CONST_INT
2595 && (const_ok_for_arm (mask << start_bit)
2596 || const_ok_for_arm (~(mask << start_bit))))
2598 op0 = gen_int_mode (~(mask << start_bit), SImode);
2599 emit_insn (gen_andsi3 (op2, operands[0], op0));
2603 if (GET_CODE (op0) == CONST_INT)
2605 rtx tmp = gen_reg_rtx (SImode);
2607 emit_insn (gen_movsi (tmp, op0));
2612 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2614 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2618 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2620 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2623 if (subtarget != target)
2625 /* If TARGET is still a SUBREG, then it must be wider than a word,
2626 so we must be careful only to set the subword we were asked to. */
2627 if (GET_CODE (target) == SUBREG)
2628 emit_move_insn (target, subtarget);
2630 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2637 (define_insn "insv_zero"
2638 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2639 (match_operand:SI 1 "const_int_operand" "M")
2640 (match_operand:SI 2 "const_int_operand" "M"))
2644 [(set_attr "length" "4")
2645 (set_attr "predicable" "yes")]
2648 (define_insn "insv_t2"
2649 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2650 (match_operand:SI 1 "const_int_operand" "M")
2651 (match_operand:SI 2 "const_int_operand" "M"))
2652 (match_operand:SI 3 "s_register_operand" "r"))]
2654 "bfi%?\t%0, %3, %2, %1"
2655 [(set_attr "length" "4")
2656 (set_attr "predicable" "yes")]
2659 ; constants for op 2 will never be given to these patterns.
2660 (define_insn_and_split "*anddi_notdi_di"
2661 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2662 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2663 (match_operand:DI 2 "s_register_operand" "r,0")))]
2666 "TARGET_32BIT && reload_completed
2667 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2668 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2669 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2670 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2673 operands[3] = gen_highpart (SImode, operands[0]);
2674 operands[0] = gen_lowpart (SImode, operands[0]);
2675 operands[4] = gen_highpart (SImode, operands[1]);
2676 operands[1] = gen_lowpart (SImode, operands[1]);
2677 operands[5] = gen_highpart (SImode, operands[2]);
2678 operands[2] = gen_lowpart (SImode, operands[2]);
2680 [(set_attr "length" "8")
2681 (set_attr "predicable" "yes")]
2684 (define_insn_and_split "*anddi_notzesidi_di"
2685 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2686 (and:DI (not:DI (zero_extend:DI
2687 (match_operand:SI 2 "s_register_operand" "r,r")))
2688 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2691 bic%?\\t%Q0, %Q1, %2
2693 ; (not (zero_extend ...)) allows us to just copy the high word from
2694 ; operand1 to operand0.
2697 && operands[0] != operands[1]"
2698 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2699 (set (match_dup 3) (match_dup 4))]
2702 operands[3] = gen_highpart (SImode, operands[0]);
2703 operands[0] = gen_lowpart (SImode, operands[0]);
2704 operands[4] = gen_highpart (SImode, operands[1]);
2705 operands[1] = gen_lowpart (SImode, operands[1]);
2707 [(set_attr "length" "4,8")
2708 (set_attr "predicable" "yes")]
2711 (define_insn_and_split "*anddi_notsesidi_di"
2712 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2713 (and:DI (not:DI (sign_extend:DI
2714 (match_operand:SI 2 "s_register_operand" "r,r")))
2715 (match_operand:DI 1 "s_register_operand" "0,r")))]
2718 "TARGET_32BIT && reload_completed"
2719 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2720 (set (match_dup 3) (and:SI (not:SI
2721 (ashiftrt:SI (match_dup 2) (const_int 31)))
2725 operands[3] = gen_highpart (SImode, operands[0]);
2726 operands[0] = gen_lowpart (SImode, operands[0]);
2727 operands[4] = gen_highpart (SImode, operands[1]);
2728 operands[1] = gen_lowpart (SImode, operands[1]);
2730 [(set_attr "length" "8")
2731 (set_attr "predicable" "yes")]
;; op0 = op1 AND NOT op2, implemented with a single BIC (bit-clear)
;; instruction.  Note the reversed operand numbering in the RTL: the
;; complemented operand is operand 2, matching BIC's "Rd, Rn, Rm" order.
;; Predicable, so if-conversion can conditionalize it.
;; NOTE(review): the insn condition line (original line 2738) is not
;; visible in this extract — presumably TARGET_32BIT; confirm in the file.
2734 (define_insn "andsi_notsi_si"
2735 [(set (match_operand:SI 0 "s_register_operand" "=r")
2736 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2737 (match_operand:SI 1 "s_register_operand" "r")))]
2739 "bic%?\\t%0, %1, %2"
2740 [(set_attr "predicable" "yes")]
2743 (define_insn "thumb1_bicsi3"
2744 [(set (match_operand:SI 0 "register_operand" "=l")
2745 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2746 (match_operand:SI 2 "register_operand" "0")))]
2749 [(set_attr "length" "2")
2750 (set_attr "conds" "set")])
;; op0 = op1 AND NOT (op2 shifted by op3): BIC with a shifted register
;; operand.  %S4 in the template emits the shift operator and amount taken
;; from the matched shift_operator (operand 4).  The "type" attribute
;; distinguishes an immediate shift amount (alu_shift) from a register
;; shift amount (alu_shift_reg) for scheduling purposes.
;; NOTE(review): the insn condition line (original line 2758) is not
;; visible in this extract.
2752 (define_insn "andsi_not_shiftsi_si"
2753 [(set (match_operand:SI 0 "s_register_operand" "=r")
2754 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2755 [(match_operand:SI 2 "s_register_operand" "r")
2756 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2757 (match_operand:SI 1 "s_register_operand" "r")))]
2759 "bic%?\\t%0, %1, %2%S4"
2760 [(set_attr "predicable" "yes")
2761 (set_attr "shift" "2")
2762 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2763 (const_string "alu_shift")
2764 (const_string "alu_shift_reg")))]
2767 (define_insn "*andsi_notsi_si_compare0"
2768 [(set (reg:CC_NOOV CC_REGNUM)
2770 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2771 (match_operand:SI 1 "s_register_operand" "r"))
2773 (set (match_operand:SI 0 "s_register_operand" "=r")
2774 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2776 "bic%.\\t%0, %1, %2"
2777 [(set_attr "conds" "set")]
2780 (define_insn "*andsi_notsi_si_compare0_scratch"
2781 [(set (reg:CC_NOOV CC_REGNUM)
2783 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2784 (match_operand:SI 1 "s_register_operand" "r"))
2786 (clobber (match_scratch:SI 0 "=r"))]
2788 "bic%.\\t%0, %1, %2"
2789 [(set_attr "conds" "set")]
2792 (define_expand "iordi3"
2793 [(set (match_operand:DI 0 "s_register_operand" "")
2794 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2795 (match_operand:DI 2 "neon_logic_op2" "")))]
2800 (define_insn "*iordi3_insn"
2801 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2802 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2803 (match_operand:DI 2 "s_register_operand" "r,r")))]
2804 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2806 [(set_attr "length" "8")
2807 (set_attr "predicable" "yes")]
2810 (define_insn "*iordi_zesidi_di"
2811 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2812 (ior:DI (zero_extend:DI
2813 (match_operand:SI 2 "s_register_operand" "r,r"))
2814 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2817 orr%?\\t%Q0, %Q1, %2
2819 [(set_attr "length" "4,8")
2820 (set_attr "predicable" "yes")]
2823 (define_insn "*iordi_sesidi_di"
2824 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2825 (ior:DI (sign_extend:DI
2826 (match_operand:SI 2 "s_register_operand" "r,r"))
2827 (match_operand:DI 1 "s_register_operand" "0,r")))]
2830 [(set_attr "length" "8")
2831 (set_attr "predicable" "yes")]
2834 (define_expand "iorsi3"
2835 [(set (match_operand:SI 0 "s_register_operand" "")
2836 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2837 (match_operand:SI 2 "reg_or_int_operand" "")))]
2840 if (GET_CODE (operands[2]) == CONST_INT)
2844 arm_split_constant (IOR, SImode, NULL_RTX,
2845 INTVAL (operands[2]), operands[0], operands[1],
2846 optimize && can_create_pseudo_p ());
2849 else /* TARGET_THUMB1 */
2851 rtx tmp = force_reg (SImode, operands[2]);
2852 if (rtx_equal_p (operands[0], operands[1]))
2856 operands[2] = operands[1];
2864 (define_insn_and_split "*iorsi3_insn"
2865 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2866 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2867 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2871 orn%?\\t%0, %1, #%B2
2874 && GET_CODE (operands[2]) == CONST_INT
2875 && !(const_ok_for_arm (INTVAL (operands[2]))
2876 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2877 [(clobber (const_int 0))]
2879 arm_split_constant (IOR, SImode, curr_insn,
2880 INTVAL (operands[2]), operands[0], operands[1], 0);
2883 [(set_attr "length" "4,4,16")
2884 (set_attr "arch" "32,t2,32")
2885 (set_attr "predicable" "yes")])
2887 (define_insn "*thumb1_iorsi3_insn"
2888 [(set (match_operand:SI 0 "register_operand" "=l")
2889 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2890 (match_operand:SI 2 "register_operand" "l")))]
2893 [(set_attr "length" "2")
2894 (set_attr "conds" "set")])
2897 [(match_scratch:SI 3 "r")
2898 (set (match_operand:SI 0 "arm_general_register_operand" "")
2899 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2900 (match_operand:SI 2 "const_int_operand" "")))]
2902 && !const_ok_for_arm (INTVAL (operands[2]))
2903 && const_ok_for_arm (~INTVAL (operands[2]))"
2904 [(set (match_dup 3) (match_dup 2))
2905 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2909 (define_insn "*iorsi3_compare0"
2910 [(set (reg:CC_NOOV CC_REGNUM)
2911 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2912 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2914 (set (match_operand:SI 0 "s_register_operand" "=r")
2915 (ior:SI (match_dup 1) (match_dup 2)))]
2917 "orr%.\\t%0, %1, %2"
2918 [(set_attr "conds" "set")]
2921 (define_insn "*iorsi3_compare0_scratch"
2922 [(set (reg:CC_NOOV CC_REGNUM)
2923 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2924 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2926 (clobber (match_scratch:SI 0 "=r"))]
2928 "orr%.\\t%0, %1, %2"
2929 [(set_attr "conds" "set")]
2932 (define_expand "xordi3"
2933 [(set (match_operand:DI 0 "s_register_operand" "")
2934 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2935 (match_operand:DI 2 "s_register_operand" "")))]
2940 (define_insn "*xordi3_insn"
2941 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2942 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2943 (match_operand:DI 2 "s_register_operand" "r,r")))]
2944 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2946 [(set_attr "length" "8")
2947 (set_attr "predicable" "yes")]
2950 (define_insn "*xordi_zesidi_di"
2951 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2952 (xor:DI (zero_extend:DI
2953 (match_operand:SI 2 "s_register_operand" "r,r"))
2954 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2957 eor%?\\t%Q0, %Q1, %2
2959 [(set_attr "length" "4,8")
2960 (set_attr "predicable" "yes")]
2963 (define_insn "*xordi_sesidi_di"
2964 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2965 (xor:DI (sign_extend:DI
2966 (match_operand:SI 2 "s_register_operand" "r,r"))
2967 (match_operand:DI 1 "s_register_operand" "0,r")))]
2970 [(set_attr "length" "8")
2971 (set_attr "predicable" "yes")]
2974 (define_expand "xorsi3"
2975 [(set (match_operand:SI 0 "s_register_operand" "")
2976 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2977 (match_operand:SI 2 "reg_or_int_operand" "")))]
2979 "if (GET_CODE (operands[2]) == CONST_INT)
2983 arm_split_constant (XOR, SImode, NULL_RTX,
2984 INTVAL (operands[2]), operands[0], operands[1],
2985 optimize && can_create_pseudo_p ());
2988 else /* TARGET_THUMB1 */
2990 rtx tmp = force_reg (SImode, operands[2]);
2991 if (rtx_equal_p (operands[0], operands[1]))
2995 operands[2] = operands[1];
;; SImode exclusive-or: a single predicable EOR.  Operand 2 may be a
;; register or an immediate valid for ARM data-processing instructions
;; (constraint "I").
;; NOTE(review): the insn condition line (original line 3006) is not
;; visible in this extract.
3002 (define_insn "*arm_xorsi3"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r")
3004 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3005 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
3007 "eor%?\\t%0, %1, %2"
3008 [(set_attr "predicable" "yes")]
3011 (define_insn "*thumb1_xorsi3_insn"
3012 [(set (match_operand:SI 0 "register_operand" "=l")
3013 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3014 (match_operand:SI 2 "register_operand" "l")))]
3017 [(set_attr "length" "2")
3018 (set_attr "conds" "set")])
3020 (define_insn "*xorsi3_compare0"
3021 [(set (reg:CC_NOOV CC_REGNUM)
3022 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3023 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3025 (set (match_operand:SI 0 "s_register_operand" "=r")
3026 (xor:SI (match_dup 1) (match_dup 2)))]
3028 "eor%.\\t%0, %1, %2"
3029 [(set_attr "conds" "set")]
3032 (define_insn "*xorsi3_compare0_scratch"
3033 [(set (reg:CC_NOOV CC_REGNUM)
3034 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3035 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3039 [(set_attr "conds" "set")]
3042 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3043 ; (NOT D) we can sometimes merge the final NOT into one of the following
3047 [(set (match_operand:SI 0 "s_register_operand" "")
3048 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3049 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3050 (match_operand:SI 3 "arm_rhs_operand" "")))
3051 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3053 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3054 (not:SI (match_dup 3))))
3055 (set (match_dup 0) (not:SI (match_dup 4)))]
3059 (define_insn "*andsi_iorsi3_notsi"
3060 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3061 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3062 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3063 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3065 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3066 [(set_attr "length" "8")
3067 (set_attr "ce_count" "2")
3068 (set_attr "predicable" "yes")]
3071 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3072 ; insns are available?
;; Four splitters rewriting a logical op whose operands are a bitfield
;; extract and a (shift op reg) subexpression into two plain shifts plus the
;; logical ops, using scratch operand 8; they require the extract width to be
;; exactly 32 minus the shift count (operands[3] == 32 - operands[6]).
;; Variants 1/2 handle zero_extract with lshiftrt (either operand order);
;; variants 3/4 handle sign_extract with ashiftrt.
;; NOTE(review): the "(define_split" header lines and several pattern/
;; condition lines are missing from this view (numbering gaps) -- verify
;; against upstream arm.md before editing.
3074 [(set (match_operand:SI 0 "s_register_operand" "")
3075 (match_operator:SI 1 "logical_binary_operator"
3076 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3077 (match_operand:SI 3 "const_int_operand" "")
3078 (match_operand:SI 4 "const_int_operand" ""))
3079 (match_operator:SI 9 "logical_binary_operator"
3080 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3081 (match_operand:SI 6 "const_int_operand" ""))
3082 (match_operand:SI 7 "s_register_operand" "")])]))
3083 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3085 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3086 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3089 [(ashift:SI (match_dup 2) (match_dup 4))
3093 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3096 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Second splitter: same as above with the extract as the second operand of
;; the outer logical operator.
3100 [(set (match_operand:SI 0 "s_register_operand" "")
3101 (match_operator:SI 1 "logical_binary_operator"
3102 [(match_operator:SI 9 "logical_binary_operator"
3103 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3104 (match_operand:SI 6 "const_int_operand" ""))
3105 (match_operand:SI 7 "s_register_operand" "")])
3106 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3107 (match_operand:SI 3 "const_int_operand" "")
3108 (match_operand:SI 4 "const_int_operand" ""))]))
3109 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3111 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3112 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3115 [(ashift:SI (match_dup 2) (match_dup 4))
3119 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3122 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Third splitter: sign_extract / arithmetic-shift-right variant.
3126 [(set (match_operand:SI 0 "s_register_operand" "")
3127 (match_operator:SI 1 "logical_binary_operator"
3128 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3129 (match_operand:SI 3 "const_int_operand" "")
3130 (match_operand:SI 4 "const_int_operand" ""))
3131 (match_operator:SI 9 "logical_binary_operator"
3132 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3133 (match_operand:SI 6 "const_int_operand" ""))
3134 (match_operand:SI 7 "s_register_operand" "")])]))
3135 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3137 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3138 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3141 [(ashift:SI (match_dup 2) (match_dup 4))
3145 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3148 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Fourth splitter: sign_extract variant with operands in the other order.
3152 [(set (match_operand:SI 0 "s_register_operand" "")
3153 (match_operator:SI 1 "logical_binary_operator"
3154 [(match_operator:SI 9 "logical_binary_operator"
3155 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3156 (match_operand:SI 6 "const_int_operand" ""))
3157 (match_operand:SI 7 "s_register_operand" "")])
3158 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3159 (match_operand:SI 3 "const_int_operand" "")
3160 (match_operand:SI 4 "const_int_operand" ""))]))
3161 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3163 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3164 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3167 [(ashift:SI (match_dup 2) (match_dup 4))
3171 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3174 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3178 ;; Minimum and maximum insns
;; Signed maximum.  The expander special-cases max(x,0) and max(x,-1),
;; which are matched below by the clobber-free *smax_0 / *smax_m1 insns;
;; the general case keeps a CC clobber for the cmp/conditional-mov form.
;; NOTE(review): several interior lines (braces, DONE;, closing parens) are
;; missing from this view -- verify against upstream arm.md.
3180 (define_expand "smaxsi3"
3182 (set (match_operand:SI 0 "s_register_operand" "")
3183 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3184 (match_operand:SI 2 "arm_rhs_operand" "")))
3185 (clobber (reg:CC CC_REGNUM))])]
3188 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3190 /* No need for a clobber of the condition code register here. */
3191 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3192 gen_rtx_SMAX (SImode, operands[1],
;; max(x,0): clear x where its own sign bit (x asr #31) is set.
3198 (define_insn "*smax_0"
3199 [(set (match_operand:SI 0 "s_register_operand" "=r")
3200 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3203 "bic%?\\t%0, %1, %1, asr #31"
3204 [(set_attr "predicable" "yes")]
;; max(x,-1): OR in the sign-extended sign bit.
3207 (define_insn "*smax_m1"
3208 [(set (match_operand:SI 0 "s_register_operand" "=r")
3209 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3212 "orr%?\\t%0, %1, %1, asr #31"
3213 [(set_attr "predicable" "yes")]
;; General signed max: compare then conditional move(s); 2 or 3 insns
;; depending on whether operand 0 already aliases operand 1.
3216 (define_insn "*arm_smax_insn"
3217 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3218 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3219 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3220 (clobber (reg:CC CC_REGNUM))]
3223 cmp\\t%1, %2\;movlt\\t%0, %2
3224 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3225 [(set_attr "conds" "clob")
3226 (set_attr "length" "8,12")]
;; Signed minimum, mirroring smaxsi3: min(x,0) gets a clobber-free AND-based
;; insn; the general case compares and conditionally moves.
;; NOTE(review): interior lines missing from this view -- verify upstream.
3229 (define_expand "sminsi3"
3231 (set (match_operand:SI 0 "s_register_operand" "")
3232 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3233 (match_operand:SI 2 "arm_rhs_operand" "")))
3234 (clobber (reg:CC CC_REGNUM))])]
3237 if (operands[2] == const0_rtx)
3239 /* No need for a clobber of the condition code register here. */
3240 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3241 gen_rtx_SMIN (SImode, operands[1],
;; min(x,0): keep x only where its sign bit is set (x & (x asr #31)).
3247 (define_insn "*smin_0"
3248 [(set (match_operand:SI 0 "s_register_operand" "=r")
3249 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3252 "and%?\\t%0, %1, %1, asr #31"
3253 [(set_attr "predicable" "yes")]
3256 (define_insn "*arm_smin_insn"
3257 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3258 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3259 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3260 (clobber (reg:CC CC_REGNUM))]
3263 cmp\\t%1, %2\;movge\\t%0, %2
3264 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3265 [(set_attr "conds" "clob")
3266 (set_attr "length" "8,12")]
;; Unsigned maximum and minimum: compare then conditional moves using the
;; unsigned condition codes (cc/cs).  Three alternatives depending on which
;; input, if any, aliases the output (2, 2, or 3 insns).
;; NOTE(review): expander conditions/bodies missing from this view.
3269 (define_expand "umaxsi3"
3271 (set (match_operand:SI 0 "s_register_operand" "")
3272 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3273 (match_operand:SI 2 "arm_rhs_operand" "")))
3274 (clobber (reg:CC CC_REGNUM))])]
3279 (define_insn "*arm_umaxsi3"
3280 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3281 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3282 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3283 (clobber (reg:CC CC_REGNUM))]
3286 cmp\\t%1, %2\;movcc\\t%0, %2
3287 cmp\\t%1, %2\;movcs\\t%0, %1
3288 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3289 [(set_attr "conds" "clob")
3290 (set_attr "length" "8,8,12")]
3293 (define_expand "uminsi3"
3295 (set (match_operand:SI 0 "s_register_operand" "")
3296 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3297 (match_operand:SI 2 "arm_rhs_operand" "")))
3298 (clobber (reg:CC CC_REGNUM))])]
3303 (define_insn "*arm_uminsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3305 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3306 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3307 (clobber (reg:CC CC_REGNUM))]
3310 cmp\\t%1, %2\;movcs\\t%0, %2
3311 cmp\\t%1, %2\;movcc\\t%0, %1
3312 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3313 [(set_attr "conds" "clob")
3314 (set_attr "length" "8,8,12")]
;; Store min/max(op1,op2) directly to memory: cmp then two conditional
;; stores; an IT(E) block is emitted first (visible at 3329) -- presumably
;; only for Thumb-2, where conditional execution needs it (TODO confirm;
;; the surrounding TARGET_THUMB2 test line is missing from this view).
3317 (define_insn "*store_minmaxsi"
3318 [(set (match_operand:SI 0 "memory_operand" "=m")
3319 (match_operator:SI 3 "minmax_operator"
3320 [(match_operand:SI 1 "s_register_operand" "r")
3321 (match_operand:SI 2 "s_register_operand" "r")]))
3322 (clobber (reg:CC CC_REGNUM))]
3325 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3326 operands[1], operands[2]);
3327 output_asm_insn (\"cmp\\t%1, %2\", operands);
3329 output_asm_insn (\"ite\t%d3\", operands);
3330 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3331 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3334 [(set_attr "conds" "clob")
3335 (set (attr "length")
3336 (if_then_else (eq_attr "is_thumb" "yes")
3339 (set_attr "type" "store1")]
3342 ; Reject the frame pointer in operand[1], since reloading this after
3343 ; it has been eliminated can cause carnage.
;; Shiftable-op combined with a min/max subexpression:
;;   op0 = op4(minmax(op2, op3), op1), emitted as cmp + two predicated ALU
;; ops.  The frame pointer is rejected in operand 1 (see comment above the
;; pattern) because reloading after elimination would be unsafe.
;; NOTE(review): interior lines (braces, some output calls, length values)
;; are missing from this view -- verify upstream before editing.
3344 (define_insn "*minmax_arithsi"
3345 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3346 (match_operator:SI 4 "shiftable_operator"
3347 [(match_operator:SI 5 "minmax_operator"
3348 [(match_operand:SI 2 "s_register_operand" "r,r")
3349 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3350 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3351 (clobber (reg:CC CC_REGNUM))]
3352 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3355 enum rtx_code code = GET_CODE (operands[4]);
;; Visible test distinguishes the cheap case (alt 0, op3 == 0, commutative-
;; friendly opcode) from the general one.
3358 if (which_alternative != 0 || operands[3] != const0_rtx
3359 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3364 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3365 operands[2], operands[3]);
3366 output_asm_insn (\"cmp\\t%2, %3\", operands);
3370 output_asm_insn (\"ite\\t%d5\", operands);
3372 output_asm_insn (\"it\\t%d5\", operands);
3374 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3376 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3379 [(set_attr "conds" "clob")
3380 (set (attr "length")
3381 (if_then_else (eq_attr "is_thumb" "yes")
3387 ;; Shift and rotation insns
;; 64-bit left shift.  Shift-by-1 gets a dedicated two-insn pattern; the
;; carry out of the low-word shift feeds the high word via ADC.
;; NOTE(review): interior expander lines (braces, DONE;, FAIL paths) are
;; missing from this view -- verify upstream.
3389 (define_expand "ashldi3"
3390 [(set (match_operand:DI 0 "s_register_operand" "")
3391 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3392 (match_operand:SI 2 "reg_or_int_operand" "")))]
3395 if (GET_CODE (operands[2]) == CONST_INT)
3397 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3399 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3402 /* Ideally we shouldn't fail here if we could know that operands[1]
3403 ends up already living in an iwmmxt register. Otherwise it's
3404 cheaper to have the alternate code being generated than moving
3405 values to iwmmxt regs and back. */
3408 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; Low word shifted left with flags set; high word doubled plus carry-in.
3413 (define_insn "arm_ashldi3_1bit"
3414 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3415 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3417 (clobber (reg:CC CC_REGNUM))]
3419 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3420 [(set_attr "conds" "clob")
3421 (set_attr "length" "8")]
;; 32-bit left shift: a constant shift count greater than 31 is folded to a
;; plain move of zero (well-defined result, unlike the hardware behavior).
3424 (define_expand "ashlsi3"
3425 [(set (match_operand:SI 0 "s_register_operand" "")
3426 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3427 (match_operand:SI 2 "arm_rhs_operand" "")))]
3430 if (GET_CODE (operands[2]) == CONST_INT
3431 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3433 emit_insn (gen_movsi (operands[0], const0_rtx))
;; NOTE(review): the line above is transcribed from a view with missing
;; lines; the closing of the expander is not visible here.
3439 (define_insn "*thumb1_ashlsi3"
3440 [(set (match_operand:SI 0 "register_operand" "=l,l")
3441 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3442 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3445 [(set_attr "length" "2")
3446 (set_attr "conds" "set")])
;; 64-bit arithmetic right shift.  Shift-by-1 uses the carry chain: high
;; word asr #1 sets C with the shifted-out bit, which RRX rotates into the
;; low word.
;; NOTE(review): interior expander lines missing from this view.
3448 (define_expand "ashrdi3"
3449 [(set (match_operand:DI 0 "s_register_operand" "")
3450 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3451 (match_operand:SI 2 "reg_or_int_operand" "")))]
3454 if (GET_CODE (operands[2]) == CONST_INT)
3456 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3458 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3461 /* Ideally we shouldn't fail here if we could know that operands[1]
3462 ends up already living in an iwmmxt register. Otherwise it's
3463 cheaper to have the alternate code being generated than moving
3464 values to iwmmxt regs and back. */
3467 else if (!TARGET_REALLY_IWMMXT)
3472 (define_insn "arm_ashrdi3_1bit"
3473 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3474 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3476 (clobber (reg:CC CC_REGNUM))]
3478 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3479 [(set_attr "conds" "clob")
3480 (set_attr "insn" "mov")
3481 (set_attr "length" "8")]
;; 32-bit arithmetic right shift: constant counts above 31 are clamped to
;; 31 (sign-fill), giving a well-defined result.
3484 (define_expand "ashrsi3"
3485 [(set (match_operand:SI 0 "s_register_operand" "")
3486 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3487 (match_operand:SI 2 "arm_rhs_operand" "")))]
3490 if (GET_CODE (operands[2]) == CONST_INT
3491 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3492 operands[2] = GEN_INT (31);
3496 (define_insn "*thumb1_ashrsi3"
3497 [(set (match_operand:SI 0 "register_operand" "=l,l")
3498 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3499 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3502 [(set_attr "length" "2")
3503 (set_attr "conds" "set")])
;; 64-bit logical right shift; by-1 case mirrors ashrdi3 but with LSR on
;; the high word (zero fill) and RRX carrying into the low word.
;; NOTE(review): interior expander lines missing from this view.
3505 (define_expand "lshrdi3"
3506 [(set (match_operand:DI 0 "s_register_operand" "")
3507 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3508 (match_operand:SI 2 "reg_or_int_operand" "")))]
3511 if (GET_CODE (operands[2]) == CONST_INT)
3513 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3515 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3518 /* Ideally we shouldn't fail here if we could know that operands[1]
3519 ends up already living in an iwmmxt register. Otherwise it's
3520 cheaper to have the alternate code being generated than moving
3521 values to iwmmxt regs and back. */
3524 else if (!TARGET_REALLY_IWMMXT)
3529 (define_insn "arm_lshrdi3_1bit"
3530 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3531 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3533 (clobber (reg:CC CC_REGNUM))]
3535 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3536 [(set_attr "conds" "clob")
3537 (set_attr "insn" "mov")
3538 (set_attr "length" "8")]
;; 32-bit logical right shift: constant counts above 31 yield zero.
3541 (define_expand "lshrsi3"
3542 [(set (match_operand:SI 0 "s_register_operand" "")
3543 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3544 (match_operand:SI 2 "arm_rhs_operand" "")))]
3547 if (GET_CODE (operands[2]) == CONST_INT
3548 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3550 emit_insn (gen_movsi (operands[0], const0_rtx));
3556 (define_insn "*thumb1_lshrsi3"
3557 [(set (match_operand:SI 0 "register_operand" "=l,l")
3558 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3559 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3562 [(set_attr "length" "2")
3563 (set_attr "conds" "set")])
;; Rotate-left is canonicalized to rotate-right: constant counts become
;; (32 - n) % 32; register counts are converted with a runtime 32 - n.
3565 (define_expand "rotlsi3"
3566 [(set (match_operand:SI 0 "s_register_operand" "")
3567 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3568 (match_operand:SI 2 "reg_or_int_operand" "")))]
3571 if (GET_CODE (operands[2]) == CONST_INT)
3572 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3575 rtx reg = gen_reg_rtx (SImode);
3576 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate-right: constant counts reduced mod 32; Thumb-1 needs the count
;; in a register.
3582 (define_expand "rotrsi3"
3583 [(set (match_operand:SI 0 "s_register_operand" "")
3584 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3585 (match_operand:SI 2 "arm_rhs_operand" "")))]
3590 if (GET_CODE (operands[2]) == CONST_INT
3591 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3592 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3594 else /* TARGET_THUMB1 */
3596 if (GET_CODE (operands [2]) == CONST_INT)
3597 operands [2] = force_reg (SImode, operands[2]);
3602 (define_insn "*thumb1_rotrsi3"
3603 [(set (match_operand:SI 0 "register_operand" "=l")
3604 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3605 (match_operand:SI 2 "register_operand" "l")))]
3608 [(set_attr "length" "2")]
;; Generic SI shift insns.  Output is produced by arm_output_shift(); the
;; second argument selects whether flags are set (0 = plain, 1 = compare
;; variants below).  "type" distinguishes immediate vs register shift
;; amounts for scheduling.
3611 (define_insn "*arm_shiftsi3"
3612 [(set (match_operand:SI 0 "s_register_operand" "=r")
3613 (match_operator:SI 3 "shift_operator"
3614 [(match_operand:SI 1 "s_register_operand" "r")
3615 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3617 "* return arm_output_shift(operands, 0);"
3618 [(set_attr "predicable" "yes")
3619 (set_attr "shift" "1")
3620 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3621 (const_string "alu_shift")
3622 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (CC_NOOV: V undefined) and
;; writes the shifted result.
3625 (define_insn "*shiftsi3_compare0"
3626 [(set (reg:CC_NOOV CC_REGNUM)
3627 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3628 [(match_operand:SI 1 "s_register_operand" "r")
3629 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3631 (set (match_operand:SI 0 "s_register_operand" "=r")
3632 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3634 "* return arm_output_shift(operands, 1);"
3635 [(set_attr "conds" "set")
3636 (set_attr "shift" "1")
3637 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3638 (const_string "alu_shift")
3639 (const_string "alu_shift_reg")))]
;; Same, but the shifted value itself is discarded (flags only).
3642 (define_insn "*shiftsi3_compare0_scratch"
3643 [(set (reg:CC_NOOV CC_REGNUM)
3644 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3645 [(match_operand:SI 1 "s_register_operand" "r")
3646 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3648 (clobber (match_scratch:SI 0 "=r"))]
3650 "* return arm_output_shift(operands, 1);"
3651 [(set_attr "conds" "set")
3652 (set_attr "shift" "1")]
;; MVN with a shifted operand: ~(op1 shift op2), plus compare0 and
;; compare0-scratch variants.  Register shift amounts (second alternative)
;; are ARM-only ("arch" "32,a").
;; NOTE(review): the output-template lines are not visible in this view.
3655 (define_insn "*not_shiftsi"
3656 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3657 (not:SI (match_operator:SI 3 "shift_operator"
3658 [(match_operand:SI 1 "s_register_operand" "r,r")
3659 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3662 [(set_attr "predicable" "yes")
3663 (set_attr "shift" "1")
3664 (set_attr "insn" "mvn")
3665 (set_attr "arch" "32,a")
3666 (set_attr "type" "alu_shift,alu_shift_reg")])
3668 (define_insn "*not_shiftsi_compare0"
3669 [(set (reg:CC_NOOV CC_REGNUM)
3671 (not:SI (match_operator:SI 3 "shift_operator"
3672 [(match_operand:SI 1 "s_register_operand" "r,r")
3673 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3675 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3676 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3679 [(set_attr "conds" "set")
3680 (set_attr "shift" "1")
3681 (set_attr "insn" "mvn")
3682 (set_attr "arch" "32,a")
3683 (set_attr "type" "alu_shift,alu_shift_reg")])
3685 (define_insn "*not_shiftsi_compare0_scratch"
3686 [(set (reg:CC_NOOV CC_REGNUM)
3688 (not:SI (match_operator:SI 3 "shift_operator"
3689 [(match_operand:SI 1 "s_register_operand" "r,r")
3690 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3692 (clobber (match_scratch:SI 0 "=r,r"))]
3695 [(set_attr "conds" "set")
3696 (set_attr "shift" "1")
3697 (set_attr "insn" "mvn")
3698 (set_attr "arch" "32,a")
3699 (set_attr "type" "alu_shift,alu_shift_reg")])
3701 ;; We don't really have extzv, but defining this using shifts helps
3702 ;; to reduce register pressure later on.
;; Bitfield extraction.  extzv synthesizes the extract as shift-left then
;; logical-shift-right (reduces register pressure); on Thumb-2 it instead
;; emits a single UBFX via extzv_t2.  The unnamed insn below (header line
;; not visible here -- presumably "extv"; TODO confirm) maps sign_extract
;; to SBFX.
3704 (define_expand "extzv"
3706 (ashift:SI (match_operand:SI 1 "register_operand" "")
3707 (match_operand:SI 2 "const_int_operand" "")))
3708 (set (match_operand:SI 0 "register_operand" "")
3709 (lshiftrt:SI (match_dup 4)
3710 (match_operand:SI 3 "const_int_operand" "")))]
3711 "TARGET_THUMB1 || arm_arch_thumb2"
3714 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3715 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3717 if (arm_arch_thumb2)
3719 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3724 operands[3] = GEN_INT (rshift);
3728 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3732 operands[2] = GEN_INT (lshift);
3733 operands[4] = gen_reg_rtx (SImode);
;; sign_extract -> SBFX dst, src, lsb(%3), width(%2).
3738 [(set (match_operand:SI 0 "s_register_operand" "=r")
3739 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3740 (match_operand:SI 2 "const_int_operand" "M")
3741 (match_operand:SI 3 "const_int_operand" "M")))]
3743 "sbfx%?\t%0, %1, %3, %2"
3744 [(set_attr "length" "4")
3745 (set_attr "predicable" "yes")]
;; zero_extract -> UBFX dst, src, lsb(%3), width(%2).
3748 (define_insn "extzv_t2"
3749 [(set (match_operand:SI 0 "s_register_operand" "=r")
3750 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3751 (match_operand:SI 2 "const_int_operand" "M")
3752 (match_operand:SI 3 "const_int_operand" "M")))]
3754 "ubfx%?\t%0, %1, %3, %2"
3755 [(set_attr "length" "4")
3756 (set_attr "predicable" "yes")]
3760 ;; Division instructions
;; Hardware signed/unsigned divide (SDIV/UDIV).  The insn conditions
;; (presumably an arm_arch_*div test) are not visible in this view.
3761 (define_insn "divsi3"
3762 [(set (match_operand:SI 0 "s_register_operand" "=r")
3763 (div:SI (match_operand:SI 1 "s_register_operand" "r")
3764 (match_operand:SI 2 "s_register_operand" "r")))]
3766 "sdiv%?\t%0, %1, %2"
3767 [(set_attr "predicable" "yes")
3768 (set_attr "insn" "sdiv")]
3771 (define_insn "udivsi3"
3772 [(set (match_operand:SI 0 "s_register_operand" "=r")
3773 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
3774 (match_operand:SI 2 "s_register_operand" "r")))]
3776 "udiv%?\t%0, %1, %2"
3777 [(set_attr "predicable" "yes")
3778 (set_attr "insn" "udiv")]
3782 ;; Unary arithmetic insns
;; Negation patterns.  negdi2 expands to a CC-clobbering parallel; the ARM
;; insn is RSBS/RSC over the two words, the Thumb-1 one builds 0 - x with
;; NEG/SBC.  FP negation expanders just gate on the available FP model.
3784 (define_expand "negdi2"
3786 [(set (match_operand:DI 0 "s_register_operand" "")
3787 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3788 (clobber (reg:CC CC_REGNUM))])]
3793 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3794 ;; The first alternative allows the common case of a *full* overlap.
3795 (define_insn "*arm_negdi2"
3796 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3797 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3798 (clobber (reg:CC CC_REGNUM))]
3800 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3801 [(set_attr "conds" "clob")
3802 (set_attr "length" "8")]
3805 (define_insn "*thumb1_negdi2"
3806 [(set (match_operand:DI 0 "register_operand" "=&l")
3807 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3808 (clobber (reg:CC CC_REGNUM))]
3810 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3811 [(set_attr "length" "6")]
3814 (define_expand "negsi2"
3815 [(set (match_operand:SI 0 "s_register_operand" "")
3816 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3821 (define_insn "*arm_negsi2"
3822 [(set (match_operand:SI 0 "s_register_operand" "=r")
3823 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3825 "rsb%?\\t%0, %1, #0"
3826 [(set_attr "predicable" "yes")]
3829 (define_insn "*thumb1_negsi2"
3830 [(set (match_operand:SI 0 "register_operand" "=l")
3831 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3834 [(set_attr "length" "2")]
3837 (define_expand "negsf2"
3838 [(set (match_operand:SF 0 "s_register_operand" "")
3839 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3840 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3844 (define_expand "negdf2"
3845 [(set (match_operand:DF 0 "s_register_operand" "")
3846 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3847 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3850 ;; abssi2 doesn't really clobber the condition codes if a different register
3851 ;; is being set. To keep things simple, assume during rtl manipulations that
3852 ;; it does, but tell the final scan operator the truth. Similarly for
;; Integer absolute value (and negated abs).  See the comment above: the
;; expander pessimistically clobbers CC, though the EOR/SUB variant does
;; not actually need it.  ARM alternatives: cmp+conditional RSB when in/out
;; alias, else the branch-free sign-mask sequence.  Thumb-1 splits after
;; reload into asr/add(or sub)/xor.
3855 (define_expand "abssi2"
3857 [(set (match_operand:SI 0 "s_register_operand" "")
3858 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3859 (clobber (match_dup 2))])]
3863 operands[2] = gen_rtx_SCRATCH (SImode);
3865 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3868 (define_insn "*arm_abssi2"
3869 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3870 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3871 (clobber (reg:CC CC_REGNUM))]
3874 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3875 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3876 [(set_attr "conds" "clob,*")
3877 (set_attr "shift" "1")
3878 ;; predicable can't be set based on the variant, so left as no
3879 (set_attr "length" "8")]
3882 (define_insn_and_split "*thumb1_abssi2"
3883 [(set (match_operand:SI 0 "s_register_operand" "=l")
3884 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3885 (clobber (match_scratch:SI 2 "=&l"))]
3888 "TARGET_THUMB1 && reload_completed"
3889 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3890 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3891 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3893 [(set_attr "length" "6")]
3896 (define_insn "*arm_neg_abssi2"
3897 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3898 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3899 (clobber (reg:CC CC_REGNUM))]
3902 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3903 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3904 [(set_attr "conds" "clob,*")
3905 (set_attr "shift" "1")
3906 ;; predicable can't be set based on the variant, so left as no
3907 (set_attr "length" "8")]
3910 (define_insn_and_split "*thumb1_neg_abssi2"
3911 [(set (match_operand:SI 0 "s_register_operand" "=l")
3912 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3913 (clobber (match_scratch:SI 2 "=&l"))]
3916 "TARGET_THUMB1 && reload_completed"
3917 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3918 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3919 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3921 [(set_attr "length" "6")]
;; FP abs and sqrt expanders: pattern bodies live in the FP back ends
;; (FPA/VFP .md files); these only gate on the available FP model.
3924 (define_expand "abssf2"
3925 [(set (match_operand:SF 0 "s_register_operand" "")
3926 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3927 "TARGET_32BIT && TARGET_HARD_FLOAT"
3930 (define_expand "absdf2"
3931 [(set (match_operand:DF 0 "s_register_operand" "")
3932 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3933 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3936 (define_expand "sqrtsf2"
3937 [(set (match_operand:SF 0 "s_register_operand" "")
3938 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3939 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3942 (define_expand "sqrtdf2"
3943 [(set (match_operand:DF 0 "s_register_operand" "")
3944 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3945 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; Bitwise NOT.  one_cmpldi2 splits after reload into two SImode MVNs over
;; the low/high parts; SImode has ARM and Thumb-1 MVN insns plus
;; flag-setting variants (CC_NOOV) with and without a kept result.
3948 (define_insn_and_split "one_cmpldi2"
3949 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3950 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3953 "TARGET_32BIT && reload_completed"
3954 [(set (match_dup 0) (not:SI (match_dup 1)))
3955 (set (match_dup 2) (not:SI (match_dup 3)))]
3958 operands[2] = gen_highpart (SImode, operands[0]);
3959 operands[0] = gen_lowpart (SImode, operands[0]);
3960 operands[3] = gen_highpart (SImode, operands[1]);
3961 operands[1] = gen_lowpart (SImode, operands[1]);
3963 [(set_attr "length" "8")
3964 (set_attr "predicable" "yes")]
3967 (define_expand "one_cmplsi2"
3968 [(set (match_operand:SI 0 "s_register_operand" "")
3969 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3974 (define_insn "*arm_one_cmplsi2"
3975 [(set (match_operand:SI 0 "s_register_operand" "=r")
3976 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3979 [(set_attr "predicable" "yes")
3980 (set_attr "insn" "mvn")]
3983 (define_insn "*thumb1_one_cmplsi2"
3984 [(set (match_operand:SI 0 "register_operand" "=l")
3985 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3988 [(set_attr "length" "2")
3989 (set_attr "insn" "mvn")]
;; MVNS: NOT that also sets flags and keeps the result...
3992 (define_insn "*notsi_compare0"
3993 [(set (reg:CC_NOOV CC_REGNUM)
3994 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3996 (set (match_operand:SI 0 "s_register_operand" "=r")
3997 (not:SI (match_dup 1)))]
4000 [(set_attr "conds" "set")
4001 (set_attr "insn" "mvn")]
;; ...and the flags-only variant (result discarded into a scratch).
4004 (define_insn "*notsi_compare0_scratch"
4005 [(set (reg:CC_NOOV CC_REGNUM)
4006 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4008 (clobber (match_scratch:SI 0 "=r"))]
4011 [(set_attr "conds" "set")
4012 (set_attr "insn" "mvn")]
4015 ;; Fixed <--> Floating conversion insns
;; Int <-> half-float conversions are routed through SFmode (no direct
;; HFmode arithmetic); the SI/DI -> SF/DF float expanders divert to Cirrus
;; Maverick patterns when that coprocessor is targeted.
4017 (define_expand "floatsihf2"
4018 [(set (match_operand:HF 0 "general_operand" "")
4019 (float:HF (match_operand:SI 1 "general_operand" "")))]
4023 rtx op1 = gen_reg_rtx (SFmode);
4024 expand_float (op1, operands[1], 0);
4025 op1 = convert_to_mode (HFmode, op1, 0);
4026 emit_move_insn (operands[0], op1);
4031 (define_expand "floatdihf2"
4032 [(set (match_operand:HF 0 "general_operand" "")
4033 (float:HF (match_operand:DI 1 "general_operand" "")))]
4037 rtx op1 = gen_reg_rtx (SFmode);
4038 expand_float (op1, operands[1], 0);
4039 op1 = convert_to_mode (HFmode, op1, 0);
4040 emit_move_insn (operands[0], op1);
4045 (define_expand "floatsisf2"
4046 [(set (match_operand:SF 0 "s_register_operand" "")
4047 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4048 "TARGET_32BIT && TARGET_HARD_FLOAT"
4050 if (TARGET_MAVERICK)
4052 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
4057 (define_expand "floatsidf2"
4058 [(set (match_operand:DF 0 "s_register_operand" "")
4059 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4060 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4062 if (TARGET_MAVERICK)
4064 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; HF -> int truncation also goes via SFmode.
4069 (define_expand "fix_trunchfsi2"
4070 [(set (match_operand:SI 0 "general_operand" "")
4071 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4075 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4076 expand_fix (operands[0], op1, 0);
4081 (define_expand "fix_trunchfdi2"
4082 [(set (match_operand:DI 0 "general_operand" "")
4083 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4087 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4088 expand_fix (operands[0], op1, 0);
;; SF -> SI truncation; diverts to the Cirrus Maverick pattern when
;; targeted, forcing both operands into Cirrus-acceptable registers first.
4093 (define_expand "fix_truncsfsi2"
4094 [(set (match_operand:SI 0 "s_register_operand" "")
4095 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4096 "TARGET_32BIT && TARGET_HARD_FLOAT"
4098 if (TARGET_MAVERICK)
4100 if (!cirrus_fp_register (operands[0], SImode))
4101 operands[0] = force_reg (SImode, operands[0]);
4102 if (!cirrus_fp_register (operands[1], SFmode))
;; Fixed: previously forced operands[0] (the SImode destination) into an
;; SFmode register here, clobbering the real input; the source operand is
;; operands[1] (matches upstream arm.md).
4103 operands[1] = force_reg (SFmode, operands[1]);
4104 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; DF -> SI truncation; Cirrus Maverick path mirrors fix_truncsfsi2.
4109 (define_expand "fix_truncdfsi2"
4110 [(set (match_operand:SI 0 "s_register_operand" "")
4111 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4112 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4114 if (TARGET_MAVERICK)
4116 if (!cirrus_fp_register (operands[1], DFmode))
;; Fixed: previously forced operands[0] (the SImode destination) into a
;; DFmode register, discarding the real DF input; the source operand is
;; operands[1] (matches upstream arm.md).
4117 operands[1] = force_reg (DFmode, operands[1]);
4118 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF truncation expander, and DF -> HF which (per the comment below)
;; must be staged through SFmode.
4125 (define_expand "truncdfsf2"
4126 [(set (match_operand:SF 0 "s_register_operand" "")
4128 (match_operand:DF 1 "s_register_operand" "")))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4133 /* DFmode -> HFmode conversions have to go through SFmode. */
4134 (define_expand "truncdfhf2"
4135 [(set (match_operand:HF 0 "general_operand" "")
4137 (match_operand:DF 1 "general_operand" "")))]
4142 op1 = convert_to_mode (SFmode, operands[1], 0);
4143 op1 = convert_to_mode (HFmode, op1, 0);
4144 emit_move_insn (operands[0], op1);
4149 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI extensions, parameterized over the QHSI mode iterator;
;; predicates/constraints/conditions come from per-mode attributes.  Each
;; is two conditionally executable insns (length 8, ce_count 2) and is
;; split into SImode pieces by the splitters that follow.
4151 (define_insn "zero_extend<mode>di2"
4152 [(set (match_operand:DI 0 "s_register_operand" "=r")
4153 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4154 "<qhs_extenddi_cstr>")))]
4155 "TARGET_32BIT <qhs_zextenddi_cond>"
4157 [(set_attr "length" "8")
4158 (set_attr "ce_count" "2")
4159 (set_attr "predicable" "yes")]
4162 (define_insn "extend<mode>di2"
4163 [(set (match_operand:DI 0 "s_register_operand" "=r")
4164 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4165 "<qhs_extenddi_cstr>")))]
4166 "TARGET_32BIT <qhs_sextenddi_cond>"
4168 [(set_attr "length" "8")
4169 (set_attr "ce_count" "2")
4170 (set_attr "shift" "1")
4171 (set_attr "predicable" "yes")]
4174 ;; Splits for all extensions to DImode
;; Splitters for all extensions to DImode: extend into the low SImode part
;; (a plain move when the source is already SImode), then set the high part
;; to zero (zero-extend) or to low >> 31 (sign-extend).  An explicit
;; clobber of the full DI reg is emitted first when it does not overlap the
;; source, so the low-part write is not seen as a partial def.
;; NOTE(review): the define_split headers/conditions are missing from this
;; view (numbering gaps) -- verify upstream.
4176 [(set (match_operand:DI 0 "s_register_operand" "")
4177 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4179 [(set (match_dup 0) (match_dup 1))]
4181 rtx lo_part = gen_lowpart (SImode, operands[0]);
4182 enum machine_mode src_mode = GET_MODE (operands[1]);
4184 if (REG_P (operands[0])
4185 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4186 emit_clobber (operands[0]);
4187 if (!REG_P (lo_part) || src_mode != SImode
4188 || !rtx_equal_p (lo_part, operands[1]))
4190 if (src_mode == SImode)
4191 emit_move_insn (lo_part, operands[1]);
4193 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4194 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4195 operands[1] = lo_part;
4197 operands[0] = gen_highpart (SImode, operands[0]);
4198 operands[1] = const0_rtx;
;; Sign-extend variant: high part becomes (low >> 31).
4202 [(set (match_operand:DI 0 "s_register_operand" "")
4203 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4205 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4207 rtx lo_part = gen_lowpart (SImode, operands[0]);
4208 enum machine_mode src_mode = GET_MODE (operands[1]);
4210 if (REG_P (operands[0])
4211 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4212 emit_clobber (operands[0]);
4214 if (!REG_P (lo_part) || src_mode != SImode
4215 || !rtx_equal_p (lo_part, operands[1]))
4217 if (src_mode == SImode)
4218 emit_move_insn (lo_part, operands[1]);
4220 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4221 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4222 operands[1] = lo_part;
4224 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.  Pre-ARMv4 cannot load a halfword from memory,
;; so a byte-wise sequence is used; pre-ARMv6 register sources are widened
;; with shift-left-16 / logical-shift-right-16 (no UXTH available); the
;; splitter below does the same after combine.
4227 (define_expand "zero_extendhisi2"
4228 [(set (match_operand:SI 0 "s_register_operand" "")
4229 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4232 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4234 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4237 if (!arm_arch6 && !MEM_P (operands[1]))
4239 rtx t = gen_lowpart (SImode, operands[1]);
4240 rtx tmp = gen_reg_rtx (SImode);
4241 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4242 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (header line not visible here): zext via two 16-bit shifts.
4248 [(set (match_operand:SI 0 "s_register_operand" "")
4249 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4250 "!TARGET_THUMB2 && !arm_arch6"
4251 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4252 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4254 operands[2] = gen_lowpart (SImode, operands[1]);
4257 (define_insn "*thumb1_zero_extendhisi2"
4258 [(set (match_operand:SI 0 "register_operand" "=l,l")
4259 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4264 if (which_alternative == 0 && arm_arch6)
4265 return "uxth\t%0, %1";
4266 if (which_alternative == 0)
4269 mem = XEXP (operands[1], 0);
4271 if (GET_CODE (mem) == CONST)
4272 mem = XEXP (mem, 0);
4274 if (GET_CODE (mem) == PLUS)
4276 rtx a = XEXP (mem, 0);
4278 /* This can happen due to bugs in reload. */
4279 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4282 ops[0] = operands[0];
4285 output_asm_insn ("mov\t%0, %1", ops);
4287 XEXP (mem, 0) = operands[0];
4291 return "ldrh\t%0, %1";
4293 [(set_attr_alternative "length"
4294 [(if_then_else (eq_attr "is_arch6" "yes")
4295 (const_int 2) (const_int 4))
4297 (set_attr "type" "alu_shift,load_byte")]
4300 (define_insn "*arm_zero_extendhisi2"
4301 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4302 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4303 "TARGET_ARM && arm_arch4 && !arm_arch6"
4307 [(set_attr "type" "alu_shift,load_byte")
4308 (set_attr "predicable" "yes")]
4311 (define_insn "*arm_zero_extendhisi2_v6"
4312 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4313 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4314 "TARGET_ARM && arm_arch6"
4318 [(set_attr "type" "alu_shift,load_byte")
4319 (set_attr "predicable" "yes")]
4322 (define_insn "*arm_zero_extendhisi2addsi"
4323 [(set (match_operand:SI 0 "s_register_operand" "=r")
4324 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4325 (match_operand:SI 2 "s_register_operand" "r")))]
4327 "uxtah%?\\t%0, %2, %1"
4328 [(set_attr "type" "alu_shift")
4329 (set_attr "predicable" "yes")]
;; -------------------------------------------------------------------
;; Zero-extension of QImode to SImode.
;; NOTE(review): elided extract -- some lines are missing; code kept
;; byte-identical, comments only added.
;; Expander: pre-v6 register sources use AND #255 (ARM) or a 24-bit
;; shift pair; memory sources fall through to the ldrb insns below.
;; -------------------------------------------------------------------
4332 (define_expand "zero_extendqisi2"
4333 [(set (match_operand:SI 0 "s_register_operand" "")
4334 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4337 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4339 emit_insn (gen_andsi3 (operands[0],
4340 gen_lowpart (SImode, operands[1]),
4344 if (!arm_arch6 && !MEM_P (operands[1]))
4346 rtx t = gen_lowpart (SImode, operands[1]);
4347 rtx tmp = gen_reg_rtx (SImode);
4348 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4349 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (header line elided): QI->SI zero-extend of a register via
;; lsl #24 / lsr #24, or via AND #255 (both branches visible below).
4355 [(set (match_operand:SI 0 "s_register_operand" "")
4356 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4358 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4359 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4361 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4364 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Thumb-1 pre-v6 insn: shift pair / ldrb.
4369 (define_insn "*thumb1_zero_extendqisi2"
4370 [(set (match_operand:SI 0 "register_operand" "=l,l")
4371 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4372 "TARGET_THUMB1 && !arm_arch6"
4376 [(set_attr "length" "4,2")
4377 (set_attr "type" "alu_shift,load_byte")
4378 (set_attr "pool_range" "*,32")]
;; Thumb-1 v6 insn: uxtb / ldrb, both 2 bytes.
4381 (define_insn "*thumb1_zero_extendqisi2_v6"
4382 [(set (match_operand:SI 0 "register_operand" "=l,l")
4383 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4384 "TARGET_THUMB1 && arm_arch6"
4388 [(set_attr "length" "2")
4389 (set_attr "type" "alu_shift,load_byte")]
;; ARM pre-v6 insn: register alternative is 8 bytes (shift pair),
;; memory alternative is a predicated ldrb.
4392 (define_insn "*arm_zero_extendqisi2"
4393 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4394 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4395 "TARGET_ARM && !arm_arch6"
4398 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4399 [(set_attr "length" "8,4")
4400 (set_attr "type" "alu_shift,load_byte")
4401 (set_attr "predicable" "yes")]
;; ARM v6 insn: uxtb / ldrb.
4404 (define_insn "*arm_zero_extendqisi2_v6"
4405 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4406 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4407 "TARGET_ARM && arm_arch6"
4410 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4411 [(set_attr "type" "alu_shift,load_byte")
4412 (set_attr "predicable" "yes")]
;; Fused zero-extend-and-add: uxtab rd, rn, rm.
4415 (define_insn "*arm_zero_extendqisi2addsi"
4416 [(set (match_operand:SI 0 "s_register_operand" "=r")
4417 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4418 (match_operand:SI 2 "s_register_operand" "r")))]
4420 "uxtab%?\\t%0, %2, %1"
4421 [(set_attr "predicable" "yes")
4422 (set_attr "insn" "xtab")
4423 (set_attr "type" "alu_shift")]
;; -------------------------------------------------------------------
;; Splits for zero-extending the low byte of an SImode value held in a
;; register (subreg:QI ... 0 little-endian, ... 3 big-endian): copy to
;; the scratch then mask with #255.  (define_split header lines are
;; elided in this extract; code kept byte-identical.)
;; -------------------------------------------------------------------
4427 [(set (match_operand:SI 0 "s_register_operand" "")
4428 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4429 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4430 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4431 [(set (match_dup 2) (match_dup 1))
4432 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian variant: the low byte is subreg byte 3.
4437 [(set (match_operand:SI 0 "s_register_operand" "")
4438 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4439 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4440 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4441 [(set (match_dup 2) (match_dup 1))
4442 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split an (ior/xor (and (ashift ...) mask) (zero_extend ...)) combo
;; into the shift-combine followed by a plain zero_extend, when the
;; mask equals the shifted mode mask (condition visible below).
4448 [(set (match_operand:SI 0 "s_register_operand" "")
4449 (ior_xor:SI (and:SI (ashift:SI
4450 (match_operand:SI 1 "s_register_operand" "")
4451 (match_operand:SI 2 "const_int_operand" ""))
4452 (match_operand:SI 3 "const_int_operand" ""))
4454 (match_operator 5 "subreg_lowpart_operator"
4455 [(match_operand:SI 4 "s_register_operand" "")]))))]
4457 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4458 == (GET_MODE_MASK (GET_MODE (operands[5]))
4459 & (GET_MODE_MASK (GET_MODE (operands[5]))
4460 << (INTVAL (operands[2])))))"
4461 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4463 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4464 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag.
4467 (define_insn "*compareqi_eq0"
4468 [(set (reg:CC_Z CC_REGNUM)
4469 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4473 [(set_attr "conds" "set")]
;; -------------------------------------------------------------------
;; Sign-extension of HImode to SImode.
;; NOTE(review): elided extract; code kept byte-identical.
;; Expander: Thumb-1 goes via thumb1_extendhisi2; pre-ARMv4 memory
;; sources via extendhisi2_mem; pre-v6 register sources via an
;; asl #16 / asr #16 shift pair.
;; -------------------------------------------------------------------
4476 (define_expand "extendhisi2"
4477 [(set (match_operand:SI 0 "s_register_operand" "")
4478 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4483 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4486 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4488 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4492 if (!arm_arch6 && !MEM_P (operands[1]))
4494 rtx t = gen_lowpart (SImode, operands[1]);
4495 rtx tmp = gen_reg_rtx (SImode);
4496 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4497 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (header elided): register HI->SI sign-extend with a clobbered
;; scratch lowered to the lsl/asr pair.
4504 [(set (match_operand:SI 0 "register_operand" "")
4505 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4506 (clobber (match_scratch:SI 2 ""))])]
4508 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4509 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4511 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4514 ;; We used to have an early-clobber on the scratch register here.
4515 ;; However, there's a bug somewhere in reload which means that this
4516 ;; can be partially ignored during spill allocation if the memory
4517 ;; address also needs reloading; this causes us to die later on when
4518 ;; we try to verify the operands. Fortunately, we don't really need
4519 ;; the early-clobber: we can always use operand 0 if operand 2
4520 ;; overlaps the address.
;; Thumb-1 insn: sxth for v6 registers; for memory the output code
;; below dissects the address to pick ldr/ldrsh, materializing an
;; index register (scratch or operand 0) when needed.
4521 (define_insn "thumb1_extendhisi2"
4522 [(set (match_operand:SI 0 "register_operand" "=l,l")
4523 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4524 (clobber (match_scratch:SI 2 "=X,l"))]
4531 if (which_alternative == 0 && !arm_arch6)
4533 if (which_alternative == 0)
4534 return \"sxth\\t%0, %1\";
4536 mem = XEXP (operands[1], 0);
4538 /* This code used to try to use 'V', and fix the address only if it was
4539 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4540 range of QImode offsets, and offsettable_address_p does a QImode
4543 if (GET_CODE (mem) == CONST)
4544 mem = XEXP (mem, 0);
4546 if (GET_CODE (mem) == LABEL_REF)
4547 return \"ldr\\t%0, %1\";
4549 if (GET_CODE (mem) == PLUS)
4551 rtx a = XEXP (mem, 0);
4552 rtx b = XEXP (mem, 1);
4554 if (GET_CODE (a) == LABEL_REF
4555 && GET_CODE (b) == CONST_INT)
4556 return \"ldr\\t%0, %1\";
4558 if (GET_CODE (b) == REG)
4559 return \"ldrsh\\t%0, %1\";
4567 ops[2] = const0_rtx;
4570 gcc_assert (GET_CODE (ops[1]) == REG);
4572 ops[0] = operands[0];
4573 if (reg_mentioned_p (operands[2], ops[1]))
4576 ops[3] = operands[2];
4577 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4580 [(set_attr_alternative "length"
4581 [(if_then_else (eq_attr "is_arch6" "yes")
4582 (const_int 2) (const_int 4))
4584 (set_attr "type" "alu_shift,load_byte")
4585 (set_attr "pool_range" "*,1020")]
4588 ;; This pattern will only be used when ldsh is not available
;; Expander: synthesize a signed halfword load from two byte loads
;; (ldrb of each half), then merge with shifts; byte order of the
;; halves is chosen below based on BYTES_BIG_ENDIAN.
;; NOTE(review): elided extract; code kept byte-identical.
4589 (define_expand "extendhisi2_mem"
4590 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4592 (zero_extend:SI (match_dup 7)))
4593 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4594 (set (match_operand:SI 0 "" "")
4595 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4600 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4602 mem1 = change_address (operands[1], QImode, addr);
4603 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4604 operands[0] = gen_lowpart (SImode, operands[0]);
4606 operands[2] = gen_reg_rtx (SImode);
4607 operands[3] = gen_reg_rtx (SImode);
4608 operands[6] = gen_reg_rtx (SImode);
4611 if (BYTES_BIG_ENDIAN)
4613 operands[4] = operands[2];
4614 operands[5] = operands[3];
4618 operands[4] = operands[3];
4619 operands[5] = operands[2];
;; Split (header elided): register HI->SI sign-extend lowered to the
;; lsl #16 / asr #16 pair.
4625 [(set (match_operand:SI 0 "register_operand" "")
4626 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4628 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4629 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4631 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM pre-v6 insn: shift pair for registers, ldrsh for memory.
4634 (define_insn "*arm_extendhisi2"
4635 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4636 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4637 "TARGET_ARM && arm_arch4 && !arm_arch6"
4641 [(set_attr "length" "8,4")
4642 (set_attr "type" "alu_shift,load_byte")
4643 (set_attr "predicable" "yes")
4644 (set_attr "pool_range" "*,256")
4645 (set_attr "neg_pool_range" "*,244")]
4648 ;; ??? Check Thumb-2 pool range
;; v6 32-bit insn: sxth available for the register alternative.
4649 (define_insn "*arm_extendhisi2_v6"
4650 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4651 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4652 "TARGET_32BIT && arm_arch6"
4656 [(set_attr "type" "alu_shift,load_byte")
4657 (set_attr "predicable" "yes")
4658 (set_attr "pool_range" "*,256")
4659 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtah rd, rn, rm.
4662 (define_insn "*arm_extendhisi2addsi"
4663 [(set (match_operand:SI 0 "s_register_operand" "=r")
4664 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4665 (match_operand:SI 2 "s_register_operand" "r")))]
4667 "sxtah%?\\t%0, %2, %1"
;; -------------------------------------------------------------------
;; Sign-extension of QImode to HImode and SImode.
;; NOTE(review): elided extract; code kept byte-identical.
;; extendqihi2: on ARMv4+ a memory source becomes a direct ldrsb;
;; otherwise the extend is done as SImode shifts on the low parts.
;; -------------------------------------------------------------------
4670 (define_expand "extendqihi2"
4672 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4674 (set (match_operand:HI 0 "s_register_operand" "")
4675 (ashiftrt:SI (match_dup 2)
4680 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4682 emit_insn (gen_rtx_SET (VOIDmode,
4684 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4687 if (!s_register_operand (operands[1], QImode))
4688 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4689 operands[0] = gen_lowpart (SImode, operands[0]);
4690 operands[1] = gen_lowpart (SImode, operands[1]);
4691 operands[2] = gen_reg_rtx (SImode);
;; ldrsb straight into an HImode destination (ARMv4+).
4695 (define_insn "*arm_extendqihi_insn"
4696 [(set (match_operand:HI 0 "s_register_operand" "=r")
4697 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4698 "TARGET_ARM && arm_arch4"
4699 "ldr%(sb%)\\t%0, %1"
4700 [(set_attr "type" "load_byte")
4701 (set_attr "predicable" "yes")
4702 (set_attr "pool_range" "256")
4703 (set_attr "neg_pool_range" "244")]
;; Expander: pre-v4 memory sources are first copied to a register;
;; pre-v6 register sources use the asl #24 / asr #24 pair.
4706 (define_expand "extendqisi2"
4707 [(set (match_operand:SI 0 "s_register_operand" "")
4708 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4711 if (!arm_arch4 && MEM_P (operands[1]))
4712 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4714 if (!arm_arch6 && !MEM_P (operands[1]))
4716 rtx t = gen_lowpart (SImode, operands[1]);
4717 rtx tmp = gen_reg_rtx (SImode);
4718 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4719 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (header elided): register QI->SI sign-extend via shift pair.
4725 [(set (match_operand:SI 0 "register_operand" "")
4726 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4728 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4729 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4731 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM pre-v6 insn: shift pair for registers, ldrsb for memory.
4734 (define_insn "*arm_extendqisi"
4735 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4736 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4737 "TARGET_ARM && arm_arch4 && !arm_arch6"
4741 [(set_attr "length" "8,4")
4742 (set_attr "type" "alu_shift,load_byte")
4743 (set_attr "predicable" "yes")
4744 (set_attr "pool_range" "*,256")
4745 (set_attr "neg_pool_range" "*,244")]
;; ARM v6 insn: sxtb / ldrsb.
4748 (define_insn "*arm_extendqisi_v6"
4749 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4751 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4752 "TARGET_ARM && arm_arch6"
4756 [(set_attr "type" "alu_shift,load_byte")
4757 (set_attr "predicable" "yes")
4758 (set_attr "pool_range" "*,256")
4759 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtab rd, rn, rm.
4762 (define_insn "*arm_extendqisi2addsi"
4763 [(set (match_operand:SI 0 "s_register_operand" "=r")
4764 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4765 (match_operand:SI 2 "s_register_operand" "r")))]
4767 "sxtab%?\\t%0, %2, %1"
4768 [(set_attr "type" "alu_shift")
4769 (set_attr "insn" "xtab")
4770 (set_attr "predicable" "yes")]
;; -------------------------------------------------------------------
;; Thumb-1 sign-extending byte loads.  ldrsb only supports [reg, reg]
;; addressing, so the split below rewrites other address forms into a
;; register+register address using operand 0 as the index register.
;; NOTE(review): elided extract (the define_split header line is
;; missing); code kept byte-identical.
;; -------------------------------------------------------------------
4774 [(set (match_operand:SI 0 "register_operand" "")
4775 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4776 "TARGET_THUMB1 && reload_completed"
4777 [(set (match_dup 0) (match_dup 2))
4778 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4780 rtx addr = XEXP (operands[1], 0);
4782 if (GET_CODE (addr) == CONST)
4783 addr = XEXP (addr, 0);
4785 if (GET_CODE (addr) == PLUS
4786 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4787 /* No split necessary.  */
4790 if (GET_CODE (addr) == PLUS
4791 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
;; If the destination overlaps the address we must load through a
;; QImode lowpart temporary first (sequence below).
4794 if (reg_overlap_mentioned_p (operands[0], addr))
4796 rtx t = gen_lowpart (QImode, operands[0]);
4797 emit_move_insn (t, operands[1]);
4798 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4804 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4805 operands[2] = const0_rtx;
4807 else if (GET_CODE (addr) != PLUS)
4809 else if (REG_P (XEXP (addr, 0)))
4811 operands[2] = XEXP (addr, 1);
4812 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4816 operands[2] = XEXP (addr, 0);
4817 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4820 operands[3] = change_address (operands[1], QImode, addr);
;; peephole2 (header elided): recombine "rd += const; rt = 0;
;; rx = sext(mem[rd + rt])" into "rt = const; rx = sext(mem[rd + rt])"
;; when the condition below shows the registers die appropriately.
4824 [(set (match_operand:SI 0 "register_operand" "")
4825 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4826 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4827 (set (match_operand:SI 3 "register_operand" "")
4828 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4830 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4831 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4832 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4833 && (peep2_reg_dead_p (3, operands[0])
4834 || rtx_equal_p (operands[0], operands[3]))
4835 && (peep2_reg_dead_p (3, operands[2])
4836 || rtx_equal_p (operands[2], operands[3]))"
4837 [(set (match_dup 2) (match_dup 1))
4838 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4840 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4841 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb-1 insn: sxtb for v6 registers; ldrsb only when the address is
;; already reg+reg (checked below).
4844 (define_insn "thumb1_extendqisi2"
4845 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4846 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4851 if (which_alternative == 0 && arm_arch6)
4852 return "sxtb\\t%0, %1";
4853 if (which_alternative == 0)
4856 addr = XEXP (operands[1], 0);
4857 if (GET_CODE (addr) == PLUS
4858 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4859 return "ldrsb\\t%0, %1";
4863 [(set_attr_alternative "length"
4864 [(if_then_else (eq_attr "is_arch6" "yes")
4865 (const_int 2) (const_int 4))
4867 (if_then_else (eq_attr "is_arch6" "yes")
4868 (const_int 4) (const_int 6))])
4869 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; -------------------------------------------------------------------
;; Floating-point widening conversions.
;; extendsfdf2: SF->DF, only for hard-float targets with double-
;; precision hardware (condition below).
;; -------------------------------------------------------------------
4872 (define_expand "extendsfdf2"
4873 [(set (match_operand:DF 0 "s_register_operand" "")
4874 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4875 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4879 /* HFmode -> DFmode conversions have to go through SFmode. */
;; extendhfdf2: widen half precision to double via an intermediate
;; SFmode conversion, then emit the DF move.
4880 (define_expand "extendhfdf2"
4881 [(set (match_operand:DF 0 "general_operand" "")
4882 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4887 op1 = convert_to_mode (SFmode, operands[1], 0);
4888 op1 = convert_to_mode (DFmode, op1, 0);
4889 emit_insn (gen_movdf (operands[0], op1));
4894 ;; Move insns (including loads and stores)
4896 ;; XXX Just some ideas about movti.
4897 ;; I don't think these are a good idea on the arm, there just aren't enough
4899 ;;(define_expand "loadti"
4900 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4901 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4904 ;;(define_expand "storeti"
4905 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4906 ;; (match_operand:TI 1 "s_register_operand" ""))]
4909 ;;(define_expand "movti"
4910 ;; [(set (match_operand:TI 0 "general_operand" "")
4911 ;; (match_operand:TI 1 "general_operand" ""))]
4917 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4918 ;; operands[1] = copy_to_reg (operands[1]);
4919 ;; if (GET_CODE (operands[0]) == MEM)
4920 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4921 ;; else if (GET_CODE (operands[1]) == MEM)
4922 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4926 ;; emit_insn (insn);
4930 ;; Recognize garbage generated above.
4933 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4934 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4938 ;; register mem = (which_alternative < 3);
4939 ;; register const char *template;
4941 ;; operands[mem] = XEXP (operands[mem], 0);
4942 ;; switch (which_alternative)
4944 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4945 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4946 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4947 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4948 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4949 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4951 ;; output_asm_insn (template, operands);
;; -------------------------------------------------------------------
;; DImode moves.
;; NOTE(review): elided extract; code kept byte-identical.
;; Expander: before reload, force the source into a register when the
;; destination is not a register.
;; -------------------------------------------------------------------
4955 (define_expand "movdi"
4956 [(set (match_operand:DI 0 "general_operand" "")
4957 (match_operand:DI 1 "general_operand" ""))]
4960 if (can_create_pseudo_p ())
4962 if (GET_CODE (operands[0]) != REG)
4963 operands[1] = force_reg (DImode, operands[1]);
;; Core ARM DImode move; constant alternatives Da/Db/Dc take 2/3/4
;; instructions (see "length" attr); loads/stores via
;; output_move_double.
4968 (define_insn "*arm_movdi"
4969 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4970 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4972 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4974 && ( register_operand (operands[0], DImode)
4975 || register_operand (operands[1], DImode))"
4977 switch (which_alternative)
4984 return output_move_double (operands)
4987 [(set_attr "length" "8,12,16,8,8")
4988 (set_attr "type" "*,*,*,load2,store2")
4989 (set_attr "arm_pool_range" "*,*,*,1020,*")
4990 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4991 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4992 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split (header elided): inline a 64-bit constant as two 32-bit
;; immediate builds when cheap enough (cost test below).
4996 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4997 (match_operand:ANY64 1 "const_double_operand" ""))]
5000 && (arm_const_double_inline_cost (operands[1])
5001 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5004 arm_split_constant (SET, SImode, curr_insn,
5005 INTVAL (gen_lowpart (SImode, operands[1])),
5006 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5007 arm_split_constant (SET, SImode, curr_insn,
5008 INTVAL (gen_highpart_mode (SImode,
5009 GET_MODE (operands[0]),
5011 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5016 ; If optimizing for size, or if we have load delay slots, then
5017 ; we want to split the constant into two separate operations.
5018 ; In both cases this may split a trivial part into a single data op
5019 ; leaving a single complex constant to load. We can also get longer
5020 ; offsets in a LDR which means we get better chances of sharing the pool
5021 ; entries. Finally, we can normally do a better job of scheduling
5022 ; LDR instructions than we can with LDM.
5023 ; This pattern will only match if the one above did not.
;; Split (header elided): 64-bit constant by independent 32-bit parts.
5025 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5026 (match_operand:ANY64 1 "const_double_operand" ""))]
5027 "TARGET_ARM && reload_completed
5028 && arm_const_double_by_parts (operands[1])"
5029 [(set (match_dup 0) (match_dup 1))
5030 (set (match_dup 2) (match_dup 3))]
5032 operands[2] = gen_highpart (SImode, operands[0]);
5033 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5035 operands[0] = gen_lowpart (SImode, operands[0]);
5036 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split (header elided): 64-bit reg-to-reg move as two SImode moves,
;; swapping the order when the halves partially overlap.
5041 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5042 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5043 "TARGET_EITHER && reload_completed"
5044 [(set (match_dup 0) (match_dup 1))
5045 (set (match_dup 2) (match_dup 3))]
5047 operands[2] = gen_highpart (SImode, operands[0]);
5048 operands[3] = gen_highpart (SImode, operands[1]);
5049 operands[0] = gen_lowpart (SImode, operands[0]);
5050 operands[1] = gen_lowpart (SImode, operands[1]);
5052 /* Handle a partial overlap. */
5053 if (rtx_equal_p (operands[0], operands[3]))
5055 rtx tmp0 = operands[0];
5056 rtx tmp1 = operands[1];
5058 operands[0] = operands[2];
5059 operands[1] = operands[3];
5066 ;; We can't actually do base+index doubleword loads if the index and
5067 ;; destination overlap. Split here so that we at least have chance to
;; Split (header elided): compute the address into the low half of the
;; destination first, then do the doubleword load from it.
5070 [(set (match_operand:DI 0 "s_register_operand" "")
5071 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5072 (match_operand:SI 2 "s_register_operand" ""))))]
5074 && reg_overlap_mentioned_p (operands[0], operands[1])
5075 && reg_overlap_mentioned_p (operands[0], operands[2])"
5077 (plus:SI (match_dup 1)
5080 (mem:DI (match_dup 4)))]
5082 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5086 ;;; ??? This should have alternatives for constants.
5087 ;;; ??? This was originally identical to the movdf_insn pattern.
5088 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5089 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move; per-alternative output code below handles
;; register pairs (order-sensitive), constants, ldmia/stmia and
;; split str pairs.
5090 (define_insn "*thumb1_movdi_insn"
5091 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5092 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5094 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5095 && ( register_operand (operands[0], DImode)
5096 || register_operand (operands[1], DImode))"
5099 switch (which_alternative)
5103 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5104 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5105 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5107 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5109 operands[1] = GEN_INT (- INTVAL (operands[1]));
5110 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5112 return \"ldmia\\t%1, {%0, %H0}\";
5114 return \"stmia\\t%0, {%1, %H1}\";
5116 return thumb_load_double_from_address (operands);
5118 operands[2] = gen_rtx_MEM (SImode,
5119 plus_constant (XEXP (operands[0], 0), 4));
5120 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5123 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5124 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5125 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5128 [(set_attr "length" "4,4,6,2,2,6,4,4")
5129 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5130 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5131 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; -------------------------------------------------------------------
;; SImode move expander.  Handles, in order: forcing mem=const/mem=mem
;; through a register, splitting awkward immediates, movw/movt symbol
;; pairs, section-anchor offsets, TLS references, and PIC
;; legitimization.
;; NOTE(review): elided extract; code kept byte-identical.
;; -------------------------------------------------------------------
5134 (define_expand "movsi"
5135 [(set (match_operand:SI 0 "general_operand" "")
5136 (match_operand:SI 1 "general_operand" ""))]
5140 rtx base, offset, tmp;
5144 /* Everything except mem = const or mem = mem can be done easily. */
5145 if (GET_CODE (operands[0]) == MEM)
5146 operands[1] = force_reg (SImode, operands[1]);
5147 if (arm_general_register_operand (operands[0], SImode)
5148 && GET_CODE (operands[1]) == CONST_INT
5149 && !(const_ok_for_arm (INTVAL (operands[1]))
5150 || const_ok_for_arm (~INTVAL (operands[1]))))
5152 arm_split_constant (SET, SImode, NULL_RTX,
5153 INTVAL (operands[1]), operands[0], NULL_RTX,
5154 optimize && can_create_pseudo_p ());
5158 if (TARGET_USE_MOVT && !target_word_relocations
5159 && GET_CODE (operands[1]) == SYMBOL_REF
5160 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5162 arm_emit_movpair (operands[0], operands[1]);
5166 else /* TARGET_THUMB1... */
5168 if (can_create_pseudo_p ())
5170 if (GET_CODE (operands[0]) != REG)
5171 operands[1] = force_reg (SImode, operands[1]);
5175 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5177 split_const (operands[1], &base, &offset);
5178 if (GET_CODE (base) == SYMBOL_REF
5179 && !offset_within_block_p (base, INTVAL (offset)))
5181 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5182 emit_move_insn (tmp, base);
5183 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5188 /* Recognize the case where operand[1] is a reference to thread-local
5189 data and load its address to a register. */
5190 if (arm_tls_referenced_p (operands[1]))
5192 rtx tmp = operands[1];
5195 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5197 addend = XEXP (XEXP (tmp, 0), 1);
5198 tmp = XEXP (XEXP (tmp, 0), 0);
5201 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5202 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5204 tmp = legitimize_tls_address (tmp,
5205 !can_create_pseudo_p () ? operands[0] : 0);
5208 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5209 tmp = force_operand (tmp, operands[0]);
5214 && (CONSTANT_P (operands[1])
5215 || symbol_mentioned_p (operands[1])
5216 || label_mentioned_p (operands[1])))
5217 operands[1] = legitimize_pic_address (operands[1], SImode,
5218 (!can_create_pseudo_p ()
5225 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5226 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5227 ;; so this does not matter.
;; movt: set the upper 16 bits of a register, keeping the low half
;; (operand 1 tied to operand 0 via the "0" constraint).
;; NOTE(review): elided extract; code kept byte-identical.
5228 (define_insn "*arm_movt"
5229 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5230 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5231 (match_operand:SI 2 "general_operand" "i")))]
5233 "movt%?\t%0, #:upper16:%c2"
5234 [(set_attr "predicable" "yes")
5235 (set_attr "length" "4")]
;; Core ARM SImode move (non-VFP, non-iWMMXt): mov/mvn/movw for
;; constants, ldr/str for memory.
5238 (define_insn "*arm_movsi_insn"
5239 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5240 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5241 "TARGET_ARM && ! TARGET_IWMMXT
5242 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5243 && ( register_operand (operands[0], SImode)
5244 || register_operand (operands[1], SImode))"
5252 [(set_attr "type" "*,*,*,*,load1,store1")
5253 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5254 (set_attr "predicable" "yes")
5255 (set_attr "pool_range" "*,*,*,*,4096,*")
5256 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split (header elided): immediates not encodable as mov/mvn are
;; expanded by arm_split_constant.
5260 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5261 (match_operand:SI 1 "const_int_operand" ""))]
5263 && (!(const_ok_for_arm (INTVAL (operands[1]))
5264 || const_ok_for_arm (~INTVAL (operands[1]))))"
5265 [(clobber (const_int 0))]
5267 arm_split_constant (SET, SImode, NULL_RTX,
5268 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move.
5273 (define_insn "*thumb1_movsi_insn"
5274 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5275 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5277 && ( register_operand (operands[0], SImode)
5278 || register_operand (operands[1], SImode))"
5289 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5290 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5291 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5292 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Split (header elided): Thumb-1 'J' constants (negatable) become
;; mov of the negation followed by neg.
5295 [(set (match_operand:SI 0 "register_operand" "")
5296 (match_operand:SI 1 "const_int_operand" ""))]
5297 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5298 [(set (match_dup 2) (match_dup 1))
5299 (set (match_dup 0) (neg:SI (match_dup 2)))]
5302 operands[1] = GEN_INT (- INTVAL (operands[1]));
5303 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Split (header elided): Thumb-1 'K' constants (a byte shifted left)
;; become mov of the byte followed by a left shift; the loop below
;; finds the smallest shift that reproduces the value.
5308 [(set (match_operand:SI 0 "register_operand" "")
5309 (match_operand:SI 1 "const_int_operand" ""))]
5310 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5311 [(set (match_dup 2) (match_dup 1))
5312 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5315 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5316 unsigned HOST_WIDE_INT mask = 0xff;
5319 for (i = 0; i < 25; i++)
5320 if ((val & (mask << i)) == val)
5323 /* Don't split if the shift is zero. */
5327 operands[1] = GEN_INT (val >> i);
5328 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5329 operands[3] = GEN_INT (i);
5333 ;; When generating pic, we need to load the symbol offset into a register.
5334 ;; So that the optimizer does not confuse this with a normal symbol load
5335 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5336 ;; since that is the only type of relocation we can use.
5338 ;; Wrap calculation of the whole PIC address in a single pattern for the
5339 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5340 ;; a PIC address involves two loads from memory, so we want to CSE it
5341 ;; as often as possible.
5342 ;; This pattern will be split into one of the pic_load_addr_* patterns
5343 ;; and a move after GCSE optimizations.
5345 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5346 (define_expand "calculate_pic_address"
5347 [(set (match_operand:SI 0 "register_operand" "")
5348 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5349 (unspec:SI [(match_operand:SI 2 "" "")]
5354 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5356 [(set (match_operand:SI 0 "register_operand" "")
5357 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5358 (unspec:SI [(match_operand:SI 2 "" "")]
5361 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5362 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5363 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5366 ;; The rather odd constraints on the following are to force reload to leave
5367 ;; the insn alone, and to force the minipool generation pass to then move
5368 ;; the GOT symbol to memory.
;; pic_load_addr_32bit: load the UNSPEC_PIC_SYM symbol value from the
;; literal pool into a core register (ARM/Thumb-2, PIC only).
;; Attributes: behaves as a single load; pool_range 4096, with
;; neg_pool_range selected per is_thumb (tail of if_then_else elided here).
5370 (define_insn "pic_load_addr_32bit"
5371 [(set (match_operand:SI 0 "s_register_operand" "=r")
5372 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5373 "TARGET_32BIT && flag_pic"
5375 [(set_attr "type" "load1")
5376 (set_attr "pool_range" "4096")
5377 (set (attr "neg_pool_range")
5378 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: destination restricted to a low register ("=l"),
;; smaller literal-pool range (1024).
5383 (define_insn "pic_load_addr_thumb1"
5384 [(set (match_operand:SI 0 "s_register_operand" "=l")
5385 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5386 "TARGET_THUMB1 && flag_pic"
5388 [(set_attr "type" "load1")
5389 (set (attr "pool_range") (const_int 1024))]
;; pic_add_dot_plus_four: emit the local label "LPIC<n>" (n = operand 2)
;; then "add %0, pc" — folds the pc-relative PIC base into operand 0.
;; Operand 1 is tied to operand 0 ("0" constraint).  2-byte (Thumb) encoding.
5392 (define_insn "pic_add_dot_plus_four"
5393 [(set (match_operand:SI 0 "register_operand" "=r")
5394 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5396 (match_operand 2 "" "")]
5400 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5401 INTVAL (operands[2]));
5402 return \"add\\t%0, %|pc\";
5404 [(set_attr "length" "2")]
;; ARM-state counterpart: emits "LPIC<n>:" then a predicable
;; "add %0, pc, %1" (three-operand form, operand 1 not tied).
5407 (define_insn "pic_add_dot_plus_eight"
5408 [(set (match_operand:SI 0 "register_operand" "=r")
5409 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5411 (match_operand 2 "" "")]
5415 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5416 INTVAL (operands[2]));
5417 return \"add%?\\t%0, %|pc, %1\";
5419 [(set_attr "predicable" "yes")]
;; tls_load_dot_plus_eight: like pic_add_dot_plus_eight but performs the
;; dependent load in the same insn: "ldr %0, [pc, %1]" after label LPIC<n>.
5422 (define_insn "tls_load_dot_plus_eight"
5423 [(set (match_operand:SI 0 "register_operand" "=r")
5424 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5426 (match_operand 2 "" "")]
5430 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5431 INTVAL (operands[2]));
5432 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5434 [(set_attr "predicable" "yes")]
5437 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5438 ;; followed by a load. These sequences can be crunched down to
5439 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole: requires ARM state and that the intermediate address register
;; (operand 0) is dead after the load, so the pair can be fused.
5442 [(set (match_operand:SI 0 "register_operand" "")
5443 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5445 (match_operand 1 "" "")]
5447 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5448 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5450 (mem:SI (unspec:SI [(match_dup 3)
;; pic_offset_arm: VxWorks RTP PIC load — base register plus an
;; UNSPEC_PIC_OFFSET wrapped offset, emitted as a single ldr.
5457 (define_insn "pic_offset_arm"
5458 [(set (match_operand:SI 0 "register_operand" "=r")
5459 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5460 (unspec:SI [(match_operand:SI 2 "" "X")]
5461 UNSPEC_PIC_OFFSET))))]
5462 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5463 "ldr%?\\t%0, [%1,%2]"
5464 [(set_attr "type" "load1")]
;; builtin_setjmp_receiver: at the receiver label, reload the PIC register
;; if one is in use.  r3 (mask 1UL << 3) is used as the scratch because
;; set/longjmp already clobber it (per the comment in the body).
5467 (define_expand "builtin_setjmp_receiver"
5468 [(label_ref (match_operand 0 "" ""))]
5472 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5474 if (arm_pic_register != INVALID_REGNUM)
5475 arm_load_pic_register (1UL << 3);
5479 ;; If copying one reg to another we can set the condition codes according to
5480 ;; its value. Such a move is common after a return from subroutine and the
5481 ;; result is being tested against zero.
;; *movsi_compare0: reg-to-reg SImode move that also sets CC from the
;; moved value ("conds" = set); alternative 0 ties input to output.
5483 (define_insn "*movsi_compare0"
5484 [(set (reg:CC CC_REGNUM)
5485 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5487 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5493 [(set_attr "conds" "set")]
5496 ;; Subroutine to store a half word from a register into memory.
5497 ;; Operand 0 is the source register (HImode)
5498 ;; Operand 1 is the destination address in a register (SImode)
5500 ;; In both this routine and the next, we must be careful not to spill
5501 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5502 ;; can generate unrecognizable rtl.
;; storehi (little-endian order): store low byte at offset 0, shift the
;; source right by 8 into a scratch (operand 2), store the high byte at
;; offset 1.  Non-constant PLUS addresses are forced into a register first.
5504 (define_expand "storehi"
5505 [;; store the low byte
5506 (set (match_operand 1 "" "") (match_dup 3))
5507 ;; extract the high byte
5509 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5510 ;; store the high byte
5511 (set (match_dup 4) (match_dup 5))]
5515 rtx op1 = operands[1];
5516 rtx addr = XEXP (op1, 0);
5517 enum rtx_code code = GET_CODE (addr);
5519 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5521 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5523 operands[4] = adjust_address (op1, QImode, 1);
5524 operands[1] = adjust_address (operands[1], QImode, 0);
5525 operands[3] = gen_lowpart (QImode, operands[0]);
5526 operands[0] = gen_lowpart (SImode, operands[0]);
5527 operands[2] = gen_reg_rtx (SImode);
5528 operands[5] = gen_lowpart (QImode, operands[2]);
;; storehi_bigend: same byte-store decomposition with the stores in the
;; big-endian order; preparation mirrors storehi.
5532 (define_expand "storehi_bigend"
5533 [(set (match_dup 4) (match_dup 3))
5535 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5536 (set (match_operand 1 "" "") (match_dup 5))]
5540 rtx op1 = operands[1];
5541 rtx addr = XEXP (op1, 0);
5542 enum rtx_code code = GET_CODE (addr);
5544 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5546 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5548 operands[4] = adjust_address (op1, QImode, 1);
5549 operands[1] = adjust_address (operands[1], QImode, 0);
5550 operands[3] = gen_lowpart (QImode, operands[0]);
5551 operands[0] = gen_lowpart (SImode, operands[0]);
5552 operands[2] = gen_reg_rtx (SImode);
5553 operands[5] = gen_lowpart (QImode, operands[2]);
5557 ;; Subroutine to store a half word integer constant into memory.
;; storeinthi: split a CONST_INT halfword store into two byte stores,
;; materializing each byte in an SImode register.  When both bytes are
;; equal, the same register is reused for both stores.  The byte order
;; of the two movsi loads is swapped for BYTES_BIG_ENDIAN.
5558 (define_expand "storeinthi"
5559 [(set (match_operand 0 "" "")
5560 (match_operand 1 "" ""))
5561 (set (match_dup 3) (match_dup 2))]
5565 HOST_WIDE_INT value = INTVAL (operands[1]);
5566 rtx addr = XEXP (operands[0], 0);
5567 rtx op0 = operands[0];
5568 enum rtx_code code = GET_CODE (addr);
5570 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5572 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5574 operands[1] = gen_reg_rtx (SImode);
5575 if (BYTES_BIG_ENDIAN)
5577 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5578 if ((value & 255) == ((value >> 8) & 255))
5579 operands[2] = operands[1];
5582 operands[2] = gen_reg_rtx (SImode);
5583 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5588 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5589 if ((value & 255) == ((value >> 8) & 255))
5590 operands[2] = operands[1];
5593 operands[2] = gen_reg_rtx (SImode);
5594 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5598 operands[3] = adjust_address (op0, QImode, 1);
5599 operands[0] = adjust_address (operands[0], QImode, 0);
5600 operands[2] = gen_lowpart (QImode, operands[2]);
5601 operands[1] = gen_lowpart (QImode, operands[1]);
;; storehi_single_op: on 32-bit targets with ARMv4's strh, a halfword
;; store needs no byte decomposition — just force the source to a register.
5605 (define_expand "storehi_single_op"
5606 [(set (match_operand:HI 0 "memory_operand" "")
5607 (match_operand:HI 1 "general_operand" ""))]
5608 "TARGET_32BIT && arm_arch4"
5610 if (!s_register_operand (operands[1], HImode))
5611 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; movhi: main HImode move expander.  Dispatches by target:
;;  - 32-bit/ARM path: stores to memory go through storehi_single_op
;;    (ARMv4 strh), storeinthi (constant source), or the byte-wise
;;    storehi/storehi_bigend expanders; constant loads are sign-extended
;;    (or top-bit-filled, whichever const_ok_for_arm accepts) into an
;;    SImode register; pre-ARMv4 memory loads use an aligned SImode load
;;    plus shift, or movhi_bytes as a fallback.
;;  - TARGET_THUMB2: everything except mem=mem / mem=const is direct;
;;    constants are zero-extended into an SImode register.
;;  - TARGET_THUMB1: constants go via movsi; invalid addresses (SP- or
;;    virtual-register-relative, per the ??? comments) are legitimized by
;;    copying the address to a register; loads prefer zero_extendhisi2
;;    when optimizing.
;; During reload (!can_create_pseudo_p): large constants are moved as
;; SImode into an SImode subreg of the (register) destination.
5615 (define_expand "movhi"
5616 [(set (match_operand:HI 0 "general_operand" "")
5617 (match_operand:HI 1 "general_operand" ""))]
5622 if (can_create_pseudo_p ())
5624 if (GET_CODE (operands[0]) == MEM)
5628 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5631 if (GET_CODE (operands[1]) == CONST_INT)
5632 emit_insn (gen_storeinthi (operands[0], operands[1]));
5635 if (GET_CODE (operands[1]) == MEM)
5636 operands[1] = force_reg (HImode, operands[1]);
5637 if (BYTES_BIG_ENDIAN)
5638 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5640 emit_insn (gen_storehi (operands[1], operands[0]));
5644 /* Sign extend a constant, and keep it in an SImode reg. */
5645 else if (GET_CODE (operands[1]) == CONST_INT)
5647 rtx reg = gen_reg_rtx (SImode);
5648 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5650 /* If the constant is already valid, leave it alone. */
5651 if (!const_ok_for_arm (val))
5653 /* If setting all the top bits will make the constant
5654 loadable in a single instruction, then set them.
5655 Otherwise, sign extend the number. */
5657 if (const_ok_for_arm (~(val | ~0xffff)))
5659 else if (val & 0x8000)
5663 emit_insn (gen_movsi (reg, GEN_INT (val)));
5664 operands[1] = gen_lowpart (HImode, reg);
5666 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5667 && GET_CODE (operands[1]) == MEM)
5669 rtx reg = gen_reg_rtx (SImode);
5671 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5672 operands[1] = gen_lowpart (HImode, reg);
5674 else if (!arm_arch4)
5676 if (GET_CODE (operands[1]) == MEM)
5679 rtx offset = const0_rtx;
5680 rtx reg = gen_reg_rtx (SImode);
5682 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5683 || (GET_CODE (base) == PLUS
5684 && (GET_CODE (offset = XEXP (base, 1))
5686 && ((INTVAL(offset) & 1) != 1)
5687 && GET_CODE (base = XEXP (base, 0)) == REG))
5688 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5692 new_rtx = widen_memory_access (operands[1], SImode,
5693 ((INTVAL (offset) & ~3)
5694 - INTVAL (offset)));
5695 emit_insn (gen_movsi (reg, new_rtx));
5696 if (((INTVAL (offset) & 2) != 0)
5697 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5699 rtx reg2 = gen_reg_rtx (SImode);
5701 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5706 emit_insn (gen_movhi_bytes (reg, operands[1]));
5708 operands[1] = gen_lowpart (HImode, reg);
5712 /* Handle loading a large integer during reload. */
5713 else if (GET_CODE (operands[1]) == CONST_INT
5714 && !const_ok_for_arm (INTVAL (operands[1]))
5715 && !const_ok_for_arm (~INTVAL (operands[1])))
5717 /* Writing a constant to memory needs a scratch, which should
5718 be handled with SECONDARY_RELOADs. */
5719 gcc_assert (GET_CODE (operands[0]) == REG);
5721 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5722 emit_insn (gen_movsi (operands[0], operands[1]));
5726 else if (TARGET_THUMB2)
5728 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5729 if (can_create_pseudo_p ())
5731 if (GET_CODE (operands[0]) != REG)
5732 operands[1] = force_reg (HImode, operands[1]);
5733 /* Zero extend a constant, and keep it in an SImode reg. */
5734 else if (GET_CODE (operands[1]) == CONST_INT)
5736 rtx reg = gen_reg_rtx (SImode);
5737 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5739 emit_insn (gen_movsi (reg, GEN_INT (val)));
5740 operands[1] = gen_lowpart (HImode, reg);
5744 else /* TARGET_THUMB1 */
5746 if (can_create_pseudo_p ())
5748 if (GET_CODE (operands[1]) == CONST_INT)
5750 rtx reg = gen_reg_rtx (SImode);
5752 emit_insn (gen_movsi (reg, operands[1]));
5753 operands[1] = gen_lowpart (HImode, reg);
5756 /* ??? We shouldn't really get invalid addresses here, but this can
5757 happen if we are passed a SP (never OK for HImode/QImode) or
5758 virtual register (also rejected as illegitimate for HImode/QImode)
5759 relative address. */
5760 /* ??? This should perhaps be fixed elsewhere, for instance, in
5761 fixup_stack_1, by checking for other kinds of invalid addresses,
5762 e.g. a bare reference to a virtual register. This may confuse the
5763 alpha though, which must handle this case differently. */
5764 if (GET_CODE (operands[0]) == MEM
5765 && !memory_address_p (GET_MODE (operands[0]),
5766 XEXP (operands[0], 0)))
5768 = replace_equiv_address (operands[0],
5769 copy_to_reg (XEXP (operands[0], 0)));
5771 if (GET_CODE (operands[1]) == MEM
5772 && !memory_address_p (GET_MODE (operands[1]),
5773 XEXP (operands[1], 0)))
5775 = replace_equiv_address (operands[1],
5776 copy_to_reg (XEXP (operands[1], 0)));
5778 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5780 rtx reg = gen_reg_rtx (SImode);
5782 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5783 operands[1] = gen_lowpart (HImode, reg);
5786 if (GET_CODE (operands[0]) == MEM)
5787 operands[1] = force_reg (HImode, operands[1]);
5789 else if (GET_CODE (operands[1]) == CONST_INT
5790 && !satisfies_constraint_I (operands[1]))
5792 /* Handle loading a large integer during reload. */
5794 /* Writing a constant to memory needs a scratch, which should
5795 be handled with SECONDARY_RELOADs. */
5796 gcc_assert (GET_CODE (operands[0]) == REG);
5798 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5799 emit_insn (gen_movsi (operands[0], operands[1]));
;; *thumb1_movhi_insn: Thumb-1 HImode moves.  Alternatives: reg-reg via
;; "add %0, %1, #0"; load (ldrh) with special handling when SP has been
;; taken as the index register (move SP to the destination first, then
;; rewrite the address to use it); strh store; and mov forms for
;; high-reg and immediate cases.
5806 (define_insn "*thumb1_movhi_insn"
5807 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5808 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5810 && ( register_operand (operands[0], HImode)
5811 || register_operand (operands[1], HImode))"
5813 switch (which_alternative)
5815 case 0: return \"add %0, %1, #0\";
5816 case 2: return \"strh %1, %0\";
5817 case 3: return \"mov %0, %1\";
5818 case 4: return \"mov %0, %1\";
5819 case 5: return \"mov %0, %1\";
5820 default: gcc_unreachable ();
5822 /* The stack pointer can end up being taken as an index register.
5823 Catch this case here and deal with it. */
5824 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5825 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5826 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5829 ops[0] = operands[0];
5830 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5832 output_asm_insn (\"mov %0, %1\", ops);
5834 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5837 return \"ldrh %0, %1\";
5839 [(set_attr "length" "2,4,2,2,2,2")
5840 (set_attr "type" "*,load1,store1,*,*,*")
5841 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; movhi_bytes: assemble an HImode load from two QImode loads — used when
;; no halfword load instruction is available.  The two zero-extended bytes
;; are combined with (high << 8) | low; operands 4/5 select which scratch
;; holds the high byte, swapped according to BYTES_BIG_ENDIAN.
5844 (define_expand "movhi_bytes"
5845 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5847 (zero_extend:SI (match_dup 6)))
5848 (set (match_operand:SI 0 "" "")
5849 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5854 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5856 mem1 = change_address (operands[1], QImode, addr);
5857 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5858 operands[0] = gen_lowpart (SImode, operands[0]);
5860 operands[2] = gen_reg_rtx (SImode);
5861 operands[3] = gen_reg_rtx (SImode);
5864 if (BYTES_BIG_ENDIAN)
5866 operands[4] = operands[2];
5867 operands[5] = operands[3];
5871 operands[4] = operands[3];
5872 operands[5] = operands[2];
;; movhi_bigend: big-endian HImode load via an SImode rotate and an
;; arithmetic shift right by 16, then take the low HImode part.
5877 (define_expand "movhi_bigend"
5879 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5882 (ashiftrt:SI (match_dup 2) (const_int 16)))
5883 (set (match_operand:HI 0 "s_register_operand" "")
5887 operands[2] = gen_reg_rtx (SImode);
5888 operands[3] = gen_reg_rtx (SImode);
5889 operands[4] = gen_lowpart (HImode, operands[3]);
5893 ;; Pattern to recognize insn generated default case above
;; *movhi_insn_arch4: HImode moves using ARMv4 strh/ldrh plus mov/mvn for
;; immediates.  Literal-pool load range 256 / neg 244 for the ldrh form.
5894 (define_insn "*movhi_insn_arch4"
5895 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5896 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
5899 && (register_operand (operands[0], HImode)
5900 || register_operand (operands[1], HImode))"
5902 mov%?\\t%0, %1\\t%@ movhi
5903 mvn%?\\t%0, #%B1\\t%@ movhi
5904 str%(h%)\\t%1, %0\\t%@ movhi
5905 ldr%(h%)\\t%0, %1\\t%@ movhi"
5906 [(set_attr "type" "*,*,store1,load1")
5907 (set_attr "predicable" "yes")
5908 (set_attr "insn" "mov,mvn,*,*")
5909 (set_attr "pool_range" "*,*,*,256")
5910 (set_attr "neg_pool_range" "*,*,*,244")]
;; *movhi_bytes: register/immediate-only HImode move (mov or mvn);
;; memory alternatives are intentionally absent.
5913 (define_insn "*movhi_bytes"
5914 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5915 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5918 mov%?\\t%0, %1\\t%@ movhi
5919 mvn%?\\t%0, #%B1\\t%@ movhi"
5920 [(set_attr "predicable" "yes")
5921 (set_attr "insn" "mov,mvn")]
;; thumb_movhi_clobber: HImode store with a DImode scratch clobber.
;; Emits a plain movhi when the address is already strict and the source
;; is a low register; other cases are unfinished (see XXX below).
5924 (define_expand "thumb_movhi_clobber"
5925 [(set (match_operand:HI 0 "memory_operand" "")
5926 (match_operand:HI 1 "register_operand" ""))
5927 (clobber (match_operand:DI 2 "register_operand" ""))]
5930 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5931 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5933 emit_insn (gen_movhi (operands[0], operands[1]));
5936 /* XXX Fixme, need to handle other cases here as well. */
5941 ;; We use a DImode scratch because we may occasionally need an additional
5942 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5943 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; reload_outhi: secondary reload for storing HImode to a non-offsettable
;; address; dispatches to arm_reload_out_hi or thumb_reload_out_hi.
5944 (define_expand "reload_outhi"
5945 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5946 (match_operand:HI 1 "s_register_operand" "r")
5947 (match_operand:DI 2 "s_register_operand" "=&l")])]
5950 arm_reload_out_hi (operands);
5952 thumb_reload_out_hi (operands);
;; reload_inhi: secondary reload for loading HImode.
;; NOTE(review): the Thumb path calls thumb_reload_out_hi here as well —
;; presumably that helper handles both directions; confirm in arm.c.
5957 (define_expand "reload_inhi"
5958 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5959 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5960 (match_operand:DI 2 "s_register_operand" "=&r")])]
5964 arm_reload_in_hi (operands);
5966 thumb_reload_out_hi (operands);
;; movqi: QImode move expander.  With pseudos available: constants are
;; materialized through an SImode register (masked to 0..255 for Thumb so
;; a movs encoding is more likely), invalid SP/virtual-register-relative
;; addresses are legitimized by copying the address to a register, and
;; loads prefer zero_extendqisi2 when optimizing; mem destinations force
;; the source into a register.  During reload, large Thumb constants are
;; moved as SImode into an SImode subreg of the (register) destination.
5970 (define_expand "movqi"
5971 [(set (match_operand:QI 0 "general_operand" "")
5972 (match_operand:QI 1 "general_operand" ""))]
5975 /* Everything except mem = const or mem = mem can be done easily */
5977 if (can_create_pseudo_p ())
5979 if (GET_CODE (operands[1]) == CONST_INT)
5981 rtx reg = gen_reg_rtx (SImode);
5983 /* For thumb we want an unsigned immediate, then we are more likely
5984 to be able to use a movs insn. */
5986 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5988 emit_insn (gen_movsi (reg, operands[1]));
5989 operands[1] = gen_lowpart (QImode, reg);
5994 /* ??? We shouldn't really get invalid addresses here, but this can
5995 happen if we are passed a SP (never OK for HImode/QImode) or
5996 virtual register (also rejected as illegitimate for HImode/QImode)
5997 relative address. */
5998 /* ??? This should perhaps be fixed elsewhere, for instance, in
5999 fixup_stack_1, by checking for other kinds of invalid addresses,
6000 e.g. a bare reference to a virtual register. This may confuse the
6001 alpha though, which must handle this case differently. */
6002 if (GET_CODE (operands[0]) == MEM
6003 && !memory_address_p (GET_MODE (operands[0]),
6004 XEXP (operands[0], 0)))
6006 = replace_equiv_address (operands[0],
6007 copy_to_reg (XEXP (operands[0], 0)));
6008 if (GET_CODE (operands[1]) == MEM
6009 && !memory_address_p (GET_MODE (operands[1]),
6010 XEXP (operands[1], 0)))
6012 = replace_equiv_address (operands[1],
6013 copy_to_reg (XEXP (operands[1], 0)));
6016 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6018 rtx reg = gen_reg_rtx (SImode);
6020 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6021 operands[1] = gen_lowpart (QImode, reg);
6024 if (GET_CODE (operands[0]) == MEM)
6025 operands[1] = force_reg (QImode, operands[1]);
6027 else if (TARGET_THUMB
6028 && GET_CODE (operands[1]) == CONST_INT
6029 && !satisfies_constraint_I (operands[1]))
6031 /* Handle loading a large integer during reload. */
6033 /* Writing a constant to memory needs a scratch, which should
6034 be handled with SECONDARY_RELOADs. */
6035 gcc_assert (GET_CODE (operands[0]) == REG);
6037 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6038 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn: QImode moves for 32-bit targets; the Uu/l alternatives
;; are Thumb-2-only narrow (2-byte) load/store encodings per the "arch"
;; and "length" attributes.
6045 (define_insn "*arm_movqi_insn"
6046 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6047 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6049 && ( register_operand (operands[0], QImode)
6050 || register_operand (operands[1], QImode))"
6058 [(set_attr "type" "*,*,load1,store1,load1,store1")
6059 (set_attr "insn" "mov,mvn,*,*,*,*")
6060 (set_attr "predicable" "yes")
6061 (set_attr "arch" "any,any,t2,t2,any,any")
6062 (set_attr "length" "4,4,2,2,4,4")]
;; *thumb1_movqi_insn: Thumb-1 QImode moves; 2-byte encodings throughout,
;; literal-pool range 32 for the load alternative.
6065 (define_insn "*thumb1_movqi_insn"
6066 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6067 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6069 && ( register_operand (operands[0], QImode)
6070 || register_operand (operands[1], QImode))"
6078 [(set_attr "length" "2")
6079 (set_attr "type" "*,load1,store1,*,*,*")
6080 (set_attr "insn" "*,*,*,mov,mov,mov")
6081 (set_attr "pool_range" "*,32,*,*,*,*")
6082 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; movhf: HFmode (__fp16) move expander — mem destinations force the
;; source into a register; under Thumb-1 any non-register destination does.
6085 (define_expand "movhf"
6086 [(set (match_operand:HF 0 "general_operand" "")
6087 (match_operand:HF 1 "general_operand" ""))]
6092 if (GET_CODE (operands[0]) == MEM)
6093 operands[1] = force_reg (HFmode, operands[1]);
6095 else /* TARGET_THUMB1 */
6097 if (can_create_pseudo_p ())
6099 if (GET_CODE (operands[0]) != REG)
6100 operands[1] = force_reg (HFmode, operands[1]);
;; *arm32_movhf: __fp16 moves without hardware FP16 support.
;; Constants are emitted as their 16-bit target encoding: a single movw
;; when Thumb-2 is available, otherwise a mov of the high byte plus an
;; orr of the low byte (8-byte sequence, see "length" attribute).
6106 (define_insn "*arm32_movhf"
6107 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6108 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6109 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6110 && ( s_register_operand (operands[0], HFmode)
6111 || s_register_operand (operands[1], HFmode))"
6113 switch (which_alternative)
6115 case 0: /* ARM register from memory */
6116 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6117 case 1: /* memory from ARM register */
6118 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6119 case 2: /* ARM register from ARM register */
6120 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6121 case 3: /* ARM register from constant */
6127 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6128 bits = real_to_target (NULL, &r, HFmode);
6129 ops[0] = operands[0];
6130 ops[1] = GEN_INT (bits);
6131 ops[2] = GEN_INT (bits & 0xff00);
6132 ops[3] = GEN_INT (bits & 0x00ff);
6134 if (arm_arch_thumb2)
6135 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6137 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6144 [(set_attr "conds" "unconditional")
6145 (set_attr "type" "load1,store1,*,*")
6146 (set_attr "insn" "*,*,mov,mov")
6147 (set_attr "length" "4,4,4,8")
6148 (set_attr "predicable" "yes")]
;; *thumb1_movhf: Thumb-1 __fp16 moves.  The load alternative picks a
;; word ldr for constant-pool (label-relative) addresses, ldrh otherwise.
6151 (define_insn "*thumb1_movhf"
6152 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6153 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6155 && ( s_register_operand (operands[0], HFmode)
6156 || s_register_operand (operands[1], HFmode))"
6158 switch (which_alternative)
6163 gcc_assert (GET_CODE(operands[1]) == MEM);
6164 addr = XEXP (operands[1], 0);
6165 if (GET_CODE (addr) == LABEL_REF
6166 || (GET_CODE (addr) == CONST
6167 && GET_CODE (XEXP (addr, 0)) == PLUS
6168 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6169 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6171 /* Constant pool entry. */
6172 return \"ldr\\t%0, %1\";
6174 return \"ldrh\\t%0, %1\";
6176 case 2: return \"strh\\t%1, %0\";
6177 default: return \"mov\\t%0, %1\";
6180 [(set_attr "length" "2")
6181 (set_attr "type" "*,load1,store1,*,*")
6182 (set_attr "insn" "mov,*,*,mov,mov")
6183 (set_attr "pool_range" "*,1020,*,*,*")
6184 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; movsf: SFmode move expander — mem destinations force the source into
;; a register; under Thumb-1 any non-register destination does.
6186 (define_expand "movsf"
6187 [(set (match_operand:SF 0 "general_operand" "")
6188 (match_operand:SF 1 "general_operand" ""))]
6193 if (GET_CODE (operands[0]) == MEM)
6194 operands[1] = force_reg (SFmode, operands[1]);
6196 else /* TARGET_THUMB1 */
6198 if (can_create_pseudo_p ())
6200 if (GET_CODE (operands[0]) != REG)
6201 operands[1] = force_reg (SFmode, operands[1]);
6207 ;; Transform a floating-point move of a constant into a core register into
6208 ;; an SImode operation.
;; Split: rewrite (set SFreg SFconst) as the equivalent SImode move on
;; the low parts; bails out (FAIL path elided) if either lowpart is null.
6210 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6211 (match_operand:SF 1 "immediate_operand" ""))]
6214 && GET_CODE (operands[1]) == CONST_DOUBLE"
6215 [(set (match_dup 2) (match_dup 3))]
6217 operands[2] = gen_lowpart (SImode, operands[0]);
6218 operands[3] = gen_lowpart (SImode, operands[1]);
6219 if (operands[2] == 0 || operands[3] == 0)
;; *arm_movsf_soft_insn: soft-float SFmode moves as plain SImode-width
;; mov / ldr / str; literal-pool range 4096 for the load alternative.
6224 (define_insn "*arm_movsf_soft_insn"
6225 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6226 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6228 && TARGET_SOFT_FLOAT
6229 && (GET_CODE (operands[0]) != MEM
6230 || register_operand (operands[1], SFmode))"
6233 ldr%?\\t%0, %1\\t%@ float
6234 str%?\\t%1, %0\\t%@ float"
6235 [(set_attr "predicable" "yes")
6236 (set_attr "type" "*,load1,store1")
6237 (set_attr "insn" "mov,*,*")
6238 (set_attr "pool_range" "*,4096,*")
6239 (set_attr "arm_neg_pool_range" "*,4084,*")
6240 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6243 ;;; ??? This should have alternatives for constants.
;; *thumb1_movsf_insn: Thumb-1 SFmode moves, including ldmia/stmia (">")
;; alternatives and a 1020-byte literal-pool range for the mF load.
6244 (define_insn "*thumb1_movsf_insn"
6245 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6246 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6248 && ( register_operand (operands[0], SFmode)
6249 || register_operand (operands[1], SFmode))"
6258 [(set_attr "length" "2")
6259 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6260 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6261 (set_attr "insn" "*,*,*,*,*,mov,mov")
6262 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; movdf: DFmode move expander — mem destinations force the source into
;; a register; under Thumb any non-register destination does.
6265 (define_expand "movdf"
6266 [(set (match_operand:DF 0 "general_operand" "")
6267 (match_operand:DF 1 "general_operand" ""))]
6272 if (GET_CODE (operands[0]) == MEM)
6273 operands[1] = force_reg (DFmode, operands[1]);
6275 else /* TARGET_THUMB */
6277 if (can_create_pseudo_p ())
6279 if (GET_CODE (operands[0]) != REG)
6280 operands[1] = force_reg (DFmode, operands[1]);
6286 ;; Reloading a df mode value stored in integer regs to memory can require a
;; reload_outdf: secondary reload for DFmode stores from integer regs.
;; Simple address codes reuse the address directly or fall back to a DImode
;; move; PRE_INC pre-adjusts the base by 8; otherwise the address is
;; computed into the scratch (operand 2) and, for POST_DEC, rewound by 8
;; after the store.
6288 (define_expand "reload_outdf"
6289 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6290 (match_operand:DF 1 "s_register_operand" "r")
6291 (match_operand:SI 2 "s_register_operand" "=&r")]
6295 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6298 operands[2] = XEXP (operands[0], 0);
6299 else if (code == POST_INC || code == PRE_DEC)
6301 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6302 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6303 emit_insn (gen_movdi (operands[0], operands[1]));
6306 else if (code == PRE_INC)
6308 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6310 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6313 else if (code == POST_DEC)
6314 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6316 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6317 XEXP (XEXP (operands[0], 0), 1)));
6319 emit_insn (gen_rtx_SET (VOIDmode,
6320 replace_equiv_address (operands[0], operands[2]),
6323 if (code == POST_DEC)
6324 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; *movdf_soft_insn: soft-float DFmode moves in core register pairs;
;; constant alternatives rDa/Db/Dc take 8/12/16 bytes, memory forms use
;; output_move_double with a 1020-byte literal-pool range.
6330 (define_insn "*movdf_soft_insn"
6331 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6332 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6333 "TARGET_32BIT && TARGET_SOFT_FLOAT
6334 && ( register_operand (operands[0], DFmode)
6335 || register_operand (operands[1], DFmode))"
6337 switch (which_alternative)
6344 return output_move_double (operands);
6347 [(set_attr "length" "8,12,16,8,8")
6348 (set_attr "type" "*,*,*,load2,store2")
6349 (set_attr "pool_range" "*,*,*,1020,*")
6350 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6351 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6354 ;;; ??? This should have alternatives for constants.
6355 ;;; ??? This was originally identical to the movdi_insn pattern.
6356 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6357 ;;; thumb_reorg with a memory reference.
;; *thumb_movdf_insn: Thumb-1 DFmode moves in a register pair (%0/%H0).
;; Register copies order the two halves to cope with overlapping pairs;
;; memory forms use ldmia/stmia or a split str pair.
6358 (define_insn "*thumb_movdf_insn"
6359 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6360 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6362 && ( register_operand (operands[0], DFmode)
6363 || register_operand (operands[1], DFmode))"
6365 switch (which_alternative)
6369 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6370 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6371 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6373 return \"ldmia\\t%1, {%0, %H0}\";
6375 return \"stmia\\t%0, {%1, %H1}\";
6377 return thumb_load_double_from_address (operands);
6379 operands[2] = gen_rtx_MEM (SImode,
6380 plus_constant (XEXP (operands[0], 0), 4));
6381 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6384 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6385 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6386 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6389 [(set_attr "length" "4,2,2,6,4,4")
6390 (set_attr "type" "*,load2,store2,load2,store2,*")
6391 (set_attr "insn" "*,*,*,*,*,mov")
6392 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; movxf: XFmode moves for FPA hardware float; mem destinations force the
;; source into a register.
6395 (define_expand "movxf"
6396 [(set (match_operand:XF 0 "general_operand" "")
6397 (match_operand:XF 1 "general_operand" ""))]
6398 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6400 if (GET_CODE (operands[0]) == MEM
6401 operands[1] = force_reg (XFmode, operands[1]);
6407 ;; load- and store-multiple insns
6408 ;; The arm can load/store any set of registers, provided that they are in
6409 ;; ascending order, but these expanders assume a contiguous set.
;; load_multiple: expand to an ldm-style parallel via
;; arm_gen_load_multiple.  FAILs (elided) unless the count is a
;; CONST_INT in [2,14], op1 is a MEM, op0 a REG, and the contiguous
;; register range stays within r0..r14 (LAST_ARM_REGNUM).
6411 (define_expand "load_multiple"
6412 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6413 (match_operand:SI 1 "" ""))
6414 (use (match_operand:SI 2 "" ""))])]
6417 HOST_WIDE_INT offset = 0;
6419 /* Support only fixed point registers. */
6420 if (GET_CODE (operands[2]) != CONST_INT
6421 || INTVAL (operands[2]) > 14
6422 || INTVAL (operands[2]) < 2
6423 || GET_CODE (operands[1]) != MEM
6424 || GET_CODE (operands[0]) != REG
6425 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6426 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6430 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6431 INTVAL (operands[2]),
6432 force_reg (SImode, XEXP (operands[1], 0)),
6433 FALSE, operands[1], &offset);
;; store_multiple: mirror image of load_multiple (stm-style), with the
;; REG/MEM roles of operands 0 and 1 swapped in the checks.
6436 (define_expand "store_multiple"
6437 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6438 (match_operand:SI 1 "" ""))
6439 (use (match_operand:SI 2 "" ""))])]
6442 HOST_WIDE_INT offset = 0;
6444 /* Support only fixed point registers. */
6445 if (GET_CODE (operands[2]) != CONST_INT
6446 || INTVAL (operands[2]) > 14
6447 || INTVAL (operands[2]) < 2
6448 || GET_CODE (operands[1]) != REG
6449 || GET_CODE (operands[0]) != MEM
6450 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6451 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6455 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6456 INTVAL (operands[2]),
6457 force_reg (SImode, XEXP (operands[0], 0)),
6458 FALSE, operands[0], &offset);
6462 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6463 ;; We could let this apply for blocks of less than this, but it clobbers so
6464 ;; many registers that there is then probably a better way.
;; movmemqi: block-copy expander.  32-bit targets try arm_gen_movmemqi;
;; Thumb-1 requires word alignment (operand 3 == 4) and length <= 48,
;; then uses thumb_expand_movmemqi.
6466 (define_expand "movmemqi"
6467 [(match_operand:BLK 0 "general_operand" "")
6468 (match_operand:BLK 1 "general_operand" "")
6469 (match_operand:SI 2 "const_int_operand" "")
6470 (match_operand:SI 3 "const_int_operand" "")]
6475 if (arm_gen_movmemqi (operands))
6479 else /* TARGET_THUMB1 */
6481 if ( INTVAL (operands[3]) != 4
6482 || INTVAL (operands[2]) > 48)
6485 thumb_expand_movmemqi (operands);
6491 ;; Thumb block-move insns
;; movmem12b: copy three words (12 bytes) with post-incremented source and
;; destination pointers; three low-register scratches; emitted by
;; thumb_output_move_mem_multiple (3, ...).
6493 (define_insn "movmem12b"
6494 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6495 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6496 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6497 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6498 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6499 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6500 (set (match_operand:SI 0 "register_operand" "=l")
6501 (plus:SI (match_dup 2) (const_int 12)))
6502 (set (match_operand:SI 1 "register_operand" "=l")
6503 (plus:SI (match_dup 3) (const_int 12)))
6504 (clobber (match_scratch:SI 4 "=&l"))
6505 (clobber (match_scratch:SI 5 "=&l"))
6506 (clobber (match_scratch:SI 6 "=&l"))]
6508 "* return thumb_output_move_mem_multiple (3, operands);"
6509 [(set_attr "length" "4")
6510 ; This isn't entirely accurate... It loads as well, but in terms of
6511 ; scheduling the following insn it is better to consider it as a store
6512 (set_attr "type" "store3")]
;; movmem8b: two-word (8-byte) variant of the above with two scratches.
6515 (define_insn "movmem8b"
6516 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6517 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6518 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6519 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6520 (set (match_operand:SI 0 "register_operand" "=l")
6521 (plus:SI (match_dup 2) (const_int 8)))
6522 (set (match_operand:SI 1 "register_operand" "=l")
6523 (plus:SI (match_dup 3) (const_int 8)))
6524 (clobber (match_scratch:SI 4 "=&l"))
6525 (clobber (match_scratch:SI 5 "=&l"))]
6527 "* return thumb_output_move_mem_multiple (2, operands);"
6528 [(set_attr "length" "4")
6529 ; This isn't entirely accurate... It loads as well, but in terms of
6530 ; scheduling the following insn it is better to consider it as a store
6531 (set_attr "type" "store2")]
6536 ;; Compare & branch insns
6537 ;; The range calculations are based as follows:
6538 ;; For forward branches, the address calculation returns the address of
6539 ;; the next instruction. This is 2 beyond the branch instruction.
6540 ;; For backward branches, the address calculation returns the address of
6541 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6542 ;; instruction for the shortest sequence, and 4 before the branch instruction
6543 ;; if we have to jump around an unconditional branch.
6544 ;; To the basic branch range the PC offset must be added (this is +4).
6545 ;; So for forward branches we have
6546 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6547 ;; And for backward branches we have
6548 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6550 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6551 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Expander for SImode compare-and-branch, valid for both Thumb-1 and
;; 32-bit targets.  For 32-bit it legitimizes operand 2 as an add-operand
;; and defers to cbranch_cc; for Thumb-1 a negatable constant goes through
;; cbranchsi4_scratch, otherwise operand 2 is forced into a register if it
;; is not a valid thumb1_cmp_operand.
;; NOTE(review): several C-body lines (braces, TARGET_32BIT test, DONE)
;; are missing from this extract -- confirm against the full file.
6553 (define_expand "cbranchsi4"
6554 [(set (pc) (if_then_else
6555 (match_operator 0 "arm_comparison_operator"
6556 [(match_operand:SI 1 "s_register_operand" "")
6557 (match_operand:SI 2 "nonmemory_operand" "")])
6558 (label_ref (match_operand 3 "" ""))
6560 "TARGET_THUMB1 || TARGET_32BIT"
6564 if (!arm_add_operand (operands[2], SImode))
6565 operands[2] = force_reg (SImode, operands[2]);
6566 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6570 if (thumb1_cmpneg_operand (operands[2], SImode))
6572 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6573 operands[3], operands[0]));
6576 if (!thumb1_cmp_operand (operands[2], SImode))
6577 operands[2] = force_reg (SImode, operands[2]);
6580 ;; A pattern to recognize a special situation and optimize for it.
6581 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6582 ;; due to the available addressing modes. Hence, convert a signed comparison
6583 ;; with zero into an unsigned comparison with 127 if possible.
;; QImode compare-and-branch against zero: rewrites a signed GE/LT test of
;; a memory byte into an unsigned LEU/GTU test against 127 on the
;; zero-extended value, since Thumb zero-extending loads have better
;; addressing modes than sign-extending ones.
6584 (define_expand "cbranchqi4"
6585 [(set (pc) (if_then_else
6586 (match_operator 0 "lt_ge_comparison_operator"
6587 [(match_operand:QI 1 "memory_operand" "")
6588 (match_operand:QI 2 "const0_operand" "")])
6589 (label_ref (match_operand 3 "" ""))
6594 xops[1] = gen_reg_rtx (SImode);
6595 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6596 xops[2] = GEN_INT (127);
6597 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6598 VOIDmode, xops[1], xops[2]);
6599 xops[3] = operands[3];
6600 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch for hard-float 32-bit targets; simply defers
;; to the generic cbranch_cc expander.
6604 (define_expand "cbranchsf4"
6605 [(set (pc) (if_then_else
6606 (match_operator 0 "arm_comparison_operator"
6607 [(match_operand:SF 1 "s_register_operand" "")
6608 (match_operand:SF 2 "arm_float_compare_operand" "")])
6609 (label_ref (match_operand 3 "" ""))
6611 "TARGET_32BIT && TARGET_HARD_FLOAT"
6612 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6613 operands[3])); DONE;"
;; DFmode compare-and-branch; like cbranchsf4 but additionally disabled
;; on single-precision-only VFP (!TARGET_VFP_SINGLE).
6616 (define_expand "cbranchdf4"
6617 [(set (pc) (if_then_else
6618 (match_operator 0 "arm_comparison_operator"
6619 [(match_operand:DF 1 "s_register_operand" "")
6620 (match_operand:DF 2 "arm_float_compare_operand" "")])
6621 (label_ref (match_operand 3 "" ""))
6623 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6624 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6625 operands[3])); DONE;"
;; DImode compare-and-branch.  GT/LE/GTU/LEU are not handled directly by
;; arm_gen_compare_reg, so they are flipped into the mirror comparison
;; (LT/GE/LTU/GEU) with the operands swapped before calling cbranch_cc.
;; NOTE(review): the switch-statement skeleton around the gen_rtx_* calls
;; has been lost in this extract -- confirm against the full file.
6628 (define_expand "cbranchdi4"
6629 [(set (pc) (if_then_else
6630 (match_operator 0 "arm_comparison_operator"
6631 [(match_operand:DI 1 "cmpdi_operand" "")
6632 (match_operand:DI 2 "cmpdi_operand" "")])
6633 (label_ref (match_operand 3 "" ""))
6637 rtx swap = NULL_RTX;
6638 enum rtx_code code = GET_CODE (operands[0]);
6640 /* We should not have two constants. */
6641 gcc_assert (GET_MODE (operands[1]) == DImode
6642 || GET_MODE (operands[2]) == DImode);
6644 /* Flip unimplemented DImode comparisons to a form that
6645 arm_gen_compare_reg can handle. */
6649 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6651 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6653 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6655 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6660 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6663 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; Thumb-1 compare-and-branch insn.  The output code skips re-emitting
;; "cmp %1, %2" when the recorded condition-code state
;; (cfun->machine->thumb1_cc_*) already matches this comparison; otherwise
;; it emits the cmp and records the new state.  The branch form is chosen
;; from the computed length: 4 = short b<cond>, 6 = inverted b<cond>
;; around an unconditional b (long jump), otherwise bl (far jump).
;; Length/far_jump attributes follow the branch-range table above.
6669 (define_insn "cbranchsi4_insn"
6670 [(set (pc) (if_then_else
6671 (match_operator 0 "arm_comparison_operator"
6672 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6673 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6674 (label_ref (match_operand 3 "" ""))
6678 rtx t = cfun->machine->thumb1_cc_insn;
6681 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6682 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6684 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6686 if (!noov_comparison_operator (operands[0], VOIDmode))
6689 else if (cfun->machine->thumb1_cc_mode != CCmode)
6694 output_asm_insn ("cmp\t%1, %2", operands);
6695 cfun->machine->thumb1_cc_insn = insn;
6696 cfun->machine->thumb1_cc_op0 = operands[1];
6697 cfun->machine->thumb1_cc_op1 = operands[2];
6698 cfun->machine->thumb1_cc_mode = CCmode;
6701 /* Ensure we emit the right type of condition code on the jump. */
6702 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6705 switch (get_attr_length (insn))
6707 case 4: return \"b%d0\\t%l3\";
6708 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6709 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6712 [(set (attr "far_jump")
6714 (eq_attr "length" "8")
6715 (const_string "yes")
6716 (const_string "no")))
6717 (set (attr "length")
6719 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6720 (le (minus (match_dup 3) (pc)) (const_int 256)))
6723 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6724 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 compare-and-branch against a negatable constant: sets the flags
;; with "add %0, %1, #%n2" (adding the negated constant into a scratch low
;; register) instead of a cmp, then branches as in cbranchsi4_insn
;; (short / long / far forms chosen by computed length).
6729 (define_insn "cbranchsi4_scratch"
6730 [(set (pc) (if_then_else
6731 (match_operator 4 "arm_comparison_operator"
6732 [(match_operand:SI 1 "s_register_operand" "l,0")
6733 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6734 (label_ref (match_operand 3 "" ""))
6736 (clobber (match_scratch:SI 0 "=l,l"))]
6739 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6741 switch (get_attr_length (insn))
6743 case 4: return \"b%d4\\t%l3\";
6744 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6745 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6748 [(set (attr "far_jump")
6750 (eq_attr "length" "8")
6751 (const_string "yes")
6752 (const_string "no")))
6753 (set (attr "length")
6755 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6756 (le (minus (match_dup 3) (pc)) (const_int 256)))
6759 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6760 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6765 ;; Two peepholes to generate subtract of 0 instead of a move if the
6766 ;; condition codes will be useful.
;; Peephole: replace "mov %0, %1" followed by a branch comparing %1 with 0
;; by "sub %0, %1, #0" plus a branch on %0, so the move itself sets the
;; condition codes and the separate compare can be dropped.
;; NOTE(review): the define_peephole2 header line is missing from this
;; extract -- confirm against the full file.
6768 [(set (match_operand:SI 0 "low_register_operand" "")
6769 (match_operand:SI 1 "low_register_operand" ""))
6771 (if_then_else (match_operator 2 "arm_comparison_operator"
6772 [(match_dup 1) (const_int 0)])
6773 (label_ref (match_operand 3 "" ""))
6776 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6778 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6779 (label_ref (match_dup 3))
6783 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6784 ;; merge cases like this because the op1 is a hard register in
6785 ;; arm_class_likely_spilled_p.
;; Variant of the preceding peephole where the branch compares the move's
;; destination (%0) rather than its source; needed because combine misses
;; this form when op1 is a hard register (see comment above).
6787 [(set (match_operand:SI 0 "low_register_operand" "")
6788 (match_operand:SI 1 "low_register_operand" ""))
6790 (if_then_else (match_operator 2 "arm_comparison_operator"
6791 [(match_dup 0) (const_int 0)])
6792 (label_ref (match_operand 3 "" ""))
6795 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6797 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6798 (label_ref (match_dup 3))
;; Thumb equality branch against a negated register: emits "cmn %1, %2"
;; (compare-negative, i.e. %1 against -%2) and then the usual
;; short/long/far branch selected by the computed length.
6802 (define_insn "*negated_cbranchsi4"
6805 (match_operator 0 "equality_operator"
6806 [(match_operand:SI 1 "s_register_operand" "l")
6807 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6808 (label_ref (match_operand 3 "" ""))
6812 output_asm_insn (\"cmn\\t%1, %2\", operands);
6813 switch (get_attr_length (insn))
6815 case 4: return \"b%d0\\t%l3\";
6816 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6817 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6820 [(set (attr "far_jump")
6822 (eq_attr "length" "8")
6823 (const_string "yes")
6824 (const_string "no")))
6825 (set (attr "length")
6827 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6828 (le (minus (match_dup 3) (pc)) (const_int 256)))
6831 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6832 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single bit of a register: shifts the tested bit into the
;; sign position with "lsl %4, %1, #(31 - bitpos)" (the N flag is then the
;; bit's value) and branches with the short/long/far scheme.
6837 (define_insn "*tbit_cbranch"
6840 (match_operator 0 "equality_operator"
6841 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6843 (match_operand:SI 2 "const_int_operand" "i"))
6845 (label_ref (match_operand 3 "" ""))
6847 (clobber (match_scratch:SI 4 "=l"))]
6852 op[0] = operands[4];
6853 op[1] = operands[1];
6854 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6856 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6857 switch (get_attr_length (insn))
6859 case 4: return \"b%d0\\t%l3\";
6860 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6861 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6864 [(set (attr "far_jump")
6866 (eq_attr "length" "8")
6867 (const_string "yes")
6868 (const_string "no")))
6869 (set (attr "length")
6871 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6872 (le (minus (match_dup 3) (pc)) (const_int 256)))
6875 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6876 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low N bits of a register: "lsl %4, %1, #(32 - N)" shifts
;; the tested field to the top so the Z flag reflects whether it is zero,
;; then branches with the short/long/far scheme.
6881 (define_insn "*tlobits_cbranch"
6884 (match_operator 0 "equality_operator"
6885 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6886 (match_operand:SI 2 "const_int_operand" "i")
6889 (label_ref (match_operand 3 "" ""))
6891 (clobber (match_scratch:SI 4 "=l"))]
6896 op[0] = operands[4];
6897 op[1] = operands[1];
6898 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6900 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6901 switch (get_attr_length (insn))
6903 case 4: return \"b%d0\\t%l3\";
6904 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6905 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6908 [(set (attr "far_jump")
6910 (eq_attr "length" "8")
6911 (const_string "yes")
6912 (const_string "no")))
6913 (set (attr "length")
6915 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6916 (le (minus (match_dup 3) (pc)) (const_int 256)))
6919 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6920 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (reg AND reg): emits "tst %0, %1" and then the short/long/far
;; branch form chosen by the computed length.
6925 (define_insn "*tstsi3_cbranch"
6928 (match_operator 3 "equality_operator"
6929 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6930 (match_operand:SI 1 "s_register_operand" "l"))
6932 (label_ref (match_operand 2 "" ""))
6937 output_asm_insn (\"tst\\t%0, %1\", operands);
6938 switch (get_attr_length (insn))
6940 case 4: return \"b%d3\\t%l2\";
6941 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6942 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6945 [(set (attr "far_jump")
6947 (eq_attr "length" "8")
6948 (const_string "yes")
6949 (const_string "no")))
6950 (set (attr "length")
6952 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6953 (le (minus (match_dup 2) (pc)) (const_int 256)))
6956 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6957 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Decrement-and-branch: %0 = %2 - 1, then branch on the flags from the
;; subtract.  Four alternatives for the destination: a low reg (direct
;; sub), a high reg (sub into scratch then mov -- mov lo->hi leaves the
;; flags intact), and two memory forms (sub into scratch then str).  The
;; output code rewrites the NE/EQ test of %2 as a test against 1 (cond[0])
;; because it branches on the pre-decrement value.  Branch form follows
;; the usual short/long/far length scheme; ranges shrink by 2 for the
;; alternatives that need an extra mov/str.
6962 (define_insn "*cbranchne_decr1"
6964 (if_then_else (match_operator 3 "equality_operator"
6965 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6967 (label_ref (match_operand 4 "" ""))
6969 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6970 (plus:SI (match_dup 2) (const_int -1)))
6971 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6976 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6978 VOIDmode, operands[2], const1_rtx);
6979 cond[1] = operands[4];
6981 if (which_alternative == 0)
6982 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6983 else if (which_alternative == 1)
6985 /* We must provide an alternative for a hi reg because reload
6986 cannot handle output reloads on a jump instruction, but we
6987 can't subtract into that. Fortunately a mov from lo to hi
6988 does not clobber the condition codes. */
6989 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6990 output_asm_insn (\"mov\\t%0, %1\", operands);
6994 /* Similarly, but the target is memory. */
6995 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6996 output_asm_insn (\"str\\t%1, %0\", operands);
6999 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7002 output_asm_insn (\"b%d0\\t%l1\", cond);
7005 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7006 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7008 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7009 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7013 [(set (attr "far_jump")
7015 (ior (and (eq (symbol_ref ("which_alternative"))
7017 (eq_attr "length" "8"))
7018 (eq_attr "length" "10"))
7019 (const_string "yes")
7020 (const_string "no")))
7021 (set_attr_alternative "length"
7025 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7026 (le (minus (match_dup 4) (pc)) (const_int 256)))
7029 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7030 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7035 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7036 (le (minus (match_dup 4) (pc)) (const_int 256)))
7039 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7040 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7045 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7046 (le (minus (match_dup 4) (pc)) (const_int 256)))
7049 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7050 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7055 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7056 (le (minus (match_dup 4) (pc)) (const_int 256)))
7059 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7060 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Add-and-branch: %0 = %2 + %3 and branch on the flags from the add
;; (restricted to EQ/NE/GE/LT, which the Thumb add sets usefully).  A
;; negative constant is emitted as "sub ..., #%n2".  Six alternatives
;; cover low-reg, high-reg (via mov) and memory (via str) destinations;
;; the final branch uses the short/long/far length scheme, with ranges
;; reduced by 2 when the extra mov/str is needed.
7065 (define_insn "*addsi3_cbranch"
7068 (match_operator 4 "arm_comparison_operator"
7070 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7071 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7073 (label_ref (match_operand 5 "" ""))
7076 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7077 (plus:SI (match_dup 2) (match_dup 3)))
7078 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7080 && (GET_CODE (operands[4]) == EQ
7081 || GET_CODE (operands[4]) == NE
7082 || GET_CODE (operands[4]) == GE
7083 || GET_CODE (operands[4]) == LT)"
7088 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7089 cond[1] = operands[2];
7090 cond[2] = operands[3];
7092 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7093 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7095 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7097 if (which_alternative >= 2
7098 && which_alternative < 4)
7099 output_asm_insn (\"mov\\t%0, %1\", operands);
7100 else if (which_alternative >= 4)
7101 output_asm_insn (\"str\\t%1, %0\", operands);
7103 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7106 return \"b%d4\\t%l5\";
7108 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7110 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7114 [(set (attr "far_jump")
7116 (ior (and (lt (symbol_ref ("which_alternative"))
7118 (eq_attr "length" "8"))
7119 (eq_attr "length" "10"))
7120 (const_string "yes")
7121 (const_string "no")))
7122 (set (attr "length")
7124 (lt (symbol_ref ("which_alternative"))
7127 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7128 (le (minus (match_dup 5) (pc)) (const_int 256)))
7131 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7132 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7136 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7137 (le (minus (match_dup 5) (pc)) (const_int 256)))
7140 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7141 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Add-and-branch where the sum itself is dead: sets the flags via cmp/cmn
;; against the (possibly negated) constant, or via add/sub into a scratch
;; or in place, per alternative; again restricted to EQ/NE/GE/LT, and the
;; branch follows the short/long/far length scheme.
7146 (define_insn "*addsi3_cbranch_scratch"
7149 (match_operator 3 "arm_comparison_operator"
7151 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7152 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7154 (label_ref (match_operand 4 "" ""))
7156 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7158 && (GET_CODE (operands[3]) == EQ
7159 || GET_CODE (operands[3]) == NE
7160 || GET_CODE (operands[3]) == GE
7161 || GET_CODE (operands[3]) == LT)"
7164 switch (which_alternative)
7167 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7170 output_asm_insn (\"cmn\t%1, %2\", operands);
7173 if (INTVAL (operands[2]) < 0)
7174 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7176 output_asm_insn (\"add\t%0, %1, %2\", operands);
7179 if (INTVAL (operands[2]) < 0)
7180 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7182 output_asm_insn (\"add\t%0, %0, %2\", operands);
7186 switch (get_attr_length (insn))
7189 return \"b%d3\\t%l4\";
7191 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7193 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7197 [(set (attr "far_jump")
7199 (eq_attr "length" "8")
7200 (const_string "yes")
7201 (const_string "no")))
7202 (set (attr "length")
7204 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7205 (le (minus (match_dup 4) (pc)) (const_int 256)))
7208 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7209 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7215 ;; Comparison and test insns
;; SImode compare setting CC.  Four alternatives; the first two are 16-bit
;; Thumb-2 encodings (arch t2, length 2), the rest 32-bit (length 4).
;; NOTE(review): the condition string and output templates are missing
;; from this extract -- confirm against the full file.
7217 (define_insn "*arm_cmpsi_insn"
7218 [(set (reg:CC CC_REGNUM)
7219 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7220 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7227 [(set_attr "conds" "set")
7228 (set_attr "arch" "t2,t2,any,any")
7229 (set_attr "length" "2,2,4,4")]
;; Compare a register against a shifted register (shift amount immediate
;; or register); sets CC.  Register shift amounts are ARM-only (arch "a").
7232 (define_insn "*cmpsi_shiftsi"
7233 [(set (reg:CC CC_REGNUM)
7234 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7235 (match_operator:SI 3 "shift_operator"
7236 [(match_operand:SI 1 "s_register_operand" "r,r")
7237 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7240 [(set_attr "conds" "set")
7241 (set_attr "shift" "1")
7242 (set_attr "arch" "32,a")
7243 (set_attr "type" "alu_shift,alu_shift_reg")])
;; As *cmpsi_shiftsi but with the operands swapped, producing a CC_SWP
;; (swapped-operand) condition mode.
7245 (define_insn "*cmpsi_shiftsi_swp"
7246 [(set (reg:CC_SWP CC_REGNUM)
7247 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7248 [(match_operand:SI 1 "s_register_operand" "r,r")
7249 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7250 (match_operand:SI 0 "s_register_operand" "r,r")))]
7253 [(set_attr "conds" "set")
7254 (set_attr "shift" "1")
7255 (set_attr "arch" "32,a")
7256 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare a register against the negation of a shifted register, setting
;; only the Z flag (CC_Z).  Type attribute distinguishes immediate vs
;; register shift amounts.
7258 (define_insn "*arm_cmpsi_negshiftsi_si"
7259 [(set (reg:CC_Z CC_REGNUM)
7261 (neg:SI (match_operator:SI 1 "shift_operator"
7262 [(match_operand:SI 2 "s_register_operand" "r")
7263 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7264 (match_operand:SI 0 "s_register_operand" "r")))]
7267 [(set_attr "conds" "set")
7268 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7269 (const_string "alu_shift")
7270 (const_string "alu_shift_reg")))]
7273 ;; DImode comparisons. The generic code generates branches that
7274 ;; if-conversion can not reduce to a conditional compare, so we do
;; DImode signed compare: "cmp" on the low words then "sbcs" of the high
;; words into a scratch; produces CC_NCV (N, C and V valid).  Disabled
;; when Maverick (Cirrus) hard float handles DImode compares instead.
7277 (define_insn "*arm_cmpdi_insn"
7278 [(set (reg:CC_NCV CC_REGNUM)
7279 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7280 (match_operand:DI 1 "arm_di_operand" "rDi")))
7281 (clobber (match_scratch:SI 2 "=r"))]
7282 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7283 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7284 [(set_attr "conds" "set")
7285 (set_attr "length" "8")]
;; DImode unsigned compare: compare the high words first, then the low
;; words only if the high words were equal; produces CC_CZ (C and Z valid).
7288 (define_insn "*arm_cmpdi_unsigned"
7289 [(set (reg:CC_CZ CC_REGNUM)
7290 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7291 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7293 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7294 [(set_attr "conds" "set")
7295 (set_attr "length" "8")]
;; DImode compare against zero: ORs the two halves into a scratch with the
;; flag-setting orr, so Z reflects whether the whole DI value is zero.
7298 (define_insn "*arm_cmpdi_zero"
7299 [(set (reg:CC_Z CC_REGNUM)
7300 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7302 (clobber (match_scratch:SI 1 "=r"))]
7304 "orr%.\\t%1, %Q0, %R0"
7305 [(set_attr "conds" "set")]
;; Thumb version of the DImode compare-against-zero via orr of the two
;; halves into a low-register scratch (16-bit encoding, length 2).
7308 (define_insn "*thumb_cmpdi_zero"
7309 [(set (reg:CC_Z CC_REGNUM)
7310 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7312 (clobber (match_scratch:SI 1 "=l"))]
7314 "orr\\t%1, %Q0, %R0"
7315 [(set_attr "conds" "set")
7316 (set_attr "length" "2")]
7319 ;; Cirrus SF compare instruction
;; Cirrus Maverick SFmode compare (cfcmps writing the flags via r15).
7320 (define_insn "*cirrus_cmpsf"
7321 [(set (reg:CCFP CC_REGNUM)
7322 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7323 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7324 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7325 "cfcmps%?\\tr15, %V0, %V1"
7326 [(set_attr "type" "mav_farith")
7327 (set_attr "cirrus" "compare")]
7330 ;; Cirrus DF compare instruction
;; Cirrus Maverick DFmode compare (cfcmpd writing the flags via r15).
7331 (define_insn "*cirrus_cmpdf"
7332 [(set (reg:CCFP CC_REGNUM)
7333 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7334 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7335 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7336 "cfcmpd%?\\tr15, %V0, %V1"
7337 [(set_attr "type" "mav_farith")
7338 (set_attr "cirrus" "compare")]
;; Cirrus Maverick 64-bit integer compare (cfcmp64 writing the flags via
;; r15); this is why *arm_cmpdi_insn is disabled under Maverick.
7341 (define_insn "*cirrus_cmpdi"
7342 [(set (reg:CC CC_REGNUM)
7343 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7344 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7345 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7346 "cfcmp64%?\\tr15, %V0, %V1"
7347 [(set_attr "type" "mav_farith")
7348 (set_attr "cirrus" "compare")]
7351 ; This insn allows redundant compares to be removed by cse, nothing should
7352 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7353 ; is deleted later on. The match_dup will match the mode here, so that
7354 ; mode changes of the condition codes aren't lost by this even though we don't
7355 ; specify what they are.
;; Zero-length no-op matching (set cc cc); exists only so CSE can remove
;; redundant compares (see the comment above) -- emits just an asm comment.
7357 (define_insn "*deleted_compare"
7358 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7360 "\\t%@ deleted compare"
7361 [(set_attr "conds" "set")
7362 (set_attr "length" "0")]
7366 ;; Conditional branch insns
;; Generic conditional-branch expander: materializes the comparison into
;; the CC register with arm_gen_compare_reg and rewrites the pattern as a
;; branch on (CC != 0).
7368 (define_expand "cbranch_cc"
7370 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7371 (match_operand 2 "" "")])
7372 (label_ref (match_operand 3 "" ""))
7375 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7376 operands[1], operands[2]);
7377 operands[2] = const0_rtx;"
7381 ;; Patterns to match conditional branch insns.
;; Conditional branch on an already-computed CC register.  Interacts with
;; the ccfsm conditional-execution state machine (arm_ccfsm_state);
;; Thumb-2 in-range branches get the 2-byte encoding via the length attr.
;; NOTE(review): the ccfsm body and length alternatives are truncated in
;; this extract -- confirm against the full file.
7384 (define_insn "*arm_cond_branch"
7386 (if_then_else (match_operator 1 "arm_comparison_operator"
7387 [(match_operand 2 "cc_register" "") (const_int 0)])
7388 (label_ref (match_operand 0 "" ""))
7392 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7394 arm_ccfsm_state += 2;
7397 return \"b%d1\\t%l0\";
7399 [(set_attr "conds" "use")
7400 (set_attr "type" "branch")
7401 (set (attr "length")
7403 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7404 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7405 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As *arm_cond_branch but with the branch on the else-arm, so the
;; condition is emitted inverted (%D1 instead of %d1).
7410 (define_insn "*arm_cond_branch_reversed"
7412 (if_then_else (match_operator 1 "arm_comparison_operator"
7413 [(match_operand 2 "cc_register" "") (const_int 0)])
7415 (label_ref (match_operand 0 "" ""))))]
7418 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7420 arm_ccfsm_state += 2;
7423 return \"b%D1\\t%l0\";
7425 [(set_attr "conds" "use")
7426 (set_attr "type" "branch")
7427 (set (attr "length")
7429 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7430 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7431 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Generic store-flag expander: computes the comparison into CC via
;; arm_gen_compare_reg, then stores the (CC != 0) result into operand 0.
7440 (define_expand "cstore_cc"
7441 [(set (match_operand:SI 0 "s_register_operand" "")
7442 (match_operator:SI 1 "" [(match_operand 2 "" "")
7443 (match_operand 3 "" "")]))]
7445 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7446 operands[2], operands[3]);
7447 operands[3] = const0_rtx;"
;; Store-flag as 0/1: conditional mov #0 (inverted condition) then
;; conditional mov #1 -- two 4-byte insns.
7450 (define_insn "*mov_scc"
7451 [(set (match_operand:SI 0 "s_register_operand" "=r")
7452 (match_operator:SI 1 "arm_comparison_operator"
7453 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7455 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7456 [(set_attr "conds" "use")
7457 (set_attr "insn" "mov")
7458 (set_attr "length" "8")]
;; Store-flag as 0/-1: conditional mov #0, then conditional mvn #0
;; (i.e. -1) when the condition holds.
7461 (define_insn "*mov_negscc"
7462 [(set (match_operand:SI 0 "s_register_operand" "=r")
7463 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7464 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7466 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7467 [(set_attr "conds" "use")
7468 (set_attr "insn" "mov")
7469 (set_attr "length" "8")]
;; Store-flag complemented (~0 / ~1): conditional mov #0 then conditional
;; mvn #1 (i.e. -2) when the condition holds.
7472 (define_insn "*mov_notscc"
7473 [(set (match_operand:SI 0 "s_register_operand" "=r")
7474 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7475 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7477 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7478 [(set_attr "conds" "use")
7479 (set_attr "insn" "mov")
7480 (set_attr "length" "8")]
;; SImode store-flag expander.  32-bit targets go through cstore_cc.  For
;; Thumb-1 each comparison code is open-coded with optab sequences:
;; EQ/NE against 0 use the dedicated cstoresi_eq0/ne0 patterns; EQ/NE
;; against other values subtract first; signed GE/LT against 0 use
;; sign-bit arithmetic; LE/GE/LEU/GEU build on thumb1_addsi3_addgeu;
;; LTU/GTU use cstoresi_ltu_thumb1 (GTU by swapping the operands).
;; NOTE(review): the switch/case skeleton, FAIL paths and closing braces
;; of the C body are truncated in this extract -- confirm against the
;; full file.
7483 (define_expand "cstoresi4"
7484 [(set (match_operand:SI 0 "s_register_operand" "")
7485 (match_operator:SI 1 "arm_comparison_operator"
7486 [(match_operand:SI 2 "s_register_operand" "")
7487 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7488 "TARGET_32BIT || TARGET_THUMB1"
7490 rtx op3, scratch, scratch2;
7494 if (!arm_add_operand (operands[3], SImode))
7495 operands[3] = force_reg (SImode, operands[3]);
7496 emit_insn (gen_cstore_cc (operands[0], operands[1],
7497 operands[2], operands[3]));
7501 if (operands[3] == const0_rtx)
7503 switch (GET_CODE (operands[1]))
7506 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7510 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7514 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7515 NULL_RTX, 0, OPTAB_WIDEN);
7516 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7517 NULL_RTX, 0, OPTAB_WIDEN);
7518 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7519 operands[0], 1, OPTAB_WIDEN);
7523 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7525 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7526 NULL_RTX, 1, OPTAB_WIDEN);
7530 scratch = expand_binop (SImode, ashr_optab, operands[2],
7531 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7532 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7533 NULL_RTX, 0, OPTAB_WIDEN);
7534 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7538 /* LT is handled by generic code. No need for unsigned with 0. */
7545 switch (GET_CODE (operands[1]))
7548 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7549 NULL_RTX, 0, OPTAB_WIDEN);
7550 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7554 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7555 NULL_RTX, 0, OPTAB_WIDEN);
7556 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7560 op3 = force_reg (SImode, operands[3]);
7562 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7563 NULL_RTX, 1, OPTAB_WIDEN);
7564 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7565 NULL_RTX, 0, OPTAB_WIDEN);
7566 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7572 if (!thumb1_cmp_operand (op3, SImode))
7573 op3 = force_reg (SImode, op3);
7574 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7575 NULL_RTX, 0, OPTAB_WIDEN);
7576 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7577 NULL_RTX, 1, OPTAB_WIDEN);
7578 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7583 op3 = force_reg (SImode, operands[3]);
7584 scratch = force_reg (SImode, const0_rtx);
7585 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7591 if (!thumb1_cmp_operand (op3, SImode))
7592 op3 = force_reg (SImode, op3);
7593 scratch = force_reg (SImode, const0_rtx);
7594 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7600 if (!thumb1_cmp_operand (op3, SImode))
7601 op3 = force_reg (SImode, op3);
7602 scratch = gen_reg_rtx (SImode);
7603 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7607 op3 = force_reg (SImode, operands[3]);
7608 scratch = gen_reg_rtx (SImode);
7609 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7612 /* No good sequences for GT, LT. */
;; SFmode store-flag for hard-float 32-bit targets; defers to cstore_cc.
7619 (define_expand "cstoresf4"
7620 [(set (match_operand:SI 0 "s_register_operand" "")
7621 (match_operator:SI 1 "arm_comparison_operator"
7622 [(match_operand:SF 2 "s_register_operand" "")
7623 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7624 "TARGET_32BIT && TARGET_HARD_FLOAT"
7625 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7626 operands[2], operands[3])); DONE;"
;; DFmode store-flag; like cstoresf4 but also requires double-precision
;; VFP (!TARGET_VFP_SINGLE).
7629 (define_expand "cstoredf4"
7630 [(set (match_operand:SI 0 "s_register_operand" "")
7631 (match_operator:SI 1 "arm_comparison_operator"
7632 [(match_operand:DF 2 "s_register_operand" "")
7633 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7634 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7635 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7636 operands[2], operands[3])); DONE;"
;; DImode store-flag.  Mirrors cbranchdi4: GT/LE/GTU/LEU are flipped to
;; the swapped-operand mirror comparison before calling cstore_cc.
;; NOTE(review): the switch-statement skeleton around the gen_rtx_* calls
;; has been lost in this extract -- confirm against the full file.
7639 (define_expand "cstoredi4"
7640 [(set (match_operand:SI 0 "s_register_operand" "")
7641 (match_operator:SI 1 "arm_comparison_operator"
7642 [(match_operand:DI 2 "cmpdi_operand" "")
7643 (match_operand:DI 3 "cmpdi_operand" "")]))]
7646 rtx swap = NULL_RTX;
7647 enum rtx_code code = GET_CODE (operands[1]);
7649 /* We should not have two constants. */
7650 gcc_assert (GET_MODE (operands[2]) == DImode
7651 || GET_MODE (operands[3]) == DImode);
7653 /* Flip unimplemented DImode comparisons to a form that
7654 arm_gen_compare_reg can handle. */
7658 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7660 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7662 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7664 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7669 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7672 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Thumb-1 (x == 0) store-flag expander: allocates the scratch register
;; required by *cstoresi_eq0_thumb1_insn.
7678 (define_expand "cstoresi_eq0_thumb1"
7680 [(set (match_operand:SI 0 "s_register_operand" "")
7681 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7683 (clobber (match_dup:SI 2))])]
7685 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 (x != 0) store-flag expander: allocates the scratch register
;; required by *cstoresi_ne0_thumb1_insn.
7688 (define_expand "cstoresi_ne0_thumb1"
7690 [(set (match_operand:SI 0 "s_register_operand" "")
7691 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7693 (clobber (match_dup:SI 2))])]
7695 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 (x == 0) via carry trickery: neg sets C iff x == 0, and the
;; following adc folds that carry into the 0/1 result.  Second
;; alternative uses the scratch when the output overlaps the input.
7698 (define_insn "*cstoresi_eq0_thumb1_insn"
7699 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7700 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7702 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7705 neg\\t%0, %1\;adc\\t%0, %0, %1
7706 neg\\t%2, %1\;adc\\t%0, %1, %2"
7707 [(set_attr "length" "4")]
;; Thumb-1 (x != 0) via borrow trickery: "sub %2, %1, #1" borrows iff
;; x == 0, and "sbc %0, %1, %2" then yields 0/1 accordingly.
7710 (define_insn "*cstoresi_ne0_thumb1_insn"
7711 [(set (match_operand:SI 0 "s_register_operand" "=l")
7712 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7714 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7716 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7717 [(set_attr "length" "4")]
7720 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; -(a <u b): cmp sets the carry, then "sbc %0, %0, %0" smears borrow
;; across the register, giving 0 or -1.  Used by the ltu/gtu expansions.
7721 (define_insn "cstoresi_nltu_thumb1"
7722 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7723 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7724 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7726 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7727 [(set_attr "length" "4")]
;; (a <u b) as 0/1: split into the -(a <u b) pattern above followed by a
;; negation of that 0/-1 intermediate.
7730 (define_insn_and_split "cstoresi_ltu_thumb1"
7731 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7732 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7733 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7738 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7739 (set (match_dup 0) (neg:SI (match_dup 3)))]
7740 "operands[3] = gen_reg_rtx (SImode);"
7741 [(set_attr "length" "4")]
7744 ;; Used as part of the expansion of thumb les sequence.
7745 (define_insn "thumb1_addsi3_addgeu"
7746 [(set (match_operand:SI 0 "s_register_operand" "=l")
7747 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7748 (match_operand:SI 2 "s_register_operand" "l"))
7749 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7750 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7752 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7753 [(set_attr "length" "4")]
7757 ;; Conditional move insns
;; Expand a SImode conditional move: emit the compare into the CC register
;; here and rewrite operand 1 into a test of that register against zero.
;; UNEQ/LTGT receive special handling (that branch body is elided in this
;; listing).
7759 (define_expand "movsicc"
7760 [(set (match_operand:SI 0 "s_register_operand" "")
7761 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7762 (match_operand:SI 2 "arm_not_operand" "")
7763 (match_operand:SI 3 "arm_not_operand" "")))]
7767 enum rtx_code code = GET_CODE (operands[1]);
7770 if (code == UNEQ || code == LTGT)
7773 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7774 XEXP (operands[1], 1));
7775 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move; same CC-register rewriting as movsicc, plus
;; legalization of operand 3 (forced into a register unless it is a valid
;; FPA add operand on a hard-float FPA target).
7779 (define_expand "movsfcc"
7780 [(set (match_operand:SF 0 "s_register_operand" "")
7781 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7782 (match_operand:SF 2 "s_register_operand" "")
7783 (match_operand:SF 3 "nonmemory_operand" "")))]
7784 "TARGET_32BIT && TARGET_HARD_FLOAT"
7787 enum rtx_code code = GET_CODE (operands[1]);
7790 if (code == UNEQ || code == LTGT)
7793 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7794 Otherwise, ensure it is a valid FP add operand */
7795 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7796 || (!arm_float_add_operand (operands[3], SFmode)))
7797 operands[3] = force_reg (SFmode, operands[3]);
7799 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7800 XEXP (operands[1], 1));
7801 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move; requires double-precision FP hardware
;; (FPA, or VFP with double support).
7805 (define_expand "movdfcc"
7806 [(set (match_operand:DF 0 "s_register_operand" "")
7807 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7808 (match_operand:DF 2 "s_register_operand" "")
7809 (match_operand:DF 3 "arm_float_add_operand" "")))]
7810 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7813 enum rtx_code code = GET_CODE (operands[1]);
7816 if (code == UNEQ || code == LTGT)
7819 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7820 XEXP (operands[1], 1));
7821 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SImode conditional move via predicated MOV/MVN pairs; the condition has
;; already been lowered to a CC-register test.  %d3/%D3 emit the (inverted)
;; condition suffix; #%B prints the bitwise complement of a constant, so
;; the 'K' (not_operand) alternatives use MVN.  One-instruction
;; alternatives (operand tied to "0") have length 4, the rest 8.
7825 (define_insn "*movsicc_insn"
7826 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7828 (match_operator 3 "arm_comparison_operator"
7829 [(match_operand 4 "cc_register" "") (const_int 0)])
7830 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7831 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7838 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7839 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7840 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7841 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7842 [(set_attr "length" "4,4,4,4,8,8,8,8")
7843 (set_attr "conds" "use")
7844 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
;; SFmode conditional move under soft-float: the SF value lives in a core
;; register, so an ordinary predicated MOV suffices.
7847 (define_insn "*movsfcc_soft_insn"
7848 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7849 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7850 [(match_operand 4 "cc_register" "") (const_int 0)])
7851 (match_operand:SF 1 "s_register_operand" "0,r")
7852 (match_operand:SF 2 "s_register_operand" "r,0")))]
7853 "TARGET_ARM && TARGET_SOFT_FLOAT"
7857 [(set_attr "conds" "use")
7858 (set_attr "insn" "mov")]
7862 ;; Jump and linkage insns
7864 (define_expand "jump"
7866 (label_ref (match_operand 0 "" "")))]
;; ARM/Thumb-2 unconditional branch.  The arm_ccfsm_state updates feed the
;; final-pass branch-conditionalization state machine; the length
;; computation picks the short Thumb-2 encoding when the target is within
;; the [-2044, 2048] displacement window.
7871 (define_insn "*arm_jump"
7873 (label_ref (match_operand 0 "" "")))]
7877 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7879 arm_ccfsm_state += 2;
7882 return \"b%?\\t%l0\";
7885 [(set_attr "predicable" "yes")
7886 (set (attr "length")
7888 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7889 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7890 (le (minus (match_dup 0) (pc)) (const_int 2048))))
;; Thumb-1 unconditional branch.  Out-of-range targets fall back to BL
;; (a "far jump"), which is what the far_jump attribute records.
7895 (define_insn "*thumb_jump"
7897 (label_ref (match_operand 0 "" "")))]
7900 if (get_attr_length (insn) == 2)
7902 return \"bl\\t%l0\\t%@ far jump\";
7904 [(set (attr "far_jump")
7906 (eq_attr "length" "4")
7907 (const_string "yes")
7908 (const_string "no")))
7909 (set (attr "length")
7911 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7912 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Expand a call with no return value.  Long calls force the callee
;; address into a register (indirect call); all call patterns clobber LR.
7917 (define_expand "call"
7918 [(parallel [(call (match_operand 0 "memory_operand" "")
7919 (match_operand 1 "general_operand" ""))
7920 (use (match_operand 2 "" ""))
7921 (clobber (reg:SI LR_REGNUM))])]
7927 /* In an untyped call, we can get NULL for operand 2. */
7928 if (operands[2] == NULL_RTX)
7929 operands[2] = const0_rtx;
7931 /* Decide if we should generate indirect calls by loading the
7932 32-bit address of the callee into a register before performing the
7934 callee = XEXP (operands[0], 0);
7935 if (GET_CODE (callee) == SYMBOL_REF
7936 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7938 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7940 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7941 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Skeleton parallel matched by the *call_* insns below.
7946 (define_expand "call_internal"
7947 [(parallel [(call (match_operand 0 "memory_operand" "")
7948 (match_operand 1 "general_operand" ""))
7949 (use (match_operand 2 "" ""))
7950 (clobber (reg:SI LR_REGNUM))])])
;; Register-indirect call on ARMv5+ (the BLX template line is elided in
;; this listing).
7952 (define_insn "*call_reg_armv5"
7953 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7954 (match_operand 1 "" ""))
7955 (use (match_operand 2 "" ""))
7956 (clobber (reg:SI LR_REGNUM))]
7957 "TARGET_ARM && arm_arch5"
7959 [(set_attr "type" "call")]
;; Register-indirect call for pre-v5 ARM: output_call emits an explicit
;; mov lr, pc / branch-to-register sequence.
7962 (define_insn "*call_reg_arm"
7963 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7964 (match_operand 1 "" ""))
7965 (use (match_operand 2 "" ""))
7966 (clobber (reg:SI LR_REGNUM))]
7967 "TARGET_ARM && !arm_arch5"
7969 return output_call (operands);
7971 ;; length is worst case, normally it is only two
7972 [(set_attr "length" "12")
7973 (set_attr "type" "call")]
7977 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7978 ;; considered a function call by the branch predictor of some cores (PR40887).
7979 ;; Falls back to blx rN (*call_reg_armv5).
7981 (define_insn "*call_mem"
7982 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7983 (match_operand 1 "" ""))
7984 (use (match_operand 2 "" ""))
7985 (clobber (reg:SI LR_REGNUM))]
7986 "TARGET_ARM && !arm_arch5"
7988 return output_call_mem (operands);
7990 [(set_attr "length" "12")
7991 (set_attr "type" "call")]
;; Thumb-1 register-indirect call on ARMv5+ (BLX; template line elided
;; in this listing).
7994 (define_insn "*call_reg_thumb1_v5"
7995 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7996 (match_operand 1 "" ""))
7997 (use (match_operand 2 "" ""))
7998 (clobber (reg:SI LR_REGNUM))]
7999 "TARGET_THUMB1 && arm_arch5"
8001 [(set_attr "length" "2")
8002 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 indirect call: either via a per-register helper stub, or
;; (with caller interworking) via the __interwork_*_call_via_* libgcc
;; veneers, chosen by the frame pointer in use.
8005 (define_insn "*call_reg_thumb1"
8006 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8007 (match_operand 1 "" ""))
8008 (use (match_operand 2 "" ""))
8009 (clobber (reg:SI LR_REGNUM))]
8010 "TARGET_THUMB1 && !arm_arch5"
8013 if (!TARGET_CALLER_INTERWORKING)
8014 return thumb_call_via_reg (operands[0]);
8015 else if (operands[1] == const0_rtx)
8016 return \"bl\\t%__interwork_call_via_%0\";
8017 else if (frame_pointer_needed)
8018 return \"bl\\t%__interwork_r7_call_via_%0\";
8020 return \"bl\\t%__interwork_r11_call_via_%0\";
8022 [(set_attr "type" "call")]
;; Expand a call that produces a return value; mirrors "call" above with
;; every operand shifted up by one (0 = result, 1 = callee, 2 = args,
;; 3 = next-arg-reg use).
8025 (define_expand "call_value"
8026 [(parallel [(set (match_operand 0 "" "")
8027 (call (match_operand 1 "memory_operand" "")
8028 (match_operand 2 "general_operand" "")))
8029 (use (match_operand 3 "" ""))
8030 (clobber (reg:SI LR_REGNUM))])]
8036 /* In an untyped call, we can get NULL for operand 3. */
8037 if (operands[3] == 0)
8038 operands[3] = const0_rtx;
8040 /* Decide if we should generate indirect calls by loading the
8041 32-bit address of the callee into a register before performing the
8043 callee = XEXP (operands[1], 0);
8044 if (GET_CODE (callee) == SYMBOL_REF
8045 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8047 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8049 pat = gen_call_value_internal (operands[0], operands[1],
8050 operands[2], operands[3]);
8051 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Skeleton parallel matched by the *call_value_* insns below.
8056 (define_expand "call_value_internal"
8057 [(parallel [(set (match_operand 0 "" "")
8058 (call (match_operand 1 "memory_operand" "")
8059 (match_operand 2 "general_operand" "")))
8060 (use (match_operand 3 "" ""))
8061 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning register-indirect call on ARMv5+ (BLX template line
;; elided in this listing).
8063 (define_insn "*call_value_reg_armv5"
8064 [(set (match_operand 0 "" "")
8065 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8066 (match_operand 2 "" "")))
8067 (use (match_operand 3 "" ""))
8068 (clobber (reg:SI LR_REGNUM))]
8069 "TARGET_ARM && arm_arch5"
8071 [(set_attr "type" "call")]
;; Value-returning variants of the pre-v5 call insns; note the output
;; routines are handed &operands[1] so the callee address is operand 0
;; from their point of view.
8074 (define_insn "*call_value_reg_arm"
8075 [(set (match_operand 0 "" "")
8076 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8077 (match_operand 2 "" "")))
8078 (use (match_operand 3 "" ""))
8079 (clobber (reg:SI LR_REGNUM))]
8080 "TARGET_ARM && !arm_arch5"
8082 return output_call (&operands[1]);
8084 [(set_attr "length" "12")
8085 (set_attr "type" "call")]
8088 ;; Note: see *call_mem
8090 (define_insn "*call_value_mem"
8091 [(set (match_operand 0 "" "")
8092 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8093 (match_operand 2 "" "")))
8094 (use (match_operand 3 "" ""))
8095 (clobber (reg:SI LR_REGNUM))]
8096 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8098 return output_call_mem (&operands[1]);
8100 [(set_attr "length" "12")
8101 (set_attr "type" "call")]
;; Thumb-1/v5 value-returning indirect call (BLX template elided in this
;; listing).
8104 (define_insn "*call_value_reg_thumb1_v5"
8105 [(set (match_operand 0 "" "")
8106 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8107 (match_operand 2 "" "")))
8108 (use (match_operand 3 "" ""))
8109 (clobber (reg:SI LR_REGNUM))]
8110 "TARGET_THUMB1 && arm_arch5"
8112 [(set_attr "length" "2")
8113 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 value-returning indirect call; same stub/veneer
;; selection logic as *call_reg_thumb1.
8116 (define_insn "*call_value_reg_thumb1"
8117 [(set (match_operand 0 "" "")
8118 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8119 (match_operand 2 "" "")))
8120 (use (match_operand 3 "" ""))
8121 (clobber (reg:SI LR_REGNUM))]
8122 "TARGET_THUMB1 && !arm_arch5"
8125 if (!TARGET_CALLER_INTERWORKING)
8126 return thumb_call_via_reg (operands[1]);
8127 else if (operands[2] == const0_rtx)
8128 return \"bl\\t%__interwork_call_via_%1\";
8129 else if (frame_pointer_needed)
8130 return \"bl\\t%__interwork_r7_call_via_%1\";
8132 return \"bl\\t%__interwork_r11_call_via_%1\";
8134 [(set_attr "type" "call")]
8137 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8138 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct BL to a symbol; a (PLT) annotation is emitted when the target
;; needs PLT-relative relocation (shared-library builds).
8140 (define_insn "*call_symbol"
8141 [(call (mem:SI (match_operand:SI 0 "" ""))
8142 (match_operand 1 "" ""))
8143 (use (match_operand 2 "" ""))
8144 (clobber (reg:SI LR_REGNUM))]
8146 && (GET_CODE (operands[0]) == SYMBOL_REF)
8147 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8150 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8152 [(set_attr "type" "call")]
;; Value-returning form of *call_symbol.
8155 (define_insn "*call_value_symbol"
8156 [(set (match_operand 0 "" "")
8157 (call (mem:SI (match_operand:SI 1 "" ""))
8158 (match_operand:SI 2 "" "")))
8159 (use (match_operand 3 "" ""))
8160 (clobber (reg:SI LR_REGNUM))]
8162 && (GET_CODE (operands[1]) == SYMBOL_REF)
8163 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8166 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8168 [(set_attr "type" "call")]
;; Direct symbol call (the enabling condition and template lines are
;; partially elided in this listing).
8171 (define_insn "*call_insn"
8172 [(call (mem:SI (match_operand:SI 0 "" ""))
8173 (match_operand:SI 1 "" ""))
8174 (use (match_operand 2 "" ""))
8175 (clobber (reg:SI LR_REGNUM))]
8177 && GET_CODE (operands[0]) == SYMBOL_REF
8178 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8180 [(set_attr "length" "4")
8181 (set_attr "type" "call")]
;; Value-returning form of *call_insn.
8184 (define_insn "*call_value_insn"
8185 [(set (match_operand 0 "" "")
8186 (call (mem:SI (match_operand 1 "" ""))
8187 (match_operand 2 "" "")))
8188 (use (match_operand 3 "" ""))
8189 (clobber (reg:SI LR_REGNUM))]
8191 && GET_CODE (operands[1]) == SYMBOL_REF
8192 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8194 [(set_attr "length" "4")
8195 (set_attr "type" "call")]
8198 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail (sibling) call expander; like "call", a NULL next-arg-reg operand
;; is normalized to const0_rtx.
8199 (define_expand "sibcall"
8200 [(parallel [(call (match_operand 0 "memory_operand" "")
8201 (match_operand 1 "general_operand" ""))
8203 (use (match_operand 2 "" ""))])]
8207 if (operands[2] == NULL_RTX)
8208 operands[2] = const0_rtx;
;; Value-returning tail-call expander.
8212 (define_expand "sibcall_value"
8213 [(parallel [(set (match_operand 0 "" "")
8214 (call (match_operand 1 "memory_operand" "")
8215 (match_operand 2 "general_operand" "")))
8217 (use (match_operand 3 "" ""))])]
8221 if (operands[3] == NULL_RTX)
8222 operands[3] = const0_rtx;
;; Sibling call is just a branch (B, not BL) to the symbol; LR is not
;; clobbered since we never return here.
8226 (define_insn "*sibcall_insn"
8227 [(call (mem:SI (match_operand:SI 0 "" "X"))
8228 (match_operand 1 "" ""))
8230 (use (match_operand 2 "" ""))]
8231 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8233 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8235 [(set_attr "type" "call")]
;; Value-returning form of *sibcall_insn.
8238 (define_insn "*sibcall_value_insn"
8239 [(set (match_operand 0 "" "")
8240 (call (mem:SI (match_operand:SI 1 "" "X"))
8241 (match_operand 2 "" "")))
8243 (use (match_operand 3 "" ""))]
8244 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8246 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8248 [(set_attr "type" "call")]
;; Simple function return, available only when the epilogue fits in a
;; single return instruction (USE_RETURN_INSN).
8251 (define_expand "return"
8253 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8256 ;; Often the return insn will be the same as loading from memory, so set attr
8257 (define_insn "*arm_return"
8259 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
;; arm_ccfsm_state interaction with final-pass conditionalization; the
;; body is emitted by output_return_instruction.
8262 if (arm_ccfsm_state == 2)
8264 arm_ccfsm_state += 2;
8267 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8269 [(set_attr "type" "load1")
8270 (set_attr "length" "12")
8271 (set_attr "predicable" "yes")]
;; Conditional return: return when operand 0's CC test holds, otherwise
;; fall through.
8274 (define_insn "*cond_return"
8276 (if_then_else (match_operator 0 "arm_comparison_operator"
8277 [(match_operand 1 "cc_register" "") (const_int 0)])
8280 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8283 if (arm_ccfsm_state == 2)
8285 arm_ccfsm_state += 2;
8288 return output_return_instruction (operands[0], TRUE, FALSE);
8290 [(set_attr "conds" "use")
8291 (set_attr "length" "12")
8292 (set_attr "type" "load1")]
;; As above but with the branch sense inverted (the final TRUE argument
;; to output_return_instruction requests the reversed condition).
8295 (define_insn "*cond_return_inverted"
8297 (if_then_else (match_operator 0 "arm_comparison_operator"
8298 [(match_operand 1 "cc_register" "") (const_int 0)])
8301 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8304 if (arm_ccfsm_state == 2)
8306 arm_ccfsm_state += 2;
8309 return output_return_instruction (operands[0], TRUE, TRUE);
8311 [(set_attr "conds" "use")
8312 (set_attr "length" "12")
8313 (set_attr "type" "load1")]
8316 ;; Generate a sequence of instructions to determine if the processor is
8317 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8320 (define_expand "return_addr_mask"
8322 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8324 (set (match_operand:SI 0 "s_register_operand" "")
8325 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8327 (const_int 67108860)))] ; 0x03fffffc
8330 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM)			;
;; "teq pc, pc" distinguishes the modes: on 26-bit cores reads of PC
;; include the flag bits, so the result is non-zero there; on 32-bit
;; cores it compares equal.  NOTE(review): wording inferred from the
;; 26-bit/32-bit comment above — confirm against the ARM ARM.
8333 (define_insn "*check_arch2"
8334 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8335 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8338 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8339 [(set_attr "length" "8")
8340 (set_attr "conds" "set")]
8343 ;; Call subroutine returning any type.
;; Operand 0 = callee, operand 1 = result block (BLK mem), operand 2 =
;; parallel of (set <hard-ret-reg> ...) describing every possible return
;; register.  After the call, each return register is stored into the
;; result block; r0 is widened to TImode since up to four registers
;; (r0-r3) may carry the value.
8345 (define_expand "untyped_call"
8346 [(parallel [(call (match_operand 0 "" "")
8348 (match_operand 1 "" "")
8349 (match_operand 2 "" "")])]
8354 rtx par = gen_rtx_PARALLEL (VOIDmode,
8355 rtvec_alloc (XVECLEN (operands[2], 0)));
8356 rtx addr = gen_reg_rtx (Pmode);
8360 emit_move_insn (addr, XEXP (operands[1], 0));
8361 mem = change_address (operands[1], BLKmode, addr);
8363 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8365 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8367 /* Default code only uses r0 as a return value, but we could
8368 be using anything up to 4 registers. */
8369 if (REGNO (src) == R0_REGNUM)
8370 src = gen_rtx_REG (TImode, R0_REGNUM);
8372 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8374 size += GET_MODE_SIZE (GET_MODE (src));
8377 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
;; Store every returned register into the result block.
8382 for (i = 0; i < XVECLEN (par, 0); i++)
8384 HOST_WIDE_INT offset = 0;
8385 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8388 emit_move_insn (addr, plus_constant (addr, size));
8390 mem = change_address (mem, GET_MODE (reg), NULL);
8391 if (REGNO (reg) == R0_REGNUM)
8393 /* On thumb we have to use a write-back instruction. */
8394 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8395 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8396 size = TARGET_ARM ? 16 : 0;
8400 emit_move_insn (mem, reg);
8401 size = GET_MODE_SIZE (GET_MODE (reg));
8405 /* The optimizer does not know that the call sets the function value
8406 registers we stored in the result block. We avoid problems by
8407 claiming that all hard registers are used and clobbered at this
8409 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload every return register from the result
;; block (operand 0) as described by operand 1, emit USEs so the values
;; stay live, and return.
8415 (define_expand "untyped_return"
8416 [(match_operand:BLK 0 "memory_operand" "")
8417 (match_operand 1 "" "")]
8422 rtx addr = gen_reg_rtx (Pmode);
8426 emit_move_insn (addr, XEXP (operands[0], 0));
8427 mem = change_address (operands[0], BLKmode, addr);
8429 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8431 HOST_WIDE_INT offset = 0;
8432 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8435 emit_move_insn (addr, plus_constant (addr, size));
8437 mem = change_address (mem, GET_MODE (reg), NULL);
8438 if (REGNO (reg) == R0_REGNUM)
8440 /* On thumb we have to use a write-back instruction. */
8441 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8442 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8443 size = TARGET_ARM ? 16 : 0;
8447 emit_move_insn (reg, mem);
8448 size = GET_MODE_SIZE (GET_MODE (reg));
8452 /* Emit USE insns before the return. */
8453 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8454 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8456 /* Construct the return. */
8457 expand_naked_return ();
8463 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8464 ;; all of memory. This blocks insns from being moved across this point.
;; Emits no code (length 0); it exists purely as a scheduling barrier.
8466 (define_insn "blockage"
8467 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8470 [(set_attr "length" "0")
8471 (set_attr "type" "block")]
;; Switch-statement dispatch.  Bias the index by the lower bound (so the
;; table is zero-based), pick the per-subtarget internal pattern, force
;; the range operand into a register if the pattern's predicate rejects
;; it, then emit the dispatch jump.
8474 (define_expand "casesi"
8475 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8476 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8477 (match_operand:SI 2 "const_int_operand" "") ; total range
8478 (match_operand:SI 3 "" "") ; table label
8479 (match_operand:SI 4 "" "")] ; Out of range label
8480 "TARGET_32BIT || optimize_size || flag_pic"
8483 enum insn_code code;
8484 if (operands[1] != const0_rtx)
8486 rtx reg = gen_reg_rtx (SImode);
8488 emit_insn (gen_addsi3 (reg, operands[0],
8489 gen_int_mode (-INTVAL (operands[1]),
8495 code = CODE_FOR_arm_casesi_internal;
8496 else if (TARGET_THUMB1)
8497 code = CODE_FOR_thumb1_casesi_internal_pic;
8499 code = CODE_FOR_thumb2_casesi_internal_pic;
8501 code = CODE_FOR_thumb2_casesi_internal;
8503 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8504 operands[2] = force_reg (SImode, operands[2]);
8506 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8507 operands[3], operands[4]));
8512 ;; The USE in this pattern is needed to tell flow analysis that this is
8513 ;; a CASESI insn. It has no other purpose.
;; In-range indices dispatch through a 4-byte-entry table addressed
;; relative to PC (addls/ldrls); out-of-range falls through to a branch
;; to the default label (%l3).
8514 (define_insn "arm_casesi_internal"
8515 [(parallel [(set (pc)
8517 (leu (match_operand:SI 0 "s_register_operand" "r")
8518 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8519 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8520 (label_ref (match_operand 2 "" ""))))
8521 (label_ref (match_operand 3 "" ""))))
8522 (clobber (reg:CC CC_REGNUM))
8523 (use (label_ref (match_dup 2)))])]
8527 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8528 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8530 [(set_attr "conds" "clob")
8531 (set_attr "length" "12")]
;; Thumb-1 PIC dispatch: branch to the default label on out-of-range
;; (GTU compare), move the index into r0, and hand off to
;; thumb1_casesi_dispatch.
8534 (define_expand "thumb1_casesi_internal_pic"
8535 [(match_operand:SI 0 "s_register_operand" "")
8536 (match_operand:SI 1 "thumb1_cmp_operand" "")
8537 (match_operand 2 "" "")
8538 (match_operand 3 "" "")]
8542 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8543 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8545 reg0 = gen_rtx_REG (SImode, 0);
8546 emit_move_insn (reg0, operands[0]);
8547 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Table dispatch proper; index arrives in r0, and the helper clobbers
;; ip and lr.  Output is produced by thumb1_output_casesi.
8552 (define_insn "thumb1_casesi_dispatch"
8553 [(parallel [(set (pc) (unspec [(reg:SI 0)
8554 (label_ref (match_operand 0 "" ""))
8555 ;; (label_ref (match_operand 1 "" ""))
8557 UNSPEC_THUMB1_CASESI))
8558 (clobber (reg:SI IP_REGNUM))
8559 (clobber (reg:SI LR_REGNUM))])]
8561 "* return thumb1_output_casesi(operands);"
8562 [(set_attr "length" "4")]
;; Computed goto.  For Thumb-2 the low bit of the target is set and BX is
;; used (the BX emission lines are elided in this listing).
8565 (define_expand "indirect_jump"
8567 (match_operand:SI 0 "s_register_operand" ""))]
8570 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8571 address and use bx. */
8575 tmp = gen_reg_rtx (SImode);
8576 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8582 ;; NB Never uses BX.
8583 (define_insn "*arm_indirect_jump"
8585 (match_operand:SI 0 "s_register_operand" "r"))]
8587 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8588 [(set_attr "predicable" "yes")]
;; Indirect jump with the target loaded straight from memory into PC.
8591 (define_insn "*load_indirect_jump"
8593 (match_operand:SI 0 "memory_operand" "m"))]
8595 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8596 [(set_attr "type" "load1")
8597 (set_attr "pool_range" "4096")
8598 (set_attr "neg_pool_range" "4084")
8599 (set_attr "predicable" "yes")]
8602 ;; NB Never uses BX.
8603 (define_insn "*thumb1_indirect_jump"
8605 (match_operand:SI 0 "register_operand" "l*r"))]
8608 [(set_attr "conds" "clob")
8609 (set_attr "length" "2")]
8619 if (TARGET_UNIFIED_ASM)
8622 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8623 return \"mov\\tr8, r8\";
8625 [(set (attr "length")
8626 (if_then_else (eq_attr "is_thumb" "yes")
8632 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; <op> rd, rn, rm <shift>: a shiftable ALU operation whose second input
;; is itself shifted.  %i1 prints the operator mnemonic, %S3 the shift.
8634 (define_insn "*arith_shiftsi"
8635 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8636 (match_operator:SI 1 "shiftable_operator"
8637 [(match_operator:SI 3 "shift_operator"
8638 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8639 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8640 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8642 "%i1%?\\t%0, %2, %4%S3"
8643 [(set_attr "predicable" "yes")
8644 (set_attr "shift" "4")
8645 (set_attr "arch" "a,t2,t2,a")
8646 ;; Thumb2 doesn't allow the stack pointer to be used for
8647 ;; operand1 for all operations other than add and sub. In this case
8648 ;; the minus operation is a candidate for an rsub and hence needs
8650 ;; We have to make sure to disable the fourth alternative if
8651 ;; the shift_operator is MULT, since otherwise the insn will
8652 ;; also match a multiply_accumulate pattern and validate_change
8653 ;; will allow a replacement of the constant with a register
8654 ;; despite the checks done in shift_operator.
8655 (set_attr_alternative "insn_enabled"
8656 [(const_string "yes")
8658 (match_operand:SI 1 "add_operator" "")
8659 (const_string "yes") (const_string "no"))
8660 (const_string "yes")
8662 (match_operand:SI 3 "mult_operator" "")
8663 (const_string "no") (const_string "yes"))])
8664 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
;; Split a doubly-nested shiftable operation into two insns through
;; scratch operand 8 (the opening define_split line is elided in this
;; listing).
8667 [(set (match_operand:SI 0 "s_register_operand" "")
8668 (match_operator:SI 1 "shiftable_operator"
8669 [(match_operator:SI 2 "shiftable_operator"
8670 [(match_operator:SI 3 "shift_operator"
8671 [(match_operand:SI 4 "s_register_operand" "")
8672 (match_operand:SI 5 "reg_or_int_operand" "")])
8673 (match_operand:SI 6 "s_register_operand" "")])
8674 (match_operand:SI 7 "arm_rhs_operand" "")]))
8675 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8678 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8681 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi, but the flag-setting form (%i1%. appends 's');
;; compares the result against zero and also writes it to operand 0.
8684 (define_insn "*arith_shiftsi_compare0"
8685 [(set (reg:CC_NOOV CC_REGNUM)
8687 (match_operator:SI 1 "shiftable_operator"
8688 [(match_operator:SI 3 "shift_operator"
8689 [(match_operand:SI 4 "s_register_operand" "r,r")
8690 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8691 (match_operand:SI 2 "s_register_operand" "r,r")])
8693 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8694 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8697 "%i1%.\\t%0, %2, %4%S3"
8698 [(set_attr "conds" "set")
8699 (set_attr "shift" "4")
8700 (set_attr "arch" "32,a")
8701 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Same comparison, but the arithmetic result itself is discarded
;; (scratch destination) — only the condition codes matter.
8703 (define_insn "*arith_shiftsi_compare0_scratch"
8704 [(set (reg:CC_NOOV CC_REGNUM)
8706 (match_operator:SI 1 "shiftable_operator"
8707 [(match_operator:SI 3 "shift_operator"
8708 [(match_operand:SI 4 "s_register_operand" "r,r")
8709 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8710 (match_operand:SI 2 "s_register_operand" "r,r")])
8712 (clobber (match_scratch:SI 0 "=r,r"))]
8714 "%i1%.\\t%0, %2, %4%S3"
8715 [(set_attr "conds" "set")
8716 (set_attr "shift" "4")
8717 (set_attr "arch" "32,a")
8718 (set_attr "type" "alu_shift,alu_shift_reg")])
;; sub rd, rn, rm <shift>: subtraction with a shifted subtrahend.
8720 (define_insn "*sub_shiftsi"
8721 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8722 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8723 (match_operator:SI 2 "shift_operator"
8724 [(match_operand:SI 3 "s_register_operand" "r,r")
8725 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8727 "sub%?\\t%0, %1, %3%S2"
8728 [(set_attr "predicable" "yes")
8729 (set_attr "shift" "3")
8730 (set_attr "arch" "32,a")
8731 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting variant (subs) that also keeps the difference.
8733 (define_insn "*sub_shiftsi_compare0"
8734 [(set (reg:CC_NOOV CC_REGNUM)
8736 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8737 (match_operator:SI 2 "shift_operator"
8738 [(match_operand:SI 3 "s_register_operand" "r,r")
8739 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8741 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8742 (minus:SI (match_dup 1)
8743 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8745 "sub%.\\t%0, %1, %3%S2"
8746 [(set_attr "conds" "set")
8747 (set_attr "shift" "3")
8748 (set_attr "arch" "32,a")
8749 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Flag-setting variant whose difference is discarded into a scratch.
8751 (define_insn "*sub_shiftsi_compare0_scratch"
8752 [(set (reg:CC_NOOV CC_REGNUM)
8754 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8755 (match_operator:SI 2 "shift_operator"
8756 [(match_operand:SI 3 "s_register_operand" "r,r")
8757 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8759 (clobber (match_scratch:SI 0 "=r,r"))]
8761 "sub%.\\t%0, %1, %3%S2"
8762 [(set_attr "conds" "set")
8763 (set_attr "shift" "3")
8764 (set_attr "arch" "32,a")
8765 (set_attr "type" "alu_shift,alu_shift_reg")])
;; reg AND (condition): zero the destination when the condition fails,
;; otherwise take the low bit of operand 2.
8768 (define_insn "*and_scc"
8769 [(set (match_operand:SI 0 "s_register_operand" "=r")
8770 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8771 [(match_operand 3 "cc_register" "") (const_int 0)])
8772 (match_operand:SI 2 "s_register_operand" "r")))]
8774 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8775 [(set_attr "conds" "use")
8776 (set_attr "insn" "mov")
8777 (set_attr "length" "8")]
;; reg OR (condition): set the low bit when the condition holds.  The
;; first alternative (operand 1 tied to "0") needs only the ORR.
8780 (define_insn "*ior_scc"
8781 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8782 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8783 [(match_operand 3 "cc_register" "") (const_int 0)])
8784 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8788 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8789 [(set_attr "conds" "use")
8790 (set_attr "length" "4,8")]
8793 ; A series of splitters for the compare_scc pattern below. Note that
8794 ; order is important.
; x < 0 (signed) is just the sign bit: logical shift right by 31.
8796 [(set (match_operand:SI 0 "s_register_operand" "")
8797 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8799 (clobber (reg:CC CC_REGNUM))]
8800 "TARGET_32BIT && reload_completed"
8801 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
; x >= 0 (signed): complement, then extract the (former) sign bit.
8804 [(set (match_operand:SI 0 "s_register_operand" "")
8805 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8807 (clobber (reg:CC CC_REGNUM))]
8808 "TARGET_32BIT && reload_completed"
8809 [(set (match_dup 0) (not:SI (match_dup 1)))
8810 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
; x == <small const>: compute 1 - x with flags, then conditionally clear.
8813 [(set (match_operand:SI 0 "s_register_operand" "")
8814 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8816 (clobber (reg:CC CC_REGNUM))]
8817 "TARGET_32BIT && reload_completed"
8819 [(set (reg:CC CC_REGNUM)
8820 (compare:CC (const_int 1) (match_dup 1)))
8822 (minus:SI (const_int 1) (match_dup 1)))])
8823 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8824 (set (match_dup 0) (const_int 0)))])
; x != <const>: subtract the constant (operand 3 = -const) with flags,
; then force the result to 1 when non-zero.
8827 [(set (match_operand:SI 0 "s_register_operand" "")
8828 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8829 (match_operand:SI 2 "const_int_operand" "")))
8830 (clobber (reg:CC CC_REGNUM))]
8831 "TARGET_32BIT && reload_completed"
8833 [(set (reg:CC CC_REGNUM)
8834 (compare:CC (match_dup 1) (match_dup 2)))
8835 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8836 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8837 (set (match_dup 0) (const_int 1)))]
8839 operands[3] = GEN_INT (-INTVAL (operands[2]));
; General x != y: flag-setting subtraction, then set to 1 when non-zero.
8843 [(set (match_operand:SI 0 "s_register_operand" "")
8844 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8845 (match_operand:SI 2 "arm_add_operand" "")))
8846 (clobber (reg:CC CC_REGNUM))]
8847 "TARGET_32BIT && reload_completed"
8849 [(set (reg:CC_NOOV CC_REGNUM)
8850 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8852 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8853 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8854 (set (match_dup 0) (const_int 1)))])
;; Catch-all scc: compare, then conditionally write 0 (reversed
;; condition, operand 4) and 1 (original condition, operand 5).  FP
;; comparisons reverse with reverse_condition_maybe_unordered so NaNs
;; are handled.
8856 (define_insn_and_split "*compare_scc"
8857 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8858 (match_operator:SI 1 "arm_comparison_operator"
8859 [(match_operand:SI 2 "s_register_operand" "r,r")
8860 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8861 (clobber (reg:CC CC_REGNUM))]
8864 "&& reload_completed"
8865 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8866 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8867 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8870 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8871 operands[2], operands[3]);
8872 enum rtx_code rc = GET_CODE (operands[1]);
8874 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8876 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8877 if (mode == CCFPmode || mode == CCFPEmode)
8878 rc = reverse_condition_maybe_unordered (rc);
8880 rc = reverse_condition (rc);
8881 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8884 ;; Attempt to improve the sequence generated by the compare_scc splitters
8885 ;; not to use conditional execution.
;; Peephole (its define_peephole2 header line is elided in this listing):
;; rewrites compare + two cond_exec sets into subs / negs-with-carry /
;; adc arithmetic using scratch operand 3, avoiding predication.
8887 [(set (reg:CC CC_REGNUM)
8888 (compare:CC (match_operand:SI 1 "register_operand" "")
8889 (match_operand:SI 2 "arm_rhs_operand" "")))
8890 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8891 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8892 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8893 (set (match_dup 0) (const_int 1)))
8894 (match_scratch:SI 3 "r")]
8897 [(set (reg:CC CC_REGNUM)
8898 (compare:CC (match_dup 1) (match_dup 2)))
8899 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
8901 [(set (reg:CC CC_REGNUM)
8902 (compare:CC (const_int 0) (match_dup 3)))
8903 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8906 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8907 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
8908 (clobber (reg:CC CC_REGNUM))])])
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract) -- insn conditions/output fragments may be missing.
;;
;; *cond_move: conditional move using an already-set CC register.  Operand 3
;; (EQ or NE) selects which arm of the if_then_else corresponds to the
;; direct condition %d4 versus the inverse %D4; alternatives with a tied
;; "0" input need only one mov.
8910 (define_insn "*cond_move"
8911 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8912 (if_then_else:SI (match_operator 3 "equality_operator"
8913 [(match_operator 4 "arm_comparison_operator"
8914 [(match_operand 5 "cc_register" "") (const_int 0)])
8916 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8917 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8920 if (GET_CODE (operands[3]) == NE)
;; NE: the if_then_else arms are swapped relative to condition 4.
8922 if (which_alternative != 1)
8923 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8924 if (which_alternative != 0)
8925 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8928 if (which_alternative != 0)
8929 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8930 if (which_alternative != 1)
8931 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8934 [(set_attr "conds" "use")
8935 (set_attr "insn" "mov")
8936 (set_attr "length" "4,4,8")]
;; *cond_arith: apply a shiftable operation to the 0/1 result of a compare.
;; Special-cases LT against zero as a single op with an lsr #31 shift;
;; otherwise emits cmp plus conditional ops (clobbers CC).
8939 (define_insn "*cond_arith"
8940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8941 (match_operator:SI 5 "shiftable_operator"
8942 [(match_operator:SI 4 "arm_comparison_operator"
8943 [(match_operand:SI 2 "s_register_operand" "r,r")
8944 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8945 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8946 (clobber (reg:CC CC_REGNUM))]
;; (x < 0) as operand of op5: use the sign bit directly via lsr #31.
8949 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8950 return \"%i5\\t%0, %1, %2, lsr #31\";
8952 output_asm_insn (\"cmp\\t%2, %3\", operands);
;; AND needs the false case zeroed; MINUS needs rsb to negate operand 1.
8953 if (GET_CODE (operands[5]) == AND)
8954 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8955 else if (GET_CODE (operands[5]) == MINUS)
8956 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8957 else if (which_alternative != 0)
8958 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8959 return \"%i5%d4\\t%0, %1, #1\";
8961 [(set_attr "conds" "clob")
8962 (set_attr "length" "12")]
;; *cond_sub: operand1 minus the 0/1 result of a comparison, emitted as
;; cmp followed by a conditional sub of #1 (clobbers CC).
8965 (define_insn "*cond_sub"
8966 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8967 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8968 (match_operator:SI 4 "arm_comparison_operator"
8969 [(match_operand:SI 2 "s_register_operand" "r,r")
8970 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8971 (clobber (reg:CC CC_REGNUM))]
8974 output_asm_insn (\"cmp\\t%2, %3\", operands);
8975 if (which_alternative != 0)
8976 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8977 return \"sub%d4\\t%0, %1, #1\";
8979 [(set_attr "conds" "clob")
8980 (set_attr "length" "8,12")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); the insn conditions and some rtl are missing here.
;;
;; The four patterns below combine two comparisons into a single dominant
;; CC-register result.  Each chooses between two cmp/cmn orderings ("swap")
;; depending on which comparison dominates the other; the opcode table is
;; indexed by alternative (which encodes whether each constant needs cmp or
;; cmn) and by the dominance test.
8983 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; *cmp_ite0: if-then-else of two comparisons with a 0 constant arm.
8984 (define_insn "*cmp_ite0"
8985 [(set (match_operand 6 "dominant_cc_register" "")
8988 (match_operator 4 "arm_comparison_operator"
8989 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8990 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8991 (match_operator:SI 5 "arm_comparison_operator"
8992 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8993 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8999 static const char * const opcodes[4][2] =
9001 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9002 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9003 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9004 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9005 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9006 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9007 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9008 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9011 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9013 return opcodes[which_alternative][swap];
9015 [(set_attr "conds" "set")
9016 (set_attr "length" "8")]
;; *cmp_ite1: as *cmp_ite0 but the dominance test uses the REVERSED first
;; condition, so the second compare in each pair uses the inverse suffix %D5.
9019 (define_insn "*cmp_ite1"
9020 [(set (match_operand 6 "dominant_cc_register" "")
9023 (match_operator 4 "arm_comparison_operator"
9024 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9025 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9026 (match_operator:SI 5 "arm_comparison_operator"
9027 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9028 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9034 static const char * const opcodes[4][2] =
9036 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9037 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9038 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9039 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9040 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9041 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9042 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9043 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9046 comparison_dominates_p (GET_CODE (operands[5]),
9047 reverse_condition (GET_CODE (operands[4])));
9049 return opcodes[which_alternative][swap];
9051 [(set_attr "conds" "set")
9052 (set_attr "length" "8")]
;; *cmp_and: AND of two comparisons folded into a dominant CC mode; same
;; opcode table shape as *cmp_ite0.
9055 (define_insn "*cmp_and"
9056 [(set (match_operand 6 "dominant_cc_register" "")
9059 (match_operator 4 "arm_comparison_operator"
9060 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9061 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9062 (match_operator:SI 5 "arm_comparison_operator"
9063 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9064 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9069 static const char *const opcodes[4][2] =
9071 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9072 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9073 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9074 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9075 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9076 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9077 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9078 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9081 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9083 return opcodes[which_alternative][swap];
9085 [(set_attr "conds" "set")
9086 (set_attr "predicable" "no")
9087 (set_attr "length" "8")]
;; *cmp_ior: IOR of two comparisons; second compare of each pair is
;; executed when the first FAILS, hence the inverse suffixes %D4/%D5.
9090 (define_insn "*cmp_ior"
9091 [(set (match_operand 6 "dominant_cc_register" "")
9094 (match_operator 4 "arm_comparison_operator"
9095 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9096 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9097 (match_operator:SI 5 "arm_comparison_operator"
9098 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9099 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9104 static const char *const opcodes[4][2] =
9106 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9107 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9108 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9109 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9110 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9111 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9112 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9113 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9116 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9118 return opcodes[which_alternative][swap];
9121 [(set_attr "conds" "set")
9122 (set_attr "length" "8")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract) -- conditions and some closing rtl are missing.
;;
;; *ior_scc_scc: IOR of two store-condition results.  Splits (after reload,
;; ARM mode) into a dominance-CC compare of the two conditions followed by
;; an NE test of that CC register (operand 7, built in the prep code).
9125 (define_insn_and_split "*ior_scc_scc"
9126 [(set (match_operand:SI 0 "s_register_operand" "=r")
9127 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9128 [(match_operand:SI 1 "s_register_operand" "r")
9129 (match_operand:SI 2 "arm_add_operand" "rIL")])
9130 (match_operator:SI 6 "arm_comparison_operator"
9131 [(match_operand:SI 4 "s_register_operand" "r")
9132 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9133 (clobber (reg:CC CC_REGNUM))]
9135 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9138 "TARGET_ARM && reload_completed"
9142 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9143 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9145 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9147 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9150 [(set_attr "conds" "clob")
9151 (set_attr "length" "16")])
9153 ; If the above pattern is followed by a CMP insn, then the compare is
9154 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: combined form where the IOR result also feeds a
;; compare; splits into the dominance compare plus an NE store.
9155 (define_insn_and_split "*ior_scc_scc_cmp"
9156 [(set (match_operand 0 "dominant_cc_register" "")
9157 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9158 [(match_operand:SI 1 "s_register_operand" "r")
9159 (match_operand:SI 2 "arm_add_operand" "rIL")])
9160 (match_operator:SI 6 "arm_comparison_operator"
9161 [(match_operand:SI 4 "s_register_operand" "r")
9162 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9164 (set (match_operand:SI 7 "s_register_operand" "=r")
9165 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9166 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9169 "TARGET_ARM && reload_completed"
9173 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9174 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9176 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9178 [(set_attr "conds" "set")
9179 (set_attr "length" "16")])
;; *and_scc_scc: AND of two store-condition results, using the
;; DOM_CC_X_AND_Y dominance mode; otherwise parallel to *ior_scc_scc.
9181 (define_insn_and_split "*and_scc_scc"
9182 [(set (match_operand:SI 0 "s_register_operand" "=r")
9183 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9184 [(match_operand:SI 1 "s_register_operand" "r")
9185 (match_operand:SI 2 "arm_add_operand" "rIL")])
9186 (match_operator:SI 6 "arm_comparison_operator"
9187 [(match_operand:SI 4 "s_register_operand" "r")
9188 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9189 (clobber (reg:CC CC_REGNUM))]
9191 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9194 "TARGET_ARM && reload_completed
9195 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9200 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9201 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9203 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9205 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9208 [(set_attr "conds" "clob")
9209 (set_attr "length" "16")])
9211 ; If the above pattern is followed by a CMP insn, then the compare is
9212 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: AND variant of *ior_scc_scc_cmp.
9213 (define_insn_and_split "*and_scc_scc_cmp"
9214 [(set (match_operand 0 "dominant_cc_register" "")
9215 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9216 [(match_operand:SI 1 "s_register_operand" "r")
9217 (match_operand:SI 2 "arm_add_operand" "rIL")])
9218 (match_operator:SI 6 "arm_comparison_operator"
9219 [(match_operand:SI 4 "s_register_operand" "r")
9220 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9222 (set (match_operand:SI 7 "s_register_operand" "=r")
9223 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9224 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9227 "TARGET_ARM && reload_completed"
9231 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9232 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9234 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9236 [(set_attr "conds" "set")
9237 (set_attr "length" "16")])
9239 ;; If there is no dominance in the comparison, then we can still save an
9240 ;; instruction in the AND case, since we can know that the second compare
9241 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: non-dominant AND case.  Splits into an scc for the
;; first condition, a compare for the second (operands 7/8 built in prep
;; code via SELECT_CC_MODE), and a conditional zeroing of the result.
9243 (define_insn_and_split "*and_scc_scc_nodom"
9244 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9245 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9246 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9247 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9248 (match_operator:SI 6 "arm_comparison_operator"
9249 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9250 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9251 (clobber (reg:CC CC_REGNUM))]
9253 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9256 "TARGET_ARM && reload_completed"
9257 [(parallel [(set (match_dup 0)
9258 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9259 (clobber (reg:CC CC_REGNUM))])
9260 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9262 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9265 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9266 operands[4], operands[5]),
9268 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9270 [(set_attr "conds" "clob")
9271 (set_attr "length" "20")])
;; NOTE(review): the opening (define_split lines for the two splits below
;; were dropped by the extraction (line numbers jump 9273 and 9292 are
;; absent); only the split bodies remain.  Verify against upstream arm.md.
;;
;; Split: CC_NOOV compare of (ior (and reg ...) (comparison ...)); rewrites
;; via a scratch (operand 4) so the final test is (and scratch 1).
9274 [(set (reg:CC_NOOV CC_REGNUM)
9275 (compare:CC_NOOV (ior:SI
9276 (and:SI (match_operand:SI 0 "s_register_operand" "")
9278 (match_operator:SI 1 "arm_comparison_operator"
9279 [(match_operand:SI 2 "s_register_operand" "")
9280 (match_operand:SI 3 "arm_add_operand" "")]))
9282 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9285 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9287 (set (reg:CC_NOOV CC_REGNUM)
9288 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Split: commuted form of the above with the comparison first in the ior.
9293 [(set (reg:CC_NOOV CC_REGNUM)
9294 (compare:CC_NOOV (ior:SI
9295 (match_operator:SI 1 "arm_comparison_operator"
9296 [(match_operand:SI 2 "s_register_operand" "")
9297 (match_operand:SI 3 "arm_add_operand" "")])
9298 (and:SI (match_operand:SI 0 "s_register_operand" "")
9301 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9304 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9306 (set (reg:CC_NOOV CC_REGNUM)
9307 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9310 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: store -(condition) i.e. 0 or -1 (all ones) into a register.
;; LT against 0 is a single asr #31; NE uses subs+mvnne; otherwise
;; cmp plus two conditional moves of #0 / #-1 (mvn).
9312 (define_insn "*negscc"
9313 [(set (match_operand:SI 0 "s_register_operand" "=r")
9314 (neg:SI (match_operator 3 "arm_comparison_operator"
9315 [(match_operand:SI 1 "s_register_operand" "r")
9316 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9317 (clobber (reg:CC CC_REGNUM))]
9320 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9321 return \"mov\\t%0, %1, asr #31\";
9323 if (GET_CODE (operands[3]) == NE)
9324 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9326 output_asm_insn (\"cmp\\t%1, %2\", operands);
9327 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9328 return \"mvn%d3\\t%0, #0\";
9330 [(set_attr "conds" "clob")
9331 (set_attr "length" "12")]
;; NOTE(review): original line numbers jump inside this pattern (dropped
;; lines in the extract); the insn condition and some rtl are missing.
;;
;; movcond: full conditional move with its own compare (clobbers CC).
;; Special-cases LT/GE against zero using the sign bit of operand 3
;; (and/bic with asr #31, or the flag-setting asr #32 forms when the
;; other arm still needs a conditional mov); otherwise falls back to
;; cmp/cmn plus up to two conditional moves.
9334 (define_insn "movcond"
9335 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9337 (match_operator 5 "arm_comparison_operator"
9338 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9339 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9340 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9341 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9342 (clobber (reg:CC CC_REGNUM))]
;; x < 0 ? a : b -- use the sign bit of %3 directly.
9345 if (GET_CODE (operands[5]) == LT
9346 && (operands[4] == const0_rtx))
9348 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9350 if (operands[2] == const0_rtx)
9351 return \"and\\t%0, %1, %3, asr #31\";
;; asr #32 sets the carry from the sign bit, enabling movcc/movcs.
9352 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9354 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9356 if (operands[1] == const0_rtx)
9357 return \"bic\\t%0, %2, %3, asr #31\";
9358 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9360 /* The only case that falls through to here is when both ops 1 & 2
;; x >= 0 ? a : b -- mirror image of the LT case.
9364 if (GET_CODE (operands[5]) == GE
9365 && (operands[4] == const0_rtx))
9367 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9369 if (operands[2] == const0_rtx)
9370 return \"bic\\t%0, %1, %3, asr #31\";
9371 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9373 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9375 if (operands[1] == const0_rtx)
9376 return \"and\\t%0, %2, %3, asr #31\";
9377 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9379 /* The only case that falls through to here is when both ops 1 & 2
;; General case: use cmn when the negated constant is encodable, else cmp.
9382 if (GET_CODE (operands[4]) == CONST_INT
9383 && !const_ok_for_arm (INTVAL (operands[4])))
9384 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9386 output_asm_insn (\"cmp\\t%3, %4\", operands);
9387 if (which_alternative != 0)
9388 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9389 if (which_alternative != 1)
9390 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9393 [(set_attr "conds" "clob")
9394 (set_attr "length" "8,8,12")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions and output templates are missing
;; for the *ifcompare_* forms.
;;
9397 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: (cond ? a+b : c) with its own compare; clobbers CC.
9399 (define_insn "*ifcompare_plus_move"
9400 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9401 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9402 [(match_operand:SI 4 "s_register_operand" "r,r")
9403 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9405 (match_operand:SI 2 "s_register_operand" "r,r")
9406 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9407 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9408 (clobber (reg:CC CC_REGNUM))]
9411 [(set_attr "conds" "clob")
9412 (set_attr "length" "8,12")]
;; *if_plus_move: same selection but the CC register is already set; uses
;; conditional add/sub (sub for negatable constants) plus an optional mov
;; for the other arm.
9415 (define_insn "*if_plus_move"
9416 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9418 (match_operator 4 "arm_comparison_operator"
9419 [(match_operand 5 "cc_register" "") (const_int 0)])
9421 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9422 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9423 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9427 sub%d4\\t%0, %2, #%n3
9428 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9429 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9430 [(set_attr "conds" "use")
9431 (set_attr "length" "4,4,8,8")
9432 (set_attr "type" "*,*,*,*")]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the plus in
;; the else arm; clobbers CC.
9435 (define_insn "*ifcompare_move_plus"
9436 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9437 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9438 [(match_operand:SI 4 "s_register_operand" "r,r")
9439 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9440 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9442 (match_operand:SI 2 "s_register_operand" "r,r")
9443 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9444 (clobber (reg:CC CC_REGNUM))]
9447 [(set_attr "conds" "clob")
9448 (set_attr "length" "8,12")]
;; *if_move_plus: CC already set; inverse-condition add/sub with optional
;; direct-condition mov.
9451 (define_insn "*if_move_plus"
9452 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9454 (match_operator 4 "arm_comparison_operator"
9455 [(match_operand 5 "cc_register" "") (const_int 0)])
9456 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9458 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9459 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9463 sub%D4\\t%0, %2, #%n3
9464 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9465 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9466 [(set_attr "conds" "use")
9467 (set_attr "length" "4,4,8,8")
9468 (set_attr "type" "*,*,*,*")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_arith_arith: select between two shiftable-operator results,
;; with its own compare; clobbers CC.
9471 (define_insn "*ifcompare_arith_arith"
9472 [(set (match_operand:SI 0 "s_register_operand" "=r")
9473 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9474 [(match_operand:SI 5 "s_register_operand" "r")
9475 (match_operand:SI 6 "arm_add_operand" "rIL")])
9476 (match_operator:SI 8 "shiftable_operator"
9477 [(match_operand:SI 1 "s_register_operand" "r")
9478 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9479 (match_operator:SI 7 "shiftable_operator"
9480 [(match_operand:SI 3 "s_register_operand" "r")
9481 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9482 (clobber (reg:CC CC_REGNUM))]
9485 [(set_attr "conds" "clob")
9486 (set_attr "length" "12")]
;; *if_arith_arith: CC already set; two conditional ALU ops, one per arm
;; (%I6 with direct condition, %I7 with inverse).
9489 (define_insn "*if_arith_arith"
9490 [(set (match_operand:SI 0 "s_register_operand" "=r")
9491 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9492 [(match_operand 8 "cc_register" "") (const_int 0)])
9493 (match_operator:SI 6 "shiftable_operator"
9494 [(match_operand:SI 1 "s_register_operand" "r")
9495 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9496 (match_operator:SI 7 "shiftable_operator"
9497 [(match_operand:SI 3 "s_register_operand" "r")
9498 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9500 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9501 [(set_attr "conds" "use")
9502 (set_attr "length" "8")]
;; *ifcompare_arith_move: (cond ? op(a,b) : c) with its own compare.
;; Can use the two-instruction sign-bit trick for LT/GE against zero when
;; the false arm matches the op's identity operand; otherwise cmp/cmn plus
;; conditional ALU op and optional mov.  Clobbers CC.
9505 (define_insn "*ifcompare_arith_move"
9506 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9507 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9508 [(match_operand:SI 2 "s_register_operand" "r,r")
9509 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9510 (match_operator:SI 7 "shiftable_operator"
9511 [(match_operand:SI 4 "s_register_operand" "r,r")
9512 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9513 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9514 (clobber (reg:CC CC_REGNUM))]
9517 /* If we have an operation where (op x 0) is the identity operation and
9518 the conditional operator is LT or GE and we are comparing against zero and
9519 everything is in registers then we can do this in two instructions. */
9520 if (operands[3] == const0_rtx
9521 && GET_CODE (operands[7]) != AND
9522 && GET_CODE (operands[5]) == REG
9523 && GET_CODE (operands[1]) == REG
9524 && REGNO (operands[1]) == REGNO (operands[4])
9525 && REGNO (operands[4]) != REGNO (operands[0]))
;; and/bic with asr #31 materializes (cond ? %5 : 0) into %0 first.
9527 if (GET_CODE (operands[6]) == LT)
9528 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9529 else if (GET_CODE (operands[6]) == GE)
9530 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9532 if (GET_CODE (operands[3]) == CONST_INT
9533 && !const_ok_for_arm (INTVAL (operands[3])))
9534 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9536 output_asm_insn (\"cmp\\t%2, %3\", operands);
9537 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9538 if (which_alternative != 0)
9539 return \"mov%D6\\t%0, %1\";
9542 [(set_attr "conds" "clob")
9543 (set_attr "length" "8,12")]
;; *if_arith_move: CC already set; conditional ALU op plus optional
;; inverse-condition mov.
9546 (define_insn "*if_arith_move"
9547 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9548 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9549 [(match_operand 6 "cc_register" "") (const_int 0)])
9550 (match_operator:SI 5 "shiftable_operator"
9551 [(match_operand:SI 2 "s_register_operand" "r,r")
9552 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9553 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9557 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9558 [(set_attr "conds" "use")
9559 (set_attr "length" "4,8")
9560 (set_attr "type" "*,*")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions are missing in places.
;;
;; *ifcompare_move_arith: mirror of *ifcompare_arith_move with the ALU op
;; in the else arm (so conditions are used inverted); clobbers CC.
9563 (define_insn "*ifcompare_move_arith"
9564 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9565 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9566 [(match_operand:SI 4 "s_register_operand" "r,r")
9567 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9568 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9569 (match_operator:SI 7 "shiftable_operator"
9570 [(match_operand:SI 2 "s_register_operand" "r,r")
9571 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9572 (clobber (reg:CC CC_REGNUM))]
9575 /* If we have an operation where (op x 0) is the identity operation and
9576 the conditional operator is LT or GE and we are comparing against zero and
9577 everything is in registers then we can do this in two instructions */
9578 if (operands[5] == const0_rtx
9579 && GET_CODE (operands[7]) != AND
9580 && GET_CODE (operands[3]) == REG
9581 && GET_CODE (operands[1]) == REG
9582 && REGNO (operands[1]) == REGNO (operands[2])
9583 && REGNO (operands[2]) != REGNO (operands[0]))
;; GE/LT roles are swapped relative to *ifcompare_arith_move because the
;; ALU op here is the ELSE arm.
9585 if (GET_CODE (operands[6]) == GE)
9586 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9587 else if (GET_CODE (operands[6]) == LT)
9588 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9591 if (GET_CODE (operands[5]) == CONST_INT
9592 && !const_ok_for_arm (INTVAL (operands[5])))
9593 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9595 output_asm_insn (\"cmp\\t%4, %5\", operands);
9597 if (which_alternative != 0)
9598 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9599 return \"%I7%D6\\t%0, %2, %3\";
9601 [(set_attr "conds" "clob")
9602 (set_attr "length" "8,12")]
;; *if_move_arith: CC already set; inverse-condition ALU op plus optional
;; direct-condition mov.
9605 (define_insn "*if_move_arith"
9606 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9608 (match_operator 4 "arm_comparison_operator"
9609 [(match_operand 6 "cc_register" "") (const_int 0)])
9610 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9611 (match_operator:SI 5 "shiftable_operator"
9612 [(match_operand:SI 2 "s_register_operand" "r,r")
9613 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9617 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9618 [(set_attr "conds" "use")
9619 (set_attr "length" "4,8")
9620 (set_attr "type" "*,*")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_move_not: (cond ? a : ~b) with its own compare; clobbers CC.
9623 (define_insn "*ifcompare_move_not"
9624 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9626 (match_operator 5 "arm_comparison_operator"
9627 [(match_operand:SI 3 "s_register_operand" "r,r")
9628 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9629 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9631 (match_operand:SI 2 "s_register_operand" "r,r"))))
9632 (clobber (reg:CC CC_REGNUM))]
9635 [(set_attr "conds" "clob")
9636 (set_attr "length" "8,12")]
;; *if_move_not: CC already set; conditional mvn for the NOT arm, with a
;; mov or mvn (K constraint: ~constant encodable) for the other arm.
9639 (define_insn "*if_move_not"
9640 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9642 (match_operator 4 "arm_comparison_operator"
9643 [(match_operand 3 "cc_register" "") (const_int 0)])
9644 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9645 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9649 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9650 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9651 [(set_attr "conds" "use")
9652 (set_attr "insn" "mvn")
9653 (set_attr "length" "4,8,8")]
;; *ifcompare_not_move: (cond ? ~a : b) with its own compare; clobbers CC.
9656 (define_insn "*ifcompare_not_move"
9657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9659 (match_operator 5 "arm_comparison_operator"
9660 [(match_operand:SI 3 "s_register_operand" "r,r")
9661 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9663 (match_operand:SI 2 "s_register_operand" "r,r"))
9664 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9665 (clobber (reg:CC CC_REGNUM))]
9668 [(set_attr "conds" "clob")
9669 (set_attr "length" "8,12")]
;; *if_not_move: CC already set; NOT arm first, so the mvn takes the
;; direct condition and the move the inverse.
9672 (define_insn "*if_not_move"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9675 (match_operator 4 "arm_comparison_operator"
9676 [(match_operand 3 "cc_register" "") (const_int 0)])
9677 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9678 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9682 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9683 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9684 [(set_attr "conds" "use")
9685 (set_attr "insn" "mvn")
9686 (set_attr "length" "4,8,8")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_shift_move: (cond ? (a shift b) : c) with its own compare;
;; clobbers CC.
9689 (define_insn "*ifcompare_shift_move"
9690 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9692 (match_operator 6 "arm_comparison_operator"
9693 [(match_operand:SI 4 "s_register_operand" "r,r")
9694 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9695 (match_operator:SI 7 "shift_operator"
9696 [(match_operand:SI 2 "s_register_operand" "r,r")
9697 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9698 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9699 (clobber (reg:CC CC_REGNUM))]
9702 [(set_attr "conds" "clob")
9703 (set_attr "length" "8,12")]
;; *if_shift_move: CC already set; conditional mov-with-shift (%S4) for the
;; shift arm, mov/mvn for the other.  "type" depends on whether the shift
;; amount is a constant (alu_shift) or a register (alu_shift_reg).
9706 (define_insn "*if_shift_move"
9707 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9709 (match_operator 5 "arm_comparison_operator"
9710 [(match_operand 6 "cc_register" "") (const_int 0)])
9711 (match_operator:SI 4 "shift_operator"
9712 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9713 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9714 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9718 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9719 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9720 [(set_attr "conds" "use")
9721 (set_attr "shift" "2")
9722 (set_attr "length" "4,8,8")
9723 (set_attr "insn" "mov")
9724 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9725 (const_string "alu_shift")
9726 (const_string "alu_shift_reg")))]
;; *ifcompare_move_shift: mirror with the shift in the else arm; clobbers CC.
9729 (define_insn "*ifcompare_move_shift"
9730 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9732 (match_operator 6 "arm_comparison_operator"
9733 [(match_operand:SI 4 "s_register_operand" "r,r")
9734 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9735 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9736 (match_operator:SI 7 "shift_operator"
9737 [(match_operand:SI 2 "s_register_operand" "r,r")
9738 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9739 (clobber (reg:CC CC_REGNUM))]
9742 [(set_attr "conds" "clob")
9743 (set_attr "length" "8,12")]
;; *if_move_shift: CC already set; inverse-condition shift-mov, direct-
;; condition mov/mvn for the other arm.
9746 (define_insn "*if_move_shift"
9747 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9749 (match_operator 5 "arm_comparison_operator"
9750 [(match_operand 6 "cc_register" "") (const_int 0)])
9751 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9752 (match_operator:SI 4 "shift_operator"
9753 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9754 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9758 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9759 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9760 [(set_attr "conds" "use")
9761 (set_attr "shift" "2")
9762 (set_attr "length" "4,8,8")
9763 (set_attr "insn" "mov")
9764 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9765 (const_string "alu_shift")
9766 (const_string "alu_shift_reg")))]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_shift_shift: select between two shift results with its own
;; compare; clobbers CC.
9769 (define_insn "*ifcompare_shift_shift"
9770 [(set (match_operand:SI 0 "s_register_operand" "=r")
9772 (match_operator 7 "arm_comparison_operator"
9773 [(match_operand:SI 5 "s_register_operand" "r")
9774 (match_operand:SI 6 "arm_add_operand" "rIL")])
9775 (match_operator:SI 8 "shift_operator"
9776 [(match_operand:SI 1 "s_register_operand" "r")
9777 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9778 (match_operator:SI 9 "shift_operator"
9779 [(match_operand:SI 3 "s_register_operand" "r")
9780 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9781 (clobber (reg:CC CC_REGNUM))]
9784 [(set_attr "conds" "clob")
9785 (set_attr "length" "12")]
;; *if_shift_shift: CC already set; one conditional shift-mov per arm.
;; type is alu_shift only when BOTH shift amounts are constants.
9788 (define_insn "*if_shift_shift"
9789 [(set (match_operand:SI 0 "s_register_operand" "=r")
9791 (match_operator 5 "arm_comparison_operator"
9792 [(match_operand 8 "cc_register" "") (const_int 0)])
9793 (match_operator:SI 6 "shift_operator"
9794 [(match_operand:SI 1 "s_register_operand" "r")
9795 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9796 (match_operator:SI 7 "shift_operator"
9797 [(match_operand:SI 3 "s_register_operand" "r")
9798 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9800 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9801 [(set_attr "conds" "use")
9802 (set_attr "shift" "1")
9803 (set_attr "length" "8")
9804 (set_attr "insn" "mov")
9805 (set (attr "type") (if_then_else
9806 (and (match_operand 2 "const_int_operand" "")
9807 (match_operand 4 "const_int_operand" ""))
9808 (const_string "alu_shift")
9809 (const_string "alu_shift_reg")))]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_not_arith: (cond ? ~a : op(b,c)) with its own compare;
;; clobbers CC.
9812 (define_insn "*ifcompare_not_arith"
9813 [(set (match_operand:SI 0 "s_register_operand" "=r")
9815 (match_operator 6 "arm_comparison_operator"
9816 [(match_operand:SI 4 "s_register_operand" "r")
9817 (match_operand:SI 5 "arm_add_operand" "rIL")])
9818 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9819 (match_operator:SI 7 "shiftable_operator"
9820 [(match_operand:SI 2 "s_register_operand" "r")
9821 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9822 (clobber (reg:CC CC_REGNUM))]
9825 [(set_attr "conds" "clob")
9826 (set_attr "length" "12")]
;; *if_not_arith: CC already set; direct-condition mvn, inverse-condition
;; ALU op.
9829 (define_insn "*if_not_arith"
9830 [(set (match_operand:SI 0 "s_register_operand" "=r")
9832 (match_operator 5 "arm_comparison_operator"
9833 [(match_operand 4 "cc_register" "") (const_int 0)])
9834 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9835 (match_operator:SI 6 "shiftable_operator"
9836 [(match_operand:SI 2 "s_register_operand" "r")
9837 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9839 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9840 [(set_attr "conds" "use")
9841 (set_attr "insn" "mvn")
9842 (set_attr "length" "8")]
;; *ifcompare_arith_not: mirror with the NOT in the else arm; clobbers CC.
9845 (define_insn "*ifcompare_arith_not"
9846 [(set (match_operand:SI 0 "s_register_operand" "=r")
9848 (match_operator 6 "arm_comparison_operator"
9849 [(match_operand:SI 4 "s_register_operand" "r")
9850 (match_operand:SI 5 "arm_add_operand" "rIL")])
9851 (match_operator:SI 7 "shiftable_operator"
9852 [(match_operand:SI 2 "s_register_operand" "r")
9853 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9854 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9855 (clobber (reg:CC CC_REGNUM))]
9858 [(set_attr "conds" "clob")
9859 (set_attr "length" "12")]
;; *if_arith_not: CC already set; inverse-condition mvn, direct-condition
;; ALU op.
9862 (define_insn "*if_arith_not"
9863 [(set (match_operand:SI 0 "s_register_operand" "=r")
9865 (match_operator 5 "arm_comparison_operator"
9866 [(match_operand 4 "cc_register" "") (const_int 0)])
9867 (match_operator:SI 6 "shiftable_operator"
9868 [(match_operand:SI 2 "s_register_operand" "r")
9869 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9870 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9872 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9873 [(set_attr "conds" "use")
9874 (set_attr "insn" "mvn")
9875 (set_attr "length" "8")]
;; NOTE(review): original line numbers jump inside these patterns (dropped
;; lines in the extract); insn conditions/templates are missing in places.
;;
;; *ifcompare_neg_move: (cond ? -a : b) with its own compare; clobbers CC.
9878 (define_insn "*ifcompare_neg_move"
9879 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9881 (match_operator 5 "arm_comparison_operator"
9882 [(match_operand:SI 3 "s_register_operand" "r,r")
9883 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9884 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9885 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9886 (clobber (reg:CC CC_REGNUM))]
9889 [(set_attr "conds" "clob")
9890 (set_attr "length" "8,12")]
;; *if_neg_move: CC already set; conditional rsb #0 for the negate arm,
;; mov/mvn for the other.
9893 (define_insn "*if_neg_move"
9894 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9896 (match_operator 4 "arm_comparison_operator"
9897 [(match_operand 3 "cc_register" "") (const_int 0)])
9898 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9899 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9903 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9904 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9905 [(set_attr "conds" "use")
9906 (set_attr "length" "4,8,8")]
;; *ifcompare_move_neg: mirror with the negate in the else arm; clobbers CC.
9909 (define_insn "*ifcompare_move_neg"
9910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9912 (match_operator 5 "arm_comparison_operator"
9913 [(match_operand:SI 3 "s_register_operand" "r,r")
9914 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9915 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9916 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9917 (clobber (reg:CC CC_REGNUM))]
9920 [(set_attr "conds" "clob")
9921 (set_attr "length" "8,12")]
;; *if_move_neg: CC already set; inverse-condition rsb #0, direct-condition
;; mov/mvn.
9924 (define_insn "*if_move_neg"
9925 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9927 (match_operator 4 "arm_comparison_operator"
9928 [(match_operand 3 "cc_register" "") (const_int 0)])
9929 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9930 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9934 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9935 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9936 [(set_attr "conds" "use")
9937 (set_attr "length" "4,8,8")]
;; Combine two loads from adjacent memory locations plus an arithmetic
;; op into an LDM (load-multiple) followed by the op.  The C output
;; code picks LDMIB/LDMIA/LDMDA depending on the two offsets (val1,
;; val2), falling back to two LDRs when the base offset cannot be
;; reached with a single ADD immediate.  Register ordering in the LDM
;; list must be ascending, hence the REGNO comparison below.
9940 (define_insn "*arith_adjacentmem"
9941 [(set (match_operand:SI 0 "s_register_operand" "=r")
9942 (match_operator:SI 1 "shiftable_operator"
9943 [(match_operand:SI 2 "memory_operand" "m")
9944 (match_operand:SI 3 "memory_operand" "m")]))
9945 (clobber (match_scratch:SI 4 "=r"))]
9946 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9952 HOST_WIDE_INT val1 = 0, val2 = 0;
9954 if (REGNO (operands[0]) > REGNO (operands[4]))
9956 ldm[1] = operands[4];
9957 ldm[2] = operands[0];
9961 ldm[1] = operands[0];
9962 ldm[2] = operands[4];
9965 base_reg = XEXP (operands[2], 0);
9967 if (!REG_P (base_reg))
9969 val1 = INTVAL (XEXP (base_reg, 1));
9970 base_reg = XEXP (base_reg, 0);
9973 if (!REG_P (XEXP (operands[3], 0)))
9974 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9976 arith[0] = operands[0];
9977 arith[3] = operands[1];
9991 if (val1 !=0 && val2 != 0)
9995 if (val1 == 4 || val2 == 4)
9996 /* Other val must be 8, since we know they are adjacent and neither
9998 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9999 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10001 ldm[0] = ops[0] = operands[4];
10003 ops[2] = GEN_INT (val1);
10004 output_add_immediate (ops);
10006 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10008 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10012 /* Offset is out of range for a single add, so use two ldr.  */
10015 ops[2] = GEN_INT (val1);
10016 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10018 ops[2] = GEN_INT (val2);
10019 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10022 else if (val1 != 0)
10025 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10027 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10032 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10034 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10036 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10039 [(set_attr "length" "12")
10040 (set_attr "predicable" "yes")
10041 (set_attr "type" "load1")]
10044 ; This pattern is never tried by combine, so do it as a peephole
;; Peephole: fold "mov rD, rS; cmp rS, #0" into a single parallel that
;; sets both the CC register and the destination (a MOVS-style insn).
10047 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10048 (match_operand:SI 1 "arm_general_register_operand" ""))
10049 (set (reg:CC CC_REGNUM)
10050 (compare:CC (match_dup 1) (const_int 0)))]
10052 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10053 (set (match_dup 0) (match_dup 1))])]
;; Split (x & (cond ? -1 : 0)) style patterns: materialize the mask in
;; the scratch (operand 5) with a NOT of an arithmetic shift, then AND.
10058 [(set (match_operand:SI 0 "s_register_operand" "")
10059 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10061 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10062 [(match_operand:SI 3 "s_register_operand" "")
10063 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10064 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10066 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10067 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10072 ;; This split can be used because CC_Z mode implies that the following
10073 ;; branch will be an equality, or an unsigned inequality, so the sign
10074 ;; extension is not needed.
;; Rewrites a shifted-QImode compare as a zero-extend into a scratch
;; followed by a plain compare; the constant is shifted down to match.
10077 [(set (reg:CC_Z CC_REGNUM)
10079 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10081 (match_operand 1 "const_int_operand" "")))
10082 (clobber (match_scratch:SI 2 ""))]
;; Only valid when the constant's low 24 bits are clear, i.e. it is
;; entirely representable in the top byte that the shift exposes.
10084 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10085 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10086 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10087 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10089 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10092 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Function prologue: dispatch to the ARM or Thumb-1 expander.  The
;; (clobber (const_int 0)) is just a placeholder RTL body.
10094 (define_expand "prologue"
10095 [(clobber (const_int 0))]
10098 arm_expand_prologue ();
10100 thumb1_expand_prologue ();
;; Function epilogue.  For eh_return, r2 (the stack-adjust register)
;; must be kept live across the epilogue via prologue_use.
10105 (define_expand "epilogue"
10106 [(clobber (const_int 0))]
10109 if (crtl->calls_eh_return)
10110 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10112 thumb1_expand_epilogue ();
10113 else if (USE_RETURN_INSN (FALSE))
10115 emit_jump_insn (gen_return ());
10118 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10119 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
;; Emits the ARM->Thumb interworking sequence at function entry;
;; text produced by thumb1_output_interwork ().
10124 (define_insn "prologue_thumb1_interwork"
10125 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10127 "* return thumb1_output_interwork ();"
10128 [(set_attr "length" "8")]
10131 ;; Note - although unspec_volatile's USE all hard registers,
10132 ;; USEs are ignored after reload has completed.  Thus we need
10133 ;; to add an unspec of the link register to ensure that flow
10134 ;; does not think that it is unused by the sibcall branch that
10135 ;; will replace the standard function epilogue.
;; Epilogue emitted before a sibling call.  The UNSPEC_PROLOGUE_USE of
;; LR keeps the link register live (see comment above in the file);
;; output is either a return insn or a full epilogue sequence.
10136 (define_insn "sibcall_epilogue"
10137 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10138 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10141 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10142 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10143 return arm_output_epilogue (next_nonnote_insn (insn));
10145 ;; Length is absolute worst case
10146 [(set_attr "length" "44")
10147 (set_attr "type" "block")
10148 ;; We don't clobber the conditions, but the potential length of this
10149 ;; operation is sufficient to make conditionalizing the sequence
10150 ;; unlikely to be profitable.
10151 (set_attr "conds" "clob")]
;; Standard epilogue body for ARM/Thumb-2 vs. Thumb-1.
10154 (define_insn "*epilogue_insns"
10155 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10159 return arm_output_epilogue (NULL);
10160 else /* TARGET_THUMB1 */
10161 return thumb_unexpanded_epilogue ();
10163 ; Length is absolute worst case
10164 [(set_attr "length" "44")
10165 (set_attr "type" "block")
10166 ;; We don't clobber the conditions, but the potential length of this
10167 ;; operation is sufficient to make conditionalizing the sequence
10168 ;; unlikely to be profitable.
10169 (set_attr "conds" "clob")]
;; Exception-handling epilogue: operand 0 = handler address, operand 1
;; = stack adjustment, operand 2 = register holding the EH data.  The
;; EH runtime expects its value in r2, so copy it there if needed.
10172 (define_expand "eh_epilogue"
10173 [(use (match_operand:SI 0 "register_operand" ""))
10174 (use (match_operand:SI 1 "register_operand" ""))
10175 (use (match_operand:SI 2 "register_operand" ""))]
10179 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10180 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10182 rtx ra = gen_rtx_REG (Pmode, 2);
10184 emit_move_insn (ra, operands[2]);
10187 /* This is a hack -- we may have crystallized the function type too
10189 cfun->machine->func_type = 0;
10193 ;; This split is only used during output to reduce the number of patterns
10194 ;; that need assembler instructions adding to them. We allowed the setting
10195 ;; of the conditions to be implicit during rtl generation so that
10196 ;; the conditional compare patterns would work. However this conflicts to
10197 ;; some extent with the conditional data operations, so we have to split them
10200 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10201 ;; conditional execution sufficient?
;; Post-reload splitter: if_then_else with an explicit comparison is
;; split into a compare that sets CC followed by a cond_exec'd move of
;; the false arm; the condition is reversed (with the maybe-unordered
;; variant for FP compare modes) so the cond_exec guards the else case.
10204 [(set (match_operand:SI 0 "s_register_operand" "")
10205 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10206 [(match_operand 2 "" "") (match_operand 3 "" "")])
10208 (match_operand 4 "" "")))
10209 (clobber (reg:CC CC_REGNUM))]
10210 "TARGET_ARM && reload_completed"
10211 [(set (match_dup 5) (match_dup 6))
10212 (cond_exec (match_dup 7)
10213 (set (match_dup 0) (match_dup 4)))]
10216 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10217 operands[2], operands[3]);
10218 enum rtx_code rc = GET_CODE (operands[1]);
10220 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10221 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10222 if (mode == CCFPmode || mode == CCFPEmode)
10223 rc = reverse_condition_maybe_unordered (rc);
10225 rc = reverse_condition (rc);
10227 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Variant guarding the then-arm: here the original condition is kept
;; (match_op_dup 1), so no reversal is required.
10232 [(set (match_operand:SI 0 "s_register_operand" "")
10233 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10234 [(match_operand 2 "" "") (match_operand 3 "" "")])
10235 (match_operand 4 "" "")
10237 (clobber (reg:CC CC_REGNUM))]
10238 "TARGET_ARM && reload_completed"
10239 [(set (match_dup 5) (match_dup 6))
10240 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10241 (set (match_dup 0) (match_dup 4)))]
10244 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10245 operands[2], operands[3]);
10247 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10248 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Variant with both arms: compare once, then two cond_execs — one on
;; the original condition, one on its reverse.
10253 [(set (match_operand:SI 0 "s_register_operand" "")
10254 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10255 [(match_operand 2 "" "") (match_operand 3 "" "")])
10256 (match_operand 4 "" "")
10257 (match_operand 5 "" "")))
10258 (clobber (reg:CC CC_REGNUM))]
10259 "TARGET_ARM && reload_completed"
10260 [(set (match_dup 6) (match_dup 7))
10261 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10262 (set (match_dup 0) (match_dup 4)))
10263 (cond_exec (match_dup 8)
10264 (set (match_dup 0) (match_dup 5)))]
10267 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10268 operands[2], operands[3]);
10269 enum rtx_code rc = GET_CODE (operands[1]);
10271 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10272 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10273 if (mode == CCFPmode || mode == CCFPEmode)
10274 rc = reverse_condition_maybe_unordered (rc);
10276 rc = reverse_condition (rc);
10278 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; As above but the else-arm is a bitwise NOT of a register, emitted as
;; a conditional MVN on the reversed condition.
10283 [(set (match_operand:SI 0 "s_register_operand" "")
10284 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10285 [(match_operand:SI 2 "s_register_operand" "")
10286 (match_operand:SI 3 "arm_add_operand" "")])
10287 (match_operand:SI 4 "arm_rhs_operand" "")
10289 (match_operand:SI 5 "s_register_operand" ""))))
10290 (clobber (reg:CC CC_REGNUM))]
10291 "TARGET_ARM && reload_completed"
10292 [(set (match_dup 6) (match_dup 7))
10293 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10294 (set (match_dup 0) (match_dup 4)))
10295 (cond_exec (match_dup 8)
10296 (set (match_dup 0) (not:SI (match_dup 5))))]
10299 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10300 operands[2], operands[3]);
10301 enum rtx_code rc = GET_CODE (operands[1]);
10303 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10304 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10305 if (mode == CCFPmode || mode == CCFPEmode)
10306 rc = reverse_condition_maybe_unordered (rc);
10308 rc = reverse_condition (rc);
10310 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional move-or-MVN using an already-set CC register: select
;; operand 1 when the condition holds, ~operand 2 otherwise.
10314 (define_insn "*cond_move_not"
10315 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10316 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10317 [(match_operand 3 "cc_register" "") (const_int 0)])
10318 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10320 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10324 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10325 [(set_attr "conds" "use")
10326 (set_attr "insn" "mvn")
10327 (set_attr "length" "4,8")]
10330 ;; The next two patterns occur when an AND operation is followed by a
10331 ;; scc insn sequence
;; One-bit sign_extract: result is -1 if the selected bit of operand 1
;; is set, else 0.  Implemented as ANDS with the bit mask (which also
;; zeroes the result) followed by a conditional MVN.
10333 (define_insn "*sign_extract_onebit"
10334 [(set (match_operand:SI 0 "s_register_operand" "=r")
10335 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10337 (match_operand:SI 2 "const_int_operand" "n")))
10338 (clobber (reg:CC CC_REGNUM))]
10341 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10342 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10343 return \"mvnne\\t%0, #0\";
10345 [(set_attr "conds" "clob")
10346 (set_attr "length" "8")]
;; Inverted form: 0 if the bit is set, -1 otherwise (TST + MVNEQ/MOVNE).
10349 (define_insn "*not_signextract_onebit"
10350 [(set (match_operand:SI 0 "s_register_operand" "=r")
10352 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10354 (match_operand:SI 2 "const_int_operand" "n"))))
10355 (clobber (reg:CC CC_REGNUM))]
10358 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10359 output_asm_insn (\"tst\\t%1, %2\", operands);
10360 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10361 return \"movne\\t%0, #0\";
10363 [(set_attr "conds" "clob")
10364 (set_attr "length" "12")]
10366 ;; ??? The above patterns need auditing for Thumb-2
10368 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10369 ;; expressions. For simplicity, the first register is also in the unspec
10371 ;; To avoid the usage of GNU extension, the length attribute is computed
10372 ;; in a C function arm_attr_length_push_multi.
;; Push multiple registers.  Emits STR for a single register in ARM
;; mode, STMFD in ARM/Thumb-2, or PUSH in Thumb; the register list is
;; assembled into a buffer from the parallel's elements.
10373 (define_insn "*push_multi"
10374 [(match_parallel 2 "multi_register_push"
10375 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10376 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10377 UNSPEC_PUSH_MULT))])]
10381 int num_saves = XVECLEN (operands[2], 0);
10383 /* For the StrongARM at least it is faster to
10384 use STR to store only a single register.
10385 In Thumb mode always use push, and the assembler will pick
10386 something appropriate.  */
10387 if (num_saves == 1 && TARGET_ARM)
10388 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10395 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10397 strcpy (pattern, \"push\\t{%1\");
10399 for (i = 1; i < num_saves; i++)
10401 strcat (pattern, \", %|\");
10403 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10406 strcat (pattern, \"}\");
10407 output_asm_insn (pattern, operands);
10412 [(set_attr "type" "store4")
10413 (set (attr "length")
10414 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Scheduling barrier tying two stack-related registers together;
;; emits no code (length 0).
10417 (define_insn "stack_tie"
10418 [(set (mem:BLK (scratch))
10419 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10420 (match_operand:SI 1 "s_register_operand" "rk")]
10424 [(set_attr "length" "0")]
10427 ;; Similarly for the floating point registers
;; Push multiple FPA floating-point registers with SFMFD; the count is
;; taken from the parallel's length.
10428 (define_insn "*push_fp_multi"
10429 [(match_parallel 2 "multi_register_push"
10430 [(set (match_operand:BLK 0 "memory_operand" "=m")
10431 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10432 UNSPEC_PUSH_MULT))])]
10433 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10438 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10439 output_asm_insn (pattern, operands);
10442 [(set_attr "type" "f_fpa_store")]
10445 ;; Special patterns for dealing with the constant pool
;; Constant-pool support.  align_4/align_8 emit alignment directives;
;; consttable_end marks the end of a pool; consttable_1 emits a
;; one-byte entry padded out to a word.
10447 (define_insn "align_4"
10448 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10451 assemble_align (32);
10456 (define_insn "align_8"
10457 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10460 assemble_align (64);
10465 (define_insn "consttable_end"
10466 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10469 making_const_table = FALSE;
10474 (define_insn "consttable_1"
10475 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10478 making_const_table = TRUE;
10479 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10480 assemble_zeros (3);
10483 [(set_attr "length" "4")]
;; 2-byte constant-pool entry, padded to a word; floats never reach
;; this size (asserted).
10486 (define_insn "consttable_2"
10487 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10490 making_const_table = TRUE;
10491 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10492 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10493 assemble_zeros (2);
10496 [(set_attr "length" "4")]
;; 4-byte entry: handles HFmode specially, other floats via
;; assemble_real, and integers/addresses via assemble_integer
;; (stripping a stray HIGH first — see the XXX comment below).
10499 (define_insn "consttable_4"
10500 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10504 rtx x = operands[0];
10505 making_const_table = TRUE;
10506 switch (GET_MODE_CLASS (GET_MODE (x)))
10509 if (GET_MODE (x) == HFmode)
10510 arm_emit_fp16_const (x);
10514 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10515 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10519 /* XXX: Sometimes gcc does something really dumb and ends up with
10520 a HIGH in a constant pool entry, usually because it's trying to
10521 load into a VFP register.  We know this will always be used in
10522 combination with a LO_SUM which ignores the high bits, so just
10523 strip off the HIGH.  */
10524 if (GET_CODE (x) == HIGH)
10526 assemble_integer (x, 4, BITS_PER_WORD, 1);
10527 mark_symbol_refs_as_used (x);
10532 [(set_attr "length" "4")]
;; 8-byte entry: float via assemble_real, otherwise assemble_integer.
10535 (define_insn "consttable_8"
10536 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10540 making_const_table = TRUE;
10541 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10546 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10547 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10551 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10556 [(set_attr "length" "8")]
;; 16-byte entry (e.g. Neon quad-word constants); same dispatch.
10559 (define_insn "consttable_16"
10560 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10564 making_const_table = TRUE;
10565 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10570 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10571 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10575 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10580 [(set_attr "length" "16")]
10583 ;; Miscellaneous Thumb patterns
;; Jump-table dispatch.  The expander rewrites the target as
;; label-base + index so the Thumb-1 insn below can branch via a plain
;; register move to PC (never BX, to stay in Thumb state).
10585 (define_expand "tablejump"
10586 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10587 (use (label_ref (match_operand 1 "" "")))])]
10592 /* Hopefully, CSE will eliminate this copy.  */
10593 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10594 rtx reg2 = gen_reg_rtx (SImode);
10596 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10597 operands[0] = reg2;
10602 ;; NB never uses BX.
10603 (define_insn "*thumb1_tablejump"
10604 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10605 (use (label_ref (match_operand 1 "" "")))]
10608 [(set_attr "length" "2")]
10611 ;; V5 Instructions,
;; Count leading zeros — single CLZ instruction, ARMv5+.
10613 (define_insn "clzsi2"
10614 [(set (match_operand:SI 0 "s_register_operand" "=r")
10615 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10616 "TARGET_32BIT && arm_arch5"
10618 [(set_attr "predicable" "yes")
10619 (set_attr "insn" "clz")])
;; Bit-reverse (RBIT), Thumb-2 architectures only; shares the "clz"
;; insn attribute for scheduling purposes.
10621 (define_insn "rbitsi2"
10622 [(set (match_operand:SI 0 "s_register_operand" "=r")
10623 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10624 "TARGET_32BIT && arm_arch_thumb2"
10626 [(set_attr "predicable" "yes")
10627 (set_attr "insn" "clz")])
;; Count trailing zeros synthesized as RBIT followed by CLZ.
10629 (define_expand "ctzsi2"
10630 [(set (match_operand:SI 0 "s_register_operand" "")
10631 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10632 "TARGET_32BIT && arm_arch_thumb2"
10635 rtx tmp = gen_reg_rtx (SImode);
10636 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10637 emit_insn (gen_clzsi2 (operands[0], tmp));
10643 ;; V5E instructions.
;; Data prefetch (PLD), ARMv5E+.  Operands 1/2 (rw/locality hints) are
;; matched but the ARM instruction takes no such hints.
10645 (define_insn "prefetch"
10646 [(prefetch (match_operand:SI 0 "address_operand" "p")
10647 (match_operand:SI 1 "" "")
10648 (match_operand:SI 2 "" ""))]
10649 "TARGET_32BIT && arm_arch5e"
10652 ;; General predication pattern
10655 [(match_operator 0 "arm_comparison_operator"
10656 [(match_operand 1 "cc_register" "")
;; Keeps the named register live through prologue/epilogue scheduling;
;; emits only an assembler comment (length 0).
10662 (define_insn "prologue_use"
10663 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10665 "%@ %0 needed for prologue"
10666 [(set_attr "length" "0")]
10670 ;; Patterns for exception handling
;; Exception-return support: dispatch to the ARM or Thumb variant.
10672 (define_expand "eh_return"
10673 [(use (match_operand 0 "general_operand" ""))]
10678 emit_insn (gen_arm_eh_return (operands[0]));
10680 emit_insn (gen_thumb_eh_return (operands[0]));
10685 ;; We can't expand this before we know where the link register is stored.
;; Split after reload: store the EH handler address into the saved
;; return-address slot via arm_set_return_address, using the scratch.
10686 (define_insn_and_split "arm_eh_return"
10687 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10689 (clobber (match_scratch:SI 1 "=&r"))]
10692 "&& reload_completed"
10696 arm_set_return_address (operands[0], operands[1]);
;; Thumb counterpart; constraints restricted to low registers ("l").
10701 (define_insn_and_split "thumb_eh_return"
10702 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10704 (clobber (match_scratch:SI 1 "=&l"))]
10707 "&& reload_completed"
10711 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (hardware TLS register).
10719 (define_insn "load_tp_hard"
10720 [(set (match_operand:SI 0 "register_operand" "=r")
10721 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10723 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10724 [(set_attr "predicable" "yes")]
10727 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
;; Software TLS: call the __aeabi_read_tp helper; result arrives in r0.
10728 (define_insn "load_tp_soft"
10729 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10730 (clobber (reg:SI LR_REGNUM))
10731 (clobber (reg:SI IP_REGNUM))
10732 (clobber (reg:CC CC_REGNUM))]
10734 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10735 [(set_attr "conds" "clob")]
10738 ;; tls descriptor call
;; TLS descriptor call sequence: emits the .LPIC local label used by
;; the relocation, then the bl to the descriptor resolver.
10739 (define_insn "tlscall"
10740 [(set (reg:SI R0_REGNUM)
10741 (unspec:SI [(reg:SI R0_REGNUM)
10742 (match_operand:SI 0 "" "X")
10743 (match_operand 1 "" "")] UNSPEC_TLS))
10744 (clobber (reg:SI R1_REGNUM))
10745 (clobber (reg:SI LR_REGNUM))
10746 (clobber (reg:SI CC_REGNUM))]
10749 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10750 INTVAL (operands[1]));
10751 return "bl\\t%c0(tlscall)";
10753 [(set_attr "conds" "clob")
10754 (set_attr "length" "4")]
10759 ;; We only care about the lower 16 bits of the constant
10760 ;; being inserted into the upper 16 bits of the register.
;; MOVT-style insert of a 16-bit immediate into the top half of a
;; register (zero_extract destination); operand 0 is read-modify-write.
10761 (define_insn "*arm_movtas_ze"
10762 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10765 (match_operand:SI 1 "const_int_operand" ""))]
10768 [(set_attr "predicable" "yes")
10769 (set_attr "length" "4")]
;; 32-bit byte swap via the REV instruction, ARMv6+.
10772 (define_insn "*arm_rev"
10773 [(set (match_operand:SI 0 "s_register_operand" "=r")
10774 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10775 "TARGET_32BIT && arm_arch6"
10777 [(set_attr "predicable" "yes")
10778 (set_attr "length" "4")]
;; Thumb-1 REV; low registers only, 16-bit encoding.
10781 (define_insn "*thumb1_rev"
10782 [(set (match_operand:SI 0 "s_register_operand" "=l")
10783 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10784 "TARGET_THUMB1 && arm_arch6"
10786 [(set_attr "length" "2")]
;; Byte swap on pre-ARMv6 cores: the classic 4-instruction
;; EOR/BIC/ROR-based sequence, expanded as explicit RTL.
10789 (define_expand "arm_legacy_rev"
10790 [(set (match_operand:SI 2 "s_register_operand" "")
10791 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10795 (lshiftrt:SI (match_dup 2)
10797 (set (match_operand:SI 3 "s_register_operand" "")
10798 (rotatert:SI (match_dup 1)
10801 (and:SI (match_dup 2)
10802 (const_int -65281)))
10803 (set (match_operand:SI 0 "s_register_operand" "")
10804 (xor:SI (match_dup 3)
10810 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap sequence built from shifts, ORs and rotates only
;; (no EOR-with-shift available); temporaries 2-5 are reused.
10811 (define_expand "thumb_legacy_rev"
10812 [(set (match_operand:SI 2 "s_register_operand" "")
10813 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10815 (set (match_operand:SI 3 "s_register_operand" "")
10816 (lshiftrt:SI (match_dup 1)
10819 (ior:SI (match_dup 3)
10821 (set (match_operand:SI 4 "s_register_operand" "")
10823 (set (match_operand:SI 5 "s_register_operand" "")
10824 (rotatert:SI (match_dup 1)
10827 (ashift:SI (match_dup 5)
10830 (lshiftrt:SI (match_dup 5)
10833 (ior:SI (match_dup 5)
10836 (rotatert:SI (match_dup 5)
10838 (set (match_operand:SI 0 "s_register_operand" "")
10839 (ior:SI (match_dup 5)
;; bswapsi2 entry point: uses the single REV insn on ARMv6+ (via the
;; insn patterns above); otherwise falls back to the legacy sequences,
;; which are only worth it when not optimizing for size.
10845 (define_expand "bswapsi2"
10846 [(set (match_operand:SI 0 "s_register_operand" "=r")
10847 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10848 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10852 rtx op2 = gen_reg_rtx (SImode);
10853 rtx op3 = gen_reg_rtx (SImode);
10857 rtx op4 = gen_reg_rtx (SImode);
10858 rtx op5 = gen_reg_rtx (SImode);
10860 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10861 op2, op3, op4, op5));
10865 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10874 ;; Load the load/store multiple patterns
10875 (include "ldmstm.md")
10876 ;; Load the FPA co-processor patterns
10878 ;; Load the Maverick co-processor patterns
10879 (include "cirrus.md")
10880 ;; Vector bits common to IWMMXT and Neon
10881 (include "vec-common.md")
10882 ;; Load the Intel Wireless Multimedia Extension patterns
10883 (include "iwmmxt.md")
10884 ;; Load the VFP co-processor patterns
10886 ;; Thumb-2 patterns
10887 (include "thumb2.md")
10889 (include "neon.md")
10890 ;; Synchronization Primitives
10891 (include "sync.md")