1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
106 (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
110 ;; UNSPEC_VOLATILE Usage:
113 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
115 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
116 ; instruction epilogue sequence that isn't expanded
117 ; into normal RTL. Used for both normal and sibcall
119 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
120 ; for inlined constants.
121 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
123 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
125 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
127 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
129 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
131 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
133 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
134 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
135 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
136 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
137 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
138 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
139 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141 (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap.
142 (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set.
143 (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op>
144 (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op>
145 (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op>
149 ;;---------------------------------------------------------------------------
152 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
153 ; generating ARM code. This is used to control the length of some insn
154 ; patterns that share the same RTL in both ARM and Thumb code.
155 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
157 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
158 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
160 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
161 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
163 ;; Operand number of an input operand that is shifted. Zero if the
164 ;; given instruction does not shift one of its input operands.
165 (define_attr "shift" "" (const_int 0))
167 ; Floating Point Unit. If we only have floating point emulation, then there
168 ; is no point in scheduling the floating point insns. (Well, for best
169 ; performance we should try and group them together).
170 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
171 (const (symbol_ref "arm_fpu_attr")))
173 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
174 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
180 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
181 (const_string "none"))
183 ; LENGTH of an instruction (in bytes)
184 (define_attr "length" ""
185 (cond [(not (eq_attr "sync_memory" "none"))
186 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
189 ; The architecture which supports the instruction (or alternative).
190 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
191 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
192 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
193 ; arm_arch6. This attribute is used to compute attribute "enabled",
194 ; use type "any" to enable an alternative in all cases.
195 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
196 (const_string "any"))
198 (define_attr "arch_enabled" "no,yes"
199 (cond [(eq_attr "arch" "any")
202 (and (eq_attr "arch" "a")
203 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
206 (and (eq_attr "arch" "t")
207 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
210 (and (eq_attr "arch" "t1")
211 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
214 (and (eq_attr "arch" "t2")
215 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
218 (and (eq_attr "arch" "32")
219 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
222 (and (eq_attr "arch" "v6")
223 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
226 (and (eq_attr "arch" "nov6")
227 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
228 (const_string "yes")]
229 (const_string "no")))
231 ; Allows an insn to disable certain alternatives for reasons other than
233 (define_attr "insn_enabled" "no,yes"
234 (const_string "yes"))
236 ; Enable all alternatives that are both arch_enabled and insn_enabled.
237 (define_attr "enabled" "no,yes"
238 (if_then_else (eq_attr "insn_enabled" "yes")
239 (if_then_else (eq_attr "arch_enabled" "yes")
242 (const_string "no")))
244 ; POOL_RANGE is how far away from a constant pool entry that this insn
245 ; can be placed. If the distance is zero, then this insn will never
246 ; reference the pool.
247 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
248 ; before its address.
249 (define_attr "arm_pool_range" "" (const_int 0))
250 (define_attr "thumb2_pool_range" "" (const_int 0))
251 (define_attr "arm_neg_pool_range" "" (const_int 0))
252 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
254 (define_attr "pool_range" ""
255 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
256 (attr "arm_pool_range")))
257 (define_attr "neg_pool_range" ""
258 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
259 (attr "arm_neg_pool_range")))
261 ; An assembler sequence may clobber the condition codes without us knowing.
262 ; If such an insn references the pool, then we have no way of knowing how,
263 ; so use the most conservative value for pool_range.
264 (define_asm_attributes
265 [(set_attr "conds" "clob")
266 (set_attr "length" "4")
267 (set_attr "pool_range" "250")])
269 ;; The instruction used to implement a particular pattern. This
270 ;; information is used by pipeline descriptions to provide accurate
271 ;; scheduling information.
274 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
275 (const_string "other"))
277 ; TYPE attribute is used to detect floating point instructions which, if
278 ; running on a co-processor can run in parallel with other, basic instructions
279 ; If write-buffer scheduling is enabled then it can also be used in the
280 ; scheduling of writes.
282 ; Classification of each insn
283 ; Note: vfp.md has different meanings for some of these, and some further
284 ; types as well. See that file for details.
285 ; alu any alu instruction that doesn't hit memory or fp
286 ; regs or have a shifted source operand
287 ; alu_shift any data instruction that doesn't hit memory or fp
288 ; regs, but has a source operand shifted by a constant
289 ; alu_shift_reg any data instruction that doesn't hit memory or fp
290 ; regs, but has a source operand shifted by a register value
291 ; mult a multiply instruction
292 ; block blockage insn, this blocks all functional units
293 ; float a floating point arithmetic operation (subject to expansion)
294 ; fdivd DFmode floating point division
295 ; fdivs SFmode floating point division
296 ; fmul Floating point multiply
297 ; ffmul Fast floating point multiply
298 ; farith Floating point arithmetic (4 cycle)
299 ; ffarith Fast floating point arithmetic (2 cycle)
300 ; float_em a floating point arithmetic operation that is normally emulated
301 ; even on a machine with an fpa.
302 ; f_load a floating point load from memory
303 ; f_store a floating point store to memory
304 ; f_load[sd] single/double load from memory
305 ; f_store[sd] single/double store to memory
306 ; f_flag a transfer of co-processor flags to the CPSR
307 ; f_mem_r a transfer of a floating point register to a real reg via mem
308 ; r_mem_f the reverse of f_mem_r
309 ; f_2_r fast transfer float to arm (no memory needed)
310 ; r_2_f fast transfer arm to float
311 ; f_cvt convert floating<->integral
313 ; call a subroutine call
314 ; load_byte load byte(s) from memory to arm registers
315 ; load1 load 1 word from memory to arm registers
316 ; load2 load 2 words from memory to arm registers
317 ; load3 load 3 words from memory to arm registers
318 ; load4 load 4 words from memory to arm registers
319 ; store store 1 word to memory from arm registers
320 ; store2 store 2 words
321 ; store3 store 3 words
322 ; store4 store 4 (or more) words
323 ; Additions for Cirrus Maverick co-processor:
324 ; mav_farith Floating point arithmetic (4 cycle)
325 ; mav_dmult Double multiplies (7 cycle)
329 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
331 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
332 (const_string "mult")
333 (const_string "alu")))
335 ; Load scheduling, set from the arm_ld_sched variable
336 ; initialized by arm_option_override()
337 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
339 ;; Classification of NEON instructions for scheduling purposes.
340 ;; Do not set this attribute and the "type" attribute together in
341 ;; any one instruction pattern.
342 (define_attr "neon_type"
353 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
354 neon_mul_qqq_8_16_32_ddd_32,\
355 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
356 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
358 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
359 neon_mla_qqq_32_qqd_32_scalar,\
360 neon_mul_ddd_16_scalar_32_16_long_scalar,\
361 neon_mul_qqd_32_scalar,\
362 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
367 neon_vqshl_vrshl_vqrshl_qqq,\
369 neon_fp_vadd_ddd_vabs_dd,\
370 neon_fp_vadd_qqq_vabs_qq,\
376 neon_fp_vmla_ddd_scalar,\
377 neon_fp_vmla_qqq_scalar,\
378 neon_fp_vrecps_vrsqrts_ddd,\
379 neon_fp_vrecps_vrsqrts_qqq,\
387 neon_vld2_2_regs_vld1_vld2_all_lanes,\
390 neon_vst1_1_2_regs_vst2_2_regs,\
392 neon_vst2_4_regs_vst3_vst4,\
394 neon_vld1_vld2_lane,\
395 neon_vld3_vld4_lane,\
396 neon_vst1_vst2_lane,\
397 neon_vst3_vst4_lane,\
398 neon_vld3_vld4_all_lanes,\
406 (const_string "none"))
408 ; condition codes: this one is used by final_prescan_insn to speed up
409 ; conditionalizing instructions. It saves having to scan the rtl to see if
410 ; it uses or alters the condition codes.
412 ; USE means that the condition codes are used by the insn in the process of
413 ; outputting code, this means (at present) that we can't use the insn in
416 ; SET means that the purpose of the insn is to set the condition codes in a
417 ; well defined manner.
419 ; CLOB means that the condition codes are altered in an undefined manner, if
420 ; they are altered at all
422 ; UNCONDITIONAL means the instruction can not be conditionally executed and
423 ; that the instruction does not use or alter the condition codes.
425 ; NOCOND means that the instruction does not use or alter the condition
426 ; codes but can be converted into a conditionally exectuted instruction.
428 (define_attr "conds" "use,set,clob,unconditional,nocond"
430 (ior (eq_attr "is_thumb1" "yes")
431 (eq_attr "type" "call"))
432 (const_string "clob")
433 (if_then_else (eq_attr "neon_type" "none")
434 (const_string "nocond")
435 (const_string "unconditional"))))
437 ; Predicable means that the insn can be conditionally executed based on
438 ; an automatically added predicate (additional patterns are generated by
439 ; gen...). We default to 'no' because no Thumb patterns match this rule
440 ; and not all ARM patterns do.
441 (define_attr "predicable" "no,yes" (const_string "no"))
443 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
444 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
445 ; suffer blockages enough to warrant modelling this (and it can adversely
446 ; affect the schedule).
447 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
449 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
450 ; to stall the processor. Used with model_wbuf above.
451 (define_attr "write_conflict" "no,yes"
452 (if_then_else (eq_attr "type"
453 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
455 (const_string "no")))
457 ; Classify the insns into those that take one cycle and those that take more
458 ; than one on the main cpu execution unit.
459 (define_attr "core_cycles" "single,multi"
460 (if_then_else (eq_attr "type"
461 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
462 (const_string "single")
463 (const_string "multi")))
465 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
466 ;; distant label. Only applicable to Thumb code.
467 (define_attr "far_jump" "yes,no" (const_string "no"))
470 ;; The number of machine instructions this pattern expands to.
471 ;; Used for Thumb-2 conditional execution.
472 (define_attr "ce_count" "" (const_int 1))
474 ;;---------------------------------------------------------------------------
477 (include "iterators.md")
479 ;;---------------------------------------------------------------------------
482 (include "predicates.md")
483 (include "constraints.md")
485 ;;---------------------------------------------------------------------------
486 ;; Pipeline descriptions
488 ;; Processor type. This is created automatically from arm-cores.def.
489 (include "arm-tune.md")
491 (define_attr "tune_cortexr4" "yes,no"
493 (eq_attr "tune" "cortexr4,cortexr4f")
495 (const_string "no"))))
497 ;; True if the generic scheduling description should be used.
499 (define_attr "generic_sched" "yes,no"
501 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
502 (eq_attr "tune_cortexr4" "yes"))
504 (const_string "yes"))))
506 (define_attr "generic_vfp" "yes,no"
508 (and (eq_attr "fpu" "vfp")
509 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
510 (eq_attr "tune_cortexr4" "no"))
512 (const_string "no"))))
514 (include "arm-generic.md")
515 (include "arm926ejs.md")
516 (include "arm1020e.md")
517 (include "arm1026ejs.md")
518 (include "arm1136jfs.md")
519 (include "cortex-a5.md")
520 (include "cortex-a8.md")
521 (include "cortex-a9.md")
522 (include "cortex-r4.md")
523 (include "cortex-r4f.md")
524 (include "cortex-m4.md")
525 (include "cortex-m4-fpu.md")
529 ;;---------------------------------------------------------------------------
534 ;; Note: For DImode insns, there is normally no reason why operands should
535 ;; not be in the same register, what we don't want is for something being
536 ;; written to partially overlap something that is an input.
537 ;; Cirrus 64bit additions should not be split because we have a native
538 ;; 64bit addition instructions.
540 (define_expand "adddi3"
542 [(set (match_operand:DI 0 "s_register_operand" "")
543 (plus:DI (match_operand:DI 1 "s_register_operand" "")
544 (match_operand:DI 2 "s_register_operand" "")))
545 (clobber (reg:CC CC_REGNUM))])]
548 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
550 if (!cirrus_fp_register (operands[0], DImode))
551 operands[0] = force_reg (DImode, operands[0]);
552 if (!cirrus_fp_register (operands[1], DImode))
553 operands[1] = force_reg (DImode, operands[1]);
554 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
560 if (GET_CODE (operands[1]) != REG)
561 operands[1] = force_reg (DImode, operands[1]);
562 if (GET_CODE (operands[2]) != REG)
563 operands[2] = force_reg (DImode, operands[2]);
568 (define_insn "*thumb1_adddi3"
569 [(set (match_operand:DI 0 "register_operand" "=l")
570 (plus:DI (match_operand:DI 1 "register_operand" "%0")
571 (match_operand:DI 2 "register_operand" "l")))
572 (clobber (reg:CC CC_REGNUM))
575 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
576 [(set_attr "length" "4")]
579 (define_insn_and_split "*arm_adddi3"
580 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
581 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
582 (match_operand:DI 2 "s_register_operand" "r, 0")))
583 (clobber (reg:CC CC_REGNUM))]
584 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
586 "TARGET_32BIT && reload_completed
587 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
588 [(parallel [(set (reg:CC_C CC_REGNUM)
589 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
591 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
592 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
593 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
596 operands[3] = gen_highpart (SImode, operands[0]);
597 operands[0] = gen_lowpart (SImode, operands[0]);
598 operands[4] = gen_highpart (SImode, operands[1]);
599 operands[1] = gen_lowpart (SImode, operands[1]);
600 operands[5] = gen_highpart (SImode, operands[2]);
601 operands[2] = gen_lowpart (SImode, operands[2]);
603 [(set_attr "conds" "clob")
604 (set_attr "length" "8")]
607 (define_insn_and_split "*adddi_sesidi_di"
608 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
609 (plus:DI (sign_extend:DI
610 (match_operand:SI 2 "s_register_operand" "r,r"))
611 (match_operand:DI 1 "s_register_operand" "0,r")))
612 (clobber (reg:CC CC_REGNUM))]
613 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
615 "TARGET_32BIT && reload_completed"
616 [(parallel [(set (reg:CC_C CC_REGNUM)
617 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
619 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
620 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
623 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
626 operands[3] = gen_highpart (SImode, operands[0]);
627 operands[0] = gen_lowpart (SImode, operands[0]);
628 operands[4] = gen_highpart (SImode, operands[1]);
629 operands[1] = gen_lowpart (SImode, operands[1]);
630 operands[2] = gen_lowpart (SImode, operands[2]);
632 [(set_attr "conds" "clob")
633 (set_attr "length" "8")]
636 (define_insn_and_split "*adddi_zesidi_di"
637 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
638 (plus:DI (zero_extend:DI
639 (match_operand:SI 2 "s_register_operand" "r,r"))
640 (match_operand:DI 1 "s_register_operand" "0,r")))
641 (clobber (reg:CC CC_REGNUM))]
642 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
644 "TARGET_32BIT && reload_completed"
645 [(parallel [(set (reg:CC_C CC_REGNUM)
646 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
648 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
649 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
650 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
653 operands[3] = gen_highpart (SImode, operands[0]);
654 operands[0] = gen_lowpart (SImode, operands[0]);
655 operands[4] = gen_highpart (SImode, operands[1]);
656 operands[1] = gen_lowpart (SImode, operands[1]);
657 operands[2] = gen_lowpart (SImode, operands[2]);
659 [(set_attr "conds" "clob")
660 (set_attr "length" "8")]
663 (define_expand "addsi3"
664 [(set (match_operand:SI 0 "s_register_operand" "")
665 (plus:SI (match_operand:SI 1 "s_register_operand" "")
666 (match_operand:SI 2 "reg_or_int_operand" "")))]
669 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
671 arm_split_constant (PLUS, SImode, NULL_RTX,
672 INTVAL (operands[2]), operands[0], operands[1],
673 optimize && can_create_pseudo_p ());
679 ; If there is a scratch available, this will be faster than synthesizing the
682 [(match_scratch:SI 3 "r")
683 (set (match_operand:SI 0 "arm_general_register_operand" "")
684 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
685 (match_operand:SI 2 "const_int_operand" "")))]
687 !(const_ok_for_arm (INTVAL (operands[2]))
688 || const_ok_for_arm (-INTVAL (operands[2])))
689 && const_ok_for_arm (~INTVAL (operands[2]))"
690 [(set (match_dup 3) (match_dup 2))
691 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
695 ;; The r/r/k alternative is required when reloading the address
696 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
697 ;; put the duplicated register first, and not try the commutative version.
698 (define_insn_and_split "*arm_addsi3"
699 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
700 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
701 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
711 && GET_CODE (operands[2]) == CONST_INT
712 && !(const_ok_for_arm (INTVAL (operands[2]))
713 || const_ok_for_arm (-INTVAL (operands[2])))
714 && (reload_completed || !arm_eliminable_register (operands[1]))"
715 [(clobber (const_int 0))]
717 arm_split_constant (PLUS, SImode, curr_insn,
718 INTVAL (operands[2]), operands[0],
722 [(set_attr "length" "4,4,4,4,4,16")
723 (set_attr "predicable" "yes")]
726 (define_insn_and_split "*thumb1_addsi3"
727 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
728 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
729 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
732 static const char * const asms[] =
734 \"add\\t%0, %0, %2\",
735 \"sub\\t%0, %0, #%n2\",
736 \"add\\t%0, %1, %2\",
737 \"add\\t%0, %0, %2\",
738 \"add\\t%0, %0, %2\",
739 \"add\\t%0, %1, %2\",
740 \"add\\t%0, %1, %2\",
745 if ((which_alternative == 2 || which_alternative == 6)
746 && GET_CODE (operands[2]) == CONST_INT
747 && INTVAL (operands[2]) < 0)
748 return \"sub\\t%0, %1, #%n2\";
749 return asms[which_alternative];
751 "&& reload_completed && CONST_INT_P (operands[2])
752 && ((operands[1] != stack_pointer_rtx
753 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
754 || (operands[1] == stack_pointer_rtx
755 && INTVAL (operands[2]) > 1020))"
756 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
757 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
759 HOST_WIDE_INT offset = INTVAL (operands[2]);
760 if (operands[1] == stack_pointer_rtx)
766 else if (offset < -255)
769 operands[3] = GEN_INT (offset);
770 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
772 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
775 ;; Reloading and elimination of the frame pointer can
776 ;; sometimes cause this optimization to be missed.
778 [(set (match_operand:SI 0 "arm_general_register_operand" "")
779 (match_operand:SI 1 "const_int_operand" ""))
781 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
783 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
784 && (INTVAL (operands[1]) & 3) == 0"
785 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
789 (define_insn "*addsi3_compare0"
790 [(set (reg:CC_NOOV CC_REGNUM)
792 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
793 (match_operand:SI 2 "arm_add_operand" "rI,L"))
795 (set (match_operand:SI 0 "s_register_operand" "=r,r")
796 (plus:SI (match_dup 1) (match_dup 2)))]
800 sub%.\\t%0, %1, #%n2"
801 [(set_attr "conds" "set")]
804 (define_insn "*addsi3_compare0_scratch"
805 [(set (reg:CC_NOOV CC_REGNUM)
807 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
808 (match_operand:SI 1 "arm_add_operand" "rI,L"))
814 [(set_attr "conds" "set")]
817 (define_insn "*compare_negsi_si"
818 [(set (reg:CC_Z CC_REGNUM)
820 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
821 (match_operand:SI 1 "s_register_operand" "r")))]
824 [(set_attr "conds" "set")]
827 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
828 ;; addend is a constant.
829 (define_insn "*cmpsi2_addneg"
830 [(set (reg:CC CC_REGNUM)
832 (match_operand:SI 1 "s_register_operand" "r,r")
833 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
834 (set (match_operand:SI 0 "s_register_operand" "=r,r")
835 (plus:SI (match_dup 1)
836 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
837 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
840 sub%.\\t%0, %1, #%n3"
841 [(set_attr "conds" "set")]
844 ;; Convert the sequence
846 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
850 ;; bcs dest ((unsigned)rn >= 1)
851 ;; similarly for the beq variant using bcc.
852 ;; This is a common looping idiom (while (n--))
854 [(set (match_operand:SI 0 "arm_general_register_operand" "")
855 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
857 (set (match_operand 2 "cc_register" "")
858 (compare (match_dup 0) (const_int -1)))
860 (if_then_else (match_operator 3 "equality_operator"
861 [(match_dup 2) (const_int 0)])
862 (match_operand 4 "" "")
863 (match_operand 5 "" "")))]
864 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
868 (match_dup 1) (const_int 1)))
869 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
871 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
874 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
875 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
878 operands[2], const0_rtx);"
881 ;; The next four insns work because they compare the result with one of
882 ;; the operands, and we know that the use of the condition code is
883 ;; either GEU or LTU, so we can use the carry flag from the addition
884 ;; instead of doing the compare a second time.
885 (define_insn "*addsi3_compare_op1"
886 [(set (reg:CC_C CC_REGNUM)
888 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
889 (match_operand:SI 2 "arm_add_operand" "rI,L"))
891 (set (match_operand:SI 0 "s_register_operand" "=r,r")
892 (plus:SI (match_dup 1) (match_dup 2)))]
896 sub%.\\t%0, %1, #%n2"
897 [(set_attr "conds" "set")]
900 (define_insn "*addsi3_compare_op2"
901 [(set (reg:CC_C CC_REGNUM)
903 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
904 (match_operand:SI 2 "arm_add_operand" "rI,L"))
906 (set (match_operand:SI 0 "s_register_operand" "=r,r")
907 (plus:SI (match_dup 1) (match_dup 2)))]
911 sub%.\\t%0, %1, #%n2"
912 [(set_attr "conds" "set")]
915 (define_insn "*compare_addsi2_op0"
916 [(set (reg:CC_C CC_REGNUM)
918 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
919 (match_operand:SI 1 "arm_add_operand" "rI,L"))
925 [(set_attr "conds" "set")]
928 (define_insn "*compare_addsi2_op1"
929 [(set (reg:CC_C CC_REGNUM)
931 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
932 (match_operand:SI 1 "arm_add_operand" "rI,L"))
938 [(set_attr "conds" "set")]
941 (define_insn "*addsi3_carryin_<optab>"
942 [(set (match_operand:SI 0 "s_register_operand" "=r")
943 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
944 (match_operand:SI 2 "arm_rhs_operand" "rI"))
945 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
948 [(set_attr "conds" "use")]
951 (define_insn "*addsi3_carryin_alt2_<optab>"
952 [(set (match_operand:SI 0 "s_register_operand" "=r")
953 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
954 (match_operand:SI 1 "s_register_operand" "%r"))
955 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
958 [(set_attr "conds" "use")]
961 (define_insn "*addsi3_carryin_shift_<optab>"
962 [(set (match_operand:SI 0 "s_register_operand" "=r")
964 (match_operator:SI 2 "shift_operator"
965 [(match_operand:SI 3 "s_register_operand" "r")
966 (match_operand:SI 4 "reg_or_int_operand" "rM")])
967 (match_operand:SI 1 "s_register_operand" "r"))
968 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
970 "adc%?\\t%0, %1, %3%S2"
971 [(set_attr "conds" "use")
972 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
973 (const_string "alu_shift")
974 (const_string "alu_shift_reg")))]
977 (define_expand "incscc"
978 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
979 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
980 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
981 (match_operand:SI 1 "s_register_operand" "0,?r")))]
986 (define_insn "*arm_incscc"
987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
988 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
989 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
990 (match_operand:SI 1 "s_register_operand" "0,?r")))]
994 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
995 [(set_attr "conds" "use")
996 (set_attr "length" "4,8")]
999 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1001 [(set (match_operand:SI 0 "s_register_operand" "")
1002 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1003 (match_operand:SI 2 "s_register_operand" ""))
1005 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1007 [(set (match_dup 3) (match_dup 1))
1008 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1010 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1013 (define_expand "addsf3"
1014 [(set (match_operand:SF 0 "s_register_operand" "")
1015 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1016 (match_operand:SF 2 "arm_float_add_operand" "")))]
1017 "TARGET_32BIT && TARGET_HARD_FLOAT"
1020 && !cirrus_fp_register (operands[2], SFmode))
1021 operands[2] = force_reg (SFmode, operands[2]);
1024 (define_expand "adddf3"
1025 [(set (match_operand:DF 0 "s_register_operand" "")
1026 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1027 (match_operand:DF 2 "arm_float_add_operand" "")))]
1028 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1031 && !cirrus_fp_register (operands[2], DFmode))
1032 operands[2] = force_reg (DFmode, operands[2]);
1035 (define_expand "subdi3"
1037 [(set (match_operand:DI 0 "s_register_operand" "")
1038 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1039 (match_operand:DI 2 "s_register_operand" "")))
1040 (clobber (reg:CC CC_REGNUM))])]
1043 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1045 && cirrus_fp_register (operands[0], DImode)
1046 && cirrus_fp_register (operands[1], DImode))
1048 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1054 if (GET_CODE (operands[1]) != REG)
1055 operands[1] = force_reg (DImode, operands[1]);
1056 if (GET_CODE (operands[2]) != REG)
1057 operands[2] = force_reg (DImode, operands[2]);
1062 (define_insn "*arm_subdi3"
1063 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1064 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1065 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1066 (clobber (reg:CC CC_REGNUM))]
1067 "TARGET_32BIT && !TARGET_NEON"
1068 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1069 [(set_attr "conds" "clob")
1070 (set_attr "length" "8")]
1073 (define_insn "*thumb_subdi3"
1074 [(set (match_operand:DI 0 "register_operand" "=l")
1075 (minus:DI (match_operand:DI 1 "register_operand" "0")
1076 (match_operand:DI 2 "register_operand" "l")))
1077 (clobber (reg:CC CC_REGNUM))]
1079 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1080 [(set_attr "length" "4")]
1083 (define_insn "*subdi_di_zesidi"
1084 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1085 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1087 (match_operand:SI 2 "s_register_operand" "r,r"))))
1088 (clobber (reg:CC CC_REGNUM))]
1090 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1091 [(set_attr "conds" "clob")
1092 (set_attr "length" "8")]
1095 (define_insn "*subdi_di_sesidi"
1096 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1097 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1099 (match_operand:SI 2 "s_register_operand" "r,r"))))
1100 (clobber (reg:CC CC_REGNUM))]
1102 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1103 [(set_attr "conds" "clob")
1104 (set_attr "length" "8")]
1107 (define_insn "*subdi_zesidi_di"
1108 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1109 (minus:DI (zero_extend:DI
1110 (match_operand:SI 2 "s_register_operand" "r,r"))
1111 (match_operand:DI 1 "s_register_operand" "0,r")))
1112 (clobber (reg:CC CC_REGNUM))]
1114 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1115 [(set_attr "conds" "clob")
1116 (set_attr "length" "8")]
1119 (define_insn "*subdi_sesidi_di"
1120 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1121 (minus:DI (sign_extend:DI
1122 (match_operand:SI 2 "s_register_operand" "r,r"))
1123 (match_operand:DI 1 "s_register_operand" "0,r")))
1124 (clobber (reg:CC CC_REGNUM))]
1126 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1127 [(set_attr "conds" "clob")
1128 (set_attr "length" "8")]
1131 (define_insn "*subdi_zesidi_zesidi"
1132 [(set (match_operand:DI 0 "s_register_operand" "=r")
1133 (minus:DI (zero_extend:DI
1134 (match_operand:SI 1 "s_register_operand" "r"))
1136 (match_operand:SI 2 "s_register_operand" "r"))))
1137 (clobber (reg:CC CC_REGNUM))]
1139 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1140 [(set_attr "conds" "clob")
1141 (set_attr "length" "8")]
1144 (define_expand "subsi3"
1145 [(set (match_operand:SI 0 "s_register_operand" "")
1146 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1147 (match_operand:SI 2 "s_register_operand" "")))]
1150 if (GET_CODE (operands[1]) == CONST_INT)
1154 arm_split_constant (MINUS, SImode, NULL_RTX,
1155 INTVAL (operands[1]), operands[0],
1156 operands[2], optimize && can_create_pseudo_p ());
1159 else /* TARGET_THUMB1 */
1160 operands[1] = force_reg (SImode, operands[1]);
1165 (define_insn "thumb1_subsi3_insn"
1166 [(set (match_operand:SI 0 "register_operand" "=l")
1167 (minus:SI (match_operand:SI 1 "register_operand" "l")
1168 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1171 [(set_attr "length" "2")
1172 (set_attr "conds" "set")])
1174 ; ??? Check Thumb-2 split length
1175 (define_insn_and_split "*arm_subsi3_insn"
1176 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1177 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1178 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1186 "&& ((GET_CODE (operands[1]) == CONST_INT
1187 && !const_ok_for_arm (INTVAL (operands[1])))
1188 || (GET_CODE (operands[2]) == CONST_INT
1189 && !const_ok_for_arm (INTVAL (operands[2]))))"
1190 [(clobber (const_int 0))]
1192 arm_split_constant (MINUS, SImode, curr_insn,
1193 INTVAL (operands[1]), operands[0], operands[2], 0);
1196 [(set_attr "length" "4,4,4,16,16")
1197 (set_attr "predicable" "yes")]
1201 [(match_scratch:SI 3 "r")
1202 (set (match_operand:SI 0 "arm_general_register_operand" "")
1203 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1204 (match_operand:SI 2 "arm_general_register_operand" "")))]
1206 && !const_ok_for_arm (INTVAL (operands[1]))
1207 && const_ok_for_arm (~INTVAL (operands[1]))"
1208 [(set (match_dup 3) (match_dup 1))
1209 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1213 (define_insn "*subsi3_compare0"
1214 [(set (reg:CC_NOOV CC_REGNUM)
1216 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1217 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1219 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1220 (minus:SI (match_dup 1) (match_dup 2)))]
1225 [(set_attr "conds" "set")]
1228 (define_insn "*subsi3_compare"
1229 [(set (reg:CC CC_REGNUM)
1230 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1231 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1232 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1233 (minus:SI (match_dup 1) (match_dup 2)))]
1238 [(set_attr "conds" "set")]
1241 (define_expand "decscc"
1242 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1243 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1244 (match_operator:SI 2 "arm_comparison_operator"
1245 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1250 (define_insn "*arm_decscc"
1251 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1252 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1253 (match_operator:SI 2 "arm_comparison_operator"
1254 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1258 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1259 [(set_attr "conds" "use")
1260 (set_attr "length" "*,8")]
1263 (define_expand "subsf3"
1264 [(set (match_operand:SF 0 "s_register_operand" "")
1265 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1266 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1267 "TARGET_32BIT && TARGET_HARD_FLOAT"
1269 if (TARGET_MAVERICK)
1271 if (!cirrus_fp_register (operands[1], SFmode))
1272 operands[1] = force_reg (SFmode, operands[1]);
1273 if (!cirrus_fp_register (operands[2], SFmode))
1274 operands[2] = force_reg (SFmode, operands[2]);
1278 (define_expand "subdf3"
1279 [(set (match_operand:DF 0 "s_register_operand" "")
1280 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1281 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1282 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1284 if (TARGET_MAVERICK)
1286 if (!cirrus_fp_register (operands[1], DFmode))
1287 operands[1] = force_reg (DFmode, operands[1]);
1288 if (!cirrus_fp_register (operands[2], DFmode))
1289 operands[2] = force_reg (DFmode, operands[2]);
1294 ;; Multiplication insns
1296 (define_expand "mulsi3"
1297 [(set (match_operand:SI 0 "s_register_operand" "")
1298 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1299 (match_operand:SI 1 "s_register_operand" "")))]
1304 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1305 (define_insn "*arm_mulsi3"
1306 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1307 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1308 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1309 "TARGET_32BIT && !arm_arch6"
1310 "mul%?\\t%0, %2, %1"
1311 [(set_attr "insn" "mul")
1312 (set_attr "predicable" "yes")]
1315 (define_insn "*arm_mulsi3_v6"
1316 [(set (match_operand:SI 0 "s_register_operand" "=r")
1317 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1318 (match_operand:SI 2 "s_register_operand" "r")))]
1319 "TARGET_32BIT && arm_arch6"
1320 "mul%?\\t%0, %1, %2"
1321 [(set_attr "insn" "mul")
1322 (set_attr "predicable" "yes")]
1325 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1326 ; 1 and 2; are the same, because reload will make operand 0 match
1327 ; operand 1 without realizing that this conflicts with operand 2. We fix
1328 ; this by adding another alternative to match this case, and then `reload'
1329 ; it ourselves. This alternative must come first.
1330 (define_insn "*thumb_mulsi3"
1331 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1332 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1333 (match_operand:SI 2 "register_operand" "l,l,l")))]
1334 "TARGET_THUMB1 && !arm_arch6"
1336 if (which_alternative < 2)
1337 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1339 return \"mul\\t%0, %2\";
1341 [(set_attr "length" "4,4,2")
1342 (set_attr "insn" "mul")]
1345 (define_insn "*thumb_mulsi3_v6"
1346 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1347 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1348 (match_operand:SI 2 "register_operand" "l,0,0")))]
1349 "TARGET_THUMB1 && arm_arch6"
1354 [(set_attr "length" "2")
1355 (set_attr "insn" "mul")]
1358 (define_insn "*mulsi3_compare0"
1359 [(set (reg:CC_NOOV CC_REGNUM)
1360 (compare:CC_NOOV (mult:SI
1361 (match_operand:SI 2 "s_register_operand" "r,r")
1362 (match_operand:SI 1 "s_register_operand" "%0,r"))
1364 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1365 (mult:SI (match_dup 2) (match_dup 1)))]
1366 "TARGET_ARM && !arm_arch6"
1367 "mul%.\\t%0, %2, %1"
1368 [(set_attr "conds" "set")
1369 (set_attr "insn" "muls")]
1372 (define_insn "*mulsi3_compare0_v6"
1373 [(set (reg:CC_NOOV CC_REGNUM)
1374 (compare:CC_NOOV (mult:SI
1375 (match_operand:SI 2 "s_register_operand" "r")
1376 (match_operand:SI 1 "s_register_operand" "r"))
1378 (set (match_operand:SI 0 "s_register_operand" "=r")
1379 (mult:SI (match_dup 2) (match_dup 1)))]
1380 "TARGET_ARM && arm_arch6 && optimize_size"
1381 "mul%.\\t%0, %2, %1"
1382 [(set_attr "conds" "set")
1383 (set_attr "insn" "muls")]
1386 (define_insn "*mulsi_compare0_scratch"
1387 [(set (reg:CC_NOOV CC_REGNUM)
1388 (compare:CC_NOOV (mult:SI
1389 (match_operand:SI 2 "s_register_operand" "r,r")
1390 (match_operand:SI 1 "s_register_operand" "%0,r"))
1392 (clobber (match_scratch:SI 0 "=&r,&r"))]
1393 "TARGET_ARM && !arm_arch6"
1394 "mul%.\\t%0, %2, %1"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "muls")]
1399 (define_insn "*mulsi_compare0_scratch_v6"
1400 [(set (reg:CC_NOOV CC_REGNUM)
1401 (compare:CC_NOOV (mult:SI
1402 (match_operand:SI 2 "s_register_operand" "r")
1403 (match_operand:SI 1 "s_register_operand" "r"))
1405 (clobber (match_scratch:SI 0 "=r"))]
1406 "TARGET_ARM && arm_arch6 && optimize_size"
1407 "mul%.\\t%0, %2, %1"
1408 [(set_attr "conds" "set")
1409 (set_attr "insn" "muls")]
1412 ;; Unnamed templates to match MLA instruction.
1414 (define_insn "*mulsi3addsi"
1415 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1417 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1418 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1419 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1420 "TARGET_32BIT && !arm_arch6"
1421 "mla%?\\t%0, %2, %1, %3"
1422 [(set_attr "insn" "mla")
1423 (set_attr "predicable" "yes")]
1426 (define_insn "*mulsi3addsi_v6"
1427 [(set (match_operand:SI 0 "s_register_operand" "=r")
1429 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1430 (match_operand:SI 1 "s_register_operand" "r"))
1431 (match_operand:SI 3 "s_register_operand" "r")))]
1432 "TARGET_32BIT && arm_arch6"
1433 "mla%?\\t%0, %2, %1, %3"
1434 [(set_attr "insn" "mla")
1435 (set_attr "predicable" "yes")]
1438 (define_insn "*mulsi3addsi_compare0"
1439 [(set (reg:CC_NOOV CC_REGNUM)
1442 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1443 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1444 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1446 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1447 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1449 "TARGET_ARM && arm_arch6"
1450 "mla%.\\t%0, %2, %1, %3"
1451 [(set_attr "conds" "set")
1452 (set_attr "insn" "mlas")]
1455 (define_insn "*mulsi3addsi_compare0_v6"
1456 [(set (reg:CC_NOOV CC_REGNUM)
1459 (match_operand:SI 2 "s_register_operand" "r")
1460 (match_operand:SI 1 "s_register_operand" "r"))
1461 (match_operand:SI 3 "s_register_operand" "r"))
1463 (set (match_operand:SI 0 "s_register_operand" "=r")
1464 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1466 "TARGET_ARM && arm_arch6 && optimize_size"
1467 "mla%.\\t%0, %2, %1, %3"
1468 [(set_attr "conds" "set")
1469 (set_attr "insn" "mlas")]
1472 (define_insn "*mulsi3addsi_compare0_scratch"
1473 [(set (reg:CC_NOOV CC_REGNUM)
1476 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1477 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1478 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1480 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1481 "TARGET_ARM && !arm_arch6"
1482 "mla%.\\t%0, %2, %1, %3"
1483 [(set_attr "conds" "set")
1484 (set_attr "insn" "mlas")]
1487 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1488 [(set (reg:CC_NOOV CC_REGNUM)
1491 (match_operand:SI 2 "s_register_operand" "r")
1492 (match_operand:SI 1 "s_register_operand" "r"))
1493 (match_operand:SI 3 "s_register_operand" "r"))
1495 (clobber (match_scratch:SI 0 "=r"))]
1496 "TARGET_ARM && arm_arch6 && optimize_size"
1497 "mla%.\\t%0, %2, %1, %3"
1498 [(set_attr "conds" "set")
1499 (set_attr "insn" "mlas")]
1502 (define_insn "*mulsi3subsi"
1503 [(set (match_operand:SI 0 "s_register_operand" "=r")
1505 (match_operand:SI 3 "s_register_operand" "r")
1506 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1507 (match_operand:SI 1 "s_register_operand" "r"))))]
1508 "TARGET_32BIT && arm_arch_thumb2"
1509 "mls%?\\t%0, %2, %1, %3"
1510 [(set_attr "insn" "mla")
1511 (set_attr "predicable" "yes")]
1514 (define_expand "maddsidi4"
1515 [(set (match_operand:DI 0 "s_register_operand" "")
1518 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1519 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1520 (match_operand:DI 3 "s_register_operand" "")))]
1521 "TARGET_32BIT && arm_arch3m"
1524 (define_insn "*mulsidi3adddi"
1525 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1528 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1529 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1530 (match_operand:DI 1 "s_register_operand" "0")))]
1531 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1532 "smlal%?\\t%Q0, %R0, %3, %2"
1533 [(set_attr "insn" "smlal")
1534 (set_attr "predicable" "yes")]
1537 (define_insn "*mulsidi3adddi_v6"
1538 [(set (match_operand:DI 0 "s_register_operand" "=r")
1541 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1542 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1543 (match_operand:DI 1 "s_register_operand" "0")))]
1544 "TARGET_32BIT && arm_arch6"
1545 "smlal%?\\t%Q0, %R0, %3, %2"
1546 [(set_attr "insn" "smlal")
1547 (set_attr "predicable" "yes")]
1550 ;; 32x32->64 widening multiply.
1551 ;; As with mulsi3, the only difference between the v3-5 and v6+
1552 ;; versions of these patterns is the requirement that the output not
1553 ;; overlap the inputs, but that still means we have to have a named
1554 ;; expander and two different starred insns.
1556 (define_expand "mulsidi3"
1557 [(set (match_operand:DI 0 "s_register_operand" "")
1559 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1560 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1561 "TARGET_32BIT && arm_arch3m"
1565 (define_insn "*mulsidi3_nov6"
1566 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1568 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1569 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1570 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1571 "smull%?\\t%Q0, %R0, %1, %2"
1572 [(set_attr "insn" "smull")
1573 (set_attr "predicable" "yes")]
1576 (define_insn "*mulsidi3_v6"
1577 [(set (match_operand:DI 0 "s_register_operand" "=r")
1579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1581 "TARGET_32BIT && arm_arch6"
1582 "smull%?\\t%Q0, %R0, %1, %2"
1583 [(set_attr "insn" "smull")
1584 (set_attr "predicable" "yes")]
1587 (define_expand "umulsidi3"
1588 [(set (match_operand:DI 0 "s_register_operand" "")
1590 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1591 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1592 "TARGET_32BIT && arm_arch3m"
1596 (define_insn "*umulsidi3_nov6"
1597 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1599 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1600 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1601 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1602 "umull%?\\t%Q0, %R0, %1, %2"
1603 [(set_attr "insn" "umull")
1604 (set_attr "predicable" "yes")]
1607 (define_insn "*umulsidi3_v6"
1608 [(set (match_operand:DI 0 "s_register_operand" "=r")
1610 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1611 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1612 "TARGET_32BIT && arm_arch6"
1613 "umull%?\\t%Q0, %R0, %1, %2"
1614 [(set_attr "insn" "umull")
1615 (set_attr "predicable" "yes")]
1618 (define_expand "umaddsidi4"
1619 [(set (match_operand:DI 0 "s_register_operand" "")
1622 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1623 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1624 (match_operand:DI 3 "s_register_operand" "")))]
1625 "TARGET_32BIT && arm_arch3m"
1628 (define_insn "*umulsidi3adddi"
1629 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1632 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1633 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1634 (match_operand:DI 1 "s_register_operand" "0")))]
1635 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1636 "umlal%?\\t%Q0, %R0, %3, %2"
1637 [(set_attr "insn" "umlal")
1638 (set_attr "predicable" "yes")]
1641 (define_insn "*umulsidi3adddi_v6"
1642 [(set (match_operand:DI 0 "s_register_operand" "=r")
1645 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1646 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1647 (match_operand:DI 1 "s_register_operand" "0")))]
1648 "TARGET_32BIT && arm_arch6"
1649 "umlal%?\\t%Q0, %R0, %3, %2"
1650 [(set_attr "insn" "umlal")
1651 (set_attr "predicable" "yes")]
1654 (define_expand "smulsi3_highpart"
1656 [(set (match_operand:SI 0 "s_register_operand" "")
1660 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1661 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1663 (clobber (match_scratch:SI 3 ""))])]
1664 "TARGET_32BIT && arm_arch3m"
1668 (define_insn "*smulsi3_highpart_nov6"
1669 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1673 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1674 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1676 (clobber (match_scratch:SI 3 "=&r,&r"))]
1677 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1678 "smull%?\\t%3, %0, %2, %1"
1679 [(set_attr "insn" "smull")
1680 (set_attr "predicable" "yes")]
1683 (define_insn "*smulsi3_highpart_v6"
1684 [(set (match_operand:SI 0 "s_register_operand" "=r")
1688 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1689 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1691 (clobber (match_scratch:SI 3 "=r"))]
1692 "TARGET_32BIT && arm_arch6"
1693 "smull%?\\t%3, %0, %2, %1"
1694 [(set_attr "insn" "smull")
1695 (set_attr "predicable" "yes")]
1698 (define_expand "umulsi3_highpart"
1700 [(set (match_operand:SI 0 "s_register_operand" "")
1704 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1705 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1707 (clobber (match_scratch:SI 3 ""))])]
1708 "TARGET_32BIT && arm_arch3m"
1712 (define_insn "*umulsi3_highpart_nov6"
1713 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1717 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1718 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1720 (clobber (match_scratch:SI 3 "=&r,&r"))]
1721 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1722 "umull%?\\t%3, %0, %2, %1"
1723 [(set_attr "insn" "umull")
1724 (set_attr "predicable" "yes")]
1727 (define_insn "*umulsi3_highpart_v6"
1728 [(set (match_operand:SI 0 "s_register_operand" "=r")
1732 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1733 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1735 (clobber (match_scratch:SI 3 "=r"))]
1736 "TARGET_32BIT && arm_arch6"
1737 "umull%?\\t%3, %0, %2, %1"
1738 [(set_attr "insn" "umull")
1739 (set_attr "predicable" "yes")]
1742 (define_insn "mulhisi3"
1743 [(set (match_operand:SI 0 "s_register_operand" "=r")
1744 (mult:SI (sign_extend:SI
1745 (match_operand:HI 1 "s_register_operand" "%r"))
1747 (match_operand:HI 2 "s_register_operand" "r"))))]
1748 "TARGET_DSP_MULTIPLY"
1749 "smulbb%?\\t%0, %1, %2"
1750 [(set_attr "insn" "smulxy")
1751 (set_attr "predicable" "yes")]
1754 (define_insn "*mulhisi3tb"
1755 [(set (match_operand:SI 0 "s_register_operand" "=r")
1756 (mult:SI (ashiftrt:SI
1757 (match_operand:SI 1 "s_register_operand" "r")
1760 (match_operand:HI 2 "s_register_operand" "r"))))]
1761 "TARGET_DSP_MULTIPLY"
1762 "smultb%?\\t%0, %1, %2"
1763 [(set_attr "insn" "smulxy")
1764 (set_attr "predicable" "yes")]
1767 (define_insn "*mulhisi3bt"
1768 [(set (match_operand:SI 0 "s_register_operand" "=r")
1769 (mult:SI (sign_extend:SI
1770 (match_operand:HI 1 "s_register_operand" "r"))
1772 (match_operand:SI 2 "s_register_operand" "r")
1774 "TARGET_DSP_MULTIPLY"
1775 "smulbt%?\\t%0, %1, %2"
1776 [(set_attr "insn" "smulxy")
1777 (set_attr "predicable" "yes")]
1780 (define_insn "*mulhisi3tt"
1781 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (mult:SI (ashiftrt:SI
1783 (match_operand:SI 1 "s_register_operand" "r")
1786 (match_operand:SI 2 "s_register_operand" "r")
1788 "TARGET_DSP_MULTIPLY"
1789 "smultt%?\\t%0, %1, %2"
1790 [(set_attr "insn" "smulxy")
1791 (set_attr "predicable" "yes")]
1794 (define_insn "maddhisi4"
1795 [(set (match_operand:SI 0 "s_register_operand" "=r")
1796 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1797 (mult:SI (sign_extend:SI
1798 (match_operand:HI 1 "s_register_operand" "%r"))
1800 (match_operand:HI 2 "s_register_operand" "r")))))]
1801 "TARGET_DSP_MULTIPLY"
1802 "smlabb%?\\t%0, %1, %2, %3"
1803 [(set_attr "insn" "smlaxy")
1804 (set_attr "predicable" "yes")]
1807 (define_insn "*maddhidi4"
1808 [(set (match_operand:DI 0 "s_register_operand" "=r")
1810 (match_operand:DI 3 "s_register_operand" "0")
1811 (mult:DI (sign_extend:DI
1812 (match_operand:HI 1 "s_register_operand" "%r"))
1814 (match_operand:HI 2 "s_register_operand" "r")))))]
1815 "TARGET_DSP_MULTIPLY"
1816 "smlalbb%?\\t%Q0, %R0, %1, %2"
1817 [(set_attr "insn" "smlalxy")
1818 (set_attr "predicable" "yes")])
1820 (define_expand "mulsf3"
1821 [(set (match_operand:SF 0 "s_register_operand" "")
1822 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1823 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1824 "TARGET_32BIT && TARGET_HARD_FLOAT"
1827 && !cirrus_fp_register (operands[2], SFmode))
1828 operands[2] = force_reg (SFmode, operands[2]);
1831 (define_expand "muldf3"
1832 [(set (match_operand:DF 0 "s_register_operand" "")
1833 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1834 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1835 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1838 && !cirrus_fp_register (operands[2], DFmode))
1839 operands[2] = force_reg (DFmode, operands[2]);
1844 (define_expand "divsf3"
1845 [(set (match_operand:SF 0 "s_register_operand" "")
1846 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1847 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1848 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1851 (define_expand "divdf3"
1852 [(set (match_operand:DF 0 "s_register_operand" "")
1853 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1854 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1855 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1860 (define_expand "modsf3"
1861 [(set (match_operand:SF 0 "s_register_operand" "")
1862 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1863 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1864 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1867 (define_expand "moddf3"
1868 [(set (match_operand:DF 0 "s_register_operand" "")
1869 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1870 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1871 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1874 ;; Boolean and,ior,xor insns
1876 ;; Split up double word logical operations
1878 ;; Split up simple DImode logical operations. Simply perform the logical
1879 ;; operation on the upper and lower halves of the registers.
1881 [(set (match_operand:DI 0 "s_register_operand" "")
1882 (match_operator:DI 6 "logical_binary_operator"
1883 [(match_operand:DI 1 "s_register_operand" "")
1884 (match_operand:DI 2 "s_register_operand" "")]))]
1885 "TARGET_32BIT && reload_completed
1886 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1887 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1888 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1889 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1892 operands[3] = gen_highpart (SImode, operands[0]);
1893 operands[0] = gen_lowpart (SImode, operands[0]);
1894 operands[4] = gen_highpart (SImode, operands[1]);
1895 operands[1] = gen_lowpart (SImode, operands[1]);
1896 operands[5] = gen_highpart (SImode, operands[2]);
1897 operands[2] = gen_lowpart (SImode, operands[2]);
1902 [(set (match_operand:DI 0 "s_register_operand" "")
1903 (match_operator:DI 6 "logical_binary_operator"
1904 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1905 (match_operand:DI 1 "s_register_operand" "")]))]
1906 "TARGET_32BIT && reload_completed"
1907 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1908 (set (match_dup 3) (match_op_dup:SI 6
1909 [(ashiftrt:SI (match_dup 2) (const_int 31))
1913 operands[3] = gen_highpart (SImode, operands[0]);
1914 operands[0] = gen_lowpart (SImode, operands[0]);
1915 operands[4] = gen_highpart (SImode, operands[1]);
1916 operands[1] = gen_lowpart (SImode, operands[1]);
1917 operands[5] = gen_highpart (SImode, operands[2]);
1918 operands[2] = gen_lowpart (SImode, operands[2]);
1922 ;; The zero extend of operand 2 means we can just copy the high part of
1923 ;; operand1 into operand0.
1925 [(set (match_operand:DI 0 "s_register_operand" "")
1927 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1928 (match_operand:DI 1 "s_register_operand" "")))]
1929 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1930 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1931 (set (match_dup 3) (match_dup 4))]
1934 operands[4] = gen_highpart (SImode, operands[1]);
1935 operands[3] = gen_highpart (SImode, operands[0]);
1936 operands[0] = gen_lowpart (SImode, operands[0]);
1937 operands[1] = gen_lowpart (SImode, operands[1]);
1941 ;; The zero extend of operand 2 means we can just copy the high part of
1942 ;; operand1 into operand0.
1944 [(set (match_operand:DI 0 "s_register_operand" "")
1946 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1947 (match_operand:DI 1 "s_register_operand" "")))]
1948 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1949 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1950 (set (match_dup 3) (match_dup 4))]
1953 operands[4] = gen_highpart (SImode, operands[1]);
1954 operands[3] = gen_highpart (SImode, operands[0]);
1955 operands[0] = gen_lowpart (SImode, operands[0]);
1956 operands[1] = gen_lowpart (SImode, operands[1]);
1960 (define_expand "anddi3"
1961 [(set (match_operand:DI 0 "s_register_operand" "")
1962 (and:DI (match_operand:DI 1 "s_register_operand" "")
1963 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1968 (define_insn "*anddi3_insn"
1969 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1970 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1971 (match_operand:DI 2 "s_register_operand" "r,r")))]
1972 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1974 [(set_attr "length" "8")]
1977 (define_insn_and_split "*anddi_zesidi_di"
1978 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1979 (and:DI (zero_extend:DI
1980 (match_operand:SI 2 "s_register_operand" "r,r"))
1981 (match_operand:DI 1 "s_register_operand" "0,r")))]
1984 "TARGET_32BIT && reload_completed"
1985 ; The zero extend of operand 2 clears the high word of the output
1987 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1988 (set (match_dup 3) (const_int 0))]
1991 operands[3] = gen_highpart (SImode, operands[0]);
1992 operands[0] = gen_lowpart (SImode, operands[0]);
1993 operands[1] = gen_lowpart (SImode, operands[1]);
1995 [(set_attr "length" "8")]
1998 (define_insn "*anddi_sesdi_di"
1999 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2000 (and:DI (sign_extend:DI
2001 (match_operand:SI 2 "s_register_operand" "r,r"))
2002 (match_operand:DI 1 "s_register_operand" "0,r")))]
2005 [(set_attr "length" "8")]
2008 (define_expand "andsi3"
2009 [(set (match_operand:SI 0 "s_register_operand" "")
2010 (and:SI (match_operand:SI 1 "s_register_operand" "")
2011 (match_operand:SI 2 "reg_or_int_operand" "")))]
2016 if (GET_CODE (operands[2]) == CONST_INT)
2018 if (INTVAL (operands[2]) == 255 && arm_arch6)
2020 operands[1] = convert_to_mode (QImode, operands[1], 1);
2021 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2025 arm_split_constant (AND, SImode, NULL_RTX,
2026 INTVAL (operands[2]), operands[0],
2028 optimize && can_create_pseudo_p ());
2033 else /* TARGET_THUMB1 */
2035 if (GET_CODE (operands[2]) != CONST_INT)
2037 rtx tmp = force_reg (SImode, operands[2]);
2038 if (rtx_equal_p (operands[0], operands[1]))
2042 operands[2] = operands[1];
2050 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2052 operands[2] = force_reg (SImode,
2053 GEN_INT (~INTVAL (operands[2])));
2055 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2060 for (i = 9; i <= 31; i++)
2062 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2064 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2068 else if ((((HOST_WIDE_INT) 1) << i) - 1
2069 == ~INTVAL (operands[2]))
2071 rtx shift = GEN_INT (i);
2072 rtx reg = gen_reg_rtx (SImode);
2074 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2075 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2081 operands[2] = force_reg (SImode, operands[2]);
2087 ; ??? Check split length for Thumb-2
2088 (define_insn_and_split "*arm_andsi3_insn"
2089 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2090 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2091 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2095 bic%?\\t%0, %1, #%B2
2098 && GET_CODE (operands[2]) == CONST_INT
2099 && !(const_ok_for_arm (INTVAL (operands[2]))
2100 || const_ok_for_arm (~INTVAL (operands[2])))"
2101 [(clobber (const_int 0))]
2103 arm_split_constant (AND, SImode, curr_insn,
2104 INTVAL (operands[2]), operands[0], operands[1], 0);
2107 [(set_attr "length" "4,4,16")
2108 (set_attr "predicable" "yes")]
2111 (define_insn "*thumb1_andsi3_insn"
2112 [(set (match_operand:SI 0 "register_operand" "=l")
2113 (and:SI (match_operand:SI 1 "register_operand" "%0")
2114 (match_operand:SI 2 "register_operand" "l")))]
2117 [(set_attr "length" "2")
2118 (set_attr "conds" "set")])
2120 (define_insn "*andsi3_compare0"
2121 [(set (reg:CC_NOOV CC_REGNUM)
2123 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2124 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2126 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2127 (and:SI (match_dup 1) (match_dup 2)))]
2131 bic%.\\t%0, %1, #%B2"
2132 [(set_attr "conds" "set")]
2135 (define_insn "*andsi3_compare0_scratch"
2136 [(set (reg:CC_NOOV CC_REGNUM)
2138 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2139 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2141 (clobber (match_scratch:SI 2 "=X,r"))]
2145 bic%.\\t%2, %0, #%B1"
2146 [(set_attr "conds" "set")]
2149 (define_insn "*zeroextractsi_compare0_scratch"
2150 [(set (reg:CC_NOOV CC_REGNUM)
2151 (compare:CC_NOOV (zero_extract:SI
2152 (match_operand:SI 0 "s_register_operand" "r")
2153 (match_operand 1 "const_int_operand" "n")
2154 (match_operand 2 "const_int_operand" "n"))
2157 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2158 && INTVAL (operands[1]) > 0
2159 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2160 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2162 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2163 << INTVAL (operands[2]));
2164 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2167 [(set_attr "conds" "set")]
2170 (define_insn_and_split "*ne_zeroextractsi"
2171 [(set (match_operand:SI 0 "s_register_operand" "=r")
2172 (ne:SI (zero_extract:SI
2173 (match_operand:SI 1 "s_register_operand" "r")
2174 (match_operand:SI 2 "const_int_operand" "n")
2175 (match_operand:SI 3 "const_int_operand" "n"))
2177 (clobber (reg:CC CC_REGNUM))]
2179 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2180 && INTVAL (operands[2]) > 0
2181 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2182 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2185 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2186 && INTVAL (operands[2]) > 0
2187 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2188 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2189 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2190 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2192 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2194 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2195 (match_dup 0) (const_int 1)))]
2197 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2198 << INTVAL (operands[3]));
2200 [(set_attr "conds" "clob")
2201 (set (attr "length")
2202 (if_then_else (eq_attr "is_thumb" "yes")
2207 (define_insn_and_split "*ne_zeroextractsi_shifted"
2208 [(set (match_operand:SI 0 "s_register_operand" "=r")
2209 (ne:SI (zero_extract:SI
2210 (match_operand:SI 1 "s_register_operand" "r")
2211 (match_operand:SI 2 "const_int_operand" "n")
2214 (clobber (reg:CC CC_REGNUM))]
2218 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2219 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2221 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2223 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2224 (match_dup 0) (const_int 1)))]
2226 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2228 [(set_attr "conds" "clob")
2229 (set_attr "length" "8")]
2232 (define_insn_and_split "*ite_ne_zeroextractsi"
2233 [(set (match_operand:SI 0 "s_register_operand" "=r")
2234 (if_then_else:SI (ne (zero_extract:SI
2235 (match_operand:SI 1 "s_register_operand" "r")
2236 (match_operand:SI 2 "const_int_operand" "n")
2237 (match_operand:SI 3 "const_int_operand" "n"))
2239 (match_operand:SI 4 "arm_not_operand" "rIK")
2241 (clobber (reg:CC CC_REGNUM))]
2243 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2244 && INTVAL (operands[2]) > 0
2245 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2246 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2247 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2250 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2251 && INTVAL (operands[2]) > 0
2252 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2253 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2254 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2255 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2256 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2258 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2260 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2261 (match_dup 0) (match_dup 4)))]
2263 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2264 << INTVAL (operands[3]));
2266 [(set_attr "conds" "clob")
2267 (set_attr "length" "8")]
2270 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2271 [(set (match_operand:SI 0 "s_register_operand" "=r")
2272 (if_then_else:SI (ne (zero_extract:SI
2273 (match_operand:SI 1 "s_register_operand" "r")
2274 (match_operand:SI 2 "const_int_operand" "n")
2277 (match_operand:SI 3 "arm_not_operand" "rIK")
2279 (clobber (reg:CC CC_REGNUM))]
2280 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2282 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2283 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2284 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2286 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2288 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2289 (match_dup 0) (match_dup 3)))]
2291 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2293 [(set_attr "conds" "clob")
2294 (set_attr "length" "8")]
2298 [(set (match_operand:SI 0 "s_register_operand" "")
2299 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2300 (match_operand:SI 2 "const_int_operand" "")
2301 (match_operand:SI 3 "const_int_operand" "")))
2302 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2304 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2305 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2307 HOST_WIDE_INT temp = INTVAL (operands[2]);
2309 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2310 operands[3] = GEN_INT (32 - temp);
2314 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2316 [(set (match_operand:SI 0 "s_register_operand" "")
2317 (match_operator:SI 1 "shiftable_operator"
2318 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2319 (match_operand:SI 3 "const_int_operand" "")
2320 (match_operand:SI 4 "const_int_operand" ""))
2321 (match_operand:SI 5 "s_register_operand" "")]))
2322 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2324 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2327 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2330 HOST_WIDE_INT temp = INTVAL (operands[3]);
2332 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2333 operands[4] = GEN_INT (32 - temp);
2338 [(set (match_operand:SI 0 "s_register_operand" "")
2339 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2340 (match_operand:SI 2 "const_int_operand" "")
2341 (match_operand:SI 3 "const_int_operand" "")))]
2343 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2344 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2346 HOST_WIDE_INT temp = INTVAL (operands[2]);
2348 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2349 operands[3] = GEN_INT (32 - temp);
2354 [(set (match_operand:SI 0 "s_register_operand" "")
2355 (match_operator:SI 1 "shiftable_operator"
2356 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2357 (match_operand:SI 3 "const_int_operand" "")
2358 (match_operand:SI 4 "const_int_operand" ""))
2359 (match_operand:SI 5 "s_register_operand" "")]))
2360 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2362 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2365 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2368 HOST_WIDE_INT temp = INTVAL (operands[3]);
2370 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2371 operands[4] = GEN_INT (32 - temp);
2375 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2376 ;;; represented by the bitfield, then this will produce incorrect results.
2377 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2378 ;;; which have a real bit-field insert instruction, the truncation happens
2379 ;;; in the bit-field insert instruction itself. Since arm does not have a
2380 ;;; bit-field insert instruction, we would have to emit code here to truncate
2381 ;;; the value before we insert. This loses some of the advantage of having
2382 ;;; this insv pattern, so this pattern needs to be reevalutated.
2384 (define_expand "insv"
2385 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2386 (match_operand:SI 1 "general_operand" "")
2387 (match_operand:SI 2 "general_operand" ""))
2388 (match_operand:SI 3 "reg_or_int_operand" ""))]
2389 "TARGET_ARM || arm_arch_thumb2"
2392 int start_bit = INTVAL (operands[2]);
2393 int width = INTVAL (operands[1]);
2394 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2395 rtx target, subtarget;
2397 if (arm_arch_thumb2)
2399 bool use_bfi = TRUE;
2401 if (GET_CODE (operands[3]) == CONST_INT)
2403 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2407 emit_insn (gen_insv_zero (operands[0], operands[1],
2412 /* See if the set can be done with a single orr instruction. */
2413 if (val == mask && const_ok_for_arm (val << start_bit))
2419 if (GET_CODE (operands[3]) != REG)
2420 operands[3] = force_reg (SImode, operands[3]);
2422 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2428 target = copy_rtx (operands[0]);
2429 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2430 subreg as the final target. */
2431 if (GET_CODE (target) == SUBREG)
2433 subtarget = gen_reg_rtx (SImode);
2434 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2435 < GET_MODE_SIZE (SImode))
2436 target = SUBREG_REG (target);
2441 if (GET_CODE (operands[3]) == CONST_INT)
2443 /* Since we are inserting a known constant, we may be able to
2444 reduce the number of bits that we have to clear so that
2445 the mask becomes simple. */
2446 /* ??? This code does not check to see if the new mask is actually
2447 simpler. It may not be. */
2448 rtx op1 = gen_reg_rtx (SImode);
2449 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2450 start of this pattern. */
2451 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2452 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2454 emit_insn (gen_andsi3 (op1, operands[0],
2455 gen_int_mode (~mask2, SImode)));
2456 emit_insn (gen_iorsi3 (subtarget, op1,
2457 gen_int_mode (op3_value << start_bit, SImode)));
2459 else if (start_bit == 0
2460 && !(const_ok_for_arm (mask)
2461 || const_ok_for_arm (~mask)))
2463 /* A Trick, since we are setting the bottom bits in the word,
2464 we can shift operand[3] up, operand[0] down, OR them together
2465 and rotate the result back again. This takes 3 insns, and
2466 the third might be mergeable into another op. */
2467 /* The shift up copes with the possibility that operand[3] is
2468 wider than the bitfield. */
2469 rtx op0 = gen_reg_rtx (SImode);
2470 rtx op1 = gen_reg_rtx (SImode);
2472 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2473 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2474 emit_insn (gen_iorsi3 (op1, op1, op0));
2475 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2477 else if ((width + start_bit == 32)
2478 && !(const_ok_for_arm (mask)
2479 || const_ok_for_arm (~mask)))
2481 /* Similar trick, but slightly less efficient. */
2483 rtx op0 = gen_reg_rtx (SImode);
2484 rtx op1 = gen_reg_rtx (SImode);
2486 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2487 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2488 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2489 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2493 rtx op0 = gen_int_mode (mask, SImode);
2494 rtx op1 = gen_reg_rtx (SImode);
2495 rtx op2 = gen_reg_rtx (SImode);
2497 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2499 rtx tmp = gen_reg_rtx (SImode);
2501 emit_insn (gen_movsi (tmp, op0));
2505 /* Mask out any bits in operand[3] that are not needed. */
2506 emit_insn (gen_andsi3 (op1, operands[3], op0));
2508 if (GET_CODE (op0) == CONST_INT
2509 && (const_ok_for_arm (mask << start_bit)
2510 || const_ok_for_arm (~(mask << start_bit))))
2512 op0 = gen_int_mode (~(mask << start_bit), SImode);
2513 emit_insn (gen_andsi3 (op2, operands[0], op0));
2517 if (GET_CODE (op0) == CONST_INT)
2519 rtx tmp = gen_reg_rtx (SImode);
2521 emit_insn (gen_movsi (tmp, op0));
2526 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2528 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2532 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2534 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2537 if (subtarget != target)
2539 /* If TARGET is still a SUBREG, then it must be wider than a word,
2540 so we must be careful only to set the subword we were asked to. */
2541 if (GET_CODE (target) == SUBREG)
2542 emit_move_insn (target, subtarget);
2544 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2551 (define_insn "insv_zero"
2552 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2553 (match_operand:SI 1 "const_int_operand" "M")
2554 (match_operand:SI 2 "const_int_operand" "M"))
2558 [(set_attr "length" "4")
2559 (set_attr "predicable" "yes")]
2562 (define_insn "insv_t2"
2563 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2564 (match_operand:SI 1 "const_int_operand" "M")
2565 (match_operand:SI 2 "const_int_operand" "M"))
2566 (match_operand:SI 3 "s_register_operand" "r"))]
2568 "bfi%?\t%0, %3, %2, %1"
2569 [(set_attr "length" "4")
2570 (set_attr "predicable" "yes")]
2573 ; constants for op 2 will never be given to these patterns.
2574 (define_insn_and_split "*anddi_notdi_di"
2575 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2576 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2577 (match_operand:DI 2 "s_register_operand" "r,0")))]
2580 "TARGET_32BIT && reload_completed
2581 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2582 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2583 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2584 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2587 operands[3] = gen_highpart (SImode, operands[0]);
2588 operands[0] = gen_lowpart (SImode, operands[0]);
2589 operands[4] = gen_highpart (SImode, operands[1]);
2590 operands[1] = gen_lowpart (SImode, operands[1]);
2591 operands[5] = gen_highpart (SImode, operands[2]);
2592 operands[2] = gen_lowpart (SImode, operands[2]);
2594 [(set_attr "length" "8")
2595 (set_attr "predicable" "yes")]
2598 (define_insn_and_split "*anddi_notzesidi_di"
2599 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600 (and:DI (not:DI (zero_extend:DI
2601 (match_operand:SI 2 "s_register_operand" "r,r")))
2602 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2605 bic%?\\t%Q0, %Q1, %2
2607 ; (not (zero_extend ...)) allows us to just copy the high word from
2608 ; operand1 to operand0.
2611 && operands[0] != operands[1]"
2612 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2613 (set (match_dup 3) (match_dup 4))]
2616 operands[3] = gen_highpart (SImode, operands[0]);
2617 operands[0] = gen_lowpart (SImode, operands[0]);
2618 operands[4] = gen_highpart (SImode, operands[1]);
2619 operands[1] = gen_lowpart (SImode, operands[1]);
2621 [(set_attr "length" "4,8")
2622 (set_attr "predicable" "yes")]
2625 (define_insn_and_split "*anddi_notsesidi_di"
2626 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2627 (and:DI (not:DI (sign_extend:DI
2628 (match_operand:SI 2 "s_register_operand" "r,r")))
2629 (match_operand:DI 1 "s_register_operand" "0,r")))]
2632 "TARGET_32BIT && reload_completed"
2633 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2634 (set (match_dup 3) (and:SI (not:SI
2635 (ashiftrt:SI (match_dup 2) (const_int 31)))
2639 operands[3] = gen_highpart (SImode, operands[0]);
2640 operands[0] = gen_lowpart (SImode, operands[0]);
2641 operands[4] = gen_highpart (SImode, operands[1]);
2642 operands[1] = gen_lowpart (SImode, operands[1]);
2644 [(set_attr "length" "8")
2645 (set_attr "predicable" "yes")]
2648 (define_insn "andsi_notsi_si"
2649 [(set (match_operand:SI 0 "s_register_operand" "=r")
2650 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2651 (match_operand:SI 1 "s_register_operand" "r")))]
2653 "bic%?\\t%0, %1, %2"
2654 [(set_attr "predicable" "yes")]
2657 (define_insn "thumb1_bicsi3"
2658 [(set (match_operand:SI 0 "register_operand" "=l")
2659 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2660 (match_operand:SI 2 "register_operand" "0")))]
2663 [(set_attr "length" "2")
2664 (set_attr "conds" "set")])
2666 (define_insn "andsi_not_shiftsi_si"
2667 [(set (match_operand:SI 0 "s_register_operand" "=r")
2668 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2669 [(match_operand:SI 2 "s_register_operand" "r")
2670 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2671 (match_operand:SI 1 "s_register_operand" "r")))]
2673 "bic%?\\t%0, %1, %2%S4"
2674 [(set_attr "predicable" "yes")
2675 (set_attr "shift" "2")
2676 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2677 (const_string "alu_shift")
2678 (const_string "alu_shift_reg")))]
2681 (define_insn "*andsi_notsi_si_compare0"
2682 [(set (reg:CC_NOOV CC_REGNUM)
2684 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2685 (match_operand:SI 1 "s_register_operand" "r"))
2687 (set (match_operand:SI 0 "s_register_operand" "=r")
2688 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2690 "bic%.\\t%0, %1, %2"
2691 [(set_attr "conds" "set")]
2694 (define_insn "*andsi_notsi_si_compare0_scratch"
2695 [(set (reg:CC_NOOV CC_REGNUM)
2697 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2698 (match_operand:SI 1 "s_register_operand" "r"))
2700 (clobber (match_scratch:SI 0 "=r"))]
2702 "bic%.\\t%0, %1, %2"
2703 [(set_attr "conds" "set")]
2706 (define_expand "iordi3"
2707 [(set (match_operand:DI 0 "s_register_operand" "")
2708 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2709 (match_operand:DI 2 "neon_logic_op2" "")))]
2714 (define_insn "*iordi3_insn"
2715 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2716 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2717 (match_operand:DI 2 "s_register_operand" "r,r")))]
2718 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2720 [(set_attr "length" "8")
2721 (set_attr "predicable" "yes")]
2724 (define_insn "*iordi_zesidi_di"
2725 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2726 (ior:DI (zero_extend:DI
2727 (match_operand:SI 2 "s_register_operand" "r,r"))
2728 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2731 orr%?\\t%Q0, %Q1, %2
2733 [(set_attr "length" "4,8")
2734 (set_attr "predicable" "yes")]
2737 (define_insn "*iordi_sesidi_di"
2738 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2739 (ior:DI (sign_extend:DI
2740 (match_operand:SI 2 "s_register_operand" "r,r"))
2741 (match_operand:DI 1 "s_register_operand" "0,r")))]
2744 [(set_attr "length" "8")
2745 (set_attr "predicable" "yes")]
2748 (define_expand "iorsi3"
2749 [(set (match_operand:SI 0 "s_register_operand" "")
2750 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2751 (match_operand:SI 2 "reg_or_int_operand" "")))]
2754 if (GET_CODE (operands[2]) == CONST_INT)
2758 arm_split_constant (IOR, SImode, NULL_RTX,
2759 INTVAL (operands[2]), operands[0], operands[1],
2760 optimize && can_create_pseudo_p ());
2763 else /* TARGET_THUMB1 */
2765 rtx tmp = force_reg (SImode, operands[2]);
2766 if (rtx_equal_p (operands[0], operands[1]))
2770 operands[2] = operands[1];
2778 (define_insn_and_split "*iorsi3_insn"
2779 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2780 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2781 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2785 orn%?\\t%0, %1, #%B2
2788 && GET_CODE (operands[2]) == CONST_INT
2789 && !(const_ok_for_arm (INTVAL (operands[2]))
2790 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2791 [(clobber (const_int 0))]
2793 arm_split_constant (IOR, SImode, curr_insn,
2794 INTVAL (operands[2]), operands[0], operands[1], 0);
2797 [(set_attr "length" "4,4,16")
2798 (set_attr "arch" "32,t2,32")
2799 (set_attr "predicable" "yes")])
2801 (define_insn "*thumb1_iorsi3_insn"
2802 [(set (match_operand:SI 0 "register_operand" "=l")
2803 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2804 (match_operand:SI 2 "register_operand" "l")))]
2807 [(set_attr "length" "2")
2808 (set_attr "conds" "set")])
2811 [(match_scratch:SI 3 "r")
2812 (set (match_operand:SI 0 "arm_general_register_operand" "")
2813 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2814 (match_operand:SI 2 "const_int_operand" "")))]
2816 && !const_ok_for_arm (INTVAL (operands[2]))
2817 && const_ok_for_arm (~INTVAL (operands[2]))"
2818 [(set (match_dup 3) (match_dup 2))
2819 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2823 (define_insn "*iorsi3_compare0"
2824 [(set (reg:CC_NOOV CC_REGNUM)
2825 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2826 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2828 (set (match_operand:SI 0 "s_register_operand" "=r")
2829 (ior:SI (match_dup 1) (match_dup 2)))]
2831 "orr%.\\t%0, %1, %2"
2832 [(set_attr "conds" "set")]
2835 (define_insn "*iorsi3_compare0_scratch"
2836 [(set (reg:CC_NOOV CC_REGNUM)
2837 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2838 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2840 (clobber (match_scratch:SI 0 "=r"))]
2842 "orr%.\\t%0, %1, %2"
2843 [(set_attr "conds" "set")]
2846 (define_expand "xordi3"
2847 [(set (match_operand:DI 0 "s_register_operand" "")
2848 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2849 (match_operand:DI 2 "s_register_operand" "")))]
2854 (define_insn "*xordi3_insn"
2855 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2856 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2857 (match_operand:DI 2 "s_register_operand" "r,r")))]
2858 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2860 [(set_attr "length" "8")
2861 (set_attr "predicable" "yes")]
2864 (define_insn "*xordi_zesidi_di"
2865 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2866 (xor:DI (zero_extend:DI
2867 (match_operand:SI 2 "s_register_operand" "r,r"))
2868 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2871 eor%?\\t%Q0, %Q1, %2
2873 [(set_attr "length" "4,8")
2874 (set_attr "predicable" "yes")]
2877 (define_insn "*xordi_sesidi_di"
2878 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2879 (xor:DI (sign_extend:DI
2880 (match_operand:SI 2 "s_register_operand" "r,r"))
2881 (match_operand:DI 1 "s_register_operand" "0,r")))]
2884 [(set_attr "length" "8")
2885 (set_attr "predicable" "yes")]
2888 (define_expand "xorsi3"
2889 [(set (match_operand:SI 0 "s_register_operand" "")
2890 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2891 (match_operand:SI 2 "reg_or_int_operand" "")))]
2893 "if (GET_CODE (operands[2]) == CONST_INT)
2897 arm_split_constant (XOR, SImode, NULL_RTX,
2898 INTVAL (operands[2]), operands[0], operands[1],
2899 optimize && can_create_pseudo_p ());
2902 else /* TARGET_THUMB1 */
2904 rtx tmp = force_reg (SImode, operands[2]);
2905 if (rtx_equal_p (operands[0], operands[1]))
2909 operands[2] = operands[1];
2916 (define_insn "*arm_xorsi3"
2917 [(set (match_operand:SI 0 "s_register_operand" "=r")
2918 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2919 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2921 "eor%?\\t%0, %1, %2"
2922 [(set_attr "predicable" "yes")]
2925 (define_insn "*thumb1_xorsi3_insn"
2926 [(set (match_operand:SI 0 "register_operand" "=l")
2927 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2928 (match_operand:SI 2 "register_operand" "l")))]
2931 [(set_attr "length" "2")
2932 (set_attr "conds" "set")])
2934 (define_insn "*xorsi3_compare0"
2935 [(set (reg:CC_NOOV CC_REGNUM)
2936 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2937 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2939 (set (match_operand:SI 0 "s_register_operand" "=r")
2940 (xor:SI (match_dup 1) (match_dup 2)))]
2942 "eor%.\\t%0, %1, %2"
2943 [(set_attr "conds" "set")]
2946 (define_insn "*xorsi3_compare0_scratch"
2947 [(set (reg:CC_NOOV CC_REGNUM)
2948 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2949 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2953 [(set_attr "conds" "set")]
2956 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2957 ; (NOT D) we can sometimes merge the final NOT into one of the following
2961 [(set (match_operand:SI 0 "s_register_operand" "")
2962 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2963 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2964 (match_operand:SI 3 "arm_rhs_operand" "")))
2965 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2967 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2968 (not:SI (match_dup 3))))
2969 (set (match_dup 0) (not:SI (match_dup 4)))]
2973 (define_insn "*andsi_iorsi3_notsi"
2974 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2975 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2976 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2977 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2979 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2980 [(set_attr "length" "8")
2981 (set_attr "ce_count" "2")
2982 (set_attr "predicable" "yes")]
2985 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2986 ; insns are available?
2988 [(set (match_operand:SI 0 "s_register_operand" "")
2989 (match_operator:SI 1 "logical_binary_operator"
2990 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2991 (match_operand:SI 3 "const_int_operand" "")
2992 (match_operand:SI 4 "const_int_operand" ""))
2993 (match_operator:SI 9 "logical_binary_operator"
2994 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2995 (match_operand:SI 6 "const_int_operand" ""))
2996 (match_operand:SI 7 "s_register_operand" "")])]))
2997 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2999 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3000 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3003 [(ashift:SI (match_dup 2) (match_dup 4))
3007 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3010 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3014 [(set (match_operand:SI 0 "s_register_operand" "")
3015 (match_operator:SI 1 "logical_binary_operator"
3016 [(match_operator:SI 9 "logical_binary_operator"
3017 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3018 (match_operand:SI 6 "const_int_operand" ""))
3019 (match_operand:SI 7 "s_register_operand" "")])
3020 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3021 (match_operand:SI 3 "const_int_operand" "")
3022 (match_operand:SI 4 "const_int_operand" ""))]))
3023 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3025 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3026 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3029 [(ashift:SI (match_dup 2) (match_dup 4))
3033 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3036 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3040 [(set (match_operand:SI 0 "s_register_operand" "")
3041 (match_operator:SI 1 "logical_binary_operator"
3042 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3043 (match_operand:SI 3 "const_int_operand" "")
3044 (match_operand:SI 4 "const_int_operand" ""))
3045 (match_operator:SI 9 "logical_binary_operator"
3046 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3047 (match_operand:SI 6 "const_int_operand" ""))
3048 (match_operand:SI 7 "s_register_operand" "")])]))
3049 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3051 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3052 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3055 [(ashift:SI (match_dup 2) (match_dup 4))
3059 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3062 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3066 [(set (match_operand:SI 0 "s_register_operand" "")
3067 (match_operator:SI 1 "logical_binary_operator"
3068 [(match_operator:SI 9 "logical_binary_operator"
3069 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3070 (match_operand:SI 6 "const_int_operand" ""))
3071 (match_operand:SI 7 "s_register_operand" "")])
3072 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3073 (match_operand:SI 3 "const_int_operand" "")
3074 (match_operand:SI 4 "const_int_operand" ""))]))
3075 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3077 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3078 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3081 [(ashift:SI (match_dup 2) (match_dup 4))
3085 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3088 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3092 ;; Minimum and maximum insns
3094 (define_expand "smaxsi3"
3096 (set (match_operand:SI 0 "s_register_operand" "")
3097 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3098 (match_operand:SI 2 "arm_rhs_operand" "")))
3099 (clobber (reg:CC CC_REGNUM))])]
3102 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3104 /* No need for a clobber of the condition code register here. */
3105 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3106 gen_rtx_SMAX (SImode, operands[1],
3112 (define_insn "*smax_0"
3113 [(set (match_operand:SI 0 "s_register_operand" "=r")
3114 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3117 "bic%?\\t%0, %1, %1, asr #31"
3118 [(set_attr "predicable" "yes")]
3121 (define_insn "*smax_m1"
3122 [(set (match_operand:SI 0 "s_register_operand" "=r")
3123 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3126 "orr%?\\t%0, %1, %1, asr #31"
3127 [(set_attr "predicable" "yes")]
3130 (define_insn "*arm_smax_insn"
3131 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3132 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3133 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3134 (clobber (reg:CC CC_REGNUM))]
3137 cmp\\t%1, %2\;movlt\\t%0, %2
3138 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3139 [(set_attr "conds" "clob")
3140 (set_attr "length" "8,12")]
3143 (define_expand "sminsi3"
3145 (set (match_operand:SI 0 "s_register_operand" "")
3146 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3147 (match_operand:SI 2 "arm_rhs_operand" "")))
3148 (clobber (reg:CC CC_REGNUM))])]
3151 if (operands[2] == const0_rtx)
3153 /* No need for a clobber of the condition code register here. */
3154 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3155 gen_rtx_SMIN (SImode, operands[1],
3161 (define_insn "*smin_0"
3162 [(set (match_operand:SI 0 "s_register_operand" "=r")
3163 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3166 "and%?\\t%0, %1, %1, asr #31"
3167 [(set_attr "predicable" "yes")]
3170 (define_insn "*arm_smin_insn"
3171 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3172 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3173 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3174 (clobber (reg:CC CC_REGNUM))]
3177 cmp\\t%1, %2\;movge\\t%0, %2
3178 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3179 [(set_attr "conds" "clob")
3180 (set_attr "length" "8,12")]
3183 (define_expand "umaxsi3"
3185 (set (match_operand:SI 0 "s_register_operand" "")
3186 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3187 (match_operand:SI 2 "arm_rhs_operand" "")))
3188 (clobber (reg:CC CC_REGNUM))])]
3193 (define_insn "*arm_umaxsi3"
3194 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3195 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3196 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3197 (clobber (reg:CC CC_REGNUM))]
3200 cmp\\t%1, %2\;movcc\\t%0, %2
3201 cmp\\t%1, %2\;movcs\\t%0, %1
3202 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3203 [(set_attr "conds" "clob")
3204 (set_attr "length" "8,8,12")]
3207 (define_expand "uminsi3"
3209 (set (match_operand:SI 0 "s_register_operand" "")
3210 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3211 (match_operand:SI 2 "arm_rhs_operand" "")))
3212 (clobber (reg:CC CC_REGNUM))])]
3217 (define_insn "*arm_uminsi3"
3218 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3219 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3220 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3221 (clobber (reg:CC CC_REGNUM))]
3224 cmp\\t%1, %2\;movcs\\t%0, %2
3225 cmp\\t%1, %2\;movcc\\t%0, %1
3226 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3227 [(set_attr "conds" "clob")
3228 (set_attr "length" "8,8,12")]
3231 (define_insn "*store_minmaxsi"
3232 [(set (match_operand:SI 0 "memory_operand" "=m")
3233 (match_operator:SI 3 "minmax_operator"
3234 [(match_operand:SI 1 "s_register_operand" "r")
3235 (match_operand:SI 2 "s_register_operand" "r")]))
3236 (clobber (reg:CC CC_REGNUM))]
3239 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3240 operands[1], operands[2]);
3241 output_asm_insn (\"cmp\\t%1, %2\", operands);
3243 output_asm_insn (\"ite\t%d3\", operands);
3244 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3245 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3248 [(set_attr "conds" "clob")
3249 (set (attr "length")
3250 (if_then_else (eq_attr "is_thumb" "yes")
3253 (set_attr "type" "store1")]
3256 ; Reject the frame pointer in operand[1], since reloading this after
3257 ; it has been eliminated can cause carnage.
3258 (define_insn "*minmax_arithsi"
3259 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3260 (match_operator:SI 4 "shiftable_operator"
3261 [(match_operator:SI 5 "minmax_operator"
3262 [(match_operand:SI 2 "s_register_operand" "r,r")
3263 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3264 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3265 (clobber (reg:CC CC_REGNUM))]
3266 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3269 enum rtx_code code = GET_CODE (operands[4]);
3272 if (which_alternative != 0 || operands[3] != const0_rtx
3273 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3278 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3279 operands[2], operands[3]);
3280 output_asm_insn (\"cmp\\t%2, %3\", operands);
3284 output_asm_insn (\"ite\\t%d5\", operands);
3286 output_asm_insn (\"it\\t%d5\", operands);
3288 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3290 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3293 [(set_attr "conds" "clob")
3294 (set (attr "length")
3295 (if_then_else (eq_attr "is_thumb" "yes")
3301 ;; Shift and rotation insns
3303 (define_expand "ashldi3"
3304 [(set (match_operand:DI 0 "s_register_operand" "")
3305 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "reg_or_int_operand" "")))]
3309 if (GET_CODE (operands[2]) == CONST_INT)
3311 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3313 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3316 /* Ideally we shouldn't fail here if we could know that operands[1]
3317 ends up already living in an iwmmxt register. Otherwise it's
3318 cheaper to have the alternate code being generated than moving
3319 values to iwmmxt regs and back. */
3322 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3327 (define_insn "arm_ashldi3_1bit"
3328 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3329 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3331 (clobber (reg:CC CC_REGNUM))]
3333 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3334 [(set_attr "conds" "clob")
3335 (set_attr "length" "8")]
3338 (define_expand "ashlsi3"
3339 [(set (match_operand:SI 0 "s_register_operand" "")
3340 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3341 (match_operand:SI 2 "arm_rhs_operand" "")))]
3344 if (GET_CODE (operands[2]) == CONST_INT
3345 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3347 emit_insn (gen_movsi (operands[0], const0_rtx));
3353 (define_insn "*thumb1_ashlsi3"
3354 [(set (match_operand:SI 0 "register_operand" "=l,l")
3355 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3356 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3359 [(set_attr "length" "2")
3360 (set_attr "conds" "set")])
3362 (define_expand "ashrdi3"
3363 [(set (match_operand:DI 0 "s_register_operand" "")
3364 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3365 (match_operand:SI 2 "reg_or_int_operand" "")))]
3368 if (GET_CODE (operands[2]) == CONST_INT)
3370 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3372 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3375 /* Ideally we shouldn't fail here if we could know that operands[1]
3376 ends up already living in an iwmmxt register. Otherwise it's
3377 cheaper to have the alternate code being generated than moving
3378 values to iwmmxt regs and back. */
3381 else if (!TARGET_REALLY_IWMMXT)
3386 (define_insn "arm_ashrdi3_1bit"
3387 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3388 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3390 (clobber (reg:CC CC_REGNUM))]
3392 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3393 [(set_attr "conds" "clob")
3394 (set_attr "insn" "mov")
3395 (set_attr "length" "8")]
3398 (define_expand "ashrsi3"
3399 [(set (match_operand:SI 0 "s_register_operand" "")
3400 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3401 (match_operand:SI 2 "arm_rhs_operand" "")))]
3404 if (GET_CODE (operands[2]) == CONST_INT
3405 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3406 operands[2] = GEN_INT (31);
3410 (define_insn "*thumb1_ashrsi3"
3411 [(set (match_operand:SI 0 "register_operand" "=l,l")
3412 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3413 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3416 [(set_attr "length" "2")
3417 (set_attr "conds" "set")])
3419 (define_expand "lshrdi3"
3420 [(set (match_operand:DI 0 "s_register_operand" "")
3421 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3422 (match_operand:SI 2 "reg_or_int_operand" "")))]
3425 if (GET_CODE (operands[2]) == CONST_INT)
3427 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3429 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3432 /* Ideally we shouldn't fail here if we could know that operands[1]
3433 ends up already living in an iwmmxt register. Otherwise it's
3434 cheaper to have the alternate code being generated than moving
3435 values to iwmmxt regs and back. */
3438 else if (!TARGET_REALLY_IWMMXT)
3443 (define_insn "arm_lshrdi3_1bit"
3444 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3445 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3447 (clobber (reg:CC CC_REGNUM))]
3449 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3450 [(set_attr "conds" "clob")
3451 (set_attr "insn" "mov")
3452 (set_attr "length" "8")]
3455 (define_expand "lshrsi3"
3456 [(set (match_operand:SI 0 "s_register_operand" "")
3457 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3458 (match_operand:SI 2 "arm_rhs_operand" "")))]
3461 if (GET_CODE (operands[2]) == CONST_INT
3462 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3464 emit_insn (gen_movsi (operands[0], const0_rtx));
3470 (define_insn "*thumb1_lshrsi3"
3471 [(set (match_operand:SI 0 "register_operand" "=l,l")
3472 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3473 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3476 [(set_attr "length" "2")
3477 (set_attr "conds" "set")])
3479 (define_expand "rotlsi3"
3480 [(set (match_operand:SI 0 "s_register_operand" "")
3481 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3482 (match_operand:SI 2 "reg_or_int_operand" "")))]
3485 if (GET_CODE (operands[2]) == CONST_INT)
3486 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3489 rtx reg = gen_reg_rtx (SImode);
3490 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3496 (define_expand "rotrsi3"
3497 [(set (match_operand:SI 0 "s_register_operand" "")
3498 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3499 (match_operand:SI 2 "arm_rhs_operand" "")))]
3504 if (GET_CODE (operands[2]) == CONST_INT
3505 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3506 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3508 else /* TARGET_THUMB1 */
3510 if (GET_CODE (operands [2]) == CONST_INT)
3511 operands [2] = force_reg (SImode, operands[2]);
3516 (define_insn "*thumb1_rotrsi3"
3517 [(set (match_operand:SI 0 "register_operand" "=l")
3518 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3519 (match_operand:SI 2 "register_operand" "l")))]
3522 [(set_attr "length" "2")]
3525 (define_insn "*arm_shiftsi3"
3526 [(set (match_operand:SI 0 "s_register_operand" "=r")
3527 (match_operator:SI 3 "shift_operator"
3528 [(match_operand:SI 1 "s_register_operand" "r")
3529 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3531 "* return arm_output_shift(operands, 0);"
3532 [(set_attr "predicable" "yes")
3533 (set_attr "shift" "1")
3534 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3535 (const_string "alu_shift")
3536 (const_string "alu_shift_reg")))]
3539 (define_insn "*shiftsi3_compare0"
3540 [(set (reg:CC_NOOV CC_REGNUM)
3541 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3542 [(match_operand:SI 1 "s_register_operand" "r")
3543 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3545 (set (match_operand:SI 0 "s_register_operand" "=r")
3546 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3548 "* return arm_output_shift(operands, 1);"
3549 [(set_attr "conds" "set")
3550 (set_attr "shift" "1")
3551 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3552 (const_string "alu_shift")
3553 (const_string "alu_shift_reg")))]
3556 (define_insn "*shiftsi3_compare0_scratch"
3557 [(set (reg:CC_NOOV CC_REGNUM)
3558 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3559 [(match_operand:SI 1 "s_register_operand" "r")
3560 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3562 (clobber (match_scratch:SI 0 "=r"))]
3564 "* return arm_output_shift(operands, 1);"
3565 [(set_attr "conds" "set")
3566 (set_attr "shift" "1")]
3569 (define_insn "*not_shiftsi"
3570 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3571 (not:SI (match_operator:SI 3 "shift_operator"
3572 [(match_operand:SI 1 "s_register_operand" "r,r")
3573 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3576 [(set_attr "predicable" "yes")
3577 (set_attr "shift" "1")
3578 (set_attr "insn" "mvn")
3579 (set_attr "arch" "32,a")
3580 (set_attr "type" "alu_shift,alu_shift_reg")])
3582 (define_insn "*not_shiftsi_compare0"
3583 [(set (reg:CC_NOOV CC_REGNUM)
3585 (not:SI (match_operator:SI 3 "shift_operator"
3586 [(match_operand:SI 1 "s_register_operand" "r,r")
3587 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3589 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3590 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3593 [(set_attr "conds" "set")
3594 (set_attr "shift" "1")
3595 (set_attr "insn" "mvn")
3596 (set_attr "arch" "32,a")
3597 (set_attr "type" "alu_shift,alu_shift_reg")])
3599 (define_insn "*not_shiftsi_compare0_scratch"
3600 [(set (reg:CC_NOOV CC_REGNUM)
3602 (not:SI (match_operator:SI 3 "shift_operator"
3603 [(match_operand:SI 1 "s_register_operand" "r,r")
3604 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3606 (clobber (match_scratch:SI 0 "=r,r"))]
3609 [(set_attr "conds" "set")
3610 (set_attr "shift" "1")
3611 (set_attr "insn" "mvn")
3612 (set_attr "arch" "32,a")
3613 (set_attr "type" "alu_shift,alu_shift_reg")])
3615 ;; We don't really have extzv, but defining this using shifts helps
3616 ;; to reduce register pressure later on.
3618 (define_expand "extzv"
3620 (ashift:SI (match_operand:SI 1 "register_operand" "")
3621 (match_operand:SI 2 "const_int_operand" "")))
3622 (set (match_operand:SI 0 "register_operand" "")
3623 (lshiftrt:SI (match_dup 4)
3624 (match_operand:SI 3 "const_int_operand" "")))]
3625 "TARGET_THUMB1 || arm_arch_thumb2"
3628 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3629 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3631 if (arm_arch_thumb2)
3633 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3638 operands[3] = GEN_INT (rshift);
3642 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3646 operands[2] = GEN_INT (lshift);
3647 operands[4] = gen_reg_rtx (SImode);
3652 [(set (match_operand:SI 0 "s_register_operand" "=r")
3653 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3654 (match_operand:SI 2 "const_int_operand" "M")
3655 (match_operand:SI 3 "const_int_operand" "M")))]
3657 "sbfx%?\t%0, %1, %3, %2"
3658 [(set_attr "length" "4")
3659 (set_attr "predicable" "yes")]
3662 (define_insn "extzv_t2"
3663 [(set (match_operand:SI 0 "s_register_operand" "=r")
3664 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3665 (match_operand:SI 2 "const_int_operand" "M")
3666 (match_operand:SI 3 "const_int_operand" "M")))]
3668 "ubfx%?\t%0, %1, %3, %2"
3669 [(set_attr "length" "4")
3670 (set_attr "predicable" "yes")]
3674 ;; Unary arithmetic insns
3676 (define_expand "negdi2"
3678 [(set (match_operand:DI 0 "s_register_operand" "")
3679 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3680 (clobber (reg:CC CC_REGNUM))])]
3685 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3686 ;; The first alternative allows the common case of a *full* overlap.
3687 (define_insn "*arm_negdi2"
3688 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3689 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3690 (clobber (reg:CC CC_REGNUM))]
3692 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3693 [(set_attr "conds" "clob")
3694 (set_attr "length" "8")]
3697 (define_insn "*thumb1_negdi2"
3698 [(set (match_operand:DI 0 "register_operand" "=&l")
3699 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3700 (clobber (reg:CC CC_REGNUM))]
3702 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3703 [(set_attr "length" "6")]
3706 (define_expand "negsi2"
3707 [(set (match_operand:SI 0 "s_register_operand" "")
3708 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3713 (define_insn "*arm_negsi2"
3714 [(set (match_operand:SI 0 "s_register_operand" "=r")
3715 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3717 "rsb%?\\t%0, %1, #0"
3718 [(set_attr "predicable" "yes")]
3721 (define_insn "*thumb1_negsi2"
3722 [(set (match_operand:SI 0 "register_operand" "=l")
3723 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3726 [(set_attr "length" "2")]
3729 (define_expand "negsf2"
3730 [(set (match_operand:SF 0 "s_register_operand" "")
3731 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3732 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3736 (define_expand "negdf2"
3737 [(set (match_operand:DF 0 "s_register_operand" "")
3738 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3739 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3742 ;; abssi2 doesn't really clobber the condition codes if a different register
3743 ;; is being set. To keep things simple, assume during rtl manipulations that
3744 ;; it does, but tell the final scan operator the truth. Similarly for
3747 (define_expand "abssi2"
3749 [(set (match_operand:SI 0 "s_register_operand" "")
3750 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3751 (clobber (match_dup 2))])]
3755 operands[2] = gen_rtx_SCRATCH (SImode);
3757 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3760 (define_insn "*arm_abssi2"
3761 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3762 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3763 (clobber (reg:CC CC_REGNUM))]
3766 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3767 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3768 [(set_attr "conds" "clob,*")
3769 (set_attr "shift" "1")
3770 ;; predicable can't be set based on the variant, so left as no
3771 (set_attr "length" "8")]
3774 (define_insn_and_split "*thumb1_abssi2"
3775 [(set (match_operand:SI 0 "s_register_operand" "=l")
3776 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3777 (clobber (match_scratch:SI 2 "=&l"))]
3780 "TARGET_THUMB1 && reload_completed"
3781 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3782 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3783 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3785 [(set_attr "length" "6")]
3788 (define_insn "*arm_neg_abssi2"
3789 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3790 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3791 (clobber (reg:CC CC_REGNUM))]
3794 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3795 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3796 [(set_attr "conds" "clob,*")
3797 (set_attr "shift" "1")
3798 ;; predicable can't be set based on the variant, so left as no
3799 (set_attr "length" "8")]
3802 (define_insn_and_split "*thumb1_neg_abssi2"
3803 [(set (match_operand:SI 0 "s_register_operand" "=l")
3804 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3805 (clobber (match_scratch:SI 2 "=&l"))]
3808 "TARGET_THUMB1 && reload_completed"
3809 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3810 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3811 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3813 [(set_attr "length" "6")]
3816 (define_expand "abssf2"
3817 [(set (match_operand:SF 0 "s_register_operand" "")
3818 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3819 "TARGET_32BIT && TARGET_HARD_FLOAT"
3822 (define_expand "absdf2"
3823 [(set (match_operand:DF 0 "s_register_operand" "")
3824 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3825 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3828 (define_expand "sqrtsf2"
3829 [(set (match_operand:SF 0 "s_register_operand" "")
3830 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3831 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3834 (define_expand "sqrtdf2"
3835 [(set (match_operand:DF 0 "s_register_operand" "")
3836 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3837 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3840 (define_insn_and_split "one_cmpldi2"
3841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3842 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3845 "TARGET_32BIT && reload_completed"
3846 [(set (match_dup 0) (not:SI (match_dup 1)))
3847 (set (match_dup 2) (not:SI (match_dup 3)))]
3850 operands[2] = gen_highpart (SImode, operands[0]);
3851 operands[0] = gen_lowpart (SImode, operands[0]);
3852 operands[3] = gen_highpart (SImode, operands[1]);
3853 operands[1] = gen_lowpart (SImode, operands[1]);
3855 [(set_attr "length" "8")
3856 (set_attr "predicable" "yes")]
3859 (define_expand "one_cmplsi2"
3860 [(set (match_operand:SI 0 "s_register_operand" "")
3861 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3866 (define_insn "*arm_one_cmplsi2"
3867 [(set (match_operand:SI 0 "s_register_operand" "=r")
3868 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3871 [(set_attr "predicable" "yes")
3872 (set_attr "insn" "mvn")]
3875 (define_insn "*thumb1_one_cmplsi2"
3876 [(set (match_operand:SI 0 "register_operand" "=l")
3877 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3880 [(set_attr "length" "2")
3881 (set_attr "insn" "mvn")]
3884 (define_insn "*notsi_compare0"
3885 [(set (reg:CC_NOOV CC_REGNUM)
3886 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3888 (set (match_operand:SI 0 "s_register_operand" "=r")
3889 (not:SI (match_dup 1)))]
3892 [(set_attr "conds" "set")
3893 (set_attr "insn" "mvn")]
3896 (define_insn "*notsi_compare0_scratch"
3897 [(set (reg:CC_NOOV CC_REGNUM)
3898 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3900 (clobber (match_scratch:SI 0 "=r"))]
3903 [(set_attr "conds" "set")
3904 (set_attr "insn" "mvn")]
3907 ;; Fixed <--> Floating conversion insns
3909 (define_expand "floatsihf2"
3910 [(set (match_operand:HF 0 "general_operand" "")
3911 (float:HF (match_operand:SI 1 "general_operand" "")))]
3915 rtx op1 = gen_reg_rtx (SFmode);
3916 expand_float (op1, operands[1], 0);
3917 op1 = convert_to_mode (HFmode, op1, 0);
3918 emit_move_insn (operands[0], op1);
3923 (define_expand "floatdihf2"
3924 [(set (match_operand:HF 0 "general_operand" "")
3925 (float:HF (match_operand:DI 1 "general_operand" "")))]
3929 rtx op1 = gen_reg_rtx (SFmode);
3930 expand_float (op1, operands[1], 0);
3931 op1 = convert_to_mode (HFmode, op1, 0);
3932 emit_move_insn (operands[0], op1);
3937 (define_expand "floatsisf2"
3938 [(set (match_operand:SF 0 "s_register_operand" "")
3939 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3940 "TARGET_32BIT && TARGET_HARD_FLOAT"
3942 if (TARGET_MAVERICK)
3944 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3949 (define_expand "floatsidf2"
3950 [(set (match_operand:DF 0 "s_register_operand" "")
3951 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3952 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3954 if (TARGET_MAVERICK)
3956 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3961 (define_expand "fix_trunchfsi2"
3962 [(set (match_operand:SI 0 "general_operand" "")
3963 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3967 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3968 expand_fix (operands[0], op1, 0);
3973 (define_expand "fix_trunchfdi2"
3974 [(set (match_operand:DI 0 "general_operand" "")
3975 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3979 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3980 expand_fix (operands[0], op1, 0);
3985 (define_expand "fix_truncsfsi2"
3986 [(set (match_operand:SI 0 "s_register_operand" "")
3987 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3988 "TARGET_32BIT && TARGET_HARD_FLOAT"
3990 if (TARGET_MAVERICK)
3992 if (!cirrus_fp_register (operands[0], SImode))
3993 operands[0] = force_reg (SImode, operands[0]);
3994 if (!cirrus_fp_register (operands[1], SFmode))
3995 operands[1] = force_reg (SFmode, operands[0]);
3996 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4001 (define_expand "fix_truncdfsi2"
4002 [(set (match_operand:SI 0 "s_register_operand" "")
4003 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4004 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4006 if (TARGET_MAVERICK)
4008 if (!cirrus_fp_register (operands[1], DFmode))
4009 operands[1] = force_reg (DFmode, operands[0]);
4010 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4017 (define_expand "truncdfsf2"
4018 [(set (match_operand:SF 0 "s_register_operand" "")
4020 (match_operand:DF 1 "s_register_operand" "")))]
4021 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4025 /* DFmode -> HFmode conversions have to go through SFmode. */
4026 (define_expand "truncdfhf2"
4027 [(set (match_operand:HF 0 "general_operand" "")
4029 (match_operand:DF 1 "general_operand" "")))]
4034 op1 = convert_to_mode (SFmode, operands[1], 0);
4035 op1 = convert_to_mode (HFmode, op1, 0);
4036 emit_move_insn (operands[0], op1);
4041 ;; Zero and sign extension instructions.
4043 (define_insn "zero_extend<mode>di2"
4044 [(set (match_operand:DI 0 "s_register_operand" "=r")
4045 (zero_extend:DI (match_operand:QHSI 1 "nonimmediate_operand" "rm")))]
4046 "TARGET_32BIT <qhs_zextenddi_cond>"
4048 [(set_attr "length" "8")
4049 (set_attr "ce_count" "2")
4050 (set_attr "predicable" "yes")]
4053 (define_insn "extend<mode>di2"
4054 [(set (match_operand:DI 0 "s_register_operand" "=r")
4055 (sign_extend:DI (match_operand:QHSI 1 "nonimmediate_operand" "rm")))]
4056 "TARGET_32BIT <qhs_sextenddi_cond>"
4058 [(set_attr "length" "8")
4059 (set_attr "ce_count" "2")
4060 (set_attr "shift" "1")
4061 (set_attr "predicable" "yes")]
4064 ;; Splits for all extensions to DImode
4066 [(set (match_operand:DI 0 "s_register_operand" "")
4067 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4069 [(set (match_dup 0) (match_dup 1))]
4071 rtx lo_part = gen_lowpart (SImode, operands[0]);
4072 enum machine_mode src_mode = GET_MODE (operands[1]);
4074 if (REG_P (operands[0])
4075 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4076 emit_clobber (operands[0]);
4077 if (!REG_P (lo_part) || src_mode != SImode
4078 || !rtx_equal_p (lo_part, operands[1]))
4080 if (src_mode == SImode)
4081 emit_move_insn (lo_part, operands[1]);
4083 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4084 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4085 operands[1] = lo_part;
4087 operands[0] = gen_highpart (SImode, operands[0]);
4088 operands[1] = const0_rtx;
4092 [(set (match_operand:DI 0 "s_register_operand" "")
4093 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4095 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4097 rtx lo_part = gen_lowpart (SImode, operands[0]);
4098 enum machine_mode src_mode = GET_MODE (operands[1]);
4100 if (REG_P (operands[0])
4101 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4102 emit_clobber (operands[0]);
4104 if (!REG_P (lo_part) || src_mode != SImode
4105 || !rtx_equal_p (lo_part, operands[1]))
4107 if (src_mode == SImode)
4108 emit_move_insn (lo_part, operands[1]);
4110 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4111 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4112 operands[1] = lo_part;
4114 operands[0] = gen_highpart (SImode, operands[0]);
4117 (define_expand "zero_extendhisi2"
4118 [(set (match_operand:SI 0 "s_register_operand" "")
4119 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4122 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4124 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4127 if (!arm_arch6 && !MEM_P (operands[1]))
4129 rtx t = gen_lowpart (SImode, operands[1]);
4130 rtx tmp = gen_reg_rtx (SImode);
4131 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4132 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4138 [(set (match_operand:SI 0 "register_operand" "")
4139 (zero_extend:SI (match_operand:HI 1 "register_operand" "")))]
4140 "!TARGET_THUMB2 && !arm_arch6"
4141 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4142 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4144 operands[2] = gen_lowpart (SImode, operands[1]);
4147 (define_insn "*thumb1_zero_extendhisi2"
4148 [(set (match_operand:SI 0 "register_operand" "=l,l")
4149 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4154 if (which_alternative == 0 && arm_arch6)
4155 return "uxth\t%0, %1";
4156 if (which_alternative == 0)
4159 mem = XEXP (operands[1], 0);
4161 if (GET_CODE (mem) == CONST)
4162 mem = XEXP (mem, 0);
4164 if (GET_CODE (mem) == PLUS)
4166 rtx a = XEXP (mem, 0);
4168 /* This can happen due to bugs in reload. */
4169 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4172 ops[0] = operands[0];
4175 output_asm_insn ("mov\t%0, %1", ops);
4177 XEXP (mem, 0) = operands[0];
4181 return "ldrh\t%0, %1";
4183 [(set_attr_alternative "length"
4184 [(if_then_else (eq_attr "is_arch6" "yes")
4185 (const_int 2) (const_int 4))
4187 (set_attr "type" "alu_shift,load_byte")]
4190 (define_insn "*arm_zero_extendhisi2"
4191 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4192 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4193 "TARGET_ARM && arm_arch4 && !arm_arch6"
4197 [(set_attr "type" "alu_shift,load_byte")
4198 (set_attr "predicable" "yes")]
4201 (define_insn "*arm_zero_extendhisi2_v6"
4202 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4203 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4204 "TARGET_ARM && arm_arch6"
4208 [(set_attr "type" "alu_shift,load_byte")
4209 (set_attr "predicable" "yes")]
4212 (define_insn "*arm_zero_extendhisi2addsi"
4213 [(set (match_operand:SI 0 "s_register_operand" "=r")
4214 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4215 (match_operand:SI 2 "s_register_operand" "r")))]
4217 "uxtah%?\\t%0, %2, %1"
4218 [(set_attr "type" "alu_shift")
4219 (set_attr "predicable" "yes")]
4222 (define_expand "zero_extendqisi2"
4223 [(set (match_operand:SI 0 "s_register_operand" "")
4224 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4227 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4229 emit_insn (gen_andsi3 (operands[0],
4230 gen_lowpart (SImode, operands[1]),
4234 if (!arm_arch6 && !MEM_P (operands[1]))
4236 rtx t = gen_lowpart (SImode, operands[1]);
4237 rtx tmp = gen_reg_rtx (SImode);
4238 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4239 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4245 [(set (match_operand:SI 0 "register_operand" "")
4246 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4248 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4249 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4251 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4254 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4259 (define_insn "*thumb1_zero_extendqisi2"
4260 [(set (match_operand:SI 0 "register_operand" "=l,l")
4261 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4262 "TARGET_THUMB1 && !arm_arch6"
4266 [(set_attr "length" "4,2")
4267 (set_attr "type" "alu_shift,load_byte")
4268 (set_attr "pool_range" "*,32")]
4271 (define_insn "*thumb1_zero_extendqisi2_v6"
4272 [(set (match_operand:SI 0 "register_operand" "=l,l")
4273 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4274 "TARGET_THUMB1 && arm_arch6"
4278 [(set_attr "length" "2")
4279 (set_attr "type" "alu_shift,load_byte")]
4282 (define_insn "*arm_zero_extendqisi2"
4283 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4284 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4285 "TARGET_ARM && !arm_arch6"
4288 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4289 [(set_attr "length" "8,4")
4290 (set_attr "type" "alu_shift,load_byte")
4291 (set_attr "predicable" "yes")]
4294 (define_insn "*arm_zero_extendqisi2_v6"
4295 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4296 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4297 "TARGET_ARM && arm_arch6"
4300 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4301 [(set_attr "type" "alu_shift,load_byte")
4302 (set_attr "predicable" "yes")]
4305 (define_insn "*arm_zero_extendqisi2addsi"
4306 [(set (match_operand:SI 0 "s_register_operand" "=r")
4307 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4308 (match_operand:SI 2 "s_register_operand" "r")))]
4310 "uxtab%?\\t%0, %2, %1"
4311 [(set_attr "predicable" "yes")
4312 (set_attr "insn" "xtab")
4313 (set_attr "type" "alu_shift")]
4317 [(set (match_operand:SI 0 "s_register_operand" "")
4318 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4319 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4320 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4321 [(set (match_dup 2) (match_dup 1))
4322 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4327 [(set (match_operand:SI 0 "s_register_operand" "")
4328 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4329 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4330 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4331 [(set (match_dup 2) (match_dup 1))
4332 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4338 [(set (match_operand:SI 0 "s_register_operand" "")
4339 (ior_xor:SI (and:SI (ashift:SI
4340 (match_operand:SI 1 "s_register_operand" "")
4341 (match_operand:SI 2 "const_int_operand" ""))
4342 (match_operand:SI 3 "const_int_operand" ""))
4344 (match_operator 5 "subreg_lowpart_operator"
4345 [(match_operand:SI 4 "s_register_operand" "")]))))]
4347 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4348 == (GET_MODE_MASK (GET_MODE (operands[5]))
4349 & (GET_MODE_MASK (GET_MODE (operands[5]))
4350 << (INTVAL (operands[2])))))"
4351 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4353 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4354 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4357 (define_insn "*compareqi_eq0"
4358 [(set (reg:CC_Z CC_REGNUM)
4359 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4363 [(set_attr "conds" "set")]
4366 (define_expand "extendhisi2"
4367 [(set (match_operand:SI 0 "s_register_operand" "")
4368 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4373 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4376 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4378 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4382 if (!arm_arch6 && !MEM_P (operands[1]))
4384 rtx t = gen_lowpart (SImode, operands[1]);
4385 rtx tmp = gen_reg_rtx (SImode);
4386 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4387 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4394 [(set (match_operand:SI 0 "register_operand" "")
4395 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4396 (clobber (match_scratch:SI 2 ""))])]
4398 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4399 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4401 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4404 ;; We used to have an early-clobber on the scratch register here.
4405 ;; However, there's a bug somewhere in reload which means that this
4406 ;; can be partially ignored during spill allocation if the memory
4407 ;; address also needs reloading; this causes us to die later on when
4408 ;; we try to verify the operands. Fortunately, we don't really need
4409 ;; the early-clobber: we can always use operand 0 if operand 2
4410 ;; overlaps the address.
4411 (define_insn "thumb1_extendhisi2"
4412 [(set (match_operand:SI 0 "register_operand" "=l,l")
4413 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4414 (clobber (match_scratch:SI 2 "=X,l"))]
4421 if (which_alternative == 0 && !arm_arch6)
4423 if (which_alternative == 0)
4424 return \"sxth\\t%0, %1\";
4426 mem = XEXP (operands[1], 0);
4428 /* This code used to try to use 'V', and fix the address only if it was
4429 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4430 range of QImode offsets, and offsettable_address_p does a QImode
4433 if (GET_CODE (mem) == CONST)
4434 mem = XEXP (mem, 0);
4436 if (GET_CODE (mem) == LABEL_REF)
4437 return \"ldr\\t%0, %1\";
4439 if (GET_CODE (mem) == PLUS)
4441 rtx a = XEXP (mem, 0);
4442 rtx b = XEXP (mem, 1);
4444 if (GET_CODE (a) == LABEL_REF
4445 && GET_CODE (b) == CONST_INT)
4446 return \"ldr\\t%0, %1\";
4448 if (GET_CODE (b) == REG)
4449 return \"ldrsh\\t%0, %1\";
4457 ops[2] = const0_rtx;
4460 gcc_assert (GET_CODE (ops[1]) == REG);
4462 ops[0] = operands[0];
4463 if (reg_mentioned_p (operands[2], ops[1]))
4466 ops[3] = operands[2];
4467 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4470 [(set_attr_alternative "length"
4471 [(if_then_else (eq_attr "is_arch6" "yes")
4472 (const_int 2) (const_int 4))
4474 (set_attr "type" "alu_shift,load_byte")
4475 (set_attr "pool_range" "*,1020")]
4478 ;; This pattern will only be used when ldsh is not available
4479 (define_expand "extendhisi2_mem"
4480 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4482 (zero_extend:SI (match_dup 7)))
4483 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4484 (set (match_operand:SI 0 "" "")
4485 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4490 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4492 mem1 = change_address (operands[1], QImode, addr);
4493 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4494 operands[0] = gen_lowpart (SImode, operands[0]);
4496 operands[2] = gen_reg_rtx (SImode);
4497 operands[3] = gen_reg_rtx (SImode);
4498 operands[6] = gen_reg_rtx (SImode);
4501 if (BYTES_BIG_ENDIAN)
4503 operands[4] = operands[2];
4504 operands[5] = operands[3];
4508 operands[4] = operands[3];
4509 operands[5] = operands[2];
4515 [(set (match_operand:SI 0 "register_operand" "")
4516 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4518 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4519 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4521 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4524 (define_insn "*arm_extendhisi2"
4525 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4526 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4527 "TARGET_ARM && arm_arch4 && !arm_arch6"
4531 [(set_attr "length" "8,4")
4532 (set_attr "type" "alu_shift,load_byte")
4533 (set_attr "predicable" "yes")
4534 (set_attr "pool_range" "*,256")
4535 (set_attr "neg_pool_range" "*,244")]
4538 ;; ??? Check Thumb-2 pool range
4539 (define_insn "*arm_extendhisi2_v6"
4540 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4541 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4542 "TARGET_32BIT && arm_arch6"
4546 [(set_attr "type" "alu_shift,load_byte")
4547 (set_attr "predicable" "yes")
4548 (set_attr "pool_range" "*,256")
4549 (set_attr "neg_pool_range" "*,244")]
4552 (define_insn "*arm_extendhisi2addsi"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r")
4554 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4555 (match_operand:SI 2 "s_register_operand" "r")))]
4557 "sxtah%?\\t%0, %2, %1"
4560 (define_expand "extendqihi2"
4562 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4564 (set (match_operand:HI 0 "s_register_operand" "")
4565 (ashiftrt:SI (match_dup 2)
4570 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4572 emit_insn (gen_rtx_SET (VOIDmode,
4574 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4577 if (!s_register_operand (operands[1], QImode))
4578 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4579 operands[0] = gen_lowpart (SImode, operands[0]);
4580 operands[1] = gen_lowpart (SImode, operands[1]);
4581 operands[2] = gen_reg_rtx (SImode);
4585 (define_insn "*arm_extendqihi_insn"
4586 [(set (match_operand:HI 0 "s_register_operand" "=r")
4587 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4588 "TARGET_ARM && arm_arch4"
4589 "ldr%(sb%)\\t%0, %1"
4590 [(set_attr "type" "load_byte")
4591 (set_attr "predicable" "yes")
4592 (set_attr "pool_range" "256")
4593 (set_attr "neg_pool_range" "244")]
4596 (define_expand "extendqisi2"
4597 [(set (match_operand:SI 0 "s_register_operand" "")
4598 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4601 if (!arm_arch4 && MEM_P (operands[1]))
4602 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4604 if (!arm_arch6 && !MEM_P (operands[1]))
4606 rtx t = gen_lowpart (SImode, operands[1]);
4607 rtx tmp = gen_reg_rtx (SImode);
4608 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4609 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4615 [(set (match_operand:SI 0 "register_operand" "")
4616 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4618 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4619 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4621 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4624 (define_insn "*arm_extendqisi"
4625 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4626 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4627 "TARGET_ARM && arm_arch4 && !arm_arch6"
4631 [(set_attr "length" "8,4")
4632 (set_attr "type" "alu_shift,load_byte")
4633 (set_attr "predicable" "yes")
4634 (set_attr "pool_range" "*,256")
4635 (set_attr "neg_pool_range" "*,244")]
4638 (define_insn "*arm_extendqisi_v6"
4639 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4641 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4642 "TARGET_ARM && arm_arch6"
4646 [(set_attr "type" "alu_shift,load_byte")
4647 (set_attr "predicable" "yes")
4648 (set_attr "pool_range" "*,256")
4649 (set_attr "neg_pool_range" "*,244")]
4652 (define_insn "*arm_extendqisi2addsi"
4653 [(set (match_operand:SI 0 "s_register_operand" "=r")
4654 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4655 (match_operand:SI 2 "s_register_operand" "r")))]
4657 "sxtab%?\\t%0, %2, %1"
4658 [(set_attr "type" "alu_shift")
4659 (set_attr "insn" "xtab")
4660 (set_attr "predicable" "yes")]
4664 [(set (match_operand:SI 0 "register_operand" "")
4665 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4666 "TARGET_THUMB1 && reload_completed"
4667 [(set (match_dup 0) (match_dup 2))
4668 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4670 rtx addr = XEXP (operands[1], 0);
4672 if (GET_CODE (addr) == CONST)
4673 addr = XEXP (addr, 0);
4675 if (GET_CODE (addr) == PLUS
4676 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4677 /* No split necessary. */
4680 if (GET_CODE (addr) == PLUS
4681 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4684 if (reg_overlap_mentioned_p (operands[0], addr))
4686 rtx t = gen_lowpart (QImode, operands[0]);
4687 emit_move_insn (t, operands[1]);
4688 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4694 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4695 operands[2] = const0_rtx;
4697 else if (GET_CODE (addr) != PLUS)
4699 else if (REG_P (XEXP (addr, 0)))
4701 operands[2] = XEXP (addr, 1);
4702 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4706 operands[2] = XEXP (addr, 0);
4707 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4710 operands[3] = change_address (operands[1], QImode, addr);
4714 [(set (match_operand:SI 0 "register_operand" "")
4715 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4716 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4717 (set (match_operand:SI 3 "register_operand" "")
4718 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4720 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4721 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4722 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4723 && (peep2_reg_dead_p (3, operands[0])
4724 || rtx_equal_p (operands[0], operands[3]))
4725 && (peep2_reg_dead_p (3, operands[2])
4726 || rtx_equal_p (operands[2], operands[3]))"
4727 [(set (match_dup 2) (match_dup 1))
4728 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4730 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4731 operands[4] = change_address (operands[4], QImode, addr);
4734 (define_insn "thumb1_extendqisi2"
4735 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4736 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4741 if (which_alternative == 0 && arm_arch6)
4742 return "sxtb\\t%0, %1";
4743 if (which_alternative == 0)
4746 addr = XEXP (operands[1], 0);
4747 if (GET_CODE (addr) == PLUS
4748 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4749 return "ldrsb\\t%0, %1";
4753 [(set_attr_alternative "length"
4754 [(if_then_else (eq_attr "is_arch6" "yes")
4755 (const_int 2) (const_int 4))
4757 (if_then_else (eq_attr "is_arch6" "yes")
4758 (const_int 4) (const_int 6))])
4759 (set_attr "type" "alu_shift,load_byte,load_byte")]
4762 (define_expand "extendsfdf2"
4763 [(set (match_operand:DF 0 "s_register_operand" "")
4764 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4765 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4769 /* HFmode -> DFmode conversions have to go through SFmode. */
4770 (define_expand "extendhfdf2"
4771 [(set (match_operand:DF 0 "general_operand" "")
4772 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4777 op1 = convert_to_mode (SFmode, operands[1], 0);
4778 op1 = convert_to_mode (DFmode, op1, 0);
4779 emit_insn (gen_movdf (operands[0], op1));
4784 ;; Move insns (including loads and stores)
4786 ;; XXX Just some ideas about movti.
4787 ;; I don't think these are a good idea on the arm, there just aren't enough
4789 ;;(define_expand "loadti"
4790 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4791 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4794 ;;(define_expand "storeti"
4795 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4796 ;; (match_operand:TI 1 "s_register_operand" ""))]
4799 ;;(define_expand "movti"
4800 ;; [(set (match_operand:TI 0 "general_operand" "")
4801 ;; (match_operand:TI 1 "general_operand" ""))]
4807 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4808 ;; operands[1] = copy_to_reg (operands[1]);
4809 ;; if (GET_CODE (operands[0]) == MEM)
4810 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4811 ;; else if (GET_CODE (operands[1]) == MEM)
4812 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4816 ;; emit_insn (insn);
4820 ;; Recognize garbage generated above.
4823 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4824 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4828 ;; register mem = (which_alternative < 3);
4829 ;; register const char *template;
4831 ;; operands[mem] = XEXP (operands[mem], 0);
4832 ;; switch (which_alternative)
4834 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4835 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4836 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4837 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4838 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4839 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4841 ;; output_asm_insn (template, operands);
4845 (define_expand "movdi"
4846 [(set (match_operand:DI 0 "general_operand" "")
4847 (match_operand:DI 1 "general_operand" ""))]
4850 if (can_create_pseudo_p ())
4852 if (GET_CODE (operands[0]) != REG)
4853 operands[1] = force_reg (DImode, operands[1]);
4858 (define_insn "*arm_movdi"
4859 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4860 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4862 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4864 && ( register_operand (operands[0], DImode)
4865 || register_operand (operands[1], DImode))"
4867 switch (which_alternative)
4874 return output_move_double (operands);
4877 [(set_attr "length" "8,12,16,8,8")
4878 (set_attr "type" "*,*,*,load2,store2")
4879 (set_attr "arm_pool_range" "*,*,*,1020,*")
4880 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4881 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4882 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4886 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4887 (match_operand:ANY64 1 "const_double_operand" ""))]
4890 && (arm_const_double_inline_cost (operands[1])
4891 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4894 arm_split_constant (SET, SImode, curr_insn,
4895 INTVAL (gen_lowpart (SImode, operands[1])),
4896 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4897 arm_split_constant (SET, SImode, curr_insn,
4898 INTVAL (gen_highpart_mode (SImode,
4899 GET_MODE (operands[0]),
4901 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4906 ; If optimizing for size, or if we have load delay slots, then
4907 ; we want to split the constant into two separate operations.
4908 ; In both cases this may split a trivial part into a single data op
4909 ; leaving a single complex constant to load. We can also get longer
4910 ; offsets in a LDR which means we get better chances of sharing the pool
4911 ; entries. Finally, we can normally do a better job of scheduling
4912 ; LDR instructions than we can with LDM.
4913 ; This pattern will only match if the one above did not.
4915 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4916 (match_operand:ANY64 1 "const_double_operand" ""))]
4917 "TARGET_ARM && reload_completed
4918 && arm_const_double_by_parts (operands[1])"
4919 [(set (match_dup 0) (match_dup 1))
4920 (set (match_dup 2) (match_dup 3))]
4922 operands[2] = gen_highpart (SImode, operands[0]);
4923 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4925 operands[0] = gen_lowpart (SImode, operands[0]);
4926 operands[1] = gen_lowpart (SImode, operands[1]);
4931 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4932 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4933 "TARGET_EITHER && reload_completed"
4934 [(set (match_dup 0) (match_dup 1))
4935 (set (match_dup 2) (match_dup 3))]
4937 operands[2] = gen_highpart (SImode, operands[0]);
4938 operands[3] = gen_highpart (SImode, operands[1]);
4939 operands[0] = gen_lowpart (SImode, operands[0]);
4940 operands[1] = gen_lowpart (SImode, operands[1]);
4942 /* Handle a partial overlap. */
4943 if (rtx_equal_p (operands[0], operands[3]))
4945 rtx tmp0 = operands[0];
4946 rtx tmp1 = operands[1];
4948 operands[0] = operands[2];
4949 operands[1] = operands[3];
4956 ;; We can't actually do base+index doubleword loads if the index and
4957 ;; destination overlap. Split here so that we at least have chance to
4960 [(set (match_operand:DI 0 "s_register_operand" "")
4961 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4962 (match_operand:SI 2 "s_register_operand" ""))))]
4964 && reg_overlap_mentioned_p (operands[0], operands[1])
4965 && reg_overlap_mentioned_p (operands[0], operands[2])"
4967 (plus:SI (match_dup 1)
4970 (mem:DI (match_dup 4)))]
4972 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4976 ;;; ??? This should have alternatives for constants.
4977 ;;; ??? This was originally identical to the movdf_insn pattern.
4978 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4979 ;;; thumb_reorg with a memory reference.
4980 (define_insn "*thumb1_movdi_insn"
4981 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4982 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4984 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4985 && ( register_operand (operands[0], DImode)
4986 || register_operand (operands[1], DImode))"
4989 switch (which_alternative)
4993 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4994 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4995 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4997 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4999 operands[1] = GEN_INT (- INTVAL (operands[1]));
5000 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5002 return \"ldmia\\t%1, {%0, %H0}\";
5004 return \"stmia\\t%0, {%1, %H1}\";
5006 return thumb_load_double_from_address (operands);
5008 operands[2] = gen_rtx_MEM (SImode,
5009 plus_constant (XEXP (operands[0], 0), 4));
5010 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5013 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5014 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5015 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5018 [(set_attr "length" "4,4,6,2,2,6,4,4")
5019 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5020 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5021 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5024 (define_expand "movsi"
5025 [(set (match_operand:SI 0 "general_operand" "")
5026 (match_operand:SI 1 "general_operand" ""))]
5030 rtx base, offset, tmp;
5034 /* Everything except mem = const or mem = mem can be done easily. */
5035 if (GET_CODE (operands[0]) == MEM)
5036 operands[1] = force_reg (SImode, operands[1]);
5037 if (arm_general_register_operand (operands[0], SImode)
5038 && GET_CODE (operands[1]) == CONST_INT
5039 && !(const_ok_for_arm (INTVAL (operands[1]))
5040 || const_ok_for_arm (~INTVAL (operands[1]))))
5042 arm_split_constant (SET, SImode, NULL_RTX,
5043 INTVAL (operands[1]), operands[0], NULL_RTX,
5044 optimize && can_create_pseudo_p ());
5048 if (TARGET_USE_MOVT && !target_word_relocations
5049 && GET_CODE (operands[1]) == SYMBOL_REF
5050 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5052 arm_emit_movpair (operands[0], operands[1]);
5056 else /* TARGET_THUMB1... */
5058 if (can_create_pseudo_p ())
5060 if (GET_CODE (operands[0]) != REG)
5061 operands[1] = force_reg (SImode, operands[1]);
5065 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5067 split_const (operands[1], &base, &offset);
5068 if (GET_CODE (base) == SYMBOL_REF
5069 && !offset_within_block_p (base, INTVAL (offset)))
5071 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5072 emit_move_insn (tmp, base);
5073 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5078 /* Recognize the case where operand[1] is a reference to thread-local
5079 data and load its address to a register. */
5080 if (arm_tls_referenced_p (operands[1]))
5082 rtx tmp = operands[1];
5085 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5087 addend = XEXP (XEXP (tmp, 0), 1);
5088 tmp = XEXP (XEXP (tmp, 0), 0);
5091 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5092 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5094 tmp = legitimize_tls_address (tmp,
5095 !can_create_pseudo_p () ? operands[0] : 0);
5098 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5099 tmp = force_operand (tmp, operands[0]);
5104 && (CONSTANT_P (operands[1])
5105 || symbol_mentioned_p (operands[1])
5106 || label_mentioned_p (operands[1])))
5107 operands[1] = legitimize_pic_address (operands[1], SImode,
5108 (!can_create_pseudo_p ()
5115 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5116 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5117 ;; so this does not matter.
5118 (define_insn "*arm_movt"
5119 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5120 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5121 (match_operand:SI 2 "general_operand" "i")))]
5123 "movt%?\t%0, #:upper16:%c2"
5124 [(set_attr "predicable" "yes")
5125 (set_attr "length" "4")]
5128 (define_insn "*arm_movsi_insn"
5129 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5130 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5131 "TARGET_ARM && ! TARGET_IWMMXT
5132 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5133 && ( register_operand (operands[0], SImode)
5134 || register_operand (operands[1], SImode))"
5142 [(set_attr "type" "*,*,*,*,load1,store1")
5143 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5144 (set_attr "predicable" "yes")
5145 (set_attr "pool_range" "*,*,*,*,4096,*")
5146 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5150 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5151 (match_operand:SI 1 "const_int_operand" ""))]
5153 && (!(const_ok_for_arm (INTVAL (operands[1]))
5154 || const_ok_for_arm (~INTVAL (operands[1]))))"
5155 [(clobber (const_int 0))]
5157 arm_split_constant (SET, SImode, NULL_RTX,
5158 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5163 (define_insn "*thumb1_movsi_insn"
5164 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5165 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5167 && ( register_operand (operands[0], SImode)
5168 || register_operand (operands[1], SImode))"
5179 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5180 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5181 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5182 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5185 [(set (match_operand:SI 0 "register_operand" "")
5186 (match_operand:SI 1 "const_int_operand" ""))]
5187 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5188 [(set (match_dup 2) (match_dup 1))
5189 (set (match_dup 0) (neg:SI (match_dup 2)))]
5192 operands[1] = GEN_INT (- INTVAL (operands[1]));
5193 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5198 [(set (match_operand:SI 0 "register_operand" "")
5199 (match_operand:SI 1 "const_int_operand" ""))]
5200 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5201 [(set (match_dup 2) (match_dup 1))
5202 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5205 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5206 unsigned HOST_WIDE_INT mask = 0xff;
5209 for (i = 0; i < 25; i++)
5210 if ((val & (mask << i)) == val)
5213 /* Don't split if the shift is zero. */
5217 operands[1] = GEN_INT (val >> i);
5218 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5219 operands[3] = GEN_INT (i);
5223 ;; When generating pic, we need to load the symbol offset into a register.
5224 ;; So that the optimizer does not confuse this with a normal symbol load
5225 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5226 ;; since that is the only type of relocation we can use.
5228 ;; Wrap calculation of the whole PIC address in a single pattern for the
5229 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5230 ;; a PIC address involves two loads from memory, so we want to CSE it
5231 ;; as often as possible.
5232 ;; This pattern will be split into one of the pic_load_addr_* patterns
5233 ;; and a move after GCSE optimizations.
5235 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5236 (define_expand "calculate_pic_address"
5237 [(set (match_operand:SI 0 "register_operand" "")
5238 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5239 (unspec:SI [(match_operand:SI 2 "" "")]
5244 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5246 [(set (match_operand:SI 0 "register_operand" "")
5247 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5248 (unspec:SI [(match_operand:SI 2 "" "")]
5251 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5252 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5253 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5256 ;; The rather odd constraints on the following are to force reload to leave
5257 ;; the insn alone, and to force the minipool generation pass to then move
5258 ;; the GOT symbol to memory.
5260 (define_insn "pic_load_addr_32bit"
5261 [(set (match_operand:SI 0 "s_register_operand" "=r")
5262 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5263 "TARGET_32BIT && flag_pic"
5265 [(set_attr "type" "load1")
5266 (set_attr "pool_range" "4096")
5267 (set (attr "neg_pool_range")
5268 (if_then_else (eq_attr "is_thumb" "no")
5273 (define_insn "pic_load_addr_thumb1"
5274 [(set (match_operand:SI 0 "s_register_operand" "=l")
5275 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5276 "TARGET_THUMB1 && flag_pic"
5278 [(set_attr "type" "load1")
5279 (set (attr "pool_range") (const_int 1024))]
5282 (define_insn "pic_add_dot_plus_four"
5283 [(set (match_operand:SI 0 "register_operand" "=r")
5284 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5286 (match_operand 2 "" "")]
5290 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5291 INTVAL (operands[2]));
5292 return \"add\\t%0, %|pc\";
5294 [(set_attr "length" "2")]
5297 (define_insn "pic_add_dot_plus_eight"
5298 [(set (match_operand:SI 0 "register_operand" "=r")
5299 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5301 (match_operand 2 "" "")]
5305 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5306 INTVAL (operands[2]));
5307 return \"add%?\\t%0, %|pc, %1\";
5309 [(set_attr "predicable" "yes")]
5312 (define_insn "tls_load_dot_plus_eight"
5313 [(set (match_operand:SI 0 "register_operand" "=r")
5314 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5316 (match_operand 2 "" "")]
5320 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5321 INTVAL (operands[2]));
5322 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5324 [(set_attr "predicable" "yes")]
5327 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5328 ;; followed by a load. These sequences can be crunched down to
5329 ;; tls_load_dot_plus_eight by a peephole.
5332 [(set (match_operand:SI 0 "register_operand" "")
5333 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5335 (match_operand 1 "" "")]
5337 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5338 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5340 (mem:SI (unspec:SI [(match_dup 3)
5347 (define_insn "pic_offset_arm"
5348 [(set (match_operand:SI 0 "register_operand" "=r")
5349 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5350 (unspec:SI [(match_operand:SI 2 "" "X")]
5351 UNSPEC_PIC_OFFSET))))]
5352 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5353 "ldr%?\\t%0, [%1,%2]"
5354 [(set_attr "type" "load1")]
5357 (define_expand "builtin_setjmp_receiver"
5358 [(label_ref (match_operand 0 "" ""))]
5362 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5364 if (arm_pic_register != INVALID_REGNUM)
5365 arm_load_pic_register (1UL << 3);
5369 ;; If copying one reg to another we can set the condition codes according to
5370 ;; its value. Such a move is common after a return from subroutine and the
5371 ;; result is being tested against zero.
5373 (define_insn "*movsi_compare0"
5374 [(set (reg:CC CC_REGNUM)
5375 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5377 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5383 [(set_attr "conds" "set")]
5386 ;; Subroutine to store a half word from a register into memory.
5387 ;; Operand 0 is the source register (HImode)
5388 ;; Operand 1 is the destination address in a register (SImode)
5390 ;; In both this routine and the next, we must be careful not to spill
5391 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5392 ;; can generate unrecognizable rtl.
5394 (define_expand "storehi"
5395 [;; store the low byte
5396 (set (match_operand 1 "" "") (match_dup 3))
5397 ;; extract the high byte
5399 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5400 ;; store the high byte
5401 (set (match_dup 4) (match_dup 5))]
5405 rtx op1 = operands[1];
5406 rtx addr = XEXP (op1, 0);
5407 enum rtx_code code = GET_CODE (addr);
5409 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5411 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5413 operands[4] = adjust_address (op1, QImode, 1);
5414 operands[1] = adjust_address (operands[1], QImode, 0);
5415 operands[3] = gen_lowpart (QImode, operands[0]);
5416 operands[0] = gen_lowpart (SImode, operands[0]);
5417 operands[2] = gen_reg_rtx (SImode);
5418 operands[5] = gen_lowpart (QImode, operands[2]);
5422 (define_expand "storehi_bigend"
5423 [(set (match_dup 4) (match_dup 3))
5425 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5426 (set (match_operand 1 "" "") (match_dup 5))]
5430 rtx op1 = operands[1];
5431 rtx addr = XEXP (op1, 0);
5432 enum rtx_code code = GET_CODE (addr);
5434 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5436 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5438 operands[4] = adjust_address (op1, QImode, 1);
5439 operands[1] = adjust_address (operands[1], QImode, 0);
5440 operands[3] = gen_lowpart (QImode, operands[0]);
5441 operands[0] = gen_lowpart (SImode, operands[0]);
5442 operands[2] = gen_reg_rtx (SImode);
5443 operands[5] = gen_lowpart (QImode, operands[2]);
5447 ;; Subroutine to store a half word integer constant into memory.
5448 (define_expand "storeinthi"
5449 [(set (match_operand 0 "" "")
5450 (match_operand 1 "" ""))
5451 (set (match_dup 3) (match_dup 2))]
5455 HOST_WIDE_INT value = INTVAL (operands[1]);
5456 rtx addr = XEXP (operands[0], 0);
5457 rtx op0 = operands[0];
5458 enum rtx_code code = GET_CODE (addr);
5460 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5462 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5464 operands[1] = gen_reg_rtx (SImode);
5465 if (BYTES_BIG_ENDIAN)
5467 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5468 if ((value & 255) == ((value >> 8) & 255))
5469 operands[2] = operands[1];
5472 operands[2] = gen_reg_rtx (SImode);
5473 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5478 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5479 if ((value & 255) == ((value >> 8) & 255))
5480 operands[2] = operands[1];
5483 operands[2] = gen_reg_rtx (SImode);
5484 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5488 operands[3] = adjust_address (op0, QImode, 1);
5489 operands[0] = adjust_address (operands[0], QImode, 0);
5490 operands[2] = gen_lowpart (QImode, operands[2]);
5491 operands[1] = gen_lowpart (QImode, operands[1]);
5495 (define_expand "storehi_single_op"
5496 [(set (match_operand:HI 0 "memory_operand" "")
5497 (match_operand:HI 1 "general_operand" ""))]
5498 "TARGET_32BIT && arm_arch4"
5500 if (!s_register_operand (operands[1], HImode))
5501 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5505 (define_expand "movhi"
5506 [(set (match_operand:HI 0 "general_operand" "")
5507 (match_operand:HI 1 "general_operand" ""))]
5512 if (can_create_pseudo_p ())
5514 if (GET_CODE (operands[0]) == MEM)
5518 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5521 if (GET_CODE (operands[1]) == CONST_INT)
5522 emit_insn (gen_storeinthi (operands[0], operands[1]));
5525 if (GET_CODE (operands[1]) == MEM)
5526 operands[1] = force_reg (HImode, operands[1]);
5527 if (BYTES_BIG_ENDIAN)
5528 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5530 emit_insn (gen_storehi (operands[1], operands[0]));
5534 /* Sign extend a constant, and keep it in an SImode reg. */
5535 else if (GET_CODE (operands[1]) == CONST_INT)
5537 rtx reg = gen_reg_rtx (SImode);
5538 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5540 /* If the constant is already valid, leave it alone. */
5541 if (!const_ok_for_arm (val))
5543 /* If setting all the top bits will make the constant
5544 loadable in a single instruction, then set them.
5545 Otherwise, sign extend the number. */
5547 if (const_ok_for_arm (~(val | ~0xffff)))
5549 else if (val & 0x8000)
5553 emit_insn (gen_movsi (reg, GEN_INT (val)));
5554 operands[1] = gen_lowpart (HImode, reg);
5556 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5557 && GET_CODE (operands[1]) == MEM)
5559 rtx reg = gen_reg_rtx (SImode);
5561 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5562 operands[1] = gen_lowpart (HImode, reg);
5564 else if (!arm_arch4)
5566 if (GET_CODE (operands[1]) == MEM)
5569 rtx offset = const0_rtx;
5570 rtx reg = gen_reg_rtx (SImode);
5572 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5573 || (GET_CODE (base) == PLUS
5574 && (GET_CODE (offset = XEXP (base, 1))
5576 && ((INTVAL(offset) & 1) != 1)
5577 && GET_CODE (base = XEXP (base, 0)) == REG))
5578 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5582 new_rtx = widen_memory_access (operands[1], SImode,
5583 ((INTVAL (offset) & ~3)
5584 - INTVAL (offset)));
5585 emit_insn (gen_movsi (reg, new_rtx));
5586 if (((INTVAL (offset) & 2) != 0)
5587 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5589 rtx reg2 = gen_reg_rtx (SImode);
5591 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5596 emit_insn (gen_movhi_bytes (reg, operands[1]));
5598 operands[1] = gen_lowpart (HImode, reg);
5602 /* Handle loading a large integer during reload. */
5603 else if (GET_CODE (operands[1]) == CONST_INT
5604 && !const_ok_for_arm (INTVAL (operands[1]))
5605 && !const_ok_for_arm (~INTVAL (operands[1])))
5607 /* Writing a constant to memory needs a scratch, which should
5608 be handled with SECONDARY_RELOADs. */
5609 gcc_assert (GET_CODE (operands[0]) == REG);
5611 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5612 emit_insn (gen_movsi (operands[0], operands[1]));
5616 else if (TARGET_THUMB2)
5618 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5619 if (can_create_pseudo_p ())
5621 if (GET_CODE (operands[0]) != REG)
5622 operands[1] = force_reg (HImode, operands[1]);
5623 /* Zero extend a constant, and keep it in an SImode reg. */
5624 else if (GET_CODE (operands[1]) == CONST_INT)
5626 rtx reg = gen_reg_rtx (SImode);
5627 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5629 emit_insn (gen_movsi (reg, GEN_INT (val)));
5630 operands[1] = gen_lowpart (HImode, reg);
5634 else /* TARGET_THUMB1 */
5636 if (can_create_pseudo_p ())
5638 if (GET_CODE (operands[1]) == CONST_INT)
5640 rtx reg = gen_reg_rtx (SImode);
5642 emit_insn (gen_movsi (reg, operands[1]));
5643 operands[1] = gen_lowpart (HImode, reg);
5646 /* ??? We shouldn't really get invalid addresses here, but this can
5647 happen if we are passed a SP (never OK for HImode/QImode) or
5648 virtual register (also rejected as illegitimate for HImode/QImode)
5649 relative address. */
5650 /* ??? This should perhaps be fixed elsewhere, for instance, in
5651 fixup_stack_1, by checking for other kinds of invalid addresses,
5652 e.g. a bare reference to a virtual register. This may confuse the
5653 alpha though, which must handle this case differently. */
5654 if (GET_CODE (operands[0]) == MEM
5655 && !memory_address_p (GET_MODE (operands[0]),
5656 XEXP (operands[0], 0)))
5658 = replace_equiv_address (operands[0],
5659 copy_to_reg (XEXP (operands[0], 0)));
5661 if (GET_CODE (operands[1]) == MEM
5662 && !memory_address_p (GET_MODE (operands[1]),
5663 XEXP (operands[1], 0)))
5665 = replace_equiv_address (operands[1],
5666 copy_to_reg (XEXP (operands[1], 0)));
5668 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5670 rtx reg = gen_reg_rtx (SImode);
5672 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5673 operands[1] = gen_lowpart (HImode, reg);
5676 if (GET_CODE (operands[0]) == MEM)
5677 operands[1] = force_reg (HImode, operands[1]);
5679 else if (GET_CODE (operands[1]) == CONST_INT
5680 && !satisfies_constraint_I (operands[1]))
5682 /* Handle loading a large integer during reload. */
5684 /* Writing a constant to memory needs a scratch, which should
5685 be handled with SECONDARY_RELOADs. */
5686 gcc_assert (GET_CODE (operands[0]) == REG);
5688 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5689 emit_insn (gen_movsi (operands[0], operands[1]));
5696 (define_insn "*thumb1_movhi_insn"
5697 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5698 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5700 && ( register_operand (operands[0], HImode)
5701 || register_operand (operands[1], HImode))"
5703 switch (which_alternative)
5705 case 0: return \"add %0, %1, #0\";
5706 case 2: return \"strh %1, %0\";
5707 case 3: return \"mov %0, %1\";
5708 case 4: return \"mov %0, %1\";
5709 case 5: return \"mov %0, %1\";
5710 default: gcc_unreachable ();
5712 /* The stack pointer can end up being taken as an index register.
5713 Catch this case here and deal with it. */
5714 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5715 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5716 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5719 ops[0] = operands[0];
5720 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5722 output_asm_insn (\"mov %0, %1\", ops);
5724 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5727 return \"ldrh %0, %1\";
5729 [(set_attr "length" "2,4,2,2,2,2")
5730 (set_attr "type" "*,load1,store1,*,*,*")
5731 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5734 (define_expand "movhi_bytes"
5735 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5737 (zero_extend:SI (match_dup 6)))
5738 (set (match_operand:SI 0 "" "")
5739 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5744 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5746 mem1 = change_address (operands[1], QImode, addr);
5747 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5748 operands[0] = gen_lowpart (SImode, operands[0]);
5750 operands[2] = gen_reg_rtx (SImode);
5751 operands[3] = gen_reg_rtx (SImode);
5754 if (BYTES_BIG_ENDIAN)
5756 operands[4] = operands[2];
5757 operands[5] = operands[3];
5761 operands[4] = operands[3];
5762 operands[5] = operands[2];
5767 (define_expand "movhi_bigend"
5769 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5772 (ashiftrt:SI (match_dup 2) (const_int 16)))
5773 (set (match_operand:HI 0 "s_register_operand" "")
5777 operands[2] = gen_reg_rtx (SImode);
5778 operands[3] = gen_reg_rtx (SImode);
5779 operands[4] = gen_lowpart (HImode, operands[3]);
5783 ;; Pattern to recognize insn generated default case above
5784 (define_insn "*movhi_insn_arch4"
5785 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5786 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5789 && (GET_CODE (operands[1]) != CONST_INT
5790 || const_ok_for_arm (INTVAL (operands[1]))
5791 || const_ok_for_arm (~INTVAL (operands[1])))"
5793 mov%?\\t%0, %1\\t%@ movhi
5794 mvn%?\\t%0, #%B1\\t%@ movhi
5795 str%(h%)\\t%1, %0\\t%@ movhi
5796 ldr%(h%)\\t%0, %1\\t%@ movhi"
5797 [(set_attr "type" "*,*,store1,load1")
5798 (set_attr "predicable" "yes")
5799 (set_attr "insn" "mov,mvn,*,*")
5800 (set_attr "pool_range" "*,*,*,256")
5801 (set_attr "neg_pool_range" "*,*,*,244")]
5804 (define_insn "*movhi_bytes"
5805 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5806 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5809 mov%?\\t%0, %1\\t%@ movhi
5810 mvn%?\\t%0, #%B1\\t%@ movhi"
5811 [(set_attr "predicable" "yes")
5812 (set_attr "insn" "mov,mvn")]
5815 (define_expand "thumb_movhi_clobber"
5816 [(set (match_operand:HI 0 "memory_operand" "")
5817 (match_operand:HI 1 "register_operand" ""))
5818 (clobber (match_operand:DI 2 "register_operand" ""))]
5821 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5822 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5824 emit_insn (gen_movhi (operands[0], operands[1]));
5827 /* XXX Fixme, need to handle other cases here as well. */
5832 ;; We use a DImode scratch because we may occasionally need an additional
5833 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5834 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5835 (define_expand "reload_outhi"
5836 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5837 (match_operand:HI 1 "s_register_operand" "r")
5838 (match_operand:DI 2 "s_register_operand" "=&l")])]
5841 arm_reload_out_hi (operands);
5843 thumb_reload_out_hi (operands);
5848 (define_expand "reload_inhi"
5849 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5850 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5851 (match_operand:DI 2 "s_register_operand" "=&r")])]
5855 arm_reload_in_hi (operands);
5857 thumb_reload_out_hi (operands);
5861 (define_expand "movqi"
5862 [(set (match_operand:QI 0 "general_operand" "")
5863 (match_operand:QI 1 "general_operand" ""))]
5866 /* Everything except mem = const or mem = mem can be done easily */
5868 if (can_create_pseudo_p ())
5870 if (GET_CODE (operands[1]) == CONST_INT)
5872 rtx reg = gen_reg_rtx (SImode);
5874 /* For thumb we want an unsigned immediate, then we are more likely
5875 to be able to use a movs insn. */
5877 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5879 emit_insn (gen_movsi (reg, operands[1]));
5880 operands[1] = gen_lowpart (QImode, reg);
5885 /* ??? We shouldn't really get invalid addresses here, but this can
5886 happen if we are passed a SP (never OK for HImode/QImode) or
5887 virtual register (also rejected as illegitimate for HImode/QImode)
5888 relative address. */
5889 /* ??? This should perhaps be fixed elsewhere, for instance, in
5890 fixup_stack_1, by checking for other kinds of invalid addresses,
5891 e.g. a bare reference to a virtual register. This may confuse the
5892 alpha though, which must handle this case differently. */
5893 if (GET_CODE (operands[0]) == MEM
5894 && !memory_address_p (GET_MODE (operands[0]),
5895 XEXP (operands[0], 0)))
5897 = replace_equiv_address (operands[0],
5898 copy_to_reg (XEXP (operands[0], 0)));
5899 if (GET_CODE (operands[1]) == MEM
5900 && !memory_address_p (GET_MODE (operands[1]),
5901 XEXP (operands[1], 0)))
5903 = replace_equiv_address (operands[1],
5904 copy_to_reg (XEXP (operands[1], 0)));
5907 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5909 rtx reg = gen_reg_rtx (SImode);
5911 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5912 operands[1] = gen_lowpart (QImode, reg);
5915 if (GET_CODE (operands[0]) == MEM)
5916 operands[1] = force_reg (QImode, operands[1]);
5918 else if (TARGET_THUMB
5919 && GET_CODE (operands[1]) == CONST_INT
5920 && !satisfies_constraint_I (operands[1]))
5922 /* Handle loading a large integer during reload. */
5924 /* Writing a constant to memory needs a scratch, which should
5925 be handled with SECONDARY_RELOADs. */
5926 gcc_assert (GET_CODE (operands[0]) == REG);
5928 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5929 emit_insn (gen_movsi (operands[0], operands[1]));
5936 (define_insn "*arm_movqi_insn"
5937 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5938 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5940 && ( register_operand (operands[0], QImode)
5941 || register_operand (operands[1], QImode))"
5947 [(set_attr "type" "*,*,load1,store1")
5948 (set_attr "insn" "mov,mvn,*,*")
5949 (set_attr "predicable" "yes")]
5952 (define_insn "*thumb1_movqi_insn"
5953 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5954 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5956 && ( register_operand (operands[0], QImode)
5957 || register_operand (operands[1], QImode))"
5965 [(set_attr "length" "2")
5966 (set_attr "type" "*,load1,store1,*,*,*")
5967 (set_attr "insn" "*,*,*,mov,mov,mov")
5968 (set_attr "pool_range" "*,32,*,*,*,*")
5969 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5972 (define_expand "movhf"
5973 [(set (match_operand:HF 0 "general_operand" "")
5974 (match_operand:HF 1 "general_operand" ""))]
5979 if (GET_CODE (operands[0]) == MEM)
5980 operands[1] = force_reg (HFmode, operands[1]);
5982 else /* TARGET_THUMB1 */
5984 if (can_create_pseudo_p ())
5986 if (GET_CODE (operands[0]) != REG)
5987 operands[1] = force_reg (HFmode, operands[1]);
5993 (define_insn "*arm32_movhf"
5994 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5995 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5996 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5997 && ( s_register_operand (operands[0], HFmode)
5998 || s_register_operand (operands[1], HFmode))"
6000 switch (which_alternative)
6002 case 0: /* ARM register from memory */
6003 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6004 case 1: /* memory from ARM register */
6005 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6006 case 2: /* ARM register from ARM register */
6007 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6008 case 3: /* ARM register from constant */
6014 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6015 bits = real_to_target (NULL, &r, HFmode);
6016 ops[0] = operands[0];
6017 ops[1] = GEN_INT (bits);
6018 ops[2] = GEN_INT (bits & 0xff00);
6019 ops[3] = GEN_INT (bits & 0x00ff);
6021 if (arm_arch_thumb2)
6022 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6024 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6031 [(set_attr "conds" "unconditional")
6032 (set_attr "type" "load1,store1,*,*")
6033 (set_attr "insn" "*,*,mov,mov")
6034 (set_attr "length" "4,4,4,8")
6035 (set_attr "predicable" "yes")]
6038 (define_insn "*thumb1_movhf"
6039 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6040 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6042 && ( s_register_operand (operands[0], HFmode)
6043 || s_register_operand (operands[1], HFmode))"
6045 switch (which_alternative)
6050 gcc_assert (GET_CODE(operands[1]) == MEM);
6051 addr = XEXP (operands[1], 0);
6052 if (GET_CODE (addr) == LABEL_REF
6053 || (GET_CODE (addr) == CONST
6054 && GET_CODE (XEXP (addr, 0)) == PLUS
6055 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6056 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6058 /* Constant pool entry. */
6059 return \"ldr\\t%0, %1\";
6061 return \"ldrh\\t%0, %1\";
6063 case 2: return \"strh\\t%1, %0\";
6064 default: return \"mov\\t%0, %1\";
6067 [(set_attr "length" "2")
6068 (set_attr "type" "*,load1,store1,*,*")
6069 (set_attr "insn" "mov,*,*,mov,mov")
6070 (set_attr "pool_range" "*,1020,*,*,*")
6071 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6073 (define_expand "movsf"
6074 [(set (match_operand:SF 0 "general_operand" "")
6075 (match_operand:SF 1 "general_operand" ""))]
6080 if (GET_CODE (operands[0]) == MEM)
6081 operands[1] = force_reg (SFmode, operands[1]);
6083 else /* TARGET_THUMB1 */
6085 if (can_create_pseudo_p ())
6087 if (GET_CODE (operands[0]) != REG)
6088 operands[1] = force_reg (SFmode, operands[1]);
6094 ;; Transform a floating-point move of a constant into a core register into
6095 ;; an SImode operation.
6097 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6098 (match_operand:SF 1 "immediate_operand" ""))]
6101 && GET_CODE (operands[1]) == CONST_DOUBLE"
6102 [(set (match_dup 2) (match_dup 3))]
6104 operands[2] = gen_lowpart (SImode, operands[0]);
6105 operands[3] = gen_lowpart (SImode, operands[1]);
6106 if (operands[2] == 0 || operands[3] == 0)
6111 (define_insn "*arm_movsf_soft_insn"
6112 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6113 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6115 && TARGET_SOFT_FLOAT
6116 && (GET_CODE (operands[0]) != MEM
6117 || register_operand (operands[1], SFmode))"
6120 ldr%?\\t%0, %1\\t%@ float
6121 str%?\\t%1, %0\\t%@ float"
6122 [(set_attr "predicable" "yes")
6123 (set_attr "type" "*,load1,store1")
6124 (set_attr "insn" "mov,*,*")
6125 (set_attr "pool_range" "*,4096,*")
6126 (set_attr "arm_neg_pool_range" "*,4084,*")
6127 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6130 ;;; ??? This should have alternatives for constants.
6131 (define_insn "*thumb1_movsf_insn"
6132 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6133 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6135 && ( register_operand (operands[0], SFmode)
6136 || register_operand (operands[1], SFmode))"
6145 [(set_attr "length" "2")
6146 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6147 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6148 (set_attr "insn" "*,*,*,*,*,mov,mov")
6149 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6152 (define_expand "movdf"
6153 [(set (match_operand:DF 0 "general_operand" "")
6154 (match_operand:DF 1 "general_operand" ""))]
6159 if (GET_CODE (operands[0]) == MEM)
6160 operands[1] = force_reg (DFmode, operands[1]);
6162 else /* TARGET_THUMB */
6164 if (can_create_pseudo_p ())
6166 if (GET_CODE (operands[0]) != REG)
6167 operands[1] = force_reg (DFmode, operands[1]);
6173 ;; Reloading a df mode value stored in integer regs to memory can require a
6175 (define_expand "reload_outdf"
6176 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6177 (match_operand:DF 1 "s_register_operand" "r")
6178 (match_operand:SI 2 "s_register_operand" "=&r")]
6182 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6185 operands[2] = XEXP (operands[0], 0);
6186 else if (code == POST_INC || code == PRE_DEC)
6188 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6189 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6190 emit_insn (gen_movdi (operands[0], operands[1]));
6193 else if (code == PRE_INC)
6195 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6197 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6200 else if (code == POST_DEC)
6201 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6203 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6204 XEXP (XEXP (operands[0], 0), 1)));
6206 emit_insn (gen_rtx_SET (VOIDmode,
6207 replace_equiv_address (operands[0], operands[2]),
6210 if (code == POST_DEC)
6211 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6217 (define_insn "*movdf_soft_insn"
6218 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6219 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6220 "TARGET_32BIT && TARGET_SOFT_FLOAT
6221 && ( register_operand (operands[0], DFmode)
6222 || register_operand (operands[1], DFmode))"
6224 switch (which_alternative)
6231 return output_move_double (operands);
6234 [(set_attr "length" "8,12,16,8,8")
6235 (set_attr "type" "*,*,*,load2,store2")
6236 (set_attr "pool_range" "*,*,*,1020,*")
6237 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6238 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6241 ;;; ??? This should have alternatives for constants.
6242 ;;; ??? This was originally identical to the movdi_insn pattern.
6243 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6244 ;;; thumb_reorg with a memory reference.
6245 (define_insn "*thumb_movdf_insn"
6246 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6247 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6249 && ( register_operand (operands[0], DFmode)
6250 || register_operand (operands[1], DFmode))"
6252 switch (which_alternative)
6256 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6257 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6258 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6260 return \"ldmia\\t%1, {%0, %H0}\";
6262 return \"stmia\\t%0, {%1, %H1}\";
6264 return thumb_load_double_from_address (operands);
6266 operands[2] = gen_rtx_MEM (SImode,
6267 plus_constant (XEXP (operands[0], 0), 4));
6268 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6271 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6272 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6273 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6276 [(set_attr "length" "4,2,2,6,4,4")
6277 (set_attr "type" "*,load2,store2,load2,store2,*")
6278 (set_attr "insn" "*,*,*,*,*,mov")
6279 (set_attr "pool_range" "*,*,*,1020,*,*")]
6282 (define_expand "movxf"
6283 [(set (match_operand:XF 0 "general_operand" "")
6284 (match_operand:XF 1 "general_operand" ""))]
6285 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6287 if (GET_CODE (operands[0]) == MEM)
6288 operands[1] = force_reg (XFmode, operands[1]);
6294 ;; load- and store-multiple insns
6295 ;; The arm can load/store any set of registers, provided that they are in
6296 ;; ascending order, but these expanders assume a contiguous set.
6298 (define_expand "load_multiple"
6299 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6300 (match_operand:SI 1 "" ""))
6301 (use (match_operand:SI 2 "" ""))])]
6304 HOST_WIDE_INT offset = 0;
6306 /* Support only fixed point registers. */
6307 if (GET_CODE (operands[2]) != CONST_INT
6308 || INTVAL (operands[2]) > 14
6309 || INTVAL (operands[2]) < 2
6310 || GET_CODE (operands[1]) != MEM
6311 || GET_CODE (operands[0]) != REG
6312 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6313 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6317 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6318 INTVAL (operands[2]),
6319 force_reg (SImode, XEXP (operands[1], 0)),
6320 FALSE, operands[1], &offset);
6323 (define_expand "store_multiple"
6324 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6325 (match_operand:SI 1 "" ""))
6326 (use (match_operand:SI 2 "" ""))])]
6329 HOST_WIDE_INT offset = 0;
6331 /* Support only fixed point registers. */
6332 if (GET_CODE (operands[2]) != CONST_INT
6333 || INTVAL (operands[2]) > 14
6334 || INTVAL (operands[2]) < 2
6335 || GET_CODE (operands[1]) != REG
6336 || GET_CODE (operands[0]) != MEM
6337 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6338 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6342 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6343 INTVAL (operands[2]),
6344 force_reg (SImode, XEXP (operands[0], 0)),
6345 FALSE, operands[0], &offset);
6349 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6350 ;; We could let this apply for blocks of less than this, but it clobbers so
6351 ;; many registers that there is then probably a better way.
6353 (define_expand "movmemqi"
6354 [(match_operand:BLK 0 "general_operand" "")
6355 (match_operand:BLK 1 "general_operand" "")
6356 (match_operand:SI 2 "const_int_operand" "")
6357 (match_operand:SI 3 "const_int_operand" "")]
6362 if (arm_gen_movmemqi (operands))
6366 else /* TARGET_THUMB1 */
6368 if ( INTVAL (operands[3]) != 4
6369 || INTVAL (operands[2]) > 48)
6372 thumb_expand_movmemqi (operands);
6378 ;; Thumb block-move insns
6380 (define_insn "movmem12b"
6381 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6382 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6383 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6384 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6385 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6386 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6387 (set (match_operand:SI 0 "register_operand" "=l")
6388 (plus:SI (match_dup 2) (const_int 12)))
6389 (set (match_operand:SI 1 "register_operand" "=l")
6390 (plus:SI (match_dup 3) (const_int 12)))
6391 (clobber (match_scratch:SI 4 "=&l"))
6392 (clobber (match_scratch:SI 5 "=&l"))
6393 (clobber (match_scratch:SI 6 "=&l"))]
6395 "* return thumb_output_move_mem_multiple (3, operands);"
6396 [(set_attr "length" "4")
6397 ; This isn't entirely accurate... It loads as well, but in terms of
6398 ; scheduling the following insn it is better to consider it as a store
6399 (set_attr "type" "store3")]
6402 (define_insn "movmem8b"
6403 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6404 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6405 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6406 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6407 (set (match_operand:SI 0 "register_operand" "=l")
6408 (plus:SI (match_dup 2) (const_int 8)))
6409 (set (match_operand:SI 1 "register_operand" "=l")
6410 (plus:SI (match_dup 3) (const_int 8)))
6411 (clobber (match_scratch:SI 4 "=&l"))
6412 (clobber (match_scratch:SI 5 "=&l"))]
6414 "* return thumb_output_move_mem_multiple (2, operands);"
6415 [(set_attr "length" "4")
6416 ; This isn't entirely accurate... It loads as well, but in terms of
6417 ; scheduling the following insn it is better to consider it as a store
6418 (set_attr "type" "store2")]
6423 ;; Compare & branch insns
6424 ;; The range calculations are based as follows:
6425 ;; For forward branches, the address calculation returns the address of
6426 ;; the next instruction. This is 2 beyond the branch instruction.
6427 ;; For backward branches, the address calculation returns the address of
6428 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6429 ;; instruction for the shortest sequence, and 4 before the branch instruction
6430 ;; if we have to jump around an unconditional branch.
6431 ;; To the basic branch range the PC offset must be added (this is +4).
6432 ;; So for forward branches we have
6433 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6434 ;; And for backward branches we have
6435 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6437 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6438 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6440 (define_expand "cbranchsi4"
6441 [(set (pc) (if_then_else
6442 (match_operator 0 "arm_comparison_operator"
6443 [(match_operand:SI 1 "s_register_operand" "")
6444 (match_operand:SI 2 "nonmemory_operand" "")])
6445 (label_ref (match_operand 3 "" ""))
6447 "TARGET_THUMB1 || TARGET_32BIT"
6451 if (!arm_add_operand (operands[2], SImode))
6452 operands[2] = force_reg (SImode, operands[2]);
6453 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6457 if (thumb1_cmpneg_operand (operands[2], SImode))
6459 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6460 operands[3], operands[0]));
6463 if (!thumb1_cmp_operand (operands[2], SImode))
6464 operands[2] = force_reg (SImode, operands[2]);
6467 ;; A pattern to recognize a special situation and optimize for it.
6468 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6469 ;; due to the available addressing modes. Hence, convert a signed comparison
6470 ;; with zero into an unsigned comparison with 127 if possible.
6471 (define_expand "cbranchqi4"
6472 [(set (pc) (if_then_else
6473 (match_operator 0 "lt_ge_comparison_operator"
6474 [(match_operand:QI 1 "memory_operand" "")
6475 (match_operand:QI 2 "const0_operand" "")])
6476 (label_ref (match_operand 3 "" ""))
6481 xops[1] = gen_reg_rtx (SImode);
6482 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6483 xops[2] = GEN_INT (127);
6484 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6485 VOIDmode, xops[1], xops[2]);
6486 xops[3] = operands[3];
6487 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6491 (define_expand "cbranchsf4"
6492 [(set (pc) (if_then_else
6493 (match_operator 0 "arm_comparison_operator"
6494 [(match_operand:SF 1 "s_register_operand" "")
6495 (match_operand:SF 2 "arm_float_compare_operand" "")])
6496 (label_ref (match_operand 3 "" ""))
6498 "TARGET_32BIT && TARGET_HARD_FLOAT"
6499 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6500 operands[3])); DONE;"
6503 (define_expand "cbranchdf4"
6504 [(set (pc) (if_then_else
6505 (match_operator 0 "arm_comparison_operator"
6506 [(match_operand:DF 1 "s_register_operand" "")
6507 (match_operand:DF 2 "arm_float_compare_operand" "")])
6508 (label_ref (match_operand 3 "" ""))
6510 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6511 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6512 operands[3])); DONE;"
6515 (define_expand "cbranchdi4"
6516 [(set (pc) (if_then_else
6517 (match_operator 0 "arm_comparison_operator"
6518 [(match_operand:DI 1 "cmpdi_operand" "")
6519 (match_operand:DI 2 "cmpdi_operand" "")])
6520 (label_ref (match_operand 3 "" ""))
6524 rtx swap = NULL_RTX;
6525 enum rtx_code code = GET_CODE (operands[0]);
6527 /* We should not have two constants. */
6528 gcc_assert (GET_MODE (operands[1]) == DImode
6529 || GET_MODE (operands[2]) == DImode);
6531 /* Flip unimplemented DImode comparisons to a form that
6532 arm_gen_compare_reg can handle. */
6536 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6538 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6540 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6542 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6547 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6550 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6556 (define_insn "cbranchsi4_insn"
6557 [(set (pc) (if_then_else
6558 (match_operator 0 "arm_comparison_operator"
6559 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6560 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6561 (label_ref (match_operand 3 "" ""))
6565 rtx t = cfun->machine->thumb1_cc_insn;
6568 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6569 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6571 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6573 if (!noov_comparison_operator (operands[0], VOIDmode))
6576 else if (cfun->machine->thumb1_cc_mode != CCmode)
6581 output_asm_insn ("cmp\t%1, %2", operands);
6582 cfun->machine->thumb1_cc_insn = insn;
6583 cfun->machine->thumb1_cc_op0 = operands[1];
6584 cfun->machine->thumb1_cc_op1 = operands[2];
6585 cfun->machine->thumb1_cc_mode = CCmode;
6588 /* Ensure we emit the right type of condition code on the jump. */
6589 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6592 switch (get_attr_length (insn))
6594 case 4: return \"b%d0\\t%l3\";
6595 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6596 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6599 [(set (attr "far_jump")
6601 (eq_attr "length" "8")
6602 (const_string "yes")
6603 (const_string "no")))
6604 (set (attr "length")
6606 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6607 (le (minus (match_dup 3) (pc)) (const_int 256)))
6610 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6611 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6616 (define_insn "cbranchsi4_scratch"
6617 [(set (pc) (if_then_else
6618 (match_operator 4 "arm_comparison_operator"
6619 [(match_operand:SI 1 "s_register_operand" "l,0")
6620 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6621 (label_ref (match_operand 3 "" ""))
6623 (clobber (match_scratch:SI 0 "=l,l"))]
6626 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6628 switch (get_attr_length (insn))
6630 case 4: return \"b%d4\\t%l3\";
6631 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6632 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6635 [(set (attr "far_jump")
6637 (eq_attr "length" "8")
6638 (const_string "yes")
6639 (const_string "no")))
6640 (set (attr "length")
6642 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6643 (le (minus (match_dup 3) (pc)) (const_int 256)))
6646 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6647 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6652 ;; Two peepholes to generate subtract of 0 instead of a move if the
6653 ;; condition codes will be useful.
6655 [(set (match_operand:SI 0 "low_register_operand" "")
6656 (match_operand:SI 1 "low_register_operand" ""))
6658 (if_then_else (match_operator 2 "arm_comparison_operator"
6659 [(match_dup 1) (const_int 0)])
6660 (label_ref (match_operand 3 "" ""))
6663 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6665 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6666 (label_ref (match_dup 3))
6670 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6671 ;; merge cases like this because the op1 is a hard register in
6672 ;; CLASS_LIKELY_SPILLED_P.
6674 [(set (match_operand:SI 0 "low_register_operand" "")
6675 (match_operand:SI 1 "low_register_operand" ""))
6677 (if_then_else (match_operator 2 "arm_comparison_operator"
6678 [(match_dup 0) (const_int 0)])
6679 (label_ref (match_operand 3 "" ""))
6682 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6684 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6685 (label_ref (match_dup 3))
6689 (define_insn "*negated_cbranchsi4"
6692 (match_operator 0 "equality_operator"
6693 [(match_operand:SI 1 "s_register_operand" "l")
6694 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6695 (label_ref (match_operand 3 "" ""))
6699 output_asm_insn (\"cmn\\t%1, %2\", operands);
6700 switch (get_attr_length (insn))
6702 case 4: return \"b%d0\\t%l3\";
6703 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6704 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6707 [(set (attr "far_jump")
6709 (eq_attr "length" "8")
6710 (const_string "yes")
6711 (const_string "no")))
6712 (set (attr "length")
6714 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6715 (le (minus (match_dup 3) (pc)) (const_int 256)))
6718 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6719 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6724 (define_insn "*tbit_cbranch"
6727 (match_operator 0 "equality_operator"
6728 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6730 (match_operand:SI 2 "const_int_operand" "i"))
6732 (label_ref (match_operand 3 "" ""))
6734 (clobber (match_scratch:SI 4 "=l"))]
6739 op[0] = operands[4];
6740 op[1] = operands[1];
6741 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6743 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6744 switch (get_attr_length (insn))
6746 case 4: return \"b%d0\\t%l3\";
6747 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6748 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6751 [(set (attr "far_jump")
6753 (eq_attr "length" "8")
6754 (const_string "yes")
6755 (const_string "no")))
6756 (set (attr "length")
6758 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6759 (le (minus (match_dup 3) (pc)) (const_int 256)))
6762 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6763 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6768 (define_insn "*tlobits_cbranch"
6771 (match_operator 0 "equality_operator"
6772 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6773 (match_operand:SI 2 "const_int_operand" "i")
6776 (label_ref (match_operand 3 "" ""))
6778 (clobber (match_scratch:SI 4 "=l"))]
6783 op[0] = operands[4];
6784 op[1] = operands[1];
6785 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6787 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6788 switch (get_attr_length (insn))
6790 case 4: return \"b%d0\\t%l3\";
6791 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6792 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6795 [(set (attr "far_jump")
6797 (eq_attr "length" "8")
6798 (const_string "yes")
6799 (const_string "no")))
6800 (set (attr "length")
6802 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6803 (le (minus (match_dup 3) (pc)) (const_int 256)))
6806 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6807 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6812 (define_insn "*tstsi3_cbranch"
6815 (match_operator 3 "equality_operator"
6816 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6817 (match_operand:SI 1 "s_register_operand" "l"))
6819 (label_ref (match_operand 2 "" ""))
6824 output_asm_insn (\"tst\\t%0, %1\", operands);
6825 switch (get_attr_length (insn))
6827 case 4: return \"b%d3\\t%l2\";
6828 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6829 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6832 [(set (attr "far_jump")
6834 (eq_attr "length" "8")
6835 (const_string "yes")
6836 (const_string "no")))
6837 (set (attr "length")
6839 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6840 (le (minus (match_dup 2) (pc)) (const_int 256)))
6843 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6844 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6849 (define_insn "*cbranchne_decr1"
6851 (if_then_else (match_operator 3 "equality_operator"
6852 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6854 (label_ref (match_operand 4 "" ""))
6856 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6857 (plus:SI (match_dup 2) (const_int -1)))
6858 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6863 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6865 VOIDmode, operands[2], const1_rtx);
6866 cond[1] = operands[4];
6868 if (which_alternative == 0)
6869 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6870 else if (which_alternative == 1)
6872 /* We must provide an alternative for a hi reg because reload
6873 cannot handle output reloads on a jump instruction, but we
6874 can't subtract into that. Fortunately a mov from lo to hi
6875 does not clobber the condition codes. */
6876 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6877 output_asm_insn (\"mov\\t%0, %1\", operands);
6881 /* Similarly, but the target is memory. */
6882 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6883 output_asm_insn (\"str\\t%1, %0\", operands);
6886 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6889 output_asm_insn (\"b%d0\\t%l1\", cond);
6892 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6893 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6895 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6896 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6900 [(set (attr "far_jump")
6902 (ior (and (eq (symbol_ref ("which_alternative"))
6904 (eq_attr "length" "8"))
6905 (eq_attr "length" "10"))
6906 (const_string "yes")
6907 (const_string "no")))
6908 (set_attr_alternative "length"
6912 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6913 (le (minus (match_dup 4) (pc)) (const_int 256)))
6916 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6917 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6922 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6923 (le (minus (match_dup 4) (pc)) (const_int 256)))
6926 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6927 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6932 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6933 (le (minus (match_dup 4) (pc)) (const_int 256)))
6936 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6937 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6942 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6943 (le (minus (match_dup 4) (pc)) (const_int 256)))
6946 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6947 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6952 (define_insn "*addsi3_cbranch"
6955 (match_operator 4 "arm_comparison_operator"
6957 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6958 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6960 (label_ref (match_operand 5 "" ""))
6963 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6964 (plus:SI (match_dup 2) (match_dup 3)))
6965 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6967 && (GET_CODE (operands[4]) == EQ
6968 || GET_CODE (operands[4]) == NE
6969 || GET_CODE (operands[4]) == GE
6970 || GET_CODE (operands[4]) == LT)"
6975 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6976 cond[1] = operands[2];
6977 cond[2] = operands[3];
6979 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6980 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6982 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6984 if (which_alternative >= 2
6985 && which_alternative < 4)
6986 output_asm_insn (\"mov\\t%0, %1\", operands);
6987 else if (which_alternative >= 4)
6988 output_asm_insn (\"str\\t%1, %0\", operands);
6990 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
6993 return \"b%d4\\t%l5\";
6995 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6997 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7001 [(set (attr "far_jump")
7003 (ior (and (lt (symbol_ref ("which_alternative"))
7005 (eq_attr "length" "8"))
7006 (eq_attr "length" "10"))
7007 (const_string "yes")
7008 (const_string "no")))
7009 (set (attr "length")
7011 (lt (symbol_ref ("which_alternative"))
7014 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7015 (le (minus (match_dup 5) (pc)) (const_int 256)))
7018 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7019 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7023 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7024 (le (minus (match_dup 5) (pc)) (const_int 256)))
7027 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7028 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7033 (define_insn "*addsi3_cbranch_scratch"
7036 (match_operator 3 "arm_comparison_operator"
7038 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7039 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7041 (label_ref (match_operand 4 "" ""))
7043 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7045 && (GET_CODE (operands[3]) == EQ
7046 || GET_CODE (operands[3]) == NE
7047 || GET_CODE (operands[3]) == GE
7048 || GET_CODE (operands[3]) == LT)"
7051 switch (which_alternative)
7054 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7057 output_asm_insn (\"cmn\t%1, %2\", operands);
7060 if (INTVAL (operands[2]) < 0)
7061 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7063 output_asm_insn (\"add\t%0, %1, %2\", operands);
7066 if (INTVAL (operands[2]) < 0)
7067 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7069 output_asm_insn (\"add\t%0, %0, %2\", operands);
7073 switch (get_attr_length (insn))
7076 return \"b%d3\\t%l4\";
7078 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7080 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7084 [(set (attr "far_jump")
7086 (eq_attr "length" "8")
7087 (const_string "yes")
7088 (const_string "no")))
7089 (set (attr "length")
7091 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7092 (le (minus (match_dup 4) (pc)) (const_int 256)))
7095 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7096 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7102 ;; Comparison and test insns
7104 (define_insn "*arm_cmpsi_insn"
7105 [(set (reg:CC CC_REGNUM)
7106 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7107 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7112 [(set_attr "conds" "set")]
7115 (define_insn "*cmpsi_shiftsi"
7116 [(set (reg:CC CC_REGNUM)
7117 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7118 (match_operator:SI 3 "shift_operator"
7119 [(match_operand:SI 1 "s_register_operand" "r,r")
7120 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7123 [(set_attr "conds" "set")
7124 (set_attr "shift" "1")
7125 (set_attr "arch" "32,a")
7126 (set_attr "type" "alu_shift,alu_shift_reg")])
7128 (define_insn "*cmpsi_shiftsi_swp"
7129 [(set (reg:CC_SWP CC_REGNUM)
7130 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7131 [(match_operand:SI 1 "s_register_operand" "r,r")
7132 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7133 (match_operand:SI 0 "s_register_operand" "r,r")))]
7136 [(set_attr "conds" "set")
7137 (set_attr "shift" "1")
7138 (set_attr "arch" "32,a")
7139 (set_attr "type" "alu_shift,alu_shift_reg")])
7141 (define_insn "*arm_cmpsi_negshiftsi_si"
7142 [(set (reg:CC_Z CC_REGNUM)
7144 (neg:SI (match_operator:SI 1 "shift_operator"
7145 [(match_operand:SI 2 "s_register_operand" "r")
7146 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7147 (match_operand:SI 0 "s_register_operand" "r")))]
7150 [(set_attr "conds" "set")
7151 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7152 (const_string "alu_shift")
7153 (const_string "alu_shift_reg")))]
7156 ;; DImode comparisons. The generic code generates branches that
7157 ;; if-conversion can not reduce to a conditional compare, so we do
7160 (define_insn "*arm_cmpdi_insn"
7161 [(set (reg:CC_NCV CC_REGNUM)
7162 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7163 (match_operand:DI 1 "arm_di_operand" "rDi")))
7164 (clobber (match_scratch:SI 2 "=r"))]
7165 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7166 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7167 [(set_attr "conds" "set")
7168 (set_attr "length" "8")]
7171 (define_insn "*arm_cmpdi_unsigned"
7172 [(set (reg:CC_CZ CC_REGNUM)
7173 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7174 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7176 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7177 [(set_attr "conds" "set")
7178 (set_attr "length" "8")]
7181 (define_insn "*arm_cmpdi_zero"
7182 [(set (reg:CC_Z CC_REGNUM)
7183 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7185 (clobber (match_scratch:SI 1 "=r"))]
7187 "orr%.\\t%1, %Q0, %R0"
7188 [(set_attr "conds" "set")]
7191 (define_insn "*thumb_cmpdi_zero"
7192 [(set (reg:CC_Z CC_REGNUM)
7193 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7195 (clobber (match_scratch:SI 1 "=l"))]
7197 "orr\\t%1, %Q0, %R0"
7198 [(set_attr "conds" "set")
7199 (set_attr "length" "2")]
7202 ;; Cirrus SF compare instruction
7203 (define_insn "*cirrus_cmpsf"
7204 [(set (reg:CCFP CC_REGNUM)
7205 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7206 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7207 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7208 "cfcmps%?\\tr15, %V0, %V1"
7209 [(set_attr "type" "mav_farith")
7210 (set_attr "cirrus" "compare")]
7213 ;; Cirrus DF compare instruction
7214 (define_insn "*cirrus_cmpdf"
7215 [(set (reg:CCFP CC_REGNUM)
7216 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7217 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7218 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7219 "cfcmpd%?\\tr15, %V0, %V1"
7220 [(set_attr "type" "mav_farith")
7221 (set_attr "cirrus" "compare")]
7224 (define_insn "*cirrus_cmpdi"
7225 [(set (reg:CC CC_REGNUM)
7226 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7227 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7228 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7229 "cfcmp64%?\\tr15, %V0, %V1"
7230 [(set_attr "type" "mav_farith")
7231 (set_attr "cirrus" "compare")]
7234 ; This insn allows redundant compares to be removed by cse, nothing should
7235 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7236 ; is deleted later on. The match_dup will match the mode here, so that
7237 ; mode changes of the condition codes aren't lost by this even though we don't
7238 ; specify what they are.
7240 (define_insn "*deleted_compare"
7241 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7243 "\\t%@ deleted compare"
7244 [(set_attr "conds" "set")
7245 (set_attr "length" "0")]
7249 ;; Conditional branch insns
7251 (define_expand "cbranch_cc"
7253 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7254 (match_operand 2 "" "")])
7255 (label_ref (match_operand 3 "" ""))
7258 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7259 operands[1], operands[2]);
7260 operands[2] = const0_rtx;"
7264 ;; Patterns to match conditional branch insns.
7267 (define_insn "*arm_cond_branch"
7269 (if_then_else (match_operator 1 "arm_comparison_operator"
7270 [(match_operand 2 "cc_register" "") (const_int 0)])
7271 (label_ref (match_operand 0 "" ""))
7275 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7277 arm_ccfsm_state += 2;
7280 return \"b%d1\\t%l0\";
7282 [(set_attr "conds" "use")
7283 (set_attr "type" "branch")]
7286 (define_insn "*arm_cond_branch_reversed"
7288 (if_then_else (match_operator 1 "arm_comparison_operator"
7289 [(match_operand 2 "cc_register" "") (const_int 0)])
7291 (label_ref (match_operand 0 "" ""))))]
7294 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7296 arm_ccfsm_state += 2;
7299 return \"b%D1\\t%l0\";
7301 [(set_attr "conds" "use")
7302 (set_attr "type" "branch")]
7309 (define_expand "cstore_cc"
7310 [(set (match_operand:SI 0 "s_register_operand" "")
7311 (match_operator:SI 1 "" [(match_operand 2 "" "")
7312 (match_operand 3 "" "")]))]
7314 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7315 operands[2], operands[3]);
7316 operands[3] = const0_rtx;"
7319 (define_insn "*mov_scc"
7320 [(set (match_operand:SI 0 "s_register_operand" "=r")
7321 (match_operator:SI 1 "arm_comparison_operator"
7322 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7324 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7325 [(set_attr "conds" "use")
7326 (set_attr "insn" "mov")
7327 (set_attr "length" "8")]
7330 (define_insn "*mov_negscc"
7331 [(set (match_operand:SI 0 "s_register_operand" "=r")
7332 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7333 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7335 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7336 [(set_attr "conds" "use")
7337 (set_attr "insn" "mov")
7338 (set_attr "length" "8")]
7341 (define_insn "*mov_notscc"
7342 [(set (match_operand:SI 0 "s_register_operand" "=r")
7343 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7344 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7346 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7347 [(set_attr "conds" "use")
7348 (set_attr "insn" "mov")
7349 (set_attr "length" "8")]
7352 (define_expand "cstoresi4"
7353 [(set (match_operand:SI 0 "s_register_operand" "")
7354 (match_operator:SI 1 "arm_comparison_operator"
7355 [(match_operand:SI 2 "s_register_operand" "")
7356 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7357 "TARGET_32BIT || TARGET_THUMB1"
7359 rtx op3, scratch, scratch2;
7363 if (!arm_add_operand (operands[3], SImode))
7364 operands[3] = force_reg (SImode, operands[3]);
7365 emit_insn (gen_cstore_cc (operands[0], operands[1],
7366 operands[2], operands[3]));
7370 if (operands[3] == const0_rtx)
7372 switch (GET_CODE (operands[1]))
7375 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7379 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7383 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7384 NULL_RTX, 0, OPTAB_WIDEN);
7385 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7386 NULL_RTX, 0, OPTAB_WIDEN);
7387 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7388 operands[0], 1, OPTAB_WIDEN);
7392 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7394 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7395 NULL_RTX, 1, OPTAB_WIDEN);
7399 scratch = expand_binop (SImode, ashr_optab, operands[2],
7400 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7401 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7402 NULL_RTX, 0, OPTAB_WIDEN);
7403 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7407 /* LT is handled by generic code. No need for unsigned with 0. */
7414 switch (GET_CODE (operands[1]))
7417 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7418 NULL_RTX, 0, OPTAB_WIDEN);
7419 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7423 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7424 NULL_RTX, 0, OPTAB_WIDEN);
7425 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7429 op3 = force_reg (SImode, operands[3]);
7431 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7432 NULL_RTX, 1, OPTAB_WIDEN);
7433 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7434 NULL_RTX, 0, OPTAB_WIDEN);
7435 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7441 if (!thumb1_cmp_operand (op3, SImode))
7442 op3 = force_reg (SImode, op3);
7443 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7444 NULL_RTX, 0, OPTAB_WIDEN);
7445 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7446 NULL_RTX, 1, OPTAB_WIDEN);
7447 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7452 op3 = force_reg (SImode, operands[3]);
7453 scratch = force_reg (SImode, const0_rtx);
7454 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7460 if (!thumb1_cmp_operand (op3, SImode))
7461 op3 = force_reg (SImode, op3);
7462 scratch = force_reg (SImode, const0_rtx);
7463 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7469 if (!thumb1_cmp_operand (op3, SImode))
7470 op3 = force_reg (SImode, op3);
7471 scratch = gen_reg_rtx (SImode);
7472 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7476 op3 = force_reg (SImode, operands[3]);
7477 scratch = gen_reg_rtx (SImode);
7478 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7481 /* No good sequences for GT, LT. */
7488 (define_expand "cstoresf4"
7489 [(set (match_operand:SI 0 "s_register_operand" "")
7490 (match_operator:SI 1 "arm_comparison_operator"
7491 [(match_operand:SF 2 "s_register_operand" "")
7492 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7493 "TARGET_32BIT && TARGET_HARD_FLOAT"
7494 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7495 operands[2], operands[3])); DONE;"
7498 (define_expand "cstoredf4"
7499 [(set (match_operand:SI 0 "s_register_operand" "")
7500 (match_operator:SI 1 "arm_comparison_operator"
7501 [(match_operand:DF 2 "s_register_operand" "")
7502 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7503 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7504 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7505 operands[2], operands[3])); DONE;"
7508 (define_expand "cstoredi4"
7509 [(set (match_operand:SI 0 "s_register_operand" "")
7510 (match_operator:SI 1 "arm_comparison_operator"
7511 [(match_operand:DI 2 "cmpdi_operand" "")
7512 (match_operand:DI 3 "cmpdi_operand" "")]))]
7515 rtx swap = NULL_RTX;
7516 enum rtx_code code = GET_CODE (operands[1]);
7518 /* We should not have two constants. */
7519 gcc_assert (GET_MODE (operands[2]) == DImode
7520 || GET_MODE (operands[3]) == DImode);
7522 /* Flip unimplemented DImode comparisons to a form that
7523 arm_gen_compare_reg can handle. */
7527 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7529 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7531 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7533 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7538 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7541 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7547 (define_expand "cstoresi_eq0_thumb1"
7549 [(set (match_operand:SI 0 "s_register_operand" "")
7550 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7552 (clobber (match_dup:SI 2))])]
7554 "operands[2] = gen_reg_rtx (SImode);"
7557 (define_expand "cstoresi_ne0_thumb1"
7559 [(set (match_operand:SI 0 "s_register_operand" "")
7560 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7562 (clobber (match_dup:SI 2))])]
7564 "operands[2] = gen_reg_rtx (SImode);"
7567 (define_insn "*cstoresi_eq0_thumb1_insn"
7568 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7569 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7571 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7574 neg\\t%0, %1\;adc\\t%0, %0, %1
7575 neg\\t%2, %1\;adc\\t%0, %1, %2"
7576 [(set_attr "length" "4")]
7579 (define_insn "*cstoresi_ne0_thumb1_insn"
7580 [(set (match_operand:SI 0 "s_register_operand" "=l")
7581 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7583 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7585 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7586 [(set_attr "length" "4")]
7589 ;; Used as part of the expansion of thumb ltu and gtu sequences
7590 (define_insn "cstoresi_nltu_thumb1"
7591 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7592 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7593 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7595 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7596 [(set_attr "length" "4")]
7599 (define_insn_and_split "cstoresi_ltu_thumb1"
7600 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7601 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7602 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7607 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7608 (set (match_dup 0) (neg:SI (match_dup 3)))]
7609 "operands[3] = gen_reg_rtx (SImode);"
7610 [(set_attr "length" "4")]
7613 ;; Used as part of the expansion of thumb les sequence.
7614 (define_insn "thumb1_addsi3_addgeu"
7615 [(set (match_operand:SI 0 "s_register_operand" "=l")
7616 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7617 (match_operand:SI 2 "s_register_operand" "l"))
7618 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7619 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7621 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7622 [(set_attr "length" "4")]
7626 ;; Conditional move insns
7628 (define_expand "movsicc"
7629 [(set (match_operand:SI 0 "s_register_operand" "")
7630 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7631 (match_operand:SI 2 "arm_not_operand" "")
7632 (match_operand:SI 3 "arm_not_operand" "")))]
7636 enum rtx_code code = GET_CODE (operands[1]);
7639 if (code == UNEQ || code == LTGT)
7642 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7643 XEXP (operands[1], 1));
7644 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7648 (define_expand "movsfcc"
7649 [(set (match_operand:SF 0 "s_register_operand" "")
7650 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7651 (match_operand:SF 2 "s_register_operand" "")
7652 (match_operand:SF 3 "nonmemory_operand" "")))]
7653 "TARGET_32BIT && TARGET_HARD_FLOAT"
7656 enum rtx_code code = GET_CODE (operands[1]);
7659 if (code == UNEQ || code == LTGT)
7662 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7663 Otherwise, ensure it is a valid FP add operand */
7664 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7665 || (!arm_float_add_operand (operands[3], SFmode)))
7666 operands[3] = force_reg (SFmode, operands[3]);
7668 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7669 XEXP (operands[1], 1));
7670 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7674 (define_expand "movdfcc"
7675 [(set (match_operand:DF 0 "s_register_operand" "")
7676 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7677 (match_operand:DF 2 "s_register_operand" "")
7678 (match_operand:DF 3 "arm_float_add_operand" "")))]
7679 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7682 enum rtx_code code = GET_CODE (operands[1]);
7685 if (code == UNEQ || code == LTGT)
7688 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7689 XEXP (operands[1], 1));
7690 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7694 (define_insn "*movsicc_insn"
7695 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7697 (match_operator 3 "arm_comparison_operator"
7698 [(match_operand 4 "cc_register" "") (const_int 0)])
7699 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7700 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7707 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7708 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7709 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7710 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7711 [(set_attr "length" "4,4,4,4,8,8,8,8")
7712 (set_attr "conds" "use")
7713 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
7716 (define_insn "*movsfcc_soft_insn"
7717 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7718 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7719 [(match_operand 4 "cc_register" "") (const_int 0)])
7720 (match_operand:SF 1 "s_register_operand" "0,r")
7721 (match_operand:SF 2 "s_register_operand" "r,0")))]
7722 "TARGET_ARM && TARGET_SOFT_FLOAT"
7726 [(set_attr "conds" "use")
7727 (set_attr "insn" "mov")]
7731 ;; Jump and linkage insns
7733 (define_expand "jump"
7735 (label_ref (match_operand 0 "" "")))]
7740 (define_insn "*arm_jump"
7742 (label_ref (match_operand 0 "" "")))]
7746 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7748 arm_ccfsm_state += 2;
7751 return \"b%?\\t%l0\";
7754 [(set_attr "predicable" "yes")]
7757 (define_insn "*thumb_jump"
7759 (label_ref (match_operand 0 "" "")))]
7762 if (get_attr_length (insn) == 2)
7764 return \"bl\\t%l0\\t%@ far jump\";
7766 [(set (attr "far_jump")
7768 (eq_attr "length" "4")
7769 (const_string "yes")
7770 (const_string "no")))
7771 (set (attr "length")
7773 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7774 (le (minus (match_dup 0) (pc)) (const_int 2048)))
7779 (define_expand "call"
7780 [(parallel [(call (match_operand 0 "memory_operand" "")
7781 (match_operand 1 "general_operand" ""))
7782 (use (match_operand 2 "" ""))
7783 (clobber (reg:SI LR_REGNUM))])]
7789 /* In an untyped call, we can get NULL for operand 2. */
7790 if (operands[2] == NULL_RTX)
7791 operands[2] = const0_rtx;
7793 /* Decide if we should generate indirect calls by loading the
7794 32-bit address of the callee into a register before performing the
7796 callee = XEXP (operands[0], 0);
7797 if (GET_CODE (callee) == SYMBOL_REF
7798 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7800 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7802 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7803 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7808 (define_expand "call_internal"
7809 [(parallel [(call (match_operand 0 "memory_operand" "")
7810 (match_operand 1 "general_operand" ""))
7811 (use (match_operand 2 "" ""))
7812 (clobber (reg:SI LR_REGNUM))])])
7814 (define_insn "*call_reg_armv5"
7815 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7816 (match_operand 1 "" ""))
7817 (use (match_operand 2 "" ""))
7818 (clobber (reg:SI LR_REGNUM))]
7819 "TARGET_ARM && arm_arch5"
7821 [(set_attr "type" "call")]
7824 (define_insn "*call_reg_arm"
7825 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7826 (match_operand 1 "" ""))
7827 (use (match_operand 2 "" ""))
7828 (clobber (reg:SI LR_REGNUM))]
7829 "TARGET_ARM && !arm_arch5"
7831 return output_call (operands);
7833 ;; length is worst case, normally it is only two
7834 [(set_attr "length" "12")
7835 (set_attr "type" "call")]
7839 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7840 ;; considered a function call by the branch predictor of some cores (PR40887).
7841 ;; Falls back to blx rN (*call_reg_armv5).
7843 (define_insn "*call_mem"
7844 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7845 (match_operand 1 "" ""))
7846 (use (match_operand 2 "" ""))
7847 (clobber (reg:SI LR_REGNUM))]
7848 "TARGET_ARM && !arm_arch5"
7850 return output_call_mem (operands);
7852 [(set_attr "length" "12")
7853 (set_attr "type" "call")]
7856 (define_insn "*call_reg_thumb1_v5"
7857 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7858 (match_operand 1 "" ""))
7859 (use (match_operand 2 "" ""))
7860 (clobber (reg:SI LR_REGNUM))]
7861 "TARGET_THUMB1 && arm_arch5"
7863 [(set_attr "length" "2")
7864 (set_attr "type" "call")]
7867 (define_insn "*call_reg_thumb1"
7868 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7869 (match_operand 1 "" ""))
7870 (use (match_operand 2 "" ""))
7871 (clobber (reg:SI LR_REGNUM))]
7872 "TARGET_THUMB1 && !arm_arch5"
7875 if (!TARGET_CALLER_INTERWORKING)
7876 return thumb_call_via_reg (operands[0]);
7877 else if (operands[1] == const0_rtx)
7878 return \"bl\\t%__interwork_call_via_%0\";
7879 else if (frame_pointer_needed)
7880 return \"bl\\t%__interwork_r7_call_via_%0\";
7882 return \"bl\\t%__interwork_r11_call_via_%0\";
7884 [(set_attr "type" "call")]
7887 (define_expand "call_value"
7888 [(parallel [(set (match_operand 0 "" "")
7889 (call (match_operand 1 "memory_operand" "")
7890 (match_operand 2 "general_operand" "")))
7891 (use (match_operand 3 "" ""))
7892 (clobber (reg:SI LR_REGNUM))])]
7898 /* In an untyped call, we can get NULL for operand 2. */
7899 if (operands[3] == 0)
7900 operands[3] = const0_rtx;
7902 /* Decide if we should generate indirect calls by loading the
7903 32-bit address of the callee into a register before performing the
7905 callee = XEXP (operands[1], 0);
7906 if (GET_CODE (callee) == SYMBOL_REF
7907 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7909 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7911 pat = gen_call_value_internal (operands[0], operands[1],
7912 operands[2], operands[3]);
7913 arm_emit_call_insn (pat, XEXP (operands[1], 0));
7918 (define_expand "call_value_internal"
7919 [(parallel [(set (match_operand 0 "" "")
7920 (call (match_operand 1 "memory_operand" "")
7921 (match_operand 2 "general_operand" "")))
7922 (use (match_operand 3 "" ""))
7923 (clobber (reg:SI LR_REGNUM))])])
7925 (define_insn "*call_value_reg_armv5"
7926 [(set (match_operand 0 "" "")
7927 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7928 (match_operand 2 "" "")))
7929 (use (match_operand 3 "" ""))
7930 (clobber (reg:SI LR_REGNUM))]
7931 "TARGET_ARM && arm_arch5"
7933 [(set_attr "type" "call")]
7936 (define_insn "*call_value_reg_arm"
7937 [(set (match_operand 0 "" "")
7938 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7939 (match_operand 2 "" "")))
7940 (use (match_operand 3 "" ""))
7941 (clobber (reg:SI LR_REGNUM))]
7942 "TARGET_ARM && !arm_arch5"
7944 return output_call (&operands[1]);
7946 [(set_attr "length" "12")
7947 (set_attr "type" "call")]
7950 ;; Note: see *call_mem
7952 (define_insn "*call_value_mem"
7953 [(set (match_operand 0 "" "")
7954 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7955 (match_operand 2 "" "")))
7956 (use (match_operand 3 "" ""))
7957 (clobber (reg:SI LR_REGNUM))]
7958 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7960 return output_call_mem (&operands[1]);
7962 [(set_attr "length" "12")
7963 (set_attr "type" "call")]
7966 (define_insn "*call_value_reg_thumb1_v5"
7967 [(set (match_operand 0 "" "")
7968 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7969 (match_operand 2 "" "")))
7970 (use (match_operand 3 "" ""))
7971 (clobber (reg:SI LR_REGNUM))]
7972 "TARGET_THUMB1 && arm_arch5"
7974 [(set_attr "length" "2")
7975 (set_attr "type" "call")]
7978 (define_insn "*call_value_reg_thumb1"
7979 [(set (match_operand 0 "" "")
7980 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7981 (match_operand 2 "" "")))
7982 (use (match_operand 3 "" ""))
7983 (clobber (reg:SI LR_REGNUM))]
7984 "TARGET_THUMB1 && !arm_arch5"
7987 if (!TARGET_CALLER_INTERWORKING)
7988 return thumb_call_via_reg (operands[1]);
7989 else if (operands[2] == const0_rtx)
7990 return \"bl\\t%__interwork_call_via_%1\";
7991 else if (frame_pointer_needed)
7992 return \"bl\\t%__interwork_r7_call_via_%1\";
7994 return \"bl\\t%__interwork_r11_call_via_%1\";
7996 [(set_attr "type" "call")]
7999 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8000 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8002 (define_insn "*call_symbol"
8003 [(call (mem:SI (match_operand:SI 0 "" ""))
8004 (match_operand 1 "" ""))
8005 (use (match_operand 2 "" ""))
8006 (clobber (reg:SI LR_REGNUM))]
8008 && (GET_CODE (operands[0]) == SYMBOL_REF)
8009 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8012 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8014 [(set_attr "type" "call")]
8017 (define_insn "*call_value_symbol"
8018 [(set (match_operand 0 "" "")
8019 (call (mem:SI (match_operand:SI 1 "" ""))
8020 (match_operand:SI 2 "" "")))
8021 (use (match_operand 3 "" ""))
8022 (clobber (reg:SI LR_REGNUM))]
8024 && (GET_CODE (operands[1]) == SYMBOL_REF)
8025 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8028 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8030 [(set_attr "type" "call")]
8033 (define_insn "*call_insn"
8034 [(call (mem:SI (match_operand:SI 0 "" ""))
8035 (match_operand:SI 1 "" ""))
8036 (use (match_operand 2 "" ""))
8037 (clobber (reg:SI LR_REGNUM))]
8039 && GET_CODE (operands[0]) == SYMBOL_REF
8040 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8042 [(set_attr "length" "4")
8043 (set_attr "type" "call")]
8046 (define_insn "*call_value_insn"
8047 [(set (match_operand 0 "" "")
8048 (call (mem:SI (match_operand 1 "" ""))
8049 (match_operand 2 "" "")))
8050 (use (match_operand 3 "" ""))
8051 (clobber (reg:SI LR_REGNUM))]
8053 && GET_CODE (operands[1]) == SYMBOL_REF
8054 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8056 [(set_attr "length" "4")
8057 (set_attr "type" "call")]
8060 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8061 (define_expand "sibcall"
8062 [(parallel [(call (match_operand 0 "memory_operand" "")
8063 (match_operand 1 "general_operand" ""))
8065 (use (match_operand 2 "" ""))])]
8069 if (operands[2] == NULL_RTX)
8070 operands[2] = const0_rtx;
8074 (define_expand "sibcall_value"
8075 [(parallel [(set (match_operand 0 "" "")
8076 (call (match_operand 1 "memory_operand" "")
8077 (match_operand 2 "general_operand" "")))
8079 (use (match_operand 3 "" ""))])]
8083 if (operands[3] == NULL_RTX)
8084 operands[3] = const0_rtx;
8088 (define_insn "*sibcall_insn"
8089 [(call (mem:SI (match_operand:SI 0 "" "X"))
8090 (match_operand 1 "" ""))
8092 (use (match_operand 2 "" ""))]
8093 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8095 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8097 [(set_attr "type" "call")]
8100 (define_insn "*sibcall_value_insn"
8101 [(set (match_operand 0 "" "")
8102 (call (mem:SI (match_operand:SI 1 "" "X"))
8103 (match_operand 2 "" "")))
8105 (use (match_operand 3 "" ""))]
8106 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8108 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8110 [(set_attr "type" "call")]
8113 (define_expand "return"
8115 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8118 ;; Often the return insn will be the same as loading from memory, so set attr
8119 (define_insn "*arm_return"
8121 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8124 if (arm_ccfsm_state == 2)
8126 arm_ccfsm_state += 2;
8129 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8131 [(set_attr "type" "load1")
8132 (set_attr "length" "12")
8133 (set_attr "predicable" "yes")]
8136 (define_insn "*cond_return"
8138 (if_then_else (match_operator 0 "arm_comparison_operator"
8139 [(match_operand 1 "cc_register" "") (const_int 0)])
8142 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8145 if (arm_ccfsm_state == 2)
8147 arm_ccfsm_state += 2;
8150 return output_return_instruction (operands[0], TRUE, FALSE);
8152 [(set_attr "conds" "use")
8153 (set_attr "length" "12")
8154 (set_attr "type" "load1")]
8157 (define_insn "*cond_return_inverted"
8159 (if_then_else (match_operator 0 "arm_comparison_operator"
8160 [(match_operand 1 "cc_register" "") (const_int 0)])
8163 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8166 if (arm_ccfsm_state == 2)
8168 arm_ccfsm_state += 2;
8171 return output_return_instruction (operands[0], TRUE, TRUE);
8173 [(set_attr "conds" "use")
8174 (set_attr "length" "12")
8175 (set_attr "type" "load1")]
8178 ;; Generate a sequence of instructions to determine if the processor is
8179 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8182 (define_expand "return_addr_mask"
8184 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8186 (set (match_operand:SI 0 "s_register_operand" "")
8187 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8189 (const_int 67108860)))] ; 0x03fffffc
8192 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8195 (define_insn "*check_arch2"
8196 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8197 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8200 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8201 [(set_attr "length" "8")
8202 (set_attr "conds" "set")]
8205 ;; Call subroutine returning any type.
8207 (define_expand "untyped_call"
8208 [(parallel [(call (match_operand 0 "" "")
8210 (match_operand 1 "" "")
8211 (match_operand 2 "" "")])]
8216 rtx par = gen_rtx_PARALLEL (VOIDmode,
8217 rtvec_alloc (XVECLEN (operands[2], 0)));
8218 rtx addr = gen_reg_rtx (Pmode);
8222 emit_move_insn (addr, XEXP (operands[1], 0));
8223 mem = change_address (operands[1], BLKmode, addr);
8225 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8227 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8229 /* Default code only uses r0 as a return value, but we could
8230 be using anything up to 4 registers. */
8231 if (REGNO (src) == R0_REGNUM)
8232 src = gen_rtx_REG (TImode, R0_REGNUM);
8234 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8236 size += GET_MODE_SIZE (GET_MODE (src));
8239 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8244 for (i = 0; i < XVECLEN (par, 0); i++)
8246 HOST_WIDE_INT offset = 0;
8247 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8250 emit_move_insn (addr, plus_constant (addr, size));
8252 mem = change_address (mem, GET_MODE (reg), NULL);
8253 if (REGNO (reg) == R0_REGNUM)
8255 /* On thumb we have to use a write-back instruction. */
8256 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8257 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8258 size = TARGET_ARM ? 16 : 0;
8262 emit_move_insn (mem, reg);
8263 size = GET_MODE_SIZE (GET_MODE (reg));
8267 /* The optimizer does not know that the call sets the function value
8268 registers we stored in the result block. We avoid problems by
8269 claiming that all hard registers are used and clobbered at this
8271 emit_insn (gen_blockage ());
8277 (define_expand "untyped_return"
8278 [(match_operand:BLK 0 "memory_operand" "")
8279 (match_operand 1 "" "")]
8284 rtx addr = gen_reg_rtx (Pmode);
8288 emit_move_insn (addr, XEXP (operands[0], 0));
8289 mem = change_address (operands[0], BLKmode, addr);
8291 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8293 HOST_WIDE_INT offset = 0;
8294 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8297 emit_move_insn (addr, plus_constant (addr, size));
8299 mem = change_address (mem, GET_MODE (reg), NULL);
8300 if (REGNO (reg) == R0_REGNUM)
8302 /* On thumb we have to use a write-back instruction. */
8303 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8304 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8305 size = TARGET_ARM ? 16 : 0;
8309 emit_move_insn (reg, mem);
8310 size = GET_MODE_SIZE (GET_MODE (reg));
8314 /* Emit USE insns before the return. */
8315 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8316 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8318 /* Construct the return. */
8319 expand_naked_return ();
8325 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8326 ;; all of memory. This blocks insns from being moved across this point.
8328 (define_insn "blockage"
8329 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8332 [(set_attr "length" "0")
8333 (set_attr "type" "block")]
8336 (define_expand "casesi"
8337 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8338 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8339 (match_operand:SI 2 "const_int_operand" "") ; total range
8340 (match_operand:SI 3 "" "") ; table label
8341 (match_operand:SI 4 "" "")] ; Out of range label
8342 "TARGET_32BIT || optimize_size || flag_pic"
8345 enum insn_code code;
8346 if (operands[1] != const0_rtx)
8348 rtx reg = gen_reg_rtx (SImode);
8350 emit_insn (gen_addsi3 (reg, operands[0],
8351 GEN_INT (-INTVAL (operands[1]))));
8356 code = CODE_FOR_arm_casesi_internal;
8357 else if (TARGET_THUMB1)
8358 code = CODE_FOR_thumb1_casesi_internal_pic;
8360 code = CODE_FOR_thumb2_casesi_internal_pic;
8362 code = CODE_FOR_thumb2_casesi_internal;
8364 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8365 operands[2] = force_reg (SImode, operands[2]);
8367 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8368 operands[3], operands[4]));
8373 ;; The USE in this pattern is needed to tell flow analysis that this is
8374 ;; a CASESI insn. It has no other purpose.
8375 (define_insn "arm_casesi_internal"
8376 [(parallel [(set (pc)
8378 (leu (match_operand:SI 0 "s_register_operand" "r")
8379 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8380 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8381 (label_ref (match_operand 2 "" ""))))
8382 (label_ref (match_operand 3 "" ""))))
8383 (clobber (reg:CC CC_REGNUM))
8384 (use (label_ref (match_dup 2)))])]
8388 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8389 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8391 [(set_attr "conds" "clob")
8392 (set_attr "length" "12")]
8395 (define_expand "thumb1_casesi_internal_pic"
8396 [(match_operand:SI 0 "s_register_operand" "")
8397 (match_operand:SI 1 "thumb1_cmp_operand" "")
8398 (match_operand 2 "" "")
8399 (match_operand 3 "" "")]
8403 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8404 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8406 reg0 = gen_rtx_REG (SImode, 0);
8407 emit_move_insn (reg0, operands[0]);
8408 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8413 (define_insn "thumb1_casesi_dispatch"
8414 [(parallel [(set (pc) (unspec [(reg:SI 0)
8415 (label_ref (match_operand 0 "" ""))
8416 ;; (label_ref (match_operand 1 "" ""))
8418 UNSPEC_THUMB1_CASESI))
8419 (clobber (reg:SI IP_REGNUM))
8420 (clobber (reg:SI LR_REGNUM))])]
8422 "* return thumb1_output_casesi(operands);"
8423 [(set_attr "length" "4")]
8426 (define_expand "indirect_jump"
8428 (match_operand:SI 0 "s_register_operand" ""))]
8431 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8432 address and use bx. */
8436 tmp = gen_reg_rtx (SImode);
8437 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8443 ;; NB Never uses BX.
8444 (define_insn "*arm_indirect_jump"
8446 (match_operand:SI 0 "s_register_operand" "r"))]
8448 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8449 [(set_attr "predicable" "yes")]
8452 (define_insn "*load_indirect_jump"
8454 (match_operand:SI 0 "memory_operand" "m"))]
8456 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8457 [(set_attr "type" "load1")
8458 (set_attr "pool_range" "4096")
8459 (set_attr "neg_pool_range" "4084")
8460 (set_attr "predicable" "yes")]
8463 ;; NB Never uses BX.
8464 (define_insn "*thumb1_indirect_jump"
8466 (match_operand:SI 0 "register_operand" "l*r"))]
8469 [(set_attr "conds" "clob")
8470 (set_attr "length" "2")]
8480 if (TARGET_UNIFIED_ASM)
8483 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8484 return \"mov\\tr8, r8\";
8486 [(set (attr "length")
8487 (if_then_else (eq_attr "is_thumb" "yes")
8493 ;; Patterns to allow combination of arithmetic, cond code and shifts
8495 (define_insn "*arith_shiftsi"
8496 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8497 (match_operator:SI 1 "shiftable_operator"
8498 [(match_operator:SI 3 "shift_operator"
8499 [(match_operand:SI 4 "s_register_operand" "r,r")
8500 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8501 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8503 "%i1%?\\t%0, %2, %4%S3"
8504 [(set_attr "predicable" "yes")
8505 (set_attr "shift" "4")
8506 (set_attr "arch" "32,a")
8507 ;; We have to make sure to disable the second alternative if
8508 ;; the shift_operator is MULT, since otherwise the insn will
8509 ;; also match a multiply_accumulate pattern and validate_change
8510 ;; will allow a replacement of the constant with a register
8511 ;; despite the checks done in shift_operator.
8512 (set_attr_alternative "insn_enabled"
8513 [(const_string "yes")
8515 (match_operand:SI 3 "mult_operator" "")
8516 (const_string "no") (const_string "yes"))])
8517 (set_attr "type" "alu_shift,alu_shift_reg")])
8520 [(set (match_operand:SI 0 "s_register_operand" "")
8521 (match_operator:SI 1 "shiftable_operator"
8522 [(match_operator:SI 2 "shiftable_operator"
8523 [(match_operator:SI 3 "shift_operator"
8524 [(match_operand:SI 4 "s_register_operand" "")
8525 (match_operand:SI 5 "reg_or_int_operand" "")])
8526 (match_operand:SI 6 "s_register_operand" "")])
8527 (match_operand:SI 7 "arm_rhs_operand" "")]))
8528 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8531 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8534 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8537 (define_insn "*arith_shiftsi_compare0"
8538 [(set (reg:CC_NOOV CC_REGNUM)
8540 (match_operator:SI 1 "shiftable_operator"
8541 [(match_operator:SI 3 "shift_operator"
8542 [(match_operand:SI 4 "s_register_operand" "r,r")
8543 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8544 (match_operand:SI 2 "s_register_operand" "r,r")])
8546 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8547 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8550 "%i1%.\\t%0, %2, %4%S3"
8551 [(set_attr "conds" "set")
8552 (set_attr "shift" "4")
8553 (set_attr "arch" "32,a")
8554 (set_attr "type" "alu_shift,alu_shift_reg")])
8556 (define_insn "*arith_shiftsi_compare0_scratch"
8557 [(set (reg:CC_NOOV CC_REGNUM)
8559 (match_operator:SI 1 "shiftable_operator"
8560 [(match_operator:SI 3 "shift_operator"
8561 [(match_operand:SI 4 "s_register_operand" "r,r")
8562 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8563 (match_operand:SI 2 "s_register_operand" "r,r")])
8565 (clobber (match_scratch:SI 0 "=r,r"))]
8567 "%i1%.\\t%0, %2, %4%S3"
8568 [(set_attr "conds" "set")
8569 (set_attr "shift" "4")
8570 (set_attr "arch" "32,a")
8571 (set_attr "type" "alu_shift,alu_shift_reg")])
8573 (define_insn "*sub_shiftsi"
8574 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8575 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8576 (match_operator:SI 2 "shift_operator"
8577 [(match_operand:SI 3 "s_register_operand" "r,r")
8578 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8580 "sub%?\\t%0, %1, %3%S2"
8581 [(set_attr "predicable" "yes")
8582 (set_attr "shift" "3")
8583 (set_attr "arch" "32,a")
8584 (set_attr "type" "alu_shift,alu_shift_reg")])
8586 (define_insn "*sub_shiftsi_compare0"
8587 [(set (reg:CC_NOOV CC_REGNUM)
8589 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8590 (match_operator:SI 2 "shift_operator"
8591 [(match_operand:SI 3 "s_register_operand" "r,r")
8592 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8594 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8595 (minus:SI (match_dup 1)
8596 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8598 "sub%.\\t%0, %1, %3%S2"
8599 [(set_attr "conds" "set")
8600 (set_attr "shift" "3")
8601 (set_attr "arch" "32,a")
8602 (set_attr "type" "alu_shift,alu_shift_reg")])
8604 (define_insn "*sub_shiftsi_compare0_scratch"
8605 [(set (reg:CC_NOOV CC_REGNUM)
8607 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8608 (match_operator:SI 2 "shift_operator"
8609 [(match_operand:SI 3 "s_register_operand" "r,r")
8610 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8612 (clobber (match_scratch:SI 0 "=r,r"))]
8614 "sub%.\\t%0, %1, %3%S2"
8615 [(set_attr "conds" "set")
8616 (set_attr "shift" "3")
8617 (set_attr "arch" "32,a")
8618 (set_attr "type" "alu_shift,alu_shift_reg")])
8621 (define_insn "*and_scc"
8622 [(set (match_operand:SI 0 "s_register_operand" "=r")
8623 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8624 [(match_operand 3 "cc_register" "") (const_int 0)])
8625 (match_operand:SI 2 "s_register_operand" "r")))]
8627 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8628 [(set_attr "conds" "use")
8629 (set_attr "insn" "mov")
8630 (set_attr "length" "8")]
8633 (define_insn "*ior_scc"
8634 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8635 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8636 [(match_operand 3 "cc_register" "") (const_int 0)])
8637 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8641 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8642 [(set_attr "conds" "use")
8643 (set_attr "length" "4,8")]
8646 ; A series of splitters for the compare_scc pattern below. Note that
8647 ; order is important.
8649 [(set (match_operand:SI 0 "s_register_operand" "")
8650 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8652 (clobber (reg:CC CC_REGNUM))]
8653 "TARGET_32BIT && reload_completed"
8654 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8657 [(set (match_operand:SI 0 "s_register_operand" "")
8658 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8660 (clobber (reg:CC CC_REGNUM))]
8661 "TARGET_32BIT && reload_completed"
8662 [(set (match_dup 0) (not:SI (match_dup 1)))
8663 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8666 [(set (match_operand:SI 0 "s_register_operand" "")
8667 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8669 (clobber (reg:CC CC_REGNUM))]
8670 "TARGET_32BIT && reload_completed"
8672 [(set (reg:CC CC_REGNUM)
8673 (compare:CC (const_int 1) (match_dup 1)))
8675 (minus:SI (const_int 1) (match_dup 1)))])
8676 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8677 (set (match_dup 0) (const_int 0)))])
8680 [(set (match_operand:SI 0 "s_register_operand" "")
8681 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8682 (match_operand:SI 2 "const_int_operand" "")))
8683 (clobber (reg:CC CC_REGNUM))]
8684 "TARGET_32BIT && reload_completed"
8686 [(set (reg:CC CC_REGNUM)
8687 (compare:CC (match_dup 1) (match_dup 2)))
8688 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8689 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8690 (set (match_dup 0) (const_int 1)))]
8692 operands[3] = GEN_INT (-INTVAL (operands[2]));
8696 [(set (match_operand:SI 0 "s_register_operand" "")
8697 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8698 (match_operand:SI 2 "arm_add_operand" "")))
8699 (clobber (reg:CC CC_REGNUM))]
8700 "TARGET_32BIT && reload_completed"
8702 [(set (reg:CC_NOOV CC_REGNUM)
8703 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8705 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8706 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8707 (set (match_dup 0) (const_int 1)))])
8709 (define_insn_and_split "*compare_scc"
8710 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8711 (match_operator:SI 1 "arm_comparison_operator"
8712 [(match_operand:SI 2 "s_register_operand" "r,r")
8713 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8714 (clobber (reg:CC CC_REGNUM))]
8717 "&& reload_completed"
8718 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8719 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8720 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8723 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8724 operands[2], operands[3]);
8725 enum rtx_code rc = GET_CODE (operands[1]);
8727 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8729 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8730 if (mode == CCFPmode || mode == CCFPEmode)
8731 rc = reverse_condition_maybe_unordered (rc);
8733 rc = reverse_condition (rc);
8734 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8737 ;; Attempt to improve the sequence generated by the compare_scc splitters
8738 ;; not to use conditional execution.
8740 [(set (reg:CC CC_REGNUM)
8741 (compare:CC (match_operand:SI 1 "register_operand" "")
8742 (match_operand:SI 2 "arm_rhs_operand" "")))
8743 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8744 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8745 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8746 (set (match_dup 0) (const_int 1)))
8747 (match_scratch:SI 3 "r")]
8749 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8751 [(set (reg:CC CC_REGNUM)
8752 (compare:CC (const_int 0) (match_dup 3)))
8753 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8755 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8756 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
8758 (define_insn "*cond_move"
8759 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8760 (if_then_else:SI (match_operator 3 "equality_operator"
8761 [(match_operator 4 "arm_comparison_operator"
8762 [(match_operand 5 "cc_register" "") (const_int 0)])
8764 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8765 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8768 if (GET_CODE (operands[3]) == NE)
8770 if (which_alternative != 1)
8771 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8772 if (which_alternative != 0)
8773 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8776 if (which_alternative != 0)
8777 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8778 if (which_alternative != 1)
8779 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8782 [(set_attr "conds" "use")
8783 (set_attr "insn" "mov")
8784 (set_attr "length" "4,4,8")]
8787 (define_insn "*cond_arith"
8788 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8789 (match_operator:SI 5 "shiftable_operator"
8790 [(match_operator:SI 4 "arm_comparison_operator"
8791 [(match_operand:SI 2 "s_register_operand" "r,r")
8792 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8793 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8794 (clobber (reg:CC CC_REGNUM))]
8797 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8798 return \"%i5\\t%0, %1, %2, lsr #31\";
8800 output_asm_insn (\"cmp\\t%2, %3\", operands);
8801 if (GET_CODE (operands[5]) == AND)
8802 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8803 else if (GET_CODE (operands[5]) == MINUS)
8804 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8805 else if (which_alternative != 0)
8806 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8807 return \"%i5%d4\\t%0, %1, #1\";
8809 [(set_attr "conds" "clob")
8810 (set_attr "length" "12")]
8813 (define_insn "*cond_sub"
8814 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8815 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8816 (match_operator:SI 4 "arm_comparison_operator"
8817 [(match_operand:SI 2 "s_register_operand" "r,r")
8818 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8819 (clobber (reg:CC CC_REGNUM))]
8822 output_asm_insn (\"cmp\\t%2, %3\", operands);
8823 if (which_alternative != 0)
8824 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8825 return \"sub%d4\\t%0, %1, #1\";
8827 [(set_attr "conds" "clob")
8828 (set_attr "length" "8,12")]
8831 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
8832 (define_insn "*cmp_ite0"
8833 [(set (match_operand 6 "dominant_cc_register" "")
8836 (match_operator 4 "arm_comparison_operator"
8837 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8838 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8839 (match_operator:SI 5 "arm_comparison_operator"
8840 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8841 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8847 static const char * const opcodes[4][2] =
8849 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8850 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8851 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8852 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8853 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8854 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8855 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8856 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8859 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8861 return opcodes[which_alternative][swap];
8863 [(set_attr "conds" "set")
8864 (set_attr "length" "8")]
8867 (define_insn "*cmp_ite1"
8868 [(set (match_operand 6 "dominant_cc_register" "")
8871 (match_operator 4 "arm_comparison_operator"
8872 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8873 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8874 (match_operator:SI 5 "arm_comparison_operator"
8875 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8876 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8882 static const char * const opcodes[4][2] =
8884 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8885 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8886 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8887 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8888 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8889 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8890 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8891 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8894 comparison_dominates_p (GET_CODE (operands[5]),
8895 reverse_condition (GET_CODE (operands[4])));
8897 return opcodes[which_alternative][swap];
8899 [(set_attr "conds" "set")
8900 (set_attr "length" "8")]
8903 (define_insn "*cmp_and"
8904 [(set (match_operand 6 "dominant_cc_register" "")
8907 (match_operator 4 "arm_comparison_operator"
8908 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8909 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8910 (match_operator:SI 5 "arm_comparison_operator"
8911 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8912 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8917 static const char *const opcodes[4][2] =
8919 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8920 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8921 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8922 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8923 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8924 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8925 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8926 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8929 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8931 return opcodes[which_alternative][swap];
8933 [(set_attr "conds" "set")
8934 (set_attr "predicable" "no")
8935 (set_attr "length" "8")]
8938 (define_insn "*cmp_ior"
8939 [(set (match_operand 6 "dominant_cc_register" "")
8942 (match_operator 4 "arm_comparison_operator"
8943 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8944 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8945 (match_operator:SI 5 "arm_comparison_operator"
8946 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8947 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8952 static const char *const opcodes[4][2] =
8954 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8955 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8956 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8957 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8958 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8959 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8960 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8961 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8964 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8966 return opcodes[which_alternative][swap];
8969 [(set_attr "conds" "set")
8970 (set_attr "length" "8")]
8973 (define_insn_and_split "*ior_scc_scc"
8974 [(set (match_operand:SI 0 "s_register_operand" "=r")
8975 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8976 [(match_operand:SI 1 "s_register_operand" "r")
8977 (match_operand:SI 2 "arm_add_operand" "rIL")])
8978 (match_operator:SI 6 "arm_comparison_operator"
8979 [(match_operand:SI 4 "s_register_operand" "r")
8980 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8981 (clobber (reg:CC CC_REGNUM))]
8983 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8986 "TARGET_ARM && reload_completed"
8990 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8991 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8993 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8995 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8998 [(set_attr "conds" "clob")
8999 (set_attr "length" "16")])
9001 ; If the above pattern is followed by a CMP insn, then the compare is
9002 ; redundant, since we can rework the conditional instruction that follows.
9003 (define_insn_and_split "*ior_scc_scc_cmp"
9004 [(set (match_operand 0 "dominant_cc_register" "")
9005 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9006 [(match_operand:SI 1 "s_register_operand" "r")
9007 (match_operand:SI 2 "arm_add_operand" "rIL")])
9008 (match_operator:SI 6 "arm_comparison_operator"
9009 [(match_operand:SI 4 "s_register_operand" "r")
9010 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9012 (set (match_operand:SI 7 "s_register_operand" "=r")
9013 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9014 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9017 "TARGET_ARM && reload_completed"
9021 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9022 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9024 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9026 [(set_attr "conds" "set")
9027 (set_attr "length" "16")])
9029 (define_insn_and_split "*and_scc_scc"
9030 [(set (match_operand:SI 0 "s_register_operand" "=r")
9031 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9032 [(match_operand:SI 1 "s_register_operand" "r")
9033 (match_operand:SI 2 "arm_add_operand" "rIL")])
9034 (match_operator:SI 6 "arm_comparison_operator"
9035 [(match_operand:SI 4 "s_register_operand" "r")
9036 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9037 (clobber (reg:CC CC_REGNUM))]
9039 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9042 "TARGET_ARM && reload_completed
9043 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9048 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9049 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9051 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9053 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9056 [(set_attr "conds" "clob")
9057 (set_attr "length" "16")])
9059 ; If the above pattern is followed by a CMP insn, then the compare is
9060 ; redundant, since we can rework the conditional instruction that follows.
9061 (define_insn_and_split "*and_scc_scc_cmp"
9062 [(set (match_operand 0 "dominant_cc_register" "")
9063 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9064 [(match_operand:SI 1 "s_register_operand" "r")
9065 (match_operand:SI 2 "arm_add_operand" "rIL")])
9066 (match_operator:SI 6 "arm_comparison_operator"
9067 [(match_operand:SI 4 "s_register_operand" "r")
9068 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9070 (set (match_operand:SI 7 "s_register_operand" "=r")
9071 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9072 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9075 "TARGET_ARM && reload_completed"
9079 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9080 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9082 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9084 [(set_attr "conds" "set")
9085 (set_attr "length" "16")])
9087 ;; If there is no dominance in the comparison, then we can still save an
9088 ;; instruction in the AND case, since we can know that the second compare
9089 ;; need only zero the value if false (if true, then the value is already
9091 (define_insn_and_split "*and_scc_scc_nodom"
9092 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9093 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9094 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9095 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9096 (match_operator:SI 6 "arm_comparison_operator"
9097 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9098 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9099 (clobber (reg:CC CC_REGNUM))]
9101 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9104 "TARGET_ARM && reload_completed"
9105 [(parallel [(set (match_dup 0)
9106 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9107 (clobber (reg:CC CC_REGNUM))])
9108 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9110 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9113 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9114 operands[4], operands[5]),
9116 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9118 [(set_attr "conds" "clob")
9119 (set_attr "length" "20")])
9122 [(set (reg:CC_NOOV CC_REGNUM)
9123 (compare:CC_NOOV (ior:SI
9124 (and:SI (match_operand:SI 0 "s_register_operand" "")
9126 (match_operator:SI 1 "arm_comparison_operator"
9127 [(match_operand:SI 2 "s_register_operand" "")
9128 (match_operand:SI 3 "arm_add_operand" "")]))
9130 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9133 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9135 (set (reg:CC_NOOV CC_REGNUM)
9136 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9141 [(set (reg:CC_NOOV CC_REGNUM)
9142 (compare:CC_NOOV (ior:SI
9143 (match_operator:SI 1 "arm_comparison_operator"
9144 [(match_operand:SI 2 "s_register_operand" "")
9145 (match_operand:SI 3 "arm_add_operand" "")])
9146 (and:SI (match_operand:SI 0 "s_register_operand" "")
9149 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9152 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9154 (set (reg:CC_NOOV CC_REGNUM)
9155 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9158 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9160 (define_insn "*negscc"
9161 [(set (match_operand:SI 0 "s_register_operand" "=r")
9162 (neg:SI (match_operator 3 "arm_comparison_operator"
9163 [(match_operand:SI 1 "s_register_operand" "r")
9164 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9165 (clobber (reg:CC CC_REGNUM))]
9168 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9169 return \"mov\\t%0, %1, asr #31\";
9171 if (GET_CODE (operands[3]) == NE)
9172 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9174 output_asm_insn (\"cmp\\t%1, %2\", operands);
9175 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9176 return \"mvn%d3\\t%0, #0\";
9178 [(set_attr "conds" "clob")
9179 (set_attr "length" "12")]
9182 (define_insn "movcond"
9183 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9185 (match_operator 5 "arm_comparison_operator"
9186 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9187 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9188 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9189 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9190 (clobber (reg:CC CC_REGNUM))]
9193 if (GET_CODE (operands[5]) == LT
9194 && (operands[4] == const0_rtx))
9196 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9198 if (operands[2] == const0_rtx)
9199 return \"and\\t%0, %1, %3, asr #31\";
9200 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9202 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9204 if (operands[1] == const0_rtx)
9205 return \"bic\\t%0, %2, %3, asr #31\";
9206 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9208 /* The only case that falls through to here is when both ops 1 & 2
9212 if (GET_CODE (operands[5]) == GE
9213 && (operands[4] == const0_rtx))
9215 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9217 if (operands[2] == const0_rtx)
9218 return \"bic\\t%0, %1, %3, asr #31\";
9219 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9221 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9223 if (operands[1] == const0_rtx)
9224 return \"and\\t%0, %2, %3, asr #31\";
9225 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9227 /* The only case that falls through to here is when both ops 1 & 2
9230 if (GET_CODE (operands[4]) == CONST_INT
9231 && !const_ok_for_arm (INTVAL (operands[4])))
9232 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9234 output_asm_insn (\"cmp\\t%3, %4\", operands);
9235 if (which_alternative != 0)
9236 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9237 if (which_alternative != 1)
9238 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9241 [(set_attr "conds" "clob")
9242 (set_attr "length" "8,8,12")]
9245 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9247 (define_insn "*ifcompare_plus_move"
9248 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9249 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9250 [(match_operand:SI 4 "s_register_operand" "r,r")
9251 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9253 (match_operand:SI 2 "s_register_operand" "r,r")
9254 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9255 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9256 (clobber (reg:CC CC_REGNUM))]
9259 [(set_attr "conds" "clob")
9260 (set_attr "length" "8,12")]
9263 (define_insn "*if_plus_move"
9264 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9266 (match_operator 4 "arm_comparison_operator"
9267 [(match_operand 5 "cc_register" "") (const_int 0)])
9269 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9270 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9271 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9275 sub%d4\\t%0, %2, #%n3
9276 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9277 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9278 [(set_attr "conds" "use")
9279 (set_attr "length" "4,4,8,8")
9280 (set_attr "type" "*,*,*,*")]
9283 (define_insn "*ifcompare_move_plus"
9284 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9285 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9286 [(match_operand:SI 4 "s_register_operand" "r,r")
9287 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9288 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9290 (match_operand:SI 2 "s_register_operand" "r,r")
9291 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9292 (clobber (reg:CC CC_REGNUM))]
9295 [(set_attr "conds" "clob")
9296 (set_attr "length" "8,12")]
9299 (define_insn "*if_move_plus"
9300 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9302 (match_operator 4 "arm_comparison_operator"
9303 [(match_operand 5 "cc_register" "") (const_int 0)])
9304 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9306 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9307 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9311 sub%D4\\t%0, %2, #%n3
9312 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9313 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9314 [(set_attr "conds" "use")
9315 (set_attr "length" "4,4,8,8")
9316 (set_attr "type" "*,*,*,*")]
9319 (define_insn "*ifcompare_arith_arith"
9320 [(set (match_operand:SI 0 "s_register_operand" "=r")
9321 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9322 [(match_operand:SI 5 "s_register_operand" "r")
9323 (match_operand:SI 6 "arm_add_operand" "rIL")])
9324 (match_operator:SI 8 "shiftable_operator"
9325 [(match_operand:SI 1 "s_register_operand" "r")
9326 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9327 (match_operator:SI 7 "shiftable_operator"
9328 [(match_operand:SI 3 "s_register_operand" "r")
9329 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9330 (clobber (reg:CC CC_REGNUM))]
9333 [(set_attr "conds" "clob")
9334 (set_attr "length" "12")]
9337 (define_insn "*if_arith_arith"
9338 [(set (match_operand:SI 0 "s_register_operand" "=r")
9339 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9340 [(match_operand 8 "cc_register" "") (const_int 0)])
9341 (match_operator:SI 6 "shiftable_operator"
9342 [(match_operand:SI 1 "s_register_operand" "r")
9343 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9344 (match_operator:SI 7 "shiftable_operator"
9345 [(match_operand:SI 3 "s_register_operand" "r")
9346 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9348 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9349 [(set_attr "conds" "use")
9350 (set_attr "length" "8")]
9353 (define_insn "*ifcompare_arith_move"
9354 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9355 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9356 [(match_operand:SI 2 "s_register_operand" "r,r")
9357 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9358 (match_operator:SI 7 "shiftable_operator"
9359 [(match_operand:SI 4 "s_register_operand" "r,r")
9360 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9361 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9362 (clobber (reg:CC CC_REGNUM))]
9365 /* If we have an operation where (op x 0) is the identity operation and
9366 the conditional operator is LT or GE and we are comparing against zero and
9367 everything is in registers then we can do this in two instructions. */
9368 if (operands[3] == const0_rtx
9369 && GET_CODE (operands[7]) != AND
9370 && GET_CODE (operands[5]) == REG
9371 && GET_CODE (operands[1]) == REG
9372 && REGNO (operands[1]) == REGNO (operands[4])
9373 && REGNO (operands[4]) != REGNO (operands[0]))
9375 if (GET_CODE (operands[6]) == LT)
9376 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9377 else if (GET_CODE (operands[6]) == GE)
9378 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9380 if (GET_CODE (operands[3]) == CONST_INT
9381 && !const_ok_for_arm (INTVAL (operands[3])))
9382 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9384 output_asm_insn (\"cmp\\t%2, %3\", operands);
9385 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9386 if (which_alternative != 0)
9387 return \"mov%D6\\t%0, %1\";
9390 [(set_attr "conds" "clob")
9391 (set_attr "length" "8,12")]
9394 (define_insn "*if_arith_move"
9395 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9396 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9397 [(match_operand 6 "cc_register" "") (const_int 0)])
9398 (match_operator:SI 5 "shiftable_operator"
9399 [(match_operand:SI 2 "s_register_operand" "r,r")
9400 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9401 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9405 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9406 [(set_attr "conds" "use")
9407 (set_attr "length" "4,8")
9408 (set_attr "type" "*,*")]
9411 (define_insn "*ifcompare_move_arith"
9412 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9413 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9414 [(match_operand:SI 4 "s_register_operand" "r,r")
9415 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9416 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9417 (match_operator:SI 7 "shiftable_operator"
9418 [(match_operand:SI 2 "s_register_operand" "r,r")
9419 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9420 (clobber (reg:CC CC_REGNUM))]
9423 /* If we have an operation where (op x 0) is the identity operation and
9424 the conditional operator is LT or GE and we are comparing against zero and
9425 everything is in registers then we can do this in two instructions */
9426 if (operands[5] == const0_rtx
9427 && GET_CODE (operands[7]) != AND
9428 && GET_CODE (operands[3]) == REG
9429 && GET_CODE (operands[1]) == REG
9430 && REGNO (operands[1]) == REGNO (operands[2])
9431 && REGNO (operands[2]) != REGNO (operands[0]))
9433 if (GET_CODE (operands[6]) == GE)
9434 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9435 else if (GET_CODE (operands[6]) == LT)
9436 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9439 if (GET_CODE (operands[5]) == CONST_INT
9440 && !const_ok_for_arm (INTVAL (operands[5])))
9441 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9443 output_asm_insn (\"cmp\\t%4, %5\", operands);
9445 if (which_alternative != 0)
9446 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9447 return \"%I7%D6\\t%0, %2, %3\";
9449 [(set_attr "conds" "clob")
9450 (set_attr "length" "8,12")]
9453 (define_insn "*if_move_arith"
9454 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9456 (match_operator 4 "arm_comparison_operator"
9457 [(match_operand 6 "cc_register" "") (const_int 0)])
9458 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9459 (match_operator:SI 5 "shiftable_operator"
9460 [(match_operand:SI 2 "s_register_operand" "r,r")
9461 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9465 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9466 [(set_attr "conds" "use")
9467 (set_attr "length" "4,8")
9468 (set_attr "type" "*,*")]
9471 (define_insn "*ifcompare_move_not"
9472 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9474 (match_operator 5 "arm_comparison_operator"
9475 [(match_operand:SI 3 "s_register_operand" "r,r")
9476 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9477 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9479 (match_operand:SI 2 "s_register_operand" "r,r"))))
9480 (clobber (reg:CC CC_REGNUM))]
9483 [(set_attr "conds" "clob")
9484 (set_attr "length" "8,12")]
9487 (define_insn "*if_move_not"
9488 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9490 (match_operator 4 "arm_comparison_operator"
9491 [(match_operand 3 "cc_register" "") (const_int 0)])
9492 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9493 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9497 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9498 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9499 [(set_attr "conds" "use")
9500 (set_attr "insn" "mvn")
9501 (set_attr "length" "4,8,8")]
9504 (define_insn "*ifcompare_not_move"
9505 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9507 (match_operator 5 "arm_comparison_operator"
9508 [(match_operand:SI 3 "s_register_operand" "r,r")
9509 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9511 (match_operand:SI 2 "s_register_operand" "r,r"))
9512 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9513 (clobber (reg:CC CC_REGNUM))]
9516 [(set_attr "conds" "clob")
9517 (set_attr "length" "8,12")]
9520 (define_insn "*if_not_move"
9521 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9523 (match_operator 4 "arm_comparison_operator"
9524 [(match_operand 3 "cc_register" "") (const_int 0)])
9525 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9526 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9530 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9531 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9532 [(set_attr "conds" "use")
9533 (set_attr "insn" "mvn")
9534 (set_attr "length" "4,8,8")]
9537 (define_insn "*ifcompare_shift_move"
9538 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9540 (match_operator 6 "arm_comparison_operator"
9541 [(match_operand:SI 4 "s_register_operand" "r,r")
9542 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9543 (match_operator:SI 7 "shift_operator"
9544 [(match_operand:SI 2 "s_register_operand" "r,r")
9545 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9546 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9547 (clobber (reg:CC CC_REGNUM))]
9550 [(set_attr "conds" "clob")
9551 (set_attr "length" "8,12")]
9554 (define_insn "*if_shift_move"
9555 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9557 (match_operator 5 "arm_comparison_operator"
9558 [(match_operand 6 "cc_register" "") (const_int 0)])
9559 (match_operator:SI 4 "shift_operator"
9560 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9561 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9562 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9566 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9567 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9568 [(set_attr "conds" "use")
9569 (set_attr "shift" "2")
9570 (set_attr "length" "4,8,8")
9571 (set_attr "insn" "mov")
9572 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9573 (const_string "alu_shift")
9574 (const_string "alu_shift_reg")))]
9577 (define_insn "*ifcompare_move_shift"
9578 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9580 (match_operator 6 "arm_comparison_operator"
9581 [(match_operand:SI 4 "s_register_operand" "r,r")
9582 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9583 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9584 (match_operator:SI 7 "shift_operator"
9585 [(match_operand:SI 2 "s_register_operand" "r,r")
9586 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9587 (clobber (reg:CC CC_REGNUM))]
9590 [(set_attr "conds" "clob")
9591 (set_attr "length" "8,12")]
9594 (define_insn "*if_move_shift"
9595 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9597 (match_operator 5 "arm_comparison_operator"
9598 [(match_operand 6 "cc_register" "") (const_int 0)])
9599 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9600 (match_operator:SI 4 "shift_operator"
9601 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9602 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9606 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9607 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9608 [(set_attr "conds" "use")
9609 (set_attr "shift" "2")
9610 (set_attr "length" "4,8,8")
9611 (set_attr "insn" "mov")
9612 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9613 (const_string "alu_shift")
9614 (const_string "alu_shift_reg")))]
9617 (define_insn "*ifcompare_shift_shift"
9618 [(set (match_operand:SI 0 "s_register_operand" "=r")
9620 (match_operator 7 "arm_comparison_operator"
9621 [(match_operand:SI 5 "s_register_operand" "r")
9622 (match_operand:SI 6 "arm_add_operand" "rIL")])
9623 (match_operator:SI 8 "shift_operator"
9624 [(match_operand:SI 1 "s_register_operand" "r")
9625 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9626 (match_operator:SI 9 "shift_operator"
9627 [(match_operand:SI 3 "s_register_operand" "r")
9628 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9629 (clobber (reg:CC CC_REGNUM))]
9632 [(set_attr "conds" "clob")
9633 (set_attr "length" "12")]
9636 (define_insn "*if_shift_shift"
9637 [(set (match_operand:SI 0 "s_register_operand" "=r")
9639 (match_operator 5 "arm_comparison_operator"
9640 [(match_operand 8 "cc_register" "") (const_int 0)])
9641 (match_operator:SI 6 "shift_operator"
9642 [(match_operand:SI 1 "s_register_operand" "r")
9643 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9644 (match_operator:SI 7 "shift_operator"
9645 [(match_operand:SI 3 "s_register_operand" "r")
9646 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9648 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9649 [(set_attr "conds" "use")
9650 (set_attr "shift" "1")
9651 (set_attr "length" "8")
9652 (set_attr "insn" "mov")
9653 (set (attr "type") (if_then_else
9654 (and (match_operand 2 "const_int_operand" "")
9655 (match_operand 4 "const_int_operand" ""))
9656 (const_string "alu_shift")
9657 (const_string "alu_shift_reg")))]
9660 (define_insn "*ifcompare_not_arith"
9661 [(set (match_operand:SI 0 "s_register_operand" "=r")
9663 (match_operator 6 "arm_comparison_operator"
9664 [(match_operand:SI 4 "s_register_operand" "r")
9665 (match_operand:SI 5 "arm_add_operand" "rIL")])
9666 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9667 (match_operator:SI 7 "shiftable_operator"
9668 [(match_operand:SI 2 "s_register_operand" "r")
9669 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9670 (clobber (reg:CC CC_REGNUM))]
9673 [(set_attr "conds" "clob")
9674 (set_attr "length" "12")]
9677 (define_insn "*if_not_arith"
9678 [(set (match_operand:SI 0 "s_register_operand" "=r")
9680 (match_operator 5 "arm_comparison_operator"
9681 [(match_operand 4 "cc_register" "") (const_int 0)])
9682 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9683 (match_operator:SI 6 "shiftable_operator"
9684 [(match_operand:SI 2 "s_register_operand" "r")
9685 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9687 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9688 [(set_attr "conds" "use")
9689 (set_attr "insn" "mvn")
9690 (set_attr "length" "8")]
9693 (define_insn "*ifcompare_arith_not"
9694 [(set (match_operand:SI 0 "s_register_operand" "=r")
9696 (match_operator 6 "arm_comparison_operator"
9697 [(match_operand:SI 4 "s_register_operand" "r")
9698 (match_operand:SI 5 "arm_add_operand" "rIL")])
9699 (match_operator:SI 7 "shiftable_operator"
9700 [(match_operand:SI 2 "s_register_operand" "r")
9701 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9702 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9703 (clobber (reg:CC CC_REGNUM))]
9706 [(set_attr "conds" "clob")
9707 (set_attr "length" "12")]
9710 (define_insn "*if_arith_not"
9711 [(set (match_operand:SI 0 "s_register_operand" "=r")
9713 (match_operator 5 "arm_comparison_operator"
9714 [(match_operand 4 "cc_register" "") (const_int 0)])
9715 (match_operator:SI 6 "shiftable_operator"
9716 [(match_operand:SI 2 "s_register_operand" "r")
9717 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9718 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9720 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9721 [(set_attr "conds" "use")
9722 (set_attr "insn" "mvn")
9723 (set_attr "length" "8")]
9726 (define_insn "*ifcompare_neg_move"
9727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9729 (match_operator 5 "arm_comparison_operator"
9730 [(match_operand:SI 3 "s_register_operand" "r,r")
9731 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9732 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9733 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9734 (clobber (reg:CC CC_REGNUM))]
9737 [(set_attr "conds" "clob")
9738 (set_attr "length" "8,12")]
9741 (define_insn "*if_neg_move"
9742 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9744 (match_operator 4 "arm_comparison_operator"
9745 [(match_operand 3 "cc_register" "") (const_int 0)])
9746 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9747 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9751 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9752 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9753 [(set_attr "conds" "use")
9754 (set_attr "length" "4,8,8")]
9757 (define_insn "*ifcompare_move_neg"
9758 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9760 (match_operator 5 "arm_comparison_operator"
9761 [(match_operand:SI 3 "s_register_operand" "r,r")
9762 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9763 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9764 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9765 (clobber (reg:CC CC_REGNUM))]
9768 [(set_attr "conds" "clob")
9769 (set_attr "length" "8,12")]
9772 (define_insn "*if_move_neg"
9773 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9775 (match_operator 4 "arm_comparison_operator"
9776 [(match_operand 3 "cc_register" "") (const_int 0)])
9777 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9778 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9782 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9783 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9784 [(set_attr "conds" "use")
9785 (set_attr "length" "4,8,8")]
9788 (define_insn "*arith_adjacentmem"
9789 [(set (match_operand:SI 0 "s_register_operand" "=r")
9790 (match_operator:SI 1 "shiftable_operator"
9791 [(match_operand:SI 2 "memory_operand" "m")
9792 (match_operand:SI 3 "memory_operand" "m")]))
9793 (clobber (match_scratch:SI 4 "=r"))]
9794 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9800 HOST_WIDE_INT val1 = 0, val2 = 0;
9802 if (REGNO (operands[0]) > REGNO (operands[4]))
9804 ldm[1] = operands[4];
9805 ldm[2] = operands[0];
9809 ldm[1] = operands[0];
9810 ldm[2] = operands[4];
9813 base_reg = XEXP (operands[2], 0);
9815 if (!REG_P (base_reg))
9817 val1 = INTVAL (XEXP (base_reg, 1));
9818 base_reg = XEXP (base_reg, 0);
9821 if (!REG_P (XEXP (operands[3], 0)))
9822 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9824 arith[0] = operands[0];
9825 arith[3] = operands[1];
9839 if (val1 !=0 && val2 != 0)
9843 if (val1 == 4 || val2 == 4)
9844 /* Other val must be 8, since we know they are adjacent and neither
9846 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9847 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9849 ldm[0] = ops[0] = operands[4];
9851 ops[2] = GEN_INT (val1);
9852 output_add_immediate (ops);
9854 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9856 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9860 /* Offset is out of range for a single add, so use two ldr. */
9863 ops[2] = GEN_INT (val1);
9864 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9866 ops[2] = GEN_INT (val2);
9867 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9873 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9875 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9880 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9882 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9884 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9887 [(set_attr "length" "12")
9888 (set_attr "predicable" "yes")
9889 (set_attr "type" "load1")]
9892 ; This pattern is never tried by combine, so do it as a peephole
9895 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9896 (match_operand:SI 1 "arm_general_register_operand" ""))
9897 (set (reg:CC CC_REGNUM)
9898 (compare:CC (match_dup 1) (const_int 0)))]
9900 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9901 (set (match_dup 0) (match_dup 1))])]
9906 [(set (match_operand:SI 0 "s_register_operand" "")
9907 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9909 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9910 [(match_operand:SI 3 "s_register_operand" "")
9911 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9912 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9914 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9915 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9920 ;; This split can be used because CC_Z mode implies that the following
9921 ;; branch will be an equality, or an unsigned inequality, so the sign
9922 ;; extension is not needed.
9925 [(set (reg:CC_Z CC_REGNUM)
9927 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9929 (match_operand 1 "const_int_operand" "")))
9930 (clobber (match_scratch:SI 2 ""))]
9932 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9933 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9934 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9935 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9937 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9940 ;; ??? Check the patterns above for Thumb-2 usefulness
9942 (define_expand "prologue"
9943 [(clobber (const_int 0))]
9946 arm_expand_prologue ();
9948 thumb1_expand_prologue ();
9953 (define_expand "epilogue"
9954 [(clobber (const_int 0))]
9957 if (crtl->calls_eh_return)
9958 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9960 thumb1_expand_epilogue ();
9961 else if (USE_RETURN_INSN (FALSE))
9963 emit_jump_insn (gen_return ());
9966 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9968 gen_rtx_RETURN (VOIDmode)),
9974 ;; Note - although unspec_volatile's USE all hard registers,
9975 ;; USEs are ignored after relaod has completed. Thus we need
9976 ;; to add an unspec of the link register to ensure that flow
9977 ;; does not think that it is unused by the sibcall branch that
9978 ;; will replace the standard function epilogue.
9979 (define_insn "sibcall_epilogue"
9980 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9981 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9984 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9985 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9986 return arm_output_epilogue (next_nonnote_insn (insn));
9988 ;; Length is absolute worst case
9989 [(set_attr "length" "44")
9990 (set_attr "type" "block")
9991 ;; We don't clobber the conditions, but the potential length of this
9992 ;; operation is sufficient to make conditionalizing the sequence
9993 ;; unlikely to be profitable.
9994 (set_attr "conds" "clob")]
9997 (define_insn "*epilogue_insns"
9998 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10002 return arm_output_epilogue (NULL);
10003 else /* TARGET_THUMB1 */
10004 return thumb_unexpanded_epilogue ();
10006 ; Length is absolute worst case
10007 [(set_attr "length" "44")
10008 (set_attr "type" "block")
10009 ;; We don't clobber the conditions, but the potential length of this
10010 ;; operation is sufficient to make conditionalizing the sequence
10011 ;; unlikely to be profitable.
10012 (set_attr "conds" "clob")]
10015 (define_expand "eh_epilogue"
10016 [(use (match_operand:SI 0 "register_operand" ""))
10017 (use (match_operand:SI 1 "register_operand" ""))
10018 (use (match_operand:SI 2 "register_operand" ""))]
10022 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10023 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10025 rtx ra = gen_rtx_REG (Pmode, 2);
10027 emit_move_insn (ra, operands[2]);
10030 /* This is a hack -- we may have crystalized the function type too
10032 cfun->machine->func_type = 0;
10036 ;; This split is only used during output to reduce the number of patterns
10037 ;; that need assembler instructions adding to them. We allowed the setting
10038 ;; of the conditions to be implicit during rtl generation so that
10039 ;; the conditional compare patterns would work. However this conflicts to
10040 ;; some extent with the conditional data operations, so we have to split them
10043 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10044 ;; conditional execution sufficient?
10047 [(set (match_operand:SI 0 "s_register_operand" "")
10048 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10049 [(match_operand 2 "" "") (match_operand 3 "" "")])
10051 (match_operand 4 "" "")))
10052 (clobber (reg:CC CC_REGNUM))]
10053 "TARGET_ARM && reload_completed"
10054 [(set (match_dup 5) (match_dup 6))
10055 (cond_exec (match_dup 7)
10056 (set (match_dup 0) (match_dup 4)))]
10059 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10060 operands[2], operands[3]);
10061 enum rtx_code rc = GET_CODE (operands[1]);
10063 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10064 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10065 if (mode == CCFPmode || mode == CCFPEmode)
10066 rc = reverse_condition_maybe_unordered (rc);
10068 rc = reverse_condition (rc);
10070 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10075 [(set (match_operand:SI 0 "s_register_operand" "")
10076 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10077 [(match_operand 2 "" "") (match_operand 3 "" "")])
10078 (match_operand 4 "" "")
10080 (clobber (reg:CC CC_REGNUM))]
10081 "TARGET_ARM && reload_completed"
10082 [(set (match_dup 5) (match_dup 6))
10083 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10084 (set (match_dup 0) (match_dup 4)))]
10087 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10088 operands[2], operands[3]);
10090 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10091 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10096 [(set (match_operand:SI 0 "s_register_operand" "")
10097 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10098 [(match_operand 2 "" "") (match_operand 3 "" "")])
10099 (match_operand 4 "" "")
10100 (match_operand 5 "" "")))
10101 (clobber (reg:CC CC_REGNUM))]
10102 "TARGET_ARM && reload_completed"
10103 [(set (match_dup 6) (match_dup 7))
10104 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10105 (set (match_dup 0) (match_dup 4)))
10106 (cond_exec (match_dup 8)
10107 (set (match_dup 0) (match_dup 5)))]
10110 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10111 operands[2], operands[3]);
10112 enum rtx_code rc = GET_CODE (operands[1]);
10114 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10115 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10116 if (mode == CCFPmode || mode == CCFPEmode)
10117 rc = reverse_condition_maybe_unordered (rc);
10119 rc = reverse_condition (rc);
10121 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10126 [(set (match_operand:SI 0 "s_register_operand" "")
10127 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10128 [(match_operand:SI 2 "s_register_operand" "")
10129 (match_operand:SI 3 "arm_add_operand" "")])
10130 (match_operand:SI 4 "arm_rhs_operand" "")
10132 (match_operand:SI 5 "s_register_operand" ""))))
10133 (clobber (reg:CC CC_REGNUM))]
10134 "TARGET_ARM && reload_completed"
10135 [(set (match_dup 6) (match_dup 7))
10136 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10137 (set (match_dup 0) (match_dup 4)))
10138 (cond_exec (match_dup 8)
10139 (set (match_dup 0) (not:SI (match_dup 5))))]
10142 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10143 operands[2], operands[3]);
10144 enum rtx_code rc = GET_CODE (operands[1]);
10146 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10147 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10148 if (mode == CCFPmode || mode == CCFPEmode)
10149 rc = reverse_condition_maybe_unordered (rc);
10151 rc = reverse_condition (rc);
10153 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10157 (define_insn "*cond_move_not"
10158 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10159 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10160 [(match_operand 3 "cc_register" "") (const_int 0)])
10161 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10163 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10167 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10168 [(set_attr "conds" "use")
10169 (set_attr "insn" "mvn")
10170 (set_attr "length" "4,8")]
10173 ;; The next two patterns occur when an AND operation is followed by a
10174 ;; scc insn sequence
10176 (define_insn "*sign_extract_onebit"
10177 [(set (match_operand:SI 0 "s_register_operand" "=r")
10178 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10180 (match_operand:SI 2 "const_int_operand" "n")))
10181 (clobber (reg:CC CC_REGNUM))]
10184 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10185 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10186 return \"mvnne\\t%0, #0\";
10188 [(set_attr "conds" "clob")
10189 (set_attr "length" "8")]
10192 (define_insn "*not_signextract_onebit"
10193 [(set (match_operand:SI 0 "s_register_operand" "=r")
10195 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10197 (match_operand:SI 2 "const_int_operand" "n"))))
10198 (clobber (reg:CC CC_REGNUM))]
10201 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10202 output_asm_insn (\"tst\\t%1, %2\", operands);
10203 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10204 return \"movne\\t%0, #0\";
10206 [(set_attr "conds" "clob")
10207 (set_attr "length" "12")]
10209 ;; ??? The above patterns need auditing for Thumb-2
10211 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10212 ;; expressions. For simplicity, the first register is also in the unspec
10214 (define_insn "*push_multi"
10215 [(match_parallel 2 "multi_register_push"
10216 [(set (match_operand:BLK 0 "memory_operand" "=m")
10217 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10218 UNSPEC_PUSH_MULT))])]
10222 int num_saves = XVECLEN (operands[2], 0);
10224 /* For the StrongARM at least it is faster to
10225 use STR to store only a single register.
10226 In Thumb mode always use push, and the assembler will pick
10227 something appropriate. */
10228 if (num_saves == 1 && TARGET_ARM)
10229 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10236 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10238 strcpy (pattern, \"push\\t{%1\");
10240 for (i = 1; i < num_saves; i++)
10242 strcat (pattern, \", %|\");
10244 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10247 strcat (pattern, \"}\");
10248 output_asm_insn (pattern, operands);
10253 [(set_attr "type" "store4")]
10256 (define_insn "stack_tie"
10257 [(set (mem:BLK (scratch))
10258 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10259 (match_operand:SI 1 "s_register_operand" "rk")]
10263 [(set_attr "length" "0")]
10266 ;; Similarly for the floating point registers
10267 (define_insn "*push_fp_multi"
10268 [(match_parallel 2 "multi_register_push"
10269 [(set (match_operand:BLK 0 "memory_operand" "=m")
10270 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10271 UNSPEC_PUSH_MULT))])]
10272 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10277 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10278 output_asm_insn (pattern, operands);
10281 [(set_attr "type" "f_store")]
10284 ;; Special patterns for dealing with the constant pool
10286 (define_insn "align_4"
10287 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10290 assemble_align (32);
10295 (define_insn "align_8"
10296 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10299 assemble_align (64);
10304 (define_insn "consttable_end"
10305 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10308 making_const_table = FALSE;
10313 (define_insn "consttable_1"
10314 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10317 making_const_table = TRUE;
10318 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10319 assemble_zeros (3);
10322 [(set_attr "length" "4")]
10325 (define_insn "consttable_2"
10326 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10329 making_const_table = TRUE;
10330 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10331 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10332 assemble_zeros (2);
10335 [(set_attr "length" "4")]
10338 (define_insn "consttable_4"
10339 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10343 rtx x = operands[0];
10344 making_const_table = TRUE;
10345 switch (GET_MODE_CLASS (GET_MODE (x)))
10348 if (GET_MODE (x) == HFmode)
10349 arm_emit_fp16_const (x);
10353 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10354 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10358 /* XXX: Sometimes gcc does something really dumb and ends up with
10359 a HIGH in a constant pool entry, usually because it's trying to
10360 load into a VFP register. We know this will always be used in
10361 combination with a LO_SUM which ignores the high bits, so just
10362 strip off the HIGH. */
10363 if (GET_CODE (x) == HIGH)
10365 assemble_integer (x, 4, BITS_PER_WORD, 1);
10366 mark_symbol_refs_as_used (x);
10371 [(set_attr "length" "4")]
10374 (define_insn "consttable_8"
10375 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10379 making_const_table = TRUE;
10380 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10385 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10386 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10390 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10395 [(set_attr "length" "8")]
10398 (define_insn "consttable_16"
10399 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10403 making_const_table = TRUE;
10404 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10409 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10410 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10414 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10419 [(set_attr "length" "16")]
10422 ;; Miscellaneous Thumb patterns
10424 (define_expand "tablejump"
10425 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10426 (use (label_ref (match_operand 1 "" "")))])]
10431 /* Hopefully, CSE will eliminate this copy. */
10432 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10433 rtx reg2 = gen_reg_rtx (SImode);
10435 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10436 operands[0] = reg2;
10441 ;; NB never uses BX.
10442 (define_insn "*thumb1_tablejump"
10443 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10444 (use (label_ref (match_operand 1 "" "")))]
10447 [(set_attr "length" "2")]
10450 ;; V5 Instructions,
10452 (define_insn "clzsi2"
10453 [(set (match_operand:SI 0 "s_register_operand" "=r")
10454 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10455 "TARGET_32BIT && arm_arch5"
10457 [(set_attr "predicable" "yes")
10458 (set_attr "insn" "clz")])
10460 (define_insn "rbitsi2"
10461 [(set (match_operand:SI 0 "s_register_operand" "=r")
10462 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10463 "TARGET_32BIT && arm_arch_thumb2"
10465 [(set_attr "predicable" "yes")
10466 (set_attr "insn" "clz")])
10468 (define_expand "ctzsi2"
10469 [(set (match_operand:SI 0 "s_register_operand" "")
10470 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10471 "TARGET_32BIT && arm_arch_thumb2"
10474 rtx tmp = gen_reg_rtx (SImode);
10475 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10476 emit_insn (gen_clzsi2 (operands[0], tmp));
10482 ;; V5E instructions.
10484 (define_insn "prefetch"
10485 [(prefetch (match_operand:SI 0 "address_operand" "p")
10486 (match_operand:SI 1 "" "")
10487 (match_operand:SI 2 "" ""))]
10488 "TARGET_32BIT && arm_arch5e"
10491 ;; General predication pattern
10494 [(match_operator 0 "arm_comparison_operator"
10495 [(match_operand 1 "cc_register" "")
10501 (define_insn "prologue_use"
10502 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10504 "%@ %0 needed for prologue"
10505 [(set_attr "length" "0")]
10509 ;; Patterns for exception handling
10511 (define_expand "eh_return"
10512 [(use (match_operand 0 "general_operand" ""))]
10517 emit_insn (gen_arm_eh_return (operands[0]));
10519 emit_insn (gen_thumb_eh_return (operands[0]));
10524 ;; We can't expand this before we know where the link register is stored.
10525 (define_insn_and_split "arm_eh_return"
10526 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10528 (clobber (match_scratch:SI 1 "=&r"))]
10531 "&& reload_completed"
10535 arm_set_return_address (operands[0], operands[1]);
10540 (define_insn_and_split "thumb_eh_return"
10541 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10543 (clobber (match_scratch:SI 1 "=&l"))]
10546 "&& reload_completed"
10550 thumb_set_return_address (operands[0], operands[1]);
10558 (define_insn "load_tp_hard"
10559 [(set (match_operand:SI 0 "register_operand" "=r")
10560 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10562 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10563 [(set_attr "predicable" "yes")]
10566 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10567 (define_insn "load_tp_soft"
10568 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10569 (clobber (reg:SI LR_REGNUM))
10570 (clobber (reg:SI IP_REGNUM))
10571 (clobber (reg:CC CC_REGNUM))]
10573 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10574 [(set_attr "conds" "clob")]
10577 (define_insn "*arm_movtas_ze"
10578 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10581 (match_operand:SI 1 "const_int_operand" ""))]
10584 [(set_attr "predicable" "yes")
10585 (set_attr "length" "4")]
10588 (define_insn "*arm_rev"
10589 [(set (match_operand:SI 0 "s_register_operand" "=r")
10590 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10591 "TARGET_32BIT && arm_arch6"
10593 [(set_attr "predicable" "yes")
10594 (set_attr "length" "4")]
10597 (define_insn "*thumb1_rev"
10598 [(set (match_operand:SI 0 "s_register_operand" "=l")
10599 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10600 "TARGET_THUMB1 && arm_arch6"
10602 [(set_attr "length" "2")]
10605 (define_expand "arm_legacy_rev"
10606 [(set (match_operand:SI 2 "s_register_operand" "")
10607 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10611 (lshiftrt:SI (match_dup 2)
10613 (set (match_operand:SI 3 "s_register_operand" "")
10614 (rotatert:SI (match_dup 1)
10617 (and:SI (match_dup 2)
10618 (const_int -65281)))
10619 (set (match_operand:SI 0 "s_register_operand" "")
10620 (xor:SI (match_dup 3)
10626 ;; Reuse temporaries to keep register pressure down.
10627 (define_expand "thumb_legacy_rev"
10628 [(set (match_operand:SI 2 "s_register_operand" "")
10629 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10631 (set (match_operand:SI 3 "s_register_operand" "")
10632 (lshiftrt:SI (match_dup 1)
10635 (ior:SI (match_dup 3)
10637 (set (match_operand:SI 4 "s_register_operand" "")
10639 (set (match_operand:SI 5 "s_register_operand" "")
10640 (rotatert:SI (match_dup 1)
10643 (ashift:SI (match_dup 5)
10646 (lshiftrt:SI (match_dup 5)
10649 (ior:SI (match_dup 5)
10652 (rotatert:SI (match_dup 5)
10654 (set (match_operand:SI 0 "s_register_operand" "")
10655 (ior:SI (match_dup 5)
10661 (define_expand "bswapsi2"
10662 [(set (match_operand:SI 0 "s_register_operand" "=r")
10663 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10664 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10668 rtx op2 = gen_reg_rtx (SImode);
10669 rtx op3 = gen_reg_rtx (SImode);
10673 rtx op4 = gen_reg_rtx (SImode);
10674 rtx op5 = gen_reg_rtx (SImode);
10676 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10677 op2, op3, op4, op5));
10681 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10690 ;; Load the load/store multiple patterns
10691 (include "ldmstm.md")
10692 ;; Load the FPA co-processor patterns
10694 ;; Load the Maverick co-processor patterns
10695 (include "cirrus.md")
10696 ;; Vector bits common to IWMMXT and Neon
10697 (include "vec-common.md")
10698 ;; Load the Intel Wireless Multimedia Extension patterns
10699 (include "iwmmxt.md")
10700 ;; Load the VFP co-processor patterns
10702 ;; Thumb-2 patterns
10703 (include "thumb2.md")
10705 (include "neon.md")
10706 ;; Synchronization Primitives
10707 (include "sync.md")