1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
106 (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
110 ;; UNSPEC_VOLATILE Usage:
113 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
115 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
116 ; instruction epilogue sequence that isn't expanded
117 ; into normal RTL. Used for both normal and sibcall
119 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
120 ; for inlined constants.
121 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
123 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
125 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
127 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
129 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
131 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
133 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
134 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
135 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
136 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
137 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
138 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
139 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141 (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap.
142 (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set.
143 (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op>
144 (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op>
145 (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op>
149 ;;---------------------------------------------------------------------------
152 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
153 ; generating ARM code. This is used to control the length of some insn
154 ; patterns that share the same RTL in both ARM and Thumb code.
155 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
157 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
158 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
160 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
161 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
163 ;; Operand number of an input operand that is shifted. Zero if the
164 ;; given instruction does not shift one of its input operands.
165 (define_attr "shift" "" (const_int 0))
167 ; Floating Point Unit. If we only have floating point emulation, then there
168 ; is no point in scheduling the floating point insns. (Well, for best
169 ; performance we should try and group them together).
170 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
171 (const (symbol_ref "arm_fpu_attr")))
173 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
174 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
180 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
181 (const_string "none"))
183 ; LENGTH of an instruction (in bytes)
184 (define_attr "length" ""
185 (cond [(not (eq_attr "sync_memory" "none"))
186 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
189 ; The architecture which supports the instruction (or alternative).
190 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
191 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
192 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
193 ; arm_arch6. This attribute is used to compute attribute "enabled",
194 ; use type "any" to enable an alternative in all cases.
195 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
196 (const_string "any"))
198 (define_attr "arch_enabled" "no,yes"
199 (cond [(eq_attr "arch" "any")
202 (and (eq_attr "arch" "a")
203 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
206 (and (eq_attr "arch" "t")
207 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
210 (and (eq_attr "arch" "t1")
211 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
214 (and (eq_attr "arch" "t2")
215 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
218 (and (eq_attr "arch" "32")
219 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
222 (and (eq_attr "arch" "v6")
223 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
226 (and (eq_attr "arch" "nov6")
227 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
228 (const_string "yes")]
229 (const_string "no")))
231 ; Allows an insn to disable certain alternatives for reasons other than
233 (define_attr "insn_enabled" "no,yes"
234 (const_string "yes"))
236 ; Enable all alternatives that are both arch_enabled and insn_enabled.
237 (define_attr "enabled" "no,yes"
238 (if_then_else (eq_attr "insn_enabled" "yes")
239 (if_then_else (eq_attr "arch_enabled" "yes")
242 (const_string "no")))
244 ; POOL_RANGE is how far away from a constant pool entry that this insn
245 ; can be placed. If the distance is zero, then this insn will never
246 ; reference the pool.
247 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
248 ; before its address.
249 (define_attr "arm_pool_range" "" (const_int 0))
250 (define_attr "thumb2_pool_range" "" (const_int 0))
251 (define_attr "arm_neg_pool_range" "" (const_int 0))
252 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
254 (define_attr "pool_range" ""
255 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
256 (attr "arm_pool_range")))
257 (define_attr "neg_pool_range" ""
258 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
259 (attr "arm_neg_pool_range")))
261 ; An assembler sequence may clobber the condition codes without us knowing.
262 ; If such an insn references the pool, then we have no way of knowing how,
263 ; so use the most conservative value for pool_range.
264 (define_asm_attributes
265 [(set_attr "conds" "clob")
266 (set_attr "length" "4")
267 (set_attr "pool_range" "250")])
269 ;; The instruction used to implement a particular pattern. This
270 ;; information is used by pipeline descriptions to provide accurate
271 ;; scheduling information.
274 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
275 (const_string "other"))
277 ; TYPE attribute is used to detect floating point instructions which, if
278 ; running on a co-processor can run in parallel with other, basic instructions
279 ; If write-buffer scheduling is enabled then it can also be used in the
280 ; scheduling of writes.
282 ; Classification of each insn
283 ; Note: vfp.md has different meanings for some of these, and some further
284 ; types as well. See that file for details.
285 ; alu any alu instruction that doesn't hit memory or fp
286 ; regs or have a shifted source operand
287 ; alu_shift any data instruction that doesn't hit memory or fp
288 ; regs, but has a source operand shifted by a constant
289 ; alu_shift_reg any data instruction that doesn't hit memory or fp
290 ; regs, but has a source operand shifted by a register value
291 ; mult a multiply instruction
292 ; block blockage insn, this blocks all functional units
293 ; float a floating point arithmetic operation (subject to expansion)
294 ; fdivd DFmode floating point division
295 ; fdivs SFmode floating point division
296 ; fmul Floating point multiply
297 ; ffmul Fast floating point multiply
298 ; farith Floating point arithmetic (4 cycle)
299 ; ffarith Fast floating point arithmetic (2 cycle)
300 ; float_em a floating point arithmetic operation that is normally emulated
301 ; even on a machine with an fpa.
302 ; f_load a floating point load from memory
303 ; f_store a floating point store to memory
304 ; f_load[sd] single/double load from memory
305 ; f_store[sd] single/double store to memory
306 ; f_flag a transfer of co-processor flags to the CPSR
307 ; f_mem_r a transfer of a floating point register to a real reg via mem
308 ; r_mem_f the reverse of f_mem_r
309 ; f_2_r fast transfer float to arm (no memory needed)
310 ; r_2_f fast transfer arm to float
311 ; f_cvt convert floating<->integral
313 ; call a subroutine call
314 ; load_byte load byte(s) from memory to arm registers
315 ; load1 load 1 word from memory to arm registers
316 ; load2 load 2 words from memory to arm registers
317 ; load3 load 3 words from memory to arm registers
318 ; load4 load 4 words from memory to arm registers
319 ; store store 1 word to memory from arm registers
320 ; store2 store 2 words
321 ; store3 store 3 words
322 ; store4 store 4 (or more) words
323 ; Additions for Cirrus Maverick co-processor:
324 ; mav_farith Floating point arithmetic (4 cycle)
325 ; mav_dmult Double multiplies (7 cycle)
329 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
331 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
332 (const_string "mult")
333 (const_string "alu")))
335 ; Load scheduling, set from the arm_ld_sched variable
336 ; initialized by arm_option_override()
337 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
339 ;; Classification of NEON instructions for scheduling purposes.
340 ;; Do not set this attribute and the "type" attribute together in
341 ;; any one instruction pattern.
342 (define_attr "neon_type"
353 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
354 neon_mul_qqq_8_16_32_ddd_32,\
355 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
356 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
358 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
359 neon_mla_qqq_32_qqd_32_scalar,\
360 neon_mul_ddd_16_scalar_32_16_long_scalar,\
361 neon_mul_qqd_32_scalar,\
362 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
367 neon_vqshl_vrshl_vqrshl_qqq,\
369 neon_fp_vadd_ddd_vabs_dd,\
370 neon_fp_vadd_qqq_vabs_qq,\
376 neon_fp_vmla_ddd_scalar,\
377 neon_fp_vmla_qqq_scalar,\
378 neon_fp_vrecps_vrsqrts_ddd,\
379 neon_fp_vrecps_vrsqrts_qqq,\
387 neon_vld2_2_regs_vld1_vld2_all_lanes,\
390 neon_vst1_1_2_regs_vst2_2_regs,\
392 neon_vst2_4_regs_vst3_vst4,\
394 neon_vld1_vld2_lane,\
395 neon_vld3_vld4_lane,\
396 neon_vst1_vst2_lane,\
397 neon_vst3_vst4_lane,\
398 neon_vld3_vld4_all_lanes,\
406 (const_string "none"))
408 ; condition codes: this one is used by final_prescan_insn to speed up
409 ; conditionalizing instructions. It saves having to scan the rtl to see if
410 ; it uses or alters the condition codes.
412 ; USE means that the condition codes are used by the insn in the process of
413 ; outputting code, this means (at present) that we can't use the insn in
416 ; SET means that the purpose of the insn is to set the condition codes in a
417 ; well defined manner.
419 ; CLOB means that the condition codes are altered in an undefined manner, if
420 ; they are altered at all
422 ; UNCONDITIONAL means the instruction can not be conditionally executed and
423 ; that the instruction does not use or alter the condition codes.
425 ; NOCOND means that the instruction does not use or alter the condition
426 ; codes but can be converted into a conditionally exectuted instruction.
428 (define_attr "conds" "use,set,clob,unconditional,nocond"
430 (ior (eq_attr "is_thumb1" "yes")
431 (eq_attr "type" "call"))
432 (const_string "clob")
433 (if_then_else (eq_attr "neon_type" "none")
434 (const_string "nocond")
435 (const_string "unconditional"))))
437 ; Predicable means that the insn can be conditionally executed based on
438 ; an automatically added predicate (additional patterns are generated by
439 ; gen...). We default to 'no' because no Thumb patterns match this rule
440 ; and not all ARM patterns do.
441 (define_attr "predicable" "no,yes" (const_string "no"))
443 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
444 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
445 ; suffer blockages enough to warrant modelling this (and it can adversely
446 ; affect the schedule).
447 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
449 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
450 ; to stall the processor. Used with model_wbuf above.
451 (define_attr "write_conflict" "no,yes"
452 (if_then_else (eq_attr "type"
453 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
455 (const_string "no")))
457 ; Classify the insns into those that take one cycle and those that take more
458 ; than one on the main cpu execution unit.
459 (define_attr "core_cycles" "single,multi"
460 (if_then_else (eq_attr "type"
461 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
462 (const_string "single")
463 (const_string "multi")))
465 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
466 ;; distant label. Only applicable to Thumb code.
467 (define_attr "far_jump" "yes,no" (const_string "no"))
470 ;; The number of machine instructions this pattern expands to.
471 ;; Used for Thumb-2 conditional execution.
472 (define_attr "ce_count" "" (const_int 1))
474 ;;---------------------------------------------------------------------------
477 (include "iterators.md")
479 ;;---------------------------------------------------------------------------
482 (include "predicates.md")
483 (include "constraints.md")
485 ;;---------------------------------------------------------------------------
486 ;; Pipeline descriptions
488 ;; Processor type. This is created automatically from arm-cores.def.
489 (include "arm-tune.md")
491 (define_attr "tune_cortexr4" "yes,no"
493 (eq_attr "tune" "cortexr4,cortexr4f")
495 (const_string "no"))))
497 ;; True if the generic scheduling description should be used.
499 (define_attr "generic_sched" "yes,no"
501 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
502 (eq_attr "tune_cortexr4" "yes"))
504 (const_string "yes"))))
506 (define_attr "generic_vfp" "yes,no"
508 (and (eq_attr "fpu" "vfp")
509 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
510 (eq_attr "tune_cortexr4" "no"))
512 (const_string "no"))))
514 (include "arm-generic.md")
515 (include "arm926ejs.md")
516 (include "arm1020e.md")
517 (include "arm1026ejs.md")
518 (include "arm1136jfs.md")
519 (include "cortex-a5.md")
520 (include "cortex-a8.md")
521 (include "cortex-a9.md")
522 (include "cortex-r4.md")
523 (include "cortex-r4f.md")
524 (include "cortex-m4.md")
525 (include "cortex-m4-fpu.md")
529 ;;---------------------------------------------------------------------------
534 ;; Note: For DImode insns, there is normally no reason why operands should
535 ;; not be in the same register, what we don't want is for something being
536 ;; written to partially overlap something that is an input.
537 ;; Cirrus 64bit additions should not be split because we have a native
538 ;; 64bit addition instructions.
540 (define_expand "adddi3"
542 [(set (match_operand:DI 0 "s_register_operand" "")
543 (plus:DI (match_operand:DI 1 "s_register_operand" "")
544 (match_operand:DI 2 "s_register_operand" "")))
545 (clobber (reg:CC CC_REGNUM))])]
548 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
550 if (!cirrus_fp_register (operands[0], DImode))
551 operands[0] = force_reg (DImode, operands[0]);
552 if (!cirrus_fp_register (operands[1], DImode))
553 operands[1] = force_reg (DImode, operands[1]);
554 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
560 if (GET_CODE (operands[1]) != REG)
561 operands[1] = force_reg (DImode, operands[1]);
562 if (GET_CODE (operands[2]) != REG)
563 operands[2] = force_reg (DImode, operands[2]);
568 (define_insn "*thumb1_adddi3"
569 [(set (match_operand:DI 0 "register_operand" "=l")
570 (plus:DI (match_operand:DI 1 "register_operand" "%0")
571 (match_operand:DI 2 "register_operand" "l")))
572 (clobber (reg:CC CC_REGNUM))
575 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
576 [(set_attr "length" "4")]
579 (define_insn_and_split "*arm_adddi3"
580 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
581 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
582 (match_operand:DI 2 "s_register_operand" "r, 0")))
583 (clobber (reg:CC CC_REGNUM))]
584 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
586 "TARGET_32BIT && reload_completed
587 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
588 [(parallel [(set (reg:CC_C CC_REGNUM)
589 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
591 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
592 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
593 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
596 operands[3] = gen_highpart (SImode, operands[0]);
597 operands[0] = gen_lowpart (SImode, operands[0]);
598 operands[4] = gen_highpart (SImode, operands[1]);
599 operands[1] = gen_lowpart (SImode, operands[1]);
600 operands[5] = gen_highpart (SImode, operands[2]);
601 operands[2] = gen_lowpart (SImode, operands[2]);
603 [(set_attr "conds" "clob")
604 (set_attr "length" "8")]
607 (define_insn_and_split "*adddi_sesidi_di"
608 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
609 (plus:DI (sign_extend:DI
610 (match_operand:SI 2 "s_register_operand" "r,r"))
611 (match_operand:DI 1 "s_register_operand" "0,r")))
612 (clobber (reg:CC CC_REGNUM))]
613 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
615 "TARGET_32BIT && reload_completed"
616 [(parallel [(set (reg:CC_C CC_REGNUM)
617 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
619 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
620 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
623 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
626 operands[3] = gen_highpart (SImode, operands[0]);
627 operands[0] = gen_lowpart (SImode, operands[0]);
628 operands[4] = gen_highpart (SImode, operands[1]);
629 operands[1] = gen_lowpart (SImode, operands[1]);
630 operands[2] = gen_lowpart (SImode, operands[2]);
632 [(set_attr "conds" "clob")
633 (set_attr "length" "8")]
636 (define_insn_and_split "*adddi_zesidi_di"
637 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
638 (plus:DI (zero_extend:DI
639 (match_operand:SI 2 "s_register_operand" "r,r"))
640 (match_operand:DI 1 "s_register_operand" "0,r")))
641 (clobber (reg:CC CC_REGNUM))]
642 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
644 "TARGET_32BIT && reload_completed"
645 [(parallel [(set (reg:CC_C CC_REGNUM)
646 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
648 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
649 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
650 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
653 operands[3] = gen_highpart (SImode, operands[0]);
654 operands[0] = gen_lowpart (SImode, operands[0]);
655 operands[4] = gen_highpart (SImode, operands[1]);
656 operands[1] = gen_lowpart (SImode, operands[1]);
657 operands[2] = gen_lowpart (SImode, operands[2]);
659 [(set_attr "conds" "clob")
660 (set_attr "length" "8")]
663 (define_expand "addsi3"
664 [(set (match_operand:SI 0 "s_register_operand" "")
665 (plus:SI (match_operand:SI 1 "s_register_operand" "")
666 (match_operand:SI 2 "reg_or_int_operand" "")))]
669 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
671 arm_split_constant (PLUS, SImode, NULL_RTX,
672 INTVAL (operands[2]), operands[0], operands[1],
673 optimize && can_create_pseudo_p ());
679 ; If there is a scratch available, this will be faster than synthesizing the
682 [(match_scratch:SI 3 "r")
683 (set (match_operand:SI 0 "arm_general_register_operand" "")
684 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
685 (match_operand:SI 2 "const_int_operand" "")))]
687 !(const_ok_for_arm (INTVAL (operands[2]))
688 || const_ok_for_arm (-INTVAL (operands[2])))
689 && const_ok_for_arm (~INTVAL (operands[2]))"
690 [(set (match_dup 3) (match_dup 2))
691 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
695 ;; The r/r/k alternative is required when reloading the address
696 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
697 ;; put the duplicated register first, and not try the commutative version.
698 (define_insn_and_split "*arm_addsi3"
699 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
700 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
701 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
711 && GET_CODE (operands[2]) == CONST_INT
712 && !(const_ok_for_arm (INTVAL (operands[2]))
713 || const_ok_for_arm (-INTVAL (operands[2])))
714 && (reload_completed || !arm_eliminable_register (operands[1]))"
715 [(clobber (const_int 0))]
717 arm_split_constant (PLUS, SImode, curr_insn,
718 INTVAL (operands[2]), operands[0],
722 [(set_attr "length" "4,4,4,4,4,16")
723 (set_attr "predicable" "yes")]
726 (define_insn_and_split "*thumb1_addsi3"
727 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
728 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
729 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
732 static const char * const asms[] =
734 \"add\\t%0, %0, %2\",
735 \"sub\\t%0, %0, #%n2\",
736 \"add\\t%0, %1, %2\",
737 \"add\\t%0, %0, %2\",
738 \"add\\t%0, %0, %2\",
739 \"add\\t%0, %1, %2\",
740 \"add\\t%0, %1, %2\",
745 if ((which_alternative == 2 || which_alternative == 6)
746 && GET_CODE (operands[2]) == CONST_INT
747 && INTVAL (operands[2]) < 0)
748 return \"sub\\t%0, %1, #%n2\";
749 return asms[which_alternative];
751 "&& reload_completed && CONST_INT_P (operands[2])
752 && ((operands[1] != stack_pointer_rtx
753 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
754 || (operands[1] == stack_pointer_rtx
755 && INTVAL (operands[2]) > 1020))"
756 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
757 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
759 HOST_WIDE_INT offset = INTVAL (operands[2]);
760 if (operands[1] == stack_pointer_rtx)
766 else if (offset < -255)
769 operands[3] = GEN_INT (offset);
770 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
772 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
775 ;; Reloading and elimination of the frame pointer can
776 ;; sometimes cause this optimization to be missed.
778 [(set (match_operand:SI 0 "arm_general_register_operand" "")
779 (match_operand:SI 1 "const_int_operand" ""))
781 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
783 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
784 && (INTVAL (operands[1]) & 3) == 0"
785 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
789 (define_insn "*addsi3_compare0"
790 [(set (reg:CC_NOOV CC_REGNUM)
792 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
793 (match_operand:SI 2 "arm_add_operand" "rI,L"))
795 (set (match_operand:SI 0 "s_register_operand" "=r,r")
796 (plus:SI (match_dup 1) (match_dup 2)))]
800 sub%.\\t%0, %1, #%n2"
801 [(set_attr "conds" "set")]
804 (define_insn "*addsi3_compare0_scratch"
805 [(set (reg:CC_NOOV CC_REGNUM)
807 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
808 (match_operand:SI 1 "arm_add_operand" "rI,L"))
814 [(set_attr "conds" "set")]
817 (define_insn "*compare_negsi_si"
818 [(set (reg:CC_Z CC_REGNUM)
820 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
821 (match_operand:SI 1 "s_register_operand" "r")))]
824 [(set_attr "conds" "set")]
827 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
828 ;; addend is a constant.
829 (define_insn "*cmpsi2_addneg"
830 [(set (reg:CC CC_REGNUM)
832 (match_operand:SI 1 "s_register_operand" "r,r")
833 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
834 (set (match_operand:SI 0 "s_register_operand" "=r,r")
835 (plus:SI (match_dup 1)
836 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
837 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
840 sub%.\\t%0, %1, #%n3"
841 [(set_attr "conds" "set")]
844 ;; Convert the sequence
846 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
850 ;; bcs dest ((unsigned)rn >= 1)
851 ;; similarly for the beq variant using bcc.
852 ;; This is a common looping idiom (while (n--))
854 [(set (match_operand:SI 0 "arm_general_register_operand" "")
855 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
857 (set (match_operand 2 "cc_register" "")
858 (compare (match_dup 0) (const_int -1)))
860 (if_then_else (match_operator 3 "equality_operator"
861 [(match_dup 2) (const_int 0)])
862 (match_operand 4 "" "")
863 (match_operand 5 "" "")))]
864 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
868 (match_dup 1) (const_int 1)))
869 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
871 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
874 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
875 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
878 operands[2], const0_rtx);"
881 ;; The next four insns work because they compare the result with one of
882 ;; the operands, and we know that the use of the condition code is
883 ;; either GEU or LTU, so we can use the carry flag from the addition
884 ;; instead of doing the compare a second time.
885 (define_insn "*addsi3_compare_op1"
886 [(set (reg:CC_C CC_REGNUM)
888 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
889 (match_operand:SI 2 "arm_add_operand" "rI,L"))
891 (set (match_operand:SI 0 "s_register_operand" "=r,r")
892 (plus:SI (match_dup 1) (match_dup 2)))]
896 sub%.\\t%0, %1, #%n2"
897 [(set_attr "conds" "set")]
900 (define_insn "*addsi3_compare_op2"
901 [(set (reg:CC_C CC_REGNUM)
903 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
904 (match_operand:SI 2 "arm_add_operand" "rI,L"))
906 (set (match_operand:SI 0 "s_register_operand" "=r,r")
907 (plus:SI (match_dup 1) (match_dup 2)))]
911 sub%.\\t%0, %1, #%n2"
912 [(set_attr "conds" "set")]
915 (define_insn "*compare_addsi2_op0"
916 [(set (reg:CC_C CC_REGNUM)
918 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
919 (match_operand:SI 1 "arm_add_operand" "rI,L"))
925 [(set_attr "conds" "set")]
928 (define_insn "*compare_addsi2_op1"
929 [(set (reg:CC_C CC_REGNUM)
931 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
932 (match_operand:SI 1 "arm_add_operand" "rI,L"))
938 [(set_attr "conds" "set")]
941 (define_insn "*addsi3_carryin_<optab>"
942 [(set (match_operand:SI 0 "s_register_operand" "=r")
943 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
944 (match_operand:SI 2 "arm_rhs_operand" "rI"))
945 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
948 [(set_attr "conds" "use")]
951 (define_insn "*addsi3_carryin_alt2_<optab>"
952 [(set (match_operand:SI 0 "s_register_operand" "=r")
953 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
954 (match_operand:SI 1 "s_register_operand" "%r"))
955 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
958 [(set_attr "conds" "use")]
961 (define_insn "*addsi3_carryin_shift_<optab>"
962 [(set (match_operand:SI 0 "s_register_operand" "=r")
964 (match_operator:SI 2 "shift_operator"
965 [(match_operand:SI 3 "s_register_operand" "r")
966 (match_operand:SI 4 "reg_or_int_operand" "rM")])
967 (match_operand:SI 1 "s_register_operand" "r"))
968 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
970 "adc%?\\t%0, %1, %3%S2"
971 [(set_attr "conds" "use")
972 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
973 (const_string "alu_shift")
974 (const_string "alu_shift_reg")))]
977 (define_expand "incscc"
978 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
979 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
980 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
981 (match_operand:SI 1 "s_register_operand" "0,?r")))]
986 (define_insn "*arm_incscc"
987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
988 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
989 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
990 (match_operand:SI 1 "s_register_operand" "0,?r")))]
994 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
995 [(set_attr "conds" "use")
996 (set_attr "length" "4,8")]
999 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1001 [(set (match_operand:SI 0 "s_register_operand" "")
1002 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1003 (match_operand:SI 2 "s_register_operand" ""))
1005 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1007 [(set (match_dup 3) (match_dup 1))
1008 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1010 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1013 (define_expand "addsf3"
1014 [(set (match_operand:SF 0 "s_register_operand" "")
1015 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1016 (match_operand:SF 2 "arm_float_add_operand" "")))]
1017 "TARGET_32BIT && TARGET_HARD_FLOAT"
1020 && !cirrus_fp_register (operands[2], SFmode))
1021 operands[2] = force_reg (SFmode, operands[2]);
1024 (define_expand "adddf3"
1025 [(set (match_operand:DF 0 "s_register_operand" "")
1026 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1027 (match_operand:DF 2 "arm_float_add_operand" "")))]
1028 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1031 && !cirrus_fp_register (operands[2], DFmode))
1032 operands[2] = force_reg (DFmode, operands[2]);
1035 (define_expand "subdi3"
1037 [(set (match_operand:DI 0 "s_register_operand" "")
1038 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1039 (match_operand:DI 2 "s_register_operand" "")))
1040 (clobber (reg:CC CC_REGNUM))])]
1043 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1045 && cirrus_fp_register (operands[0], DImode)
1046 && cirrus_fp_register (operands[1], DImode))
1048 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1054 if (GET_CODE (operands[1]) != REG)
1055 operands[1] = force_reg (DImode, operands[1]);
1056 if (GET_CODE (operands[2]) != REG)
1057 operands[2] = force_reg (DImode, operands[2]);
1062 (define_insn "*arm_subdi3"
1063 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1064 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1065 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1066 (clobber (reg:CC CC_REGNUM))]
1067 "TARGET_32BIT && !TARGET_NEON"
1068 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1069 [(set_attr "conds" "clob")
1070 (set_attr "length" "8")]
1073 (define_insn "*thumb_subdi3"
1074 [(set (match_operand:DI 0 "register_operand" "=l")
1075 (minus:DI (match_operand:DI 1 "register_operand" "0")
1076 (match_operand:DI 2 "register_operand" "l")))
1077 (clobber (reg:CC CC_REGNUM))]
1079 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1080 [(set_attr "length" "4")]
1083 (define_insn "*subdi_di_zesidi"
1084 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1085 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1087 (match_operand:SI 2 "s_register_operand" "r,r"))))
1088 (clobber (reg:CC CC_REGNUM))]
1090 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1091 [(set_attr "conds" "clob")
1092 (set_attr "length" "8")]
1095 (define_insn "*subdi_di_sesidi"
1096 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1097 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1099 (match_operand:SI 2 "s_register_operand" "r,r"))))
1100 (clobber (reg:CC CC_REGNUM))]
1102 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1103 [(set_attr "conds" "clob")
1104 (set_attr "length" "8")]
1107 (define_insn "*subdi_zesidi_di"
1108 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1109 (minus:DI (zero_extend:DI
1110 (match_operand:SI 2 "s_register_operand" "r,r"))
1111 (match_operand:DI 1 "s_register_operand" "0,r")))
1112 (clobber (reg:CC CC_REGNUM))]
1114 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1115 [(set_attr "conds" "clob")
1116 (set_attr "length" "8")]
1119 (define_insn "*subdi_sesidi_di"
1120 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1121 (minus:DI (sign_extend:DI
1122 (match_operand:SI 2 "s_register_operand" "r,r"))
1123 (match_operand:DI 1 "s_register_operand" "0,r")))
1124 (clobber (reg:CC CC_REGNUM))]
1126 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1127 [(set_attr "conds" "clob")
1128 (set_attr "length" "8")]
1131 (define_insn "*subdi_zesidi_zesidi"
1132 [(set (match_operand:DI 0 "s_register_operand" "=r")
1133 (minus:DI (zero_extend:DI
1134 (match_operand:SI 1 "s_register_operand" "r"))
1136 (match_operand:SI 2 "s_register_operand" "r"))))
1137 (clobber (reg:CC CC_REGNUM))]
1139 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1140 [(set_attr "conds" "clob")
1141 (set_attr "length" "8")]
1144 (define_expand "subsi3"
1145 [(set (match_operand:SI 0 "s_register_operand" "")
1146 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1147 (match_operand:SI 2 "s_register_operand" "")))]
1150 if (GET_CODE (operands[1]) == CONST_INT)
1154 arm_split_constant (MINUS, SImode, NULL_RTX,
1155 INTVAL (operands[1]), operands[0],
1156 operands[2], optimize && can_create_pseudo_p ());
1159 else /* TARGET_THUMB1 */
1160 operands[1] = force_reg (SImode, operands[1]);
1165 (define_insn "thumb1_subsi3_insn"
1166 [(set (match_operand:SI 0 "register_operand" "=l")
1167 (minus:SI (match_operand:SI 1 "register_operand" "l")
1168 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1171 [(set_attr "length" "2")
1172 (set_attr "conds" "set")])
1174 ; ??? Check Thumb-2 split length
1175 (define_insn_and_split "*arm_subsi3_insn"
1176 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1177 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1178 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1186 "&& ((GET_CODE (operands[1]) == CONST_INT
1187 && !const_ok_for_arm (INTVAL (operands[1])))
1188 || (GET_CODE (operands[2]) == CONST_INT
1189 && !const_ok_for_arm (INTVAL (operands[2]))))"
1190 [(clobber (const_int 0))]
1192 arm_split_constant (MINUS, SImode, curr_insn,
1193 INTVAL (operands[1]), operands[0], operands[2], 0);
1196 [(set_attr "length" "4,4,4,16,16")
1197 (set_attr "predicable" "yes")]
1201 [(match_scratch:SI 3 "r")
1202 (set (match_operand:SI 0 "arm_general_register_operand" "")
1203 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1204 (match_operand:SI 2 "arm_general_register_operand" "")))]
1206 && !const_ok_for_arm (INTVAL (operands[1]))
1207 && const_ok_for_arm (~INTVAL (operands[1]))"
1208 [(set (match_dup 3) (match_dup 1))
1209 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1213 (define_insn "*subsi3_compare0"
1214 [(set (reg:CC_NOOV CC_REGNUM)
1216 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1217 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1219 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1220 (minus:SI (match_dup 1) (match_dup 2)))]
1225 [(set_attr "conds" "set")]
1228 (define_insn "*subsi3_compare"
1229 [(set (reg:CC CC_REGNUM)
1230 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1231 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1232 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1233 (minus:SI (match_dup 1) (match_dup 2)))]
1238 [(set_attr "conds" "set")]
1241 (define_expand "decscc"
1242 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1243 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1244 (match_operator:SI 2 "arm_comparison_operator"
1245 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1250 (define_insn "*arm_decscc"
1251 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1252 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1253 (match_operator:SI 2 "arm_comparison_operator"
1254 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1258 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1259 [(set_attr "conds" "use")
1260 (set_attr "length" "*,8")]
1263 (define_expand "subsf3"
1264 [(set (match_operand:SF 0 "s_register_operand" "")
1265 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1266 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1267 "TARGET_32BIT && TARGET_HARD_FLOAT"
1269 if (TARGET_MAVERICK)
1271 if (!cirrus_fp_register (operands[1], SFmode))
1272 operands[1] = force_reg (SFmode, operands[1]);
1273 if (!cirrus_fp_register (operands[2], SFmode))
1274 operands[2] = force_reg (SFmode, operands[2]);
1278 (define_expand "subdf3"
1279 [(set (match_operand:DF 0 "s_register_operand" "")
1280 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1281 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1282 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1284 if (TARGET_MAVERICK)
1286 if (!cirrus_fp_register (operands[1], DFmode))
1287 operands[1] = force_reg (DFmode, operands[1]);
1288 if (!cirrus_fp_register (operands[2], DFmode))
1289 operands[2] = force_reg (DFmode, operands[2]);
1294 ;; Multiplication insns
1296 (define_expand "mulsi3"
1297 [(set (match_operand:SI 0 "s_register_operand" "")
1298 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1299 (match_operand:SI 1 "s_register_operand" "")))]
1304 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1305 (define_insn "*arm_mulsi3"
1306 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1307 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1308 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1309 "TARGET_32BIT && !arm_arch6"
1310 "mul%?\\t%0, %2, %1"
1311 [(set_attr "insn" "mul")
1312 (set_attr "predicable" "yes")]
1315 (define_insn "*arm_mulsi3_v6"
1316 [(set (match_operand:SI 0 "s_register_operand" "=r")
1317 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1318 (match_operand:SI 2 "s_register_operand" "r")))]
1319 "TARGET_32BIT && arm_arch6"
1320 "mul%?\\t%0, %1, %2"
1321 [(set_attr "insn" "mul")
1322 (set_attr "predicable" "yes")]
1325 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1326 ; 1 and 2; are the same, because reload will make operand 0 match
1327 ; operand 1 without realizing that this conflicts with operand 2. We fix
1328 ; this by adding another alternative to match this case, and then `reload'
1329 ; it ourselves. This alternative must come first.
1330 (define_insn "*thumb_mulsi3"
1331 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1332 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1333 (match_operand:SI 2 "register_operand" "l,l,l")))]
1334 "TARGET_THUMB1 && !arm_arch6"
1336 if (which_alternative < 2)
1337 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1339 return \"mul\\t%0, %2\";
1341 [(set_attr "length" "4,4,2")
1342 (set_attr "insn" "mul")]
1345 (define_insn "*thumb_mulsi3_v6"
1346 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1347 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1348 (match_operand:SI 2 "register_operand" "l,0,0")))]
1349 "TARGET_THUMB1 && arm_arch6"
1354 [(set_attr "length" "2")
1355 (set_attr "insn" "mul")]
1358 (define_insn "*mulsi3_compare0"
1359 [(set (reg:CC_NOOV CC_REGNUM)
1360 (compare:CC_NOOV (mult:SI
1361 (match_operand:SI 2 "s_register_operand" "r,r")
1362 (match_operand:SI 1 "s_register_operand" "%0,r"))
1364 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1365 (mult:SI (match_dup 2) (match_dup 1)))]
1366 "TARGET_ARM && !arm_arch6"
1367 "mul%.\\t%0, %2, %1"
1368 [(set_attr "conds" "set")
1369 (set_attr "insn" "muls")]
1372 (define_insn "*mulsi3_compare0_v6"
1373 [(set (reg:CC_NOOV CC_REGNUM)
1374 (compare:CC_NOOV (mult:SI
1375 (match_operand:SI 2 "s_register_operand" "r")
1376 (match_operand:SI 1 "s_register_operand" "r"))
1378 (set (match_operand:SI 0 "s_register_operand" "=r")
1379 (mult:SI (match_dup 2) (match_dup 1)))]
1380 "TARGET_ARM && arm_arch6 && optimize_size"
1381 "mul%.\\t%0, %2, %1"
1382 [(set_attr "conds" "set")
1383 (set_attr "insn" "muls")]
1386 (define_insn "*mulsi_compare0_scratch"
1387 [(set (reg:CC_NOOV CC_REGNUM)
1388 (compare:CC_NOOV (mult:SI
1389 (match_operand:SI 2 "s_register_operand" "r,r")
1390 (match_operand:SI 1 "s_register_operand" "%0,r"))
1392 (clobber (match_scratch:SI 0 "=&r,&r"))]
1393 "TARGET_ARM && !arm_arch6"
1394 "mul%.\\t%0, %2, %1"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "muls")]
1399 (define_insn "*mulsi_compare0_scratch_v6"
1400 [(set (reg:CC_NOOV CC_REGNUM)
1401 (compare:CC_NOOV (mult:SI
1402 (match_operand:SI 2 "s_register_operand" "r")
1403 (match_operand:SI 1 "s_register_operand" "r"))
1405 (clobber (match_scratch:SI 0 "=r"))]
1406 "TARGET_ARM && arm_arch6 && optimize_size"
1407 "mul%.\\t%0, %2, %1"
1408 [(set_attr "conds" "set")
1409 (set_attr "insn" "muls")]
1412 ;; Unnamed templates to match MLA instruction.
1414 (define_insn "*mulsi3addsi"
1415 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1417 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1418 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1419 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1420 "TARGET_32BIT && !arm_arch6"
1421 "mla%?\\t%0, %2, %1, %3"
1422 [(set_attr "insn" "mla")
1423 (set_attr "predicable" "yes")]
1426 (define_insn "*mulsi3addsi_v6"
1427 [(set (match_operand:SI 0 "s_register_operand" "=r")
1429 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1430 (match_operand:SI 1 "s_register_operand" "r"))
1431 (match_operand:SI 3 "s_register_operand" "r")))]
1432 "TARGET_32BIT && arm_arch6"
1433 "mla%?\\t%0, %2, %1, %3"
1434 [(set_attr "insn" "mla")
1435 (set_attr "predicable" "yes")]
1438 (define_insn "*mulsi3addsi_compare0"
1439 [(set (reg:CC_NOOV CC_REGNUM)
1442 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1443 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1444 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1446 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1447 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1449 "TARGET_ARM && arm_arch6"
1450 "mla%.\\t%0, %2, %1, %3"
1451 [(set_attr "conds" "set")
1452 (set_attr "insn" "mlas")]
1455 (define_insn "*mulsi3addsi_compare0_v6"
1456 [(set (reg:CC_NOOV CC_REGNUM)
1459 (match_operand:SI 2 "s_register_operand" "r")
1460 (match_operand:SI 1 "s_register_operand" "r"))
1461 (match_operand:SI 3 "s_register_operand" "r"))
1463 (set (match_operand:SI 0 "s_register_operand" "=r")
1464 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1466 "TARGET_ARM && arm_arch6 && optimize_size"
1467 "mla%.\\t%0, %2, %1, %3"
1468 [(set_attr "conds" "set")
1469 (set_attr "insn" "mlas")]
1472 (define_insn "*mulsi3addsi_compare0_scratch"
1473 [(set (reg:CC_NOOV CC_REGNUM)
1476 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1477 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1478 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1480 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1481 "TARGET_ARM && !arm_arch6"
1482 "mla%.\\t%0, %2, %1, %3"
1483 [(set_attr "conds" "set")
1484 (set_attr "insn" "mlas")]
1487 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1488 [(set (reg:CC_NOOV CC_REGNUM)
1491 (match_operand:SI 2 "s_register_operand" "r")
1492 (match_operand:SI 1 "s_register_operand" "r"))
1493 (match_operand:SI 3 "s_register_operand" "r"))
1495 (clobber (match_scratch:SI 0 "=r"))]
1496 "TARGET_ARM && arm_arch6 && optimize_size"
1497 "mla%.\\t%0, %2, %1, %3"
1498 [(set_attr "conds" "set")
1499 (set_attr "insn" "mlas")]
1502 (define_insn "*mulsi3subsi"
1503 [(set (match_operand:SI 0 "s_register_operand" "=r")
1505 (match_operand:SI 3 "s_register_operand" "r")
1506 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1507 (match_operand:SI 1 "s_register_operand" "r"))))]
1508 "TARGET_32BIT && arm_arch_thumb2"
1509 "mls%?\\t%0, %2, %1, %3"
1510 [(set_attr "insn" "mla")
1511 (set_attr "predicable" "yes")]
1514 (define_expand "maddsidi4"
1515 [(set (match_operand:DI 0 "s_register_operand" "")
1518 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1519 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1520 (match_operand:DI 3 "s_register_operand" "")))]
1521 "TARGET_32BIT && arm_arch3m"
1524 (define_insn "*mulsidi3adddi"
1525 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1528 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1529 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1530 (match_operand:DI 1 "s_register_operand" "0")))]
1531 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1532 "smlal%?\\t%Q0, %R0, %3, %2"
1533 [(set_attr "insn" "smlal")
1534 (set_attr "predicable" "yes")]
1537 (define_insn "*mulsidi3adddi_v6"
1538 [(set (match_operand:DI 0 "s_register_operand" "=r")
1541 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1542 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1543 (match_operand:DI 1 "s_register_operand" "0")))]
1544 "TARGET_32BIT && arm_arch6"
1545 "smlal%?\\t%Q0, %R0, %3, %2"
1546 [(set_attr "insn" "smlal")
1547 (set_attr "predicable" "yes")]
1550 ;; 32x32->64 widening multiply.
1551 ;; As with mulsi3, the only difference between the v3-5 and v6+
1552 ;; versions of these patterns is the requirement that the output not
1553 ;; overlap the inputs, but that still means we have to have a named
1554 ;; expander and two different starred insns.
1556 (define_expand "mulsidi3"
1557 [(set (match_operand:DI 0 "s_register_operand" "")
1559 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1560 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1561 "TARGET_32BIT && arm_arch3m"
1565 (define_insn "*mulsidi3_nov6"
1566 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1568 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1569 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1570 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1571 "smull%?\\t%Q0, %R0, %1, %2"
1572 [(set_attr "insn" "smull")
1573 (set_attr "predicable" "yes")]
1576 (define_insn "*mulsidi3_v6"
1577 [(set (match_operand:DI 0 "s_register_operand" "=r")
1579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1581 "TARGET_32BIT && arm_arch6"
1582 "smull%?\\t%Q0, %R0, %1, %2"
1583 [(set_attr "insn" "smull")
1584 (set_attr "predicable" "yes")]
1587 (define_expand "umulsidi3"
1588 [(set (match_operand:DI 0 "s_register_operand" "")
1590 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1591 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1592 "TARGET_32BIT && arm_arch3m"
1596 (define_insn "*umulsidi3_nov6"
1597 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1599 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1600 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1601 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1602 "umull%?\\t%Q0, %R0, %1, %2"
1603 [(set_attr "insn" "umull")
1604 (set_attr "predicable" "yes")]
1607 (define_insn "*umulsidi3_v6"
1608 [(set (match_operand:DI 0 "s_register_operand" "=r")
1610 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1611 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1612 "TARGET_32BIT && arm_arch6"
1613 "umull%?\\t%Q0, %R0, %1, %2"
1614 [(set_attr "insn" "umull")
1615 (set_attr "predicable" "yes")]
1618 (define_expand "umaddsidi4"
1619 [(set (match_operand:DI 0 "s_register_operand" "")
1622 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1623 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1624 (match_operand:DI 3 "s_register_operand" "")))]
1625 "TARGET_32BIT && arm_arch3m"
1628 (define_insn "*umulsidi3adddi"
1629 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1632 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1633 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1634 (match_operand:DI 1 "s_register_operand" "0")))]
1635 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1636 "umlal%?\\t%Q0, %R0, %3, %2"
1637 [(set_attr "insn" "umlal")
1638 (set_attr "predicable" "yes")]
1641 (define_insn "*umulsidi3adddi_v6"
1642 [(set (match_operand:DI 0 "s_register_operand" "=r")
1645 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1646 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1647 (match_operand:DI 1 "s_register_operand" "0")))]
1648 "TARGET_32BIT && arm_arch6"
1649 "umlal%?\\t%Q0, %R0, %3, %2"
1650 [(set_attr "insn" "umlal")
1651 (set_attr "predicable" "yes")]
1654 (define_expand "smulsi3_highpart"
1656 [(set (match_operand:SI 0 "s_register_operand" "")
1660 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1661 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1663 (clobber (match_scratch:SI 3 ""))])]
1664 "TARGET_32BIT && arm_arch3m"
1668 (define_insn "*smulsi3_highpart_nov6"
1669 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1673 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1674 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1676 (clobber (match_scratch:SI 3 "=&r,&r"))]
1677 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1678 "smull%?\\t%3, %0, %2, %1"
1679 [(set_attr "insn" "smull")
1680 (set_attr "predicable" "yes")]
1683 (define_insn "*smulsi3_highpart_v6"
1684 [(set (match_operand:SI 0 "s_register_operand" "=r")
1688 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1689 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1691 (clobber (match_scratch:SI 3 "=r"))]
1692 "TARGET_32BIT && arm_arch6"
1693 "smull%?\\t%3, %0, %2, %1"
1694 [(set_attr "insn" "smull")
1695 (set_attr "predicable" "yes")]
1698 (define_expand "umulsi3_highpart"
1700 [(set (match_operand:SI 0 "s_register_operand" "")
1704 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1705 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1707 (clobber (match_scratch:SI 3 ""))])]
1708 "TARGET_32BIT && arm_arch3m"
1712 (define_insn "*umulsi3_highpart_nov6"
1713 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1717 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1718 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1720 (clobber (match_scratch:SI 3 "=&r,&r"))]
1721 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1722 "umull%?\\t%3, %0, %2, %1"
1723 [(set_attr "insn" "umull")
1724 (set_attr "predicable" "yes")]
1727 (define_insn "*umulsi3_highpart_v6"
1728 [(set (match_operand:SI 0 "s_register_operand" "=r")
1732 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1733 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1735 (clobber (match_scratch:SI 3 "=r"))]
1736 "TARGET_32BIT && arm_arch6"
1737 "umull%?\\t%3, %0, %2, %1"
1738 [(set_attr "insn" "umull")
1739 (set_attr "predicable" "yes")]
1742 (define_insn "mulhisi3"
1743 [(set (match_operand:SI 0 "s_register_operand" "=r")
1744 (mult:SI (sign_extend:SI
1745 (match_operand:HI 1 "s_register_operand" "%r"))
1747 (match_operand:HI 2 "s_register_operand" "r"))))]
1748 "TARGET_DSP_MULTIPLY"
1749 "smulbb%?\\t%0, %1, %2"
1750 [(set_attr "insn" "smulxy")
1751 (set_attr "predicable" "yes")]
1754 (define_insn "*mulhisi3tb"
1755 [(set (match_operand:SI 0 "s_register_operand" "=r")
1756 (mult:SI (ashiftrt:SI
1757 (match_operand:SI 1 "s_register_operand" "r")
1760 (match_operand:HI 2 "s_register_operand" "r"))))]
1761 "TARGET_DSP_MULTIPLY"
1762 "smultb%?\\t%0, %1, %2"
1763 [(set_attr "insn" "smulxy")
1764 (set_attr "predicable" "yes")]
1767 (define_insn "*mulhisi3bt"
1768 [(set (match_operand:SI 0 "s_register_operand" "=r")
1769 (mult:SI (sign_extend:SI
1770 (match_operand:HI 1 "s_register_operand" "r"))
1772 (match_operand:SI 2 "s_register_operand" "r")
1774 "TARGET_DSP_MULTIPLY"
1775 "smulbt%?\\t%0, %1, %2"
1776 [(set_attr "insn" "smulxy")
1777 (set_attr "predicable" "yes")]
1780 (define_insn "*mulhisi3tt"
1781 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (mult:SI (ashiftrt:SI
1783 (match_operand:SI 1 "s_register_operand" "r")
1786 (match_operand:SI 2 "s_register_operand" "r")
1788 "TARGET_DSP_MULTIPLY"
1789 "smultt%?\\t%0, %1, %2"
1790 [(set_attr "insn" "smulxy")
1791 (set_attr "predicable" "yes")]
1794 (define_insn "maddhisi4"
1795 [(set (match_operand:SI 0 "s_register_operand" "=r")
1796 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1797 (mult:SI (sign_extend:SI
1798 (match_operand:HI 1 "s_register_operand" "%r"))
1800 (match_operand:HI 2 "s_register_operand" "r")))))]
1801 "TARGET_DSP_MULTIPLY"
1802 "smlabb%?\\t%0, %1, %2, %3"
1803 [(set_attr "insn" "smlaxy")
1804 (set_attr "predicable" "yes")]
1807 (define_insn "*maddhidi4"
1808 [(set (match_operand:DI 0 "s_register_operand" "=r")
1810 (match_operand:DI 3 "s_register_operand" "0")
1811 (mult:DI (sign_extend:DI
1812 (match_operand:HI 1 "s_register_operand" "%r"))
1814 (match_operand:HI 2 "s_register_operand" "r")))))]
1815 "TARGET_DSP_MULTIPLY"
1816 "smlalbb%?\\t%Q0, %R0, %1, %2"
1817 [(set_attr "insn" "smlalxy")
1818 (set_attr "predicable" "yes")])
1820 (define_expand "mulsf3"
1821 [(set (match_operand:SF 0 "s_register_operand" "")
1822 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1823 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1824 "TARGET_32BIT && TARGET_HARD_FLOAT"
1827 && !cirrus_fp_register (operands[2], SFmode))
1828 operands[2] = force_reg (SFmode, operands[2]);
1831 (define_expand "muldf3"
1832 [(set (match_operand:DF 0 "s_register_operand" "")
1833 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1834 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1835 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1838 && !cirrus_fp_register (operands[2], DFmode))
1839 operands[2] = force_reg (DFmode, operands[2]);
1844 (define_expand "divsf3"
1845 [(set (match_operand:SF 0 "s_register_operand" "")
1846 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1847 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1848 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1851 (define_expand "divdf3"
1852 [(set (match_operand:DF 0 "s_register_operand" "")
1853 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1854 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1855 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1860 (define_expand "modsf3"
1861 [(set (match_operand:SF 0 "s_register_operand" "")
1862 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1863 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1864 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1867 (define_expand "moddf3"
1868 [(set (match_operand:DF 0 "s_register_operand" "")
1869 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1870 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1871 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1874 ;; Boolean and,ior,xor insns
1876 ;; Split up double word logical operations
1878 ;; Split up simple DImode logical operations. Simply perform the logical
1879 ;; operation on the upper and lower halves of the registers.
1881 [(set (match_operand:DI 0 "s_register_operand" "")
1882 (match_operator:DI 6 "logical_binary_operator"
1883 [(match_operand:DI 1 "s_register_operand" "")
1884 (match_operand:DI 2 "s_register_operand" "")]))]
1885 "TARGET_32BIT && reload_completed
1886 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1887 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1888 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1889 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1892 operands[3] = gen_highpart (SImode, operands[0]);
1893 operands[0] = gen_lowpart (SImode, operands[0]);
1894 operands[4] = gen_highpart (SImode, operands[1]);
1895 operands[1] = gen_lowpart (SImode, operands[1]);
1896 operands[5] = gen_highpart (SImode, operands[2]);
1897 operands[2] = gen_lowpart (SImode, operands[2]);
1902 [(set (match_operand:DI 0 "s_register_operand" "")
1903 (match_operator:DI 6 "logical_binary_operator"
1904 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1905 (match_operand:DI 1 "s_register_operand" "")]))]
1906 "TARGET_32BIT && reload_completed"
1907 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1908 (set (match_dup 3) (match_op_dup:SI 6
1909 [(ashiftrt:SI (match_dup 2) (const_int 31))
1913 operands[3] = gen_highpart (SImode, operands[0]);
1914 operands[0] = gen_lowpart (SImode, operands[0]);
1915 operands[4] = gen_highpart (SImode, operands[1]);
1916 operands[1] = gen_lowpart (SImode, operands[1]);
1917 operands[5] = gen_highpart (SImode, operands[2]);
1918 operands[2] = gen_lowpart (SImode, operands[2]);
1922 ;; The zero extend of operand 2 means we can just copy the high part of
1923 ;; operand1 into operand0.
1925 [(set (match_operand:DI 0 "s_register_operand" "")
1927 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1928 (match_operand:DI 1 "s_register_operand" "")))]
1929 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1930 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1931 (set (match_dup 3) (match_dup 4))]
1934 operands[4] = gen_highpart (SImode, operands[1]);
1935 operands[3] = gen_highpart (SImode, operands[0]);
1936 operands[0] = gen_lowpart (SImode, operands[0]);
1937 operands[1] = gen_lowpart (SImode, operands[1]);
1941 ;; The zero extend of operand 2 means we can just copy the high part of
1942 ;; operand1 into operand0.
1944 [(set (match_operand:DI 0 "s_register_operand" "")
1946 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1947 (match_operand:DI 1 "s_register_operand" "")))]
1948 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1949 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1950 (set (match_dup 3) (match_dup 4))]
1953 operands[4] = gen_highpart (SImode, operands[1]);
1954 operands[3] = gen_highpart (SImode, operands[0]);
1955 operands[0] = gen_lowpart (SImode, operands[0]);
1956 operands[1] = gen_lowpart (SImode, operands[1]);
1960 (define_expand "anddi3"
1961 [(set (match_operand:DI 0 "s_register_operand" "")
1962 (and:DI (match_operand:DI 1 "s_register_operand" "")
1963 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1968 (define_insn "*anddi3_insn"
1969 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1970 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1971 (match_operand:DI 2 "s_register_operand" "r,r")))]
1972 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1974 [(set_attr "length" "8")]
1977 (define_insn_and_split "*anddi_zesidi_di"
1978 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1979 (and:DI (zero_extend:DI
1980 (match_operand:SI 2 "s_register_operand" "r,r"))
1981 (match_operand:DI 1 "s_register_operand" "0,r")))]
1984 "TARGET_32BIT && reload_completed"
1985 ; The zero extend of operand 2 clears the high word of the output
1987 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1988 (set (match_dup 3) (const_int 0))]
1991 operands[3] = gen_highpart (SImode, operands[0]);
1992 operands[0] = gen_lowpart (SImode, operands[0]);
1993 operands[1] = gen_lowpart (SImode, operands[1]);
1995 [(set_attr "length" "8")]
1998 (define_insn "*anddi_sesdi_di"
1999 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2000 (and:DI (sign_extend:DI
2001 (match_operand:SI 2 "s_register_operand" "r,r"))
2002 (match_operand:DI 1 "s_register_operand" "0,r")))]
2005 [(set_attr "length" "8")]
2008 (define_expand "andsi3"
2009 [(set (match_operand:SI 0 "s_register_operand" "")
2010 (and:SI (match_operand:SI 1 "s_register_operand" "")
2011 (match_operand:SI 2 "reg_or_int_operand" "")))]
2016 if (GET_CODE (operands[2]) == CONST_INT)
2018 if (INTVAL (operands[2]) == 255 && arm_arch6)
2020 operands[1] = convert_to_mode (QImode, operands[1], 1);
2021 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2025 arm_split_constant (AND, SImode, NULL_RTX,
2026 INTVAL (operands[2]), operands[0],
2028 optimize && can_create_pseudo_p ());
2033 else /* TARGET_THUMB1 */
2035 if (GET_CODE (operands[2]) != CONST_INT)
2037 rtx tmp = force_reg (SImode, operands[2]);
2038 if (rtx_equal_p (operands[0], operands[1]))
2042 operands[2] = operands[1];
2050 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2052 operands[2] = force_reg (SImode,
2053 GEN_INT (~INTVAL (operands[2])));
2055 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2060 for (i = 9; i <= 31; i++)
2062 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2064 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2068 else if ((((HOST_WIDE_INT) 1) << i) - 1
2069 == ~INTVAL (operands[2]))
2071 rtx shift = GEN_INT (i);
2072 rtx reg = gen_reg_rtx (SImode);
2074 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2075 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2081 operands[2] = force_reg (SImode, operands[2]);
2087 ; ??? Check split length for Thumb-2
2088 (define_insn_and_split "*arm_andsi3_insn"
2089 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2090 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2091 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2095 bic%?\\t%0, %1, #%B2
2098 && GET_CODE (operands[2]) == CONST_INT
2099 && !(const_ok_for_arm (INTVAL (operands[2]))
2100 || const_ok_for_arm (~INTVAL (operands[2])))"
2101 [(clobber (const_int 0))]
2103 arm_split_constant (AND, SImode, curr_insn,
2104 INTVAL (operands[2]), operands[0], operands[1], 0);
2107 [(set_attr "length" "4,4,16")
2108 (set_attr "predicable" "yes")]
2111 (define_insn "*thumb1_andsi3_insn"
2112 [(set (match_operand:SI 0 "register_operand" "=l")
2113 (and:SI (match_operand:SI 1 "register_operand" "%0")
2114 (match_operand:SI 2 "register_operand" "l")))]
2117 [(set_attr "length" "2")
2118 (set_attr "conds" "set")])
2120 (define_insn "*andsi3_compare0"
2121 [(set (reg:CC_NOOV CC_REGNUM)
2123 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2124 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2126 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2127 (and:SI (match_dup 1) (match_dup 2)))]
2131 bic%.\\t%0, %1, #%B2"
2132 [(set_attr "conds" "set")]
2135 (define_insn "*andsi3_compare0_scratch"
2136 [(set (reg:CC_NOOV CC_REGNUM)
2138 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2139 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2141 (clobber (match_scratch:SI 2 "=X,r"))]
2145 bic%.\\t%2, %0, #%B1"
2146 [(set_attr "conds" "set")]
2149 (define_insn "*zeroextractsi_compare0_scratch"
2150 [(set (reg:CC_NOOV CC_REGNUM)
2151 (compare:CC_NOOV (zero_extract:SI
2152 (match_operand:SI 0 "s_register_operand" "r")
2153 (match_operand 1 "const_int_operand" "n")
2154 (match_operand 2 "const_int_operand" "n"))
2157 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2158 && INTVAL (operands[1]) > 0
2159 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2160 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2162 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2163 << INTVAL (operands[2]));
2164 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2167 [(set_attr "conds" "set")]
2170 (define_insn_and_split "*ne_zeroextractsi"
2171 [(set (match_operand:SI 0 "s_register_operand" "=r")
2172 (ne:SI (zero_extract:SI
2173 (match_operand:SI 1 "s_register_operand" "r")
2174 (match_operand:SI 2 "const_int_operand" "n")
2175 (match_operand:SI 3 "const_int_operand" "n"))
2177 (clobber (reg:CC CC_REGNUM))]
2179 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2180 && INTVAL (operands[2]) > 0
2181 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2182 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2185 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2186 && INTVAL (operands[2]) > 0
2187 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2188 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2189 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2190 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2192 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2194 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2195 (match_dup 0) (const_int 1)))]
2197 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2198 << INTVAL (operands[3]));
2200 [(set_attr "conds" "clob")
2201 (set (attr "length")
2202 (if_then_else (eq_attr "is_thumb" "yes")
2207 (define_insn_and_split "*ne_zeroextractsi_shifted"
2208 [(set (match_operand:SI 0 "s_register_operand" "=r")
2209 (ne:SI (zero_extract:SI
2210 (match_operand:SI 1 "s_register_operand" "r")
2211 (match_operand:SI 2 "const_int_operand" "n")
2214 (clobber (reg:CC CC_REGNUM))]
2218 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2219 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2221 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2223 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2224 (match_dup 0) (const_int 1)))]
2226 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2228 [(set_attr "conds" "clob")
2229 (set_attr "length" "8")]
2232 (define_insn_and_split "*ite_ne_zeroextractsi"
2233 [(set (match_operand:SI 0 "s_register_operand" "=r")
2234 (if_then_else:SI (ne (zero_extract:SI
2235 (match_operand:SI 1 "s_register_operand" "r")
2236 (match_operand:SI 2 "const_int_operand" "n")
2237 (match_operand:SI 3 "const_int_operand" "n"))
2239 (match_operand:SI 4 "arm_not_operand" "rIK")
2241 (clobber (reg:CC CC_REGNUM))]
2243 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2244 && INTVAL (operands[2]) > 0
2245 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2246 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2247 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2250 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2251 && INTVAL (operands[2]) > 0
2252 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2253 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2254 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2255 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2256 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2258 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2260 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2261 (match_dup 0) (match_dup 4)))]
2263 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2264 << INTVAL (operands[3]));
2266 [(set_attr "conds" "clob")
2267 (set_attr "length" "8")]
2270 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2271 [(set (match_operand:SI 0 "s_register_operand" "=r")
2272 (if_then_else:SI (ne (zero_extract:SI
2273 (match_operand:SI 1 "s_register_operand" "r")
2274 (match_operand:SI 2 "const_int_operand" "n")
2277 (match_operand:SI 3 "arm_not_operand" "rIK")
2279 (clobber (reg:CC CC_REGNUM))]
2280 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2282 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2283 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2284 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2286 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2288 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2289 (match_dup 0) (match_dup 3)))]
2291 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2293 [(set_attr "conds" "clob")
2294 (set_attr "length" "8")]
2298 [(set (match_operand:SI 0 "s_register_operand" "")
2299 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2300 (match_operand:SI 2 "const_int_operand" "")
2301 (match_operand:SI 3 "const_int_operand" "")))
2302 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2304 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2305 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2307 HOST_WIDE_INT temp = INTVAL (operands[2]);
2309 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2310 operands[3] = GEN_INT (32 - temp);
2314 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2316 [(set (match_operand:SI 0 "s_register_operand" "")
2317 (match_operator:SI 1 "shiftable_operator"
2318 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2319 (match_operand:SI 3 "const_int_operand" "")
2320 (match_operand:SI 4 "const_int_operand" ""))
2321 (match_operand:SI 5 "s_register_operand" "")]))
2322 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2324 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2327 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2330 HOST_WIDE_INT temp = INTVAL (operands[3]);
2332 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2333 operands[4] = GEN_INT (32 - temp);
2338 [(set (match_operand:SI 0 "s_register_operand" "")
2339 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2340 (match_operand:SI 2 "const_int_operand" "")
2341 (match_operand:SI 3 "const_int_operand" "")))]
2343 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2344 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2346 HOST_WIDE_INT temp = INTVAL (operands[2]);
2348 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2349 operands[3] = GEN_INT (32 - temp);
2354 [(set (match_operand:SI 0 "s_register_operand" "")
2355 (match_operator:SI 1 "shiftable_operator"
2356 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2357 (match_operand:SI 3 "const_int_operand" "")
2358 (match_operand:SI 4 "const_int_operand" ""))
2359 (match_operand:SI 5 "s_register_operand" "")]))
2360 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2362 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2365 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2368 HOST_WIDE_INT temp = INTVAL (operands[3]);
2370 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2371 operands[4] = GEN_INT (32 - temp);
2375 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2376 ;;; represented by the bitfield, then this will produce incorrect results.
2377 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2378 ;;; which have a real bit-field insert instruction, the truncation happens
2379 ;;; in the bit-field insert instruction itself. Since arm does not have a
2380 ;;; bit-field insert instruction, we would have to emit code here to truncate
2381 ;;; the value before we insert. This loses some of the advantage of having
2382 ;;; this insv pattern, so this pattern needs to be reevalutated.
2384 (define_expand "insv"
2385 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2386 (match_operand:SI 1 "general_operand" "")
2387 (match_operand:SI 2 "general_operand" ""))
2388 (match_operand:SI 3 "reg_or_int_operand" ""))]
2389 "TARGET_ARM || arm_arch_thumb2"
2392 int start_bit = INTVAL (operands[2]);
2393 int width = INTVAL (operands[1]);
2394 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2395 rtx target, subtarget;
2397 if (arm_arch_thumb2)
2399 bool use_bfi = TRUE;
2401 if (GET_CODE (operands[3]) == CONST_INT)
2403 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2407 emit_insn (gen_insv_zero (operands[0], operands[1],
2412 /* See if the set can be done with a single orr instruction. */
2413 if (val == mask && const_ok_for_arm (val << start_bit))
2419 if (GET_CODE (operands[3]) != REG)
2420 operands[3] = force_reg (SImode, operands[3]);
2422 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2428 target = copy_rtx (operands[0]);
2429 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2430 subreg as the final target. */
2431 if (GET_CODE (target) == SUBREG)
2433 subtarget = gen_reg_rtx (SImode);
2434 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2435 < GET_MODE_SIZE (SImode))
2436 target = SUBREG_REG (target);
2441 if (GET_CODE (operands[3]) == CONST_INT)
2443 /* Since we are inserting a known constant, we may be able to
2444 reduce the number of bits that we have to clear so that
2445 the mask becomes simple. */
2446 /* ??? This code does not check to see if the new mask is actually
2447 simpler. It may not be. */
2448 rtx op1 = gen_reg_rtx (SImode);
2449 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2450 start of this pattern. */
2451 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2452 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2454 emit_insn (gen_andsi3 (op1, operands[0],
2455 gen_int_mode (~mask2, SImode)));
2456 emit_insn (gen_iorsi3 (subtarget, op1,
2457 gen_int_mode (op3_value << start_bit, SImode)));
2459 else if (start_bit == 0
2460 && !(const_ok_for_arm (mask)
2461 || const_ok_for_arm (~mask)))
2463 /* A Trick, since we are setting the bottom bits in the word,
2464 we can shift operand[3] up, operand[0] down, OR them together
2465 and rotate the result back again. This takes 3 insns, and
2466 the third might be mergeable into another op. */
2467 /* The shift up copes with the possibility that operand[3] is
2468 wider than the bitfield. */
2469 rtx op0 = gen_reg_rtx (SImode);
2470 rtx op1 = gen_reg_rtx (SImode);
2472 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2473 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2474 emit_insn (gen_iorsi3 (op1, op1, op0));
2475 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2477 else if ((width + start_bit == 32)
2478 && !(const_ok_for_arm (mask)
2479 || const_ok_for_arm (~mask)))
2481 /* Similar trick, but slightly less efficient. */
2483 rtx op0 = gen_reg_rtx (SImode);
2484 rtx op1 = gen_reg_rtx (SImode);
2486 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2487 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2488 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2489 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2493 rtx op0 = gen_int_mode (mask, SImode);
2494 rtx op1 = gen_reg_rtx (SImode);
2495 rtx op2 = gen_reg_rtx (SImode);
2497 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2499 rtx tmp = gen_reg_rtx (SImode);
2501 emit_insn (gen_movsi (tmp, op0));
2505 /* Mask out any bits in operand[3] that are not needed. */
2506 emit_insn (gen_andsi3 (op1, operands[3], op0));
2508 if (GET_CODE (op0) == CONST_INT
2509 && (const_ok_for_arm (mask << start_bit)
2510 || const_ok_for_arm (~(mask << start_bit))))
2512 op0 = gen_int_mode (~(mask << start_bit), SImode);
2513 emit_insn (gen_andsi3 (op2, operands[0], op0));
2517 if (GET_CODE (op0) == CONST_INT)
2519 rtx tmp = gen_reg_rtx (SImode);
2521 emit_insn (gen_movsi (tmp, op0));
2526 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2528 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2532 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2534 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2537 if (subtarget != target)
2539 /* If TARGET is still a SUBREG, then it must be wider than a word,
2540 so we must be careful only to set the subword we were asked to. */
2541 if (GET_CODE (target) == SUBREG)
2542 emit_move_insn (target, subtarget);
2544 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2551 (define_insn "insv_zero"
2552 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2553 (match_operand:SI 1 "const_int_operand" "M")
2554 (match_operand:SI 2 "const_int_operand" "M"))
2558 [(set_attr "length" "4")
2559 (set_attr "predicable" "yes")]
2562 (define_insn "insv_t2"
2563 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2564 (match_operand:SI 1 "const_int_operand" "M")
2565 (match_operand:SI 2 "const_int_operand" "M"))
2566 (match_operand:SI 3 "s_register_operand" "r"))]
2568 "bfi%?\t%0, %3, %2, %1"
2569 [(set_attr "length" "4")
2570 (set_attr "predicable" "yes")]
2573 ; constants for op 2 will never be given to these patterns.
2574 (define_insn_and_split "*anddi_notdi_di"
2575 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2576 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2577 (match_operand:DI 2 "s_register_operand" "r,0")))]
2580 "TARGET_32BIT && reload_completed
2581 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2582 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2583 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2584 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2587 operands[3] = gen_highpart (SImode, operands[0]);
2588 operands[0] = gen_lowpart (SImode, operands[0]);
2589 operands[4] = gen_highpart (SImode, operands[1]);
2590 operands[1] = gen_lowpart (SImode, operands[1]);
2591 operands[5] = gen_highpart (SImode, operands[2]);
2592 operands[2] = gen_lowpart (SImode, operands[2]);
2594 [(set_attr "length" "8")
2595 (set_attr "predicable" "yes")]
2598 (define_insn_and_split "*anddi_notzesidi_di"
2599 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600 (and:DI (not:DI (zero_extend:DI
2601 (match_operand:SI 2 "s_register_operand" "r,r")))
2602 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2605 bic%?\\t%Q0, %Q1, %2
2607 ; (not (zero_extend ...)) allows us to just copy the high word from
2608 ; operand1 to operand0.
2611 && operands[0] != operands[1]"
2612 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2613 (set (match_dup 3) (match_dup 4))]
2616 operands[3] = gen_highpart (SImode, operands[0]);
2617 operands[0] = gen_lowpart (SImode, operands[0]);
2618 operands[4] = gen_highpart (SImode, operands[1]);
2619 operands[1] = gen_lowpart (SImode, operands[1]);
2621 [(set_attr "length" "4,8")
2622 (set_attr "predicable" "yes")]
2625 (define_insn_and_split "*anddi_notsesidi_di"
2626 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2627 (and:DI (not:DI (sign_extend:DI
2628 (match_operand:SI 2 "s_register_operand" "r,r")))
2629 (match_operand:DI 1 "s_register_operand" "0,r")))]
2632 "TARGET_32BIT && reload_completed"
2633 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2634 (set (match_dup 3) (and:SI (not:SI
2635 (ashiftrt:SI (match_dup 2) (const_int 31)))
2639 operands[3] = gen_highpart (SImode, operands[0]);
2640 operands[0] = gen_lowpart (SImode, operands[0]);
2641 operands[4] = gen_highpart (SImode, operands[1]);
2642 operands[1] = gen_lowpart (SImode, operands[1]);
2644 [(set_attr "length" "8")
2645 (set_attr "predicable" "yes")]
2648 (define_insn "andsi_notsi_si"
2649 [(set (match_operand:SI 0 "s_register_operand" "=r")
2650 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2651 (match_operand:SI 1 "s_register_operand" "r")))]
2653 "bic%?\\t%0, %1, %2"
2654 [(set_attr "predicable" "yes")]
2657 (define_insn "thumb1_bicsi3"
2658 [(set (match_operand:SI 0 "register_operand" "=l")
2659 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2660 (match_operand:SI 2 "register_operand" "0")))]
2663 [(set_attr "length" "2")
2664 (set_attr "conds" "set")])
2666 (define_insn "andsi_not_shiftsi_si"
2667 [(set (match_operand:SI 0 "s_register_operand" "=r")
2668 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2669 [(match_operand:SI 2 "s_register_operand" "r")
2670 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2671 (match_operand:SI 1 "s_register_operand" "r")))]
2673 "bic%?\\t%0, %1, %2%S4"
2674 [(set_attr "predicable" "yes")
2675 (set_attr "shift" "2")
2676 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2677 (const_string "alu_shift")
2678 (const_string "alu_shift_reg")))]
2681 (define_insn "*andsi_notsi_si_compare0"
2682 [(set (reg:CC_NOOV CC_REGNUM)
2684 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2685 (match_operand:SI 1 "s_register_operand" "r"))
2687 (set (match_operand:SI 0 "s_register_operand" "=r")
2688 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2690 "bic%.\\t%0, %1, %2"
2691 [(set_attr "conds" "set")]
2694 (define_insn "*andsi_notsi_si_compare0_scratch"
2695 [(set (reg:CC_NOOV CC_REGNUM)
2697 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2698 (match_operand:SI 1 "s_register_operand" "r"))
2700 (clobber (match_scratch:SI 0 "=r"))]
2702 "bic%.\\t%0, %1, %2"
2703 [(set_attr "conds" "set")]
2706 (define_expand "iordi3"
2707 [(set (match_operand:DI 0 "s_register_operand" "")
2708 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2709 (match_operand:DI 2 "neon_logic_op2" "")))]
2714 (define_insn "*iordi3_insn"
2715 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2716 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2717 (match_operand:DI 2 "s_register_operand" "r,r")))]
2718 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2720 [(set_attr "length" "8")
2721 (set_attr "predicable" "yes")]
2724 (define_insn "*iordi_zesidi_di"
2725 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2726 (ior:DI (zero_extend:DI
2727 (match_operand:SI 2 "s_register_operand" "r,r"))
2728 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2731 orr%?\\t%Q0, %Q1, %2
2733 [(set_attr "length" "4,8")
2734 (set_attr "predicable" "yes")]
2737 (define_insn "*iordi_sesidi_di"
2738 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2739 (ior:DI (sign_extend:DI
2740 (match_operand:SI 2 "s_register_operand" "r,r"))
2741 (match_operand:DI 1 "s_register_operand" "0,r")))]
2744 [(set_attr "length" "8")
2745 (set_attr "predicable" "yes")]
2748 (define_expand "iorsi3"
2749 [(set (match_operand:SI 0 "s_register_operand" "")
2750 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2751 (match_operand:SI 2 "reg_or_int_operand" "")))]
2754 if (GET_CODE (operands[2]) == CONST_INT)
2758 arm_split_constant (IOR, SImode, NULL_RTX,
2759 INTVAL (operands[2]), operands[0], operands[1],
2760 optimize && can_create_pseudo_p ());
2763 else /* TARGET_THUMB1 */
2765 rtx tmp = force_reg (SImode, operands[2]);
2766 if (rtx_equal_p (operands[0], operands[1]))
2770 operands[2] = operands[1];
2778 (define_insn_and_split "*iorsi3_insn"
2779 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2780 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2781 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2785 orn%?\\t%0, %1, #%B2
2788 && GET_CODE (operands[2]) == CONST_INT
2789 && !(const_ok_for_arm (INTVAL (operands[2]))
2790 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2791 [(clobber (const_int 0))]
2793 arm_split_constant (IOR, SImode, curr_insn,
2794 INTVAL (operands[2]), operands[0], operands[1], 0);
2797 [(set_attr "length" "4,4,16")
2798 (set_attr "arch" "32,t2,32")
2799 (set_attr "predicable" "yes")])
2801 (define_insn "*thumb1_iorsi3_insn"
2802 [(set (match_operand:SI 0 "register_operand" "=l")
2803 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2804 (match_operand:SI 2 "register_operand" "l")))]
2807 [(set_attr "length" "2")
2808 (set_attr "conds" "set")])
2811 [(match_scratch:SI 3 "r")
2812 (set (match_operand:SI 0 "arm_general_register_operand" "")
2813 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2814 (match_operand:SI 2 "const_int_operand" "")))]
2816 && !const_ok_for_arm (INTVAL (operands[2]))
2817 && const_ok_for_arm (~INTVAL (operands[2]))"
2818 [(set (match_dup 3) (match_dup 2))
2819 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2823 (define_insn "*iorsi3_compare0"
2824 [(set (reg:CC_NOOV CC_REGNUM)
2825 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2826 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2828 (set (match_operand:SI 0 "s_register_operand" "=r")
2829 (ior:SI (match_dup 1) (match_dup 2)))]
2831 "orr%.\\t%0, %1, %2"
2832 [(set_attr "conds" "set")]
2835 (define_insn "*iorsi3_compare0_scratch"
2836 [(set (reg:CC_NOOV CC_REGNUM)
2837 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2838 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2840 (clobber (match_scratch:SI 0 "=r"))]
2842 "orr%.\\t%0, %1, %2"
2843 [(set_attr "conds" "set")]
2846 (define_expand "xordi3"
2847 [(set (match_operand:DI 0 "s_register_operand" "")
2848 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2849 (match_operand:DI 2 "s_register_operand" "")))]
2854 (define_insn "*xordi3_insn"
2855 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2856 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2857 (match_operand:DI 2 "s_register_operand" "r,r")))]
2858 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2860 [(set_attr "length" "8")
2861 (set_attr "predicable" "yes")]
2864 (define_insn "*xordi_zesidi_di"
2865 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2866 (xor:DI (zero_extend:DI
2867 (match_operand:SI 2 "s_register_operand" "r,r"))
2868 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2871 eor%?\\t%Q0, %Q1, %2
2873 [(set_attr "length" "4,8")
2874 (set_attr "predicable" "yes")]
2877 (define_insn "*xordi_sesidi_di"
2878 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2879 (xor:DI (sign_extend:DI
2880 (match_operand:SI 2 "s_register_operand" "r,r"))
2881 (match_operand:DI 1 "s_register_operand" "0,r")))]
2884 [(set_attr "length" "8")
2885 (set_attr "predicable" "yes")]
2888 (define_expand "xorsi3"
2889 [(set (match_operand:SI 0 "s_register_operand" "")
2890 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2891 (match_operand:SI 2 "reg_or_int_operand" "")))]
2893 "if (GET_CODE (operands[2]) == CONST_INT)
2897 arm_split_constant (XOR, SImode, NULL_RTX,
2898 INTVAL (operands[2]), operands[0], operands[1],
2899 optimize && can_create_pseudo_p ());
2902 else /* TARGET_THUMB1 */
2904 rtx tmp = force_reg (SImode, operands[2]);
2905 if (rtx_equal_p (operands[0], operands[1]))
2909 operands[2] = operands[1];
2916 (define_insn "*arm_xorsi3"
2917 [(set (match_operand:SI 0 "s_register_operand" "=r")
2918 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2919 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2921 "eor%?\\t%0, %1, %2"
2922 [(set_attr "predicable" "yes")]
2925 (define_insn "*thumb1_xorsi3_insn"
2926 [(set (match_operand:SI 0 "register_operand" "=l")
2927 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2928 (match_operand:SI 2 "register_operand" "l")))]
2931 [(set_attr "length" "2")
2932 (set_attr "conds" "set")])
2934 (define_insn "*xorsi3_compare0"
2935 [(set (reg:CC_NOOV CC_REGNUM)
2936 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2937 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2939 (set (match_operand:SI 0 "s_register_operand" "=r")
2940 (xor:SI (match_dup 1) (match_dup 2)))]
2942 "eor%.\\t%0, %1, %2"
2943 [(set_attr "conds" "set")]
2946 (define_insn "*xorsi3_compare0_scratch"
2947 [(set (reg:CC_NOOV CC_REGNUM)
2948 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2949 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2953 [(set_attr "conds" "set")]
2956 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2957 ; (NOT D) we can sometimes merge the final NOT into one of the following
2961 [(set (match_operand:SI 0 "s_register_operand" "")
2962 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2963 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2964 (match_operand:SI 3 "arm_rhs_operand" "")))
2965 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2967 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2968 (not:SI (match_dup 3))))
2969 (set (match_dup 0) (not:SI (match_dup 4)))]
2973 (define_insn "*andsi_iorsi3_notsi"
2974 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2975 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2976 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2977 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2979 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2980 [(set_attr "length" "8")
2981 (set_attr "ce_count" "2")
2982 (set_attr "predicable" "yes")]
2985 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2986 ; insns are available?
2988 [(set (match_operand:SI 0 "s_register_operand" "")
2989 (match_operator:SI 1 "logical_binary_operator"
2990 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2991 (match_operand:SI 3 "const_int_operand" "")
2992 (match_operand:SI 4 "const_int_operand" ""))
2993 (match_operator:SI 9 "logical_binary_operator"
2994 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2995 (match_operand:SI 6 "const_int_operand" ""))
2996 (match_operand:SI 7 "s_register_operand" "")])]))
2997 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2999 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3000 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3003 [(ashift:SI (match_dup 2) (match_dup 4))
3007 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3010 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3014 [(set (match_operand:SI 0 "s_register_operand" "")
3015 (match_operator:SI 1 "logical_binary_operator"
3016 [(match_operator:SI 9 "logical_binary_operator"
3017 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3018 (match_operand:SI 6 "const_int_operand" ""))
3019 (match_operand:SI 7 "s_register_operand" "")])
3020 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3021 (match_operand:SI 3 "const_int_operand" "")
3022 (match_operand:SI 4 "const_int_operand" ""))]))
3023 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3025 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3026 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3029 [(ashift:SI (match_dup 2) (match_dup 4))
3033 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3036 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3040 [(set (match_operand:SI 0 "s_register_operand" "")
3041 (match_operator:SI 1 "logical_binary_operator"
3042 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3043 (match_operand:SI 3 "const_int_operand" "")
3044 (match_operand:SI 4 "const_int_operand" ""))
3045 (match_operator:SI 9 "logical_binary_operator"
3046 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3047 (match_operand:SI 6 "const_int_operand" ""))
3048 (match_operand:SI 7 "s_register_operand" "")])]))
3049 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3051 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3052 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3055 [(ashift:SI (match_dup 2) (match_dup 4))
3059 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3062 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3066 [(set (match_operand:SI 0 "s_register_operand" "")
3067 (match_operator:SI 1 "logical_binary_operator"
3068 [(match_operator:SI 9 "logical_binary_operator"
3069 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3070 (match_operand:SI 6 "const_int_operand" ""))
3071 (match_operand:SI 7 "s_register_operand" "")])
3072 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3073 (match_operand:SI 3 "const_int_operand" "")
3074 (match_operand:SI 4 "const_int_operand" ""))]))
3075 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3077 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3078 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3081 [(ashift:SI (match_dup 2) (match_dup 4))
3085 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3088 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3092 ;; Minimum and maximum insns
3094 (define_expand "smaxsi3"
3096 (set (match_operand:SI 0 "s_register_operand" "")
3097 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3098 (match_operand:SI 2 "arm_rhs_operand" "")))
3099 (clobber (reg:CC CC_REGNUM))])]
3102 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3104 /* No need for a clobber of the condition code register here. */
3105 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3106 gen_rtx_SMAX (SImode, operands[1],
3112 (define_insn "*smax_0"
3113 [(set (match_operand:SI 0 "s_register_operand" "=r")
3114 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3117 "bic%?\\t%0, %1, %1, asr #31"
3118 [(set_attr "predicable" "yes")]
3121 (define_insn "*smax_m1"
3122 [(set (match_operand:SI 0 "s_register_operand" "=r")
3123 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3126 "orr%?\\t%0, %1, %1, asr #31"
3127 [(set_attr "predicable" "yes")]
3130 (define_insn "*arm_smax_insn"
3131 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3132 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3133 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3134 (clobber (reg:CC CC_REGNUM))]
3137 cmp\\t%1, %2\;movlt\\t%0, %2
3138 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3139 [(set_attr "conds" "clob")
3140 (set_attr "length" "8,12")]
3143 (define_expand "sminsi3"
3145 (set (match_operand:SI 0 "s_register_operand" "")
3146 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3147 (match_operand:SI 2 "arm_rhs_operand" "")))
3148 (clobber (reg:CC CC_REGNUM))])]
3151 if (operands[2] == const0_rtx)
3153 /* No need for a clobber of the condition code register here. */
3154 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3155 gen_rtx_SMIN (SImode, operands[1],
3161 (define_insn "*smin_0"
3162 [(set (match_operand:SI 0 "s_register_operand" "=r")
3163 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3166 "and%?\\t%0, %1, %1, asr #31"
3167 [(set_attr "predicable" "yes")]
3170 (define_insn "*arm_smin_insn"
3171 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3172 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3173 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3174 (clobber (reg:CC CC_REGNUM))]
3177 cmp\\t%1, %2\;movge\\t%0, %2
3178 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3179 [(set_attr "conds" "clob")
3180 (set_attr "length" "8,12")]
3183 (define_expand "umaxsi3"
3185 (set (match_operand:SI 0 "s_register_operand" "")
3186 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3187 (match_operand:SI 2 "arm_rhs_operand" "")))
3188 (clobber (reg:CC CC_REGNUM))])]
3193 (define_insn "*arm_umaxsi3"
3194 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3195 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3196 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3197 (clobber (reg:CC CC_REGNUM))]
3200 cmp\\t%1, %2\;movcc\\t%0, %2
3201 cmp\\t%1, %2\;movcs\\t%0, %1
3202 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3203 [(set_attr "conds" "clob")
3204 (set_attr "length" "8,8,12")]
3207 (define_expand "uminsi3"
3209 (set (match_operand:SI 0 "s_register_operand" "")
3210 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3211 (match_operand:SI 2 "arm_rhs_operand" "")))
3212 (clobber (reg:CC CC_REGNUM))])]
3217 (define_insn "*arm_uminsi3"
3218 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3219 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3220 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3221 (clobber (reg:CC CC_REGNUM))]
3224 cmp\\t%1, %2\;movcs\\t%0, %2
3225 cmp\\t%1, %2\;movcc\\t%0, %1
3226 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3227 [(set_attr "conds" "clob")
3228 (set_attr "length" "8,8,12")]
3231 (define_insn "*store_minmaxsi"
3232 [(set (match_operand:SI 0 "memory_operand" "=m")
3233 (match_operator:SI 3 "minmax_operator"
3234 [(match_operand:SI 1 "s_register_operand" "r")
3235 (match_operand:SI 2 "s_register_operand" "r")]))
3236 (clobber (reg:CC CC_REGNUM))]
3239 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3240 operands[1], operands[2]);
3241 output_asm_insn (\"cmp\\t%1, %2\", operands);
3243 output_asm_insn (\"ite\t%d3\", operands);
3244 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3245 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3248 [(set_attr "conds" "clob")
3249 (set (attr "length")
3250 (if_then_else (eq_attr "is_thumb" "yes")
3253 (set_attr "type" "store1")]
3256 ; Reject the frame pointer in operand[1], since reloading this after
3257 ; it has been eliminated can cause carnage.
3258 (define_insn "*minmax_arithsi"
3259 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3260 (match_operator:SI 4 "shiftable_operator"
3261 [(match_operator:SI 5 "minmax_operator"
3262 [(match_operand:SI 2 "s_register_operand" "r,r")
3263 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3264 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3265 (clobber (reg:CC CC_REGNUM))]
3266 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3269 enum rtx_code code = GET_CODE (operands[4]);
3272 if (which_alternative != 0 || operands[3] != const0_rtx
3273 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3278 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3279 operands[2], operands[3]);
3280 output_asm_insn (\"cmp\\t%2, %3\", operands);
3284 output_asm_insn (\"ite\\t%d5\", operands);
3286 output_asm_insn (\"it\\t%d5\", operands);
3288 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3290 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3293 [(set_attr "conds" "clob")
3294 (set (attr "length")
3295 (if_then_else (eq_attr "is_thumb" "yes")
3301 ;; Shift and rotation insns
3303 (define_expand "ashldi3"
3304 [(set (match_operand:DI 0 "s_register_operand" "")
3305 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "reg_or_int_operand" "")))]
3309 if (GET_CODE (operands[2]) == CONST_INT)
3311 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3313 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3316 /* Ideally we shouldn't fail here if we could know that operands[1]
3317 ends up already living in an iwmmxt register. Otherwise it's
3318 cheaper to have the alternate code being generated than moving
3319 values to iwmmxt regs and back. */
3322 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3327 (define_insn "arm_ashldi3_1bit"
3328 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3329 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3331 (clobber (reg:CC CC_REGNUM))]
3333 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3334 [(set_attr "conds" "clob")
3335 (set_attr "length" "8")]
3338 (define_expand "ashlsi3"
3339 [(set (match_operand:SI 0 "s_register_operand" "")
3340 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3341 (match_operand:SI 2 "arm_rhs_operand" "")))]
3344 if (GET_CODE (operands[2]) == CONST_INT
3345 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3347 emit_insn (gen_movsi (operands[0], const0_rtx));
3353 (define_insn "*thumb1_ashlsi3"
3354 [(set (match_operand:SI 0 "register_operand" "=l,l")
3355 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3356 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3359 [(set_attr "length" "2")
3360 (set_attr "conds" "set")])
3362 (define_expand "ashrdi3"
3363 [(set (match_operand:DI 0 "s_register_operand" "")
3364 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3365 (match_operand:SI 2 "reg_or_int_operand" "")))]
3368 if (GET_CODE (operands[2]) == CONST_INT)
3370 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3372 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3375 /* Ideally we shouldn't fail here if we could know that operands[1]
3376 ends up already living in an iwmmxt register. Otherwise it's
3377 cheaper to have the alternate code being generated than moving
3378 values to iwmmxt regs and back. */
3381 else if (!TARGET_REALLY_IWMMXT)
3386 (define_insn "arm_ashrdi3_1bit"
3387 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3388 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3390 (clobber (reg:CC CC_REGNUM))]
3392 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3393 [(set_attr "conds" "clob")
3394 (set_attr "insn" "mov")
3395 (set_attr "length" "8")]
3398 (define_expand "ashrsi3"
3399 [(set (match_operand:SI 0 "s_register_operand" "")
3400 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3401 (match_operand:SI 2 "arm_rhs_operand" "")))]
3404 if (GET_CODE (operands[2]) == CONST_INT
3405 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3406 operands[2] = GEN_INT (31);
3410 (define_insn "*thumb1_ashrsi3"
3411 [(set (match_operand:SI 0 "register_operand" "=l,l")
3412 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3413 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3416 [(set_attr "length" "2")
3417 (set_attr "conds" "set")])
3419 (define_expand "lshrdi3"
3420 [(set (match_operand:DI 0 "s_register_operand" "")
3421 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3422 (match_operand:SI 2 "reg_or_int_operand" "")))]
3425 if (GET_CODE (operands[2]) == CONST_INT)
3427 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3429 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3432 /* Ideally we shouldn't fail here if we could know that operands[1]
3433 ends up already living in an iwmmxt register. Otherwise it's
3434 cheaper to have the alternate code being generated than moving
3435 values to iwmmxt regs and back. */
3438 else if (!TARGET_REALLY_IWMMXT)
3443 (define_insn "arm_lshrdi3_1bit"
3444 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3445 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3447 (clobber (reg:CC CC_REGNUM))]
3449 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3450 [(set_attr "conds" "clob")
3451 (set_attr "insn" "mov")
3452 (set_attr "length" "8")]
3455 (define_expand "lshrsi3"
3456 [(set (match_operand:SI 0 "s_register_operand" "")
3457 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3458 (match_operand:SI 2 "arm_rhs_operand" "")))]
3461 if (GET_CODE (operands[2]) == CONST_INT
3462 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3464 emit_insn (gen_movsi (operands[0], const0_rtx));
3470 (define_insn "*thumb1_lshrsi3"
3471 [(set (match_operand:SI 0 "register_operand" "=l,l")
3472 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3473 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3476 [(set_attr "length" "2")
3477 (set_attr "conds" "set")])
3479 (define_expand "rotlsi3"
3480 [(set (match_operand:SI 0 "s_register_operand" "")
3481 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3482 (match_operand:SI 2 "reg_or_int_operand" "")))]
3485 if (GET_CODE (operands[2]) == CONST_INT)
3486 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3489 rtx reg = gen_reg_rtx (SImode);
3490 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3496 (define_expand "rotrsi3"
3497 [(set (match_operand:SI 0 "s_register_operand" "")
3498 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3499 (match_operand:SI 2 "arm_rhs_operand" "")))]
3504 if (GET_CODE (operands[2]) == CONST_INT
3505 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3506 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3508 else /* TARGET_THUMB1 */
3510 if (GET_CODE (operands [2]) == CONST_INT)
3511 operands [2] = force_reg (SImode, operands[2]);
3516 (define_insn "*thumb1_rotrsi3"
3517 [(set (match_operand:SI 0 "register_operand" "=l")
3518 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3519 (match_operand:SI 2 "register_operand" "l")))]
3522 [(set_attr "length" "2")]
3525 (define_insn "*arm_shiftsi3"
3526 [(set (match_operand:SI 0 "s_register_operand" "=r")
3527 (match_operator:SI 3 "shift_operator"
3528 [(match_operand:SI 1 "s_register_operand" "r")
3529 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3531 "* return arm_output_shift(operands, 0);"
3532 [(set_attr "predicable" "yes")
3533 (set_attr "shift" "1")
3534 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3535 (const_string "alu_shift")
3536 (const_string "alu_shift_reg")))]
3539 (define_insn "*shiftsi3_compare0"
3540 [(set (reg:CC_NOOV CC_REGNUM)
3541 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3542 [(match_operand:SI 1 "s_register_operand" "r")
3543 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3545 (set (match_operand:SI 0 "s_register_operand" "=r")
3546 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3548 "* return arm_output_shift(operands, 1);"
3549 [(set_attr "conds" "set")
3550 (set_attr "shift" "1")
3551 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3552 (const_string "alu_shift")
3553 (const_string "alu_shift_reg")))]
3556 (define_insn "*shiftsi3_compare0_scratch"
3557 [(set (reg:CC_NOOV CC_REGNUM)
3558 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3559 [(match_operand:SI 1 "s_register_operand" "r")
3560 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3562 (clobber (match_scratch:SI 0 "=r"))]
3564 "* return arm_output_shift(operands, 1);"
3565 [(set_attr "conds" "set")
3566 (set_attr "shift" "1")]
3569 (define_insn "*not_shiftsi"
3570 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3571 (not:SI (match_operator:SI 3 "shift_operator"
3572 [(match_operand:SI 1 "s_register_operand" "r,r")
3573 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3576 [(set_attr "predicable" "yes")
3577 (set_attr "shift" "1")
3578 (set_attr "insn" "mvn")
3579 (set_attr "arch" "32,a")
3580 (set_attr "type" "alu_shift,alu_shift_reg")])
3582 (define_insn "*not_shiftsi_compare0"
3583 [(set (reg:CC_NOOV CC_REGNUM)
3585 (not:SI (match_operator:SI 3 "shift_operator"
3586 [(match_operand:SI 1 "s_register_operand" "r,r")
3587 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3589 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3590 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3593 [(set_attr "conds" "set")
3594 (set_attr "shift" "1")
3595 (set_attr "insn" "mvn")
3596 (set_attr "arch" "32,a")
3597 (set_attr "type" "alu_shift,alu_shift_reg")])
3599 (define_insn "*not_shiftsi_compare0_scratch"
3600 [(set (reg:CC_NOOV CC_REGNUM)
3602 (not:SI (match_operator:SI 3 "shift_operator"
3603 [(match_operand:SI 1 "s_register_operand" "r,r")
3604 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3606 (clobber (match_scratch:SI 0 "=r,r"))]
3609 [(set_attr "conds" "set")
3610 (set_attr "shift" "1")
3611 (set_attr "insn" "mvn")
3612 (set_attr "arch" "32,a")
3613 (set_attr "type" "alu_shift,alu_shift_reg")])
3615 ;; We don't really have extzv, but defining this using shifts helps
3616 ;; to reduce register pressure later on.
3618 (define_expand "extzv"
3620 (ashift:SI (match_operand:SI 1 "register_operand" "")
3621 (match_operand:SI 2 "const_int_operand" "")))
3622 (set (match_operand:SI 0 "register_operand" "")
3623 (lshiftrt:SI (match_dup 4)
3624 (match_operand:SI 3 "const_int_operand" "")))]
3625 "TARGET_THUMB1 || arm_arch_thumb2"
3628 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3629 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3631 if (arm_arch_thumb2)
3633 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3638 operands[3] = GEN_INT (rshift);
3642 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3646 operands[2] = GEN_INT (lshift);
3647 operands[4] = gen_reg_rtx (SImode);
3652 [(set (match_operand:SI 0 "s_register_operand" "=r")
3653 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3654 (match_operand:SI 2 "const_int_operand" "M")
3655 (match_operand:SI 3 "const_int_operand" "M")))]
3657 "sbfx%?\t%0, %1, %3, %2"
3658 [(set_attr "length" "4")
3659 (set_attr "predicable" "yes")]
3662 (define_insn "extzv_t2"
3663 [(set (match_operand:SI 0 "s_register_operand" "=r")
3664 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3665 (match_operand:SI 2 "const_int_operand" "M")
3666 (match_operand:SI 3 "const_int_operand" "M")))]
3668 "ubfx%?\t%0, %1, %3, %2"
3669 [(set_attr "length" "4")
3670 (set_attr "predicable" "yes")]
3674 ;; Unary arithmetic insns
3676 (define_expand "negdi2"
3678 [(set (match_operand:DI 0 "s_register_operand" "")
3679 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3680 (clobber (reg:CC CC_REGNUM))])]
3685 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3686 ;; The first alternative allows the common case of a *full* overlap.
3687 (define_insn "*arm_negdi2"
3688 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3689 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3690 (clobber (reg:CC CC_REGNUM))]
3692 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3693 [(set_attr "conds" "clob")
3694 (set_attr "length" "8")]
3697 (define_insn "*thumb1_negdi2"
3698 [(set (match_operand:DI 0 "register_operand" "=&l")
3699 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3700 (clobber (reg:CC CC_REGNUM))]
3702 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3703 [(set_attr "length" "6")]
3706 (define_expand "negsi2"
3707 [(set (match_operand:SI 0 "s_register_operand" "")
3708 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3713 (define_insn "*arm_negsi2"
3714 [(set (match_operand:SI 0 "s_register_operand" "=r")
3715 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3717 "rsb%?\\t%0, %1, #0"
3718 [(set_attr "predicable" "yes")]
3721 (define_insn "*thumb1_negsi2"
3722 [(set (match_operand:SI 0 "register_operand" "=l")
3723 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3726 [(set_attr "length" "2")]
3729 (define_expand "negsf2"
3730 [(set (match_operand:SF 0 "s_register_operand" "")
3731 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3732 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3736 (define_expand "negdf2"
3737 [(set (match_operand:DF 0 "s_register_operand" "")
3738 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3739 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3742 ;; abssi2 doesn't really clobber the condition codes if a different register
3743 ;; is being set. To keep things simple, assume during rtl manipulations that
3744 ;; it does, but tell the final scan operator the truth. Similarly for
3747 (define_expand "abssi2"
3749 [(set (match_operand:SI 0 "s_register_operand" "")
3750 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3751 (clobber (match_dup 2))])]
3755 operands[2] = gen_rtx_SCRATCH (SImode);
3757 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3760 (define_insn "*arm_abssi2"
3761 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3762 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3763 (clobber (reg:CC CC_REGNUM))]
3766 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3767 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3768 [(set_attr "conds" "clob,*")
3769 (set_attr "shift" "1")
3770 ;; predicable can't be set based on the variant, so left as no
3771 (set_attr "length" "8")]
3774 (define_insn_and_split "*thumb1_abssi2"
3775 [(set (match_operand:SI 0 "s_register_operand" "=l")
3776 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3777 (clobber (match_scratch:SI 2 "=&l"))]
3780 "TARGET_THUMB1 && reload_completed"
3781 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3782 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3783 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3785 [(set_attr "length" "6")]
3788 (define_insn "*arm_neg_abssi2"
3789 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3790 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3791 (clobber (reg:CC CC_REGNUM))]
3794 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3795 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3796 [(set_attr "conds" "clob,*")
3797 (set_attr "shift" "1")
3798 ;; predicable can't be set based on the variant, so left as no
3799 (set_attr "length" "8")]
3802 (define_insn_and_split "*thumb1_neg_abssi2"
3803 [(set (match_operand:SI 0 "s_register_operand" "=l")
3804 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3805 (clobber (match_scratch:SI 2 "=&l"))]
3808 "TARGET_THUMB1 && reload_completed"
3809 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3810 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3811 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3813 [(set_attr "length" "6")]
3816 (define_expand "abssf2"
3817 [(set (match_operand:SF 0 "s_register_operand" "")
3818 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3819 "TARGET_32BIT && TARGET_HARD_FLOAT"
3822 (define_expand "absdf2"
3823 [(set (match_operand:DF 0 "s_register_operand" "")
3824 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3825 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3828 (define_expand "sqrtsf2"
3829 [(set (match_operand:SF 0 "s_register_operand" "")
3830 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3831 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3834 (define_expand "sqrtdf2"
3835 [(set (match_operand:DF 0 "s_register_operand" "")
3836 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3837 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3840 (define_insn_and_split "one_cmpldi2"
3841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3842 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3845 "TARGET_32BIT && reload_completed"
3846 [(set (match_dup 0) (not:SI (match_dup 1)))
3847 (set (match_dup 2) (not:SI (match_dup 3)))]
3850 operands[2] = gen_highpart (SImode, operands[0]);
3851 operands[0] = gen_lowpart (SImode, operands[0]);
3852 operands[3] = gen_highpart (SImode, operands[1]);
3853 operands[1] = gen_lowpart (SImode, operands[1]);
3855 [(set_attr "length" "8")
3856 (set_attr "predicable" "yes")]
3859 (define_expand "one_cmplsi2"
3860 [(set (match_operand:SI 0 "s_register_operand" "")
3861 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3866 (define_insn "*arm_one_cmplsi2"
3867 [(set (match_operand:SI 0 "s_register_operand" "=r")
3868 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3871 [(set_attr "predicable" "yes")
3872 (set_attr "insn" "mvn")]
3875 (define_insn "*thumb1_one_cmplsi2"
3876 [(set (match_operand:SI 0 "register_operand" "=l")
3877 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3880 [(set_attr "length" "2")
3881 (set_attr "insn" "mvn")]
3884 (define_insn "*notsi_compare0"
3885 [(set (reg:CC_NOOV CC_REGNUM)
3886 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3888 (set (match_operand:SI 0 "s_register_operand" "=r")
3889 (not:SI (match_dup 1)))]
3892 [(set_attr "conds" "set")
3893 (set_attr "insn" "mvn")]
3896 (define_insn "*notsi_compare0_scratch"
3897 [(set (reg:CC_NOOV CC_REGNUM)
3898 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3900 (clobber (match_scratch:SI 0 "=r"))]
3903 [(set_attr "conds" "set")
3904 (set_attr "insn" "mvn")]
3907 ;; Fixed <--> Floating conversion insns
3909 (define_expand "floatsihf2"
3910 [(set (match_operand:HF 0 "general_operand" "")
3911 (float:HF (match_operand:SI 1 "general_operand" "")))]
3915 rtx op1 = gen_reg_rtx (SFmode);
3916 expand_float (op1, operands[1], 0);
3917 op1 = convert_to_mode (HFmode, op1, 0);
3918 emit_move_insn (operands[0], op1);
3923 (define_expand "floatdihf2"
3924 [(set (match_operand:HF 0 "general_operand" "")
3925 (float:HF (match_operand:DI 1 "general_operand" "")))]
3929 rtx op1 = gen_reg_rtx (SFmode);
3930 expand_float (op1, operands[1], 0);
3931 op1 = convert_to_mode (HFmode, op1, 0);
3932 emit_move_insn (operands[0], op1);
3937 (define_expand "floatsisf2"
3938 [(set (match_operand:SF 0 "s_register_operand" "")
3939 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3940 "TARGET_32BIT && TARGET_HARD_FLOAT"
3942 if (TARGET_MAVERICK)
3944 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3949 (define_expand "floatsidf2"
3950 [(set (match_operand:DF 0 "s_register_operand" "")
3951 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3952 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3954 if (TARGET_MAVERICK)
3956 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3961 (define_expand "fix_trunchfsi2"
3962 [(set (match_operand:SI 0 "general_operand" "")
3963 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3967 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3968 expand_fix (operands[0], op1, 0);
3973 (define_expand "fix_trunchfdi2"
3974 [(set (match_operand:DI 0 "general_operand" "")
3975 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3979 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3980 expand_fix (operands[0], op1, 0);
3985 (define_expand "fix_truncsfsi2"
3986 [(set (match_operand:SI 0 "s_register_operand" "")
3987 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3988 "TARGET_32BIT && TARGET_HARD_FLOAT"
3990 if (TARGET_MAVERICK)
3992 if (!cirrus_fp_register (operands[0], SImode))
3993 operands[0] = force_reg (SImode, operands[0]);
3994 if (!cirrus_fp_register (operands[1], SFmode))
3995 operands[1] = force_reg (SFmode, operands[0]);
3996 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4001 (define_expand "fix_truncdfsi2"
4002 [(set (match_operand:SI 0 "s_register_operand" "")
4003 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4004 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4006 if (TARGET_MAVERICK)
4008 if (!cirrus_fp_register (operands[1], DFmode))
4009 operands[1] = force_reg (DFmode, operands[0]);
4010 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4017 (define_expand "truncdfsf2"
4018 [(set (match_operand:SF 0 "s_register_operand" "")
4020 (match_operand:DF 1 "s_register_operand" "")))]
4021 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4025 /* DFmode -> HFmode conversions have to go through SFmode. */
4026 (define_expand "truncdfhf2"
4027 [(set (match_operand:HF 0 "general_operand" "")
4029 (match_operand:DF 1 "general_operand" "")))]
4034 op1 = convert_to_mode (SFmode, operands[1], 0);
4035 op1 = convert_to_mode (HFmode, op1, 0);
4036 emit_move_insn (operands[0], op1);
4041 ;; Zero and sign extension instructions.
4043 (define_insn "zero_extend<mode>di2"
4044 [(set (match_operand:DI 0 "s_register_operand" "=r")
4045 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4046 "<qhs_extenddi_cstr>")))]
4047 "TARGET_32BIT <qhs_zextenddi_cond>"
4049 [(set_attr "length" "8")
4050 (set_attr "ce_count" "2")
4051 (set_attr "predicable" "yes")]
4054 (define_insn "extend<mode>di2"
4055 [(set (match_operand:DI 0 "s_register_operand" "=r")
4056 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4057 "<qhs_extenddi_cstr>")))]
4058 "TARGET_32BIT <qhs_sextenddi_cond>"
4060 [(set_attr "length" "8")
4061 (set_attr "ce_count" "2")
4062 (set_attr "shift" "1")
4063 (set_attr "predicable" "yes")]
4066 ;; Splits for all extensions to DImode
4068 [(set (match_operand:DI 0 "s_register_operand" "")
4069 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4071 [(set (match_dup 0) (match_dup 1))]
4073 rtx lo_part = gen_lowpart (SImode, operands[0]);
4074 enum machine_mode src_mode = GET_MODE (operands[1]);
4076 if (REG_P (operands[0])
4077 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4078 emit_clobber (operands[0]);
4079 if (!REG_P (lo_part) || src_mode != SImode
4080 || !rtx_equal_p (lo_part, operands[1]))
4082 if (src_mode == SImode)
4083 emit_move_insn (lo_part, operands[1]);
4085 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4086 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4087 operands[1] = lo_part;
4089 operands[0] = gen_highpart (SImode, operands[0]);
4090 operands[1] = const0_rtx;
4094 [(set (match_operand:DI 0 "s_register_operand" "")
4095 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4097 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4099 rtx lo_part = gen_lowpart (SImode, operands[0]);
4100 enum machine_mode src_mode = GET_MODE (operands[1]);
4102 if (REG_P (operands[0])
4103 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4104 emit_clobber (operands[0]);
4106 if (!REG_P (lo_part) || src_mode != SImode
4107 || !rtx_equal_p (lo_part, operands[1]))
4109 if (src_mode == SImode)
4110 emit_move_insn (lo_part, operands[1]);
4112 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4113 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4114 operands[1] = lo_part;
4116 operands[0] = gen_highpart (SImode, operands[0]);
4119 (define_expand "zero_extendhisi2"
4120 [(set (match_operand:SI 0 "s_register_operand" "")
4121 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4124 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4126 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4129 if (!arm_arch6 && !MEM_P (operands[1]))
4131 rtx t = gen_lowpart (SImode, operands[1]);
4132 rtx tmp = gen_reg_rtx (SImode);
4133 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4134 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4140 [(set (match_operand:SI 0 "register_operand" "")
4141 (zero_extend:SI (match_operand:HI 1 "register_operand" "")))]
4142 "!TARGET_THUMB2 && !arm_arch6"
4143 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4144 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4146 operands[2] = gen_lowpart (SImode, operands[1]);
4149 (define_insn "*thumb1_zero_extendhisi2"
4150 [(set (match_operand:SI 0 "register_operand" "=l,l")
4151 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4156 if (which_alternative == 0 && arm_arch6)
4157 return "uxth\t%0, %1";
4158 if (which_alternative == 0)
4161 mem = XEXP (operands[1], 0);
4163 if (GET_CODE (mem) == CONST)
4164 mem = XEXP (mem, 0);
4166 if (GET_CODE (mem) == PLUS)
4168 rtx a = XEXP (mem, 0);
4170 /* This can happen due to bugs in reload. */
4171 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4174 ops[0] = operands[0];
4177 output_asm_insn ("mov\t%0, %1", ops);
4179 XEXP (mem, 0) = operands[0];
4183 return "ldrh\t%0, %1";
4185 [(set_attr_alternative "length"
4186 [(if_then_else (eq_attr "is_arch6" "yes")
4187 (const_int 2) (const_int 4))
4189 (set_attr "type" "alu_shift,load_byte")]
4192 (define_insn "*arm_zero_extendhisi2"
4193 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4194 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4195 "TARGET_ARM && arm_arch4 && !arm_arch6"
4199 [(set_attr "type" "alu_shift,load_byte")
4200 (set_attr "predicable" "yes")]
4203 (define_insn "*arm_zero_extendhisi2_v6"
4204 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4205 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4206 "TARGET_ARM && arm_arch6"
4210 [(set_attr "type" "alu_shift,load_byte")
4211 (set_attr "predicable" "yes")]
4214 (define_insn "*arm_zero_extendhisi2addsi"
4215 [(set (match_operand:SI 0 "s_register_operand" "=r")
4216 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4217 (match_operand:SI 2 "s_register_operand" "r")))]
4219 "uxtah%?\\t%0, %2, %1"
4220 [(set_attr "type" "alu_shift")
4221 (set_attr "predicable" "yes")]
4224 (define_expand "zero_extendqisi2"
4225 [(set (match_operand:SI 0 "s_register_operand" "")
4226 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4229 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4231 emit_insn (gen_andsi3 (operands[0],
4232 gen_lowpart (SImode, operands[1]),
4236 if (!arm_arch6 && !MEM_P (operands[1]))
4238 rtx t = gen_lowpart (SImode, operands[1]);
4239 rtx tmp = gen_reg_rtx (SImode);
4240 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4241 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4247 [(set (match_operand:SI 0 "register_operand" "")
4248 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4250 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4251 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4253 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4256 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4261 (define_insn "*thumb1_zero_extendqisi2"
4262 [(set (match_operand:SI 0 "register_operand" "=l,l")
4263 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4264 "TARGET_THUMB1 && !arm_arch6"
4268 [(set_attr "length" "4,2")
4269 (set_attr "type" "alu_shift,load_byte")
4270 (set_attr "pool_range" "*,32")]
4273 (define_insn "*thumb1_zero_extendqisi2_v6"
4274 [(set (match_operand:SI 0 "register_operand" "=l,l")
4275 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4276 "TARGET_THUMB1 && arm_arch6"
4280 [(set_attr "length" "2")
4281 (set_attr "type" "alu_shift,load_byte")]
4284 (define_insn "*arm_zero_extendqisi2"
4285 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4286 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4287 "TARGET_ARM && !arm_arch6"
4290 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4291 [(set_attr "length" "8,4")
4292 (set_attr "type" "alu_shift,load_byte")
4293 (set_attr "predicable" "yes")]
4296 (define_insn "*arm_zero_extendqisi2_v6"
4297 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4298 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4299 "TARGET_ARM && arm_arch6"
4302 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4303 [(set_attr "type" "alu_shift,load_byte")
4304 (set_attr "predicable" "yes")]
4307 (define_insn "*arm_zero_extendqisi2addsi"
4308 [(set (match_operand:SI 0 "s_register_operand" "=r")
4309 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4310 (match_operand:SI 2 "s_register_operand" "r")))]
4312 "uxtab%?\\t%0, %2, %1"
4313 [(set_attr "predicable" "yes")
4314 (set_attr "insn" "xtab")
4315 (set_attr "type" "alu_shift")]
4319 [(set (match_operand:SI 0 "s_register_operand" "")
4320 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4321 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4322 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4323 [(set (match_dup 2) (match_dup 1))
4324 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4329 [(set (match_operand:SI 0 "s_register_operand" "")
4330 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4331 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4332 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4333 [(set (match_dup 2) (match_dup 1))
4334 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4340 [(set (match_operand:SI 0 "s_register_operand" "")
4341 (ior_xor:SI (and:SI (ashift:SI
4342 (match_operand:SI 1 "s_register_operand" "")
4343 (match_operand:SI 2 "const_int_operand" ""))
4344 (match_operand:SI 3 "const_int_operand" ""))
4346 (match_operator 5 "subreg_lowpart_operator"
4347 [(match_operand:SI 4 "s_register_operand" "")]))))]
4349 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4350 == (GET_MODE_MASK (GET_MODE (operands[5]))
4351 & (GET_MODE_MASK (GET_MODE (operands[5]))
4352 << (INTVAL (operands[2])))))"
4353 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4355 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4356 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4359 (define_insn "*compareqi_eq0"
4360 [(set (reg:CC_Z CC_REGNUM)
4361 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4365 [(set_attr "conds" "set")]
4368 (define_expand "extendhisi2"
4369 [(set (match_operand:SI 0 "s_register_operand" "")
4370 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4375 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4378 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4380 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4384 if (!arm_arch6 && !MEM_P (operands[1]))
4386 rtx t = gen_lowpart (SImode, operands[1]);
4387 rtx tmp = gen_reg_rtx (SImode);
4388 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4389 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4396 [(set (match_operand:SI 0 "register_operand" "")
4397 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4398 (clobber (match_scratch:SI 2 ""))])]
4400 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4401 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4403 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4406 ;; We used to have an early-clobber on the scratch register here.
4407 ;; However, there's a bug somewhere in reload which means that this
4408 ;; can be partially ignored during spill allocation if the memory
4409 ;; address also needs reloading; this causes us to die later on when
4410 ;; we try to verify the operands. Fortunately, we don't really need
4411 ;; the early-clobber: we can always use operand 0 if operand 2
4412 ;; overlaps the address.
4413 (define_insn "thumb1_extendhisi2"
4414 [(set (match_operand:SI 0 "register_operand" "=l,l")
4415 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4416 (clobber (match_scratch:SI 2 "=X,l"))]
4423 if (which_alternative == 0 && !arm_arch6)
4425 if (which_alternative == 0)
4426 return \"sxth\\t%0, %1\";
4428 mem = XEXP (operands[1], 0);
4430 /* This code used to try to use 'V', and fix the address only if it was
4431 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4432 range of QImode offsets, and offsettable_address_p does a QImode
4435 if (GET_CODE (mem) == CONST)
4436 mem = XEXP (mem, 0);
4438 if (GET_CODE (mem) == LABEL_REF)
4439 return \"ldr\\t%0, %1\";
4441 if (GET_CODE (mem) == PLUS)
4443 rtx a = XEXP (mem, 0);
4444 rtx b = XEXP (mem, 1);
4446 if (GET_CODE (a) == LABEL_REF
4447 && GET_CODE (b) == CONST_INT)
4448 return \"ldr\\t%0, %1\";
4450 if (GET_CODE (b) == REG)
4451 return \"ldrsh\\t%0, %1\";
4459 ops[2] = const0_rtx;
4462 gcc_assert (GET_CODE (ops[1]) == REG);
4464 ops[0] = operands[0];
4465 if (reg_mentioned_p (operands[2], ops[1]))
4468 ops[3] = operands[2];
4469 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4472 [(set_attr_alternative "length"
4473 [(if_then_else (eq_attr "is_arch6" "yes")
4474 (const_int 2) (const_int 4))
4476 (set_attr "type" "alu_shift,load_byte")
4477 (set_attr "pool_range" "*,1020")]
4480 ;; This pattern will only be used when ldsh is not available
4481 (define_expand "extendhisi2_mem"
4482 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4484 (zero_extend:SI (match_dup 7)))
4485 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4486 (set (match_operand:SI 0 "" "")
4487 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4492 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4494 mem1 = change_address (operands[1], QImode, addr);
4495 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4496 operands[0] = gen_lowpart (SImode, operands[0]);
4498 operands[2] = gen_reg_rtx (SImode);
4499 operands[3] = gen_reg_rtx (SImode);
4500 operands[6] = gen_reg_rtx (SImode);
4503 if (BYTES_BIG_ENDIAN)
4505 operands[4] = operands[2];
4506 operands[5] = operands[3];
4510 operands[4] = operands[3];
4511 operands[5] = operands[2];
4517 [(set (match_operand:SI 0 "register_operand" "")
4518 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4520 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4521 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4523 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4526 (define_insn "*arm_extendhisi2"
4527 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4528 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4529 "TARGET_ARM && arm_arch4 && !arm_arch6"
4533 [(set_attr "length" "8,4")
4534 (set_attr "type" "alu_shift,load_byte")
4535 (set_attr "predicable" "yes")
4536 (set_attr "pool_range" "*,256")
4537 (set_attr "neg_pool_range" "*,244")]
4540 ;; ??? Check Thumb-2 pool range
4541 (define_insn "*arm_extendhisi2_v6"
4542 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4543 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4544 "TARGET_32BIT && arm_arch6"
4548 [(set_attr "type" "alu_shift,load_byte")
4549 (set_attr "predicable" "yes")
4550 (set_attr "pool_range" "*,256")
4551 (set_attr "neg_pool_range" "*,244")]
4554 (define_insn "*arm_extendhisi2addsi"
4555 [(set (match_operand:SI 0 "s_register_operand" "=r")
4556 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4557 (match_operand:SI 2 "s_register_operand" "r")))]
4559 "sxtah%?\\t%0, %2, %1"
4562 (define_expand "extendqihi2"
4564 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4566 (set (match_operand:HI 0 "s_register_operand" "")
4567 (ashiftrt:SI (match_dup 2)
4572 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4574 emit_insn (gen_rtx_SET (VOIDmode,
4576 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4579 if (!s_register_operand (operands[1], QImode))
4580 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4581 operands[0] = gen_lowpart (SImode, operands[0]);
4582 operands[1] = gen_lowpart (SImode, operands[1]);
4583 operands[2] = gen_reg_rtx (SImode);
4587 (define_insn "*arm_extendqihi_insn"
4588 [(set (match_operand:HI 0 "s_register_operand" "=r")
4589 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4590 "TARGET_ARM && arm_arch4"
4591 "ldr%(sb%)\\t%0, %1"
4592 [(set_attr "type" "load_byte")
4593 (set_attr "predicable" "yes")
4594 (set_attr "pool_range" "256")
4595 (set_attr "neg_pool_range" "244")]
4598 (define_expand "extendqisi2"
4599 [(set (match_operand:SI 0 "s_register_operand" "")
4600 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4603 if (!arm_arch4 && MEM_P (operands[1]))
4604 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4606 if (!arm_arch6 && !MEM_P (operands[1]))
4608 rtx t = gen_lowpart (SImode, operands[1]);
4609 rtx tmp = gen_reg_rtx (SImode);
4610 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4611 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4617 [(set (match_operand:SI 0 "register_operand" "")
4618 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4620 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4621 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4623 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4626 (define_insn "*arm_extendqisi"
4627 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4628 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4629 "TARGET_ARM && arm_arch4 && !arm_arch6"
4633 [(set_attr "length" "8,4")
4634 (set_attr "type" "alu_shift,load_byte")
4635 (set_attr "predicable" "yes")
4636 (set_attr "pool_range" "*,256")
4637 (set_attr "neg_pool_range" "*,244")]
4640 (define_insn "*arm_extendqisi_v6"
4641 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4643 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4644 "TARGET_ARM && arm_arch6"
4648 [(set_attr "type" "alu_shift,load_byte")
4649 (set_attr "predicable" "yes")
4650 (set_attr "pool_range" "*,256")
4651 (set_attr "neg_pool_range" "*,244")]
4654 (define_insn "*arm_extendqisi2addsi"
4655 [(set (match_operand:SI 0 "s_register_operand" "=r")
4656 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4657 (match_operand:SI 2 "s_register_operand" "r")))]
4659 "sxtab%?\\t%0, %2, %1"
4660 [(set_attr "type" "alu_shift")
4661 (set_attr "insn" "xtab")
4662 (set_attr "predicable" "yes")]
4666 [(set (match_operand:SI 0 "register_operand" "")
4667 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4668 "TARGET_THUMB1 && reload_completed"
4669 [(set (match_dup 0) (match_dup 2))
4670 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4672 rtx addr = XEXP (operands[1], 0);
4674 if (GET_CODE (addr) == CONST)
4675 addr = XEXP (addr, 0);
4677 if (GET_CODE (addr) == PLUS
4678 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4679 /* No split necessary. */
4682 if (GET_CODE (addr) == PLUS
4683 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4686 if (reg_overlap_mentioned_p (operands[0], addr))
4688 rtx t = gen_lowpart (QImode, operands[0]);
4689 emit_move_insn (t, operands[1]);
4690 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4696 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4697 operands[2] = const0_rtx;
4699 else if (GET_CODE (addr) != PLUS)
4701 else if (REG_P (XEXP (addr, 0)))
4703 operands[2] = XEXP (addr, 1);
4704 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4708 operands[2] = XEXP (addr, 0);
4709 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4712 operands[3] = change_address (operands[1], QImode, addr);
4716 [(set (match_operand:SI 0 "register_operand" "")
4717 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4718 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4719 (set (match_operand:SI 3 "register_operand" "")
4720 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4722 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4723 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4724 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4725 && (peep2_reg_dead_p (3, operands[0])
4726 || rtx_equal_p (operands[0], operands[3]))
4727 && (peep2_reg_dead_p (3, operands[2])
4728 || rtx_equal_p (operands[2], operands[3]))"
4729 [(set (match_dup 2) (match_dup 1))
4730 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4732 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4733 operands[4] = change_address (operands[4], QImode, addr);
4736 (define_insn "thumb1_extendqisi2"
4737 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4738 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4743 if (which_alternative == 0 && arm_arch6)
4744 return "sxtb\\t%0, %1";
4745 if (which_alternative == 0)
4748 addr = XEXP (operands[1], 0);
4749 if (GET_CODE (addr) == PLUS
4750 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4751 return "ldrsb\\t%0, %1";
4755 [(set_attr_alternative "length"
4756 [(if_then_else (eq_attr "is_arch6" "yes")
4757 (const_int 2) (const_int 4))
4759 (if_then_else (eq_attr "is_arch6" "yes")
4760 (const_int 4) (const_int 6))])
4761 (set_attr "type" "alu_shift,load_byte,load_byte")]
4764 (define_expand "extendsfdf2"
4765 [(set (match_operand:DF 0 "s_register_operand" "")
4766 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4767 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4771 /* HFmode -> DFmode conversions have to go through SFmode. */
4772 (define_expand "extendhfdf2"
4773 [(set (match_operand:DF 0 "general_operand" "")
4774 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4779 op1 = convert_to_mode (SFmode, operands[1], 0);
4780 op1 = convert_to_mode (DFmode, op1, 0);
4781 emit_insn (gen_movdf (operands[0], op1));
4786 ;; Move insns (including loads and stores)
4788 ;; XXX Just some ideas about movti.
4789 ;; I don't think these are a good idea on the arm, there just aren't enough
4791 ;;(define_expand "loadti"
4792 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4793 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4796 ;;(define_expand "storeti"
4797 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4798 ;; (match_operand:TI 1 "s_register_operand" ""))]
4801 ;;(define_expand "movti"
4802 ;; [(set (match_operand:TI 0 "general_operand" "")
4803 ;; (match_operand:TI 1 "general_operand" ""))]
4809 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4810 ;; operands[1] = copy_to_reg (operands[1]);
4811 ;; if (GET_CODE (operands[0]) == MEM)
4812 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4813 ;; else if (GET_CODE (operands[1]) == MEM)
4814 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4818 ;; emit_insn (insn);
4822 ;; Recognize garbage generated above.
4825 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4826 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4830 ;; register mem = (which_alternative < 3);
4831 ;; register const char *template;
4833 ;; operands[mem] = XEXP (operands[mem], 0);
4834 ;; switch (which_alternative)
4836 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4837 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4838 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4839 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4840 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4841 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4843 ;; output_asm_insn (template, operands);
4847 (define_expand "movdi"
4848 [(set (match_operand:DI 0 "general_operand" "")
4849 (match_operand:DI 1 "general_operand" ""))]
4852 if (can_create_pseudo_p ())
4854 if (GET_CODE (operands[0]) != REG)
4855 operands[1] = force_reg (DImode, operands[1]);
4860 (define_insn "*arm_movdi"
4861 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4862 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4864 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4866 && ( register_operand (operands[0], DImode)
4867 || register_operand (operands[1], DImode))"
4869 switch (which_alternative)
4876 return output_move_double (operands);
4879 [(set_attr "length" "8,12,16,8,8")
4880 (set_attr "type" "*,*,*,load2,store2")
4881 (set_attr "arm_pool_range" "*,*,*,1020,*")
4882 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4883 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4884 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4888 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4889 (match_operand:ANY64 1 "const_double_operand" ""))]
4892 && (arm_const_double_inline_cost (operands[1])
4893 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4896 arm_split_constant (SET, SImode, curr_insn,
4897 INTVAL (gen_lowpart (SImode, operands[1])),
4898 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4899 arm_split_constant (SET, SImode, curr_insn,
4900 INTVAL (gen_highpart_mode (SImode,
4901 GET_MODE (operands[0]),
4903 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4908 ; If optimizing for size, or if we have load delay slots, then
4909 ; we want to split the constant into two separate operations.
4910 ; In both cases this may split a trivial part into a single data op
4911 ; leaving a single complex constant to load. We can also get longer
4912 ; offsets in a LDR which means we get better chances of sharing the pool
4913 ; entries. Finally, we can normally do a better job of scheduling
4914 ; LDR instructions than we can with LDM.
4915 ; This pattern will only match if the one above did not.
4917 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4918 (match_operand:ANY64 1 "const_double_operand" ""))]
4919 "TARGET_ARM && reload_completed
4920 && arm_const_double_by_parts (operands[1])"
4921 [(set (match_dup 0) (match_dup 1))
4922 (set (match_dup 2) (match_dup 3))]
4924 operands[2] = gen_highpart (SImode, operands[0]);
4925 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4927 operands[0] = gen_lowpart (SImode, operands[0]);
4928 operands[1] = gen_lowpart (SImode, operands[1]);
4933 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4934 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4935 "TARGET_EITHER && reload_completed"
4936 [(set (match_dup 0) (match_dup 1))
4937 (set (match_dup 2) (match_dup 3))]
4939 operands[2] = gen_highpart (SImode, operands[0]);
4940 operands[3] = gen_highpart (SImode, operands[1]);
4941 operands[0] = gen_lowpart (SImode, operands[0]);
4942 operands[1] = gen_lowpart (SImode, operands[1]);
4944 /* Handle a partial overlap. */
4945 if (rtx_equal_p (operands[0], operands[3]))
4947 rtx tmp0 = operands[0];
4948 rtx tmp1 = operands[1];
4950 operands[0] = operands[2];
4951 operands[1] = operands[3];
4958 ;; We can't actually do base+index doubleword loads if the index and
4959 ;; destination overlap. Split here so that we at least have chance to
4962 [(set (match_operand:DI 0 "s_register_operand" "")
4963 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4964 (match_operand:SI 2 "s_register_operand" ""))))]
4966 && reg_overlap_mentioned_p (operands[0], operands[1])
4967 && reg_overlap_mentioned_p (operands[0], operands[2])"
4969 (plus:SI (match_dup 1)
4972 (mem:DI (match_dup 4)))]
4974 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4978 ;;; ??? This should have alternatives for constants.
4979 ;;; ??? This was originally identical to the movdf_insn pattern.
4980 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4981 ;;; thumb_reorg with a memory reference.
4982 (define_insn "*thumb1_movdi_insn"
4983 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4984 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4986 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4987 && ( register_operand (operands[0], DImode)
4988 || register_operand (operands[1], DImode))"
4991 switch (which_alternative)
4995 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4996 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4997 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4999 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5001 operands[1] = GEN_INT (- INTVAL (operands[1]));
5002 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5004 return \"ldmia\\t%1, {%0, %H0}\";
5006 return \"stmia\\t%0, {%1, %H1}\";
5008 return thumb_load_double_from_address (operands);
5010 operands[2] = gen_rtx_MEM (SImode,
5011 plus_constant (XEXP (operands[0], 0), 4));
5012 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5015 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5016 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5017 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5020 [(set_attr "length" "4,4,6,2,2,6,4,4")
5021 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5022 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5023 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5026 (define_expand "movsi"
5027 [(set (match_operand:SI 0 "general_operand" "")
5028 (match_operand:SI 1 "general_operand" ""))]
5032 rtx base, offset, tmp;
5036 /* Everything except mem = const or mem = mem can be done easily. */
5037 if (GET_CODE (operands[0]) == MEM)
5038 operands[1] = force_reg (SImode, operands[1]);
5039 if (arm_general_register_operand (operands[0], SImode)
5040 && GET_CODE (operands[1]) == CONST_INT
5041 && !(const_ok_for_arm (INTVAL (operands[1]))
5042 || const_ok_for_arm (~INTVAL (operands[1]))))
5044 arm_split_constant (SET, SImode, NULL_RTX,
5045 INTVAL (operands[1]), operands[0], NULL_RTX,
5046 optimize && can_create_pseudo_p ());
5050 if (TARGET_USE_MOVT && !target_word_relocations
5051 && GET_CODE (operands[1]) == SYMBOL_REF
5052 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5054 arm_emit_movpair (operands[0], operands[1]);
5058 else /* TARGET_THUMB1... */
5060 if (can_create_pseudo_p ())
5062 if (GET_CODE (operands[0]) != REG)
5063 operands[1] = force_reg (SImode, operands[1]);
5067 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5069 split_const (operands[1], &base, &offset);
5070 if (GET_CODE (base) == SYMBOL_REF
5071 && !offset_within_block_p (base, INTVAL (offset)))
5073 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5074 emit_move_insn (tmp, base);
5075 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5080 /* Recognize the case where operand[1] is a reference to thread-local
5081 data and load its address to a register. */
5082 if (arm_tls_referenced_p (operands[1]))
5084 rtx tmp = operands[1];
5087 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5089 addend = XEXP (XEXP (tmp, 0), 1);
5090 tmp = XEXP (XEXP (tmp, 0), 0);
5093 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5094 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5096 tmp = legitimize_tls_address (tmp,
5097 !can_create_pseudo_p () ? operands[0] : 0);
5100 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5101 tmp = force_operand (tmp, operands[0]);
5106 && (CONSTANT_P (operands[1])
5107 || symbol_mentioned_p (operands[1])
5108 || label_mentioned_p (operands[1])))
5109 operands[1] = legitimize_pic_address (operands[1], SImode,
5110 (!can_create_pseudo_p ()
5117 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5118 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5119 ;; so this does not matter.
5120 (define_insn "*arm_movt"
5121 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5122 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5123 (match_operand:SI 2 "general_operand" "i")))]
5125 "movt%?\t%0, #:upper16:%c2"
5126 [(set_attr "predicable" "yes")
5127 (set_attr "length" "4")]
5130 (define_insn "*arm_movsi_insn"
5131 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5132 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5133 "TARGET_ARM && ! TARGET_IWMMXT
5134 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5135 && ( register_operand (operands[0], SImode)
5136 || register_operand (operands[1], SImode))"
5144 [(set_attr "type" "*,*,*,*,load1,store1")
5145 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5146 (set_attr "predicable" "yes")
5147 (set_attr "pool_range" "*,*,*,*,4096,*")
5148 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5152 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5153 (match_operand:SI 1 "const_int_operand" ""))]
5155 && (!(const_ok_for_arm (INTVAL (operands[1]))
5156 || const_ok_for_arm (~INTVAL (operands[1]))))"
5157 [(clobber (const_int 0))]
5159 arm_split_constant (SET, SImode, NULL_RTX,
5160 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5165 (define_insn "*thumb1_movsi_insn"
5166 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5167 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5169 && ( register_operand (operands[0], SImode)
5170 || register_operand (operands[1], SImode))"
5181 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5182 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5183 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5184 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5187 [(set (match_operand:SI 0 "register_operand" "")
5188 (match_operand:SI 1 "const_int_operand" ""))]
5189 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5190 [(set (match_dup 2) (match_dup 1))
5191 (set (match_dup 0) (neg:SI (match_dup 2)))]
5194 operands[1] = GEN_INT (- INTVAL (operands[1]));
5195 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5200 [(set (match_operand:SI 0 "register_operand" "")
5201 (match_operand:SI 1 "const_int_operand" ""))]
5202 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5203 [(set (match_dup 2) (match_dup 1))
5204 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5207 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5208 unsigned HOST_WIDE_INT mask = 0xff;
5211 for (i = 0; i < 25; i++)
5212 if ((val & (mask << i)) == val)
5215 /* Don't split if the shift is zero. */
5219 operands[1] = GEN_INT (val >> i);
5220 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5221 operands[3] = GEN_INT (i);
5225 ;; When generating pic, we need to load the symbol offset into a register.
5226 ;; So that the optimizer does not confuse this with a normal symbol load
5227 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5228 ;; since that is the only type of relocation we can use.
5230 ;; Wrap calculation of the whole PIC address in a single pattern for the
5231 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5232 ;; a PIC address involves two loads from memory, so we want to CSE it
5233 ;; as often as possible.
5234 ;; This pattern will be split into one of the pic_load_addr_* patterns
5235 ;; and a move after GCSE optimizations.
5237 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5238 (define_expand "calculate_pic_address"
5239 [(set (match_operand:SI 0 "register_operand" "")
5240 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5241 (unspec:SI [(match_operand:SI 2 "" "")]
5246 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5248 [(set (match_operand:SI 0 "register_operand" "")
5249 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5250 (unspec:SI [(match_operand:SI 2 "" "")]
5253 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5254 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5255 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5258 ;; The rather odd constraints on the following are to force reload to leave
5259 ;; the insn alone, and to force the minipool generation pass to then move
5260 ;; the GOT symbol to memory.
5262 (define_insn "pic_load_addr_32bit"
5263 [(set (match_operand:SI 0 "s_register_operand" "=r")
5264 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5265 "TARGET_32BIT && flag_pic"
5267 [(set_attr "type" "load1")
5268 (set_attr "pool_range" "4096")
5269 (set (attr "neg_pool_range")
5270 (if_then_else (eq_attr "is_thumb" "no")
5275 (define_insn "pic_load_addr_thumb1"
5276 [(set (match_operand:SI 0 "s_register_operand" "=l")
5277 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5278 "TARGET_THUMB1 && flag_pic"
5280 [(set_attr "type" "load1")
5281 (set (attr "pool_range") (const_int 1024))]
5284 (define_insn "pic_add_dot_plus_four"
5285 [(set (match_operand:SI 0 "register_operand" "=r")
5286 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5288 (match_operand 2 "" "")]
5292 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5293 INTVAL (operands[2]));
5294 return \"add\\t%0, %|pc\";
5296 [(set_attr "length" "2")]
5299 (define_insn "pic_add_dot_plus_eight"
5300 [(set (match_operand:SI 0 "register_operand" "=r")
5301 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5303 (match_operand 2 "" "")]
5307 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5308 INTVAL (operands[2]));
5309 return \"add%?\\t%0, %|pc, %1\";
5311 [(set_attr "predicable" "yes")]
5314 (define_insn "tls_load_dot_plus_eight"
5315 [(set (match_operand:SI 0 "register_operand" "=r")
5316 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5318 (match_operand 2 "" "")]
5322 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5323 INTVAL (operands[2]));
5324 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5326 [(set_attr "predicable" "yes")]
5329 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5330 ;; followed by a load. These sequences can be crunched down to
5331 ;; tls_load_dot_plus_eight by a peephole.
5334 [(set (match_operand:SI 0 "register_operand" "")
5335 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5337 (match_operand 1 "" "")]
5339 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5340 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5342 (mem:SI (unspec:SI [(match_dup 3)
5349 (define_insn "pic_offset_arm"
5350 [(set (match_operand:SI 0 "register_operand" "=r")
5351 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5352 (unspec:SI [(match_operand:SI 2 "" "X")]
5353 UNSPEC_PIC_OFFSET))))]
5354 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5355 "ldr%?\\t%0, [%1,%2]"
5356 [(set_attr "type" "load1")]
5359 (define_expand "builtin_setjmp_receiver"
5360 [(label_ref (match_operand 0 "" ""))]
5364 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5366 if (arm_pic_register != INVALID_REGNUM)
5367 arm_load_pic_register (1UL << 3);
5371 ;; If copying one reg to another we can set the condition codes according to
5372 ;; its value. Such a move is common after a return from subroutine and the
5373 ;; result is being tested against zero.
5375 (define_insn "*movsi_compare0"
5376 [(set (reg:CC CC_REGNUM)
5377 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5379 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5385 [(set_attr "conds" "set")]
5388 ;; Subroutine to store a half word from a register into memory.
5389 ;; Operand 0 is the source register (HImode)
5390 ;; Operand 1 is the destination address in a register (SImode)
5392 ;; In both this routine and the next, we must be careful not to spill
5393 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5394 ;; can generate unrecognizable rtl.
5396 (define_expand "storehi"
5397 [;; store the low byte
5398 (set (match_operand 1 "" "") (match_dup 3))
5399 ;; extract the high byte
5401 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5402 ;; store the high byte
5403 (set (match_dup 4) (match_dup 5))]
5407 rtx op1 = operands[1];
5408 rtx addr = XEXP (op1, 0);
5409 enum rtx_code code = GET_CODE (addr);
5411 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5413 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5415 operands[4] = adjust_address (op1, QImode, 1);
5416 operands[1] = adjust_address (operands[1], QImode, 0);
5417 operands[3] = gen_lowpart (QImode, operands[0]);
5418 operands[0] = gen_lowpart (SImode, operands[0]);
5419 operands[2] = gen_reg_rtx (SImode);
5420 operands[5] = gen_lowpart (QImode, operands[2]);
5424 (define_expand "storehi_bigend"
5425 [(set (match_dup 4) (match_dup 3))
5427 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5428 (set (match_operand 1 "" "") (match_dup 5))]
5432 rtx op1 = operands[1];
5433 rtx addr = XEXP (op1, 0);
5434 enum rtx_code code = GET_CODE (addr);
5436 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5438 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5440 operands[4] = adjust_address (op1, QImode, 1);
5441 operands[1] = adjust_address (operands[1], QImode, 0);
5442 operands[3] = gen_lowpart (QImode, operands[0]);
5443 operands[0] = gen_lowpart (SImode, operands[0]);
5444 operands[2] = gen_reg_rtx (SImode);
5445 operands[5] = gen_lowpart (QImode, operands[2]);
5449 ;; Subroutine to store a half word integer constant into memory.
5450 (define_expand "storeinthi"
5451 [(set (match_operand 0 "" "")
5452 (match_operand 1 "" ""))
5453 (set (match_dup 3) (match_dup 2))]
5457 HOST_WIDE_INT value = INTVAL (operands[1]);
5458 rtx addr = XEXP (operands[0], 0);
5459 rtx op0 = operands[0];
5460 enum rtx_code code = GET_CODE (addr);
5462 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5464 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5466 operands[1] = gen_reg_rtx (SImode);
5467 if (BYTES_BIG_ENDIAN)
5469 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5470 if ((value & 255) == ((value >> 8) & 255))
5471 operands[2] = operands[1];
5474 operands[2] = gen_reg_rtx (SImode);
5475 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5480 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5481 if ((value & 255) == ((value >> 8) & 255))
5482 operands[2] = operands[1];
5485 operands[2] = gen_reg_rtx (SImode);
5486 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5490 operands[3] = adjust_address (op0, QImode, 1);
5491 operands[0] = adjust_address (operands[0], QImode, 0);
5492 operands[2] = gen_lowpart (QImode, operands[2]);
5493 operands[1] = gen_lowpart (QImode, operands[1]);
5497 (define_expand "storehi_single_op"
5498 [(set (match_operand:HI 0 "memory_operand" "")
5499 (match_operand:HI 1 "general_operand" ""))]
5500 "TARGET_32BIT && arm_arch4"
5502 if (!s_register_operand (operands[1], HImode))
5503 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5507 (define_expand "movhi"
5508 [(set (match_operand:HI 0 "general_operand" "")
5509 (match_operand:HI 1 "general_operand" ""))]
5514 if (can_create_pseudo_p ())
5516 if (GET_CODE (operands[0]) == MEM)
5520 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5523 if (GET_CODE (operands[1]) == CONST_INT)
5524 emit_insn (gen_storeinthi (operands[0], operands[1]));
5527 if (GET_CODE (operands[1]) == MEM)
5528 operands[1] = force_reg (HImode, operands[1]);
5529 if (BYTES_BIG_ENDIAN)
5530 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5532 emit_insn (gen_storehi (operands[1], operands[0]));
5536 /* Sign extend a constant, and keep it in an SImode reg. */
5537 else if (GET_CODE (operands[1]) == CONST_INT)
5539 rtx reg = gen_reg_rtx (SImode);
5540 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5542 /* If the constant is already valid, leave it alone. */
5543 if (!const_ok_for_arm (val))
5545 /* If setting all the top bits will make the constant
5546 loadable in a single instruction, then set them.
5547 Otherwise, sign extend the number. */
5549 if (const_ok_for_arm (~(val | ~0xffff)))
5551 else if (val & 0x8000)
5555 emit_insn (gen_movsi (reg, GEN_INT (val)));
5556 operands[1] = gen_lowpart (HImode, reg);
5558 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5559 && GET_CODE (operands[1]) == MEM)
5561 rtx reg = gen_reg_rtx (SImode);
5563 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5564 operands[1] = gen_lowpart (HImode, reg);
5566 else if (!arm_arch4)
5568 if (GET_CODE (operands[1]) == MEM)
5571 rtx offset = const0_rtx;
5572 rtx reg = gen_reg_rtx (SImode);
5574 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5575 || (GET_CODE (base) == PLUS
5576 && (GET_CODE (offset = XEXP (base, 1))
5578 && ((INTVAL(offset) & 1) != 1)
5579 && GET_CODE (base = XEXP (base, 0)) == REG))
5580 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5584 new_rtx = widen_memory_access (operands[1], SImode,
5585 ((INTVAL (offset) & ~3)
5586 - INTVAL (offset)));
5587 emit_insn (gen_movsi (reg, new_rtx));
5588 if (((INTVAL (offset) & 2) != 0)
5589 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5591 rtx reg2 = gen_reg_rtx (SImode);
5593 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5598 emit_insn (gen_movhi_bytes (reg, operands[1]));
5600 operands[1] = gen_lowpart (HImode, reg);
5604 /* Handle loading a large integer during reload. */
5605 else if (GET_CODE (operands[1]) == CONST_INT
5606 && !const_ok_for_arm (INTVAL (operands[1]))
5607 && !const_ok_for_arm (~INTVAL (operands[1])))
5609 /* Writing a constant to memory needs a scratch, which should
5610 be handled with SECONDARY_RELOADs. */
5611 gcc_assert (GET_CODE (operands[0]) == REG);
5613 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5614 emit_insn (gen_movsi (operands[0], operands[1]));
5618 else if (TARGET_THUMB2)
5620 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5621 if (can_create_pseudo_p ())
5623 if (GET_CODE (operands[0]) != REG)
5624 operands[1] = force_reg (HImode, operands[1]);
5625 /* Zero extend a constant, and keep it in an SImode reg. */
5626 else if (GET_CODE (operands[1]) == CONST_INT)
5628 rtx reg = gen_reg_rtx (SImode);
5629 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5631 emit_insn (gen_movsi (reg, GEN_INT (val)));
5632 operands[1] = gen_lowpart (HImode, reg);
5636 else /* TARGET_THUMB1 */
5638 if (can_create_pseudo_p ())
5640 if (GET_CODE (operands[1]) == CONST_INT)
5642 rtx reg = gen_reg_rtx (SImode);
5644 emit_insn (gen_movsi (reg, operands[1]));
5645 operands[1] = gen_lowpart (HImode, reg);
5648 /* ??? We shouldn't really get invalid addresses here, but this can
5649 happen if we are passed a SP (never OK for HImode/QImode) or
5650 virtual register (also rejected as illegitimate for HImode/QImode)
5651 relative address. */
5652 /* ??? This should perhaps be fixed elsewhere, for instance, in
5653 fixup_stack_1, by checking for other kinds of invalid addresses,
5654 e.g. a bare reference to a virtual register. This may confuse the
5655 alpha though, which must handle this case differently. */
5656 if (GET_CODE (operands[0]) == MEM
5657 && !memory_address_p (GET_MODE (operands[0]),
5658 XEXP (operands[0], 0)))
5660 = replace_equiv_address (operands[0],
5661 copy_to_reg (XEXP (operands[0], 0)));
5663 if (GET_CODE (operands[1]) == MEM
5664 && !memory_address_p (GET_MODE (operands[1]),
5665 XEXP (operands[1], 0)))
5667 = replace_equiv_address (operands[1],
5668 copy_to_reg (XEXP (operands[1], 0)));
5670 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5672 rtx reg = gen_reg_rtx (SImode);
5674 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5675 operands[1] = gen_lowpart (HImode, reg);
5678 if (GET_CODE (operands[0]) == MEM)
5679 operands[1] = force_reg (HImode, operands[1]);
5681 else if (GET_CODE (operands[1]) == CONST_INT
5682 && !satisfies_constraint_I (operands[1]))
5684 /* Handle loading a large integer during reload. */
5686 /* Writing a constant to memory needs a scratch, which should
5687 be handled with SECONDARY_RELOADs. */
5688 gcc_assert (GET_CODE (operands[0]) == REG);
5690 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5691 emit_insn (gen_movsi (operands[0], operands[1]));
5698 (define_insn "*thumb1_movhi_insn"
5699 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5700 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5702 && ( register_operand (operands[0], HImode)
5703 || register_operand (operands[1], HImode))"
5705 switch (which_alternative)
5707 case 0: return \"add %0, %1, #0\";
5708 case 2: return \"strh %1, %0\";
5709 case 3: return \"mov %0, %1\";
5710 case 4: return \"mov %0, %1\";
5711 case 5: return \"mov %0, %1\";
5712 default: gcc_unreachable ();
5714 /* The stack pointer can end up being taken as an index register.
5715 Catch this case here and deal with it. */
5716 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5717 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5718 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5721 ops[0] = operands[0];
5722 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5724 output_asm_insn (\"mov %0, %1\", ops);
5726 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5729 return \"ldrh %0, %1\";
5731 [(set_attr "length" "2,4,2,2,2,2")
5732 (set_attr "type" "*,load1,store1,*,*,*")
5733 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5736 (define_expand "movhi_bytes"
5737 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5739 (zero_extend:SI (match_dup 6)))
5740 (set (match_operand:SI 0 "" "")
5741 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5746 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5748 mem1 = change_address (operands[1], QImode, addr);
5749 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5750 operands[0] = gen_lowpart (SImode, operands[0]);
5752 operands[2] = gen_reg_rtx (SImode);
5753 operands[3] = gen_reg_rtx (SImode);
5756 if (BYTES_BIG_ENDIAN)
5758 operands[4] = operands[2];
5759 operands[5] = operands[3];
5763 operands[4] = operands[3];
5764 operands[5] = operands[2];
5769 (define_expand "movhi_bigend"
5771 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5774 (ashiftrt:SI (match_dup 2) (const_int 16)))
5775 (set (match_operand:HI 0 "s_register_operand" "")
5779 operands[2] = gen_reg_rtx (SImode);
5780 operands[3] = gen_reg_rtx (SImode);
5781 operands[4] = gen_lowpart (HImode, operands[3]);
5785 ;; Pattern to recognize insn generated default case above
5786 (define_insn "*movhi_insn_arch4"
5787 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5788 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5791 && (GET_CODE (operands[1]) != CONST_INT
5792 || const_ok_for_arm (INTVAL (operands[1]))
5793 || const_ok_for_arm (~INTVAL (operands[1])))"
5795 mov%?\\t%0, %1\\t%@ movhi
5796 mvn%?\\t%0, #%B1\\t%@ movhi
5797 str%(h%)\\t%1, %0\\t%@ movhi
5798 ldr%(h%)\\t%0, %1\\t%@ movhi"
5799 [(set_attr "type" "*,*,store1,load1")
5800 (set_attr "predicable" "yes")
5801 (set_attr "insn" "mov,mvn,*,*")
5802 (set_attr "pool_range" "*,*,*,256")
5803 (set_attr "neg_pool_range" "*,*,*,244")]
5806 (define_insn "*movhi_bytes"
5807 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5808 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5811 mov%?\\t%0, %1\\t%@ movhi
5812 mvn%?\\t%0, #%B1\\t%@ movhi"
5813 [(set_attr "predicable" "yes")
5814 (set_attr "insn" "mov,mvn")]
5817 (define_expand "thumb_movhi_clobber"
5818 [(set (match_operand:HI 0 "memory_operand" "")
5819 (match_operand:HI 1 "register_operand" ""))
5820 (clobber (match_operand:DI 2 "register_operand" ""))]
5823 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5824 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5826 emit_insn (gen_movhi (operands[0], operands[1]));
5829 /* XXX Fixme, need to handle other cases here as well. */
5834 ;; We use a DImode scratch because we may occasionally need an additional
5835 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5836 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5837 (define_expand "reload_outhi"
5838 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5839 (match_operand:HI 1 "s_register_operand" "r")
5840 (match_operand:DI 2 "s_register_operand" "=&l")])]
5843 arm_reload_out_hi (operands);
5845 thumb_reload_out_hi (operands);
5850 (define_expand "reload_inhi"
5851 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5852 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5853 (match_operand:DI 2 "s_register_operand" "=&r")])]
5857 arm_reload_in_hi (operands);
5859 thumb_reload_out_hi (operands);
5863 (define_expand "movqi"
5864 [(set (match_operand:QI 0 "general_operand" "")
5865 (match_operand:QI 1 "general_operand" ""))]
5868 /* Everything except mem = const or mem = mem can be done easily */
5870 if (can_create_pseudo_p ())
5872 if (GET_CODE (operands[1]) == CONST_INT)
5874 rtx reg = gen_reg_rtx (SImode);
5876 /* For thumb we want an unsigned immediate, then we are more likely
5877 to be able to use a movs insn. */
5879 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5881 emit_insn (gen_movsi (reg, operands[1]));
5882 operands[1] = gen_lowpart (QImode, reg);
5887 /* ??? We shouldn't really get invalid addresses here, but this can
5888 happen if we are passed a SP (never OK for HImode/QImode) or
5889 virtual register (also rejected as illegitimate for HImode/QImode)
5890 relative address. */
5891 /* ??? This should perhaps be fixed elsewhere, for instance, in
5892 fixup_stack_1, by checking for other kinds of invalid addresses,
5893 e.g. a bare reference to a virtual register. This may confuse the
5894 alpha though, which must handle this case differently. */
5895 if (GET_CODE (operands[0]) == MEM
5896 && !memory_address_p (GET_MODE (operands[0]),
5897 XEXP (operands[0], 0)))
5899 = replace_equiv_address (operands[0],
5900 copy_to_reg (XEXP (operands[0], 0)));
5901 if (GET_CODE (operands[1]) == MEM
5902 && !memory_address_p (GET_MODE (operands[1]),
5903 XEXP (operands[1], 0)))
5905 = replace_equiv_address (operands[1],
5906 copy_to_reg (XEXP (operands[1], 0)));
5909 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5911 rtx reg = gen_reg_rtx (SImode);
5913 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5914 operands[1] = gen_lowpart (QImode, reg);
5917 if (GET_CODE (operands[0]) == MEM)
5918 operands[1] = force_reg (QImode, operands[1]);
5920 else if (TARGET_THUMB
5921 && GET_CODE (operands[1]) == CONST_INT
5922 && !satisfies_constraint_I (operands[1]))
5924 /* Handle loading a large integer during reload. */
5926 /* Writing a constant to memory needs a scratch, which should
5927 be handled with SECONDARY_RELOADs. */
5928 gcc_assert (GET_CODE (operands[0]) == REG);
5930 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5931 emit_insn (gen_movsi (operands[0], operands[1]));
5938 (define_insn "*arm_movqi_insn"
5939 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5940 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5942 && ( register_operand (operands[0], QImode)
5943 || register_operand (operands[1], QImode))"
5949 [(set_attr "type" "*,*,load1,store1")
5950 (set_attr "insn" "mov,mvn,*,*")
5951 (set_attr "predicable" "yes")]
5954 (define_insn "*thumb1_movqi_insn"
5955 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5956 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5958 && ( register_operand (operands[0], QImode)
5959 || register_operand (operands[1], QImode))"
5967 [(set_attr "length" "2")
5968 (set_attr "type" "*,load1,store1,*,*,*")
5969 (set_attr "insn" "*,*,*,mov,mov,mov")
5970 (set_attr "pool_range" "*,32,*,*,*,*")
5971 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5974 (define_expand "movhf"
5975 [(set (match_operand:HF 0 "general_operand" "")
5976 (match_operand:HF 1 "general_operand" ""))]
5981 if (GET_CODE (operands[0]) == MEM)
5982 operands[1] = force_reg (HFmode, operands[1]);
5984 else /* TARGET_THUMB1 */
5986 if (can_create_pseudo_p ())
5988 if (GET_CODE (operands[0]) != REG)
5989 operands[1] = force_reg (HFmode, operands[1]);
5995 (define_insn "*arm32_movhf"
5996 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5997 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5998 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5999 && ( s_register_operand (operands[0], HFmode)
6000 || s_register_operand (operands[1], HFmode))"
6002 switch (which_alternative)
6004 case 0: /* ARM register from memory */
6005 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6006 case 1: /* memory from ARM register */
6007 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6008 case 2: /* ARM register from ARM register */
6009 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6010 case 3: /* ARM register from constant */
6016 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6017 bits = real_to_target (NULL, &r, HFmode);
6018 ops[0] = operands[0];
6019 ops[1] = GEN_INT (bits);
6020 ops[2] = GEN_INT (bits & 0xff00);
6021 ops[3] = GEN_INT (bits & 0x00ff);
6023 if (arm_arch_thumb2)
6024 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6026 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6033 [(set_attr "conds" "unconditional")
6034 (set_attr "type" "load1,store1,*,*")
6035 (set_attr "insn" "*,*,mov,mov")
6036 (set_attr "length" "4,4,4,8")
6037 (set_attr "predicable" "yes")]
6040 (define_insn "*thumb1_movhf"
6041 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6042 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6044 && ( s_register_operand (operands[0], HFmode)
6045 || s_register_operand (operands[1], HFmode))"
6047 switch (which_alternative)
6052 gcc_assert (GET_CODE(operands[1]) == MEM);
6053 addr = XEXP (operands[1], 0);
6054 if (GET_CODE (addr) == LABEL_REF
6055 || (GET_CODE (addr) == CONST
6056 && GET_CODE (XEXP (addr, 0)) == PLUS
6057 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6058 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6060 /* Constant pool entry. */
6061 return \"ldr\\t%0, %1\";
6063 return \"ldrh\\t%0, %1\";
6065 case 2: return \"strh\\t%1, %0\";
6066 default: return \"mov\\t%0, %1\";
6069 [(set_attr "length" "2")
6070 (set_attr "type" "*,load1,store1,*,*")
6071 (set_attr "insn" "mov,*,*,mov,mov")
6072 (set_attr "pool_range" "*,1020,*,*,*")
6073 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6075 (define_expand "movsf"
6076 [(set (match_operand:SF 0 "general_operand" "")
6077 (match_operand:SF 1 "general_operand" ""))]
6082 if (GET_CODE (operands[0]) == MEM)
6083 operands[1] = force_reg (SFmode, operands[1]);
6085 else /* TARGET_THUMB1 */
6087 if (can_create_pseudo_p ())
6089 if (GET_CODE (operands[0]) != REG)
6090 operands[1] = force_reg (SFmode, operands[1]);
6096 ;; Transform a floating-point move of a constant into a core register into
6097 ;; an SImode operation.
6099 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6100 (match_operand:SF 1 "immediate_operand" ""))]
6103 && GET_CODE (operands[1]) == CONST_DOUBLE"
6104 [(set (match_dup 2) (match_dup 3))]
6106 operands[2] = gen_lowpart (SImode, operands[0]);
6107 operands[3] = gen_lowpart (SImode, operands[1]);
6108 if (operands[2] == 0 || operands[3] == 0)
6113 (define_insn "*arm_movsf_soft_insn"
6114 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6115 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6117 && TARGET_SOFT_FLOAT
6118 && (GET_CODE (operands[0]) != MEM
6119 || register_operand (operands[1], SFmode))"
6122 ldr%?\\t%0, %1\\t%@ float
6123 str%?\\t%1, %0\\t%@ float"
6124 [(set_attr "predicable" "yes")
6125 (set_attr "type" "*,load1,store1")
6126 (set_attr "insn" "mov,*,*")
6127 (set_attr "pool_range" "*,4096,*")
6128 (set_attr "arm_neg_pool_range" "*,4084,*")
6129 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6132 ;;; ??? This should have alternatives for constants.
6133 (define_insn "*thumb1_movsf_insn"
6134 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6135 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6137 && ( register_operand (operands[0], SFmode)
6138 || register_operand (operands[1], SFmode))"
6147 [(set_attr "length" "2")
6148 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6149 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6150 (set_attr "insn" "*,*,*,*,*,mov,mov")
6151 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6154 (define_expand "movdf"
6155 [(set (match_operand:DF 0 "general_operand" "")
6156 (match_operand:DF 1 "general_operand" ""))]
6161 if (GET_CODE (operands[0]) == MEM)
6162 operands[1] = force_reg (DFmode, operands[1]);
6164 else /* TARGET_THUMB */
6166 if (can_create_pseudo_p ())
6168 if (GET_CODE (operands[0]) != REG)
6169 operands[1] = force_reg (DFmode, operands[1]);
6175 ;; Reloading a df mode value stored in integer regs to memory can require a
6177 (define_expand "reload_outdf"
6178 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6179 (match_operand:DF 1 "s_register_operand" "r")
6180 (match_operand:SI 2 "s_register_operand" "=&r")]
6184 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6187 operands[2] = XEXP (operands[0], 0);
6188 else if (code == POST_INC || code == PRE_DEC)
6190 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6191 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6192 emit_insn (gen_movdi (operands[0], operands[1]));
6195 else if (code == PRE_INC)
6197 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6199 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6202 else if (code == POST_DEC)
6203 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6205 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6206 XEXP (XEXP (operands[0], 0), 1)));
6208 emit_insn (gen_rtx_SET (VOIDmode,
6209 replace_equiv_address (operands[0], operands[2]),
6212 if (code == POST_DEC)
6213 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6219 (define_insn "*movdf_soft_insn"
6220 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6221 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6222 "TARGET_32BIT && TARGET_SOFT_FLOAT
6223 && ( register_operand (operands[0], DFmode)
6224 || register_operand (operands[1], DFmode))"
6226 switch (which_alternative)
6233 return output_move_double (operands);
6236 [(set_attr "length" "8,12,16,8,8")
6237 (set_attr "type" "*,*,*,load2,store2")
6238 (set_attr "pool_range" "*,*,*,1020,*")
6239 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6240 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6243 ;;; ??? This should have alternatives for constants.
6244 ;;; ??? This was originally identical to the movdi_insn pattern.
6245 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6246 ;;; thumb_reorg with a memory reference.
6247 (define_insn "*thumb_movdf_insn"
6248 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6249 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6251 && ( register_operand (operands[0], DFmode)
6252 || register_operand (operands[1], DFmode))"
6254 switch (which_alternative)
6258 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6259 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6260 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6262 return \"ldmia\\t%1, {%0, %H0}\";
6264 return \"stmia\\t%0, {%1, %H1}\";
6266 return thumb_load_double_from_address (operands);
6268 operands[2] = gen_rtx_MEM (SImode,
6269 plus_constant (XEXP (operands[0], 0), 4));
6270 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6273 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6274 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6275 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6278 [(set_attr "length" "4,2,2,6,4,4")
6279 (set_attr "type" "*,load2,store2,load2,store2,*")
6280 (set_attr "insn" "*,*,*,*,*,mov")
6281 (set_attr "pool_range" "*,*,*,1020,*,*")]
6284 (define_expand "movxf"
6285 [(set (match_operand:XF 0 "general_operand" "")
6286 (match_operand:XF 1 "general_operand" ""))]
6287 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6289 if (GET_CODE (operands[0]) == MEM)
6290 operands[1] = force_reg (XFmode, operands[1]);
6296 ;; load- and store-multiple insns
6297 ;; The arm can load/store any set of registers, provided that they are in
6298 ;; ascending order, but these expanders assume a contiguous set.
6300 (define_expand "load_multiple"
6301 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6302 (match_operand:SI 1 "" ""))
6303 (use (match_operand:SI 2 "" ""))])]
6306 HOST_WIDE_INT offset = 0;
6308 /* Support only fixed point registers. */
6309 if (GET_CODE (operands[2]) != CONST_INT
6310 || INTVAL (operands[2]) > 14
6311 || INTVAL (operands[2]) < 2
6312 || GET_CODE (operands[1]) != MEM
6313 || GET_CODE (operands[0]) != REG
6314 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6315 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6319 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6320 INTVAL (operands[2]),
6321 force_reg (SImode, XEXP (operands[1], 0)),
6322 FALSE, operands[1], &offset);
6325 (define_expand "store_multiple"
6326 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6327 (match_operand:SI 1 "" ""))
6328 (use (match_operand:SI 2 "" ""))])]
6331 HOST_WIDE_INT offset = 0;
6333 /* Support only fixed point registers. */
6334 if (GET_CODE (operands[2]) != CONST_INT
6335 || INTVAL (operands[2]) > 14
6336 || INTVAL (operands[2]) < 2
6337 || GET_CODE (operands[1]) != REG
6338 || GET_CODE (operands[0]) != MEM
6339 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6340 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6344 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6345 INTVAL (operands[2]),
6346 force_reg (SImode, XEXP (operands[0], 0)),
6347 FALSE, operands[0], &offset);
6351 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6352 ;; We could let this apply for blocks of less than this, but it clobbers so
6353 ;; many registers that there is then probably a better way.
6355 (define_expand "movmemqi"
6356 [(match_operand:BLK 0 "general_operand" "")
6357 (match_operand:BLK 1 "general_operand" "")
6358 (match_operand:SI 2 "const_int_operand" "")
6359 (match_operand:SI 3 "const_int_operand" "")]
6364 if (arm_gen_movmemqi (operands))
6368 else /* TARGET_THUMB1 */
6370 if ( INTVAL (operands[3]) != 4
6371 || INTVAL (operands[2]) > 48)
6374 thumb_expand_movmemqi (operands);
6380 ;; Thumb block-move insns
6382 (define_insn "movmem12b"
6383 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6384 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6385 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6386 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6387 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6388 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6389 (set (match_operand:SI 0 "register_operand" "=l")
6390 (plus:SI (match_dup 2) (const_int 12)))
6391 (set (match_operand:SI 1 "register_operand" "=l")
6392 (plus:SI (match_dup 3) (const_int 12)))
6393 (clobber (match_scratch:SI 4 "=&l"))
6394 (clobber (match_scratch:SI 5 "=&l"))
6395 (clobber (match_scratch:SI 6 "=&l"))]
6397 "* return thumb_output_move_mem_multiple (3, operands);"
6398 [(set_attr "length" "4")
6399 ; This isn't entirely accurate... It loads as well, but in terms of
6400 ; scheduling the following insn it is better to consider it as a store
6401 (set_attr "type" "store3")]
6404 (define_insn "movmem8b"
6405 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6406 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6407 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6408 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6409 (set (match_operand:SI 0 "register_operand" "=l")
6410 (plus:SI (match_dup 2) (const_int 8)))
6411 (set (match_operand:SI 1 "register_operand" "=l")
6412 (plus:SI (match_dup 3) (const_int 8)))
6413 (clobber (match_scratch:SI 4 "=&l"))
6414 (clobber (match_scratch:SI 5 "=&l"))]
6416 "* return thumb_output_move_mem_multiple (2, operands);"
6417 [(set_attr "length" "4")
6418 ; This isn't entirely accurate... It loads as well, but in terms of
6419 ; scheduling the following insn it is better to consider it as a store
6420 (set_attr "type" "store2")]
6425 ;; Compare & branch insns
6426 ;; The range calculations are based as follows:
6427 ;; For forward branches, the address calculation returns the address of
6428 ;; the next instruction. This is 2 beyond the branch instruction.
6429 ;; For backward branches, the address calculation returns the address of
6430 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6431 ;; instruction for the shortest sequence, and 4 before the branch instruction
6432 ;; if we have to jump around an unconditional branch.
6433 ;; To the basic branch range the PC offset must be added (this is +4).
6434 ;; So for forward branches we have
6435 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6436 ;; And for backward branches we have
6437 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6439 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6440 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6442 (define_expand "cbranchsi4"
6443 [(set (pc) (if_then_else
6444 (match_operator 0 "arm_comparison_operator"
6445 [(match_operand:SI 1 "s_register_operand" "")
6446 (match_operand:SI 2 "nonmemory_operand" "")])
6447 (label_ref (match_operand 3 "" ""))
6449 "TARGET_THUMB1 || TARGET_32BIT"
6453 if (!arm_add_operand (operands[2], SImode))
6454 operands[2] = force_reg (SImode, operands[2]);
6455 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6459 if (thumb1_cmpneg_operand (operands[2], SImode))
6461 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6462 operands[3], operands[0]));
6465 if (!thumb1_cmp_operand (operands[2], SImode))
6466 operands[2] = force_reg (SImode, operands[2]);
6469 ;; A pattern to recognize a special situation and optimize for it.
6470 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6471 ;; due to the available addressing modes. Hence, convert a signed comparison
6472 ;; with zero into an unsigned comparison with 127 if possible.
6473 (define_expand "cbranchqi4"
6474 [(set (pc) (if_then_else
6475 (match_operator 0 "lt_ge_comparison_operator"
6476 [(match_operand:QI 1 "memory_operand" "")
6477 (match_operand:QI 2 "const0_operand" "")])
6478 (label_ref (match_operand 3 "" ""))
6483 xops[1] = gen_reg_rtx (SImode);
6484 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6485 xops[2] = GEN_INT (127);
6486 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6487 VOIDmode, xops[1], xops[2]);
6488 xops[3] = operands[3];
6489 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6493 (define_expand "cbranchsf4"
6494 [(set (pc) (if_then_else
6495 (match_operator 0 "arm_comparison_operator"
6496 [(match_operand:SF 1 "s_register_operand" "")
6497 (match_operand:SF 2 "arm_float_compare_operand" "")])
6498 (label_ref (match_operand 3 "" ""))
6500 "TARGET_32BIT && TARGET_HARD_FLOAT"
6501 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6502 operands[3])); DONE;"
6505 (define_expand "cbranchdf4"
6506 [(set (pc) (if_then_else
6507 (match_operator 0 "arm_comparison_operator"
6508 [(match_operand:DF 1 "s_register_operand" "")
6509 (match_operand:DF 2 "arm_float_compare_operand" "")])
6510 (label_ref (match_operand 3 "" ""))
6512 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6513 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6514 operands[3])); DONE;"
6517 (define_expand "cbranchdi4"
6518 [(set (pc) (if_then_else
6519 (match_operator 0 "arm_comparison_operator"
6520 [(match_operand:DI 1 "cmpdi_operand" "")
6521 (match_operand:DI 2 "cmpdi_operand" "")])
6522 (label_ref (match_operand 3 "" ""))
6526 rtx swap = NULL_RTX;
6527 enum rtx_code code = GET_CODE (operands[0]);
6529 /* We should not have two constants. */
6530 gcc_assert (GET_MODE (operands[1]) == DImode
6531 || GET_MODE (operands[2]) == DImode);
6533 /* Flip unimplemented DImode comparisons to a form that
6534 arm_gen_compare_reg can handle. */
6538 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6540 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6542 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6544 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6549 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6552 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6558 (define_insn "cbranchsi4_insn"
6559 [(set (pc) (if_then_else
6560 (match_operator 0 "arm_comparison_operator"
6561 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6562 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6563 (label_ref (match_operand 3 "" ""))
6567 rtx t = cfun->machine->thumb1_cc_insn;
6570 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6571 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6573 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6575 if (!noov_comparison_operator (operands[0], VOIDmode))
6578 else if (cfun->machine->thumb1_cc_mode != CCmode)
6583 output_asm_insn ("cmp\t%1, %2", operands);
6584 cfun->machine->thumb1_cc_insn = insn;
6585 cfun->machine->thumb1_cc_op0 = operands[1];
6586 cfun->machine->thumb1_cc_op1 = operands[2];
6587 cfun->machine->thumb1_cc_mode = CCmode;
6590 /* Ensure we emit the right type of condition code on the jump. */
6591 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6594 switch (get_attr_length (insn))
6596 case 4: return \"b%d0\\t%l3\";
6597 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6598 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6601 [(set (attr "far_jump")
6603 (eq_attr "length" "8")
6604 (const_string "yes")
6605 (const_string "no")))
6606 (set (attr "length")
6608 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6609 (le (minus (match_dup 3) (pc)) (const_int 256)))
6612 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6613 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6618 (define_insn "cbranchsi4_scratch"
6619 [(set (pc) (if_then_else
6620 (match_operator 4 "arm_comparison_operator"
6621 [(match_operand:SI 1 "s_register_operand" "l,0")
6622 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6623 (label_ref (match_operand 3 "" ""))
6625 (clobber (match_scratch:SI 0 "=l,l"))]
6628 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6630 switch (get_attr_length (insn))
6632 case 4: return \"b%d4\\t%l3\";
6633 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6634 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6637 [(set (attr "far_jump")
6639 (eq_attr "length" "8")
6640 (const_string "yes")
6641 (const_string "no")))
6642 (set (attr "length")
6644 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6645 (le (minus (match_dup 3) (pc)) (const_int 256)))
6648 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6649 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6654 ;; Two peepholes to generate subtract of 0 instead of a move if the
6655 ;; condition codes will be useful.
6657 [(set (match_operand:SI 0 "low_register_operand" "")
6658 (match_operand:SI 1 "low_register_operand" ""))
6660 (if_then_else (match_operator 2 "arm_comparison_operator"
6661 [(match_dup 1) (const_int 0)])
6662 (label_ref (match_operand 3 "" ""))
6665 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6667 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6668 (label_ref (match_dup 3))
6672 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6673 ;; merge cases like this because the op1 is a hard register in
6674 ;; CLASS_LIKELY_SPILLED_P.
6676 [(set (match_operand:SI 0 "low_register_operand" "")
6677 (match_operand:SI 1 "low_register_operand" ""))
6679 (if_then_else (match_operator 2 "arm_comparison_operator"
6680 [(match_dup 0) (const_int 0)])
6681 (label_ref (match_operand 3 "" ""))
6684 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6686 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6687 (label_ref (match_dup 3))
6691 (define_insn "*negated_cbranchsi4"
6694 (match_operator 0 "equality_operator"
6695 [(match_operand:SI 1 "s_register_operand" "l")
6696 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6697 (label_ref (match_operand 3 "" ""))
6701 output_asm_insn (\"cmn\\t%1, %2\", operands);
6702 switch (get_attr_length (insn))
6704 case 4: return \"b%d0\\t%l3\";
6705 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6706 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6709 [(set (attr "far_jump")
6711 (eq_attr "length" "8")
6712 (const_string "yes")
6713 (const_string "no")))
6714 (set (attr "length")
6716 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6717 (le (minus (match_dup 3) (pc)) (const_int 256)))
6720 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6721 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6726 (define_insn "*tbit_cbranch"
6729 (match_operator 0 "equality_operator"
6730 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6732 (match_operand:SI 2 "const_int_operand" "i"))
6734 (label_ref (match_operand 3 "" ""))
6736 (clobber (match_scratch:SI 4 "=l"))]
6741 op[0] = operands[4];
6742 op[1] = operands[1];
6743 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6745 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6746 switch (get_attr_length (insn))
6748 case 4: return \"b%d0\\t%l3\";
6749 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6750 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6753 [(set (attr "far_jump")
6755 (eq_attr "length" "8")
6756 (const_string "yes")
6757 (const_string "no")))
6758 (set (attr "length")
6760 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6761 (le (minus (match_dup 3) (pc)) (const_int 256)))
6764 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6765 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6770 (define_insn "*tlobits_cbranch"
6773 (match_operator 0 "equality_operator"
6774 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6775 (match_operand:SI 2 "const_int_operand" "i")
6778 (label_ref (match_operand 3 "" ""))
6780 (clobber (match_scratch:SI 4 "=l"))]
6785 op[0] = operands[4];
6786 op[1] = operands[1];
6787 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6789 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6790 switch (get_attr_length (insn))
6792 case 4: return \"b%d0\\t%l3\";
6793 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6794 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6797 [(set (attr "far_jump")
6799 (eq_attr "length" "8")
6800 (const_string "yes")
6801 (const_string "no")))
6802 (set (attr "length")
6804 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6805 (le (minus (match_dup 3) (pc)) (const_int 256)))
6808 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6809 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6814 (define_insn "*tstsi3_cbranch"
6817 (match_operator 3 "equality_operator"
6818 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6819 (match_operand:SI 1 "s_register_operand" "l"))
6821 (label_ref (match_operand 2 "" ""))
6826 output_asm_insn (\"tst\\t%0, %1\", operands);
6827 switch (get_attr_length (insn))
6829 case 4: return \"b%d3\\t%l2\";
6830 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6831 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6834 [(set (attr "far_jump")
6836 (eq_attr "length" "8")
6837 (const_string "yes")
6838 (const_string "no")))
6839 (set (attr "length")
6841 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6842 (le (minus (match_dup 2) (pc)) (const_int 256)))
6845 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6846 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6851 (define_insn "*cbranchne_decr1"
6853 (if_then_else (match_operator 3 "equality_operator"
6854 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6856 (label_ref (match_operand 4 "" ""))
6858 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6859 (plus:SI (match_dup 2) (const_int -1)))
6860 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6865 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6867 VOIDmode, operands[2], const1_rtx);
6868 cond[1] = operands[4];
6870 if (which_alternative == 0)
6871 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6872 else if (which_alternative == 1)
6874 /* We must provide an alternative for a hi reg because reload
6875 cannot handle output reloads on a jump instruction, but we
6876 can't subtract into that. Fortunately a mov from lo to hi
6877 does not clobber the condition codes. */
6878 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6879 output_asm_insn (\"mov\\t%0, %1\", operands);
6883 /* Similarly, but the target is memory. */
6884 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6885 output_asm_insn (\"str\\t%1, %0\", operands);
6888 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6891 output_asm_insn (\"b%d0\\t%l1\", cond);
6894 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6895 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6897 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6898 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6902 [(set (attr "far_jump")
6904 (ior (and (eq (symbol_ref ("which_alternative"))
6906 (eq_attr "length" "8"))
6907 (eq_attr "length" "10"))
6908 (const_string "yes")
6909 (const_string "no")))
6910 (set_attr_alternative "length"
6914 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6915 (le (minus (match_dup 4) (pc)) (const_int 256)))
6918 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6919 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6924 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6925 (le (minus (match_dup 4) (pc)) (const_int 256)))
6928 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6929 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6934 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6935 (le (minus (match_dup 4) (pc)) (const_int 256)))
6938 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6939 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6944 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6945 (le (minus (match_dup 4) (pc)) (const_int 256)))
6948 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6949 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6954 (define_insn "*addsi3_cbranch"
6957 (match_operator 4 "arm_comparison_operator"
6959 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6960 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6962 (label_ref (match_operand 5 "" ""))
6965 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6966 (plus:SI (match_dup 2) (match_dup 3)))
6967 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6969 && (GET_CODE (operands[4]) == EQ
6970 || GET_CODE (operands[4]) == NE
6971 || GET_CODE (operands[4]) == GE
6972 || GET_CODE (operands[4]) == LT)"
6977 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6978 cond[1] = operands[2];
6979 cond[2] = operands[3];
6981 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6982 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6984 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6986 if (which_alternative >= 2
6987 && which_alternative < 4)
6988 output_asm_insn (\"mov\\t%0, %1\", operands);
6989 else if (which_alternative >= 4)
6990 output_asm_insn (\"str\\t%1, %0\", operands);
6992 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
6995 return \"b%d4\\t%l5\";
6997 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6999 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7003 [(set (attr "far_jump")
7005 (ior (and (lt (symbol_ref ("which_alternative"))
7007 (eq_attr "length" "8"))
7008 (eq_attr "length" "10"))
7009 (const_string "yes")
7010 (const_string "no")))
7011 (set (attr "length")
7013 (lt (symbol_ref ("which_alternative"))
7016 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7017 (le (minus (match_dup 5) (pc)) (const_int 256)))
7020 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7021 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7025 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7026 (le (minus (match_dup 5) (pc)) (const_int 256)))
7029 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7030 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7035 (define_insn "*addsi3_cbranch_scratch"
7038 (match_operator 3 "arm_comparison_operator"
7040 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7041 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7043 (label_ref (match_operand 4 "" ""))
7045 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7047 && (GET_CODE (operands[3]) == EQ
7048 || GET_CODE (operands[3]) == NE
7049 || GET_CODE (operands[3]) == GE
7050 || GET_CODE (operands[3]) == LT)"
7053 switch (which_alternative)
7056 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7059 output_asm_insn (\"cmn\t%1, %2\", operands);
7062 if (INTVAL (operands[2]) < 0)
7063 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7065 output_asm_insn (\"add\t%0, %1, %2\", operands);
7068 if (INTVAL (operands[2]) < 0)
7069 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7071 output_asm_insn (\"add\t%0, %0, %2\", operands);
7075 switch (get_attr_length (insn))
7078 return \"b%d3\\t%l4\";
7080 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7082 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7086 [(set (attr "far_jump")
7088 (eq_attr "length" "8")
7089 (const_string "yes")
7090 (const_string "no")))
7091 (set (attr "length")
7093 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7094 (le (minus (match_dup 4) (pc)) (const_int 256)))
7097 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7098 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7104 ;; Comparison and test insns
7106 (define_insn "*arm_cmpsi_insn"
7107 [(set (reg:CC CC_REGNUM)
7108 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7109 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7114 [(set_attr "conds" "set")]
7117 (define_insn "*cmpsi_shiftsi"
7118 [(set (reg:CC CC_REGNUM)
7119 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7120 (match_operator:SI 3 "shift_operator"
7121 [(match_operand:SI 1 "s_register_operand" "r,r")
7122 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7125 [(set_attr "conds" "set")
7126 (set_attr "shift" "1")
7127 (set_attr "arch" "32,a")
7128 (set_attr "type" "alu_shift,alu_shift_reg")])
7130 (define_insn "*cmpsi_shiftsi_swp"
7131 [(set (reg:CC_SWP CC_REGNUM)
7132 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7133 [(match_operand:SI 1 "s_register_operand" "r,r")
7134 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7135 (match_operand:SI 0 "s_register_operand" "r,r")))]
7138 [(set_attr "conds" "set")
7139 (set_attr "shift" "1")
7140 (set_attr "arch" "32,a")
7141 (set_attr "type" "alu_shift,alu_shift_reg")])
7143 (define_insn "*arm_cmpsi_negshiftsi_si"
7144 [(set (reg:CC_Z CC_REGNUM)
7146 (neg:SI (match_operator:SI 1 "shift_operator"
7147 [(match_operand:SI 2 "s_register_operand" "r")
7148 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7149 (match_operand:SI 0 "s_register_operand" "r")))]
7152 [(set_attr "conds" "set")
7153 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7154 (const_string "alu_shift")
7155 (const_string "alu_shift_reg")))]
7158 ;; DImode comparisons. The generic code generates branches that
7159 ;; if-conversion can not reduce to a conditional compare, so we do
7162 (define_insn "*arm_cmpdi_insn"
7163 [(set (reg:CC_NCV CC_REGNUM)
7164 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7165 (match_operand:DI 1 "arm_di_operand" "rDi")))
7166 (clobber (match_scratch:SI 2 "=r"))]
7167 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7168 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7169 [(set_attr "conds" "set")
7170 (set_attr "length" "8")]
7173 (define_insn "*arm_cmpdi_unsigned"
7174 [(set (reg:CC_CZ CC_REGNUM)
7175 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7176 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7178 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7179 [(set_attr "conds" "set")
7180 (set_attr "length" "8")]
7183 (define_insn "*arm_cmpdi_zero"
7184 [(set (reg:CC_Z CC_REGNUM)
7185 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7187 (clobber (match_scratch:SI 1 "=r"))]
7189 "orr%.\\t%1, %Q0, %R0"
7190 [(set_attr "conds" "set")]
7193 (define_insn "*thumb_cmpdi_zero"
7194 [(set (reg:CC_Z CC_REGNUM)
7195 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7197 (clobber (match_scratch:SI 1 "=l"))]
7199 "orr\\t%1, %Q0, %R0"
7200 [(set_attr "conds" "set")
7201 (set_attr "length" "2")]
7204 ;; Cirrus SF compare instruction
7205 (define_insn "*cirrus_cmpsf"
7206 [(set (reg:CCFP CC_REGNUM)
7207 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7208 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7209 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7210 "cfcmps%?\\tr15, %V0, %V1"
7211 [(set_attr "type" "mav_farith")
7212 (set_attr "cirrus" "compare")]
7215 ;; Cirrus DF compare instruction
7216 (define_insn "*cirrus_cmpdf"
7217 [(set (reg:CCFP CC_REGNUM)
7218 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7219 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7220 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7221 "cfcmpd%?\\tr15, %V0, %V1"
7222 [(set_attr "type" "mav_farith")
7223 (set_attr "cirrus" "compare")]
7226 (define_insn "*cirrus_cmpdi"
7227 [(set (reg:CC CC_REGNUM)
7228 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7229 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7230 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7231 "cfcmp64%?\\tr15, %V0, %V1"
7232 [(set_attr "type" "mav_farith")
7233 (set_attr "cirrus" "compare")]
7236 ; This insn allows redundant compares to be removed by cse, nothing should
7237 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7238 ; is deleted later on. The match_dup will match the mode here, so that
7239 ; mode changes of the condition codes aren't lost by this even though we don't
7240 ; specify what they are.
7242 (define_insn "*deleted_compare"
7243 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7245 "\\t%@ deleted compare"
7246 [(set_attr "conds" "set")
7247 (set_attr "length" "0")]
7251 ;; Conditional branch insns
7253 (define_expand "cbranch_cc"
7255 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7256 (match_operand 2 "" "")])
7257 (label_ref (match_operand 3 "" ""))
7260 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7261 operands[1], operands[2]);
7262 operands[2] = const0_rtx;"
7266 ;; Patterns to match conditional branch insns.
7269 (define_insn "*arm_cond_branch"
7271 (if_then_else (match_operator 1 "arm_comparison_operator"
7272 [(match_operand 2 "cc_register" "") (const_int 0)])
7273 (label_ref (match_operand 0 "" ""))
7277 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7279 arm_ccfsm_state += 2;
7282 return \"b%d1\\t%l0\";
7284 [(set_attr "conds" "use")
7285 (set_attr "type" "branch")]
7288 (define_insn "*arm_cond_branch_reversed"
7290 (if_then_else (match_operator 1 "arm_comparison_operator"
7291 [(match_operand 2 "cc_register" "") (const_int 0)])
7293 (label_ref (match_operand 0 "" ""))))]
7296 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7298 arm_ccfsm_state += 2;
7301 return \"b%D1\\t%l0\";
7303 [(set_attr "conds" "use")
7304 (set_attr "type" "branch")]
7311 (define_expand "cstore_cc"
7312 [(set (match_operand:SI 0 "s_register_operand" "")
7313 (match_operator:SI 1 "" [(match_operand 2 "" "")
7314 (match_operand 3 "" "")]))]
7316 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7317 operands[2], operands[3]);
7318 operands[3] = const0_rtx;"
7321 (define_insn "*mov_scc"
7322 [(set (match_operand:SI 0 "s_register_operand" "=r")
7323 (match_operator:SI 1 "arm_comparison_operator"
7324 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7326 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7327 [(set_attr "conds" "use")
7328 (set_attr "insn" "mov")
7329 (set_attr "length" "8")]
7332 (define_insn "*mov_negscc"
7333 [(set (match_operand:SI 0 "s_register_operand" "=r")
7334 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7335 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7337 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7338 [(set_attr "conds" "use")
7339 (set_attr "insn" "mov")
7340 (set_attr "length" "8")]
7343 (define_insn "*mov_notscc"
7344 [(set (match_operand:SI 0 "s_register_operand" "=r")
7345 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7346 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7348 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7349 [(set_attr "conds" "use")
7350 (set_attr "insn" "mov")
7351 (set_attr "length" "8")]
7354 (define_expand "cstoresi4"
7355 [(set (match_operand:SI 0 "s_register_operand" "")
7356 (match_operator:SI 1 "arm_comparison_operator"
7357 [(match_operand:SI 2 "s_register_operand" "")
7358 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7359 "TARGET_32BIT || TARGET_THUMB1"
7361 rtx op3, scratch, scratch2;
7365 if (!arm_add_operand (operands[3], SImode))
7366 operands[3] = force_reg (SImode, operands[3]);
7367 emit_insn (gen_cstore_cc (operands[0], operands[1],
7368 operands[2], operands[3]));
7372 if (operands[3] == const0_rtx)
7374 switch (GET_CODE (operands[1]))
7377 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7381 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7385 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7386 NULL_RTX, 0, OPTAB_WIDEN);
7387 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7388 NULL_RTX, 0, OPTAB_WIDEN);
7389 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7390 operands[0], 1, OPTAB_WIDEN);
7394 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7396 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7397 NULL_RTX, 1, OPTAB_WIDEN);
7401 scratch = expand_binop (SImode, ashr_optab, operands[2],
7402 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7403 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7404 NULL_RTX, 0, OPTAB_WIDEN);
7405 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7409 /* LT is handled by generic code. No need for unsigned with 0. */
7416 switch (GET_CODE (operands[1]))
7419 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7420 NULL_RTX, 0, OPTAB_WIDEN);
7421 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7425 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7426 NULL_RTX, 0, OPTAB_WIDEN);
7427 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7431 op3 = force_reg (SImode, operands[3]);
7433 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7434 NULL_RTX, 1, OPTAB_WIDEN);
7435 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7436 NULL_RTX, 0, OPTAB_WIDEN);
7437 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7443 if (!thumb1_cmp_operand (op3, SImode))
7444 op3 = force_reg (SImode, op3);
7445 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7446 NULL_RTX, 0, OPTAB_WIDEN);
7447 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7448 NULL_RTX, 1, OPTAB_WIDEN);
7449 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7454 op3 = force_reg (SImode, operands[3]);
7455 scratch = force_reg (SImode, const0_rtx);
7456 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7462 if (!thumb1_cmp_operand (op3, SImode))
7463 op3 = force_reg (SImode, op3);
7464 scratch = force_reg (SImode, const0_rtx);
7465 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7471 if (!thumb1_cmp_operand (op3, SImode))
7472 op3 = force_reg (SImode, op3);
7473 scratch = gen_reg_rtx (SImode);
7474 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7478 op3 = force_reg (SImode, operands[3]);
7479 scratch = gen_reg_rtx (SImode);
7480 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7483 /* No good sequences for GT, LT. */
7490 (define_expand "cstoresf4"
7491 [(set (match_operand:SI 0 "s_register_operand" "")
7492 (match_operator:SI 1 "arm_comparison_operator"
7493 [(match_operand:SF 2 "s_register_operand" "")
7494 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7495 "TARGET_32BIT && TARGET_HARD_FLOAT"
7496 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7497 operands[2], operands[3])); DONE;"
7500 (define_expand "cstoredf4"
7501 [(set (match_operand:SI 0 "s_register_operand" "")
7502 (match_operator:SI 1 "arm_comparison_operator"
7503 [(match_operand:DF 2 "s_register_operand" "")
7504 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7505 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7506 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7507 operands[2], operands[3])); DONE;"
7510 (define_expand "cstoredi4"
7511 [(set (match_operand:SI 0 "s_register_operand" "")
7512 (match_operator:SI 1 "arm_comparison_operator"
7513 [(match_operand:DI 2 "cmpdi_operand" "")
7514 (match_operand:DI 3 "cmpdi_operand" "")]))]
7517 rtx swap = NULL_RTX;
7518 enum rtx_code code = GET_CODE (operands[1]);
7520 /* We should not have two constants. */
7521 gcc_assert (GET_MODE (operands[2]) == DImode
7522 || GET_MODE (operands[3]) == DImode);
7524 /* Flip unimplemented DImode comparisons to a form that
7525 arm_gen_compare_reg can handle. */
7529 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7531 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7533 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7535 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7540 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7543 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7549 (define_expand "cstoresi_eq0_thumb1"
7551 [(set (match_operand:SI 0 "s_register_operand" "")
7552 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7554 (clobber (match_dup:SI 2))])]
7556 "operands[2] = gen_reg_rtx (SImode);"
7559 (define_expand "cstoresi_ne0_thumb1"
7561 [(set (match_operand:SI 0 "s_register_operand" "")
7562 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7564 (clobber (match_dup:SI 2))])]
7566 "operands[2] = gen_reg_rtx (SImode);"
7569 (define_insn "*cstoresi_eq0_thumb1_insn"
7570 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7571 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7573 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7576 neg\\t%0, %1\;adc\\t%0, %0, %1
7577 neg\\t%2, %1\;adc\\t%0, %1, %2"
7578 [(set_attr "length" "4")]
7581 (define_insn "*cstoresi_ne0_thumb1_insn"
7582 [(set (match_operand:SI 0 "s_register_operand" "=l")
7583 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7585 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7587 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7588 [(set_attr "length" "4")]
7591 ;; Used as part of the expansion of thumb ltu and gtu sequences
7592 (define_insn "cstoresi_nltu_thumb1"
7593 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7594 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7595 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7597 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7598 [(set_attr "length" "4")]
7601 (define_insn_and_split "cstoresi_ltu_thumb1"
7602 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7603 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7604 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7609 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7610 (set (match_dup 0) (neg:SI (match_dup 3)))]
7611 "operands[3] = gen_reg_rtx (SImode);"
7612 [(set_attr "length" "4")]
7615 ;; Used as part of the expansion of thumb les sequence.
7616 (define_insn "thumb1_addsi3_addgeu"
7617 [(set (match_operand:SI 0 "s_register_operand" "=l")
7618 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7619 (match_operand:SI 2 "s_register_operand" "l"))
7620 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7621 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7623 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7624 [(set_attr "length" "4")]
7628 ;; Conditional move insns
7630 (define_expand "movsicc"
7631 [(set (match_operand:SI 0 "s_register_operand" "")
7632 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7633 (match_operand:SI 2 "arm_not_operand" "")
7634 (match_operand:SI 3 "arm_not_operand" "")))]
7638 enum rtx_code code = GET_CODE (operands[1]);
7641 if (code == UNEQ || code == LTGT)
7644 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7645 XEXP (operands[1], 1));
7646 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7650 (define_expand "movsfcc"
7651 [(set (match_operand:SF 0 "s_register_operand" "")
7652 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7653 (match_operand:SF 2 "s_register_operand" "")
7654 (match_operand:SF 3 "nonmemory_operand" "")))]
7655 "TARGET_32BIT && TARGET_HARD_FLOAT"
7658 enum rtx_code code = GET_CODE (operands[1]);
7661 if (code == UNEQ || code == LTGT)
7664 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7665 Otherwise, ensure it is a valid FP add operand */
7666 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7667 || (!arm_float_add_operand (operands[3], SFmode)))
7668 operands[3] = force_reg (SFmode, operands[3]);
7670 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7671 XEXP (operands[1], 1));
7672 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7676 (define_expand "movdfcc"
7677 [(set (match_operand:DF 0 "s_register_operand" "")
7678 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7679 (match_operand:DF 2 "s_register_operand" "")
7680 (match_operand:DF 3 "arm_float_add_operand" "")))]
7681 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7684 enum rtx_code code = GET_CODE (operands[1]);
7687 if (code == UNEQ || code == LTGT)
7690 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7691 XEXP (operands[1], 1));
7692 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7696 (define_insn "*movsicc_insn"
7697 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7699 (match_operator 3 "arm_comparison_operator"
7700 [(match_operand 4 "cc_register" "") (const_int 0)])
7701 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7702 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7709 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7710 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7711 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7712 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7713 [(set_attr "length" "4,4,4,4,8,8,8,8")
7714 (set_attr "conds" "use")
7715 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
7718 (define_insn "*movsfcc_soft_insn"
7719 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7720 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7721 [(match_operand 4 "cc_register" "") (const_int 0)])
7722 (match_operand:SF 1 "s_register_operand" "0,r")
7723 (match_operand:SF 2 "s_register_operand" "r,0")))]
7724 "TARGET_ARM && TARGET_SOFT_FLOAT"
7728 [(set_attr "conds" "use")
7729 (set_attr "insn" "mov")]
7733 ;; Jump and linkage insns
7735 (define_expand "jump"
7737 (label_ref (match_operand 0 "" "")))]
7742 (define_insn "*arm_jump"
7744 (label_ref (match_operand 0 "" "")))]
7748 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7750 arm_ccfsm_state += 2;
7753 return \"b%?\\t%l0\";
7756 [(set_attr "predicable" "yes")]
7759 (define_insn "*thumb_jump"
7761 (label_ref (match_operand 0 "" "")))]
7764 if (get_attr_length (insn) == 2)
7766 return \"bl\\t%l0\\t%@ far jump\";
7768 [(set (attr "far_jump")
7770 (eq_attr "length" "4")
7771 (const_string "yes")
7772 (const_string "no")))
7773 (set (attr "length")
7775 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7776 (le (minus (match_dup 0) (pc)) (const_int 2048)))
7781 (define_expand "call"
7782 [(parallel [(call (match_operand 0 "memory_operand" "")
7783 (match_operand 1 "general_operand" ""))
7784 (use (match_operand 2 "" ""))
7785 (clobber (reg:SI LR_REGNUM))])]
7791 /* In an untyped call, we can get NULL for operand 2. */
7792 if (operands[2] == NULL_RTX)
7793 operands[2] = const0_rtx;
7795 /* Decide if we should generate indirect calls by loading the
7796 32-bit address of the callee into a register before performing the
7798 callee = XEXP (operands[0], 0);
7799 if (GET_CODE (callee) == SYMBOL_REF
7800 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7802 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7804 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7805 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7810 (define_expand "call_internal"
7811 [(parallel [(call (match_operand 0 "memory_operand" "")
7812 (match_operand 1 "general_operand" ""))
7813 (use (match_operand 2 "" ""))
7814 (clobber (reg:SI LR_REGNUM))])])
7816 (define_insn "*call_reg_armv5"
7817 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7818 (match_operand 1 "" ""))
7819 (use (match_operand 2 "" ""))
7820 (clobber (reg:SI LR_REGNUM))]
7821 "TARGET_ARM && arm_arch5"
7823 [(set_attr "type" "call")]
7826 (define_insn "*call_reg_arm"
7827 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7828 (match_operand 1 "" ""))
7829 (use (match_operand 2 "" ""))
7830 (clobber (reg:SI LR_REGNUM))]
7831 "TARGET_ARM && !arm_arch5"
7833 return output_call (operands);
7835 ;; length is worst case, normally it is only two
7836 [(set_attr "length" "12")
7837 (set_attr "type" "call")]
7841 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7842 ;; considered a function call by the branch predictor of some cores (PR40887).
7843 ;; Falls back to blx rN (*call_reg_armv5).
7845 (define_insn "*call_mem"
7846 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7847 (match_operand 1 "" ""))
7848 (use (match_operand 2 "" ""))
7849 (clobber (reg:SI LR_REGNUM))]
7850 "TARGET_ARM && !arm_arch5"
7852 return output_call_mem (operands);
7854 [(set_attr "length" "12")
7855 (set_attr "type" "call")]
7858 (define_insn "*call_reg_thumb1_v5"
7859 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7860 (match_operand 1 "" ""))
7861 (use (match_operand 2 "" ""))
7862 (clobber (reg:SI LR_REGNUM))]
7863 "TARGET_THUMB1 && arm_arch5"
7865 [(set_attr "length" "2")
7866 (set_attr "type" "call")]
7869 (define_insn "*call_reg_thumb1"
7870 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7871 (match_operand 1 "" ""))
7872 (use (match_operand 2 "" ""))
7873 (clobber (reg:SI LR_REGNUM))]
7874 "TARGET_THUMB1 && !arm_arch5"
7877 if (!TARGET_CALLER_INTERWORKING)
7878 return thumb_call_via_reg (operands[0]);
7879 else if (operands[1] == const0_rtx)
7880 return \"bl\\t%__interwork_call_via_%0\";
7881 else if (frame_pointer_needed)
7882 return \"bl\\t%__interwork_r7_call_via_%0\";
7884 return \"bl\\t%__interwork_r11_call_via_%0\";
7886 [(set_attr "type" "call")]
7889 (define_expand "call_value"
7890 [(parallel [(set (match_operand 0 "" "")
7891 (call (match_operand 1 "memory_operand" "")
7892 (match_operand 2 "general_operand" "")))
7893 (use (match_operand 3 "" ""))
7894 (clobber (reg:SI LR_REGNUM))])]
7900 /* In an untyped call, we can get NULL for operand 2. */
7901 if (operands[3] == 0)
7902 operands[3] = const0_rtx;
7904 /* Decide if we should generate indirect calls by loading the
7905 32-bit address of the callee into a register before performing the
7907 callee = XEXP (operands[1], 0);
7908 if (GET_CODE (callee) == SYMBOL_REF
7909 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7911 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7913 pat = gen_call_value_internal (operands[0], operands[1],
7914 operands[2], operands[3]);
7915 arm_emit_call_insn (pat, XEXP (operands[1], 0));
7920 (define_expand "call_value_internal"
7921 [(parallel [(set (match_operand 0 "" "")
7922 (call (match_operand 1 "memory_operand" "")
7923 (match_operand 2 "general_operand" "")))
7924 (use (match_operand 3 "" ""))
7925 (clobber (reg:SI LR_REGNUM))])])
7927 (define_insn "*call_value_reg_armv5"
7928 [(set (match_operand 0 "" "")
7929 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7930 (match_operand 2 "" "")))
7931 (use (match_operand 3 "" ""))
7932 (clobber (reg:SI LR_REGNUM))]
7933 "TARGET_ARM && arm_arch5"
7935 [(set_attr "type" "call")]
7938 (define_insn "*call_value_reg_arm"
7939 [(set (match_operand 0 "" "")
7940 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7941 (match_operand 2 "" "")))
7942 (use (match_operand 3 "" ""))
7943 (clobber (reg:SI LR_REGNUM))]
7944 "TARGET_ARM && !arm_arch5"
7946 return output_call (&operands[1]);
7948 [(set_attr "length" "12")
7949 (set_attr "type" "call")]
7952 ;; Note: see *call_mem
7954 (define_insn "*call_value_mem"
7955 [(set (match_operand 0 "" "")
7956 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7957 (match_operand 2 "" "")))
7958 (use (match_operand 3 "" ""))
7959 (clobber (reg:SI LR_REGNUM))]
7960 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7962 return output_call_mem (&operands[1]);
7964 [(set_attr "length" "12")
7965 (set_attr "type" "call")]
7968 (define_insn "*call_value_reg_thumb1_v5"
7969 [(set (match_operand 0 "" "")
7970 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7971 (match_operand 2 "" "")))
7972 (use (match_operand 3 "" ""))
7973 (clobber (reg:SI LR_REGNUM))]
7974 "TARGET_THUMB1 && arm_arch5"
7976 [(set_attr "length" "2")
7977 (set_attr "type" "call")]
7980 (define_insn "*call_value_reg_thumb1"
7981 [(set (match_operand 0 "" "")
7982 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7983 (match_operand 2 "" "")))
7984 (use (match_operand 3 "" ""))
7985 (clobber (reg:SI LR_REGNUM))]
7986 "TARGET_THUMB1 && !arm_arch5"
7989 if (!TARGET_CALLER_INTERWORKING)
7990 return thumb_call_via_reg (operands[1]);
7991 else if (operands[2] == const0_rtx)
7992 return \"bl\\t%__interwork_call_via_%1\";
7993 else if (frame_pointer_needed)
7994 return \"bl\\t%__interwork_r7_call_via_%1\";
7996 return \"bl\\t%__interwork_r11_call_via_%1\";
7998 [(set_attr "type" "call")]
8001 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8002 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8004 (define_insn "*call_symbol"
8005 [(call (mem:SI (match_operand:SI 0 "" ""))
8006 (match_operand 1 "" ""))
8007 (use (match_operand 2 "" ""))
8008 (clobber (reg:SI LR_REGNUM))]
8010 && (GET_CODE (operands[0]) == SYMBOL_REF)
8011 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8014 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8016 [(set_attr "type" "call")]
8019 (define_insn "*call_value_symbol"
8020 [(set (match_operand 0 "" "")
8021 (call (mem:SI (match_operand:SI 1 "" ""))
8022 (match_operand:SI 2 "" "")))
8023 (use (match_operand 3 "" ""))
8024 (clobber (reg:SI LR_REGNUM))]
8026 && (GET_CODE (operands[1]) == SYMBOL_REF)
8027 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8030 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8032 [(set_attr "type" "call")]
8035 (define_insn "*call_insn"
8036 [(call (mem:SI (match_operand:SI 0 "" ""))
8037 (match_operand:SI 1 "" ""))
8038 (use (match_operand 2 "" ""))
8039 (clobber (reg:SI LR_REGNUM))]
8041 && GET_CODE (operands[0]) == SYMBOL_REF
8042 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8044 [(set_attr "length" "4")
8045 (set_attr "type" "call")]
8048 (define_insn "*call_value_insn"
8049 [(set (match_operand 0 "" "")
8050 (call (mem:SI (match_operand 1 "" ""))
8051 (match_operand 2 "" "")))
8052 (use (match_operand 3 "" ""))
8053 (clobber (reg:SI LR_REGNUM))]
8055 && GET_CODE (operands[1]) == SYMBOL_REF
8056 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8058 [(set_attr "length" "4")
8059 (set_attr "type" "call")]
8062 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8063 (define_expand "sibcall"
8064 [(parallel [(call (match_operand 0 "memory_operand" "")
8065 (match_operand 1 "general_operand" ""))
8067 (use (match_operand 2 "" ""))])]
8071 if (operands[2] == NULL_RTX)
8072 operands[2] = const0_rtx;
8076 (define_expand "sibcall_value"
8077 [(parallel [(set (match_operand 0 "" "")
8078 (call (match_operand 1 "memory_operand" "")
8079 (match_operand 2 "general_operand" "")))
8081 (use (match_operand 3 "" ""))])]
8085 if (operands[3] == NULL_RTX)
8086 operands[3] = const0_rtx;
8090 (define_insn "*sibcall_insn"
8091 [(call (mem:SI (match_operand:SI 0 "" "X"))
8092 (match_operand 1 "" ""))
8094 (use (match_operand 2 "" ""))]
8095 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8097 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8099 [(set_attr "type" "call")]
8102 (define_insn "*sibcall_value_insn"
8103 [(set (match_operand 0 "" "")
8104 (call (mem:SI (match_operand:SI 1 "" "X"))
8105 (match_operand 2 "" "")))
8107 (use (match_operand 3 "" ""))]
8108 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8110 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8112 [(set_attr "type" "call")]
8115 (define_expand "return"
8117 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8120 ;; Often the return insn will be the same as loading from memory, so set attr
8121 (define_insn "*arm_return"
8123 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8126 if (arm_ccfsm_state == 2)
8128 arm_ccfsm_state += 2;
8131 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8133 [(set_attr "type" "load1")
8134 (set_attr "length" "12")
8135 (set_attr "predicable" "yes")]
8138 (define_insn "*cond_return"
8140 (if_then_else (match_operator 0 "arm_comparison_operator"
8141 [(match_operand 1 "cc_register" "") (const_int 0)])
8144 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8147 if (arm_ccfsm_state == 2)
8149 arm_ccfsm_state += 2;
8152 return output_return_instruction (operands[0], TRUE, FALSE);
8154 [(set_attr "conds" "use")
8155 (set_attr "length" "12")
8156 (set_attr "type" "load1")]
8159 (define_insn "*cond_return_inverted"
8161 (if_then_else (match_operator 0 "arm_comparison_operator"
8162 [(match_operand 1 "cc_register" "") (const_int 0)])
8165 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8168 if (arm_ccfsm_state == 2)
8170 arm_ccfsm_state += 2;
8173 return output_return_instruction (operands[0], TRUE, TRUE);
8175 [(set_attr "conds" "use")
8176 (set_attr "length" "12")
8177 (set_attr "type" "load1")]
8180 ;; Generate a sequence of instructions to determine if the processor is
8181 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8184 (define_expand "return_addr_mask"
8186 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8188 (set (match_operand:SI 0 "s_register_operand" "")
8189 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8191 (const_int 67108860)))] ; 0x03fffffc
8194 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8197 (define_insn "*check_arch2"
8198 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8199 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8202 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8203 [(set_attr "length" "8")
8204 (set_attr "conds" "set")]
8207 ;; Call subroutine returning any type.
8209 (define_expand "untyped_call"
8210 [(parallel [(call (match_operand 0 "" "")
8212 (match_operand 1 "" "")
8213 (match_operand 2 "" "")])]
8218 rtx par = gen_rtx_PARALLEL (VOIDmode,
8219 rtvec_alloc (XVECLEN (operands[2], 0)));
8220 rtx addr = gen_reg_rtx (Pmode);
8224 emit_move_insn (addr, XEXP (operands[1], 0));
8225 mem = change_address (operands[1], BLKmode, addr);
8227 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8229 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8231 /* Default code only uses r0 as a return value, but we could
8232 be using anything up to 4 registers. */
8233 if (REGNO (src) == R0_REGNUM)
8234 src = gen_rtx_REG (TImode, R0_REGNUM);
8236 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8238 size += GET_MODE_SIZE (GET_MODE (src));
8241 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8246 for (i = 0; i < XVECLEN (par, 0); i++)
8248 HOST_WIDE_INT offset = 0;
8249 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8252 emit_move_insn (addr, plus_constant (addr, size));
8254 mem = change_address (mem, GET_MODE (reg), NULL);
8255 if (REGNO (reg) == R0_REGNUM)
8257 /* On thumb we have to use a write-back instruction. */
8258 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8259 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8260 size = TARGET_ARM ? 16 : 0;
8264 emit_move_insn (mem, reg);
8265 size = GET_MODE_SIZE (GET_MODE (reg));
8269 /* The optimizer does not know that the call sets the function value
8270 registers we stored in the result block. We avoid problems by
8271 claiming that all hard registers are used and clobbered at this
8273 emit_insn (gen_blockage ());
8279 (define_expand "untyped_return"
8280 [(match_operand:BLK 0 "memory_operand" "")
8281 (match_operand 1 "" "")]
8286 rtx addr = gen_reg_rtx (Pmode);
8290 emit_move_insn (addr, XEXP (operands[0], 0));
8291 mem = change_address (operands[0], BLKmode, addr);
8293 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8295 HOST_WIDE_INT offset = 0;
8296 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8299 emit_move_insn (addr, plus_constant (addr, size));
8301 mem = change_address (mem, GET_MODE (reg), NULL);
8302 if (REGNO (reg) == R0_REGNUM)
8304 /* On thumb we have to use a write-back instruction. */
8305 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8306 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8307 size = TARGET_ARM ? 16 : 0;
8311 emit_move_insn (reg, mem);
8312 size = GET_MODE_SIZE (GET_MODE (reg));
8316 /* Emit USE insns before the return. */
8317 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8318 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8320 /* Construct the return. */
8321 expand_naked_return ();
8327 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8328 ;; all of memory. This blocks insns from being moved across this point.
8330 (define_insn "blockage"
8331 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8334 [(set_attr "length" "0")
8335 (set_attr "type" "block")]
8338 (define_expand "casesi"
8339 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8340 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8341 (match_operand:SI 2 "const_int_operand" "") ; total range
8342 (match_operand:SI 3 "" "") ; table label
8343 (match_operand:SI 4 "" "")] ; Out of range label
8344 "TARGET_32BIT || optimize_size || flag_pic"
8347 enum insn_code code;
8348 if (operands[1] != const0_rtx)
8350 rtx reg = gen_reg_rtx (SImode);
8352 emit_insn (gen_addsi3 (reg, operands[0],
8353 GEN_INT (-INTVAL (operands[1]))));
8358 code = CODE_FOR_arm_casesi_internal;
8359 else if (TARGET_THUMB1)
8360 code = CODE_FOR_thumb1_casesi_internal_pic;
8362 code = CODE_FOR_thumb2_casesi_internal_pic;
8364 code = CODE_FOR_thumb2_casesi_internal;
8366 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8367 operands[2] = force_reg (SImode, operands[2]);
8369 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8370 operands[3], operands[4]));
8375 ;; The USE in this pattern is needed to tell flow analysis that this is
8376 ;; a CASESI insn. It has no other purpose.
8377 (define_insn "arm_casesi_internal"
8378 [(parallel [(set (pc)
8380 (leu (match_operand:SI 0 "s_register_operand" "r")
8381 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8382 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8383 (label_ref (match_operand 2 "" ""))))
8384 (label_ref (match_operand 3 "" ""))))
8385 (clobber (reg:CC CC_REGNUM))
8386 (use (label_ref (match_dup 2)))])]
8390 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8391 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8393 [(set_attr "conds" "clob")
8394 (set_attr "length" "12")]
8397 (define_expand "thumb1_casesi_internal_pic"
8398 [(match_operand:SI 0 "s_register_operand" "")
8399 (match_operand:SI 1 "thumb1_cmp_operand" "")
8400 (match_operand 2 "" "")
8401 (match_operand 3 "" "")]
8405 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8406 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8408 reg0 = gen_rtx_REG (SImode, 0);
8409 emit_move_insn (reg0, operands[0]);
8410 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8415 (define_insn "thumb1_casesi_dispatch"
8416 [(parallel [(set (pc) (unspec [(reg:SI 0)
8417 (label_ref (match_operand 0 "" ""))
8418 ;; (label_ref (match_operand 1 "" ""))
8420 UNSPEC_THUMB1_CASESI))
8421 (clobber (reg:SI IP_REGNUM))
8422 (clobber (reg:SI LR_REGNUM))])]
8424 "* return thumb1_output_casesi(operands);"
8425 [(set_attr "length" "4")]
8428 (define_expand "indirect_jump"
8430 (match_operand:SI 0 "s_register_operand" ""))]
8433 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8434 address and use bx. */
8438 tmp = gen_reg_rtx (SImode);
8439 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8445 ;; NB Never uses BX.
8446 (define_insn "*arm_indirect_jump"
8448 (match_operand:SI 0 "s_register_operand" "r"))]
8450 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8451 [(set_attr "predicable" "yes")]
8454 (define_insn "*load_indirect_jump"
8456 (match_operand:SI 0 "memory_operand" "m"))]
8458 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8459 [(set_attr "type" "load1")
8460 (set_attr "pool_range" "4096")
8461 (set_attr "neg_pool_range" "4084")
8462 (set_attr "predicable" "yes")]
8465 ;; NB Never uses BX.
8466 (define_insn "*thumb1_indirect_jump"
8468 (match_operand:SI 0 "register_operand" "l*r"))]
8471 [(set_attr "conds" "clob")
8472 (set_attr "length" "2")]
8482 if (TARGET_UNIFIED_ASM)
8485 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8486 return \"mov\\tr8, r8\";
8488 [(set (attr "length")
8489 (if_then_else (eq_attr "is_thumb" "yes")
8495 ;; Patterns to allow combination of arithmetic, cond code and shifts
8497 (define_insn "*arith_shiftsi"
8498 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8499 (match_operator:SI 1 "shiftable_operator"
8500 [(match_operator:SI 3 "shift_operator"
8501 [(match_operand:SI 4 "s_register_operand" "r,r")
8502 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8503 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8505 "%i1%?\\t%0, %2, %4%S3"
8506 [(set_attr "predicable" "yes")
8507 (set_attr "shift" "4")
8508 (set_attr "arch" "32,a")
8509 ;; We have to make sure to disable the second alternative if
8510 ;; the shift_operator is MULT, since otherwise the insn will
8511 ;; also match a multiply_accumulate pattern and validate_change
8512 ;; will allow a replacement of the constant with a register
8513 ;; despite the checks done in shift_operator.
8514 (set_attr_alternative "insn_enabled"
8515 [(const_string "yes")
8517 (match_operand:SI 3 "mult_operator" "")
8518 (const_string "no") (const_string "yes"))])
8519 (set_attr "type" "alu_shift,alu_shift_reg")])
8522 [(set (match_operand:SI 0 "s_register_operand" "")
8523 (match_operator:SI 1 "shiftable_operator"
8524 [(match_operator:SI 2 "shiftable_operator"
8525 [(match_operator:SI 3 "shift_operator"
8526 [(match_operand:SI 4 "s_register_operand" "")
8527 (match_operand:SI 5 "reg_or_int_operand" "")])
8528 (match_operand:SI 6 "s_register_operand" "")])
8529 (match_operand:SI 7 "arm_rhs_operand" "")]))
8530 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8533 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8536 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8539 (define_insn "*arith_shiftsi_compare0"
8540 [(set (reg:CC_NOOV CC_REGNUM)
8542 (match_operator:SI 1 "shiftable_operator"
8543 [(match_operator:SI 3 "shift_operator"
8544 [(match_operand:SI 4 "s_register_operand" "r,r")
8545 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8546 (match_operand:SI 2 "s_register_operand" "r,r")])
8548 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8549 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8552 "%i1%.\\t%0, %2, %4%S3"
8553 [(set_attr "conds" "set")
8554 (set_attr "shift" "4")
8555 (set_attr "arch" "32,a")
8556 (set_attr "type" "alu_shift,alu_shift_reg")])
8558 (define_insn "*arith_shiftsi_compare0_scratch"
8559 [(set (reg:CC_NOOV CC_REGNUM)
8561 (match_operator:SI 1 "shiftable_operator"
8562 [(match_operator:SI 3 "shift_operator"
8563 [(match_operand:SI 4 "s_register_operand" "r,r")
8564 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8565 (match_operand:SI 2 "s_register_operand" "r,r")])
8567 (clobber (match_scratch:SI 0 "=r,r"))]
8569 "%i1%.\\t%0, %2, %4%S3"
8570 [(set_attr "conds" "set")
8571 (set_attr "shift" "4")
8572 (set_attr "arch" "32,a")
8573 (set_attr "type" "alu_shift,alu_shift_reg")])
8575 (define_insn "*sub_shiftsi"
8576 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8577 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8578 (match_operator:SI 2 "shift_operator"
8579 [(match_operand:SI 3 "s_register_operand" "r,r")
8580 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8582 "sub%?\\t%0, %1, %3%S2"
8583 [(set_attr "predicable" "yes")
8584 (set_attr "shift" "3")
8585 (set_attr "arch" "32,a")
8586 (set_attr "type" "alu_shift,alu_shift_reg")])
8588 (define_insn "*sub_shiftsi_compare0"
8589 [(set (reg:CC_NOOV CC_REGNUM)
8591 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8592 (match_operator:SI 2 "shift_operator"
8593 [(match_operand:SI 3 "s_register_operand" "r,r")
8594 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8596 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8597 (minus:SI (match_dup 1)
8598 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8600 "sub%.\\t%0, %1, %3%S2"
8601 [(set_attr "conds" "set")
8602 (set_attr "shift" "3")
8603 (set_attr "arch" "32,a")
8604 (set_attr "type" "alu_shift,alu_shift_reg")])
8606 (define_insn "*sub_shiftsi_compare0_scratch"
8607 [(set (reg:CC_NOOV CC_REGNUM)
8609 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8610 (match_operator:SI 2 "shift_operator"
8611 [(match_operand:SI 3 "s_register_operand" "r,r")
8612 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8614 (clobber (match_scratch:SI 0 "=r,r"))]
8616 "sub%.\\t%0, %1, %3%S2"
8617 [(set_attr "conds" "set")
8618 (set_attr "shift" "3")
8619 (set_attr "arch" "32,a")
8620 (set_attr "type" "alu_shift,alu_shift_reg")])
8623 (define_insn "*and_scc"
8624 [(set (match_operand:SI 0 "s_register_operand" "=r")
8625 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8626 [(match_operand 3 "cc_register" "") (const_int 0)])
8627 (match_operand:SI 2 "s_register_operand" "r")))]
8629 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8630 [(set_attr "conds" "use")
8631 (set_attr "insn" "mov")
8632 (set_attr "length" "8")]
8635 (define_insn "*ior_scc"
8636 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8637 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8638 [(match_operand 3 "cc_register" "") (const_int 0)])
8639 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8643 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8644 [(set_attr "conds" "use")
8645 (set_attr "length" "4,8")]
8648 ; A series of splitters for the compare_scc pattern below. Note that
8649 ; order is important.
8651 [(set (match_operand:SI 0 "s_register_operand" "")
8652 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8654 (clobber (reg:CC CC_REGNUM))]
8655 "TARGET_32BIT && reload_completed"
8656 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8659 [(set (match_operand:SI 0 "s_register_operand" "")
8660 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8662 (clobber (reg:CC CC_REGNUM))]
8663 "TARGET_32BIT && reload_completed"
8664 [(set (match_dup 0) (not:SI (match_dup 1)))
8665 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8668 [(set (match_operand:SI 0 "s_register_operand" "")
8669 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8671 (clobber (reg:CC CC_REGNUM))]
8672 "TARGET_32BIT && reload_completed"
8674 [(set (reg:CC CC_REGNUM)
8675 (compare:CC (const_int 1) (match_dup 1)))
8677 (minus:SI (const_int 1) (match_dup 1)))])
8678 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8679 (set (match_dup 0) (const_int 0)))])
8682 [(set (match_operand:SI 0 "s_register_operand" "")
8683 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8684 (match_operand:SI 2 "const_int_operand" "")))
8685 (clobber (reg:CC CC_REGNUM))]
8686 "TARGET_32BIT && reload_completed"
8688 [(set (reg:CC CC_REGNUM)
8689 (compare:CC (match_dup 1) (match_dup 2)))
8690 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8691 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8692 (set (match_dup 0) (const_int 1)))]
8694 operands[3] = GEN_INT (-INTVAL (operands[2]));
8698 [(set (match_operand:SI 0 "s_register_operand" "")
8699 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8700 (match_operand:SI 2 "arm_add_operand" "")))
8701 (clobber (reg:CC CC_REGNUM))]
8702 "TARGET_32BIT && reload_completed"
8704 [(set (reg:CC_NOOV CC_REGNUM)
8705 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8707 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8708 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8709 (set (match_dup 0) (const_int 1)))])
8711 (define_insn_and_split "*compare_scc"
8712 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8713 (match_operator:SI 1 "arm_comparison_operator"
8714 [(match_operand:SI 2 "s_register_operand" "r,r")
8715 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8716 (clobber (reg:CC CC_REGNUM))]
8719 "&& reload_completed"
8720 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8721 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8722 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8725 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8726 operands[2], operands[3]);
8727 enum rtx_code rc = GET_CODE (operands[1]);
8729 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8731 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8732 if (mode == CCFPmode || mode == CCFPEmode)
8733 rc = reverse_condition_maybe_unordered (rc);
8735 rc = reverse_condition (rc);
8736 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8739 ;; Attempt to improve the sequence generated by the compare_scc splitters
8740 ;; not to use conditional execution.
8742 [(set (reg:CC CC_REGNUM)
8743 (compare:CC (match_operand:SI 1 "register_operand" "")
8744 (match_operand:SI 2 "arm_rhs_operand" "")))
8745 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8746 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8747 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8748 (set (match_dup 0) (const_int 1)))
8749 (match_scratch:SI 3 "r")]
8751 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8753 [(set (reg:CC CC_REGNUM)
8754 (compare:CC (const_int 0) (match_dup 3)))
8755 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8757 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8758 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
8760 (define_insn "*cond_move"
8761 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8762 (if_then_else:SI (match_operator 3 "equality_operator"
8763 [(match_operator 4 "arm_comparison_operator"
8764 [(match_operand 5 "cc_register" "") (const_int 0)])
8766 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8767 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8770 if (GET_CODE (operands[3]) == NE)
8772 if (which_alternative != 1)
8773 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8774 if (which_alternative != 0)
8775 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8778 if (which_alternative != 0)
8779 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8780 if (which_alternative != 1)
8781 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8784 [(set_attr "conds" "use")
8785 (set_attr "insn" "mov")
8786 (set_attr "length" "4,4,8")]
8789 (define_insn "*cond_arith"
8790 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8791 (match_operator:SI 5 "shiftable_operator"
8792 [(match_operator:SI 4 "arm_comparison_operator"
8793 [(match_operand:SI 2 "s_register_operand" "r,r")
8794 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8795 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8796 (clobber (reg:CC CC_REGNUM))]
8799 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8800 return \"%i5\\t%0, %1, %2, lsr #31\";
8802 output_asm_insn (\"cmp\\t%2, %3\", operands);
8803 if (GET_CODE (operands[5]) == AND)
8804 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8805 else if (GET_CODE (operands[5]) == MINUS)
8806 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8807 else if (which_alternative != 0)
8808 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8809 return \"%i5%d4\\t%0, %1, #1\";
8811 [(set_attr "conds" "clob")
8812 (set_attr "length" "12")]
8815 (define_insn "*cond_sub"
8816 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8817 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8818 (match_operator:SI 4 "arm_comparison_operator"
8819 [(match_operand:SI 2 "s_register_operand" "r,r")
8820 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8821 (clobber (reg:CC CC_REGNUM))]
8824 output_asm_insn (\"cmp\\t%2, %3\", operands);
8825 if (which_alternative != 0)
8826 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8827 return \"sub%d4\\t%0, %1, #1\";
8829 [(set_attr "conds" "clob")
8830 (set_attr "length" "8,12")]
8833 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
8834 (define_insn "*cmp_ite0"
8835 [(set (match_operand 6 "dominant_cc_register" "")
8838 (match_operator 4 "arm_comparison_operator"
8839 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8840 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8841 (match_operator:SI 5 "arm_comparison_operator"
8842 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8843 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8849 static const char * const opcodes[4][2] =
8851 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8852 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8853 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8854 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8855 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8856 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8857 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8858 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8861 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8863 return opcodes[which_alternative][swap];
8865 [(set_attr "conds" "set")
8866 (set_attr "length" "8")]
8869 (define_insn "*cmp_ite1"
8870 [(set (match_operand 6 "dominant_cc_register" "")
8873 (match_operator 4 "arm_comparison_operator"
8874 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8875 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8876 (match_operator:SI 5 "arm_comparison_operator"
8877 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8878 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8884 static const char * const opcodes[4][2] =
8886 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8887 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8888 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8889 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8890 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8891 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8892 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8893 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8896 comparison_dominates_p (GET_CODE (operands[5]),
8897 reverse_condition (GET_CODE (operands[4])));
8899 return opcodes[which_alternative][swap];
8901 [(set_attr "conds" "set")
8902 (set_attr "length" "8")]
8905 (define_insn "*cmp_and"
8906 [(set (match_operand 6 "dominant_cc_register" "")
8909 (match_operator 4 "arm_comparison_operator"
8910 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8911 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8912 (match_operator:SI 5 "arm_comparison_operator"
8913 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8914 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8919 static const char *const opcodes[4][2] =
8921 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8922 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8923 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8924 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8925 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8926 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8927 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8928 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8931 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8933 return opcodes[which_alternative][swap];
8935 [(set_attr "conds" "set")
8936 (set_attr "predicable" "no")
8937 (set_attr "length" "8")]
8940 (define_insn "*cmp_ior"
8941 [(set (match_operand 6 "dominant_cc_register" "")
8944 (match_operator 4 "arm_comparison_operator"
8945 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8946 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8947 (match_operator:SI 5 "arm_comparison_operator"
8948 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8949 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8954 static const char *const opcodes[4][2] =
8956 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8957 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8958 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8959 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8960 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8961 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8962 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8963 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8966 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8968 return opcodes[which_alternative][swap];
8971 [(set_attr "conds" "set")
8972 (set_attr "length" "8")]
8975 (define_insn_and_split "*ior_scc_scc"
8976 [(set (match_operand:SI 0 "s_register_operand" "=r")
8977 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8978 [(match_operand:SI 1 "s_register_operand" "r")
8979 (match_operand:SI 2 "arm_add_operand" "rIL")])
8980 (match_operator:SI 6 "arm_comparison_operator"
8981 [(match_operand:SI 4 "s_register_operand" "r")
8982 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8983 (clobber (reg:CC CC_REGNUM))]
8985 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8988 "TARGET_ARM && reload_completed"
8992 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8993 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8995 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8997 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9000 [(set_attr "conds" "clob")
9001 (set_attr "length" "16")])
9003 ; If the above pattern is followed by a CMP insn, then the compare is
9004 ; redundant, since we can rework the conditional instruction that follows.
9005 (define_insn_and_split "*ior_scc_scc_cmp"
9006 [(set (match_operand 0 "dominant_cc_register" "")
9007 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9008 [(match_operand:SI 1 "s_register_operand" "r")
9009 (match_operand:SI 2 "arm_add_operand" "rIL")])
9010 (match_operator:SI 6 "arm_comparison_operator"
9011 [(match_operand:SI 4 "s_register_operand" "r")
9012 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9014 (set (match_operand:SI 7 "s_register_operand" "=r")
9015 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9016 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9019 "TARGET_ARM && reload_completed"
9023 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9024 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9026 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9028 [(set_attr "conds" "set")
9029 (set_attr "length" "16")])
9031 (define_insn_and_split "*and_scc_scc"
9032 [(set (match_operand:SI 0 "s_register_operand" "=r")
9033 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9034 [(match_operand:SI 1 "s_register_operand" "r")
9035 (match_operand:SI 2 "arm_add_operand" "rIL")])
9036 (match_operator:SI 6 "arm_comparison_operator"
9037 [(match_operand:SI 4 "s_register_operand" "r")
9038 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9039 (clobber (reg:CC CC_REGNUM))]
9041 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9044 "TARGET_ARM && reload_completed
9045 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9050 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9051 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9053 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9055 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9058 [(set_attr "conds" "clob")
9059 (set_attr "length" "16")])
9061 ; If the above pattern is followed by a CMP insn, then the compare is
9062 ; redundant, since we can rework the conditional instruction that follows.
9063 (define_insn_and_split "*and_scc_scc_cmp"
9064 [(set (match_operand 0 "dominant_cc_register" "")
9065 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9066 [(match_operand:SI 1 "s_register_operand" "r")
9067 (match_operand:SI 2 "arm_add_operand" "rIL")])
9068 (match_operator:SI 6 "arm_comparison_operator"
9069 [(match_operand:SI 4 "s_register_operand" "r")
9070 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9072 (set (match_operand:SI 7 "s_register_operand" "=r")
9073 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9074 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9077 "TARGET_ARM && reload_completed"
9081 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9082 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9084 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9086 [(set_attr "conds" "set")
9087 (set_attr "length" "16")])
9089 ;; If there is no dominance in the comparison, then we can still save an
9090 ;; instruction in the AND case, since we can know that the second compare
9091 ;; need only zero the value if false (if true, then the value is already
9093 (define_insn_and_split "*and_scc_scc_nodom"
9094 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9095 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9096 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9097 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9098 (match_operator:SI 6 "arm_comparison_operator"
9099 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9100 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9101 (clobber (reg:CC CC_REGNUM))]
9103 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9106 "TARGET_ARM && reload_completed"
9107 [(parallel [(set (match_dup 0)
9108 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9109 (clobber (reg:CC CC_REGNUM))])
9110 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9112 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9115 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9116 operands[4], operands[5]),
9118 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9120 [(set_attr "conds" "clob")
9121 (set_attr "length" "20")])
9124 [(set (reg:CC_NOOV CC_REGNUM)
9125 (compare:CC_NOOV (ior:SI
9126 (and:SI (match_operand:SI 0 "s_register_operand" "")
9128 (match_operator:SI 1 "arm_comparison_operator"
9129 [(match_operand:SI 2 "s_register_operand" "")
9130 (match_operand:SI 3 "arm_add_operand" "")]))
9132 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9135 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9137 (set (reg:CC_NOOV CC_REGNUM)
9138 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9143 [(set (reg:CC_NOOV CC_REGNUM)
9144 (compare:CC_NOOV (ior:SI
9145 (match_operator:SI 1 "arm_comparison_operator"
9146 [(match_operand:SI 2 "s_register_operand" "")
9147 (match_operand:SI 3 "arm_add_operand" "")])
9148 (and:SI (match_operand:SI 0 "s_register_operand" "")
9151 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9154 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9156 (set (reg:CC_NOOV CC_REGNUM)
9157 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9160 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9162 (define_insn "*negscc"
9163 [(set (match_operand:SI 0 "s_register_operand" "=r")
9164 (neg:SI (match_operator 3 "arm_comparison_operator"
9165 [(match_operand:SI 1 "s_register_operand" "r")
9166 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9167 (clobber (reg:CC CC_REGNUM))]
9170 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9171 return \"mov\\t%0, %1, asr #31\";
9173 if (GET_CODE (operands[3]) == NE)
9174 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9176 output_asm_insn (\"cmp\\t%1, %2\", operands);
9177 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9178 return \"mvn%d3\\t%0, #0\";
9180 [(set_attr "conds" "clob")
9181 (set_attr "length" "12")]
9184 (define_insn "movcond"
9185 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9187 (match_operator 5 "arm_comparison_operator"
9188 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9189 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9190 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9191 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9192 (clobber (reg:CC CC_REGNUM))]
9195 if (GET_CODE (operands[5]) == LT
9196 && (operands[4] == const0_rtx))
9198 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9200 if (operands[2] == const0_rtx)
9201 return \"and\\t%0, %1, %3, asr #31\";
9202 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9204 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9206 if (operands[1] == const0_rtx)
9207 return \"bic\\t%0, %2, %3, asr #31\";
9208 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9210 /* The only case that falls through to here is when both ops 1 & 2
9214 if (GET_CODE (operands[5]) == GE
9215 && (operands[4] == const0_rtx))
9217 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9219 if (operands[2] == const0_rtx)
9220 return \"bic\\t%0, %1, %3, asr #31\";
9221 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9223 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9225 if (operands[1] == const0_rtx)
9226 return \"and\\t%0, %2, %3, asr #31\";
9227 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9229 /* The only case that falls through to here is when both ops 1 & 2
9232 if (GET_CODE (operands[4]) == CONST_INT
9233 && !const_ok_for_arm (INTVAL (operands[4])))
9234 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9236 output_asm_insn (\"cmp\\t%3, %4\", operands);
9237 if (which_alternative != 0)
9238 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9239 if (which_alternative != 1)
9240 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9243 [(set_attr "conds" "clob")
9244 (set_attr "length" "8,8,12")]
9247 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9249 (define_insn "*ifcompare_plus_move"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9251 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9252 [(match_operand:SI 4 "s_register_operand" "r,r")
9253 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9255 (match_operand:SI 2 "s_register_operand" "r,r")
9256 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9257 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9258 (clobber (reg:CC CC_REGNUM))]
9261 [(set_attr "conds" "clob")
9262 (set_attr "length" "8,12")]
9265 (define_insn "*if_plus_move"
9266 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9268 (match_operator 4 "arm_comparison_operator"
9269 [(match_operand 5 "cc_register" "") (const_int 0)])
9271 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9272 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9273 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9277 sub%d4\\t%0, %2, #%n3
9278 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9279 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9280 [(set_attr "conds" "use")
9281 (set_attr "length" "4,4,8,8")
9282 (set_attr "type" "*,*,*,*")]
9285 (define_insn "*ifcompare_move_plus"
9286 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9287 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9288 [(match_operand:SI 4 "s_register_operand" "r,r")
9289 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9290 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9292 (match_operand:SI 2 "s_register_operand" "r,r")
9293 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9294 (clobber (reg:CC CC_REGNUM))]
9297 [(set_attr "conds" "clob")
9298 (set_attr "length" "8,12")]
9301 (define_insn "*if_move_plus"
9302 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9304 (match_operator 4 "arm_comparison_operator"
9305 [(match_operand 5 "cc_register" "") (const_int 0)])
9306 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9308 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9309 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9313 sub%D4\\t%0, %2, #%n3
9314 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9315 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9316 [(set_attr "conds" "use")
9317 (set_attr "length" "4,4,8,8")
9318 (set_attr "type" "*,*,*,*")]
9321 (define_insn "*ifcompare_arith_arith"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r")
9323 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9324 [(match_operand:SI 5 "s_register_operand" "r")
9325 (match_operand:SI 6 "arm_add_operand" "rIL")])
9326 (match_operator:SI 8 "shiftable_operator"
9327 [(match_operand:SI 1 "s_register_operand" "r")
9328 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9329 (match_operator:SI 7 "shiftable_operator"
9330 [(match_operand:SI 3 "s_register_operand" "r")
9331 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9332 (clobber (reg:CC CC_REGNUM))]
9335 [(set_attr "conds" "clob")
9336 (set_attr "length" "12")]
9339 (define_insn "*if_arith_arith"
9340 [(set (match_operand:SI 0 "s_register_operand" "=r")
9341 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9342 [(match_operand 8 "cc_register" "") (const_int 0)])
9343 (match_operator:SI 6 "shiftable_operator"
9344 [(match_operand:SI 1 "s_register_operand" "r")
9345 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9346 (match_operator:SI 7 "shiftable_operator"
9347 [(match_operand:SI 3 "s_register_operand" "r")
9348 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9350 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9351 [(set_attr "conds" "use")
9352 (set_attr "length" "8")]
9355 (define_insn "*ifcompare_arith_move"
9356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9357 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9358 [(match_operand:SI 2 "s_register_operand" "r,r")
9359 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9360 (match_operator:SI 7 "shiftable_operator"
9361 [(match_operand:SI 4 "s_register_operand" "r,r")
9362 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9363 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9364 (clobber (reg:CC CC_REGNUM))]
9367 /* If we have an operation where (op x 0) is the identity operation and
9368 the conditional operator is LT or GE and we are comparing against zero and
9369 everything is in registers then we can do this in two instructions. */
9370 if (operands[3] == const0_rtx
9371 && GET_CODE (operands[7]) != AND
9372 && GET_CODE (operands[5]) == REG
9373 && GET_CODE (operands[1]) == REG
9374 && REGNO (operands[1]) == REGNO (operands[4])
9375 && REGNO (operands[4]) != REGNO (operands[0]))
9377 if (GET_CODE (operands[6]) == LT)
9378 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9379 else if (GET_CODE (operands[6]) == GE)
9380 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9382 if (GET_CODE (operands[3]) == CONST_INT
9383 && !const_ok_for_arm (INTVAL (operands[3])))
9384 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9386 output_asm_insn (\"cmp\\t%2, %3\", operands);
9387 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9388 if (which_alternative != 0)
9389 return \"mov%D6\\t%0, %1\";
9392 [(set_attr "conds" "clob")
9393 (set_attr "length" "8,12")]
9396 (define_insn "*if_arith_move"
9397 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9398 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9399 [(match_operand 6 "cc_register" "") (const_int 0)])
9400 (match_operator:SI 5 "shiftable_operator"
9401 [(match_operand:SI 2 "s_register_operand" "r,r")
9402 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9403 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9407 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9408 [(set_attr "conds" "use")
9409 (set_attr "length" "4,8")
9410 (set_attr "type" "*,*")]
9413 (define_insn "*ifcompare_move_arith"
9414 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9415 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9416 [(match_operand:SI 4 "s_register_operand" "r,r")
9417 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9418 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9419 (match_operator:SI 7 "shiftable_operator"
9420 [(match_operand:SI 2 "s_register_operand" "r,r")
9421 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9422 (clobber (reg:CC CC_REGNUM))]
9425 /* If we have an operation where (op x 0) is the identity operation and
9426 the conditional operator is LT or GE and we are comparing against zero and
9427 everything is in registers then we can do this in two instructions */
9428 if (operands[5] == const0_rtx
9429 && GET_CODE (operands[7]) != AND
9430 && GET_CODE (operands[3]) == REG
9431 && GET_CODE (operands[1]) == REG
9432 && REGNO (operands[1]) == REGNO (operands[2])
9433 && REGNO (operands[2]) != REGNO (operands[0]))
9435 if (GET_CODE (operands[6]) == GE)
9436 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9437 else if (GET_CODE (operands[6]) == LT)
9438 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9441 if (GET_CODE (operands[5]) == CONST_INT
9442 && !const_ok_for_arm (INTVAL (operands[5])))
9443 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9445 output_asm_insn (\"cmp\\t%4, %5\", operands);
9447 if (which_alternative != 0)
9448 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9449 return \"%I7%D6\\t%0, %2, %3\";
9451 [(set_attr "conds" "clob")
9452 (set_attr "length" "8,12")]
9455 (define_insn "*if_move_arith"
9456 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9458 (match_operator 4 "arm_comparison_operator"
9459 [(match_operand 6 "cc_register" "") (const_int 0)])
9460 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9461 (match_operator:SI 5 "shiftable_operator"
9462 [(match_operand:SI 2 "s_register_operand" "r,r")
9463 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9467 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9468 [(set_attr "conds" "use")
9469 (set_attr "length" "4,8")
9470 (set_attr "type" "*,*")]
9473 (define_insn "*ifcompare_move_not"
9474 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9476 (match_operator 5 "arm_comparison_operator"
9477 [(match_operand:SI 3 "s_register_operand" "r,r")
9478 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9479 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9481 (match_operand:SI 2 "s_register_operand" "r,r"))))
9482 (clobber (reg:CC CC_REGNUM))]
9485 [(set_attr "conds" "clob")
9486 (set_attr "length" "8,12")]
9489 (define_insn "*if_move_not"
9490 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9492 (match_operator 4 "arm_comparison_operator"
9493 [(match_operand 3 "cc_register" "") (const_int 0)])
9494 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9495 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9499 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9500 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9501 [(set_attr "conds" "use")
9502 (set_attr "insn" "mvn")
9503 (set_attr "length" "4,8,8")]
9506 (define_insn "*ifcompare_not_move"
9507 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9509 (match_operator 5 "arm_comparison_operator"
9510 [(match_operand:SI 3 "s_register_operand" "r,r")
9511 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9513 (match_operand:SI 2 "s_register_operand" "r,r"))
9514 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9515 (clobber (reg:CC CC_REGNUM))]
9518 [(set_attr "conds" "clob")
9519 (set_attr "length" "8,12")]
9522 (define_insn "*if_not_move"
9523 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9525 (match_operator 4 "arm_comparison_operator"
9526 [(match_operand 3 "cc_register" "") (const_int 0)])
9527 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9528 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9532 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9533 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9534 [(set_attr "conds" "use")
9535 (set_attr "insn" "mvn")
9536 (set_attr "length" "4,8,8")]
9539 (define_insn "*ifcompare_shift_move"
9540 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9542 (match_operator 6 "arm_comparison_operator"
9543 [(match_operand:SI 4 "s_register_operand" "r,r")
9544 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9545 (match_operator:SI 7 "shift_operator"
9546 [(match_operand:SI 2 "s_register_operand" "r,r")
9547 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9548 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9549 (clobber (reg:CC CC_REGNUM))]
9552 [(set_attr "conds" "clob")
9553 (set_attr "length" "8,12")]
9556 (define_insn "*if_shift_move"
9557 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9559 (match_operator 5 "arm_comparison_operator"
9560 [(match_operand 6 "cc_register" "") (const_int 0)])
9561 (match_operator:SI 4 "shift_operator"
9562 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9563 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9564 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9568 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9569 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9570 [(set_attr "conds" "use")
9571 (set_attr "shift" "2")
9572 (set_attr "length" "4,8,8")
9573 (set_attr "insn" "mov")
9574 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9575 (const_string "alu_shift")
9576 (const_string "alu_shift_reg")))]
9579 (define_insn "*ifcompare_move_shift"
9580 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9582 (match_operator 6 "arm_comparison_operator"
9583 [(match_operand:SI 4 "s_register_operand" "r,r")
9584 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9585 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9586 (match_operator:SI 7 "shift_operator"
9587 [(match_operand:SI 2 "s_register_operand" "r,r")
9588 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9589 (clobber (reg:CC CC_REGNUM))]
9592 [(set_attr "conds" "clob")
9593 (set_attr "length" "8,12")]
9596 (define_insn "*if_move_shift"
9597 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9599 (match_operator 5 "arm_comparison_operator"
9600 [(match_operand 6 "cc_register" "") (const_int 0)])
9601 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9602 (match_operator:SI 4 "shift_operator"
9603 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9604 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9608 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9609 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9610 [(set_attr "conds" "use")
9611 (set_attr "shift" "2")
9612 (set_attr "length" "4,8,8")
9613 (set_attr "insn" "mov")
9614 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9615 (const_string "alu_shift")
9616 (const_string "alu_shift_reg")))]
9619 (define_insn "*ifcompare_shift_shift"
9620 [(set (match_operand:SI 0 "s_register_operand" "=r")
9622 (match_operator 7 "arm_comparison_operator"
9623 [(match_operand:SI 5 "s_register_operand" "r")
9624 (match_operand:SI 6 "arm_add_operand" "rIL")])
9625 (match_operator:SI 8 "shift_operator"
9626 [(match_operand:SI 1 "s_register_operand" "r")
9627 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9628 (match_operator:SI 9 "shift_operator"
9629 [(match_operand:SI 3 "s_register_operand" "r")
9630 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9631 (clobber (reg:CC CC_REGNUM))]
9634 [(set_attr "conds" "clob")
9635 (set_attr "length" "12")]
9638 (define_insn "*if_shift_shift"
9639 [(set (match_operand:SI 0 "s_register_operand" "=r")
9641 (match_operator 5 "arm_comparison_operator"
9642 [(match_operand 8 "cc_register" "") (const_int 0)])
9643 (match_operator:SI 6 "shift_operator"
9644 [(match_operand:SI 1 "s_register_operand" "r")
9645 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9646 (match_operator:SI 7 "shift_operator"
9647 [(match_operand:SI 3 "s_register_operand" "r")
9648 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9650 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9651 [(set_attr "conds" "use")
9652 (set_attr "shift" "1")
9653 (set_attr "length" "8")
9654 (set_attr "insn" "mov")
9655 (set (attr "type") (if_then_else
9656 (and (match_operand 2 "const_int_operand" "")
9657 (match_operand 4 "const_int_operand" ""))
9658 (const_string "alu_shift")
9659 (const_string "alu_shift_reg")))]
9662 (define_insn "*ifcompare_not_arith"
9663 [(set (match_operand:SI 0 "s_register_operand" "=r")
9665 (match_operator 6 "arm_comparison_operator"
9666 [(match_operand:SI 4 "s_register_operand" "r")
9667 (match_operand:SI 5 "arm_add_operand" "rIL")])
9668 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9669 (match_operator:SI 7 "shiftable_operator"
9670 [(match_operand:SI 2 "s_register_operand" "r")
9671 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9672 (clobber (reg:CC CC_REGNUM))]
9675 [(set_attr "conds" "clob")
9676 (set_attr "length" "12")]
9679 (define_insn "*if_not_arith"
9680 [(set (match_operand:SI 0 "s_register_operand" "=r")
9682 (match_operator 5 "arm_comparison_operator"
9683 [(match_operand 4 "cc_register" "") (const_int 0)])
9684 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9685 (match_operator:SI 6 "shiftable_operator"
9686 [(match_operand:SI 2 "s_register_operand" "r")
9687 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9689 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9690 [(set_attr "conds" "use")
9691 (set_attr "insn" "mvn")
9692 (set_attr "length" "8")]
9695 (define_insn "*ifcompare_arith_not"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r")
9698 (match_operator 6 "arm_comparison_operator"
9699 [(match_operand:SI 4 "s_register_operand" "r")
9700 (match_operand:SI 5 "arm_add_operand" "rIL")])
9701 (match_operator:SI 7 "shiftable_operator"
9702 [(match_operand:SI 2 "s_register_operand" "r")
9703 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9704 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9705 (clobber (reg:CC CC_REGNUM))]
9708 [(set_attr "conds" "clob")
9709 (set_attr "length" "12")]
9712 (define_insn "*if_arith_not"
9713 [(set (match_operand:SI 0 "s_register_operand" "=r")
9715 (match_operator 5 "arm_comparison_operator"
9716 [(match_operand 4 "cc_register" "") (const_int 0)])
9717 (match_operator:SI 6 "shiftable_operator"
9718 [(match_operand:SI 2 "s_register_operand" "r")
9719 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9720 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9722 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9723 [(set_attr "conds" "use")
9724 (set_attr "insn" "mvn")
9725 (set_attr "length" "8")]
9728 (define_insn "*ifcompare_neg_move"
9729 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9731 (match_operator 5 "arm_comparison_operator"
9732 [(match_operand:SI 3 "s_register_operand" "r,r")
9733 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9734 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9735 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9736 (clobber (reg:CC CC_REGNUM))]
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "8,12")]
9743 (define_insn "*if_neg_move"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9746 (match_operator 4 "arm_comparison_operator"
9747 [(match_operand 3 "cc_register" "") (const_int 0)])
9748 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9749 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9753 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9754 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9755 [(set_attr "conds" "use")
9756 (set_attr "length" "4,8,8")]
9759 (define_insn "*ifcompare_move_neg"
9760 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9762 (match_operator 5 "arm_comparison_operator"
9763 [(match_operand:SI 3 "s_register_operand" "r,r")
9764 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9765 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9766 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9767 (clobber (reg:CC CC_REGNUM))]
9770 [(set_attr "conds" "clob")
9771 (set_attr "length" "8,12")]
9774 (define_insn "*if_move_neg"
9775 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9777 (match_operator 4 "arm_comparison_operator"
9778 [(match_operand 3 "cc_register" "") (const_int 0)])
9779 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9780 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9784 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9785 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9786 [(set_attr "conds" "use")
9787 (set_attr "length" "4,8,8")]
9790 (define_insn "*arith_adjacentmem"
9791 [(set (match_operand:SI 0 "s_register_operand" "=r")
9792 (match_operator:SI 1 "shiftable_operator"
9793 [(match_operand:SI 2 "memory_operand" "m")
9794 (match_operand:SI 3 "memory_operand" "m")]))
9795 (clobber (match_scratch:SI 4 "=r"))]
9796 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9802 HOST_WIDE_INT val1 = 0, val2 = 0;
9804 if (REGNO (operands[0]) > REGNO (operands[4]))
9806 ldm[1] = operands[4];
9807 ldm[2] = operands[0];
9811 ldm[1] = operands[0];
9812 ldm[2] = operands[4];
9815 base_reg = XEXP (operands[2], 0);
9817 if (!REG_P (base_reg))
9819 val1 = INTVAL (XEXP (base_reg, 1));
9820 base_reg = XEXP (base_reg, 0);
9823 if (!REG_P (XEXP (operands[3], 0)))
9824 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9826 arith[0] = operands[0];
9827 arith[3] = operands[1];
9841 if (val1 !=0 && val2 != 0)
9845 if (val1 == 4 || val2 == 4)
9846 /* Other val must be 8, since we know they are adjacent and neither
9848 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9849 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9851 ldm[0] = ops[0] = operands[4];
9853 ops[2] = GEN_INT (val1);
9854 output_add_immediate (ops);
9856 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9858 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9862 /* Offset is out of range for a single add, so use two ldr. */
9865 ops[2] = GEN_INT (val1);
9866 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9868 ops[2] = GEN_INT (val2);
9869 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9875 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9877 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9882 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9884 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9886 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9889 [(set_attr "length" "12")
9890 (set_attr "predicable" "yes")
9891 (set_attr "type" "load1")]
9894 ; This pattern is never tried by combine, so do it as a peephole
9897 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9898 (match_operand:SI 1 "arm_general_register_operand" ""))
9899 (set (reg:CC CC_REGNUM)
9900 (compare:CC (match_dup 1) (const_int 0)))]
9902 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9903 (set (match_dup 0) (match_dup 1))])]
9908 [(set (match_operand:SI 0 "s_register_operand" "")
9909 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9911 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9912 [(match_operand:SI 3 "s_register_operand" "")
9913 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9914 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9916 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9917 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9922 ;; This split can be used because CC_Z mode implies that the following
9923 ;; branch will be an equality, or an unsigned inequality, so the sign
9924 ;; extension is not needed.
9927 [(set (reg:CC_Z CC_REGNUM)
9929 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9931 (match_operand 1 "const_int_operand" "")))
9932 (clobber (match_scratch:SI 2 ""))]
9934 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9935 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9936 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9937 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9939 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9942 ;; ??? Check the patterns above for Thumb-2 usefulness
9944 (define_expand "prologue"
9945 [(clobber (const_int 0))]
9948 arm_expand_prologue ();
9950 thumb1_expand_prologue ();
9955 (define_expand "epilogue"
9956 [(clobber (const_int 0))]
9959 if (crtl->calls_eh_return)
9960 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9962 thumb1_expand_epilogue ();
9963 else if (USE_RETURN_INSN (FALSE))
9965 emit_jump_insn (gen_return ());
9968 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9970 gen_rtx_RETURN (VOIDmode)),
9976 ;; Note - although unspec_volatile's USE all hard registers,
9977 ;; USEs are ignored after relaod has completed. Thus we need
9978 ;; to add an unspec of the link register to ensure that flow
9979 ;; does not think that it is unused by the sibcall branch that
9980 ;; will replace the standard function epilogue.
9981 (define_insn "sibcall_epilogue"
9982 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9983 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9986 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9987 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9988 return arm_output_epilogue (next_nonnote_insn (insn));
9990 ;; Length is absolute worst case
9991 [(set_attr "length" "44")
9992 (set_attr "type" "block")
9993 ;; We don't clobber the conditions, but the potential length of this
9994 ;; operation is sufficient to make conditionalizing the sequence
9995 ;; unlikely to be profitable.
9996 (set_attr "conds" "clob")]
9999 (define_insn "*epilogue_insns"
10000 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10004 return arm_output_epilogue (NULL);
10005 else /* TARGET_THUMB1 */
10006 return thumb_unexpanded_epilogue ();
10008 ; Length is absolute worst case
10009 [(set_attr "length" "44")
10010 (set_attr "type" "block")
10011 ;; We don't clobber the conditions, but the potential length of this
10012 ;; operation is sufficient to make conditionalizing the sequence
10013 ;; unlikely to be profitable.
10014 (set_attr "conds" "clob")]
10017 (define_expand "eh_epilogue"
10018 [(use (match_operand:SI 0 "register_operand" ""))
10019 (use (match_operand:SI 1 "register_operand" ""))
10020 (use (match_operand:SI 2 "register_operand" ""))]
10024 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10025 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10027 rtx ra = gen_rtx_REG (Pmode, 2);
10029 emit_move_insn (ra, operands[2]);
10032 /* This is a hack -- we may have crystalized the function type too
10034 cfun->machine->func_type = 0;
10038 ;; This split is only used during output to reduce the number of patterns
10039 ;; that need assembler instructions adding to them. We allowed the setting
10040 ;; of the conditions to be implicit during rtl generation so that
10041 ;; the conditional compare patterns would work. However this conflicts to
10042 ;; some extent with the conditional data operations, so we have to split them
10045 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10046 ;; conditional execution sufficient?
10049 [(set (match_operand:SI 0 "s_register_operand" "")
10050 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10051 [(match_operand 2 "" "") (match_operand 3 "" "")])
10053 (match_operand 4 "" "")))
10054 (clobber (reg:CC CC_REGNUM))]
10055 "TARGET_ARM && reload_completed"
10056 [(set (match_dup 5) (match_dup 6))
10057 (cond_exec (match_dup 7)
10058 (set (match_dup 0) (match_dup 4)))]
10061 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10062 operands[2], operands[3]);
10063 enum rtx_code rc = GET_CODE (operands[1]);
10065 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10066 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10067 if (mode == CCFPmode || mode == CCFPEmode)
10068 rc = reverse_condition_maybe_unordered (rc);
10070 rc = reverse_condition (rc);
10072 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10077 [(set (match_operand:SI 0 "s_register_operand" "")
10078 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10079 [(match_operand 2 "" "") (match_operand 3 "" "")])
10080 (match_operand 4 "" "")
10082 (clobber (reg:CC CC_REGNUM))]
10083 "TARGET_ARM && reload_completed"
10084 [(set (match_dup 5) (match_dup 6))
10085 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10086 (set (match_dup 0) (match_dup 4)))]
10089 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10090 operands[2], operands[3]);
10092 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10093 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10098 [(set (match_operand:SI 0 "s_register_operand" "")
10099 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10100 [(match_operand 2 "" "") (match_operand 3 "" "")])
10101 (match_operand 4 "" "")
10102 (match_operand 5 "" "")))
10103 (clobber (reg:CC CC_REGNUM))]
10104 "TARGET_ARM && reload_completed"
10105 [(set (match_dup 6) (match_dup 7))
10106 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10107 (set (match_dup 0) (match_dup 4)))
10108 (cond_exec (match_dup 8)
10109 (set (match_dup 0) (match_dup 5)))]
10112 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10113 operands[2], operands[3]);
10114 enum rtx_code rc = GET_CODE (operands[1]);
10116 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10117 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10118 if (mode == CCFPmode || mode == CCFPEmode)
10119 rc = reverse_condition_maybe_unordered (rc);
10121 rc = reverse_condition (rc);
10123 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10128 [(set (match_operand:SI 0 "s_register_operand" "")
10129 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10130 [(match_operand:SI 2 "s_register_operand" "")
10131 (match_operand:SI 3 "arm_add_operand" "")])
10132 (match_operand:SI 4 "arm_rhs_operand" "")
10134 (match_operand:SI 5 "s_register_operand" ""))))
10135 (clobber (reg:CC CC_REGNUM))]
10136 "TARGET_ARM && reload_completed"
10137 [(set (match_dup 6) (match_dup 7))
10138 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10139 (set (match_dup 0) (match_dup 4)))
10140 (cond_exec (match_dup 8)
10141 (set (match_dup 0) (not:SI (match_dup 5))))]
10144 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10145 operands[2], operands[3]);
10146 enum rtx_code rc = GET_CODE (operands[1]);
10148 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10149 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10150 if (mode == CCFPmode || mode == CCFPEmode)
10151 rc = reverse_condition_maybe_unordered (rc);
10153 rc = reverse_condition (rc);
10155 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10159 (define_insn "*cond_move_not"
10160 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10161 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10162 [(match_operand 3 "cc_register" "") (const_int 0)])
10163 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10165 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10169 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10170 [(set_attr "conds" "use")
10171 (set_attr "insn" "mvn")
10172 (set_attr "length" "4,8")]
10175 ;; The next two patterns occur when an AND operation is followed by a
10176 ;; scc insn sequence
10178 (define_insn "*sign_extract_onebit"
10179 [(set (match_operand:SI 0 "s_register_operand" "=r")
10180 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10182 (match_operand:SI 2 "const_int_operand" "n")))
10183 (clobber (reg:CC CC_REGNUM))]
10186 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10187 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10188 return \"mvnne\\t%0, #0\";
10190 [(set_attr "conds" "clob")
10191 (set_attr "length" "8")]
10194 (define_insn "*not_signextract_onebit"
10195 [(set (match_operand:SI 0 "s_register_operand" "=r")
10197 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10199 (match_operand:SI 2 "const_int_operand" "n"))))
10200 (clobber (reg:CC CC_REGNUM))]
10203 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10204 output_asm_insn (\"tst\\t%1, %2\", operands);
10205 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10206 return \"movne\\t%0, #0\";
10208 [(set_attr "conds" "clob")
10209 (set_attr "length" "12")]
10211 ;; ??? The above patterns need auditing for Thumb-2
10213 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10214 ;; expressions. For simplicity, the first register is also in the unspec
10216 (define_insn "*push_multi"
10217 [(match_parallel 2 "multi_register_push"
10218 [(set (match_operand:BLK 0 "memory_operand" "=m")
10219 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10220 UNSPEC_PUSH_MULT))])]
10224 int num_saves = XVECLEN (operands[2], 0);
10226 /* For the StrongARM at least it is faster to
10227 use STR to store only a single register.
10228 In Thumb mode always use push, and the assembler will pick
10229 something appropriate. */
10230 if (num_saves == 1 && TARGET_ARM)
10231 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10238 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10240 strcpy (pattern, \"push\\t{%1\");
10242 for (i = 1; i < num_saves; i++)
10244 strcat (pattern, \", %|\");
10246 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10249 strcat (pattern, \"}\");
10250 output_asm_insn (pattern, operands);
10255 [(set_attr "type" "store4")]
10258 (define_insn "stack_tie"
10259 [(set (mem:BLK (scratch))
10260 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10261 (match_operand:SI 1 "s_register_operand" "rk")]
10265 [(set_attr "length" "0")]
10268 ;; Similarly for the floating point registers
10269 (define_insn "*push_fp_multi"
10270 [(match_parallel 2 "multi_register_push"
10271 [(set (match_operand:BLK 0 "memory_operand" "=m")
10272 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10273 UNSPEC_PUSH_MULT))])]
10274 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10279 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10280 output_asm_insn (pattern, operands);
10283 [(set_attr "type" "f_store")]
10286 ;; Special patterns for dealing with the constant pool
10288 (define_insn "align_4"
10289 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10292 assemble_align (32);
10297 (define_insn "align_8"
10298 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10301 assemble_align (64);
10306 (define_insn "consttable_end"
10307 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10310 making_const_table = FALSE;
10315 (define_insn "consttable_1"
10316 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10319 making_const_table = TRUE;
10320 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10321 assemble_zeros (3);
10324 [(set_attr "length" "4")]
10327 (define_insn "consttable_2"
10328 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10331 making_const_table = TRUE;
10332 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10333 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10334 assemble_zeros (2);
10337 [(set_attr "length" "4")]
10340 (define_insn "consttable_4"
10341 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10345 rtx x = operands[0];
10346 making_const_table = TRUE;
10347 switch (GET_MODE_CLASS (GET_MODE (x)))
10350 if (GET_MODE (x) == HFmode)
10351 arm_emit_fp16_const (x);
10355 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10356 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10360 /* XXX: Sometimes gcc does something really dumb and ends up with
10361 a HIGH in a constant pool entry, usually because it's trying to
10362 load into a VFP register. We know this will always be used in
10363 combination with a LO_SUM which ignores the high bits, so just
10364 strip off the HIGH. */
10365 if (GET_CODE (x) == HIGH)
10367 assemble_integer (x, 4, BITS_PER_WORD, 1);
10368 mark_symbol_refs_as_used (x);
10373 [(set_attr "length" "4")]
10376 (define_insn "consttable_8"
10377 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10381 making_const_table = TRUE;
10382 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10387 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10388 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10392 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10397 [(set_attr "length" "8")]
10400 (define_insn "consttable_16"
10401 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10405 making_const_table = TRUE;
10406 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10411 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10412 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10416 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10421 [(set_attr "length" "16")]
10424 ;; Miscellaneous Thumb patterns
10426 (define_expand "tablejump"
10427 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10428 (use (label_ref (match_operand 1 "" "")))])]
10433 /* Hopefully, CSE will eliminate this copy. */
10434 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10435 rtx reg2 = gen_reg_rtx (SImode);
10437 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10438 operands[0] = reg2;
10443 ;; NB never uses BX.
10444 (define_insn "*thumb1_tablejump"
10445 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10446 (use (label_ref (match_operand 1 "" "")))]
10449 [(set_attr "length" "2")]
10452 ;; V5 Instructions,
10454 (define_insn "clzsi2"
10455 [(set (match_operand:SI 0 "s_register_operand" "=r")
10456 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10457 "TARGET_32BIT && arm_arch5"
10459 [(set_attr "predicable" "yes")
10460 (set_attr "insn" "clz")])
10462 (define_insn "rbitsi2"
10463 [(set (match_operand:SI 0 "s_register_operand" "=r")
10464 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10465 "TARGET_32BIT && arm_arch_thumb2"
10467 [(set_attr "predicable" "yes")
10468 (set_attr "insn" "clz")])
10470 (define_expand "ctzsi2"
10471 [(set (match_operand:SI 0 "s_register_operand" "")
10472 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10473 "TARGET_32BIT && arm_arch_thumb2"
10476 rtx tmp = gen_reg_rtx (SImode);
10477 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10478 emit_insn (gen_clzsi2 (operands[0], tmp));
10484 ;; V5E instructions.
10486 (define_insn "prefetch"
10487 [(prefetch (match_operand:SI 0 "address_operand" "p")
10488 (match_operand:SI 1 "" "")
10489 (match_operand:SI 2 "" ""))]
10490 "TARGET_32BIT && arm_arch5e"
10493 ;; General predication pattern
10496 [(match_operator 0 "arm_comparison_operator"
10497 [(match_operand 1 "cc_register" "")
10503 (define_insn "prologue_use"
10504 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10506 "%@ %0 needed for prologue"
10507 [(set_attr "length" "0")]
10511 ;; Patterns for exception handling
10513 (define_expand "eh_return"
10514 [(use (match_operand 0 "general_operand" ""))]
10519 emit_insn (gen_arm_eh_return (operands[0]));
10521 emit_insn (gen_thumb_eh_return (operands[0]));
10526 ;; We can't expand this before we know where the link register is stored.
10527 (define_insn_and_split "arm_eh_return"
10528 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10530 (clobber (match_scratch:SI 1 "=&r"))]
10533 "&& reload_completed"
10537 arm_set_return_address (operands[0], operands[1]);
10542 (define_insn_and_split "thumb_eh_return"
10543 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10545 (clobber (match_scratch:SI 1 "=&l"))]
10548 "&& reload_completed"
10552 thumb_set_return_address (operands[0], operands[1]);
10560 (define_insn "load_tp_hard"
10561 [(set (match_operand:SI 0 "register_operand" "=r")
10562 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10564 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10565 [(set_attr "predicable" "yes")]
10568 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10569 (define_insn "load_tp_soft"
10570 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10571 (clobber (reg:SI LR_REGNUM))
10572 (clobber (reg:SI IP_REGNUM))
10573 (clobber (reg:CC CC_REGNUM))]
10575 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10576 [(set_attr "conds" "clob")]
10579 (define_insn "*arm_movtas_ze"
10580 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10583 (match_operand:SI 1 "const_int_operand" ""))]
10586 [(set_attr "predicable" "yes")
10587 (set_attr "length" "4")]
10590 (define_insn "*arm_rev"
10591 [(set (match_operand:SI 0 "s_register_operand" "=r")
10592 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10593 "TARGET_32BIT && arm_arch6"
10595 [(set_attr "predicable" "yes")
10596 (set_attr "length" "4")]
10599 (define_insn "*thumb1_rev"
10600 [(set (match_operand:SI 0 "s_register_operand" "=l")
10601 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10602 "TARGET_THUMB1 && arm_arch6"
10604 [(set_attr "length" "2")]
10607 (define_expand "arm_legacy_rev"
10608 [(set (match_operand:SI 2 "s_register_operand" "")
10609 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10613 (lshiftrt:SI (match_dup 2)
10615 (set (match_operand:SI 3 "s_register_operand" "")
10616 (rotatert:SI (match_dup 1)
10619 (and:SI (match_dup 2)
10620 (const_int -65281)))
10621 (set (match_operand:SI 0 "s_register_operand" "")
10622 (xor:SI (match_dup 3)
10628 ;; Reuse temporaries to keep register pressure down.
10629 (define_expand "thumb_legacy_rev"
10630 [(set (match_operand:SI 2 "s_register_operand" "")
10631 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10633 (set (match_operand:SI 3 "s_register_operand" "")
10634 (lshiftrt:SI (match_dup 1)
10637 (ior:SI (match_dup 3)
10639 (set (match_operand:SI 4 "s_register_operand" "")
10641 (set (match_operand:SI 5 "s_register_operand" "")
10642 (rotatert:SI (match_dup 1)
10645 (ashift:SI (match_dup 5)
10648 (lshiftrt:SI (match_dup 5)
10651 (ior:SI (match_dup 5)
10654 (rotatert:SI (match_dup 5)
10656 (set (match_operand:SI 0 "s_register_operand" "")
10657 (ior:SI (match_dup 5)
10663 (define_expand "bswapsi2"
10664 [(set (match_operand:SI 0 "s_register_operand" "=r")
10665 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10666 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10670 rtx op2 = gen_reg_rtx (SImode);
10671 rtx op3 = gen_reg_rtx (SImode);
10675 rtx op4 = gen_reg_rtx (SImode);
10676 rtx op5 = gen_reg_rtx (SImode);
10678 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10679 op2, op3, op4, op5));
10683 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10692 ;; Load the load/store multiple patterns
10693 (include "ldmstm.md")
10694 ;; Load the FPA co-processor patterns
10696 ;; Load the Maverick co-processor patterns
10697 (include "cirrus.md")
10698 ;; Vector bits common to IWMMXT and Neon
10699 (include "vec-common.md")
10700 ;; Load the Intel Wireless Multimedia Extension patterns
10701 (include "iwmmxt.md")
10702 ;; Load the VFP co-processor patterns
10704 ;; Thumb-2 patterns
10705 (include "thumb2.md")
10707 (include "neon.md")
10708 ;; Synchronization Primitives
10709 (include "sync.md")