1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
106 ;; UNSPEC_VOLATILE Usage:
109 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
111 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
112 ; instruction epilogue sequence that isn't expanded
113 ; into normal RTL. Used for both normal and sibcall
115 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
116 ; for inlined constants.
117 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
119 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
121 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
123 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
125 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
127 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
129 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
130 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
131 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
132 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
133 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
134 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
135 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
140 ;;---------------------------------------------------------------------------
143 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
144 ; generating ARM code. This is used to control the length of some insn
145 ; patterns that share the same RTL in both ARM and Thumb code.
146 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
148 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
149 ; scheduling decisions for the load unit and the multiplier.
150 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
152 ; IS_XSCALE is set to 'yes' when compiling for XScale.
153 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
155 ;; Operand number of an input operand that is shifted. Zero if the
156 ;; given instruction does not shift one of its input operands.
157 (define_attr "shift" "" (const_int 0))
159 ; Floating Point Unit. If we only have floating point emulation, then there
160 ; is no point in scheduling the floating point insns. (Well, for best
161 ; performance we should try and group them together).
162 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon,neon_fp16"
163 (const (symbol_ref "arm_fpu_attr")))
165 ; LENGTH of an instruction (in bytes)
166 (define_attr "length" "" (const_int 4))
168 ; POOL_RANGE is how far away from a constant pool entry that this insn
169 ; can be placed. If the distance is zero, then this insn will never
170 ; reference the pool.
171 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
172 ; before its address.
173 (define_attr "pool_range" "" (const_int 0))
174 (define_attr "neg_pool_range" "" (const_int 0))
176 ; An assembler sequence may clobber the condition codes without us knowing.
177 ; If such an insn references the pool, then we have no way of knowing how,
178 ; so use the most conservative value for pool_range.
179 (define_asm_attributes
180 [(set_attr "conds" "clob")
181 (set_attr "length" "4")
182 (set_attr "pool_range" "250")])
184 ;; The instruction used to implement a particular pattern. This
185 ;; information is used by pipeline descriptions to provide accurate
186 ;; scheduling information.
189 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
190 (const_string "other"))
192 ; TYPE attribute is used to detect floating point instructions which, if
193 ; running on a co-processor can run in parallel with other, basic instructions
194 ; If write-buffer scheduling is enabled then it can also be used in the
195 ; scheduling of writes.
197 ; Classification of each insn
198 ; Note: vfp.md has different meanings for some of these, and some further
199 ; types as well. See that file for details.
200 ; alu any alu instruction that doesn't hit memory or fp
201 ; regs or have a shifted source operand
202 ; alu_shift any data instruction that doesn't hit memory or fp
203 ; regs, but has a source operand shifted by a constant
204 ; alu_shift_reg any data instruction that doesn't hit memory or fp
205 ; regs, but has a source operand shifted by a register value
206 ; mult a multiply instruction
207 ; block blockage insn, this blocks all functional units
208 ; float a floating point arithmetic operation (subject to expansion)
209 ; fdivd DFmode floating point division
210 ; fdivs SFmode floating point division
211 ; fmul Floating point multiply
212 ; ffmul Fast floating point multiply
213 ; farith Floating point arithmetic (4 cycle)
214 ; ffarith Fast floating point arithmetic (2 cycle)
215 ; float_em a floating point arithmetic operation that is normally emulated
216 ; even on a machine with an fpa.
217 ; f_load a floating point load from memory
218 ; f_store a floating point store to memory
219 ; f_load[sd] single/double load from memory
220 ; f_store[sd] single/double store to memory
221 ; f_flag a transfer of co-processor flags to the CPSR
222 ; f_mem_r a transfer of a floating point register to a real reg via mem
223 ; r_mem_f the reverse of f_mem_r
224 ; f_2_r fast transfer float to arm (no memory needed)
225 ; r_2_f fast transfer arm to float
226 ; f_cvt convert floating<->integral
228 ; call a subroutine call
229 ; load_byte load byte(s) from memory to arm registers
230 ; load1 load 1 word from memory to arm registers
231 ; load2 load 2 words from memory to arm registers
232 ; load3 load 3 words from memory to arm registers
233 ; load4 load 4 words from memory to arm registers
234 ; store store 1 word to memory from arm registers
235 ; store2 store 2 words
236 ; store3 store 3 words
237 ; store4 store 4 (or more) words
238 ; Additions for Cirrus Maverick co-processor:
239 ; mav_farith Floating point arithmetic (4 cycle)
240 ; mav_dmult Double multiplies (7 cycle)
244 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
246 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
247 (const_string "mult")
248 (const_string "alu")))
250 ; Load scheduling, set from the arm_ld_sched variable
251 ; initialized by arm_override_options()
252 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
254 ;; Classification of NEON instructions for scheduling purposes.
255 ;; Do not set this attribute and the "type" attribute together in
256 ;; any one instruction pattern.
257 (define_attr "neon_type"
268 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
269 neon_mul_qqq_8_16_32_ddd_32,\
270 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
271 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
273 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
274 neon_mla_qqq_32_qqd_32_scalar,\
275 neon_mul_ddd_16_scalar_32_16_long_scalar,\
276 neon_mul_qqd_32_scalar,\
277 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
282 neon_vqshl_vrshl_vqrshl_qqq,\
284 neon_fp_vadd_ddd_vabs_dd,\
285 neon_fp_vadd_qqq_vabs_qq,\
291 neon_fp_vmla_ddd_scalar,\
292 neon_fp_vmla_qqq_scalar,\
293 neon_fp_vrecps_vrsqrts_ddd,\
294 neon_fp_vrecps_vrsqrts_qqq,\
302 neon_vld2_2_regs_vld1_vld2_all_lanes,\
305 neon_vst1_1_2_regs_vst2_2_regs,\
307 neon_vst2_4_regs_vst3_vst4,\
309 neon_vld1_vld2_lane,\
310 neon_vld3_vld4_lane,\
311 neon_vst1_vst2_lane,\
312 neon_vst3_vst4_lane,\
313 neon_vld3_vld4_all_lanes,\
321 (const_string "none"))
323 ; condition codes: this one is used by final_prescan_insn to speed up
324 ; conditionalizing instructions. It saves having to scan the rtl to see if
325 ; it uses or alters the condition codes.
327 ; USE means that the condition codes are used by the insn in the process of
328 ; outputting code, this means (at present) that we can't use the insn in
331 ; SET means that the purpose of the insn is to set the condition codes in a
332 ; well defined manner.
334 ; CLOB means that the condition codes are altered in an undefined manner, if
335 ; they are altered at all
337 ; UNCONDITIONAL means the instions can not be conditionally executed.
339 ; NOCOND means that the condition codes are neither altered nor affect the
340 ; output of this insn
342 (define_attr "conds" "use,set,clob,unconditional,nocond"
343 (if_then_else (eq_attr "type" "call")
344 (const_string "clob")
345 (if_then_else (eq_attr "neon_type" "none")
346 (const_string "nocond")
347 (const_string "unconditional"))))
349 ; Predicable means that the insn can be conditionally executed based on
350 ; an automatically added predicate (additional patterns are generated by
351 ; gen...). We default to 'no' because no Thumb patterns match this rule
352 ; and not all ARM patterns do.
353 (define_attr "predicable" "no,yes" (const_string "no"))
355 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
356 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
357 ; suffer blockages enough to warrant modelling this (and it can adversely
358 ; affect the schedule).
359 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
361 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
362 ; to stall the processor. Used with model_wbuf above.
363 (define_attr "write_conflict" "no,yes"
364 (if_then_else (eq_attr "type"
365 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
367 (const_string "no")))
369 ; Classify the insns into those that take one cycle and those that take more
370 ; than one on the main cpu execution unit.
371 (define_attr "core_cycles" "single,multi"
372 (if_then_else (eq_attr "type"
373 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
374 (const_string "single")
375 (const_string "multi")))
377 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
378 ;; distant label. Only applicable to Thumb code.
379 (define_attr "far_jump" "yes,no" (const_string "no"))
382 ;; The number of machine instructions this pattern expands to.
383 ;; Used for Thumb-2 conditional execution.
384 (define_attr "ce_count" "" (const_int 1))
386 ;;---------------------------------------------------------------------------
389 ; A list of modes that are exactly 64 bits in size. We use this to expand
390 ; some splits that are the same for all modes when operating on ARM
392 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
394 ;;---------------------------------------------------------------------------
397 (include "predicates.md")
398 (include "constraints.md")
400 ;;---------------------------------------------------------------------------
401 ;; Pipeline descriptions
403 ;; Processor type. This is created automatically from arm-cores.def.
404 (include "arm-tune.md")
406 (define_attr "tune_cortexr4" "yes,no"
408 (eq_attr "tune" "cortexr4,cortexr4f")
410 (const_string "no"))))
412 ;; True if the generic scheduling description should be used.
414 (define_attr "generic_sched" "yes,no"
416 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
417 (eq_attr "tune_cortexr4" "yes"))
419 (const_string "yes"))))
421 (define_attr "generic_vfp" "yes,no"
423 (and (eq_attr "fpu" "vfp")
424 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
425 (eq_attr "tune_cortexr4" "no"))
427 (const_string "no"))))
429 (include "arm-generic.md")
430 (include "arm926ejs.md")
431 (include "arm1020e.md")
432 (include "arm1026ejs.md")
433 (include "arm1136jfs.md")
434 (include "cortex-a8.md")
435 (include "cortex-a9.md")
436 (include "cortex-r4.md")
437 (include "cortex-r4f.md")
441 ;;---------------------------------------------------------------------------
446 ;; Note: For DImode insns, there is normally no reason why operands should
447 ;; not be in the same register, what we don't want is for something being
448 ;; written to partially overlap something that is an input.
449 ;; Cirrus 64bit additions should not be split because we have a native
450 ;; 64bit addition instructions.
452 (define_expand "adddi3"
454 [(set (match_operand:DI 0 "s_register_operand" "")
455 (plus:DI (match_operand:DI 1 "s_register_operand" "")
456 (match_operand:DI 2 "s_register_operand" "")))
457 (clobber (reg:CC CC_REGNUM))])]
460 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
462 if (!cirrus_fp_register (operands[0], DImode))
463 operands[0] = force_reg (DImode, operands[0]);
464 if (!cirrus_fp_register (operands[1], DImode))
465 operands[1] = force_reg (DImode, operands[1]);
466 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
472 if (GET_CODE (operands[1]) != REG)
473 operands[1] = force_reg (DImode, operands[1]);
474 if (GET_CODE (operands[2]) != REG)
475 operands[2] = force_reg (DImode, operands[2]);
480 (define_insn "*thumb1_adddi3"
481 [(set (match_operand:DI 0 "register_operand" "=l")
482 (plus:DI (match_operand:DI 1 "register_operand" "%0")
483 (match_operand:DI 2 "register_operand" "l")))
484 (clobber (reg:CC CC_REGNUM))
487 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
488 [(set_attr "length" "4")]
491 (define_insn_and_split "*arm_adddi3"
492 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
493 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
494 (match_operand:DI 2 "s_register_operand" "r, 0")))
495 (clobber (reg:CC CC_REGNUM))]
496 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
498 "TARGET_32BIT && reload_completed"
499 [(parallel [(set (reg:CC_C CC_REGNUM)
500 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
502 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
503 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
504 (plus:SI (match_dup 4) (match_dup 5))))]
507 operands[3] = gen_highpart (SImode, operands[0]);
508 operands[0] = gen_lowpart (SImode, operands[0]);
509 operands[4] = gen_highpart (SImode, operands[1]);
510 operands[1] = gen_lowpart (SImode, operands[1]);
511 operands[5] = gen_highpart (SImode, operands[2]);
512 operands[2] = gen_lowpart (SImode, operands[2]);
514 [(set_attr "conds" "clob")
515 (set_attr "length" "8")]
518 (define_insn_and_split "*adddi_sesidi_di"
519 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
520 (plus:DI (sign_extend:DI
521 (match_operand:SI 2 "s_register_operand" "r,r"))
522 (match_operand:DI 1 "s_register_operand" "r,0")))
523 (clobber (reg:CC CC_REGNUM))]
524 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
526 "TARGET_32BIT && reload_completed"
527 [(parallel [(set (reg:CC_C CC_REGNUM)
528 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
530 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
531 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
532 (plus:SI (ashiftrt:SI (match_dup 2)
537 operands[3] = gen_highpart (SImode, operands[0]);
538 operands[0] = gen_lowpart (SImode, operands[0]);
539 operands[4] = gen_highpart (SImode, operands[1]);
540 operands[1] = gen_lowpart (SImode, operands[1]);
541 operands[2] = gen_lowpart (SImode, operands[2]);
543 [(set_attr "conds" "clob")
544 (set_attr "length" "8")]
547 (define_insn_and_split "*adddi_zesidi_di"
548 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
549 (plus:DI (zero_extend:DI
550 (match_operand:SI 2 "s_register_operand" "r,r"))
551 (match_operand:DI 1 "s_register_operand" "r,0")))
552 (clobber (reg:CC CC_REGNUM))]
553 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
555 "TARGET_32BIT && reload_completed"
556 [(parallel [(set (reg:CC_C CC_REGNUM)
557 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
559 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
560 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
561 (plus:SI (match_dup 4) (const_int 0))))]
564 operands[3] = gen_highpart (SImode, operands[0]);
565 operands[0] = gen_lowpart (SImode, operands[0]);
566 operands[4] = gen_highpart (SImode, operands[1]);
567 operands[1] = gen_lowpart (SImode, operands[1]);
568 operands[2] = gen_lowpart (SImode, operands[2]);
570 [(set_attr "conds" "clob")
571 (set_attr "length" "8")]
574 (define_expand "addsi3"
575 [(set (match_operand:SI 0 "s_register_operand" "")
576 (plus:SI (match_operand:SI 1 "s_register_operand" "")
577 (match_operand:SI 2 "reg_or_int_operand" "")))]
580 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
582 arm_split_constant (PLUS, SImode, NULL_RTX,
583 INTVAL (operands[2]), operands[0], operands[1],
584 optimize && can_create_pseudo_p ());
590 ; If there is a scratch available, this will be faster than synthesizing the
593 [(match_scratch:SI 3 "r")
594 (set (match_operand:SI 0 "arm_general_register_operand" "")
595 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
596 (match_operand:SI 2 "const_int_operand" "")))]
598 !(const_ok_for_arm (INTVAL (operands[2]))
599 || const_ok_for_arm (-INTVAL (operands[2])))
600 && const_ok_for_arm (~INTVAL (operands[2]))"
601 [(set (match_dup 3) (match_dup 2))
602 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
606 ;; The r/r/k alternative is required when reloading the address
607 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
608 ;; put the duplicated register first, and not try the commutative version.
609 (define_insn_and_split "*arm_addsi3"
610 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
611 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
612 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
622 && GET_CODE (operands[2]) == CONST_INT
623 && !(const_ok_for_arm (INTVAL (operands[2]))
624 || const_ok_for_arm (-INTVAL (operands[2])))
625 && (reload_completed || !arm_eliminable_register (operands[1]))"
626 [(clobber (const_int 0))]
628 arm_split_constant (PLUS, SImode, curr_insn,
629 INTVAL (operands[2]), operands[0],
633 [(set_attr "length" "4,4,4,4,4,16")
634 (set_attr "predicable" "yes")]
637 ;; Register group 'k' is a single register group containing only the stack
638 ;; register. Trying to reload it will always fail catastrophically,
639 ;; so never allow those alternatives to match if reloading is needed.
641 (define_insn_and_split "*thumb1_addsi3"
642 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
643 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
644 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
647 static const char * const asms[] =
649 \"add\\t%0, %0, %2\",
650 \"sub\\t%0, %0, #%n2\",
651 \"add\\t%0, %1, %2\",
652 \"add\\t%0, %0, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %1, %2\",
655 \"add\\t%0, %1, %2\",
659 if ((which_alternative == 2 || which_alternative == 6)
660 && GET_CODE (operands[2]) == CONST_INT
661 && INTVAL (operands[2]) < 0)
662 return \"sub\\t%0, %1, #%n2\";
663 return asms[which_alternative];
665 "&& reload_completed && CONST_INT_P (operands[2])
666 && operands[1] != stack_pointer_rtx
667 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
668 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
669 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
671 HOST_WIDE_INT offset = INTVAL (operands[2]);
674 else if (offset < -255)
677 operands[3] = GEN_INT (offset);
678 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
680 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
683 ;; Reloading and elimination of the frame pointer can
684 ;; sometimes cause this optimization to be missed.
686 [(set (match_operand:SI 0 "arm_general_register_operand" "")
687 (match_operand:SI 1 "const_int_operand" ""))
689 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
691 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
692 && (INTVAL (operands[1]) & 3) == 0"
693 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
697 ;; ??? Make Thumb-2 variants which prefer low regs
698 (define_insn "*addsi3_compare0"
699 [(set (reg:CC_NOOV CC_REGNUM)
701 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
702 (match_operand:SI 2 "arm_add_operand" "rI,L"))
704 (set (match_operand:SI 0 "s_register_operand" "=r,r")
705 (plus:SI (match_dup 1) (match_dup 2)))]
709 sub%.\\t%0, %1, #%n2"
710 [(set_attr "conds" "set")]
713 (define_insn "*addsi3_compare0_scratch"
714 [(set (reg:CC_NOOV CC_REGNUM)
716 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
717 (match_operand:SI 1 "arm_add_operand" "rI,L"))
723 [(set_attr "conds" "set")]
726 (define_insn "*compare_negsi_si"
727 [(set (reg:CC_Z CC_REGNUM)
729 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
730 (match_operand:SI 1 "s_register_operand" "r")))]
733 [(set_attr "conds" "set")]
736 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
737 ;; addend is a constant.
738 (define_insn "*cmpsi2_addneg"
739 [(set (reg:CC CC_REGNUM)
741 (match_operand:SI 1 "s_register_operand" "r,r")
742 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
743 (set (match_operand:SI 0 "s_register_operand" "=r,r")
744 (plus:SI (match_dup 1)
745 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
746 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
749 add%.\\t%0, %1, #%n2"
750 [(set_attr "conds" "set")]
753 ;; Convert the sequence
755 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
759 ;; bcs dest ((unsigned)rn >= 1)
760 ;; similarly for the beq variant using bcc.
761 ;; This is a common looping idiom (while (n--))
763 [(set (match_operand:SI 0 "arm_general_register_operand" "")
764 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
766 (set (match_operand 2 "cc_register" "")
767 (compare (match_dup 0) (const_int -1)))
769 (if_then_else (match_operator 3 "equality_operator"
770 [(match_dup 2) (const_int 0)])
771 (match_operand 4 "" "")
772 (match_operand 5 "" "")))]
773 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
777 (match_dup 1) (const_int 1)))
778 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
780 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
783 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
784 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
787 operands[2], const0_rtx);"
790 ;; The next four insns work because they compare the result with one of
791 ;; the operands, and we know that the use of the condition code is
792 ;; either GEU or LTU, so we can use the carry flag from the addition
793 ;; instead of doing the compare a second time.
794 (define_insn "*addsi3_compare_op1"
795 [(set (reg:CC_C CC_REGNUM)
797 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
798 (match_operand:SI 2 "arm_add_operand" "rI,L"))
800 (set (match_operand:SI 0 "s_register_operand" "=r,r")
801 (plus:SI (match_dup 1) (match_dup 2)))]
805 sub%.\\t%0, %1, #%n2"
806 [(set_attr "conds" "set")]
809 (define_insn "*addsi3_compare_op2"
810 [(set (reg:CC_C CC_REGNUM)
812 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
813 (match_operand:SI 2 "arm_add_operand" "rI,L"))
815 (set (match_operand:SI 0 "s_register_operand" "=r,r")
816 (plus:SI (match_dup 1) (match_dup 2)))]
820 sub%.\\t%0, %1, #%n2"
821 [(set_attr "conds" "set")]
824 (define_insn "*compare_addsi2_op0"
825 [(set (reg:CC_C CC_REGNUM)
827 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
828 (match_operand:SI 1 "arm_add_operand" "rI,L"))
834 [(set_attr "conds" "set")]
837 (define_insn "*compare_addsi2_op1"
838 [(set (reg:CC_C CC_REGNUM)
840 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
841 (match_operand:SI 1 "arm_add_operand" "rI,L"))
847 [(set_attr "conds" "set")]
850 (define_insn "*addsi3_carryin"
851 [(set (match_operand:SI 0 "s_register_operand" "=r")
852 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
853 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
854 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
857 [(set_attr "conds" "use")]
860 (define_insn "*addsi3_carryin_shift"
861 [(set (match_operand:SI 0 "s_register_operand" "=r")
862 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
864 (match_operator:SI 2 "shift_operator"
865 [(match_operand:SI 3 "s_register_operand" "r")
866 (match_operand:SI 4 "reg_or_int_operand" "rM")])
867 (match_operand:SI 1 "s_register_operand" "r"))))]
869 "adc%?\\t%0, %1, %3%S2"
870 [(set_attr "conds" "use")
871 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
872 (const_string "alu_shift")
873 (const_string "alu_shift_reg")))]
876 (define_insn "*addsi3_carryin_alt1"
877 [(set (match_operand:SI 0 "s_register_operand" "=r")
878 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
879 (match_operand:SI 2 "arm_rhs_operand" "rI"))
880 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
883 [(set_attr "conds" "use")]
886 (define_insn "*addsi3_carryin_alt2"
887 [(set (match_operand:SI 0 "s_register_operand" "=r")
888 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
889 (match_operand:SI 1 "s_register_operand" "r"))
890 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
893 [(set_attr "conds" "use")]
896 (define_insn "*addsi3_carryin_alt3"
897 [(set (match_operand:SI 0 "s_register_operand" "=r")
898 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
899 (match_operand:SI 2 "arm_rhs_operand" "rI"))
900 (match_operand:SI 1 "s_register_operand" "r")))]
903 [(set_attr "conds" "use")]
906 (define_expand "incscc"
907 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
908 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
909 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
910 (match_operand:SI 1 "s_register_operand" "0,?r")))]
915 (define_insn "*arm_incscc"
916 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
917 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
918 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
919 (match_operand:SI 1 "s_register_operand" "0,?r")))]
923 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
924 [(set_attr "conds" "use")
925 (set_attr "length" "4,8")]
928 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
930 [(set (match_operand:SI 0 "s_register_operand" "")
931 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
932 (match_operand:SI 2 "s_register_operand" ""))
934 (clobber (match_operand:SI 3 "s_register_operand" ""))]
936 [(set (match_dup 3) (match_dup 1))
937 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
939 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
942 (define_expand "addsf3"
943 [(set (match_operand:SF 0 "s_register_operand" "")
944 (plus:SF (match_operand:SF 1 "s_register_operand" "")
945 (match_operand:SF 2 "arm_float_add_operand" "")))]
946 "TARGET_32BIT && TARGET_HARD_FLOAT"
949 && !cirrus_fp_register (operands[2], SFmode))
950 operands[2] = force_reg (SFmode, operands[2]);
953 (define_expand "adddf3"
954 [(set (match_operand:DF 0 "s_register_operand" "")
955 (plus:DF (match_operand:DF 1 "s_register_operand" "")
956 (match_operand:DF 2 "arm_float_add_operand" "")))]
957 "TARGET_32BIT && TARGET_HARD_FLOAT"
960 && !cirrus_fp_register (operands[2], DFmode))
961 operands[2] = force_reg (DFmode, operands[2]);
964 (define_expand "subdi3"
966 [(set (match_operand:DI 0 "s_register_operand" "")
967 (minus:DI (match_operand:DI 1 "s_register_operand" "")
968 (match_operand:DI 2 "s_register_operand" "")))
969 (clobber (reg:CC CC_REGNUM))])]
972 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
974 && cirrus_fp_register (operands[0], DImode)
975 && cirrus_fp_register (operands[1], DImode))
977 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
983 if (GET_CODE (operands[1]) != REG)
984 operands[1] = force_reg (DImode, operands[1]);
985 if (GET_CODE (operands[2]) != REG)
986 operands[2] = force_reg (DImode, operands[2]);
991 (define_insn "*arm_subdi3"
992 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
993 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
994 (match_operand:DI 2 "s_register_operand" "r,0,0")))
995 (clobber (reg:CC CC_REGNUM))]
997 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
998 [(set_attr "conds" "clob")
999 (set_attr "length" "8")]
1002 (define_insn "*thumb_subdi3"
1003 [(set (match_operand:DI 0 "register_operand" "=l")
1004 (minus:DI (match_operand:DI 1 "register_operand" "0")
1005 (match_operand:DI 2 "register_operand" "l")))
1006 (clobber (reg:CC CC_REGNUM))]
1008 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1009 [(set_attr "length" "4")]
1012 (define_insn "*subdi_di_zesidi"
1013 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1014 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
1016 (match_operand:SI 2 "s_register_operand" "r,r"))))
1017 (clobber (reg:CC CC_REGNUM))]
1019 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1020 [(set_attr "conds" "clob")
1021 (set_attr "length" "8")]
1024 (define_insn "*subdi_di_sesidi"
1025 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1026 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1028 (match_operand:SI 2 "s_register_operand" "r,r"))))
1029 (clobber (reg:CC CC_REGNUM))]
1031 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1032 [(set_attr "conds" "clob")
1033 (set_attr "length" "8")]
1036 (define_insn "*subdi_zesidi_di"
1037 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1038 (minus:DI (zero_extend:DI
1039 (match_operand:SI 2 "s_register_operand" "r,r"))
1040 (match_operand:DI 1 "s_register_operand" "?r,0")))
1041 (clobber (reg:CC CC_REGNUM))]
1043 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1044 [(set_attr "conds" "clob")
1045 (set_attr "length" "8")]
1048 (define_insn "*subdi_sesidi_di"
1049 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1050 (minus:DI (sign_extend:DI
1051 (match_operand:SI 2 "s_register_operand" "r,r"))
1052 (match_operand:DI 1 "s_register_operand" "?r,0")))
1053 (clobber (reg:CC CC_REGNUM))]
1055 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1056 [(set_attr "conds" "clob")
1057 (set_attr "length" "8")]
1060 (define_insn "*subdi_zesidi_zesidi"
1061 [(set (match_operand:DI 0 "s_register_operand" "=r")
1062 (minus:DI (zero_extend:DI
1063 (match_operand:SI 1 "s_register_operand" "r"))
1065 (match_operand:SI 2 "s_register_operand" "r"))))
1066 (clobber (reg:CC CC_REGNUM))]
1068 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1069 [(set_attr "conds" "clob")
1070 (set_attr "length" "8")]
1073 (define_expand "subsi3"
1074 [(set (match_operand:SI 0 "s_register_operand" "")
1075 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1076 (match_operand:SI 2 "s_register_operand" "")))]
1079 if (GET_CODE (operands[1]) == CONST_INT)
1083 arm_split_constant (MINUS, SImode, NULL_RTX,
1084 INTVAL (operands[1]), operands[0],
1085 operands[2], optimize && can_create_pseudo_p ());
1088 else /* TARGET_THUMB1 */
1089 operands[1] = force_reg (SImode, operands[1]);
1094 (define_insn "*thumb1_subsi3_insn"
1095 [(set (match_operand:SI 0 "register_operand" "=l")
1096 (minus:SI (match_operand:SI 1 "register_operand" "l")
1097 (match_operand:SI 2 "register_operand" "l")))]
1100 [(set_attr "length" "2")]
1103 ; ??? Check Thumb-2 split length
1104 (define_insn_and_split "*arm_subsi3_insn"
1105 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1106 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1107 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1114 && GET_CODE (operands[1]) == CONST_INT
1115 && !const_ok_for_arm (INTVAL (operands[1]))"
1116 [(clobber (const_int 0))]
1118 arm_split_constant (MINUS, SImode, curr_insn,
1119 INTVAL (operands[1]), operands[0], operands[2], 0);
1122 [(set_attr "length" "4,4,16")
1123 (set_attr "predicable" "yes")]
1127 [(match_scratch:SI 3 "r")
1128 (set (match_operand:SI 0 "arm_general_register_operand" "")
1129 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1130 (match_operand:SI 2 "arm_general_register_operand" "")))]
1132 && !const_ok_for_arm (INTVAL (operands[1]))
1133 && const_ok_for_arm (~INTVAL (operands[1]))"
1134 [(set (match_dup 3) (match_dup 1))
1135 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1139 (define_insn "*subsi3_compare0"
1140 [(set (reg:CC_NOOV CC_REGNUM)
1142 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1143 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1145 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1146 (minus:SI (match_dup 1) (match_dup 2)))]
1151 [(set_attr "conds" "set")]
1154 (define_expand "decscc"
1155 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1156 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1157 (match_operator:SI 2 "arm_comparison_operator"
1158 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1163 (define_insn "*arm_decscc"
1164 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1165 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1166 (match_operator:SI 2 "arm_comparison_operator"
1167 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1171 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1172 [(set_attr "conds" "use")
1173 (set_attr "length" "*,8")]
1176 (define_expand "subsf3"
1177 [(set (match_operand:SF 0 "s_register_operand" "")
1178 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1179 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1180 "TARGET_32BIT && TARGET_HARD_FLOAT"
1182 if (TARGET_MAVERICK)
1184 if (!cirrus_fp_register (operands[1], SFmode))
1185 operands[1] = force_reg (SFmode, operands[1]);
1186 if (!cirrus_fp_register (operands[2], SFmode))
1187 operands[2] = force_reg (SFmode, operands[2]);
1191 (define_expand "subdf3"
1192 [(set (match_operand:DF 0 "s_register_operand" "")
1193 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1194 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1195 "TARGET_32BIT && TARGET_HARD_FLOAT"
1197 if (TARGET_MAVERICK)
1199 if (!cirrus_fp_register (operands[1], DFmode))
1200 operands[1] = force_reg (DFmode, operands[1]);
1201 if (!cirrus_fp_register (operands[2], DFmode))
1202 operands[2] = force_reg (DFmode, operands[2]);
1207 ;; Multiplication insns
1209 (define_expand "mulsi3"
1210 [(set (match_operand:SI 0 "s_register_operand" "")
1211 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1212 (match_operand:SI 1 "s_register_operand" "")))]
1217 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1218 (define_insn "*arm_mulsi3"
1219 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1220 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1221 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1222 "TARGET_32BIT && !arm_arch6"
1223 "mul%?\\t%0, %2, %1"
1224 [(set_attr "insn" "mul")
1225 (set_attr "predicable" "yes")]
1228 (define_insn "*arm_mulsi3_v6"
1229 [(set (match_operand:SI 0 "s_register_operand" "=r")
1230 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1231 (match_operand:SI 2 "s_register_operand" "r")))]
1232 "TARGET_32BIT && arm_arch6"
1233 "mul%?\\t%0, %1, %2"
1234 [(set_attr "insn" "mul")
1235 (set_attr "predicable" "yes")]
1238 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1239 ; 1 and 2; are the same, because reload will make operand 0 match
1240 ; operand 1 without realizing that this conflicts with operand 2. We fix
1241 ; this by adding another alternative to match this case, and then `reload'
1242 ; it ourselves. This alternative must come first.
1243 (define_insn "*thumb_mulsi3"
1244 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1245 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1246 (match_operand:SI 2 "register_operand" "l,l,l")))]
1247 "TARGET_THUMB1 && !arm_arch6"
1249 if (which_alternative < 2)
1250 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1252 return \"mul\\t%0, %2\";
1254 [(set_attr "length" "4,4,2")
1255 (set_attr "insn" "mul")]
1258 (define_insn "*thumb_mulsi3_v6"
1259 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1260 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1261 (match_operand:SI 2 "register_operand" "l,0,0")))]
1262 "TARGET_THUMB1 && arm_arch6"
1267 [(set_attr "length" "2")
1268 (set_attr "insn" "mul")]
1271 (define_insn "*mulsi3_compare0"
1272 [(set (reg:CC_NOOV CC_REGNUM)
1273 (compare:CC_NOOV (mult:SI
1274 (match_operand:SI 2 "s_register_operand" "r,r")
1275 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1277 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1278 (mult:SI (match_dup 2) (match_dup 1)))]
1279 "TARGET_ARM && !arm_arch6"
1280 "mul%.\\t%0, %2, %1"
1281 [(set_attr "conds" "set")
1282 (set_attr "insn" "muls")]
1285 (define_insn "*mulsi3_compare0_v6"
1286 [(set (reg:CC_NOOV CC_REGNUM)
1287 (compare:CC_NOOV (mult:SI
1288 (match_operand:SI 2 "s_register_operand" "r")
1289 (match_operand:SI 1 "s_register_operand" "r"))
1291 (set (match_operand:SI 0 "s_register_operand" "=r")
1292 (mult:SI (match_dup 2) (match_dup 1)))]
1293 "TARGET_ARM && arm_arch6 && optimize_size"
1294 "mul%.\\t%0, %2, %1"
1295 [(set_attr "conds" "set")
1296 (set_attr "insn" "muls")]
1299 (define_insn "*mulsi_compare0_scratch"
1300 [(set (reg:CC_NOOV CC_REGNUM)
1301 (compare:CC_NOOV (mult:SI
1302 (match_operand:SI 2 "s_register_operand" "r,r")
1303 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1305 (clobber (match_scratch:SI 0 "=&r,&r"))]
1306 "TARGET_ARM && !arm_arch6"
1307 "mul%.\\t%0, %2, %1"
1308 [(set_attr "conds" "set")
1309 (set_attr "insn" "muls")]
1312 (define_insn "*mulsi_compare0_scratch_v6"
1313 [(set (reg:CC_NOOV CC_REGNUM)
1314 (compare:CC_NOOV (mult:SI
1315 (match_operand:SI 2 "s_register_operand" "r")
1316 (match_operand:SI 1 "s_register_operand" "r"))
1318 (clobber (match_scratch:SI 0 "=r"))]
1319 "TARGET_ARM && arm_arch6 && optimize_size"
1320 "mul%.\\t%0, %2, %1"
1321 [(set_attr "conds" "set")
1322 (set_attr "insn" "muls")]
1325 ;; Unnamed templates to match MLA instruction.
1327 (define_insn "*mulsi3addsi"
1328 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1330 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1331 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1332 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1333 "TARGET_32BIT && !arm_arch6"
1334 "mla%?\\t%0, %2, %1, %3"
1335 [(set_attr "insn" "mla")
1336 (set_attr "predicable" "yes")]
1339 (define_insn "*mulsi3addsi_v6"
1340 [(set (match_operand:SI 0 "s_register_operand" "=r")
1342 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1343 (match_operand:SI 1 "s_register_operand" "r"))
1344 (match_operand:SI 3 "s_register_operand" "r")))]
1345 "TARGET_32BIT && arm_arch6"
1346 "mla%?\\t%0, %2, %1, %3"
1347 [(set_attr "insn" "mla")
1348 (set_attr "predicable" "yes")]
1351 (define_insn "*mulsi3addsi_compare0"
1352 [(set (reg:CC_NOOV CC_REGNUM)
1355 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1356 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1357 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1359 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1360 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1362 "TARGET_ARM && arm_arch6"
1363 "mla%.\\t%0, %2, %1, %3"
1364 [(set_attr "conds" "set")
1365 (set_attr "insn" "mlas")]
1368 (define_insn "*mulsi3addsi_compare0_v6"
1369 [(set (reg:CC_NOOV CC_REGNUM)
1372 (match_operand:SI 2 "s_register_operand" "r")
1373 (match_operand:SI 1 "s_register_operand" "r"))
1374 (match_operand:SI 3 "s_register_operand" "r"))
1376 (set (match_operand:SI 0 "s_register_operand" "=r")
1377 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1379 "TARGET_ARM && arm_arch6 && optimize_size"
1380 "mla%.\\t%0, %2, %1, %3"
1381 [(set_attr "conds" "set")
1382 (set_attr "insn" "mlas")]
1385 (define_insn "*mulsi3addsi_compare0_scratch"
1386 [(set (reg:CC_NOOV CC_REGNUM)
1389 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1390 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1391 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1393 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1394 "TARGET_ARM && !arm_arch6"
1395 "mla%.\\t%0, %2, %1, %3"
1396 [(set_attr "conds" "set")
1397 (set_attr "insn" "mlas")]
1400 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1401 [(set (reg:CC_NOOV CC_REGNUM)
1404 (match_operand:SI 2 "s_register_operand" "r")
1405 (match_operand:SI 1 "s_register_operand" "r"))
1406 (match_operand:SI 3 "s_register_operand" "r"))
1408 (clobber (match_scratch:SI 0 "=r"))]
1409 "TARGET_ARM && arm_arch6 && optimize_size"
1410 "mla%.\\t%0, %2, %1, %3"
1411 [(set_attr "conds" "set")
1412 (set_attr "insn" "mlas")]
1415 (define_insn "*mulsi3subsi"
1416 [(set (match_operand:SI 0 "s_register_operand" "=r")
1418 (match_operand:SI 3 "s_register_operand" "r")
1419 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1420 (match_operand:SI 1 "s_register_operand" "r"))))]
1421 "TARGET_32BIT && arm_arch_thumb2"
1422 "mls%?\\t%0, %2, %1, %3"
1423 [(set_attr "insn" "mla")
1424 (set_attr "predicable" "yes")]
1427 ;; Unnamed template to match long long multiply-accumulate (smlal)
1429 (define_insn "*mulsidi3adddi"
1430 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1433 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1434 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1435 (match_operand:DI 1 "s_register_operand" "0")))]
1436 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1437 "smlal%?\\t%Q0, %R0, %3, %2"
1438 [(set_attr "insn" "smlal")
1439 (set_attr "predicable" "yes")]
1442 (define_insn "*mulsidi3adddi_v6"
1443 [(set (match_operand:DI 0 "s_register_operand" "=r")
1446 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1447 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1448 (match_operand:DI 1 "s_register_operand" "0")))]
1449 "TARGET_32BIT && arm_arch6"
1450 "smlal%?\\t%Q0, %R0, %3, %2"
1451 [(set_attr "insn" "smlal")
1452 (set_attr "predicable" "yes")]
1455 ;; 32x32->64 widening multiply.
1456 ;; As with mulsi3, the only difference between the v3-5 and v6+
1457 ;; versions of these patterns is the requirement that the output not
1458 ;; overlap the inputs, but that still means we have to have a named
1459 ;; expander and two different starred insns.
1461 (define_expand "mulsidi3"
1462 [(set (match_operand:DI 0 "s_register_operand" "")
1464 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1465 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1466 "TARGET_32BIT && arm_arch3m"
1470 (define_insn "*mulsidi3_nov6"
1471 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1473 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1474 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1475 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1476 "smull%?\\t%Q0, %R0, %1, %2"
1477 [(set_attr "insn" "smull")
1478 (set_attr "predicable" "yes")]
1481 (define_insn "*mulsidi3_v6"
1482 [(set (match_operand:DI 0 "s_register_operand" "=r")
1484 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1485 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1486 "TARGET_32BIT && arm_arch6"
1487 "smull%?\\t%Q0, %R0, %1, %2"
1488 [(set_attr "insn" "smull")
1489 (set_attr "predicable" "yes")]
1492 (define_expand "umulsidi3"
1493 [(set (match_operand:DI 0 "s_register_operand" "")
1495 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1496 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1497 "TARGET_32BIT && arm_arch3m"
1501 (define_insn "*umulsidi3_nov6"
1502 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1504 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1505 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1506 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1507 "umull%?\\t%Q0, %R0, %1, %2"
1508 [(set_attr "insn" "umull")
1509 (set_attr "predicable" "yes")]
1512 (define_insn "*umulsidi3_v6"
1513 [(set (match_operand:DI 0 "s_register_operand" "=r")
1515 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1516 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1517 "TARGET_32BIT && arm_arch6"
1518 "umull%?\\t%Q0, %R0, %1, %2"
1519 [(set_attr "insn" "umull")
1520 (set_attr "predicable" "yes")]
1523 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1525 (define_insn "*umulsidi3adddi"
1526 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1529 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1530 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1531 (match_operand:DI 1 "s_register_operand" "0")))]
1532 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1533 "umlal%?\\t%Q0, %R0, %3, %2"
1534 [(set_attr "insn" "umlal")
1535 (set_attr "predicable" "yes")]
1538 (define_insn "*umulsidi3adddi_v6"
1539 [(set (match_operand:DI 0 "s_register_operand" "=r")
1542 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1543 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1544 (match_operand:DI 1 "s_register_operand" "0")))]
1545 "TARGET_32BIT && arm_arch6"
1546 "umlal%?\\t%Q0, %R0, %3, %2"
1547 [(set_attr "insn" "umlal")
1548 (set_attr "predicable" "yes")]
1551 (define_expand "smulsi3_highpart"
1553 [(set (match_operand:SI 0 "s_register_operand" "")
1557 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1558 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1560 (clobber (match_scratch:SI 3 ""))])]
1561 "TARGET_32BIT && arm_arch3m"
1565 (define_insn "*smulsi3_highpart_nov6"
1566 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1570 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1571 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1573 (clobber (match_scratch:SI 3 "=&r,&r"))]
1574 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1575 "smull%?\\t%3, %0, %2, %1"
1576 [(set_attr "insn" "smull")
1577 (set_attr "predicable" "yes")]
1580 (define_insn "*smulsi3_highpart_v6"
1581 [(set (match_operand:SI 0 "s_register_operand" "=r")
1585 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1586 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1588 (clobber (match_scratch:SI 3 "=r"))]
1589 "TARGET_32BIT && arm_arch6"
1590 "smull%?\\t%3, %0, %2, %1"
1591 [(set_attr "insn" "smull")
1592 (set_attr "predicable" "yes")]
1595 (define_expand "umulsi3_highpart"
1597 [(set (match_operand:SI 0 "s_register_operand" "")
1601 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1602 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1604 (clobber (match_scratch:SI 3 ""))])]
1605 "TARGET_32BIT && arm_arch3m"
1609 (define_insn "*umulsi3_highpart_nov6"
1610 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1614 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1615 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1617 (clobber (match_scratch:SI 3 "=&r,&r"))]
1618 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1619 "umull%?\\t%3, %0, %2, %1"
1620 [(set_attr "insn" "umull")
1621 (set_attr "predicable" "yes")]
1624 (define_insn "*umulsi3_highpart_v6"
1625 [(set (match_operand:SI 0 "s_register_operand" "=r")
1629 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1630 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1632 (clobber (match_scratch:SI 3 "=r"))]
1633 "TARGET_32BIT && arm_arch6"
1634 "umull%?\\t%3, %0, %2, %1"
1635 [(set_attr "insn" "umull")
1636 (set_attr "predicable" "yes")]
1639 (define_insn "mulhisi3"
1640 [(set (match_operand:SI 0 "s_register_operand" "=r")
1641 (mult:SI (sign_extend:SI
1642 (match_operand:HI 1 "s_register_operand" "%r"))
1644 (match_operand:HI 2 "s_register_operand" "r"))))]
1645 "TARGET_DSP_MULTIPLY"
1646 "smulbb%?\\t%0, %1, %2"
1647 [(set_attr "insn" "smulxy")
1648 (set_attr "predicable" "yes")]
1651 (define_insn "*mulhisi3tb"
1652 [(set (match_operand:SI 0 "s_register_operand" "=r")
1653 (mult:SI (ashiftrt:SI
1654 (match_operand:SI 1 "s_register_operand" "r")
1657 (match_operand:HI 2 "s_register_operand" "r"))))]
1658 "TARGET_DSP_MULTIPLY"
1659 "smultb%?\\t%0, %1, %2"
1660 [(set_attr "insn" "smulxy")
1661 (set_attr "predicable" "yes")]
1664 (define_insn "*mulhisi3bt"
1665 [(set (match_operand:SI 0 "s_register_operand" "=r")
1666 (mult:SI (sign_extend:SI
1667 (match_operand:HI 1 "s_register_operand" "r"))
1669 (match_operand:SI 2 "s_register_operand" "r")
1671 "TARGET_DSP_MULTIPLY"
1672 "smulbt%?\\t%0, %1, %2"
1673 [(set_attr "insn" "smulxy")
1674 (set_attr "predicable" "yes")]
1677 (define_insn "*mulhisi3tt"
1678 [(set (match_operand:SI 0 "s_register_operand" "=r")
1679 (mult:SI (ashiftrt:SI
1680 (match_operand:SI 1 "s_register_operand" "r")
1683 (match_operand:SI 2 "s_register_operand" "r")
1685 "TARGET_DSP_MULTIPLY"
1686 "smultt%?\\t%0, %1, %2"
1687 [(set_attr "insn" "smulxy")
1688 (set_attr "predicable" "yes")]
1691 (define_insn "*mulhisi3addsi"
1692 [(set (match_operand:SI 0 "s_register_operand" "=r")
1693 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1694 (mult:SI (sign_extend:SI
1695 (match_operand:HI 2 "s_register_operand" "%r"))
1697 (match_operand:HI 3 "s_register_operand" "r")))))]
1698 "TARGET_DSP_MULTIPLY"
1699 "smlabb%?\\t%0, %2, %3, %1"
1700 [(set_attr "insn" "smlaxy")
1701 (set_attr "predicable" "yes")]
1704 (define_insn "*mulhidi3adddi"
1705 [(set (match_operand:DI 0 "s_register_operand" "=r")
1707 (match_operand:DI 1 "s_register_operand" "0")
1708 (mult:DI (sign_extend:DI
1709 (match_operand:HI 2 "s_register_operand" "%r"))
1711 (match_operand:HI 3 "s_register_operand" "r")))))]
1712 "TARGET_DSP_MULTIPLY"
1713 "smlalbb%?\\t%Q0, %R0, %2, %3"
1714 [(set_attr "insn" "smlalxy")
1715 (set_attr "predicable" "yes")])
1717 (define_expand "mulsf3"
1718 [(set (match_operand:SF 0 "s_register_operand" "")
1719 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1720 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1721 "TARGET_32BIT && TARGET_HARD_FLOAT"
1724 && !cirrus_fp_register (operands[2], SFmode))
1725 operands[2] = force_reg (SFmode, operands[2]);
1728 (define_expand "muldf3"
1729 [(set (match_operand:DF 0 "s_register_operand" "")
1730 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1731 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1732 "TARGET_32BIT && TARGET_HARD_FLOAT"
1735 && !cirrus_fp_register (operands[2], DFmode))
1736 operands[2] = force_reg (DFmode, operands[2]);
1741 (define_expand "divsf3"
1742 [(set (match_operand:SF 0 "s_register_operand" "")
1743 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1744 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1745 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1748 (define_expand "divdf3"
1749 [(set (match_operand:DF 0 "s_register_operand" "")
1750 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1751 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1752 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1757 (define_expand "modsf3"
1758 [(set (match_operand:SF 0 "s_register_operand" "")
1759 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1760 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1761 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1764 (define_expand "moddf3"
1765 [(set (match_operand:DF 0 "s_register_operand" "")
1766 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1767 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1768 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1771 ;; Boolean and,ior,xor insns
1773 ;; Split up double word logical operations
1775 ;; Split up simple DImode logical operations. Simply perform the logical
1776 ;; operation on the upper and lower halves of the registers.
1778 [(set (match_operand:DI 0 "s_register_operand" "")
1779 (match_operator:DI 6 "logical_binary_operator"
1780 [(match_operand:DI 1 "s_register_operand" "")
1781 (match_operand:DI 2 "s_register_operand" "")]))]
1782 "TARGET_32BIT && reload_completed
1783 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1784 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1785 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1788 operands[3] = gen_highpart (SImode, operands[0]);
1789 operands[0] = gen_lowpart (SImode, operands[0]);
1790 operands[4] = gen_highpart (SImode, operands[1]);
1791 operands[1] = gen_lowpart (SImode, operands[1]);
1792 operands[5] = gen_highpart (SImode, operands[2]);
1793 operands[2] = gen_lowpart (SImode, operands[2]);
1798 [(set (match_operand:DI 0 "s_register_operand" "")
1799 (match_operator:DI 6 "logical_binary_operator"
1800 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1801 (match_operand:DI 1 "s_register_operand" "")]))]
1802 "TARGET_32BIT && reload_completed"
1803 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1804 (set (match_dup 3) (match_op_dup:SI 6
1805 [(ashiftrt:SI (match_dup 2) (const_int 31))
1809 operands[3] = gen_highpart (SImode, operands[0]);
1810 operands[0] = gen_lowpart (SImode, operands[0]);
1811 operands[4] = gen_highpart (SImode, operands[1]);
1812 operands[1] = gen_lowpart (SImode, operands[1]);
1813 operands[5] = gen_highpart (SImode, operands[2]);
1814 operands[2] = gen_lowpart (SImode, operands[2]);
1818 ;; The zero extend of operand 2 means we can just copy the high part of
1819 ;; operand1 into operand0.
1821 [(set (match_operand:DI 0 "s_register_operand" "")
1823 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1824 (match_operand:DI 1 "s_register_operand" "")))]
1825 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1826 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1827 (set (match_dup 3) (match_dup 4))]
1830 operands[4] = gen_highpart (SImode, operands[1]);
1831 operands[3] = gen_highpart (SImode, operands[0]);
1832 operands[0] = gen_lowpart (SImode, operands[0]);
1833 operands[1] = gen_lowpart (SImode, operands[1]);
1837 ;; The zero extend of operand 2 means we can just copy the high part of
1838 ;; operand1 into operand0.
1840 [(set (match_operand:DI 0 "s_register_operand" "")
1842 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1843 (match_operand:DI 1 "s_register_operand" "")))]
1844 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1845 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1846 (set (match_dup 3) (match_dup 4))]
1849 operands[4] = gen_highpart (SImode, operands[1]);
1850 operands[3] = gen_highpart (SImode, operands[0]);
1851 operands[0] = gen_lowpart (SImode, operands[0]);
1852 operands[1] = gen_lowpart (SImode, operands[1]);
1856 (define_insn "anddi3"
1857 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1858 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1859 (match_operand:DI 2 "s_register_operand" "r,r")))]
1860 "TARGET_32BIT && ! TARGET_IWMMXT"
1862 [(set_attr "length" "8")]
1865 (define_insn_and_split "*anddi_zesidi_di"
1866 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1867 (and:DI (zero_extend:DI
1868 (match_operand:SI 2 "s_register_operand" "r,r"))
1869 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1872 "TARGET_32BIT && reload_completed"
1873 ; The zero extend of operand 2 clears the high word of the output
1875 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1876 (set (match_dup 3) (const_int 0))]
1879 operands[3] = gen_highpart (SImode, operands[0]);
1880 operands[0] = gen_lowpart (SImode, operands[0]);
1881 operands[1] = gen_lowpart (SImode, operands[1]);
1883 [(set_attr "length" "8")]
1886 (define_insn "*anddi_sesdi_di"
1887 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1888 (and:DI (sign_extend:DI
1889 (match_operand:SI 2 "s_register_operand" "r,r"))
1890 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1893 [(set_attr "length" "8")]
1896 (define_expand "andsi3"
1897 [(set (match_operand:SI 0 "s_register_operand" "")
1898 (and:SI (match_operand:SI 1 "s_register_operand" "")
1899 (match_operand:SI 2 "reg_or_int_operand" "")))]
1904 if (GET_CODE (operands[2]) == CONST_INT)
1906 arm_split_constant (AND, SImode, NULL_RTX,
1907 INTVAL (operands[2]), operands[0],
1908 operands[1], optimize && can_create_pseudo_p ());
1913 else /* TARGET_THUMB1 */
1915 if (GET_CODE (operands[2]) != CONST_INT)
1916 operands[2] = force_reg (SImode, operands[2]);
1921 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1923 operands[2] = force_reg (SImode,
1924 GEN_INT (~INTVAL (operands[2])));
1926 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1931 for (i = 9; i <= 31; i++)
1933 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1935 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1939 else if ((((HOST_WIDE_INT) 1) << i) - 1
1940 == ~INTVAL (operands[2]))
1942 rtx shift = GEN_INT (i);
1943 rtx reg = gen_reg_rtx (SImode);
1945 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1946 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1952 operands[2] = force_reg (SImode, operands[2]);
1958 ; ??? Check split length for Thumb-2
1959 (define_insn_and_split "*arm_andsi3_insn"
1960 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1961 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1962 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1966 bic%?\\t%0, %1, #%B2
1969 && GET_CODE (operands[2]) == CONST_INT
1970 && !(const_ok_for_arm (INTVAL (operands[2]))
1971 || const_ok_for_arm (~INTVAL (operands[2])))"
1972 [(clobber (const_int 0))]
1974 arm_split_constant (AND, SImode, curr_insn,
1975 INTVAL (operands[2]), operands[0], operands[1], 0);
1978 [(set_attr "length" "4,4,16")
1979 (set_attr "predicable" "yes")]
1982 (define_insn "*thumb1_andsi3_insn"
1983 [(set (match_operand:SI 0 "register_operand" "=l")
1984 (and:SI (match_operand:SI 1 "register_operand" "%0")
1985 (match_operand:SI 2 "register_operand" "l")))]
1988 [(set_attr "length" "2")]
1991 (define_insn "*andsi3_compare0"
1992 [(set (reg:CC_NOOV CC_REGNUM)
1994 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1995 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1997 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1998 (and:SI (match_dup 1) (match_dup 2)))]
2002 bic%.\\t%0, %1, #%B2"
2003 [(set_attr "conds" "set")]
2006 (define_insn "*andsi3_compare0_scratch"
2007 [(set (reg:CC_NOOV CC_REGNUM)
2009 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2010 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2012 (clobber (match_scratch:SI 2 "=X,r"))]
2016 bic%.\\t%2, %0, #%B1"
2017 [(set_attr "conds" "set")]
2020 (define_insn "*zeroextractsi_compare0_scratch"
2021 [(set (reg:CC_NOOV CC_REGNUM)
2022 (compare:CC_NOOV (zero_extract:SI
2023 (match_operand:SI 0 "s_register_operand" "r")
2024 (match_operand 1 "const_int_operand" "n")
2025 (match_operand 2 "const_int_operand" "n"))
2028 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2029 && INTVAL (operands[1]) > 0
2030 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2031 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2033 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2034 << INTVAL (operands[2]));
2035 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2038 [(set_attr "conds" "set")]
2041 (define_insn_and_split "*ne_zeroextractsi"
2042 [(set (match_operand:SI 0 "s_register_operand" "=r")
2043 (ne:SI (zero_extract:SI
2044 (match_operand:SI 1 "s_register_operand" "r")
2045 (match_operand:SI 2 "const_int_operand" "n")
2046 (match_operand:SI 3 "const_int_operand" "n"))
2048 (clobber (reg:CC CC_REGNUM))]
2050 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2051 && INTVAL (operands[2]) > 0
2052 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2053 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2056 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2057 && INTVAL (operands[2]) > 0
2058 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2059 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2060 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2061 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2063 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2065 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2066 (match_dup 0) (const_int 1)))]
2068 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2069 << INTVAL (operands[3]));
2071 [(set_attr "conds" "clob")
2072 (set (attr "length")
2073 (if_then_else (eq_attr "is_thumb" "yes")
2078 (define_insn_and_split "*ne_zeroextractsi_shifted"
2079 [(set (match_operand:SI 0 "s_register_operand" "=r")
2080 (ne:SI (zero_extract:SI
2081 (match_operand:SI 1 "s_register_operand" "r")
2082 (match_operand:SI 2 "const_int_operand" "n")
2085 (clobber (reg:CC CC_REGNUM))]
2089 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2090 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2092 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2094 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2095 (match_dup 0) (const_int 1)))]
2097 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2099 [(set_attr "conds" "clob")
2100 (set_attr "length" "8")]
2103 (define_insn_and_split "*ite_ne_zeroextractsi"
2104 [(set (match_operand:SI 0 "s_register_operand" "=r")
2105 (if_then_else:SI (ne (zero_extract:SI
2106 (match_operand:SI 1 "s_register_operand" "r")
2107 (match_operand:SI 2 "const_int_operand" "n")
2108 (match_operand:SI 3 "const_int_operand" "n"))
2110 (match_operand:SI 4 "arm_not_operand" "rIK")
2112 (clobber (reg:CC CC_REGNUM))]
2114 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2115 && INTVAL (operands[2]) > 0
2116 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2117 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2118 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2121 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2122 && INTVAL (operands[2]) > 0
2123 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2124 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2125 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2126 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2127 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2129 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2131 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2132 (match_dup 0) (match_dup 4)))]
2134 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2135 << INTVAL (operands[3]));
2137 [(set_attr "conds" "clob")
2138 (set_attr "length" "8")]
2141 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2142 [(set (match_operand:SI 0 "s_register_operand" "=r")
2143 (if_then_else:SI (ne (zero_extract:SI
2144 (match_operand:SI 1 "s_register_operand" "r")
2145 (match_operand:SI 2 "const_int_operand" "n")
2148 (match_operand:SI 3 "arm_not_operand" "rIK")
2150 (clobber (reg:CC CC_REGNUM))]
2151 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2153 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2154 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2155 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2157 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2159 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2160 (match_dup 0) (match_dup 3)))]
2162 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2164 [(set_attr "conds" "clob")
2165 (set_attr "length" "8")]
2169 [(set (match_operand:SI 0 "s_register_operand" "")
2170 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2171 (match_operand:SI 2 "const_int_operand" "")
2172 (match_operand:SI 3 "const_int_operand" "")))
2173 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2175 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2176 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2178 HOST_WIDE_INT temp = INTVAL (operands[2]);
2180 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2181 operands[3] = GEN_INT (32 - temp);
2185 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2187 [(set (match_operand:SI 0 "s_register_operand" "")
2188 (match_operator:SI 1 "shiftable_operator"
2189 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2190 (match_operand:SI 3 "const_int_operand" "")
2191 (match_operand:SI 4 "const_int_operand" ""))
2192 (match_operand:SI 5 "s_register_operand" "")]))
2193 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2195 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2198 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2201 HOST_WIDE_INT temp = INTVAL (operands[3]);
2203 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2204 operands[4] = GEN_INT (32 - temp);
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2211 (match_operand:SI 2 "const_int_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")))]
2214 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2215 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2217 HOST_WIDE_INT temp = INTVAL (operands[2]);
2219 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2220 operands[3] = GEN_INT (32 - temp);
2225 [(set (match_operand:SI 0 "s_register_operand" "")
2226 (match_operator:SI 1 "shiftable_operator"
2227 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2228 (match_operand:SI 3 "const_int_operand" "")
2229 (match_operand:SI 4 "const_int_operand" ""))
2230 (match_operand:SI 5 "s_register_operand" "")]))
2231 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2233 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2236 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2239 HOST_WIDE_INT temp = INTVAL (operands[3]);
2241 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2242 operands[4] = GEN_INT (32 - temp);
2246 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2247 ;;; represented by the bitfield, then this will produce incorrect results.
2248 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2249 ;;; which have a real bit-field insert instruction, the truncation happens
2250 ;;; in the bit-field insert instruction itself. Since arm does not have a
2251 ;;; bit-field insert instruction, we would have to emit code here to truncate
2252 ;;; the value before we insert. This loses some of the advantage of having
2253 ;;; this insv pattern, so this pattern needs to be reevalutated.
2255 (define_expand "insv"
2256 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2257 (match_operand:SI 1 "general_operand" "")
2258 (match_operand:SI 2 "general_operand" ""))
2259 (match_operand:SI 3 "reg_or_int_operand" ""))]
2260 "TARGET_ARM || arm_arch_thumb2"
2263 int start_bit = INTVAL (operands[2]);
2264 int width = INTVAL (operands[1]);
2265 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2266 rtx target, subtarget;
2268 if (arm_arch_thumb2)
2270 bool use_bfi = TRUE;
2272 if (GET_CODE (operands[3]) == CONST_INT)
2274 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2278 emit_insn (gen_insv_zero (operands[0], operands[1],
2283 /* See if the set can be done with a single orr instruction. */
2284 if (val == mask && const_ok_for_arm (val << start_bit))
2290 if (GET_CODE (operands[3]) != REG)
2291 operands[3] = force_reg (SImode, operands[3]);
2293 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2299 target = copy_rtx (operands[0]);
2300 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2301 subreg as the final target. */
2302 if (GET_CODE (target) == SUBREG)
2304 subtarget = gen_reg_rtx (SImode);
2305 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2306 < GET_MODE_SIZE (SImode))
2307 target = SUBREG_REG (target);
2312 if (GET_CODE (operands[3]) == CONST_INT)
2314 /* Since we are inserting a known constant, we may be able to
2315 reduce the number of bits that we have to clear so that
2316 the mask becomes simple. */
2317 /* ??? This code does not check to see if the new mask is actually
2318 simpler. It may not be. */
2319 rtx op1 = gen_reg_rtx (SImode);
2320 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2321 start of this pattern. */
2322 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2323 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2325 emit_insn (gen_andsi3 (op1, operands[0],
2326 gen_int_mode (~mask2, SImode)));
2327 emit_insn (gen_iorsi3 (subtarget, op1,
2328 gen_int_mode (op3_value << start_bit, SImode)));
2330 else if (start_bit == 0
2331 && !(const_ok_for_arm (mask)
2332 || const_ok_for_arm (~mask)))
2334 /* A Trick, since we are setting the bottom bits in the word,
2335 we can shift operand[3] up, operand[0] down, OR them together
2336 and rotate the result back again. This takes 3 insns, and
2337 the third might be mergeable into another op. */
2338 /* The shift up copes with the possibility that operand[3] is
2339 wider than the bitfield. */
2340 rtx op0 = gen_reg_rtx (SImode);
2341 rtx op1 = gen_reg_rtx (SImode);
2343 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2344 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2345 emit_insn (gen_iorsi3 (op1, op1, op0));
2346 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2348 else if ((width + start_bit == 32)
2349 && !(const_ok_for_arm (mask)
2350 || const_ok_for_arm (~mask)))
2352 /* Similar trick, but slightly less efficient. */
2354 rtx op0 = gen_reg_rtx (SImode);
2355 rtx op1 = gen_reg_rtx (SImode);
2357 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2358 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2359 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2360 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2364 rtx op0 = gen_int_mode (mask, SImode);
2365 rtx op1 = gen_reg_rtx (SImode);
2366 rtx op2 = gen_reg_rtx (SImode);
2368 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2370 rtx tmp = gen_reg_rtx (SImode);
2372 emit_insn (gen_movsi (tmp, op0));
2376 /* Mask out any bits in operand[3] that are not needed. */
2377 emit_insn (gen_andsi3 (op1, operands[3], op0));
2379 if (GET_CODE (op0) == CONST_INT
2380 && (const_ok_for_arm (mask << start_bit)
2381 || const_ok_for_arm (~(mask << start_bit))))
2383 op0 = gen_int_mode (~(mask << start_bit), SImode);
2384 emit_insn (gen_andsi3 (op2, operands[0], op0));
2388 if (GET_CODE (op0) == CONST_INT)
2390 rtx tmp = gen_reg_rtx (SImode);
2392 emit_insn (gen_movsi (tmp, op0));
2397 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2399 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2403 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2405 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2408 if (subtarget != target)
2410 /* If TARGET is still a SUBREG, then it must be wider than a word,
2411 so we must be careful only to set the subword we were asked to. */
2412 if (GET_CODE (target) == SUBREG)
2413 emit_move_insn (target, subtarget);
2415 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2422 (define_insn "insv_zero"
2423 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2424 (match_operand:SI 1 "const_int_operand" "M")
2425 (match_operand:SI 2 "const_int_operand" "M"))
2429 [(set_attr "length" "4")
2430 (set_attr "predicable" "yes")]
2433 (define_insn "insv_t2"
2434 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2435 (match_operand:SI 1 "const_int_operand" "M")
2436 (match_operand:SI 2 "const_int_operand" "M"))
2437 (match_operand:SI 3 "s_register_operand" "r"))]
2439 "bfi%?\t%0, %3, %2, %1"
2440 [(set_attr "length" "4")
2441 (set_attr "predicable" "yes")]
2444 ; constants for op 2 will never be given to these patterns.
2445 (define_insn_and_split "*anddi_notdi_di"
2446 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2447 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2448 (match_operand:DI 2 "s_register_operand" "0,r")))]
2451 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2452 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2453 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2456 operands[3] = gen_highpart (SImode, operands[0]);
2457 operands[0] = gen_lowpart (SImode, operands[0]);
2458 operands[4] = gen_highpart (SImode, operands[1]);
2459 operands[1] = gen_lowpart (SImode, operands[1]);
2460 operands[5] = gen_highpart (SImode, operands[2]);
2461 operands[2] = gen_lowpart (SImode, operands[2]);
2463 [(set_attr "length" "8")
2464 (set_attr "predicable" "yes")]
2467 (define_insn_and_split "*anddi_notzesidi_di"
2468 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2469 (and:DI (not:DI (zero_extend:DI
2470 (match_operand:SI 2 "s_register_operand" "r,r")))
2471 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2474 bic%?\\t%Q0, %Q1, %2
2476 ; (not (zero_extend ...)) allows us to just copy the high word from
2477 ; operand1 to operand0.
2480 && operands[0] != operands[1]"
2481 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2482 (set (match_dup 3) (match_dup 4))]
2485 operands[3] = gen_highpart (SImode, operands[0]);
2486 operands[0] = gen_lowpart (SImode, operands[0]);
2487 operands[4] = gen_highpart (SImode, operands[1]);
2488 operands[1] = gen_lowpart (SImode, operands[1]);
2490 [(set_attr "length" "4,8")
2491 (set_attr "predicable" "yes")]
2494 (define_insn_and_split "*anddi_notsesidi_di"
2495 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2496 (and:DI (not:DI (sign_extend:DI
2497 (match_operand:SI 2 "s_register_operand" "r,r")))
2498 (match_operand:DI 1 "s_register_operand" "0,r")))]
2501 "TARGET_32BIT && reload_completed"
2502 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2503 (set (match_dup 3) (and:SI (not:SI
2504 (ashiftrt:SI (match_dup 2) (const_int 31)))
2508 operands[3] = gen_highpart (SImode, operands[0]);
2509 operands[0] = gen_lowpart (SImode, operands[0]);
2510 operands[4] = gen_highpart (SImode, operands[1]);
2511 operands[1] = gen_lowpart (SImode, operands[1]);
2513 [(set_attr "length" "8")
2514 (set_attr "predicable" "yes")]
2517 (define_insn "andsi_notsi_si"
2518 [(set (match_operand:SI 0 "s_register_operand" "=r")
2519 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2520 (match_operand:SI 1 "s_register_operand" "r")))]
2522 "bic%?\\t%0, %1, %2"
2523 [(set_attr "predicable" "yes")]
2526 (define_insn "bicsi3"
2527 [(set (match_operand:SI 0 "register_operand" "=l")
2528 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2529 (match_operand:SI 2 "register_operand" "0")))]
2532 [(set_attr "length" "2")]
2535 (define_insn "andsi_not_shiftsi_si"
2536 [(set (match_operand:SI 0 "s_register_operand" "=r")
2537 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2538 [(match_operand:SI 2 "s_register_operand" "r")
2539 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2540 (match_operand:SI 1 "s_register_operand" "r")))]
2542 "bic%?\\t%0, %1, %2%S4"
2543 [(set_attr "predicable" "yes")
2544 (set_attr "shift" "2")
2545 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2546 (const_string "alu_shift")
2547 (const_string "alu_shift_reg")))]
2550 (define_insn "*andsi_notsi_si_compare0"
2551 [(set (reg:CC_NOOV CC_REGNUM)
2553 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2554 (match_operand:SI 1 "s_register_operand" "r"))
2556 (set (match_operand:SI 0 "s_register_operand" "=r")
2557 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2559 "bic%.\\t%0, %1, %2"
2560 [(set_attr "conds" "set")]
2563 (define_insn "*andsi_notsi_si_compare0_scratch"
2564 [(set (reg:CC_NOOV CC_REGNUM)
2566 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2567 (match_operand:SI 1 "s_register_operand" "r"))
2569 (clobber (match_scratch:SI 0 "=r"))]
2571 "bic%.\\t%0, %1, %2"
2572 [(set_attr "conds" "set")]
2575 (define_insn "iordi3"
2576 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2577 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2578 (match_operand:DI 2 "s_register_operand" "r,r")))]
2579 "TARGET_32BIT && ! TARGET_IWMMXT"
2581 [(set_attr "length" "8")
2582 (set_attr "predicable" "yes")]
2585 (define_insn "*iordi_zesidi_di"
2586 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2587 (ior:DI (zero_extend:DI
2588 (match_operand:SI 2 "s_register_operand" "r,r"))
2589 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2592 orr%?\\t%Q0, %Q1, %2
2594 [(set_attr "length" "4,8")
2595 (set_attr "predicable" "yes")]
2598 (define_insn "*iordi_sesidi_di"
2599 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600 (ior:DI (sign_extend:DI
2601 (match_operand:SI 2 "s_register_operand" "r,r"))
2602 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2605 [(set_attr "length" "8")
2606 (set_attr "predicable" "yes")]
2609 (define_expand "iorsi3"
2610 [(set (match_operand:SI 0 "s_register_operand" "")
2611 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2612 (match_operand:SI 2 "reg_or_int_operand" "")))]
2615 if (GET_CODE (operands[2]) == CONST_INT)
2619 arm_split_constant (IOR, SImode, NULL_RTX,
2620 INTVAL (operands[2]), operands[0], operands[1],
2621 optimize && can_create_pseudo_p ());
2624 else /* TARGET_THUMB1 */
2625 operands [2] = force_reg (SImode, operands [2]);
2630 (define_insn_and_split "*arm_iorsi3"
2631 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2632 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2633 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2639 && GET_CODE (operands[2]) == CONST_INT
2640 && !const_ok_for_arm (INTVAL (operands[2]))"
2641 [(clobber (const_int 0))]
2643 arm_split_constant (IOR, SImode, curr_insn,
2644 INTVAL (operands[2]), operands[0], operands[1], 0);
2647 [(set_attr "length" "4,16")
2648 (set_attr "predicable" "yes")]
2651 (define_insn "*thumb1_iorsi3"
2652 [(set (match_operand:SI 0 "register_operand" "=l")
2653 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2654 (match_operand:SI 2 "register_operand" "l")))]
2657 [(set_attr "length" "2")]
2661 [(match_scratch:SI 3 "r")
2662 (set (match_operand:SI 0 "arm_general_register_operand" "")
2663 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2664 (match_operand:SI 2 "const_int_operand" "")))]
2666 && !const_ok_for_arm (INTVAL (operands[2]))
2667 && const_ok_for_arm (~INTVAL (operands[2]))"
2668 [(set (match_dup 3) (match_dup 2))
2669 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2673 (define_insn "*iorsi3_compare0"
2674 [(set (reg:CC_NOOV CC_REGNUM)
2675 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2676 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2678 (set (match_operand:SI 0 "s_register_operand" "=r")
2679 (ior:SI (match_dup 1) (match_dup 2)))]
2681 "orr%.\\t%0, %1, %2"
2682 [(set_attr "conds" "set")]
2685 (define_insn "*iorsi3_compare0_scratch"
2686 [(set (reg:CC_NOOV CC_REGNUM)
2687 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2688 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2690 (clobber (match_scratch:SI 0 "=r"))]
2692 "orr%.\\t%0, %1, %2"
2693 [(set_attr "conds" "set")]
2696 (define_insn "xordi3"
2697 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2698 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2699 (match_operand:DI 2 "s_register_operand" "r,r")))]
2700 "TARGET_32BIT && !TARGET_IWMMXT"
2702 [(set_attr "length" "8")
2703 (set_attr "predicable" "yes")]
2706 (define_insn "*xordi_zesidi_di"
2707 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2708 (xor:DI (zero_extend:DI
2709 (match_operand:SI 2 "s_register_operand" "r,r"))
2710 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2713 eor%?\\t%Q0, %Q1, %2
2715 [(set_attr "length" "4,8")
2716 (set_attr "predicable" "yes")]
2719 (define_insn "*xordi_sesidi_di"
2720 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2721 (xor:DI (sign_extend:DI
2722 (match_operand:SI 2 "s_register_operand" "r,r"))
2723 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2726 [(set_attr "length" "8")
2727 (set_attr "predicable" "yes")]
2730 (define_expand "xorsi3"
2731 [(set (match_operand:SI 0 "s_register_operand" "")
2732 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2733 (match_operand:SI 2 "arm_rhs_operand" "")))]
2736 if (GET_CODE (operands[2]) == CONST_INT)
2737 operands[2] = force_reg (SImode, operands[2]);
2741 (define_insn "*arm_xorsi3"
2742 [(set (match_operand:SI 0 "s_register_operand" "=r")
2743 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2744 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2746 "eor%?\\t%0, %1, %2"
2747 [(set_attr "predicable" "yes")]
2750 (define_insn "*thumb1_xorsi3"
2751 [(set (match_operand:SI 0 "register_operand" "=l")
2752 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2753 (match_operand:SI 2 "register_operand" "l")))]
2756 [(set_attr "length" "2")]
2759 (define_insn "*xorsi3_compare0"
2760 [(set (reg:CC_NOOV CC_REGNUM)
2761 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2762 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2764 (set (match_operand:SI 0 "s_register_operand" "=r")
2765 (xor:SI (match_dup 1) (match_dup 2)))]
2767 "eor%.\\t%0, %1, %2"
2768 [(set_attr "conds" "set")]
2771 (define_insn "*xorsi3_compare0_scratch"
2772 [(set (reg:CC_NOOV CC_REGNUM)
2773 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2774 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2778 [(set_attr "conds" "set")]
2781 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2782 ; (NOT D) we can sometimes merge the final NOT into one of the following
2786 [(set (match_operand:SI 0 "s_register_operand" "")
2787 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2788 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2789 (match_operand:SI 3 "arm_rhs_operand" "")))
2790 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2792 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2793 (not:SI (match_dup 3))))
2794 (set (match_dup 0) (not:SI (match_dup 4)))]
2798 (define_insn "*andsi_iorsi3_notsi"
2799 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2800 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2801 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2802 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2804 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2805 [(set_attr "length" "8")
2806 (set_attr "ce_count" "2")
2807 (set_attr "predicable" "yes")]
2810 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2811 ; insns are available?
2813 [(set (match_operand:SI 0 "s_register_operand" "")
2814 (match_operator:SI 1 "logical_binary_operator"
2815 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2816 (match_operand:SI 3 "const_int_operand" "")
2817 (match_operand:SI 4 "const_int_operand" ""))
2818 (match_operator:SI 9 "logical_binary_operator"
2819 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2820 (match_operand:SI 6 "const_int_operand" ""))
2821 (match_operand:SI 7 "s_register_operand" "")])]))
2822 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2824 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2825 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2828 [(ashift:SI (match_dup 2) (match_dup 4))
2832 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2835 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2839 [(set (match_operand:SI 0 "s_register_operand" "")
2840 (match_operator:SI 1 "logical_binary_operator"
2841 [(match_operator:SI 9 "logical_binary_operator"
2842 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2843 (match_operand:SI 6 "const_int_operand" ""))
2844 (match_operand:SI 7 "s_register_operand" "")])
2845 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2846 (match_operand:SI 3 "const_int_operand" "")
2847 (match_operand:SI 4 "const_int_operand" ""))]))
2848 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2850 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2851 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2854 [(ashift:SI (match_dup 2) (match_dup 4))
2858 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2861 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2865 [(set (match_operand:SI 0 "s_register_operand" "")
2866 (match_operator:SI 1 "logical_binary_operator"
2867 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2868 (match_operand:SI 3 "const_int_operand" "")
2869 (match_operand:SI 4 "const_int_operand" ""))
2870 (match_operator:SI 9 "logical_binary_operator"
2871 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2872 (match_operand:SI 6 "const_int_operand" ""))
2873 (match_operand:SI 7 "s_register_operand" "")])]))
2874 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2876 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2877 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2880 [(ashift:SI (match_dup 2) (match_dup 4))
2884 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2887 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2891 [(set (match_operand:SI 0 "s_register_operand" "")
2892 (match_operator:SI 1 "logical_binary_operator"
2893 [(match_operator:SI 9 "logical_binary_operator"
2894 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2895 (match_operand:SI 6 "const_int_operand" ""))
2896 (match_operand:SI 7 "s_register_operand" "")])
2897 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2898 (match_operand:SI 3 "const_int_operand" "")
2899 (match_operand:SI 4 "const_int_operand" ""))]))
2900 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2902 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2903 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2906 [(ashift:SI (match_dup 2) (match_dup 4))
2910 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2913 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2917 ;; Minimum and maximum insns
2919 (define_expand "smaxsi3"
2921 (set (match_operand:SI 0 "s_register_operand" "")
2922 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2923 (match_operand:SI 2 "arm_rhs_operand" "")))
2924 (clobber (reg:CC CC_REGNUM))])]
2927 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2929 /* No need for a clobber of the condition code register here. */
2930 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2931 gen_rtx_SMAX (SImode, operands[1],
2937 (define_insn "*smax_0"
2938 [(set (match_operand:SI 0 "s_register_operand" "=r")
2939 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2942 "bic%?\\t%0, %1, %1, asr #31"
2943 [(set_attr "predicable" "yes")]
2946 (define_insn "*smax_m1"
2947 [(set (match_operand:SI 0 "s_register_operand" "=r")
2948 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2951 "orr%?\\t%0, %1, %1, asr #31"
2952 [(set_attr "predicable" "yes")]
2955 (define_insn "*arm_smax_insn"
2956 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2957 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2958 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2959 (clobber (reg:CC CC_REGNUM))]
2962 cmp\\t%1, %2\;movlt\\t%0, %2
2963 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2964 [(set_attr "conds" "clob")
2965 (set_attr "length" "8,12")]
2968 (define_expand "sminsi3"
2970 (set (match_operand:SI 0 "s_register_operand" "")
2971 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2972 (match_operand:SI 2 "arm_rhs_operand" "")))
2973 (clobber (reg:CC CC_REGNUM))])]
2976 if (operands[2] == const0_rtx)
2978 /* No need for a clobber of the condition code register here. */
2979 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2980 gen_rtx_SMIN (SImode, operands[1],
2986 (define_insn "*smin_0"
2987 [(set (match_operand:SI 0 "s_register_operand" "=r")
2988 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2991 "and%?\\t%0, %1, %1, asr #31"
2992 [(set_attr "predicable" "yes")]
2995 (define_insn "*arm_smin_insn"
2996 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2997 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2998 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2999 (clobber (reg:CC CC_REGNUM))]
3002 cmp\\t%1, %2\;movge\\t%0, %2
3003 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3004 [(set_attr "conds" "clob")
3005 (set_attr "length" "8,12")]
3008 (define_expand "umaxsi3"
3010 (set (match_operand:SI 0 "s_register_operand" "")
3011 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3012 (match_operand:SI 2 "arm_rhs_operand" "")))
3013 (clobber (reg:CC CC_REGNUM))])]
3018 (define_insn "*arm_umaxsi3"
3019 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3020 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3021 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3022 (clobber (reg:CC CC_REGNUM))]
3025 cmp\\t%1, %2\;movcc\\t%0, %2
3026 cmp\\t%1, %2\;movcs\\t%0, %1
3027 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3028 [(set_attr "conds" "clob")
3029 (set_attr "length" "8,8,12")]
3032 (define_expand "uminsi3"
3034 (set (match_operand:SI 0 "s_register_operand" "")
3035 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3036 (match_operand:SI 2 "arm_rhs_operand" "")))
3037 (clobber (reg:CC CC_REGNUM))])]
3042 (define_insn "*arm_uminsi3"
3043 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3044 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3045 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3046 (clobber (reg:CC CC_REGNUM))]
3049 cmp\\t%1, %2\;movcs\\t%0, %2
3050 cmp\\t%1, %2\;movcc\\t%0, %1
3051 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3052 [(set_attr "conds" "clob")
3053 (set_attr "length" "8,8,12")]
3056 (define_insn "*store_minmaxsi"
3057 [(set (match_operand:SI 0 "memory_operand" "=m")
3058 (match_operator:SI 3 "minmax_operator"
3059 [(match_operand:SI 1 "s_register_operand" "r")
3060 (match_operand:SI 2 "s_register_operand" "r")]))
3061 (clobber (reg:CC CC_REGNUM))]
3064 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3065 operands[1], operands[2]);
3066 output_asm_insn (\"cmp\\t%1, %2\", operands);
3068 output_asm_insn (\"ite\t%d3\", operands);
3069 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3070 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3073 [(set_attr "conds" "clob")
3074 (set (attr "length")
3075 (if_then_else (eq_attr "is_thumb" "yes")
3078 (set_attr "type" "store1")]
3081 ; Reject the frame pointer in operand[1], since reloading this after
3082 ; it has been eliminated can cause carnage.
3083 (define_insn "*minmax_arithsi"
3084 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3085 (match_operator:SI 4 "shiftable_operator"
3086 [(match_operator:SI 5 "minmax_operator"
3087 [(match_operand:SI 2 "s_register_operand" "r,r")
3088 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3089 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3090 (clobber (reg:CC CC_REGNUM))]
3091 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3094 enum rtx_code code = GET_CODE (operands[4]);
3097 if (which_alternative != 0 || operands[3] != const0_rtx
3098 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3103 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3104 operands[2], operands[3]);
3105 output_asm_insn (\"cmp\\t%2, %3\", operands);
3109 output_asm_insn (\"ite\\t%d5\", operands);
3111 output_asm_insn (\"it\\t%d5\", operands);
3113 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3115 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3118 [(set_attr "conds" "clob")
3119 (set (attr "length")
3120 (if_then_else (eq_attr "is_thumb" "yes")
3126 ;; Shift and rotation insns
3128 (define_expand "ashldi3"
3129 [(set (match_operand:DI 0 "s_register_operand" "")
3130 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3131 (match_operand:SI 2 "reg_or_int_operand" "")))]
3134 if (GET_CODE (operands[2]) == CONST_INT)
3136 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3138 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3141 /* Ideally we shouldn't fail here if we could know that operands[1]
3142 ends up already living in an iwmmxt register. Otherwise it's
3143 cheaper to have the alternate code being generated than moving
3144 values to iwmmxt regs and back. */
3147 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3152 (define_insn "arm_ashldi3_1bit"
3153 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3154 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3156 (clobber (reg:CC CC_REGNUM))]
3158 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3159 [(set_attr "conds" "clob")
3160 (set_attr "length" "8")]
3163 (define_expand "ashlsi3"
3164 [(set (match_operand:SI 0 "s_register_operand" "")
3165 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3166 (match_operand:SI 2 "arm_rhs_operand" "")))]
3169 if (GET_CODE (operands[2]) == CONST_INT
3170 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3172 emit_insn (gen_movsi (operands[0], const0_rtx));
3178 (define_insn "*thumb1_ashlsi3"
3179 [(set (match_operand:SI 0 "register_operand" "=l,l")
3180 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3181 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3184 [(set_attr "length" "2")]
3187 (define_expand "ashrdi3"
3188 [(set (match_operand:DI 0 "s_register_operand" "")
3189 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3190 (match_operand:SI 2 "reg_or_int_operand" "")))]
3193 if (GET_CODE (operands[2]) == CONST_INT)
3195 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3197 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3200 /* Ideally we shouldn't fail here if we could know that operands[1]
3201 ends up already living in an iwmmxt register. Otherwise it's
3202 cheaper to have the alternate code being generated than moving
3203 values to iwmmxt regs and back. */
3206 else if (!TARGET_REALLY_IWMMXT)
3211 (define_insn "arm_ashrdi3_1bit"
3212 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3213 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3215 (clobber (reg:CC CC_REGNUM))]
3217 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3218 [(set_attr "conds" "clob")
3219 (set_attr "length" "8")]
3222 (define_expand "ashrsi3"
3223 [(set (match_operand:SI 0 "s_register_operand" "")
3224 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3225 (match_operand:SI 2 "arm_rhs_operand" "")))]
3228 if (GET_CODE (operands[2]) == CONST_INT
3229 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3230 operands[2] = GEN_INT (31);
3234 (define_insn "*thumb1_ashrsi3"
3235 [(set (match_operand:SI 0 "register_operand" "=l,l")
3236 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3237 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3240 [(set_attr "length" "2")]
3243 (define_expand "lshrdi3"
3244 [(set (match_operand:DI 0 "s_register_operand" "")
3245 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3246 (match_operand:SI 2 "reg_or_int_operand" "")))]
3249 if (GET_CODE (operands[2]) == CONST_INT)
3251 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3253 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3256 /* Ideally we shouldn't fail here if we could know that operands[1]
3257 ends up already living in an iwmmxt register. Otherwise it's
3258 cheaper to have the alternate code being generated than moving
3259 values to iwmmxt regs and back. */
3262 else if (!TARGET_REALLY_IWMMXT)
3267 (define_insn "arm_lshrdi3_1bit"
3268 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3269 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3271 (clobber (reg:CC CC_REGNUM))]
3273 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3274 [(set_attr "conds" "clob")
3275 (set_attr "length" "8")]
3278 (define_expand "lshrsi3"
3279 [(set (match_operand:SI 0 "s_register_operand" "")
3280 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3281 (match_operand:SI 2 "arm_rhs_operand" "")))]
3284 if (GET_CODE (operands[2]) == CONST_INT
3285 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3287 emit_insn (gen_movsi (operands[0], const0_rtx));
3293 (define_insn "*thumb1_lshrsi3"
3294 [(set (match_operand:SI 0 "register_operand" "=l,l")
3295 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3296 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3299 [(set_attr "length" "2")]
3302 (define_expand "rotlsi3"
3303 [(set (match_operand:SI 0 "s_register_operand" "")
3304 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3305 (match_operand:SI 2 "reg_or_int_operand" "")))]
3308 if (GET_CODE (operands[2]) == CONST_INT)
3309 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3312 rtx reg = gen_reg_rtx (SImode);
3313 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3319 (define_expand "rotrsi3"
3320 [(set (match_operand:SI 0 "s_register_operand" "")
3321 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3322 (match_operand:SI 2 "arm_rhs_operand" "")))]
3327 if (GET_CODE (operands[2]) == CONST_INT
3328 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3329 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3331 else /* TARGET_THUMB1 */
3333 if (GET_CODE (operands [2]) == CONST_INT)
3334 operands [2] = force_reg (SImode, operands[2]);
3339 (define_insn "*thumb1_rotrsi3"
3340 [(set (match_operand:SI 0 "register_operand" "=l")
3341 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3342 (match_operand:SI 2 "register_operand" "l")))]
3345 [(set_attr "length" "2")]
3348 (define_insn "*arm_shiftsi3"
3349 [(set (match_operand:SI 0 "s_register_operand" "=r")
3350 (match_operator:SI 3 "shift_operator"
3351 [(match_operand:SI 1 "s_register_operand" "r")
3352 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3354 "* return arm_output_shift(operands, 0);"
3355 [(set_attr "predicable" "yes")
3356 (set_attr "shift" "1")
3357 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3358 (const_string "alu_shift")
3359 (const_string "alu_shift_reg")))]
3362 (define_insn "*shiftsi3_compare0"
3363 [(set (reg:CC_NOOV CC_REGNUM)
3364 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3365 [(match_operand:SI 1 "s_register_operand" "r")
3366 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3368 (set (match_operand:SI 0 "s_register_operand" "=r")
3369 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3371 "* return arm_output_shift(operands, 1);"
3372 [(set_attr "conds" "set")
3373 (set_attr "shift" "1")
3374 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3375 (const_string "alu_shift")
3376 (const_string "alu_shift_reg")))]
3379 (define_insn "*shiftsi3_compare0_scratch"
3380 [(set (reg:CC_NOOV CC_REGNUM)
3381 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3382 [(match_operand:SI 1 "s_register_operand" "r")
3383 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3385 (clobber (match_scratch:SI 0 "=r"))]
3387 "* return arm_output_shift(operands, 1);"
3388 [(set_attr "conds" "set")
3389 (set_attr "shift" "1")]
3392 (define_insn "*arm_notsi_shiftsi"
3393 [(set (match_operand:SI 0 "s_register_operand" "=r")
3394 (not:SI (match_operator:SI 3 "shift_operator"
3395 [(match_operand:SI 1 "s_register_operand" "r")
3396 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3399 [(set_attr "predicable" "yes")
3400 (set_attr "shift" "1")
3401 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3402 (const_string "alu_shift")
3403 (const_string "alu_shift_reg")))]
3406 (define_insn "*arm_notsi_shiftsi_compare0"
3407 [(set (reg:CC_NOOV CC_REGNUM)
3408 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3409 [(match_operand:SI 1 "s_register_operand" "r")
3410 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3412 (set (match_operand:SI 0 "s_register_operand" "=r")
3413 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3416 [(set_attr "conds" "set")
3417 (set_attr "shift" "1")
3418 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3419 (const_string "alu_shift")
3420 (const_string "alu_shift_reg")))]
3423 (define_insn "*arm_not_shiftsi_compare0_scratch"
3424 [(set (reg:CC_NOOV CC_REGNUM)
3425 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3426 [(match_operand:SI 1 "s_register_operand" "r")
3427 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3429 (clobber (match_scratch:SI 0 "=r"))]
3432 [(set_attr "conds" "set")
3433 (set_attr "shift" "1")
3434 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3435 (const_string "alu_shift")
3436 (const_string "alu_shift_reg")))]
3439 ;; We don't really have extzv, but defining this using shifts helps
3440 ;; to reduce register pressure later on.
3442 (define_expand "extzv"
3444 (ashift:SI (match_operand:SI 1 "register_operand" "")
3445 (match_operand:SI 2 "const_int_operand" "")))
3446 (set (match_operand:SI 0 "register_operand" "")
3447 (lshiftrt:SI (match_dup 4)
3448 (match_operand:SI 3 "const_int_operand" "")))]
3449 "TARGET_THUMB1 || arm_arch_thumb2"
3452 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3453 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3455 if (arm_arch_thumb2)
3457 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3462 operands[3] = GEN_INT (rshift);
3466 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3470 operands[2] = GEN_INT (lshift);
3471 operands[4] = gen_reg_rtx (SImode);
3476 [(set (match_operand:SI 0 "s_register_operand" "=r")
3477 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3478 (match_operand:SI 2 "const_int_operand" "M")
3479 (match_operand:SI 3 "const_int_operand" "M")))]
3481 "sbfx%?\t%0, %1, %3, %2"
3482 [(set_attr "length" "4")
3483 (set_attr "predicable" "yes")]
3486 (define_insn "extzv_t2"
3487 [(set (match_operand:SI 0 "s_register_operand" "=r")
3488 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3489 (match_operand:SI 2 "const_int_operand" "M")
3490 (match_operand:SI 3 "const_int_operand" "M")))]
3492 "ubfx%?\t%0, %1, %3, %2"
3493 [(set_attr "length" "4")
3494 (set_attr "predicable" "yes")]
3498 ;; Unary arithmetic insns
3500 (define_expand "negdi2"
3502 [(set (match_operand:DI 0 "s_register_operand" "")
3503 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3504 (clobber (reg:CC CC_REGNUM))])]
3509 if (GET_CODE (operands[1]) != REG)
3510 operands[1] = force_reg (DImode, operands[1]);
3515 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3516 ;; The second alternative is to allow the common case of a *full* overlap.
3517 (define_insn "*arm_negdi2"
3518 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3519 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3520 (clobber (reg:CC CC_REGNUM))]
3522 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3523 [(set_attr "conds" "clob")
3524 (set_attr "length" "8")]
3527 (define_insn "*thumb1_negdi2"
3528 [(set (match_operand:DI 0 "register_operand" "=&l")
3529 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3530 (clobber (reg:CC CC_REGNUM))]
3532 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3533 [(set_attr "length" "6")]
3536 (define_expand "negsi2"
3537 [(set (match_operand:SI 0 "s_register_operand" "")
3538 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3543 (define_insn "*arm_negsi2"
3544 [(set (match_operand:SI 0 "s_register_operand" "=r")
3545 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3547 "rsb%?\\t%0, %1, #0"
3548 [(set_attr "predicable" "yes")]
3551 (define_insn "*thumb1_negsi2"
3552 [(set (match_operand:SI 0 "register_operand" "=l")
3553 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3556 [(set_attr "length" "2")]
3559 (define_expand "negsf2"
3560 [(set (match_operand:SF 0 "s_register_operand" "")
3561 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3562 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3566 (define_expand "negdf2"
3567 [(set (match_operand:DF 0 "s_register_operand" "")
3568 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3569 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3572 ;; abssi2 doesn't really clobber the condition codes if a different register
3573 ;; is being set. To keep things simple, assume during rtl manipulations that
3574 ;; it does, but tell the final scan operator the truth. Similarly for
3577 (define_expand "abssi2"
3579 [(set (match_operand:SI 0 "s_register_operand" "")
3580 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3581 (clobber (match_dup 2))])]
3585 operands[2] = gen_rtx_SCRATCH (SImode);
3587 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3590 (define_insn "*arm_abssi2"
3591 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3592 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3593 (clobber (reg:CC CC_REGNUM))]
3596 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3597 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3598 [(set_attr "conds" "clob,*")
3599 (set_attr "shift" "1")
3600 ;; predicable can't be set based on the variant, so left as no
3601 (set_attr "length" "8")]
3604 (define_insn_and_split "*thumb1_abssi2"
3605 [(set (match_operand:SI 0 "s_register_operand" "=l")
3606 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3607 (clobber (match_scratch:SI 2 "=&l"))]
3610 "TARGET_THUMB1 && reload_completed"
3611 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3612 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3613 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3615 [(set_attr "length" "6")]
3618 (define_insn "*arm_neg_abssi2"
3619 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3620 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3621 (clobber (reg:CC CC_REGNUM))]
3624 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3625 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3626 [(set_attr "conds" "clob,*")
3627 (set_attr "shift" "1")
3628 ;; predicable can't be set based on the variant, so left as no
3629 (set_attr "length" "8")]
3632 (define_insn_and_split "*thumb1_neg_abssi2"
3633 [(set (match_operand:SI 0 "s_register_operand" "=l")
3634 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3635 (clobber (match_scratch:SI 2 "=&l"))]
3638 "TARGET_THUMB1 && reload_completed"
3639 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3640 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3641 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3643 [(set_attr "length" "6")]
3646 (define_expand "abssf2"
3647 [(set (match_operand:SF 0 "s_register_operand" "")
3648 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3649 "TARGET_32BIT && TARGET_HARD_FLOAT"
3652 (define_expand "absdf2"
3653 [(set (match_operand:DF 0 "s_register_operand" "")
3654 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3655 "TARGET_32BIT && TARGET_HARD_FLOAT"
3658 (define_expand "sqrtsf2"
3659 [(set (match_operand:SF 0 "s_register_operand" "")
3660 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3661 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3664 (define_expand "sqrtdf2"
3665 [(set (match_operand:DF 0 "s_register_operand" "")
3666 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3667 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3670 (define_insn_and_split "one_cmpldi2"
3671 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3672 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3675 "TARGET_32BIT && reload_completed"
3676 [(set (match_dup 0) (not:SI (match_dup 1)))
3677 (set (match_dup 2) (not:SI (match_dup 3)))]
3680 operands[2] = gen_highpart (SImode, operands[0]);
3681 operands[0] = gen_lowpart (SImode, operands[0]);
3682 operands[3] = gen_highpart (SImode, operands[1]);
3683 operands[1] = gen_lowpart (SImode, operands[1]);
3685 [(set_attr "length" "8")
3686 (set_attr "predicable" "yes")]
3689 (define_expand "one_cmplsi2"
3690 [(set (match_operand:SI 0 "s_register_operand" "")
3691 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3696 (define_insn "*arm_one_cmplsi2"
3697 [(set (match_operand:SI 0 "s_register_operand" "=r")
3698 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3701 [(set_attr "predicable" "yes")]
3704 (define_insn "*thumb1_one_cmplsi2"
3705 [(set (match_operand:SI 0 "register_operand" "=l")
3706 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3709 [(set_attr "length" "2")]
3712 (define_insn "*notsi_compare0"
3713 [(set (reg:CC_NOOV CC_REGNUM)
3714 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3716 (set (match_operand:SI 0 "s_register_operand" "=r")
3717 (not:SI (match_dup 1)))]
3720 [(set_attr "conds" "set")]
3723 (define_insn "*notsi_compare0_scratch"
3724 [(set (reg:CC_NOOV CC_REGNUM)
3725 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3727 (clobber (match_scratch:SI 0 "=r"))]
3730 [(set_attr "conds" "set")]
3733 ;; Fixed <--> Floating conversion insns
3735 (define_expand "floatsihf2"
3736 [(set (match_operand:HF 0 "general_operand" "")
3737 (float:HF (match_operand:SI 1 "general_operand" "")))]
3741 rtx op1 = gen_reg_rtx (SFmode);
3742 expand_float (op1, operands[1], 0);
3743 op1 = convert_to_mode (HFmode, op1, 0);
3744 emit_move_insn (operands[0], op1);
3749 (define_expand "floatdihf2"
3750 [(set (match_operand:HF 0 "general_operand" "")
3751 (float:HF (match_operand:DI 1 "general_operand" "")))]
3755 rtx op1 = gen_reg_rtx (SFmode);
3756 expand_float (op1, operands[1], 0);
3757 op1 = convert_to_mode (HFmode, op1, 0);
3758 emit_move_insn (operands[0], op1);
3763 (define_expand "floatsisf2"
3764 [(set (match_operand:SF 0 "s_register_operand" "")
3765 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3766 "TARGET_32BIT && TARGET_HARD_FLOAT"
3768 if (TARGET_MAVERICK)
3770 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3775 (define_expand "floatsidf2"
3776 [(set (match_operand:DF 0 "s_register_operand" "")
3777 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3778 "TARGET_32BIT && TARGET_HARD_FLOAT"
3780 if (TARGET_MAVERICK)
3782 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3787 (define_expand "fix_trunchfsi2"
3788 [(set (match_operand:SI 0 "general_operand" "")
3789 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3793 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3794 expand_fix (operands[0], op1, 0);
3799 (define_expand "fix_trunchfdi2"
3800 [(set (match_operand:DI 0 "general_operand" "")
3801 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3805 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3806 expand_fix (operands[0], op1, 0);
3811 (define_expand "fix_truncsfsi2"
3812 [(set (match_operand:SI 0 "s_register_operand" "")
3813 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3814 "TARGET_32BIT && TARGET_HARD_FLOAT"
3816 if (TARGET_MAVERICK)
3818 if (!cirrus_fp_register (operands[0], SImode))
3819 operands[0] = force_reg (SImode, operands[0]);
3820 if (!cirrus_fp_register (operands[1], SFmode))
3821 operands[1] = force_reg (SFmode, operands[0]);
3822 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3827 (define_expand "fix_truncdfsi2"
3828 [(set (match_operand:SI 0 "s_register_operand" "")
3829 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3830 "TARGET_32BIT && TARGET_HARD_FLOAT"
3832 if (TARGET_MAVERICK)
3834 if (!cirrus_fp_register (operands[1], DFmode))
3835 operands[1] = force_reg (DFmode, operands[0]);
3836 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3843 (define_expand "truncdfsf2"
3844 [(set (match_operand:SF 0 "s_register_operand" "")
3846 (match_operand:DF 1 "s_register_operand" "")))]
3847 "TARGET_32BIT && TARGET_HARD_FLOAT"
3851 /* DFmode -> HFmode conversions have to go through SFmode. */
3852 (define_expand "truncdfhf2"
3853 [(set (match_operand:HF 0 "general_operand" "")
3855 (match_operand:DF 1 "general_operand" "")))]
3860 op1 = convert_to_mode (SFmode, operands[1], 0);
3861 op1 = convert_to_mode (HFmode, op1, 0);
3862 emit_move_insn (operands[0], op1);
3867 ;; Zero and sign extension instructions.
3869 (define_expand "zero_extendsidi2"
3870 [(set (match_operand:DI 0 "s_register_operand" "")
3871 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3876 (define_insn "*arm_zero_extendsidi2"
3877 [(set (match_operand:DI 0 "s_register_operand" "=r")
3878 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3881 if (REGNO (operands[1])
3882 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3883 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3884 return \"mov%?\\t%R0, #0\";
3886 [(set_attr "length" "8")
3887 (set_attr "predicable" "yes")]
3890 (define_expand "zero_extendqidi2"
3891 [(set (match_operand:DI 0 "s_register_operand" "")
3892 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3897 (define_insn "*arm_zero_extendqidi2"
3898 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3899 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3902 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3903 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3904 [(set_attr "length" "8")
3905 (set_attr "predicable" "yes")
3906 (set_attr "type" "*,load_byte")
3907 (set_attr "pool_range" "*,4092")
3908 (set_attr "neg_pool_range" "*,4084")]
3911 (define_expand "extendsidi2"
3912 [(set (match_operand:DI 0 "s_register_operand" "")
3913 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3918 (define_insn "*arm_extendsidi2"
3919 [(set (match_operand:DI 0 "s_register_operand" "=r")
3920 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3923 if (REGNO (operands[1])
3924 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3925 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3926 return \"mov%?\\t%R0, %Q0, asr #31\";
3928 [(set_attr "length" "8")
3929 (set_attr "shift" "1")
3930 (set_attr "predicable" "yes")]
3933 (define_expand "zero_extendhisi2"
3935 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3937 (set (match_operand:SI 0 "s_register_operand" "")
3938 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3942 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3944 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3945 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3949 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3951 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3955 if (!s_register_operand (operands[1], HImode))
3956 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3960 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3961 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3965 operands[1] = gen_lowpart (SImode, operands[1]);
3966 operands[2] = gen_reg_rtx (SImode);
3970 (define_insn "*thumb1_zero_extendhisi2"
3971 [(set (match_operand:SI 0 "register_operand" "=l")
3972 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3973 "TARGET_THUMB1 && !arm_arch6"
3975 rtx mem = XEXP (operands[1], 0);
3977 if (GET_CODE (mem) == CONST)
3978 mem = XEXP (mem, 0);
3980 if (GET_CODE (mem) == LABEL_REF)
3981 return \"ldr\\t%0, %1\";
3983 if (GET_CODE (mem) == PLUS)
3985 rtx a = XEXP (mem, 0);
3986 rtx b = XEXP (mem, 1);
3988 /* This can happen due to bugs in reload. */
3989 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3992 ops[0] = operands[0];
3995 output_asm_insn (\"mov %0, %1\", ops);
3997 XEXP (mem, 0) = operands[0];
4000 else if ( GET_CODE (a) == LABEL_REF
4001 && GET_CODE (b) == CONST_INT)
4002 return \"ldr\\t%0, %1\";
4005 return \"ldrh\\t%0, %1\";
4007 [(set_attr "length" "4")
4008 (set_attr "type" "load_byte")
4009 (set_attr "pool_range" "60")]
4012 (define_insn "*thumb1_zero_extendhisi2_v6"
4013 [(set (match_operand:SI 0 "register_operand" "=l,l")
4014 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4015 "TARGET_THUMB1 && arm_arch6"
4019 if (which_alternative == 0)
4020 return \"uxth\\t%0, %1\";
4022 mem = XEXP (operands[1], 0);
4024 if (GET_CODE (mem) == CONST)
4025 mem = XEXP (mem, 0);
4027 if (GET_CODE (mem) == LABEL_REF)
4028 return \"ldr\\t%0, %1\";
4030 if (GET_CODE (mem) == PLUS)
4032 rtx a = XEXP (mem, 0);
4033 rtx b = XEXP (mem, 1);
4035 /* This can happen due to bugs in reload. */
4036 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4039 ops[0] = operands[0];
4042 output_asm_insn (\"mov %0, %1\", ops);
4044 XEXP (mem, 0) = operands[0];
4047 else if ( GET_CODE (a) == LABEL_REF
4048 && GET_CODE (b) == CONST_INT)
4049 return \"ldr\\t%0, %1\";
4052 return \"ldrh\\t%0, %1\";
4054 [(set_attr "length" "2,4")
4055 (set_attr "type" "alu_shift,load_byte")
4056 (set_attr "pool_range" "*,60")]
4059 (define_insn "*arm_zero_extendhisi2"
4060 [(set (match_operand:SI 0 "s_register_operand" "=r")
4061 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4062 "TARGET_ARM && arm_arch4 && !arm_arch6"
4064 [(set_attr "type" "load_byte")
4065 (set_attr "predicable" "yes")
4066 (set_attr "pool_range" "256")
4067 (set_attr "neg_pool_range" "244")]
4070 (define_insn "*arm_zero_extendhisi2_v6"
4071 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4072 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4073 "TARGET_ARM && arm_arch6"
4077 [(set_attr "type" "alu_shift,load_byte")
4078 (set_attr "predicable" "yes")
4079 (set_attr "pool_range" "*,256")
4080 (set_attr "neg_pool_range" "*,244")]
4083 (define_insn "*arm_zero_extendhisi2addsi"
4084 [(set (match_operand:SI 0 "s_register_operand" "=r")
4085 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4086 (match_operand:SI 2 "s_register_operand" "r")))]
4088 "uxtah%?\\t%0, %2, %1"
4089 [(set_attr "type" "alu_shift")
4090 (set_attr "predicable" "yes")]
4093 (define_expand "zero_extendqisi2"
4094 [(set (match_operand:SI 0 "s_register_operand" "")
4095 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4098 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4102 emit_insn (gen_andsi3 (operands[0],
4103 gen_lowpart (SImode, operands[1]),
4106 else /* TARGET_THUMB */
4108 rtx temp = gen_reg_rtx (SImode);
4111 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4112 operands[1] = gen_lowpart (SImode, operands[1]);
4115 ops[1] = operands[1];
4116 ops[2] = GEN_INT (24);
4118 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4119 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4121 ops[0] = operands[0];
4123 ops[2] = GEN_INT (24);
4125 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4126 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4133 (define_insn "*thumb1_zero_extendqisi2"
4134 [(set (match_operand:SI 0 "register_operand" "=l")
4135 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4136 "TARGET_THUMB1 && !arm_arch6"
4138 [(set_attr "length" "2")
4139 (set_attr "type" "load_byte")
4140 (set_attr "pool_range" "32")]
4143 (define_insn "*thumb1_zero_extendqisi2_v6"
4144 [(set (match_operand:SI 0 "register_operand" "=l,l")
4145 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4146 "TARGET_THUMB1 && arm_arch6"
4150 [(set_attr "length" "2,2")
4151 (set_attr "type" "alu_shift,load_byte")
4152 (set_attr "pool_range" "*,32")]
4155 (define_insn "*arm_zero_extendqisi2"
4156 [(set (match_operand:SI 0 "s_register_operand" "=r")
4157 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4158 "TARGET_ARM && !arm_arch6"
4159 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4160 [(set_attr "type" "load_byte")
4161 (set_attr "predicable" "yes")
4162 (set_attr "pool_range" "4096")
4163 (set_attr "neg_pool_range" "4084")]
4166 (define_insn "*arm_zero_extendqisi2_v6"
4167 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4168 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4169 "TARGET_ARM && arm_arch6"
4172 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4173 [(set_attr "type" "alu_shift,load_byte")
4174 (set_attr "predicable" "yes")
4175 (set_attr "pool_range" "*,4096")
4176 (set_attr "neg_pool_range" "*,4084")]
4179 (define_insn "*arm_zero_extendqisi2addsi"
4180 [(set (match_operand:SI 0 "s_register_operand" "=r")
4181 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4182 (match_operand:SI 2 "s_register_operand" "r")))]
4184 "uxtab%?\\t%0, %2, %1"
4185 [(set_attr "predicable" "yes")
4186 (set_attr "insn" "xtab")
4187 (set_attr "type" "alu_shift")]
4191 [(set (match_operand:SI 0 "s_register_operand" "")
4192 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4193 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4194 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4195 [(set (match_dup 2) (match_dup 1))
4196 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4201 [(set (match_operand:SI 0 "s_register_operand" "")
4202 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4203 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4204 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4205 [(set (match_dup 2) (match_dup 1))
4206 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4210 (define_code_iterator ior_xor [ior xor])
4213 [(set (match_operand:SI 0 "s_register_operand" "")
4214 (ior_xor:SI (and:SI (ashift:SI
4215 (match_operand:SI 1 "s_register_operand" "")
4216 (match_operand:SI 2 "const_int_operand" ""))
4217 (match_operand:SI 3 "const_int_operand" ""))
4219 (match_operator 5 "subreg_lowpart_operator"
4220 [(match_operand:SI 4 "s_register_operand" "")]))))]
4222 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4223 == (GET_MODE_MASK (GET_MODE (operands[5]))
4224 & (GET_MODE_MASK (GET_MODE (operands[5]))
4225 << (INTVAL (operands[2])))))"
4226 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4228 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4229 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4232 (define_insn "*compareqi_eq0"
4233 [(set (reg:CC_Z CC_REGNUM)
4234 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4238 [(set_attr "conds" "set")]
4241 (define_expand "extendhisi2"
4243 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4245 (set (match_operand:SI 0 "s_register_operand" "")
4246 (ashiftrt:SI (match_dup 2)
4251 if (GET_CODE (operands[1]) == MEM)
4255 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4260 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4261 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4266 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4268 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4272 if (!s_register_operand (operands[1], HImode))
4273 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4278 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4280 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4281 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4286 operands[1] = gen_lowpart (SImode, operands[1]);
4287 operands[2] = gen_reg_rtx (SImode);
4291 (define_insn "thumb1_extendhisi2"
4292 [(set (match_operand:SI 0 "register_operand" "=l")
4293 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4294 (clobber (match_scratch:SI 2 "=&l"))]
4295 "TARGET_THUMB1 && !arm_arch6"
4299 rtx mem = XEXP (operands[1], 0);
4301 /* This code used to try to use 'V', and fix the address only if it was
4302 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4303 range of QImode offsets, and offsettable_address_p does a QImode
4306 if (GET_CODE (mem) == CONST)
4307 mem = XEXP (mem, 0);
4309 if (GET_CODE (mem) == LABEL_REF)
4310 return \"ldr\\t%0, %1\";
4312 if (GET_CODE (mem) == PLUS)
4314 rtx a = XEXP (mem, 0);
4315 rtx b = XEXP (mem, 1);
4317 if (GET_CODE (a) == LABEL_REF
4318 && GET_CODE (b) == CONST_INT)
4319 return \"ldr\\t%0, %1\";
4321 if (GET_CODE (b) == REG)
4322 return \"ldrsh\\t%0, %1\";
4330 ops[2] = const0_rtx;
4333 gcc_assert (GET_CODE (ops[1]) == REG);
4335 ops[0] = operands[0];
4336 ops[3] = operands[2];
4337 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4340 [(set_attr "length" "4")
4341 (set_attr "type" "load_byte")
4342 (set_attr "pool_range" "1020")]
4345 ;; We used to have an early-clobber on the scratch register here.
4346 ;; However, there's a bug somewhere in reload which means that this
4347 ;; can be partially ignored during spill allocation if the memory
4348 ;; address also needs reloading; this causes us to die later on when
4349 ;; we try to verify the operands. Fortunately, we don't really need
4350 ;; the early-clobber: we can always use operand 0 if operand 2
4351 ;; overlaps the address.
4352 (define_insn "*thumb1_extendhisi2_insn_v6"
4353 [(set (match_operand:SI 0 "register_operand" "=l,l")
4354 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4355 (clobber (match_scratch:SI 2 "=X,l"))]
4356 "TARGET_THUMB1 && arm_arch6"
4362 if (which_alternative == 0)
4363 return \"sxth\\t%0, %1\";
4365 mem = XEXP (operands[1], 0);
4367 /* This code used to try to use 'V', and fix the address only if it was
4368 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4369 range of QImode offsets, and offsettable_address_p does a QImode
4372 if (GET_CODE (mem) == CONST)
4373 mem = XEXP (mem, 0);
4375 if (GET_CODE (mem) == LABEL_REF)
4376 return \"ldr\\t%0, %1\";
4378 if (GET_CODE (mem) == PLUS)
4380 rtx a = XEXP (mem, 0);
4381 rtx b = XEXP (mem, 1);
4383 if (GET_CODE (a) == LABEL_REF
4384 && GET_CODE (b) == CONST_INT)
4385 return \"ldr\\t%0, %1\";
4387 if (GET_CODE (b) == REG)
4388 return \"ldrsh\\t%0, %1\";
4396 ops[2] = const0_rtx;
4399 gcc_assert (GET_CODE (ops[1]) == REG);
4401 ops[0] = operands[0];
4402 if (reg_mentioned_p (operands[2], ops[1]))
4405 ops[3] = operands[2];
4406 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4409 [(set_attr "length" "2,4")
4410 (set_attr "type" "alu_shift,load_byte")
4411 (set_attr "pool_range" "*,1020")]
4414 ;; This pattern will only be used when ldsh is not available
4415 (define_expand "extendhisi2_mem"
4416 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4418 (zero_extend:SI (match_dup 7)))
4419 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4420 (set (match_operand:SI 0 "" "")
4421 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4426 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4428 mem1 = change_address (operands[1], QImode, addr);
4429 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4430 operands[0] = gen_lowpart (SImode, operands[0]);
4432 operands[2] = gen_reg_rtx (SImode);
4433 operands[3] = gen_reg_rtx (SImode);
4434 operands[6] = gen_reg_rtx (SImode);
4437 if (BYTES_BIG_ENDIAN)
4439 operands[4] = operands[2];
4440 operands[5] = operands[3];
4444 operands[4] = operands[3];
4445 operands[5] = operands[2];
4450 (define_insn "*arm_extendhisi2"
4451 [(set (match_operand:SI 0 "s_register_operand" "=r")
4452 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4453 "TARGET_ARM && arm_arch4 && !arm_arch6"
4454 "ldr%(sh%)\\t%0, %1"
4455 [(set_attr "type" "load_byte")
4456 (set_attr "predicable" "yes")
4457 (set_attr "pool_range" "256")
4458 (set_attr "neg_pool_range" "244")]
4461 ;; ??? Check Thumb-2 pool range
4462 (define_insn "*arm_extendhisi2_v6"
4463 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4464 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4465 "TARGET_32BIT && arm_arch6"
4469 [(set_attr "type" "alu_shift,load_byte")
4470 (set_attr "predicable" "yes")
4471 (set_attr "pool_range" "*,256")
4472 (set_attr "neg_pool_range" "*,244")]
4475 (define_insn "*arm_extendhisi2addsi"
4476 [(set (match_operand:SI 0 "s_register_operand" "=r")
4477 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4478 (match_operand:SI 2 "s_register_operand" "r")))]
4480 "sxtah%?\\t%0, %2, %1"
4483 (define_expand "extendqihi2"
4485 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4487 (set (match_operand:HI 0 "s_register_operand" "")
4488 (ashiftrt:SI (match_dup 2)
4493 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4495 emit_insn (gen_rtx_SET (VOIDmode,
4497 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4500 if (!s_register_operand (operands[1], QImode))
4501 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4502 operands[0] = gen_lowpart (SImode, operands[0]);
4503 operands[1] = gen_lowpart (SImode, operands[1]);
4504 operands[2] = gen_reg_rtx (SImode);
4508 (define_insn "*arm_extendqihi_insn"
4509 [(set (match_operand:HI 0 "s_register_operand" "=r")
4510 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4511 "TARGET_ARM && arm_arch4"
4512 "ldr%(sb%)\\t%0, %1"
4513 [(set_attr "type" "load_byte")
4514 (set_attr "predicable" "yes")
4515 (set_attr "pool_range" "256")
4516 (set_attr "neg_pool_range" "244")]
4519 (define_expand "extendqisi2"
4521 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4523 (set (match_operand:SI 0 "s_register_operand" "")
4524 (ashiftrt:SI (match_dup 2)
4529 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4531 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4532 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4536 if (!s_register_operand (operands[1], QImode))
4537 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4541 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4542 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4546 operands[1] = gen_lowpart (SImode, operands[1]);
4547 operands[2] = gen_reg_rtx (SImode);
4551 (define_insn "*arm_extendqisi"
4552 [(set (match_operand:SI 0 "s_register_operand" "=r")
4553 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4554 "TARGET_ARM && arm_arch4 && !arm_arch6"
4555 "ldr%(sb%)\\t%0, %1"
4556 [(set_attr "type" "load_byte")
4557 (set_attr "predicable" "yes")
4558 (set_attr "pool_range" "256")
4559 (set_attr "neg_pool_range" "244")]
4562 (define_insn "*arm_extendqisi_v6"
4563 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4565 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4566 "TARGET_ARM && arm_arch6"
4570 [(set_attr "type" "alu_shift,load_byte")
4571 (set_attr "predicable" "yes")
4572 (set_attr "pool_range" "*,256")
4573 (set_attr "neg_pool_range" "*,244")]
4576 (define_insn "*arm_extendqisi2addsi"
4577 [(set (match_operand:SI 0 "s_register_operand" "=r")
4578 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4579 (match_operand:SI 2 "s_register_operand" "r")))]
4581 "sxtab%?\\t%0, %2, %1"
4582 [(set_attr "type" "alu_shift")
4583 (set_attr "insn" "xtab")
4584 (set_attr "predicable" "yes")]
4587 (define_insn "*thumb1_extendqisi2"
4588 [(set (match_operand:SI 0 "register_operand" "=l,l")
4589 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4590 "TARGET_THUMB1 && !arm_arch6"
4594 rtx mem = XEXP (operands[1], 0);
4596 if (GET_CODE (mem) == CONST)
4597 mem = XEXP (mem, 0);
4599 if (GET_CODE (mem) == LABEL_REF)
4600 return \"ldr\\t%0, %1\";
4602 if (GET_CODE (mem) == PLUS
4603 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4604 return \"ldr\\t%0, %1\";
4606 if (which_alternative == 0)
4607 return \"ldrsb\\t%0, %1\";
4609 ops[0] = operands[0];
4611 if (GET_CODE (mem) == PLUS)
4613 rtx a = XEXP (mem, 0);
4614 rtx b = XEXP (mem, 1);
4619 if (GET_CODE (a) == REG)
4621 if (GET_CODE (b) == REG)
4622 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4623 else if (REGNO (a) == REGNO (ops[0]))
4625 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4626 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4627 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4630 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4634 gcc_assert (GET_CODE (b) == REG);
4635 if (REGNO (b) == REGNO (ops[0]))
4637 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4638 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4639 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4642 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4645 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4647 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4648 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4649 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4654 ops[2] = const0_rtx;
4656 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4660 [(set_attr "length" "2,6")
4661 (set_attr "type" "load_byte,load_byte")
4662 (set_attr "pool_range" "32,32")]
4665 (define_insn "*thumb1_extendqisi2_v6"
4666 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4667 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4668 "TARGET_THUMB1 && arm_arch6"
4674 if (which_alternative == 0)
4675 return \"sxtb\\t%0, %1\";
4677 mem = XEXP (operands[1], 0);
4679 if (GET_CODE (mem) == CONST)
4680 mem = XEXP (mem, 0);
4682 if (GET_CODE (mem) == LABEL_REF)
4683 return \"ldr\\t%0, %1\";
4685 if (GET_CODE (mem) == PLUS
4686 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4687 return \"ldr\\t%0, %1\";
4689 if (which_alternative == 0)
4690 return \"ldrsb\\t%0, %1\";
4692 ops[0] = operands[0];
4694 if (GET_CODE (mem) == PLUS)
4696 rtx a = XEXP (mem, 0);
4697 rtx b = XEXP (mem, 1);
4702 if (GET_CODE (a) == REG)
4704 if (GET_CODE (b) == REG)
4705 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4706 else if (REGNO (a) == REGNO (ops[0]))
4708 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4709 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4712 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4716 gcc_assert (GET_CODE (b) == REG);
4717 if (REGNO (b) == REGNO (ops[0]))
4719 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4720 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4723 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4726 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4728 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4729 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4734 ops[2] = const0_rtx;
4736 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4740 [(set_attr "length" "2,2,4")
4741 (set_attr "type" "alu_shift,load_byte,load_byte")
4742 (set_attr "pool_range" "*,32,32")]
4745 (define_expand "extendsfdf2"
4746 [(set (match_operand:DF 0 "s_register_operand" "")
4747 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4748 "TARGET_32BIT && TARGET_HARD_FLOAT"
4752 /* HFmode -> DFmode conversions have to go through SFmode. */
4753 (define_expand "extendhfdf2"
4754 [(set (match_operand:DF 0 "general_operand" "")
4755 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4760 op1 = convert_to_mode (SFmode, operands[1], 0);
4761 op1 = convert_to_mode (DFmode, op1, 0);
4762 emit_insn (gen_movdf (operands[0], op1));
4767 ;; Move insns (including loads and stores)
4769 ;; XXX Just some ideas about movti.
4770 ;; I don't think these are a good idea on the arm, there just aren't enough
4772 ;;(define_expand "loadti"
4773 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4774 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4777 ;;(define_expand "storeti"
4778 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4779 ;; (match_operand:TI 1 "s_register_operand" ""))]
4782 ;;(define_expand "movti"
4783 ;; [(set (match_operand:TI 0 "general_operand" "")
4784 ;; (match_operand:TI 1 "general_operand" ""))]
4790 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4791 ;; operands[1] = copy_to_reg (operands[1]);
4792 ;; if (GET_CODE (operands[0]) == MEM)
4793 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4794 ;; else if (GET_CODE (operands[1]) == MEM)
4795 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4799 ;; emit_insn (insn);
4803 ;; Recognize garbage generated above.
4806 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4807 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4811 ;; register mem = (which_alternative < 3);
4812 ;; register const char *template;
4814 ;; operands[mem] = XEXP (operands[mem], 0);
4815 ;; switch (which_alternative)
4817 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4818 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4819 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4820 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4821 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4822 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4824 ;; output_asm_insn (template, operands);
4828 (define_expand "movdi"
4829 [(set (match_operand:DI 0 "general_operand" "")
4830 (match_operand:DI 1 "general_operand" ""))]
4833 if (can_create_pseudo_p ())
4835 if (GET_CODE (operands[0]) != REG)
4836 operands[1] = force_reg (DImode, operands[1]);
4841 (define_insn "*arm_movdi"
4842 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4843 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4845 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4847 && ( register_operand (operands[0], DImode)
4848 || register_operand (operands[1], DImode))"
4850 switch (which_alternative)
4857 return output_move_double (operands);
4860 [(set_attr "length" "8,12,16,8,8")
4861 (set_attr "type" "*,*,*,load2,store2")
4862 (set_attr "pool_range" "*,*,*,1020,*")
4863 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4867 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4868 (match_operand:ANY64 1 "const_double_operand" ""))]
4871 && (arm_const_double_inline_cost (operands[1])
4872 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4875 arm_split_constant (SET, SImode, curr_insn,
4876 INTVAL (gen_lowpart (SImode, operands[1])),
4877 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4878 arm_split_constant (SET, SImode, curr_insn,
4879 INTVAL (gen_highpart_mode (SImode,
4880 GET_MODE (operands[0]),
4882 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4887 ; If optimizing for size, or if we have load delay slots, then
4888 ; we want to split the constant into two separate operations.
4889 ; In both cases this may split a trivial part into a single data op
4890 ; leaving a single complex constant to load. We can also get longer
4891 ; offsets in a LDR which means we get better chances of sharing the pool
4892 ; entries. Finally, we can normally do a better job of scheduling
4893 ; LDR instructions than we can with LDM.
4894 ; This pattern will only match if the one above did not.
4896 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4897 (match_operand:ANY64 1 "const_double_operand" ""))]
4898 "TARGET_ARM && reload_completed
4899 && arm_const_double_by_parts (operands[1])"
4900 [(set (match_dup 0) (match_dup 1))
4901 (set (match_dup 2) (match_dup 3))]
4903 operands[2] = gen_highpart (SImode, operands[0]);
4904 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4906 operands[0] = gen_lowpart (SImode, operands[0]);
4907 operands[1] = gen_lowpart (SImode, operands[1]);
4912 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4913 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4914 "TARGET_EITHER && reload_completed"
4915 [(set (match_dup 0) (match_dup 1))
4916 (set (match_dup 2) (match_dup 3))]
4918 operands[2] = gen_highpart (SImode, operands[0]);
4919 operands[3] = gen_highpart (SImode, operands[1]);
4920 operands[0] = gen_lowpart (SImode, operands[0]);
4921 operands[1] = gen_lowpart (SImode, operands[1]);
4923 /* Handle a partial overlap. */
4924 if (rtx_equal_p (operands[0], operands[3]))
4926 rtx tmp0 = operands[0];
4927 rtx tmp1 = operands[1];
4929 operands[0] = operands[2];
4930 operands[1] = operands[3];
4937 ;; We can't actually do base+index doubleword loads if the index and
4938 ;; destination overlap. Split here so that we at least have chance to
4941 [(set (match_operand:DI 0 "s_register_operand" "")
4942 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4943 (match_operand:SI 2 "s_register_operand" ""))))]
4945 && reg_overlap_mentioned_p (operands[0], operands[1])
4946 && reg_overlap_mentioned_p (operands[0], operands[2])"
4948 (plus:SI (match_dup 1)
4951 (mem:DI (match_dup 4)))]
4953 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4957 ;;; ??? This should have alternatives for constants.
4958 ;;; ??? This was originally identical to the movdf_insn pattern.
4959 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4960 ;;; thumb_reorg with a memory reference.
4961 (define_insn "*thumb1_movdi_insn"
4962 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4963 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4965 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4966 && ( register_operand (operands[0], DImode)
4967 || register_operand (operands[1], DImode))"
4970 switch (which_alternative)
4974 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4975 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4976 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4978 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4980 operands[1] = GEN_INT (- INTVAL (operands[1]));
4981 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4983 return \"ldmia\\t%1, {%0, %H0}\";
4985 return \"stmia\\t%0, {%1, %H1}\";
4987 return thumb_load_double_from_address (operands);
4989 operands[2] = gen_rtx_MEM (SImode,
4990 plus_constant (XEXP (operands[0], 0), 4));
4991 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4994 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4995 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4996 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4999 [(set_attr "length" "4,4,6,2,2,6,4,4")
5000 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5001 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5004 (define_expand "movsi"
5005 [(set (match_operand:SI 0 "general_operand" "")
5006 (match_operand:SI 1 "general_operand" ""))]
5010 rtx base, offset, tmp;
5014 /* Everything except mem = const or mem = mem can be done easily. */
5015 if (GET_CODE (operands[0]) == MEM)
5016 operands[1] = force_reg (SImode, operands[1]);
5017 if (arm_general_register_operand (operands[0], SImode)
5018 && GET_CODE (operands[1]) == CONST_INT
5019 && !(const_ok_for_arm (INTVAL (operands[1]))
5020 || const_ok_for_arm (~INTVAL (operands[1]))))
5022 arm_split_constant (SET, SImode, NULL_RTX,
5023 INTVAL (operands[1]), operands[0], NULL_RTX,
5024 optimize && can_create_pseudo_p ());
5028 if (TARGET_USE_MOVT && !target_word_relocations
5029 && GET_CODE (operands[1]) == SYMBOL_REF
5030 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5032 arm_emit_movpair (operands[0], operands[1]);
5036 else /* TARGET_THUMB1... */
5038 if (can_create_pseudo_p ())
5040 if (GET_CODE (operands[0]) != REG)
5041 operands[1] = force_reg (SImode, operands[1]);
5045 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5047 split_const (operands[1], &base, &offset);
5048 if (GET_CODE (base) == SYMBOL_REF
5049 && !offset_within_block_p (base, INTVAL (offset)))
5051 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5052 emit_move_insn (tmp, base);
5053 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5058 /* Recognize the case where operand[1] is a reference to thread-local
5059 data and load its address to a register. */
5060 if (arm_tls_referenced_p (operands[1]))
5062 rtx tmp = operands[1];
5065 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5067 addend = XEXP (XEXP (tmp, 0), 1);
5068 tmp = XEXP (XEXP (tmp, 0), 0);
5071 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5072 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5074 tmp = legitimize_tls_address (tmp,
5075 !can_create_pseudo_p () ? operands[0] : 0);
5078 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5079 tmp = force_operand (tmp, operands[0]);
5084 && (CONSTANT_P (operands[1])
5085 || symbol_mentioned_p (operands[1])
5086 || label_mentioned_p (operands[1])))
5087 operands[1] = legitimize_pic_address (operands[1], SImode,
5088 (!can_create_pseudo_p ()
5095 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5096 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5097 ;; so this does not matter.
5098 (define_insn "*arm_movt"
5099 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5100 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5101 (match_operand:SI 2 "general_operand" "i")))]
5103 "movt%?\t%0, #:upper16:%c2"
5104 [(set_attr "predicable" "yes")
5105 (set_attr "length" "4")]
5108 (define_insn "*arm_movsi_insn"
5109 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5110 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5111 "TARGET_ARM && ! TARGET_IWMMXT
5112 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5113 && ( register_operand (operands[0], SImode)
5114 || register_operand (operands[1], SImode))"
5122 [(set_attr "type" "*,*,*,*,load1,store1")
5123 (set_attr "predicable" "yes")
5124 (set_attr "pool_range" "*,*,*,*,4096,*")
5125 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5129 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5130 (match_operand:SI 1 "const_int_operand" ""))]
5132 && (!(const_ok_for_arm (INTVAL (operands[1]))
5133 || const_ok_for_arm (~INTVAL (operands[1]))))"
5134 [(clobber (const_int 0))]
5136 arm_split_constant (SET, SImode, NULL_RTX,
5137 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5142 (define_insn "*thumb1_movsi_insn"
5143 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5144 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5146 && ( register_operand (operands[0], SImode)
5147 || register_operand (operands[1], SImode))"
5158 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5159 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5160 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5164 [(set (match_operand:SI 0 "register_operand" "")
5165 (match_operand:SI 1 "const_int_operand" ""))]
5166 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5167 [(set (match_dup 0) (match_dup 1))
5168 (set (match_dup 0) (neg:SI (match_dup 0)))]
5169 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5173 [(set (match_operand:SI 0 "register_operand" "")
5174 (match_operand:SI 1 "const_int_operand" ""))]
5175 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5176 [(set (match_dup 0) (match_dup 1))
5177 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5180 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5181 unsigned HOST_WIDE_INT mask = 0xff;
5184 for (i = 0; i < 25; i++)
5185 if ((val & (mask << i)) == val)
5188 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5192 operands[1] = GEN_INT (val >> i);
5193 operands[2] = GEN_INT (i);
5197 ;; When generating pic, we need to load the symbol offset into a register.
5198 ;; So that the optimizer does not confuse this with a normal symbol load
5199 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5200 ;; since that is the only type of relocation we can use.
5202 ;; The rather odd constraints on the following are to force reload to leave
5203 ;; the insn alone, and to force the minipool generation pass to then move
5204 ;; the GOT symbol to memory.
5206 (define_insn "pic_load_addr_arm"
5207 [(set (match_operand:SI 0 "s_register_operand" "=r")
5208 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5209 "TARGET_ARM && flag_pic"
5211 [(set_attr "type" "load1")
5212 (set (attr "pool_range") (const_int 4096))
5213 (set (attr "neg_pool_range") (const_int 4084))]
5216 (define_insn "pic_load_addr_thumb1"
5217 [(set (match_operand:SI 0 "s_register_operand" "=l")
5218 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5219 "TARGET_THUMB1 && flag_pic"
5221 [(set_attr "type" "load1")
5222 (set (attr "pool_range") (const_int 1024))]
5225 (define_insn "pic_add_dot_plus_four"
5226 [(set (match_operand:SI 0 "register_operand" "=r")
5227 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5229 (match_operand 2 "" "")]
5233 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5234 INTVAL (operands[2]));
5235 return \"add\\t%0, %|pc\";
5237 [(set_attr "length" "2")]
5240 (define_insn "pic_add_dot_plus_eight"
5241 [(set (match_operand:SI 0 "register_operand" "=r")
5242 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5244 (match_operand 2 "" "")]
5248 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5249 INTVAL (operands[2]));
5250 return \"add%?\\t%0, %|pc, %1\";
5252 [(set_attr "predicable" "yes")]
5255 (define_insn "tls_load_dot_plus_eight"
5256 [(set (match_operand:SI 0 "register_operand" "+r")
5257 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5259 (match_operand 2 "" "")]
5263 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5264 INTVAL (operands[2]));
5265 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5267 [(set_attr "predicable" "yes")]
5270 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5271 ;; followed by a load. These sequences can be crunched down to
5272 ;; tls_load_dot_plus_eight by a peephole.
5275 [(set (match_operand:SI 0 "register_operand" "")
5276 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5278 (match_operand 1 "" "")]
5280 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5281 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5283 (mem:SI (unspec:SI [(match_dup 3)
5290 (define_insn "pic_offset_arm"
5291 [(set (match_operand:SI 0 "register_operand" "=r")
5292 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5293 (unspec:SI [(match_operand:SI 2 "" "X")]
5294 UNSPEC_PIC_OFFSET))))]
5295 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5296 "ldr%?\\t%0, [%1,%2]"
5297 [(set_attr "type" "load1")]
5300 (define_expand "builtin_setjmp_receiver"
5301 [(label_ref (match_operand 0 "" ""))]
5305 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5307 if (arm_pic_register != INVALID_REGNUM)
5308 arm_load_pic_register (1UL << 3);
5312 ;; If copying one reg to another we can set the condition codes according to
5313 ;; its value. Such a move is common after a return from subroutine and the
5314 ;; result is being tested against zero.
5316 (define_insn "*movsi_compare0"
5317 [(set (reg:CC CC_REGNUM)
5318 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5320 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5326 [(set_attr "conds" "set")]
5329 ;; Subroutine to store a half word from a register into memory.
5330 ;; Operand 0 is the source register (HImode)
5331 ;; Operand 1 is the destination address in a register (SImode)
5333 ;; In both this routine and the next, we must be careful not to spill
5334 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5335 ;; can generate unrecognizable rtl.
5337 (define_expand "storehi"
5338 [;; store the low byte
5339 (set (match_operand 1 "" "") (match_dup 3))
5340 ;; extract the high byte
5342 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5343 ;; store the high byte
5344 (set (match_dup 4) (match_dup 5))]
5348 rtx op1 = operands[1];
5349 rtx addr = XEXP (op1, 0);
5350 enum rtx_code code = GET_CODE (addr);
5352 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5354 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5356 operands[4] = adjust_address (op1, QImode, 1);
5357 operands[1] = adjust_address (operands[1], QImode, 0);
5358 operands[3] = gen_lowpart (QImode, operands[0]);
5359 operands[0] = gen_lowpart (SImode, operands[0]);
5360 operands[2] = gen_reg_rtx (SImode);
5361 operands[5] = gen_lowpart (QImode, operands[2]);
5365 (define_expand "storehi_bigend"
5366 [(set (match_dup 4) (match_dup 3))
5368 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5369 (set (match_operand 1 "" "") (match_dup 5))]
5373 rtx op1 = operands[1];
5374 rtx addr = XEXP (op1, 0);
5375 enum rtx_code code = GET_CODE (addr);
5377 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5379 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5381 operands[4] = adjust_address (op1, QImode, 1);
5382 operands[1] = adjust_address (operands[1], QImode, 0);
5383 operands[3] = gen_lowpart (QImode, operands[0]);
5384 operands[0] = gen_lowpart (SImode, operands[0]);
5385 operands[2] = gen_reg_rtx (SImode);
5386 operands[5] = gen_lowpart (QImode, operands[2]);
5390 ;; Subroutine to store a half word integer constant into memory.
5391 (define_expand "storeinthi"
5392 [(set (match_operand 0 "" "")
5393 (match_operand 1 "" ""))
5394 (set (match_dup 3) (match_dup 2))]
5398 HOST_WIDE_INT value = INTVAL (operands[1]);
5399 rtx addr = XEXP (operands[0], 0);
5400 rtx op0 = operands[0];
5401 enum rtx_code code = GET_CODE (addr);
5403 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5405 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5407 operands[1] = gen_reg_rtx (SImode);
5408 if (BYTES_BIG_ENDIAN)
5410 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5411 if ((value & 255) == ((value >> 8) & 255))
5412 operands[2] = operands[1];
5415 operands[2] = gen_reg_rtx (SImode);
5416 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5421 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5422 if ((value & 255) == ((value >> 8) & 255))
5423 operands[2] = operands[1];
5426 operands[2] = gen_reg_rtx (SImode);
5427 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5431 operands[3] = adjust_address (op0, QImode, 1);
5432 operands[0] = adjust_address (operands[0], QImode, 0);
5433 operands[2] = gen_lowpart (QImode, operands[2]);
5434 operands[1] = gen_lowpart (QImode, operands[1]);
5438 (define_expand "storehi_single_op"
5439 [(set (match_operand:HI 0 "memory_operand" "")
5440 (match_operand:HI 1 "general_operand" ""))]
5441 "TARGET_32BIT && arm_arch4"
5443 if (!s_register_operand (operands[1], HImode))
5444 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5448 (define_expand "movhi"
5449 [(set (match_operand:HI 0 "general_operand" "")
5450 (match_operand:HI 1 "general_operand" ""))]
5455 if (can_create_pseudo_p ())
5457 if (GET_CODE (operands[0]) == MEM)
5461 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5464 if (GET_CODE (operands[1]) == CONST_INT)
5465 emit_insn (gen_storeinthi (operands[0], operands[1]));
5468 if (GET_CODE (operands[1]) == MEM)
5469 operands[1] = force_reg (HImode, operands[1]);
5470 if (BYTES_BIG_ENDIAN)
5471 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5473 emit_insn (gen_storehi (operands[1], operands[0]));
5477 /* Sign extend a constant, and keep it in an SImode reg. */
5478 else if (GET_CODE (operands[1]) == CONST_INT)
5480 rtx reg = gen_reg_rtx (SImode);
5481 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5483 /* If the constant is already valid, leave it alone. */
5484 if (!const_ok_for_arm (val))
5486 /* If setting all the top bits will make the constant
5487 loadable in a single instruction, then set them.
5488 Otherwise, sign extend the number. */
5490 if (const_ok_for_arm (~(val | ~0xffff)))
5492 else if (val & 0x8000)
5496 emit_insn (gen_movsi (reg, GEN_INT (val)));
5497 operands[1] = gen_lowpart (HImode, reg);
5499 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5500 && GET_CODE (operands[1]) == MEM)
5502 rtx reg = gen_reg_rtx (SImode);
5504 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5505 operands[1] = gen_lowpart (HImode, reg);
5507 else if (!arm_arch4)
5509 if (GET_CODE (operands[1]) == MEM)
5512 rtx offset = const0_rtx;
5513 rtx reg = gen_reg_rtx (SImode);
5515 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5516 || (GET_CODE (base) == PLUS
5517 && (GET_CODE (offset = XEXP (base, 1))
5519 && ((INTVAL(offset) & 1) != 1)
5520 && GET_CODE (base = XEXP (base, 0)) == REG))
5521 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5525 new_rtx = widen_memory_access (operands[1], SImode,
5526 ((INTVAL (offset) & ~3)
5527 - INTVAL (offset)));
5528 emit_insn (gen_movsi (reg, new_rtx));
5529 if (((INTVAL (offset) & 2) != 0)
5530 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5532 rtx reg2 = gen_reg_rtx (SImode);
5534 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5539 emit_insn (gen_movhi_bytes (reg, operands[1]));
5541 operands[1] = gen_lowpart (HImode, reg);
5545 /* Handle loading a large integer during reload. */
5546 else if (GET_CODE (operands[1]) == CONST_INT
5547 && !const_ok_for_arm (INTVAL (operands[1]))
5548 && !const_ok_for_arm (~INTVAL (operands[1])))
5550 /* Writing a constant to memory needs a scratch, which should
5551 be handled with SECONDARY_RELOADs. */
5552 gcc_assert (GET_CODE (operands[0]) == REG);
5554 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5555 emit_insn (gen_movsi (operands[0], operands[1]));
5559 else if (TARGET_THUMB2)
5561 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5562 if (can_create_pseudo_p ())
5564 if (GET_CODE (operands[0]) != REG)
5565 operands[1] = force_reg (HImode, operands[1]);
5566 /* Zero extend a constant, and keep it in an SImode reg. */
5567 else if (GET_CODE (operands[1]) == CONST_INT)
5569 rtx reg = gen_reg_rtx (SImode);
5570 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5572 emit_insn (gen_movsi (reg, GEN_INT (val)));
5573 operands[1] = gen_lowpart (HImode, reg);
5577 else /* TARGET_THUMB1 */
5579 if (can_create_pseudo_p ())
5581 if (GET_CODE (operands[1]) == CONST_INT)
5583 rtx reg = gen_reg_rtx (SImode);
5585 emit_insn (gen_movsi (reg, operands[1]));
5586 operands[1] = gen_lowpart (HImode, reg);
5589 /* ??? We shouldn't really get invalid addresses here, but this can
5590 happen if we are passed a SP (never OK for HImode/QImode) or
5591 virtual register (also rejected as illegitimate for HImode/QImode)
5592 relative address. */
5593 /* ??? This should perhaps be fixed elsewhere, for instance, in
5594 fixup_stack_1, by checking for other kinds of invalid addresses,
5595 e.g. a bare reference to a virtual register. This may confuse the
5596 alpha though, which must handle this case differently. */
5597 if (GET_CODE (operands[0]) == MEM
5598 && !memory_address_p (GET_MODE (operands[0]),
5599 XEXP (operands[0], 0)))
5601 = replace_equiv_address (operands[0],
5602 copy_to_reg (XEXP (operands[0], 0)));
5604 if (GET_CODE (operands[1]) == MEM
5605 && !memory_address_p (GET_MODE (operands[1]),
5606 XEXP (operands[1], 0)))
5608 = replace_equiv_address (operands[1],
5609 copy_to_reg (XEXP (operands[1], 0)));
5611 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5613 rtx reg = gen_reg_rtx (SImode);
5615 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5616 operands[1] = gen_lowpart (HImode, reg);
5619 if (GET_CODE (operands[0]) == MEM)
5620 operands[1] = force_reg (HImode, operands[1]);
5622 else if (GET_CODE (operands[1]) == CONST_INT
5623 && !satisfies_constraint_I (operands[1]))
5625 /* Handle loading a large integer during reload. */
5627 /* Writing a constant to memory needs a scratch, which should
5628 be handled with SECONDARY_RELOADs. */
5629 gcc_assert (GET_CODE (operands[0]) == REG);
5631 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5632 emit_insn (gen_movsi (operands[0], operands[1]));
5639 (define_insn "*thumb1_movhi_insn"
5640 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5641 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5643 && ( register_operand (operands[0], HImode)
5644 || register_operand (operands[1], HImode))"
5646 switch (which_alternative)
5648 case 0: return \"add %0, %1, #0\";
5649 case 2: return \"strh %1, %0\";
5650 case 3: return \"mov %0, %1\";
5651 case 4: return \"mov %0, %1\";
5652 case 5: return \"mov %0, %1\";
5653 default: gcc_unreachable ();
5655 /* The stack pointer can end up being taken as an index register.
5656 Catch this case here and deal with it. */
5657 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5658 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5659 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5662 ops[0] = operands[0];
5663 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5665 output_asm_insn (\"mov %0, %1\", ops);
5667 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5670 return \"ldrh %0, %1\";
5672 [(set_attr "length" "2,4,2,2,2,2")
5673 (set_attr "type" "*,load1,store1,*,*,*")]
5677 (define_expand "movhi_bytes"
5678 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5680 (zero_extend:SI (match_dup 6)))
5681 (set (match_operand:SI 0 "" "")
5682 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5687 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5689 mem1 = change_address (operands[1], QImode, addr);
5690 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5691 operands[0] = gen_lowpart (SImode, operands[0]);
5693 operands[2] = gen_reg_rtx (SImode);
5694 operands[3] = gen_reg_rtx (SImode);
5697 if (BYTES_BIG_ENDIAN)
5699 operands[4] = operands[2];
5700 operands[5] = operands[3];
5704 operands[4] = operands[3];
5705 operands[5] = operands[2];
5710 (define_expand "movhi_bigend"
5712 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5715 (ashiftrt:SI (match_dup 2) (const_int 16)))
5716 (set (match_operand:HI 0 "s_register_operand" "")
5720 operands[2] = gen_reg_rtx (SImode);
5721 operands[3] = gen_reg_rtx (SImode);
5722 operands[4] = gen_lowpart (HImode, operands[3]);
5726 ;; Pattern to recognize insn generated default case above
5727 (define_insn "*movhi_insn_arch4"
5728 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5729 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5732 && (GET_CODE (operands[1]) != CONST_INT
5733 || const_ok_for_arm (INTVAL (operands[1]))
5734 || const_ok_for_arm (~INTVAL (operands[1])))"
5736 mov%?\\t%0, %1\\t%@ movhi
5737 mvn%?\\t%0, #%B1\\t%@ movhi
5738 str%(h%)\\t%1, %0\\t%@ movhi
5739 ldr%(h%)\\t%0, %1\\t%@ movhi"
5740 [(set_attr "type" "*,*,store1,load1")
5741 (set_attr "predicable" "yes")
5742 (set_attr "pool_range" "*,*,*,256")
5743 (set_attr "neg_pool_range" "*,*,*,244")]
5746 (define_insn "*movhi_bytes"
5747 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5748 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5751 mov%?\\t%0, %1\\t%@ movhi
5752 mvn%?\\t%0, #%B1\\t%@ movhi"
5753 [(set_attr "predicable" "yes")]
5756 (define_expand "thumb_movhi_clobber"
5757 [(set (match_operand:HI 0 "memory_operand" "")
5758 (match_operand:HI 1 "register_operand" ""))
5759 (clobber (match_operand:DI 2 "register_operand" ""))]
5762 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5763 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5765 emit_insn (gen_movhi (operands[0], operands[1]));
5768 /* XXX Fixme, need to handle other cases here as well. */
5773 ;; We use a DImode scratch because we may occasionally need an additional
5774 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5775 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5776 (define_expand "reload_outhi"
5777 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5778 (match_operand:HI 1 "s_register_operand" "r")
5779 (match_operand:DI 2 "s_register_operand" "=&l")])]
5782 arm_reload_out_hi (operands);
5784 thumb_reload_out_hi (operands);
5789 (define_expand "reload_inhi"
5790 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5791 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5792 (match_operand:DI 2 "s_register_operand" "=&r")])]
5796 arm_reload_in_hi (operands);
5798 thumb_reload_out_hi (operands);
5802 (define_expand "movqi"
5803 [(set (match_operand:QI 0 "general_operand" "")
5804 (match_operand:QI 1 "general_operand" ""))]
5807 /* Everything except mem = const or mem = mem can be done easily */
5809 if (can_create_pseudo_p ())
5811 if (GET_CODE (operands[1]) == CONST_INT)
5813 rtx reg = gen_reg_rtx (SImode);
5815 emit_insn (gen_movsi (reg, operands[1]));
5816 operands[1] = gen_lowpart (QImode, reg);
5821 /* ??? We shouldn't really get invalid addresses here, but this can
5822 happen if we are passed a SP (never OK for HImode/QImode) or
5823 virtual register (also rejected as illegitimate for HImode/QImode)
5824 relative address. */
5825 /* ??? This should perhaps be fixed elsewhere, for instance, in
5826 fixup_stack_1, by checking for other kinds of invalid addresses,
5827 e.g. a bare reference to a virtual register. This may confuse the
5828 alpha though, which must handle this case differently. */
5829 if (GET_CODE (operands[0]) == MEM
5830 && !memory_address_p (GET_MODE (operands[0]),
5831 XEXP (operands[0], 0)))
5833 = replace_equiv_address (operands[0],
5834 copy_to_reg (XEXP (operands[0], 0)));
5835 if (GET_CODE (operands[1]) == MEM
5836 && !memory_address_p (GET_MODE (operands[1]),
5837 XEXP (operands[1], 0)))
5839 = replace_equiv_address (operands[1],
5840 copy_to_reg (XEXP (operands[1], 0)));
5843 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5845 rtx reg = gen_reg_rtx (SImode);
5847 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5848 operands[1] = gen_lowpart (QImode, reg);
5851 if (GET_CODE (operands[0]) == MEM)
5852 operands[1] = force_reg (QImode, operands[1]);
5854 else if (TARGET_THUMB
5855 && GET_CODE (operands[1]) == CONST_INT
5856 && !satisfies_constraint_I (operands[1]))
5858 /* Handle loading a large integer during reload. */
5860 /* Writing a constant to memory needs a scratch, which should
5861 be handled with SECONDARY_RELOADs. */
5862 gcc_assert (GET_CODE (operands[0]) == REG);
5864 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5865 emit_insn (gen_movsi (operands[0], operands[1]));
5872 (define_insn "*arm_movqi_insn"
5873 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5874 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5876 && ( register_operand (operands[0], QImode)
5877 || register_operand (operands[1], QImode))"
5883 [(set_attr "type" "*,*,load1,store1")
5884 (set_attr "predicable" "yes")]
5887 (define_insn "*thumb1_movqi_insn"
5888 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5889 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5891 && ( register_operand (operands[0], QImode)
5892 || register_operand (operands[1], QImode))"
5900 [(set_attr "length" "2")
5901 (set_attr "type" "*,load1,store1,*,*,*")
5902 (set_attr "pool_range" "*,32,*,*,*,*")]
5906 (define_expand "movhf"
5907 [(set (match_operand:HF 0 "general_operand" "")
5908 (match_operand:HF 1 "general_operand" ""))]
5913 if (GET_CODE (operands[0]) == MEM)
5914 operands[1] = force_reg (HFmode, operands[1]);
5916 else /* TARGET_THUMB1 */
5918 if (can_create_pseudo_p ())
5920 if (GET_CODE (operands[0]) != REG)
5921 operands[1] = force_reg (HFmode, operands[1]);
5927 (define_insn "*arm32_movhf"
5928 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5929 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5930 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_NEON_FP16)
5931 && ( s_register_operand (operands[0], HFmode)
5932 || s_register_operand (operands[1], HFmode))"
5934 switch (which_alternative)
5936 case 0: /* ARM register from memory */
5937 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5938 case 1: /* memory from ARM register */
5939 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5940 case 2: /* ARM register from ARM register */
5941 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5942 case 3: /* ARM register from constant */
5948 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5949 bits = real_to_target (NULL, &r, HFmode);
5950 ops[0] = operands[0];
5951 ops[1] = GEN_INT (bits);
5952 ops[2] = GEN_INT (bits & 0xff00);
5953 ops[3] = GEN_INT (bits & 0x00ff);
5955 if (arm_arch_thumb2)
5956 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5958 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5965 [(set_attr "conds" "unconditional")
5966 (set_attr "type" "load1,store1,*,*")
5967 (set_attr "length" "4,4,4,8")
5968 (set_attr "predicable" "yes")
5972 (define_insn "*thumb1_movhf"
5973 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
5974 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
5976 && ( s_register_operand (operands[0], HFmode)
5977 || s_register_operand (operands[1], HFmode))"
5979 switch (which_alternative)
5984 gcc_assert (GET_CODE(operands[1]) == MEM);
5985 addr = XEXP (operands[1], 0);
5986 if (GET_CODE (addr) == LABEL_REF
5987 || (GET_CODE (addr) == CONST
5988 && GET_CODE (XEXP (addr, 0)) == PLUS
5989 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
5990 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
5992 /* Constant pool entry. */
5993 return \"ldr\\t%0, %1\";
5995 return \"ldrh\\t%0, %1\";
5997 case 2: return \"strh\\t%1, %0\";
5998 default: return \"mov\\t%0, %1\";
6001 [(set_attr "length" "2")
6002 (set_attr "type" "*,load1,store1,*,*")
6003 (set_attr "pool_range" "*,1020,*,*,*")]
6006 (define_expand "movsf"
6007 [(set (match_operand:SF 0 "general_operand" "")
6008 (match_operand:SF 1 "general_operand" ""))]
6013 if (GET_CODE (operands[0]) == MEM)
6014 operands[1] = force_reg (SFmode, operands[1]);
6016 else /* TARGET_THUMB1 */
6018 if (can_create_pseudo_p ())
6020 if (GET_CODE (operands[0]) != REG)
6021 operands[1] = force_reg (SFmode, operands[1]);
6027 ;; Transform a floating-point move of a constant into a core register into
6028 ;; an SImode operation.
6030 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6031 (match_operand:SF 1 "immediate_operand" ""))]
6034 && GET_CODE (operands[1]) == CONST_DOUBLE"
6035 [(set (match_dup 2) (match_dup 3))]
6037 operands[2] = gen_lowpart (SImode, operands[0]);
6038 operands[3] = gen_lowpart (SImode, operands[1]);
6039 if (operands[2] == 0 || operands[3] == 0)
6044 (define_insn "*arm_movsf_soft_insn"
6045 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6046 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6048 && TARGET_SOFT_FLOAT
6049 && (GET_CODE (operands[0]) != MEM
6050 || register_operand (operands[1], SFmode))"
6053 ldr%?\\t%0, %1\\t%@ float
6054 str%?\\t%1, %0\\t%@ float"
6055 [(set_attr "length" "4,4,4")
6056 (set_attr "predicable" "yes")
6057 (set_attr "type" "*,load1,store1")
6058 (set_attr "pool_range" "*,4096,*")
6059 (set_attr "neg_pool_range" "*,4084,*")]
6062 ;;; ??? This should have alternatives for constants.
6063 (define_insn "*thumb1_movsf_insn"
6064 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6065 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6067 && ( register_operand (operands[0], SFmode)
6068 || register_operand (operands[1], SFmode))"
6077 [(set_attr "length" "2")
6078 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6079 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6082 (define_expand "movdf"
6083 [(set (match_operand:DF 0 "general_operand" "")
6084 (match_operand:DF 1 "general_operand" ""))]
6089 if (GET_CODE (operands[0]) == MEM)
6090 operands[1] = force_reg (DFmode, operands[1]);
6092 else /* TARGET_THUMB */
6094 if (can_create_pseudo_p ())
6096 if (GET_CODE (operands[0]) != REG)
6097 operands[1] = force_reg (DFmode, operands[1]);
6103 ;; Reloading a df mode value stored in integer regs to memory can require a
6105 (define_expand "reload_outdf"
6106 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6107 (match_operand:DF 1 "s_register_operand" "r")
6108 (match_operand:SI 2 "s_register_operand" "=&r")]
6112 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6115 operands[2] = XEXP (operands[0], 0);
6116 else if (code == POST_INC || code == PRE_DEC)
6118 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6119 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6120 emit_insn (gen_movdi (operands[0], operands[1]));
6123 else if (code == PRE_INC)
6125 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6127 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6130 else if (code == POST_DEC)
6131 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6133 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6134 XEXP (XEXP (operands[0], 0), 1)));
6136 emit_insn (gen_rtx_SET (VOIDmode,
6137 replace_equiv_address (operands[0], operands[2]),
6140 if (code == POST_DEC)
6141 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6147 (define_insn "*movdf_soft_insn"
6148 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6149 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6150 "TARGET_ARM && TARGET_SOFT_FLOAT
6151 && ( register_operand (operands[0], DFmode)
6152 || register_operand (operands[1], DFmode))"
6154 switch (which_alternative)
6161 return output_move_double (operands);
6164 [(set_attr "length" "8,12,16,8,8")
6165 (set_attr "type" "*,*,*,load2,store2")
6166 (set_attr "pool_range" "1020")
6167 (set_attr "neg_pool_range" "1008")]
6170 ;;; ??? This should have alternatives for constants.
6171 ;;; ??? This was originally identical to the movdi_insn pattern.
6172 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6173 ;;; thumb_reorg with a memory reference.
6174 (define_insn "*thumb_movdf_insn"
6175 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6176 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6178 && ( register_operand (operands[0], DFmode)
6179 || register_operand (operands[1], DFmode))"
6181 switch (which_alternative)
6185 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6186 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6187 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6189 return \"ldmia\\t%1, {%0, %H0}\";
6191 return \"stmia\\t%0, {%1, %H1}\";
6193 return thumb_load_double_from_address (operands);
6195 operands[2] = gen_rtx_MEM (SImode,
6196 plus_constant (XEXP (operands[0], 0), 4));
6197 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6200 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6201 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6202 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6205 [(set_attr "length" "4,2,2,6,4,4")
6206 (set_attr "type" "*,load2,store2,load2,store2,*")
6207 (set_attr "pool_range" "*,*,*,1020,*,*")]
6210 (define_expand "movxf"
6211 [(set (match_operand:XF 0 "general_operand" "")
6212 (match_operand:XF 1 "general_operand" ""))]
6213 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6215 if (GET_CODE (operands[0]) == MEM)
6216 operands[1] = force_reg (XFmode, operands[1]);
6222 ;; load- and store-multiple insns
6223 ;; The arm can load/store any set of registers, provided that they are in
6224 ;; ascending order; but that is beyond GCC so stick with what it knows.
6226 (define_expand "load_multiple"
6227 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6228 (match_operand:SI 1 "" ""))
6229 (use (match_operand:SI 2 "" ""))])]
6232 HOST_WIDE_INT offset = 0;
6234 /* Support only fixed point registers. */
6235 if (GET_CODE (operands[2]) != CONST_INT
6236 || INTVAL (operands[2]) > 14
6237 || INTVAL (operands[2]) < 2
6238 || GET_CODE (operands[1]) != MEM
6239 || GET_CODE (operands[0]) != REG
6240 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6241 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6245 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6246 force_reg (SImode, XEXP (operands[1], 0)),
6247 TRUE, FALSE, operands[1], &offset);
6250 ;; Load multiple with write-back
6252 (define_insn "*ldmsi_postinc4"
6253 [(match_parallel 0 "load_multiple_operation"
6254 [(set (match_operand:SI 1 "s_register_operand" "=r")
6255 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6257 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6258 (mem:SI (match_dup 2)))
6259 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6260 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6261 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6262 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6263 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6264 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6265 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6266 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6267 [(set_attr "type" "load4")
6268 (set_attr "predicable" "yes")]
6271 (define_insn "*ldmsi_postinc4_thumb1"
6272 [(match_parallel 0 "load_multiple_operation"
6273 [(set (match_operand:SI 1 "s_register_operand" "=l")
6274 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6276 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6277 (mem:SI (match_dup 2)))
6278 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6279 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6280 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6281 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6282 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6283 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6284 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6285 "ldmia\\t%1!, {%3, %4, %5, %6}"
6286 [(set_attr "type" "load4")]
6289 (define_insn "*ldmsi_postinc3"
6290 [(match_parallel 0 "load_multiple_operation"
6291 [(set (match_operand:SI 1 "s_register_operand" "=r")
6292 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6294 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6295 (mem:SI (match_dup 2)))
6296 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6297 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6298 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6299 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6300 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6301 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6302 [(set_attr "type" "load3")
6303 (set_attr "predicable" "yes")]
6306 (define_insn "*ldmsi_postinc2"
6307 [(match_parallel 0 "load_multiple_operation"
6308 [(set (match_operand:SI 1 "s_register_operand" "=r")
6309 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6311 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6312 (mem:SI (match_dup 2)))
6313 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6314 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6315 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6316 "ldm%(ia%)\\t%1!, {%3, %4}"
6317 [(set_attr "type" "load2")
6318 (set_attr "predicable" "yes")]
6321 ;; Ordinary load multiple
6323 (define_insn "*ldmsi4"
6324 [(match_parallel 0 "load_multiple_operation"
6325 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6326 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6327 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6328 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6329 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6330 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6331 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6332 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6333 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6334 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6335 [(set_attr "type" "load4")
6336 (set_attr "predicable" "yes")]
6339 (define_insn "*ldmsi3"
6340 [(match_parallel 0 "load_multiple_operation"
6341 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6342 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6343 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6344 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6345 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6346 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6347 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6348 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6349 [(set_attr "type" "load3")
6350 (set_attr "predicable" "yes")]
6353 (define_insn "*ldmsi2"
6354 [(match_parallel 0 "load_multiple_operation"
6355 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6356 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6357 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6358 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6359 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6360 "ldm%(ia%)\\t%1, {%2, %3}"
6361 [(set_attr "type" "load2")
6362 (set_attr "predicable" "yes")]
6365 (define_expand "store_multiple"
6366 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6367 (match_operand:SI 1 "" ""))
6368 (use (match_operand:SI 2 "" ""))])]
6371 HOST_WIDE_INT offset = 0;
6373 /* Support only fixed point registers. */
6374 if (GET_CODE (operands[2]) != CONST_INT
6375 || INTVAL (operands[2]) > 14
6376 || INTVAL (operands[2]) < 2
6377 || GET_CODE (operands[1]) != REG
6378 || GET_CODE (operands[0]) != MEM
6379 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6380 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6384 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6385 force_reg (SImode, XEXP (operands[0], 0)),
6386 TRUE, FALSE, operands[0], &offset);
6389 ;; Store multiple with write-back
6391 (define_insn "*stmsi_postinc4"
6392 [(match_parallel 0 "store_multiple_operation"
6393 [(set (match_operand:SI 1 "s_register_operand" "=r")
6394 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6396 (set (mem:SI (match_dup 2))
6397 (match_operand:SI 3 "arm_hard_register_operand" ""))
6398 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6399 (match_operand:SI 4 "arm_hard_register_operand" ""))
6400 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6401 (match_operand:SI 5 "arm_hard_register_operand" ""))
6402 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6403 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6404 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6405 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6406 [(set_attr "predicable" "yes")
6407 (set_attr "type" "store4")]
6410 (define_insn "*stmsi_postinc4_thumb1"
6411 [(match_parallel 0 "store_multiple_operation"
6412 [(set (match_operand:SI 1 "s_register_operand" "=l")
6413 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6415 (set (mem:SI (match_dup 2))
6416 (match_operand:SI 3 "arm_hard_register_operand" ""))
6417 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6418 (match_operand:SI 4 "arm_hard_register_operand" ""))
6419 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6420 (match_operand:SI 5 "arm_hard_register_operand" ""))
6421 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6422 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6423 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6424 "stmia\\t%1!, {%3, %4, %5, %6}"
6425 [(set_attr "type" "store4")]
6428 (define_insn "*stmsi_postinc3"
6429 [(match_parallel 0 "store_multiple_operation"
6430 [(set (match_operand:SI 1 "s_register_operand" "=r")
6431 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6433 (set (mem:SI (match_dup 2))
6434 (match_operand:SI 3 "arm_hard_register_operand" ""))
6435 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6436 (match_operand:SI 4 "arm_hard_register_operand" ""))
6437 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6438 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6439 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6440 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6441 [(set_attr "predicable" "yes")
6442 (set_attr "type" "store3")]
6445 (define_insn "*stmsi_postinc2"
6446 [(match_parallel 0 "store_multiple_operation"
6447 [(set (match_operand:SI 1 "s_register_operand" "=r")
6448 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6450 (set (mem:SI (match_dup 2))
6451 (match_operand:SI 3 "arm_hard_register_operand" ""))
6452 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6453 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6454 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6455 "stm%(ia%)\\t%1!, {%3, %4}"
6456 [(set_attr "predicable" "yes")
6457 (set_attr "type" "store2")]
6460 ;; Ordinary store multiple
6462 (define_insn "*stmsi4"
6463 [(match_parallel 0 "store_multiple_operation"
6464 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6465 (match_operand:SI 2 "arm_hard_register_operand" ""))
6466 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6467 (match_operand:SI 3 "arm_hard_register_operand" ""))
6468 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6469 (match_operand:SI 4 "arm_hard_register_operand" ""))
6470 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6471 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6472 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6473 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6474 [(set_attr "predicable" "yes")
6475 (set_attr "type" "store4")]
6478 (define_insn "*stmsi3"
6479 [(match_parallel 0 "store_multiple_operation"
6480 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6481 (match_operand:SI 2 "arm_hard_register_operand" ""))
6482 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6483 (match_operand:SI 3 "arm_hard_register_operand" ""))
6484 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6485 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6486 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6487 "stm%(ia%)\\t%1, {%2, %3, %4}"
6488 [(set_attr "predicable" "yes")
6489 (set_attr "type" "store3")]
6492 (define_insn "*stmsi2"
6493 [(match_parallel 0 "store_multiple_operation"
6494 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6495 (match_operand:SI 2 "arm_hard_register_operand" ""))
6496 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6497 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6498 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6499 "stm%(ia%)\\t%1, {%2, %3}"
6500 [(set_attr "predicable" "yes")
6501 (set_attr "type" "store2")]
6504 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6505 ;; We could let this apply for blocks of less than this, but it clobbers so
6506 ;; many registers that there is then probably a better way.
6508 (define_expand "movmemqi"
6509 [(match_operand:BLK 0 "general_operand" "")
6510 (match_operand:BLK 1 "general_operand" "")
6511 (match_operand:SI 2 "const_int_operand" "")
6512 (match_operand:SI 3 "const_int_operand" "")]
6517 if (arm_gen_movmemqi (operands))
6521 else /* TARGET_THUMB1 */
6523 if ( INTVAL (operands[3]) != 4
6524 || INTVAL (operands[2]) > 48)
6527 thumb_expand_movmemqi (operands);
6533 ;; Thumb block-move insns
6535 (define_insn "movmem12b"
6536 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6537 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6538 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6539 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6540 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6541 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6542 (set (match_operand:SI 0 "register_operand" "=l")
6543 (plus:SI (match_dup 2) (const_int 12)))
6544 (set (match_operand:SI 1 "register_operand" "=l")
6545 (plus:SI (match_dup 3) (const_int 12)))
6546 (clobber (match_scratch:SI 4 "=&l"))
6547 (clobber (match_scratch:SI 5 "=&l"))
6548 (clobber (match_scratch:SI 6 "=&l"))]
6550 "* return thumb_output_move_mem_multiple (3, operands);"
6551 [(set_attr "length" "4")
6552 ; This isn't entirely accurate... It loads as well, but in terms of
6553 ; scheduling the following insn it is better to consider it as a store
6554 (set_attr "type" "store3")]
6557 (define_insn "movmem8b"
6558 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6559 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6560 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6561 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6562 (set (match_operand:SI 0 "register_operand" "=l")
6563 (plus:SI (match_dup 2) (const_int 8)))
6564 (set (match_operand:SI 1 "register_operand" "=l")
6565 (plus:SI (match_dup 3) (const_int 8)))
6566 (clobber (match_scratch:SI 4 "=&l"))
6567 (clobber (match_scratch:SI 5 "=&l"))]
6569 "* return thumb_output_move_mem_multiple (2, operands);"
6570 [(set_attr "length" "4")
6571 ; This isn't entirely accurate... It loads as well, but in terms of
6572 ; scheduling the following insn it is better to consider it as a store
6573 (set_attr "type" "store2")]
6578 ;; Compare & branch insns
6579 ;; The range calculations are based as follows:
6580 ;; For forward branches, the address calculation returns the address of
6581 ;; the next instruction. This is 2 beyond the branch instruction.
6582 ;; For backward branches, the address calculation returns the address of
6583 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6584 ;; instruction for the shortest sequence, and 4 before the branch instruction
6585 ;; if we have to jump around an unconditional branch.
6586 ;; To the basic branch range the PC offset must be added (this is +4).
6587 ;; So for forward branches we have
6588 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6589 ;; And for backward branches we have
6590 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6592 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6593 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6595 (define_expand "cbranchsi4"
6596 [(set (pc) (if_then_else
6597 (match_operator 0 "arm_comparison_operator"
6598 [(match_operand:SI 1 "s_register_operand" "")
6599 (match_operand:SI 2 "nonmemory_operand" "")])
6600 (label_ref (match_operand 3 "" ""))
6602 "TARGET_THUMB1 || TARGET_32BIT"
6606 if (!arm_add_operand (operands[2], SImode))
6607 operands[2] = force_reg (SImode, operands[2]);
6608 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6612 if (thumb1_cmpneg_operand (operands[2], SImode))
6614 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6615 operands[3], operands[0]));
6618 if (!thumb1_cmp_operand (operands[2], SImode))
6619 operands[2] = force_reg (SImode, operands[2]);
6622 (define_expand "cbranchsf4"
6623 [(set (pc) (if_then_else
6624 (match_operator 0 "arm_comparison_operator"
6625 [(match_operand:SF 1 "s_register_operand" "")
6626 (match_operand:SF 2 "arm_float_compare_operand" "")])
6627 (label_ref (match_operand 3 "" ""))
6629 "TARGET_32BIT && TARGET_HARD_FLOAT"
6630 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6631 operands[3])); DONE;"
6634 (define_expand "cbranchdf4"
6635 [(set (pc) (if_then_else
6636 (match_operator 0 "arm_comparison_operator"
6637 [(match_operand:DF 1 "s_register_operand" "")
6638 (match_operand:DF 2 "arm_float_compare_operand" "")])
6639 (label_ref (match_operand 3 "" ""))
6641 "TARGET_32BIT && TARGET_HARD_FLOAT"
6642 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6643 operands[3])); DONE;"
6646 ;; this uses the Cirrus DI compare instruction
6647 (define_expand "cbranchdi4"
6648 [(set (pc) (if_then_else
6649 (match_operator 0 "arm_comparison_operator"
6650 [(match_operand:DI 1 "cirrus_fp_register" "")
6651 (match_operand:DI 2 "cirrus_fp_register" "")])
6652 (label_ref (match_operand 3 "" ""))
6654 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6655 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6656 operands[3])); DONE;"
6659 (define_insn "*cbranchsi4_insn"
6660 [(set (pc) (if_then_else
6661 (match_operator 0 "arm_comparison_operator"
6662 [(match_operand:SI 1 "s_register_operand" "l,*h")
6663 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6664 (label_ref (match_operand 3 "" ""))
6668 output_asm_insn (\"cmp\\t%1, %2\", operands);
6670 switch (get_attr_length (insn))
6672 case 4: return \"b%d0\\t%l3\";
6673 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6674 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6677 [(set (attr "far_jump")
6679 (eq_attr "length" "8")
6680 (const_string "yes")
6681 (const_string "no")))
6682 (set (attr "length")
6684 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6685 (le (minus (match_dup 3) (pc)) (const_int 256)))
6688 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6689 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6694 (define_insn "cbranchsi4_scratch"
6695 [(set (pc) (if_then_else
6696 (match_operator 4 "arm_comparison_operator"
6697 [(match_operand:SI 1 "s_register_operand" "l,0")
6698 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6699 (label_ref (match_operand 3 "" ""))
6701 (clobber (match_scratch:SI 0 "=l,l"))]
6704 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6706 switch (get_attr_length (insn))
6708 case 4: return \"b%d4\\t%l3\";
6709 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6710 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6713 [(set (attr "far_jump")
6715 (eq_attr "length" "8")
6716 (const_string "yes")
6717 (const_string "no")))
6718 (set (attr "length")
6720 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6721 (le (minus (match_dup 3) (pc)) (const_int 256)))
6724 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6725 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6729 (define_insn "*movsi_cbranchsi4"
6732 (match_operator 3 "arm_comparison_operator"
6733 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6735 (label_ref (match_operand 2 "" ""))
6737 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6741 if (which_alternative == 0)
6742 output_asm_insn (\"cmp\t%0, #0\", operands);
6743 else if (which_alternative == 1)
6744 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6747 output_asm_insn (\"cmp\t%1, #0\", operands);
6748 if (which_alternative == 2)
6749 output_asm_insn (\"mov\t%0, %1\", operands);
6751 output_asm_insn (\"str\t%1, %0\", operands);
6753 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6755 case 4: return \"b%d3\\t%l2\";
6756 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6757 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6760 [(set (attr "far_jump")
6762 (ior (and (gt (symbol_ref ("which_alternative"))
6764 (eq_attr "length" "8"))
6765 (eq_attr "length" "10"))
6766 (const_string "yes")
6767 (const_string "no")))
6768 (set (attr "length")
6770 (le (symbol_ref ("which_alternative"))
6773 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6774 (le (minus (match_dup 2) (pc)) (const_int 256)))
6777 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6778 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6782 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6783 (le (minus (match_dup 2) (pc)) (const_int 256)))
6786 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6787 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6792 (define_insn "*negated_cbranchsi4"
6795 (match_operator 0 "equality_operator"
6796 [(match_operand:SI 1 "s_register_operand" "l")
6797 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6798 (label_ref (match_operand 3 "" ""))
6802 output_asm_insn (\"cmn\\t%1, %2\", operands);
6803 switch (get_attr_length (insn))
6805 case 4: return \"b%d0\\t%l3\";
6806 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6807 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6810 [(set (attr "far_jump")
6812 (eq_attr "length" "8")
6813 (const_string "yes")
6814 (const_string "no")))
6815 (set (attr "length")
6817 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6818 (le (minus (match_dup 3) (pc)) (const_int 256)))
6821 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6822 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6827 (define_insn "*tbit_cbranch"
6830 (match_operator 0 "equality_operator"
6831 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6833 (match_operand:SI 2 "const_int_operand" "i"))
6835 (label_ref (match_operand 3 "" ""))
6837 (clobber (match_scratch:SI 4 "=l"))]
6842 op[0] = operands[4];
6843 op[1] = operands[1];
6844 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6846 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6847 switch (get_attr_length (insn))
6849 case 4: return \"b%d0\\t%l3\";
6850 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6851 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6854 [(set (attr "far_jump")
6856 (eq_attr "length" "8")
6857 (const_string "yes")
6858 (const_string "no")))
6859 (set (attr "length")
6861 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6862 (le (minus (match_dup 3) (pc)) (const_int 256)))
6865 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6866 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6871 (define_insn "*tlobits_cbranch"
6874 (match_operator 0 "equality_operator"
6875 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6876 (match_operand:SI 2 "const_int_operand" "i")
6879 (label_ref (match_operand 3 "" ""))
6881 (clobber (match_scratch:SI 4 "=l"))]
6886 op[0] = operands[4];
6887 op[1] = operands[1];
6888 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6890 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6891 switch (get_attr_length (insn))
6893 case 4: return \"b%d0\\t%l3\";
6894 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6895 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6898 [(set (attr "far_jump")
6900 (eq_attr "length" "8")
6901 (const_string "yes")
6902 (const_string "no")))
6903 (set (attr "length")
6905 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6906 (le (minus (match_dup 3) (pc)) (const_int 256)))
6909 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6910 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6915 (define_insn "*tstsi3_cbranch"
6918 (match_operator 3 "equality_operator"
6919 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6920 (match_operand:SI 1 "s_register_operand" "l"))
6922 (label_ref (match_operand 2 "" ""))
6927 output_asm_insn (\"tst\\t%0, %1\", operands);
6928 switch (get_attr_length (insn))
6930 case 4: return \"b%d3\\t%l2\";
6931 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6932 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6935 [(set (attr "far_jump")
6937 (eq_attr "length" "8")
6938 (const_string "yes")
6939 (const_string "no")))
6940 (set (attr "length")
6942 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6943 (le (minus (match_dup 2) (pc)) (const_int 256)))
6946 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6947 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6952 (define_insn "*andsi3_cbranch"
6955 (match_operator 5 "equality_operator"
6956 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6957 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6959 (label_ref (match_operand 4 "" ""))
6961 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6962 (and:SI (match_dup 2) (match_dup 3)))
6963 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6967 if (which_alternative == 0)
6968 output_asm_insn (\"and\\t%0, %3\", operands);
6969 else if (which_alternative == 1)
6971 output_asm_insn (\"and\\t%1, %3\", operands);
6972 output_asm_insn (\"mov\\t%0, %1\", operands);
6976 output_asm_insn (\"and\\t%1, %3\", operands);
6977 output_asm_insn (\"str\\t%1, %0\", operands);
6980 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6982 case 4: return \"b%d5\\t%l4\";
6983 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6984 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6987 [(set (attr "far_jump")
6989 (ior (and (eq (symbol_ref ("which_alternative"))
6991 (eq_attr "length" "8"))
6992 (eq_attr "length" "10"))
6993 (const_string "yes")
6994 (const_string "no")))
6995 (set (attr "length")
6997 (eq (symbol_ref ("which_alternative"))
7000 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7001 (le (minus (match_dup 4) (pc)) (const_int 256)))
7004 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7005 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7009 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7010 (le (minus (match_dup 4) (pc)) (const_int 256)))
7013 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7014 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7019 (define_insn "*orrsi3_cbranch_scratch"
7022 (match_operator 4 "equality_operator"
7023 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7024 (match_operand:SI 2 "s_register_operand" "l"))
7026 (label_ref (match_operand 3 "" ""))
7028 (clobber (match_scratch:SI 0 "=l"))]
7032 output_asm_insn (\"orr\\t%0, %2\", operands);
7033 switch (get_attr_length (insn))
7035 case 4: return \"b%d4\\t%l3\";
7036 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7037 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7040 [(set (attr "far_jump")
7042 (eq_attr "length" "8")
7043 (const_string "yes")
7044 (const_string "no")))
7045 (set (attr "length")
7047 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7048 (le (minus (match_dup 3) (pc)) (const_int 256)))
7051 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7052 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7057 (define_insn "*orrsi3_cbranch"
7060 (match_operator 5 "equality_operator"
7061 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7062 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7064 (label_ref (match_operand 4 "" ""))
7066 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7067 (ior:SI (match_dup 2) (match_dup 3)))
7068 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7072 if (which_alternative == 0)
7073 output_asm_insn (\"orr\\t%0, %3\", operands);
7074 else if (which_alternative == 1)
7076 output_asm_insn (\"orr\\t%1, %3\", operands);
7077 output_asm_insn (\"mov\\t%0, %1\", operands);
7081 output_asm_insn (\"orr\\t%1, %3\", operands);
7082 output_asm_insn (\"str\\t%1, %0\", operands);
7085 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7087 case 4: return \"b%d5\\t%l4\";
7088 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7089 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7092 [(set (attr "far_jump")
7094 (ior (and (eq (symbol_ref ("which_alternative"))
7096 (eq_attr "length" "8"))
7097 (eq_attr "length" "10"))
7098 (const_string "yes")
7099 (const_string "no")))
7100 (set (attr "length")
7102 (eq (symbol_ref ("which_alternative"))
7105 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7106 (le (minus (match_dup 4) (pc)) (const_int 256)))
7109 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7110 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7114 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7115 (le (minus (match_dup 4) (pc)) (const_int 256)))
7118 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7119 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7124 (define_insn "*xorsi3_cbranch_scratch"
7127 (match_operator 4 "equality_operator"
7128 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7129 (match_operand:SI 2 "s_register_operand" "l"))
7131 (label_ref (match_operand 3 "" ""))
7133 (clobber (match_scratch:SI 0 "=l"))]
7137 output_asm_insn (\"eor\\t%0, %2\", operands);
7138 switch (get_attr_length (insn))
7140 case 4: return \"b%d4\\t%l3\";
7141 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7142 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7145 [(set (attr "far_jump")
7147 (eq_attr "length" "8")
7148 (const_string "yes")
7149 (const_string "no")))
7150 (set (attr "length")
7152 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7153 (le (minus (match_dup 3) (pc)) (const_int 256)))
7156 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7157 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7162 (define_insn "*xorsi3_cbranch"
7165 (match_operator 5 "equality_operator"
7166 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7167 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7169 (label_ref (match_operand 4 "" ""))
7171 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7172 (xor:SI (match_dup 2) (match_dup 3)))
7173 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7177 if (which_alternative == 0)
7178 output_asm_insn (\"eor\\t%0, %3\", operands);
7179 else if (which_alternative == 1)
7181 output_asm_insn (\"eor\\t%1, %3\", operands);
7182 output_asm_insn (\"mov\\t%0, %1\", operands);
7186 output_asm_insn (\"eor\\t%1, %3\", operands);
7187 output_asm_insn (\"str\\t%1, %0\", operands);
7190 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7192 case 4: return \"b%d5\\t%l4\";
7193 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7194 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7197 [(set (attr "far_jump")
7199 (ior (and (eq (symbol_ref ("which_alternative"))
7201 (eq_attr "length" "8"))
7202 (eq_attr "length" "10"))
7203 (const_string "yes")
7204 (const_string "no")))
7205 (set (attr "length")
7207 (eq (symbol_ref ("which_alternative"))
7210 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7211 (le (minus (match_dup 4) (pc)) (const_int 256)))
7214 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7215 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7219 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7220 (le (minus (match_dup 4) (pc)) (const_int 256)))
7223 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7224 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7229 (define_insn "*bicsi3_cbranch_scratch"
7232 (match_operator 4 "equality_operator"
7233 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7234 (match_operand:SI 1 "s_register_operand" "0"))
7236 (label_ref (match_operand 3 "" ""))
7238 (clobber (match_scratch:SI 0 "=l"))]
7242 output_asm_insn (\"bic\\t%0, %2\", operands);
7243 switch (get_attr_length (insn))
7245 case 4: return \"b%d4\\t%l3\";
7246 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7247 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7250 [(set (attr "far_jump")
7252 (eq_attr "length" "8")
7253 (const_string "yes")
7254 (const_string "no")))
7255 (set (attr "length")
7257 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7258 (le (minus (match_dup 3) (pc)) (const_int 256)))
7261 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7262 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7267 (define_insn "*bicsi3_cbranch"
7270 (match_operator 5 "equality_operator"
7271 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7272 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7274 (label_ref (match_operand 4 "" ""))
7276 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7277 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7278 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7282 if (which_alternative == 0)
7283 output_asm_insn (\"bic\\t%0, %3\", operands);
7284 else if (which_alternative <= 2)
7286 output_asm_insn (\"bic\\t%1, %3\", operands);
7287 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7288 conditions again, since we're only testing for equality. */
7289 output_asm_insn (\"mov\\t%0, %1\", operands);
7293 output_asm_insn (\"bic\\t%1, %3\", operands);
7294 output_asm_insn (\"str\\t%1, %0\", operands);
7297 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7299 case 4: return \"b%d5\\t%l4\";
7300 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7301 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7304 [(set (attr "far_jump")
7306 (ior (and (eq (symbol_ref ("which_alternative"))
7308 (eq_attr "length" "8"))
7309 (eq_attr "length" "10"))
7310 (const_string "yes")
7311 (const_string "no")))
7312 (set (attr "length")
7314 (eq (symbol_ref ("which_alternative"))
7317 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7318 (le (minus (match_dup 4) (pc)) (const_int 256)))
7321 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7322 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7326 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7327 (le (minus (match_dup 4) (pc)) (const_int 256)))
7330 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7331 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7336 (define_insn "*cbranchne_decr1"
7338 (if_then_else (match_operator 3 "equality_operator"
7339 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7341 (label_ref (match_operand 4 "" ""))
7343 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7344 (plus:SI (match_dup 2) (const_int -1)))
7345 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7350 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7352 VOIDmode, operands[2], const1_rtx);
7353 cond[1] = operands[4];
7355 if (which_alternative == 0)
7356 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7357 else if (which_alternative == 1)
7359 /* We must provide an alternative for a hi reg because reload
7360 cannot handle output reloads on a jump instruction, but we
7361 can't subtract into that. Fortunately a mov from lo to hi
7362 does not clobber the condition codes. */
7363 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7364 output_asm_insn (\"mov\\t%0, %1\", operands);
7368 /* Similarly, but the target is memory. */
7369 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7370 output_asm_insn (\"str\\t%1, %0\", operands);
7373 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7376 output_asm_insn (\"b%d0\\t%l1\", cond);
7379 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7380 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7382 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7383 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7387 [(set (attr "far_jump")
7389 (ior (and (eq (symbol_ref ("which_alternative"))
7391 (eq_attr "length" "8"))
7392 (eq_attr "length" "10"))
7393 (const_string "yes")
7394 (const_string "no")))
7395 (set_attr_alternative "length"
7399 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7400 (le (minus (match_dup 4) (pc)) (const_int 256)))
7403 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7404 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7409 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7410 (le (minus (match_dup 4) (pc)) (const_int 256)))
7413 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7414 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7419 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7420 (le (minus (match_dup 4) (pc)) (const_int 256)))
7423 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7424 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7429 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7430 (le (minus (match_dup 4) (pc)) (const_int 256)))
7433 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7434 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7439 (define_insn "*addsi3_cbranch"
7442 (match_operator 4 "arm_comparison_operator"
7444 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7445 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7447 (label_ref (match_operand 5 "" ""))
7450 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7451 (plus:SI (match_dup 2) (match_dup 3)))
7452 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7454 && (GET_CODE (operands[4]) == EQ
7455 || GET_CODE (operands[4]) == NE
7456 || GET_CODE (operands[4]) == GE
7457 || GET_CODE (operands[4]) == LT)"
7463 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7464 cond[1] = operands[2];
7465 cond[2] = operands[3];
7467 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7468 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7470 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7472 if (which_alternative >= 3
7473 && which_alternative < 4)
7474 output_asm_insn (\"mov\\t%0, %1\", operands);
7475 else if (which_alternative >= 4)
7476 output_asm_insn (\"str\\t%1, %0\", operands);
7478 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7481 return \"b%d4\\t%l5\";
7483 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7485 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7489 [(set (attr "far_jump")
7491 (ior (and (lt (symbol_ref ("which_alternative"))
7493 (eq_attr "length" "8"))
7494 (eq_attr "length" "10"))
7495 (const_string "yes")
7496 (const_string "no")))
7497 (set (attr "length")
7499 (lt (symbol_ref ("which_alternative"))
7502 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7503 (le (minus (match_dup 5) (pc)) (const_int 256)))
7506 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7507 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7511 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7512 (le (minus (match_dup 5) (pc)) (const_int 256)))
7515 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7516 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7521 (define_insn "*addsi3_cbranch_scratch"
7524 (match_operator 3 "arm_comparison_operator"
7526 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7527 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7529 (label_ref (match_operand 4 "" ""))
7531 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7533 && (GET_CODE (operands[3]) == EQ
7534 || GET_CODE (operands[3]) == NE
7535 || GET_CODE (operands[3]) == GE
7536 || GET_CODE (operands[3]) == LT)"
7539 switch (which_alternative)
7542 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7545 output_asm_insn (\"cmn\t%1, %2\", operands);
7548 if (INTVAL (operands[2]) < 0)
7549 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7551 output_asm_insn (\"add\t%0, %1, %2\", operands);
7554 if (INTVAL (operands[2]) < 0)
7555 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7557 output_asm_insn (\"add\t%0, %0, %2\", operands);
7561 switch (get_attr_length (insn))
7564 return \"b%d3\\t%l4\";
7566 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7568 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7572 [(set (attr "far_jump")
7574 (eq_attr "length" "8")
7575 (const_string "yes")
7576 (const_string "no")))
7577 (set (attr "length")
7579 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7580 (le (minus (match_dup 4) (pc)) (const_int 256)))
7583 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7584 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7589 (define_insn "*subsi3_cbranch"
7592 (match_operator 4 "arm_comparison_operator"
7594 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7595 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7597 (label_ref (match_operand 5 "" ""))
7599 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7600 (minus:SI (match_dup 2) (match_dup 3)))
7601 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7603 && (GET_CODE (operands[4]) == EQ
7604 || GET_CODE (operands[4]) == NE
7605 || GET_CODE (operands[4]) == GE
7606 || GET_CODE (operands[4]) == LT)"
7609 if (which_alternative == 0)
7610 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7611 else if (which_alternative == 1)
7613 /* We must provide an alternative for a hi reg because reload
7614 cannot handle output reloads on a jump instruction, but we
7615 can't subtract into that. Fortunately a mov from lo to hi
7616 does not clobber the condition codes. */
7617 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7618 output_asm_insn (\"mov\\t%0, %1\", operands);
7622 /* Similarly, but the target is memory. */
7623 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7624 output_asm_insn (\"str\\t%1, %0\", operands);
7627 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7630 return \"b%d4\\t%l5\";
7632 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7634 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7638 [(set (attr "far_jump")
7640 (ior (and (eq (symbol_ref ("which_alternative"))
7642 (eq_attr "length" "8"))
7643 (eq_attr "length" "10"))
7644 (const_string "yes")
7645 (const_string "no")))
7646 (set (attr "length")
7648 (eq (symbol_ref ("which_alternative"))
7651 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7652 (le (minus (match_dup 5) (pc)) (const_int 256)))
7655 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7656 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7660 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7661 (le (minus (match_dup 5) (pc)) (const_int 256)))
7664 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7665 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7670 (define_insn "*subsi3_cbranch_scratch"
7673 (match_operator 0 "arm_comparison_operator"
7674 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7675 (match_operand:SI 2 "nonmemory_operand" "l"))
7677 (label_ref (match_operand 3 "" ""))
7680 && (GET_CODE (operands[0]) == EQ
7681 || GET_CODE (operands[0]) == NE
7682 || GET_CODE (operands[0]) == GE
7683 || GET_CODE (operands[0]) == LT)"
7685 output_asm_insn (\"cmp\\t%1, %2\", operands);
7686 switch (get_attr_length (insn))
7688 case 4: return \"b%d0\\t%l3\";
7689 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7690 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7693 [(set (attr "far_jump")
7695 (eq_attr "length" "8")
7696 (const_string "yes")
7697 (const_string "no")))
7698 (set (attr "length")
7700 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7701 (le (minus (match_dup 3) (pc)) (const_int 256)))
7704 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7705 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7710 ;; Comparison and test insns
7712 (define_insn "*arm_cmpsi_insn"
7713 [(set (reg:CC CC_REGNUM)
7714 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7715 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7720 [(set_attr "conds" "set")]
7723 (define_insn "*arm_cmpsi_shiftsi"
7724 [(set (reg:CC CC_REGNUM)
7725 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7726 (match_operator:SI 3 "shift_operator"
7727 [(match_operand:SI 1 "s_register_operand" "r")
7728 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7731 [(set_attr "conds" "set")
7732 (set_attr "shift" "1")
7733 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7734 (const_string "alu_shift")
7735 (const_string "alu_shift_reg")))]
7738 (define_insn "*arm_cmpsi_shiftsi_swp"
7739 [(set (reg:CC_SWP CC_REGNUM)
7740 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7741 [(match_operand:SI 1 "s_register_operand" "r")
7742 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7743 (match_operand:SI 0 "s_register_operand" "r")))]
7746 [(set_attr "conds" "set")
7747 (set_attr "shift" "1")
7748 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7749 (const_string "alu_shift")
7750 (const_string "alu_shift_reg")))]
7753 (define_insn "*arm_cmpsi_negshiftsi_si"
7754 [(set (reg:CC_Z CC_REGNUM)
7756 (neg:SI (match_operator:SI 1 "shift_operator"
7757 [(match_operand:SI 2 "s_register_operand" "r")
7758 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7759 (match_operand:SI 0 "s_register_operand" "r")))]
7762 [(set_attr "conds" "set")
7763 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7764 (const_string "alu_shift")
7765 (const_string "alu_shift_reg")))]
7768 ;; Cirrus SF compare instruction
7769 (define_insn "*cirrus_cmpsf"
7770 [(set (reg:CCFP CC_REGNUM)
7771 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7772 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7773 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7774 "cfcmps%?\\tr15, %V0, %V1"
7775 [(set_attr "type" "mav_farith")
7776 (set_attr "cirrus" "compare")]
7779 ;; Cirrus DF compare instruction
7780 (define_insn "*cirrus_cmpdf"
7781 [(set (reg:CCFP CC_REGNUM)
7782 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7783 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7784 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7785 "cfcmpd%?\\tr15, %V0, %V1"
7786 [(set_attr "type" "mav_farith")
7787 (set_attr "cirrus" "compare")]
7790 (define_insn "*cirrus_cmpdi"
7791 [(set (reg:CC CC_REGNUM)
7792 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7793 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7794 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7795 "cfcmp64%?\\tr15, %V0, %V1"
7796 [(set_attr "type" "mav_farith")
7797 (set_attr "cirrus" "compare")]
7800 ; This insn allows redundant compares to be removed by cse, nothing should
7801 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7802 ; is deleted later on. The match_dup will match the mode here, so that
7803 ; mode changes of the condition codes aren't lost by this even though we don't
7804 ; specify what they are.
7806 (define_insn "*deleted_compare"
7807 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7809 "\\t%@ deleted compare"
7810 [(set_attr "conds" "set")
7811 (set_attr "length" "0")]
7815 ;; Conditional branch insns
7817 (define_expand "cbranch_cc"
7819 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7820 (match_operand 2 "" "")])
7821 (label_ref (match_operand 3 "" ""))
7824 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7825 operands[1], operands[2]);
7826 operands[2] = const0_rtx;"
7830 ;; Patterns to match conditional branch insns.
7833 (define_insn "*arm_cond_branch"
7835 (if_then_else (match_operator 1 "arm_comparison_operator"
7836 [(match_operand 2 "cc_register" "") (const_int 0)])
7837 (label_ref (match_operand 0 "" ""))
7841 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7843 arm_ccfsm_state += 2;
7846 return \"b%d1\\t%l0\";
7848 [(set_attr "conds" "use")
7849 (set_attr "type" "branch")]
7852 (define_insn "*arm_cond_branch_reversed"
7854 (if_then_else (match_operator 1 "arm_comparison_operator"
7855 [(match_operand 2 "cc_register" "") (const_int 0)])
7857 (label_ref (match_operand 0 "" ""))))]
7860 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7862 arm_ccfsm_state += 2;
7865 return \"b%D1\\t%l0\";
7867 [(set_attr "conds" "use")
7868 (set_attr "type" "branch")]
7875 (define_expand "cstore_cc"
7876 [(set (match_operand:SI 0 "s_register_operand" "")
7877 (match_operator:SI 1 "" [(match_operand 2 "" "")
7878 (match_operand 3 "" "")]))]
7880 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7881 operands[2], operands[3]);
7882 operands[3] = const0_rtx;"
7885 (define_insn "*mov_scc"
7886 [(set (match_operand:SI 0 "s_register_operand" "=r")
7887 (match_operator:SI 1 "arm_comparison_operator"
7888 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7890 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7891 [(set_attr "conds" "use")
7892 (set_attr "length" "8")]
7895 (define_insn "*mov_negscc"
7896 [(set (match_operand:SI 0 "s_register_operand" "=r")
7897 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7898 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7900 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7901 [(set_attr "conds" "use")
7902 (set_attr "length" "8")]
7905 (define_insn "*mov_notscc"
7906 [(set (match_operand:SI 0 "s_register_operand" "=r")
7907 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7908 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7910 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7911 [(set_attr "conds" "use")
7912 (set_attr "length" "8")]
7915 (define_expand "cstoresi4"
7916 [(set (match_operand:SI 0 "s_register_operand" "")
7917 (match_operator:SI 1 "arm_comparison_operator"
7918 [(match_operand:SI 2 "s_register_operand" "")
7919 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7920 "TARGET_32BIT || TARGET_THUMB1"
7922 rtx op3, scratch, scratch2;
7926 if (!arm_add_operand (operands[3], SImode))
7927 operands[3] = force_reg (SImode, operands[3]);
7928 emit_insn (gen_cstore_cc (operands[0], operands[1],
7929 operands[2], operands[3]));
7933 if (operands[3] == const0_rtx)
7935 switch (GET_CODE (operands[1]))
7938 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7942 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7946 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7947 NULL_RTX, 0, OPTAB_WIDEN);
7948 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7949 NULL_RTX, 0, OPTAB_WIDEN);
7950 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7951 operands[0], 1, OPTAB_WIDEN);
7955 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7957 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7958 NULL_RTX, 1, OPTAB_WIDEN);
7962 scratch = expand_binop (SImode, ashr_optab, operands[2],
7963 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7964 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7965 NULL_RTX, 0, OPTAB_WIDEN);
7966 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7970 /* LT is handled by generic code. No need for unsigned with 0. */
7977 switch (GET_CODE (operands[1]))
7980 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7981 NULL_RTX, 0, OPTAB_WIDEN);
7982 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7986 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7987 NULL_RTX, 0, OPTAB_WIDEN);
7988 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7992 op3 = force_reg (SImode, operands[3]);
7994 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7995 NULL_RTX, 1, OPTAB_WIDEN);
7996 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7997 NULL_RTX, 0, OPTAB_WIDEN);
7998 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8004 if (!thumb1_cmp_operand (op3, SImode))
8005 op3 = force_reg (SImode, op3);
8006 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8007 NULL_RTX, 0, OPTAB_WIDEN);
8008 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8009 NULL_RTX, 1, OPTAB_WIDEN);
8010 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8015 op3 = force_reg (SImode, operands[3]);
8016 scratch = force_reg (SImode, const0_rtx);
8017 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8023 if (!thumb1_cmp_operand (op3, SImode))
8024 op3 = force_reg (SImode, op3);
8025 scratch = force_reg (SImode, const0_rtx);
8026 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8032 if (!thumb1_cmp_operand (op3, SImode))
8033 op3 = force_reg (SImode, op3);
8034 scratch = gen_reg_rtx (SImode);
8035 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8036 emit_insn (gen_negsi2 (operands[0], scratch));
8040 op3 = force_reg (SImode, operands[3]);
8041 scratch = gen_reg_rtx (SImode);
8042 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8043 emit_insn (gen_negsi2 (operands[0], scratch));
8046 /* No good sequences for GT, LT. */
8053 (define_expand "cstoresf4"
8054 [(set (match_operand:SI 0 "s_register_operand" "")
8055 (match_operator:SI 1 "arm_comparison_operator"
8056 [(match_operand:SF 2 "s_register_operand" "")
8057 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8058 "TARGET_32BIT && TARGET_HARD_FLOAT"
8059 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8060 operands[2], operands[3])); DONE;"
8063 (define_expand "cstoredf4"
8064 [(set (match_operand:SI 0 "s_register_operand" "")
8065 (match_operator:SI 1 "arm_comparison_operator"
8066 [(match_operand:DF 2 "s_register_operand" "")
8067 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8068 "TARGET_32BIT && TARGET_HARD_FLOAT"
8069 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8070 operands[2], operands[3])); DONE;"
8073 ;; this uses the Cirrus DI compare instruction
8074 (define_expand "cstoredi4"
8075 [(set (match_operand:SI 0 "s_register_operand" "")
8076 (match_operator:SI 1 "arm_comparison_operator"
8077 [(match_operand:DI 2 "cirrus_fp_register" "")
8078 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8079 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8080 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8081 operands[2], operands[3])); DONE;"
8085 (define_expand "cstoresi_eq0_thumb1"
8087 [(set (match_operand:SI 0 "s_register_operand" "")
8088 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8090 (clobber (match_dup:SI 2))])]
8092 "operands[2] = gen_reg_rtx (SImode);"
8095 (define_expand "cstoresi_ne0_thumb1"
8097 [(set (match_operand:SI 0 "s_register_operand" "")
8098 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8100 (clobber (match_dup:SI 2))])]
8102 "operands[2] = gen_reg_rtx (SImode);"
8105 (define_insn "*cstoresi_eq0_thumb1_insn"
8106 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8107 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8109 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8112 neg\\t%0, %1\;adc\\t%0, %0, %1
8113 neg\\t%2, %1\;adc\\t%0, %1, %2"
8114 [(set_attr "length" "4")]
8117 (define_insn "*cstoresi_ne0_thumb1_insn"
8118 [(set (match_operand:SI 0 "s_register_operand" "=l")
8119 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8121 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8123 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8124 [(set_attr "length" "4")]
8127 (define_insn "cstoresi_nltu_thumb1"
8128 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8129 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8130 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8132 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8133 [(set_attr "length" "4")]
8136 ;; Used as part of the expansion of thumb les sequence.
8137 (define_insn "thumb1_addsi3_addgeu"
8138 [(set (match_operand:SI 0 "s_register_operand" "=l")
8139 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8140 (match_operand:SI 2 "s_register_operand" "l"))
8141 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8142 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8144 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8145 [(set_attr "length" "4")]
8149 ;; Conditional move insns
8151 (define_expand "movsicc"
8152 [(set (match_operand:SI 0 "s_register_operand" "")
8153 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8154 (match_operand:SI 2 "arm_not_operand" "")
8155 (match_operand:SI 3 "arm_not_operand" "")))]
8159 enum rtx_code code = GET_CODE (operands[1]);
8162 if (code == UNEQ || code == LTGT)
8165 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8166 XEXP (operands[1], 1));
8167 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8171 (define_expand "movsfcc"
8172 [(set (match_operand:SF 0 "s_register_operand" "")
8173 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8174 (match_operand:SF 2 "s_register_operand" "")
8175 (match_operand:SF 3 "nonmemory_operand" "")))]
8176 "TARGET_32BIT && TARGET_HARD_FLOAT"
8179 enum rtx_code code = GET_CODE (operands[1]);
8182 if (code == UNEQ || code == LTGT)
8185 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8186 Otherwise, ensure it is a valid FP add operand */
8187 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8188 || (!arm_float_add_operand (operands[3], SFmode)))
8189 operands[3] = force_reg (SFmode, operands[3]);
8191 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8192 XEXP (operands[1], 1));
8193 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8197 (define_expand "movdfcc"
8198 [(set (match_operand:DF 0 "s_register_operand" "")
8199 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8200 (match_operand:DF 2 "s_register_operand" "")
8201 (match_operand:DF 3 "arm_float_add_operand" "")))]
8202 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8205 enum rtx_code code = GET_CODE (operands[1]);
8208 if (code == UNEQ || code == LTGT)
8211 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8212 XEXP (operands[1], 1));
8213 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8217 (define_insn "*movsicc_insn"
8218 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8220 (match_operator 3 "arm_comparison_operator"
8221 [(match_operand 4 "cc_register" "") (const_int 0)])
8222 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8223 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8230 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8231 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8232 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8233 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8234 [(set_attr "length" "4,4,4,4,8,8,8,8")
8235 (set_attr "conds" "use")]
8238 (define_insn "*movsfcc_soft_insn"
8239 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8240 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8241 [(match_operand 4 "cc_register" "") (const_int 0)])
8242 (match_operand:SF 1 "s_register_operand" "0,r")
8243 (match_operand:SF 2 "s_register_operand" "r,0")))]
8244 "TARGET_ARM && TARGET_SOFT_FLOAT"
8248 [(set_attr "conds" "use")]
8252 ;; Jump and linkage insns
8254 (define_expand "jump"
8256 (label_ref (match_operand 0 "" "")))]
8261 (define_insn "*arm_jump"
8263 (label_ref (match_operand 0 "" "")))]
8267 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8269 arm_ccfsm_state += 2;
8272 return \"b%?\\t%l0\";
8275 [(set_attr "predicable" "yes")]
8278 (define_insn "*thumb_jump"
8280 (label_ref (match_operand 0 "" "")))]
8283 if (get_attr_length (insn) == 2)
8285 return \"bl\\t%l0\\t%@ far jump\";
8287 [(set (attr "far_jump")
8289 (eq_attr "length" "4")
8290 (const_string "yes")
8291 (const_string "no")))
8292 (set (attr "length")
8294 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8295 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8300 (define_expand "call"
8301 [(parallel [(call (match_operand 0 "memory_operand" "")
8302 (match_operand 1 "general_operand" ""))
8303 (use (match_operand 2 "" ""))
8304 (clobber (reg:SI LR_REGNUM))])]
8310 /* In an untyped call, we can get NULL for operand 2. */
8311 if (operands[2] == NULL_RTX)
8312 operands[2] = const0_rtx;
8314 /* Decide if we should generate indirect calls by loading the
8315 32-bit address of the callee into a register before performing the
8317 callee = XEXP (operands[0], 0);
8318 if (GET_CODE (callee) == SYMBOL_REF
8319 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8321 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8323 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8324 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8329 (define_expand "call_internal"
8330 [(parallel [(call (match_operand 0 "memory_operand" "")
8331 (match_operand 1 "general_operand" ""))
8332 (use (match_operand 2 "" ""))
8333 (clobber (reg:SI LR_REGNUM))])])
8335 (define_insn "*call_reg_armv5"
8336 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8337 (match_operand 1 "" ""))
8338 (use (match_operand 2 "" ""))
8339 (clobber (reg:SI LR_REGNUM))]
8340 "TARGET_ARM && arm_arch5"
8342 [(set_attr "type" "call")]
8345 (define_insn "*call_reg_arm"
8346 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8347 (match_operand 1 "" ""))
8348 (use (match_operand 2 "" ""))
8349 (clobber (reg:SI LR_REGNUM))]
8350 "TARGET_ARM && !arm_arch5"
8352 return output_call (operands);
8354 ;; length is worst case, normally it is only two
8355 [(set_attr "length" "12")
8356 (set_attr "type" "call")]
8359 (define_insn "*call_mem"
8360 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8361 (match_operand 1 "" ""))
8362 (use (match_operand 2 "" ""))
8363 (clobber (reg:SI LR_REGNUM))]
8366 return output_call_mem (operands);
8368 [(set_attr "length" "12")
8369 (set_attr "type" "call")]
8372 (define_insn "*call_reg_thumb1_v5"
8373 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8374 (match_operand 1 "" ""))
8375 (use (match_operand 2 "" ""))
8376 (clobber (reg:SI LR_REGNUM))]
8377 "TARGET_THUMB1 && arm_arch5"
8379 [(set_attr "length" "2")
8380 (set_attr "type" "call")]
8383 (define_insn "*call_reg_thumb1"
8384 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8385 (match_operand 1 "" ""))
8386 (use (match_operand 2 "" ""))
8387 (clobber (reg:SI LR_REGNUM))]
8388 "TARGET_THUMB1 && !arm_arch5"
8391 if (!TARGET_CALLER_INTERWORKING)
8392 return thumb_call_via_reg (operands[0]);
8393 else if (operands[1] == const0_rtx)
8394 return \"bl\\t%__interwork_call_via_%0\";
8395 else if (frame_pointer_needed)
8396 return \"bl\\t%__interwork_r7_call_via_%0\";
8398 return \"bl\\t%__interwork_r11_call_via_%0\";
8400 [(set_attr "type" "call")]
8403 (define_expand "call_value"
8404 [(parallel [(set (match_operand 0 "" "")
8405 (call (match_operand 1 "memory_operand" "")
8406 (match_operand 2 "general_operand" "")))
8407 (use (match_operand 3 "" ""))
8408 (clobber (reg:SI LR_REGNUM))])]
8414 /* In an untyped call, we can get NULL for operand 2. */
8415 if (operands[3] == 0)
8416 operands[3] = const0_rtx;
8418 /* Decide if we should generate indirect calls by loading the
8419 32-bit address of the callee into a register before performing the
8421 callee = XEXP (operands[1], 0);
8422 if (GET_CODE (callee) == SYMBOL_REF
8423 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8425 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8427 pat = gen_call_value_internal (operands[0], operands[1],
8428 operands[2], operands[3]);
8429 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8434 (define_expand "call_value_internal"
8435 [(parallel [(set (match_operand 0 "" "")
8436 (call (match_operand 1 "memory_operand" "")
8437 (match_operand 2 "general_operand" "")))
8438 (use (match_operand 3 "" ""))
8439 (clobber (reg:SI LR_REGNUM))])])
8441 (define_insn "*call_value_reg_armv5"
8442 [(set (match_operand 0 "" "")
8443 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8444 (match_operand 2 "" "")))
8445 (use (match_operand 3 "" ""))
8446 (clobber (reg:SI LR_REGNUM))]
8447 "TARGET_ARM && arm_arch5"
8449 [(set_attr "type" "call")]
8452 (define_insn "*call_value_reg_arm"
8453 [(set (match_operand 0 "" "")
8454 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8455 (match_operand 2 "" "")))
8456 (use (match_operand 3 "" ""))
8457 (clobber (reg:SI LR_REGNUM))]
8458 "TARGET_ARM && !arm_arch5"
8460 return output_call (&operands[1]);
8462 [(set_attr "length" "12")
8463 (set_attr "type" "call")]
8466 (define_insn "*call_value_mem"
8467 [(set (match_operand 0 "" "")
8468 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8469 (match_operand 2 "" "")))
8470 (use (match_operand 3 "" ""))
8471 (clobber (reg:SI LR_REGNUM))]
8472 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8474 return output_call_mem (&operands[1]);
8476 [(set_attr "length" "12")
8477 (set_attr "type" "call")]
8480 (define_insn "*call_value_reg_thumb1_v5"
8481 [(set (match_operand 0 "" "")
8482 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8483 (match_operand 2 "" "")))
8484 (use (match_operand 3 "" ""))
8485 (clobber (reg:SI LR_REGNUM))]
8486 "TARGET_THUMB1 && arm_arch5"
8488 [(set_attr "length" "2")
8489 (set_attr "type" "call")]
8492 (define_insn "*call_value_reg_thumb1"
8493 [(set (match_operand 0 "" "")
8494 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8495 (match_operand 2 "" "")))
8496 (use (match_operand 3 "" ""))
8497 (clobber (reg:SI LR_REGNUM))]
8498 "TARGET_THUMB1 && !arm_arch5"
8501 if (!TARGET_CALLER_INTERWORKING)
8502 return thumb_call_via_reg (operands[1]);
8503 else if (operands[2] == const0_rtx)
8504 return \"bl\\t%__interwork_call_via_%1\";
8505 else if (frame_pointer_needed)
8506 return \"bl\\t%__interwork_r7_call_via_%1\";
8508 return \"bl\\t%__interwork_r11_call_via_%1\";
8510 [(set_attr "type" "call")]
8513 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8514 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8516 (define_insn "*call_symbol"
8517 [(call (mem:SI (match_operand:SI 0 "" ""))
8518 (match_operand 1 "" ""))
8519 (use (match_operand 2 "" ""))
8520 (clobber (reg:SI LR_REGNUM))]
8522 && (GET_CODE (operands[0]) == SYMBOL_REF)
8523 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8526 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8528 [(set_attr "type" "call")]
8531 (define_insn "*call_value_symbol"
8532 [(set (match_operand 0 "" "")
8533 (call (mem:SI (match_operand:SI 1 "" ""))
8534 (match_operand:SI 2 "" "")))
8535 (use (match_operand 3 "" ""))
8536 (clobber (reg:SI LR_REGNUM))]
8538 && (GET_CODE (operands[1]) == SYMBOL_REF)
8539 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8542 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8544 [(set_attr "type" "call")]
8547 (define_insn "*call_insn"
8548 [(call (mem:SI (match_operand:SI 0 "" ""))
8549 (match_operand:SI 1 "" ""))
8550 (use (match_operand 2 "" ""))
8551 (clobber (reg:SI LR_REGNUM))]
8553 && GET_CODE (operands[0]) == SYMBOL_REF
8554 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8556 [(set_attr "length" "4")
8557 (set_attr "type" "call")]
8560 (define_insn "*call_value_insn"
8561 [(set (match_operand 0 "" "")
8562 (call (mem:SI (match_operand 1 "" ""))
8563 (match_operand 2 "" "")))
8564 (use (match_operand 3 "" ""))
8565 (clobber (reg:SI LR_REGNUM))]
8567 && GET_CODE (operands[1]) == SYMBOL_REF
8568 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8570 [(set_attr "length" "4")
8571 (set_attr "type" "call")]
8574 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8575 (define_expand "sibcall"
8576 [(parallel [(call (match_operand 0 "memory_operand" "")
8577 (match_operand 1 "general_operand" ""))
8579 (use (match_operand 2 "" ""))])]
8583 if (operands[2] == NULL_RTX)
8584 operands[2] = const0_rtx;
8588 (define_expand "sibcall_value"
8589 [(parallel [(set (match_operand 0 "" "")
8590 (call (match_operand 1 "memory_operand" "")
8591 (match_operand 2 "general_operand" "")))
8593 (use (match_operand 3 "" ""))])]
8597 if (operands[3] == NULL_RTX)
8598 operands[3] = const0_rtx;
8602 (define_insn "*sibcall_insn"
8603 [(call (mem:SI (match_operand:SI 0 "" "X"))
8604 (match_operand 1 "" ""))
8606 (use (match_operand 2 "" ""))]
8607 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8609 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8611 [(set_attr "type" "call")]
8614 (define_insn "*sibcall_value_insn"
8615 [(set (match_operand 0 "" "")
8616 (call (mem:SI (match_operand:SI 1 "" "X"))
8617 (match_operand 2 "" "")))
8619 (use (match_operand 3 "" ""))]
8620 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8622 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8624 [(set_attr "type" "call")]
8627 ;; Often the return insn will be the same as loading from memory, so set attr
8628 (define_insn "return"
8630 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8633 if (arm_ccfsm_state == 2)
8635 arm_ccfsm_state += 2;
8638 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8640 [(set_attr "type" "load1")
8641 (set_attr "length" "12")
8642 (set_attr "predicable" "yes")]
8645 (define_insn "*cond_return"
8647 (if_then_else (match_operator 0 "arm_comparison_operator"
8648 [(match_operand 1 "cc_register" "") (const_int 0)])
8651 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8654 if (arm_ccfsm_state == 2)
8656 arm_ccfsm_state += 2;
8659 return output_return_instruction (operands[0], TRUE, FALSE);
8661 [(set_attr "conds" "use")
8662 (set_attr "length" "12")
8663 (set_attr "type" "load1")]
8666 (define_insn "*cond_return_inverted"
8668 (if_then_else (match_operator 0 "arm_comparison_operator"
8669 [(match_operand 1 "cc_register" "") (const_int 0)])
8672 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8675 if (arm_ccfsm_state == 2)
8677 arm_ccfsm_state += 2;
8680 return output_return_instruction (operands[0], TRUE, TRUE);
8682 [(set_attr "conds" "use")
8683 (set_attr "length" "12")
8684 (set_attr "type" "load1")]
8687 ;; Generate a sequence of instructions to determine if the processor is
8688 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8691 (define_expand "return_addr_mask"
8693 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8695 (set (match_operand:SI 0 "s_register_operand" "")
8696 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8698 (const_int 67108860)))] ; 0x03fffffc
8701 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8704 (define_insn "*check_arch2"
8705 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8706 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8709 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8710 [(set_attr "length" "8")
8711 (set_attr "conds" "set")]
8714 ;; Call subroutine returning any type.
8716 (define_expand "untyped_call"
8717 [(parallel [(call (match_operand 0 "" "")
8719 (match_operand 1 "" "")
8720 (match_operand 2 "" "")])]
8725 rtx par = gen_rtx_PARALLEL (VOIDmode,
8726 rtvec_alloc (XVECLEN (operands[2], 0)));
8727 rtx addr = gen_reg_rtx (Pmode);
8731 emit_move_insn (addr, XEXP (operands[1], 0));
8732 mem = change_address (operands[1], BLKmode, addr);
8734 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8736 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8738 /* Default code only uses r0 as a return value, but we could
8739 be using anything up to 4 registers. */
8740 if (REGNO (src) == R0_REGNUM)
8741 src = gen_rtx_REG (TImode, R0_REGNUM);
8743 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8745 size += GET_MODE_SIZE (GET_MODE (src));
8748 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8753 for (i = 0; i < XVECLEN (par, 0); i++)
8755 HOST_WIDE_INT offset = 0;
8756 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8759 emit_move_insn (addr, plus_constant (addr, size));
8761 mem = change_address (mem, GET_MODE (reg), NULL);
8762 if (REGNO (reg) == R0_REGNUM)
8764 /* On thumb we have to use a write-back instruction. */
8765 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8766 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8767 size = TARGET_ARM ? 16 : 0;
8771 emit_move_insn (mem, reg);
8772 size = GET_MODE_SIZE (GET_MODE (reg));
8776 /* The optimizer does not know that the call sets the function value
8777 registers we stored in the result block. We avoid problems by
8778 claiming that all hard registers are used and clobbered at this
8780 emit_insn (gen_blockage ());
8786 (define_expand "untyped_return"
8787 [(match_operand:BLK 0 "memory_operand" "")
8788 (match_operand 1 "" "")]
8793 rtx addr = gen_reg_rtx (Pmode);
8797 emit_move_insn (addr, XEXP (operands[0], 0));
8798 mem = change_address (operands[0], BLKmode, addr);
8800 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8802 HOST_WIDE_INT offset = 0;
8803 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8806 emit_move_insn (addr, plus_constant (addr, size));
8808 mem = change_address (mem, GET_MODE (reg), NULL);
8809 if (REGNO (reg) == R0_REGNUM)
8811 /* On thumb we have to use a write-back instruction. */
8812 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8813 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8814 size = TARGET_ARM ? 16 : 0;
8818 emit_move_insn (reg, mem);
8819 size = GET_MODE_SIZE (GET_MODE (reg));
8823 /* Emit USE insns before the return. */
8824 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8825 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8827 /* Construct the return. */
8828 expand_naked_return ();
8834 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8835 ;; all of memory. This blocks insns from being moved across this point.
8837 (define_insn "blockage"
8838 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8841 [(set_attr "length" "0")
8842 (set_attr "type" "block")]
8845 (define_expand "casesi"
8846 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8847 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8848 (match_operand:SI 2 "const_int_operand" "") ; total range
8849 (match_operand:SI 3 "" "") ; table label
8850 (match_operand:SI 4 "" "")] ; Out of range label
8851 "TARGET_32BIT || optimize_size || flag_pic"
8854 enum insn_code code;
8855 if (operands[1] != const0_rtx)
8857 rtx reg = gen_reg_rtx (SImode);
8859 emit_insn (gen_addsi3 (reg, operands[0],
8860 GEN_INT (-INTVAL (operands[1]))));
8865 code = CODE_FOR_arm_casesi_internal;
8866 else if (TARGET_THUMB1)
8867 code = CODE_FOR_thumb1_casesi_internal_pic;
8869 code = CODE_FOR_thumb2_casesi_internal_pic;
8871 code = CODE_FOR_thumb2_casesi_internal;
8873 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8874 operands[2] = force_reg (SImode, operands[2]);
8876 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8877 operands[3], operands[4]));
8882 ;; The USE in this pattern is needed to tell flow analysis that this is
8883 ;; a CASESI insn. It has no other purpose.
8884 (define_insn "arm_casesi_internal"
8885 [(parallel [(set (pc)
8887 (leu (match_operand:SI 0 "s_register_operand" "r")
8888 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8889 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8890 (label_ref (match_operand 2 "" ""))))
8891 (label_ref (match_operand 3 "" ""))))
8892 (clobber (reg:CC CC_REGNUM))
8893 (use (label_ref (match_dup 2)))])]
8897 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8898 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8900 [(set_attr "conds" "clob")
8901 (set_attr "length" "12")]
8904 (define_expand "thumb1_casesi_internal_pic"
8905 [(match_operand:SI 0 "s_register_operand" "")
8906 (match_operand:SI 1 "thumb1_cmp_operand" "")
8907 (match_operand 2 "" "")
8908 (match_operand 3 "" "")]
8912 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8913 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8915 reg0 = gen_rtx_REG (SImode, 0);
8916 emit_move_insn (reg0, operands[0]);
8917 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8922 (define_insn "thumb1_casesi_dispatch"
8923 [(parallel [(set (pc) (unspec [(reg:SI 0)
8924 (label_ref (match_operand 0 "" ""))
8925 ;; (label_ref (match_operand 1 "" ""))
8927 UNSPEC_THUMB1_CASESI))
8928 (clobber (reg:SI IP_REGNUM))
8929 (clobber (reg:SI LR_REGNUM))])]
8931 "* return thumb1_output_casesi(operands);"
8932 [(set_attr "length" "4")]
8935 (define_expand "indirect_jump"
8937 (match_operand:SI 0 "s_register_operand" ""))]
8940 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8941 address and use bx. */
8945 tmp = gen_reg_rtx (SImode);
8946 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8952 ;; NB Never uses BX.
8953 (define_insn "*arm_indirect_jump"
8955 (match_operand:SI 0 "s_register_operand" "r"))]
8957 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8958 [(set_attr "predicable" "yes")]
8961 (define_insn "*load_indirect_jump"
8963 (match_operand:SI 0 "memory_operand" "m"))]
8965 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8966 [(set_attr "type" "load1")
8967 (set_attr "pool_range" "4096")
8968 (set_attr "neg_pool_range" "4084")
8969 (set_attr "predicable" "yes")]
8972 ;; NB Never uses BX.
8973 (define_insn "*thumb1_indirect_jump"
8975 (match_operand:SI 0 "register_operand" "l*r"))]
8978 [(set_attr "conds" "clob")
8979 (set_attr "length" "2")]
8989 if (TARGET_UNIFIED_ASM)
8992 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8993 return \"mov\\tr8, r8\";
8995 [(set (attr "length")
8996 (if_then_else (eq_attr "is_thumb" "yes")
9002 ;; Patterns to allow combination of arithmetic, cond code and shifts
9004 (define_insn "*arith_shiftsi"
9005 [(set (match_operand:SI 0 "s_register_operand" "=r")
9006 (match_operator:SI 1 "shiftable_operator"
9007 [(match_operator:SI 3 "shift_operator"
9008 [(match_operand:SI 4 "s_register_operand" "r")
9009 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9010 (match_operand:SI 2 "s_register_operand" "r")]))]
9012 "%i1%?\\t%0, %2, %4%S3"
9013 [(set_attr "predicable" "yes")
9014 (set_attr "shift" "4")
9015 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9016 (const_string "alu_shift")
9017 (const_string "alu_shift_reg")))]
9021 [(set (match_operand:SI 0 "s_register_operand" "")
9022 (match_operator:SI 1 "shiftable_operator"
9023 [(match_operator:SI 2 "shiftable_operator"
9024 [(match_operator:SI 3 "shift_operator"
9025 [(match_operand:SI 4 "s_register_operand" "")
9026 (match_operand:SI 5 "reg_or_int_operand" "")])
9027 (match_operand:SI 6 "s_register_operand" "")])
9028 (match_operand:SI 7 "arm_rhs_operand" "")]))
9029 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9032 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9035 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9038 (define_insn "*arith_shiftsi_compare0"
9039 [(set (reg:CC_NOOV CC_REGNUM)
9040 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9041 [(match_operator:SI 3 "shift_operator"
9042 [(match_operand:SI 4 "s_register_operand" "r")
9043 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9044 (match_operand:SI 2 "s_register_operand" "r")])
9046 (set (match_operand:SI 0 "s_register_operand" "=r")
9047 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9050 "%i1%.\\t%0, %2, %4%S3"
9051 [(set_attr "conds" "set")
9052 (set_attr "shift" "4")
9053 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9054 (const_string "alu_shift")
9055 (const_string "alu_shift_reg")))]
9058 (define_insn "*arith_shiftsi_compare0_scratch"
9059 [(set (reg:CC_NOOV CC_REGNUM)
9060 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9061 [(match_operator:SI 3 "shift_operator"
9062 [(match_operand:SI 4 "s_register_operand" "r")
9063 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9064 (match_operand:SI 2 "s_register_operand" "r")])
9066 (clobber (match_scratch:SI 0 "=r"))]
9068 "%i1%.\\t%0, %2, %4%S3"
9069 [(set_attr "conds" "set")
9070 (set_attr "shift" "4")
9071 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9072 (const_string "alu_shift")
9073 (const_string "alu_shift_reg")))]
9076 (define_insn "*sub_shiftsi"
9077 [(set (match_operand:SI 0 "s_register_operand" "=r")
9078 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9079 (match_operator:SI 2 "shift_operator"
9080 [(match_operand:SI 3 "s_register_operand" "r")
9081 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9083 "sub%?\\t%0, %1, %3%S2"
9084 [(set_attr "predicable" "yes")
9085 (set_attr "shift" "3")
9086 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9087 (const_string "alu_shift")
9088 (const_string "alu_shift_reg")))]
9091 (define_insn "*sub_shiftsi_compare0"
9092 [(set (reg:CC_NOOV CC_REGNUM)
9094 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9095 (match_operator:SI 2 "shift_operator"
9096 [(match_operand:SI 3 "s_register_operand" "r")
9097 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9099 (set (match_operand:SI 0 "s_register_operand" "=r")
9100 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9103 "sub%.\\t%0, %1, %3%S2"
9104 [(set_attr "conds" "set")
9105 (set_attr "shift" "3")
9106 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9107 (const_string "alu_shift")
9108 (const_string "alu_shift_reg")))]
9111 (define_insn "*sub_shiftsi_compare0_scratch"
9112 [(set (reg:CC_NOOV CC_REGNUM)
9114 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9115 (match_operator:SI 2 "shift_operator"
9116 [(match_operand:SI 3 "s_register_operand" "r")
9117 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9119 (clobber (match_scratch:SI 0 "=r"))]
9121 "sub%.\\t%0, %1, %3%S2"
9122 [(set_attr "conds" "set")
9123 (set_attr "shift" "3")
9124 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9125 (const_string "alu_shift")
9126 (const_string "alu_shift_reg")))]
9131 (define_insn "*and_scc"
9132 [(set (match_operand:SI 0 "s_register_operand" "=r")
9133 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9134 [(match_operand 3 "cc_register" "") (const_int 0)])
9135 (match_operand:SI 2 "s_register_operand" "r")))]
9137 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9138 [(set_attr "conds" "use")
9139 (set_attr "length" "8")]
9142 (define_insn "*ior_scc"
9143 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9144 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9145 [(match_operand 3 "cc_register" "") (const_int 0)])
9146 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9150 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9151 [(set_attr "conds" "use")
9152 (set_attr "length" "4,8")]
9155 (define_insn "*compare_scc"
9156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9157 (match_operator:SI 1 "arm_comparison_operator"
9158 [(match_operand:SI 2 "s_register_operand" "r,r")
9159 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9160 (clobber (reg:CC CC_REGNUM))]
9163 if (operands[3] == const0_rtx)
9165 if (GET_CODE (operands[1]) == LT)
9166 return \"mov\\t%0, %2, lsr #31\";
9168 if (GET_CODE (operands[1]) == GE)
9169 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9171 if (GET_CODE (operands[1]) == EQ)
9172 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9175 if (GET_CODE (operands[1]) == NE)
9177 if (which_alternative == 1)
9178 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9179 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9181 if (which_alternative == 1)
9182 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9184 output_asm_insn (\"cmp\\t%2, %3\", operands);
9185 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9187 [(set_attr "conds" "clob")
9188 (set_attr "length" "12")]
9191 (define_insn "*cond_move"
9192 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9193 (if_then_else:SI (match_operator 3 "equality_operator"
9194 [(match_operator 4 "arm_comparison_operator"
9195 [(match_operand 5 "cc_register" "") (const_int 0)])
9197 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9198 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9201 if (GET_CODE (operands[3]) == NE)
9203 if (which_alternative != 1)
9204 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9205 if (which_alternative != 0)
9206 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9209 if (which_alternative != 0)
9210 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9211 if (which_alternative != 1)
9212 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9215 [(set_attr "conds" "use")
9216 (set_attr "length" "4,4,8")]
9219 (define_insn "*cond_arith"
9220 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9221 (match_operator:SI 5 "shiftable_operator"
9222 [(match_operator:SI 4 "arm_comparison_operator"
9223 [(match_operand:SI 2 "s_register_operand" "r,r")
9224 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9225 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9226 (clobber (reg:CC CC_REGNUM))]
9229 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9230 return \"%i5\\t%0, %1, %2, lsr #31\";
9232 output_asm_insn (\"cmp\\t%2, %3\", operands);
9233 if (GET_CODE (operands[5]) == AND)
9234 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9235 else if (GET_CODE (operands[5]) == MINUS)
9236 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9237 else if (which_alternative != 0)
9238 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9239 return \"%i5%d4\\t%0, %1, #1\";
9241 [(set_attr "conds" "clob")
9242 (set_attr "length" "12")]
9245 (define_insn "*cond_sub"
9246 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9247 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9248 (match_operator:SI 4 "arm_comparison_operator"
9249 [(match_operand:SI 2 "s_register_operand" "r,r")
9250 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9251 (clobber (reg:CC CC_REGNUM))]
9254 output_asm_insn (\"cmp\\t%2, %3\", operands);
9255 if (which_alternative != 0)
9256 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9257 return \"sub%d4\\t%0, %1, #1\";
9259 [(set_attr "conds" "clob")
9260 (set_attr "length" "8,12")]
9263 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9264 (define_insn "*cmp_ite0"
9265 [(set (match_operand 6 "dominant_cc_register" "")
9268 (match_operator 4 "arm_comparison_operator"
9269 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9270 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9271 (match_operator:SI 5 "arm_comparison_operator"
9272 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9273 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9279 static const char * const opcodes[4][2] =
9281 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9282 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9283 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9284 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9285 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9286 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9287 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9288 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9291 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9293 return opcodes[which_alternative][swap];
9295 [(set_attr "conds" "set")
9296 (set_attr "length" "8")]
9299 (define_insn "*cmp_ite1"
9300 [(set (match_operand 6 "dominant_cc_register" "")
9303 (match_operator 4 "arm_comparison_operator"
9304 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9305 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9306 (match_operator:SI 5 "arm_comparison_operator"
9307 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9308 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9314 static const char * const opcodes[4][2] =
9316 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9317 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9318 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9319 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9320 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9321 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9322 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9323 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9326 comparison_dominates_p (GET_CODE (operands[5]),
9327 reverse_condition (GET_CODE (operands[4])));
9329 return opcodes[which_alternative][swap];
9331 [(set_attr "conds" "set")
9332 (set_attr "length" "8")]
9335 (define_insn "*cmp_and"
9336 [(set (match_operand 6 "dominant_cc_register" "")
9339 (match_operator 4 "arm_comparison_operator"
9340 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9341 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9342 (match_operator:SI 5 "arm_comparison_operator"
9343 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9344 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9349 static const char *const opcodes[4][2] =
9351 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9352 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9353 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9354 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9355 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9356 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9357 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9358 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9361 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9363 return opcodes[which_alternative][swap];
9365 [(set_attr "conds" "set")
9366 (set_attr "predicable" "no")
9367 (set_attr "length" "8")]
9370 (define_insn "*cmp_ior"
9371 [(set (match_operand 6 "dominant_cc_register" "")
9374 (match_operator 4 "arm_comparison_operator"
9375 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9376 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9377 (match_operator:SI 5 "arm_comparison_operator"
9378 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9379 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9384 static const char *const opcodes[4][2] =
9386 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9387 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9388 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9389 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9390 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9391 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9392 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9393 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9396 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9398 return opcodes[which_alternative][swap];
9401 [(set_attr "conds" "set")
9402 (set_attr "length" "8")]
9405 (define_insn_and_split "*ior_scc_scc"
9406 [(set (match_operand:SI 0 "s_register_operand" "=r")
9407 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9408 [(match_operand:SI 1 "s_register_operand" "r")
9409 (match_operand:SI 2 "arm_add_operand" "rIL")])
9410 (match_operator:SI 6 "arm_comparison_operator"
9411 [(match_operand:SI 4 "s_register_operand" "r")
9412 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9413 (clobber (reg:CC CC_REGNUM))]
9415 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9418 "TARGET_ARM && reload_completed"
9422 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9423 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9425 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9427 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9430 [(set_attr "conds" "clob")
9431 (set_attr "length" "16")])
9433 ; If the above pattern is followed by a CMP insn, then the compare is
9434 ; redundant, since we can rework the conditional instruction that follows.
9435 (define_insn_and_split "*ior_scc_scc_cmp"
9436 [(set (match_operand 0 "dominant_cc_register" "")
9437 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9438 [(match_operand:SI 1 "s_register_operand" "r")
9439 (match_operand:SI 2 "arm_add_operand" "rIL")])
9440 (match_operator:SI 6 "arm_comparison_operator"
9441 [(match_operand:SI 4 "s_register_operand" "r")
9442 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9444 (set (match_operand:SI 7 "s_register_operand" "=r")
9445 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9446 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9449 "TARGET_ARM && reload_completed"
9453 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9454 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9456 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9458 [(set_attr "conds" "set")
9459 (set_attr "length" "16")])
9461 (define_insn_and_split "*and_scc_scc"
9462 [(set (match_operand:SI 0 "s_register_operand" "=r")
9463 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9464 [(match_operand:SI 1 "s_register_operand" "r")
9465 (match_operand:SI 2 "arm_add_operand" "rIL")])
9466 (match_operator:SI 6 "arm_comparison_operator"
9467 [(match_operand:SI 4 "s_register_operand" "r")
9468 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9469 (clobber (reg:CC CC_REGNUM))]
9471 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9474 "TARGET_ARM && reload_completed
9475 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9480 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9481 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9483 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9485 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9488 [(set_attr "conds" "clob")
9489 (set_attr "length" "16")])
9491 ; If the above pattern is followed by a CMP insn, then the compare is
9492 ; redundant, since we can rework the conditional instruction that follows.
9493 (define_insn_and_split "*and_scc_scc_cmp"
9494 [(set (match_operand 0 "dominant_cc_register" "")
9495 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9496 [(match_operand:SI 1 "s_register_operand" "r")
9497 (match_operand:SI 2 "arm_add_operand" "rIL")])
9498 (match_operator:SI 6 "arm_comparison_operator"
9499 [(match_operand:SI 4 "s_register_operand" "r")
9500 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9502 (set (match_operand:SI 7 "s_register_operand" "=r")
9503 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9504 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9507 "TARGET_ARM && reload_completed"
9511 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9512 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9514 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9516 [(set_attr "conds" "set")
9517 (set_attr "length" "16")])
9519 ;; If there is no dominance in the comparison, then we can still save an
9520 ;; instruction in the AND case, since we can know that the second compare
9521 ;; need only zero the value if false (if true, then the value is already
9523 (define_insn_and_split "*and_scc_scc_nodom"
9524 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9525 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9526 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9527 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9528 (match_operator:SI 6 "arm_comparison_operator"
9529 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9530 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9531 (clobber (reg:CC CC_REGNUM))]
9533 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9536 "TARGET_ARM && reload_completed"
9537 [(parallel [(set (match_dup 0)
9538 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9539 (clobber (reg:CC CC_REGNUM))])
9540 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9542 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9545 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9546 operands[4], operands[5]),
9548 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9550 [(set_attr "conds" "clob")
9551 (set_attr "length" "20")])
9554 [(set (reg:CC_NOOV CC_REGNUM)
9555 (compare:CC_NOOV (ior:SI
9556 (and:SI (match_operand:SI 0 "s_register_operand" "")
9558 (match_operator:SI 1 "arm_comparison_operator"
9559 [(match_operand:SI 2 "s_register_operand" "")
9560 (match_operand:SI 3 "arm_add_operand" "")]))
9562 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9565 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9567 (set (reg:CC_NOOV CC_REGNUM)
9568 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9573 [(set (reg:CC_NOOV CC_REGNUM)
9574 (compare:CC_NOOV (ior:SI
9575 (match_operator:SI 1 "arm_comparison_operator"
9576 [(match_operand:SI 2 "s_register_operand" "")
9577 (match_operand:SI 3 "arm_add_operand" "")])
9578 (and:SI (match_operand:SI 0 "s_register_operand" "")
9581 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9584 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9586 (set (reg:CC_NOOV CC_REGNUM)
9587 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9590 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9592 (define_insn "*negscc"
9593 [(set (match_operand:SI 0 "s_register_operand" "=r")
9594 (neg:SI (match_operator 3 "arm_comparison_operator"
9595 [(match_operand:SI 1 "s_register_operand" "r")
9596 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9597 (clobber (reg:CC CC_REGNUM))]
9600 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9601 return \"mov\\t%0, %1, asr #31\";
9603 if (GET_CODE (operands[3]) == NE)
9604 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9606 output_asm_insn (\"cmp\\t%1, %2\", operands);
9607 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9608 return \"mvn%d3\\t%0, #0\";
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "12")]
9614 (define_insn "movcond"
9615 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9617 (match_operator 5 "arm_comparison_operator"
9618 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9619 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9620 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9621 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9622 (clobber (reg:CC CC_REGNUM))]
9625 if (GET_CODE (operands[5]) == LT
9626 && (operands[4] == const0_rtx))
9628 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9630 if (operands[2] == const0_rtx)
9631 return \"and\\t%0, %1, %3, asr #31\";
9632 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9634 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9636 if (operands[1] == const0_rtx)
9637 return \"bic\\t%0, %2, %3, asr #31\";
9638 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9640 /* The only case that falls through to here is when both ops 1 & 2
9644 if (GET_CODE (operands[5]) == GE
9645 && (operands[4] == const0_rtx))
9647 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9649 if (operands[2] == const0_rtx)
9650 return \"bic\\t%0, %1, %3, asr #31\";
9651 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9653 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9655 if (operands[1] == const0_rtx)
9656 return \"and\\t%0, %2, %3, asr #31\";
9657 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9659 /* The only case that falls through to here is when both ops 1 & 2
9662 if (GET_CODE (operands[4]) == CONST_INT
9663 && !const_ok_for_arm (INTVAL (operands[4])))
9664 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9666 output_asm_insn (\"cmp\\t%3, %4\", operands);
9667 if (which_alternative != 0)
9668 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9669 if (which_alternative != 1)
9670 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9673 [(set_attr "conds" "clob")
9674 (set_attr "length" "8,8,12")]
9677 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9679 (define_insn "*ifcompare_plus_move"
9680 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9681 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9682 [(match_operand:SI 4 "s_register_operand" "r,r")
9683 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9685 (match_operand:SI 2 "s_register_operand" "r,r")
9686 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9687 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9688 (clobber (reg:CC CC_REGNUM))]
9691 [(set_attr "conds" "clob")
9692 (set_attr "length" "8,12")]
9695 (define_insn "*if_plus_move"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9698 (match_operator 4 "arm_comparison_operator"
9699 [(match_operand 5 "cc_register" "") (const_int 0)])
9701 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9702 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9703 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9707 sub%d4\\t%0, %2, #%n3
9708 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9709 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9710 [(set_attr "conds" "use")
9711 (set_attr "length" "4,4,8,8")
9712 (set_attr "type" "*,*,*,*")]
9715 (define_insn "*ifcompare_move_plus"
9716 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9717 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9718 [(match_operand:SI 4 "s_register_operand" "r,r")
9719 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9720 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9722 (match_operand:SI 2 "s_register_operand" "r,r")
9723 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9724 (clobber (reg:CC CC_REGNUM))]
9727 [(set_attr "conds" "clob")
9728 (set_attr "length" "8,12")]
9731 (define_insn "*if_move_plus"
9732 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9734 (match_operator 4 "arm_comparison_operator"
9735 [(match_operand 5 "cc_register" "") (const_int 0)])
9736 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9738 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9739 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9743 sub%D4\\t%0, %2, #%n3
9744 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9745 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9746 [(set_attr "conds" "use")
9747 (set_attr "length" "4,4,8,8")
9748 (set_attr "type" "*,*,*,*")]
9751 (define_insn "*ifcompare_arith_arith"
9752 [(set (match_operand:SI 0 "s_register_operand" "=r")
9753 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9754 [(match_operand:SI 5 "s_register_operand" "r")
9755 (match_operand:SI 6 "arm_add_operand" "rIL")])
9756 (match_operator:SI 8 "shiftable_operator"
9757 [(match_operand:SI 1 "s_register_operand" "r")
9758 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9759 (match_operator:SI 7 "shiftable_operator"
9760 [(match_operand:SI 3 "s_register_operand" "r")
9761 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9762 (clobber (reg:CC CC_REGNUM))]
9765 [(set_attr "conds" "clob")
9766 (set_attr "length" "12")]
9769 (define_insn "*if_arith_arith"
9770 [(set (match_operand:SI 0 "s_register_operand" "=r")
9771 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9772 [(match_operand 8 "cc_register" "") (const_int 0)])
9773 (match_operator:SI 6 "shiftable_operator"
9774 [(match_operand:SI 1 "s_register_operand" "r")
9775 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9776 (match_operator:SI 7 "shiftable_operator"
9777 [(match_operand:SI 3 "s_register_operand" "r")
9778 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9780 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9781 [(set_attr "conds" "use")
9782 (set_attr "length" "8")]
9785 (define_insn "*ifcompare_arith_move"
9786 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9787 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9788 [(match_operand:SI 2 "s_register_operand" "r,r")
9789 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9790 (match_operator:SI 7 "shiftable_operator"
9791 [(match_operand:SI 4 "s_register_operand" "r,r")
9792 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9793 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9794 (clobber (reg:CC CC_REGNUM))]
9797 /* If we have an operation where (op x 0) is the identity operation and
9798 the conditional operator is LT or GE and we are comparing against zero and
9799 everything is in registers then we can do this in two instructions. */
9800 if (operands[3] == const0_rtx
9801 && GET_CODE (operands[7]) != AND
9802 && GET_CODE (operands[5]) == REG
9803 && GET_CODE (operands[1]) == REG
9804 && REGNO (operands[1]) == REGNO (operands[4])
9805 && REGNO (operands[4]) != REGNO (operands[0]))
9807 if (GET_CODE (operands[6]) == LT)
9808 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9809 else if (GET_CODE (operands[6]) == GE)
9810 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9812 if (GET_CODE (operands[3]) == CONST_INT
9813 && !const_ok_for_arm (INTVAL (operands[3])))
9814 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9816 output_asm_insn (\"cmp\\t%2, %3\", operands);
9817 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9818 if (which_alternative != 0)
9819 return \"mov%D6\\t%0, %1\";
9822 [(set_attr "conds" "clob")
9823 (set_attr "length" "8,12")]
9826 (define_insn "*if_arith_move"
9827 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9828 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9829 [(match_operand 6 "cc_register" "") (const_int 0)])
9830 (match_operator:SI 5 "shiftable_operator"
9831 [(match_operand:SI 2 "s_register_operand" "r,r")
9832 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9833 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9837 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9838 [(set_attr "conds" "use")
9839 (set_attr "length" "4,8")
9840 (set_attr "type" "*,*")]
9843 (define_insn "*ifcompare_move_arith"
9844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9845 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9846 [(match_operand:SI 4 "s_register_operand" "r,r")
9847 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9848 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9849 (match_operator:SI 7 "shiftable_operator"
9850 [(match_operand:SI 2 "s_register_operand" "r,r")
9851 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9852 (clobber (reg:CC CC_REGNUM))]
9855 /* If we have an operation where (op x 0) is the identity operation and
9856 the conditional operator is LT or GE and we are comparing against zero and
9857 everything is in registers then we can do this in two instructions */
9858 if (operands[5] == const0_rtx
9859 && GET_CODE (operands[7]) != AND
9860 && GET_CODE (operands[3]) == REG
9861 && GET_CODE (operands[1]) == REG
9862 && REGNO (operands[1]) == REGNO (operands[2])
9863 && REGNO (operands[2]) != REGNO (operands[0]))
9865 if (GET_CODE (operands[6]) == GE)
9866 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9867 else if (GET_CODE (operands[6]) == LT)
9868 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9871 if (GET_CODE (operands[5]) == CONST_INT
9872 && !const_ok_for_arm (INTVAL (operands[5])))
9873 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9875 output_asm_insn (\"cmp\\t%4, %5\", operands);
9877 if (which_alternative != 0)
9878 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9879 return \"%I7%D6\\t%0, %2, %3\";
9881 [(set_attr "conds" "clob")
9882 (set_attr "length" "8,12")]
9885 (define_insn "*if_move_arith"
9886 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9888 (match_operator 4 "arm_comparison_operator"
9889 [(match_operand 6 "cc_register" "") (const_int 0)])
9890 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9891 (match_operator:SI 5 "shiftable_operator"
9892 [(match_operand:SI 2 "s_register_operand" "r,r")
9893 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9897 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9898 [(set_attr "conds" "use")
9899 (set_attr "length" "4,8")
9900 (set_attr "type" "*,*")]
9903 (define_insn "*ifcompare_move_not"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9906 (match_operator 5 "arm_comparison_operator"
9907 [(match_operand:SI 3 "s_register_operand" "r,r")
9908 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9909 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9911 (match_operand:SI 2 "s_register_operand" "r,r"))))
9912 (clobber (reg:CC CC_REGNUM))]
9915 [(set_attr "conds" "clob")
9916 (set_attr "length" "8,12")]
9919 (define_insn "*if_move_not"
9920 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9922 (match_operator 4 "arm_comparison_operator"
9923 [(match_operand 3 "cc_register" "") (const_int 0)])
9924 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9925 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9929 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9930 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9931 [(set_attr "conds" "use")
9932 (set_attr "length" "4,8,8")]
9935 (define_insn "*ifcompare_not_move"
9936 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9938 (match_operator 5 "arm_comparison_operator"
9939 [(match_operand:SI 3 "s_register_operand" "r,r")
9940 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9942 (match_operand:SI 2 "s_register_operand" "r,r"))
9943 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9944 (clobber (reg:CC CC_REGNUM))]
9947 [(set_attr "conds" "clob")
9948 (set_attr "length" "8,12")]
9951 (define_insn "*if_not_move"
9952 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9954 (match_operator 4 "arm_comparison_operator"
9955 [(match_operand 3 "cc_register" "") (const_int 0)])
9956 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9957 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9961 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9962 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9963 [(set_attr "conds" "use")
9964 (set_attr "length" "4,8,8")]
9967 (define_insn "*ifcompare_shift_move"
9968 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9970 (match_operator 6 "arm_comparison_operator"
9971 [(match_operand:SI 4 "s_register_operand" "r,r")
9972 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9973 (match_operator:SI 7 "shift_operator"
9974 [(match_operand:SI 2 "s_register_operand" "r,r")
9975 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9976 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9977 (clobber (reg:CC CC_REGNUM))]
9980 [(set_attr "conds" "clob")
9981 (set_attr "length" "8,12")]
9984 (define_insn "*if_shift_move"
9985 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9987 (match_operator 5 "arm_comparison_operator"
9988 [(match_operand 6 "cc_register" "") (const_int 0)])
9989 (match_operator:SI 4 "shift_operator"
9990 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9991 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9992 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9996 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9997 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9998 [(set_attr "conds" "use")
9999 (set_attr "shift" "2")
10000 (set_attr "length" "4,8,8")
10001 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10002 (const_string "alu_shift")
10003 (const_string "alu_shift_reg")))]
10006 (define_insn "*ifcompare_move_shift"
10007 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10009 (match_operator 6 "arm_comparison_operator"
10010 [(match_operand:SI 4 "s_register_operand" "r,r")
10011 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10012 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10013 (match_operator:SI 7 "shift_operator"
10014 [(match_operand:SI 2 "s_register_operand" "r,r")
10015 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10016 (clobber (reg:CC CC_REGNUM))]
10019 [(set_attr "conds" "clob")
10020 (set_attr "length" "8,12")]
10023 (define_insn "*if_move_shift"
10024 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10026 (match_operator 5 "arm_comparison_operator"
10027 [(match_operand 6 "cc_register" "") (const_int 0)])
10028 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10029 (match_operator:SI 4 "shift_operator"
10030 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10031 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10035 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10036 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10037 [(set_attr "conds" "use")
10038 (set_attr "shift" "2")
10039 (set_attr "length" "4,8,8")
10040 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10041 (const_string "alu_shift")
10042 (const_string "alu_shift_reg")))]
10045 (define_insn "*ifcompare_shift_shift"
10046 [(set (match_operand:SI 0 "s_register_operand" "=r")
10048 (match_operator 7 "arm_comparison_operator"
10049 [(match_operand:SI 5 "s_register_operand" "r")
10050 (match_operand:SI 6 "arm_add_operand" "rIL")])
10051 (match_operator:SI 8 "shift_operator"
10052 [(match_operand:SI 1 "s_register_operand" "r")
10053 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10054 (match_operator:SI 9 "shift_operator"
10055 [(match_operand:SI 3 "s_register_operand" "r")
10056 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10057 (clobber (reg:CC CC_REGNUM))]
10060 [(set_attr "conds" "clob")
10061 (set_attr "length" "12")]
10064 (define_insn "*if_shift_shift"
10065 [(set (match_operand:SI 0 "s_register_operand" "=r")
10067 (match_operator 5 "arm_comparison_operator"
10068 [(match_operand 8 "cc_register" "") (const_int 0)])
10069 (match_operator:SI 6 "shift_operator"
10070 [(match_operand:SI 1 "s_register_operand" "r")
10071 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10072 (match_operator:SI 7 "shift_operator"
10073 [(match_operand:SI 3 "s_register_operand" "r")
10074 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10076 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10077 [(set_attr "conds" "use")
10078 (set_attr "shift" "1")
10079 (set_attr "length" "8")
10080 (set (attr "type") (if_then_else
10081 (and (match_operand 2 "const_int_operand" "")
10082 (match_operand 4 "const_int_operand" ""))
10083 (const_string "alu_shift")
10084 (const_string "alu_shift_reg")))]
10087 (define_insn "*ifcompare_not_arith"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r")
10090 (match_operator 6 "arm_comparison_operator"
10091 [(match_operand:SI 4 "s_register_operand" "r")
10092 (match_operand:SI 5 "arm_add_operand" "rIL")])
10093 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10094 (match_operator:SI 7 "shiftable_operator"
10095 [(match_operand:SI 2 "s_register_operand" "r")
10096 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10097 (clobber (reg:CC CC_REGNUM))]
10100 [(set_attr "conds" "clob")
10101 (set_attr "length" "12")]
10104 (define_insn "*if_not_arith"
10105 [(set (match_operand:SI 0 "s_register_operand" "=r")
10107 (match_operator 5 "arm_comparison_operator"
10108 [(match_operand 4 "cc_register" "") (const_int 0)])
10109 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10110 (match_operator:SI 6 "shiftable_operator"
10111 [(match_operand:SI 2 "s_register_operand" "r")
10112 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10114 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10115 [(set_attr "conds" "use")
10116 (set_attr "length" "8")]
10119 (define_insn "*ifcompare_arith_not"
10120 [(set (match_operand:SI 0 "s_register_operand" "=r")
10122 (match_operator 6 "arm_comparison_operator"
10123 [(match_operand:SI 4 "s_register_operand" "r")
10124 (match_operand:SI 5 "arm_add_operand" "rIL")])
10125 (match_operator:SI 7 "shiftable_operator"
10126 [(match_operand:SI 2 "s_register_operand" "r")
10127 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10128 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10129 (clobber (reg:CC CC_REGNUM))]
10132 [(set_attr "conds" "clob")
10133 (set_attr "length" "12")]
10136 (define_insn "*if_arith_not"
10137 [(set (match_operand:SI 0 "s_register_operand" "=r")
10139 (match_operator 5 "arm_comparison_operator"
10140 [(match_operand 4 "cc_register" "") (const_int 0)])
10141 (match_operator:SI 6 "shiftable_operator"
10142 [(match_operand:SI 2 "s_register_operand" "r")
10143 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10144 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10146 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10147 [(set_attr "conds" "use")
10148 (set_attr "length" "8")]
10151 (define_insn "*ifcompare_neg_move"
10152 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10154 (match_operator 5 "arm_comparison_operator"
10155 [(match_operand:SI 3 "s_register_operand" "r,r")
10156 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10157 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10158 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10159 (clobber (reg:CC CC_REGNUM))]
10162 [(set_attr "conds" "clob")
10163 (set_attr "length" "8,12")]
10166 (define_insn "*if_neg_move"
10167 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10169 (match_operator 4 "arm_comparison_operator"
10170 [(match_operand 3 "cc_register" "") (const_int 0)])
10171 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10172 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10175 rsb%d4\\t%0, %2, #0
10176 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10177 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10178 [(set_attr "conds" "use")
10179 (set_attr "length" "4,8,8")]
10182 (define_insn "*ifcompare_move_neg"
10183 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10185 (match_operator 5 "arm_comparison_operator"
10186 [(match_operand:SI 3 "s_register_operand" "r,r")
10187 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10188 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10189 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10190 (clobber (reg:CC CC_REGNUM))]
10193 [(set_attr "conds" "clob")
10194 (set_attr "length" "8,12")]
10197 (define_insn "*if_move_neg"
10198 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10200 (match_operator 4 "arm_comparison_operator"
10201 [(match_operand 3 "cc_register" "") (const_int 0)])
10202 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10203 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10206 rsb%D4\\t%0, %2, #0
10207 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10208 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10209 [(set_attr "conds" "use")
10210 (set_attr "length" "4,8,8")]
10213 (define_insn "*arith_adjacentmem"
10214 [(set (match_operand:SI 0 "s_register_operand" "=r")
10215 (match_operator:SI 1 "shiftable_operator"
10216 [(match_operand:SI 2 "memory_operand" "m")
10217 (match_operand:SI 3 "memory_operand" "m")]))
10218 (clobber (match_scratch:SI 4 "=r"))]
10219 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10225 HOST_WIDE_INT val1 = 0, val2 = 0;
10227 if (REGNO (operands[0]) > REGNO (operands[4]))
10229 ldm[1] = operands[4];
10230 ldm[2] = operands[0];
10234 ldm[1] = operands[0];
10235 ldm[2] = operands[4];
10238 base_reg = XEXP (operands[2], 0);
10240 if (!REG_P (base_reg))
10242 val1 = INTVAL (XEXP (base_reg, 1));
10243 base_reg = XEXP (base_reg, 0);
10246 if (!REG_P (XEXP (operands[3], 0)))
10247 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10249 arith[0] = operands[0];
10250 arith[3] = operands[1];
10264 if (val1 !=0 && val2 != 0)
10268 if (val1 == 4 || val2 == 4)
10269 /* Other val must be 8, since we know they are adjacent and neither
10271 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10272 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10274 ldm[0] = ops[0] = operands[4];
10276 ops[2] = GEN_INT (val1);
10277 output_add_immediate (ops);
10279 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10281 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10285 /* Offset is out of range for a single add, so use two ldr. */
10288 ops[2] = GEN_INT (val1);
10289 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10291 ops[2] = GEN_INT (val2);
10292 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10295 else if (val1 != 0)
10298 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10300 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10305 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10307 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10309 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10312 [(set_attr "length" "12")
10313 (set_attr "predicable" "yes")
10314 (set_attr "type" "load1")]
10317 ; This pattern is never tried by combine, so do it as a peephole
10320 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10321 (match_operand:SI 1 "arm_general_register_operand" ""))
10322 (set (reg:CC CC_REGNUM)
10323 (compare:CC (match_dup 1) (const_int 0)))]
10325 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10326 (set (match_dup 0) (match_dup 1))])]
10330 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10331 ; reversed, check that the memory references aren't volatile.
10334 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10335 (match_operand:SI 4 "memory_operand" "m"))
10336 (set (match_operand:SI 1 "s_register_operand" "=rk")
10337 (match_operand:SI 5 "memory_operand" "m"))
10338 (set (match_operand:SI 2 "s_register_operand" "=rk")
10339 (match_operand:SI 6 "memory_operand" "m"))
10340 (set (match_operand:SI 3 "s_register_operand" "=rk")
10341 (match_operand:SI 7 "memory_operand" "m"))]
10342 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10344 return emit_ldm_seq (operands, 4);
10349 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10350 (match_operand:SI 3 "memory_operand" "m"))
10351 (set (match_operand:SI 1 "s_register_operand" "=rk")
10352 (match_operand:SI 4 "memory_operand" "m"))
10353 (set (match_operand:SI 2 "s_register_operand" "=rk")
10354 (match_operand:SI 5 "memory_operand" "m"))]
10355 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10357 return emit_ldm_seq (operands, 3);
10362 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10363 (match_operand:SI 2 "memory_operand" "m"))
10364 (set (match_operand:SI 1 "s_register_operand" "=rk")
10365 (match_operand:SI 3 "memory_operand" "m"))]
10366 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10368 return emit_ldm_seq (operands, 2);
10373 [(set (match_operand:SI 4 "memory_operand" "=m")
10374 (match_operand:SI 0 "s_register_operand" "rk"))
10375 (set (match_operand:SI 5 "memory_operand" "=m")
10376 (match_operand:SI 1 "s_register_operand" "rk"))
10377 (set (match_operand:SI 6 "memory_operand" "=m")
10378 (match_operand:SI 2 "s_register_operand" "rk"))
10379 (set (match_operand:SI 7 "memory_operand" "=m")
10380 (match_operand:SI 3 "s_register_operand" "rk"))]
10381 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10383 return emit_stm_seq (operands, 4);
10388 [(set (match_operand:SI 3 "memory_operand" "=m")
10389 (match_operand:SI 0 "s_register_operand" "rk"))
10390 (set (match_operand:SI 4 "memory_operand" "=m")
10391 (match_operand:SI 1 "s_register_operand" "rk"))
10392 (set (match_operand:SI 5 "memory_operand" "=m")
10393 (match_operand:SI 2 "s_register_operand" "rk"))]
10394 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10396 return emit_stm_seq (operands, 3);
10401 [(set (match_operand:SI 2 "memory_operand" "=m")
10402 (match_operand:SI 0 "s_register_operand" "rk"))
10403 (set (match_operand:SI 3 "memory_operand" "=m")
10404 (match_operand:SI 1 "s_register_operand" "rk"))]
10405 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10407 return emit_stm_seq (operands, 2);
10412 [(set (match_operand:SI 0 "s_register_operand" "")
10413 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10415 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10416 [(match_operand:SI 3 "s_register_operand" "")
10417 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10418 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10420 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10421 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10426 ;; This split can be used because CC_Z mode implies that the following
10427 ;; branch will be an equality, or an unsigned inequality, so the sign
10428 ;; extension is not needed.
10431 [(set (reg:CC_Z CC_REGNUM)
10433 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10435 (match_operand 1 "const_int_operand" "")))
10436 (clobber (match_scratch:SI 2 ""))]
10438 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10439 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10440 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10441 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10443 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10446 ;; ??? Check the patterns above for Thumb-2 usefulness
10448 (define_expand "prologue"
10449 [(clobber (const_int 0))]
10452 arm_expand_prologue ();
10454 thumb1_expand_prologue ();
10459 (define_expand "epilogue"
10460 [(clobber (const_int 0))]
10463 if (crtl->calls_eh_return)
10464 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10466 thumb1_expand_epilogue ();
10467 else if (USE_RETURN_INSN (FALSE))
10469 emit_jump_insn (gen_return ());
10472 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10474 gen_rtx_RETURN (VOIDmode)),
10475 VUNSPEC_EPILOGUE));
10480 ;; Note - although unspec_volatile's USE all hard registers,
10481 ;; USEs are ignored after relaod has completed. Thus we need
10482 ;; to add an unspec of the link register to ensure that flow
10483 ;; does not think that it is unused by the sibcall branch that
10484 ;; will replace the standard function epilogue.
10485 (define_insn "sibcall_epilogue"
10486 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10487 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10490 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10491 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10492 return arm_output_epilogue (next_nonnote_insn (insn));
10494 ;; Length is absolute worst case
10495 [(set_attr "length" "44")
10496 (set_attr "type" "block")
10497 ;; We don't clobber the conditions, but the potential length of this
10498 ;; operation is sufficient to make conditionalizing the sequence
10499 ;; unlikely to be profitable.
10500 (set_attr "conds" "clob")]
10503 (define_insn "*epilogue_insns"
10504 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10508 return arm_output_epilogue (NULL);
10509 else /* TARGET_THUMB1 */
10510 return thumb_unexpanded_epilogue ();
10512 ; Length is absolute worst case
10513 [(set_attr "length" "44")
10514 (set_attr "type" "block")
10515 ;; We don't clobber the conditions, but the potential length of this
10516 ;; operation is sufficient to make conditionalizing the sequence
10517 ;; unlikely to be profitable.
10518 (set_attr "conds" "clob")]
10521 (define_expand "eh_epilogue"
10522 [(use (match_operand:SI 0 "register_operand" ""))
10523 (use (match_operand:SI 1 "register_operand" ""))
10524 (use (match_operand:SI 2 "register_operand" ""))]
10528 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10529 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10531 rtx ra = gen_rtx_REG (Pmode, 2);
10533 emit_move_insn (ra, operands[2]);
10536 /* This is a hack -- we may have crystalized the function type too
10538 cfun->machine->func_type = 0;
10542 ;; This split is only used during output to reduce the number of patterns
10543 ;; that need assembler instructions adding to them. We allowed the setting
10544 ;; of the conditions to be implicit during rtl generation so that
10545 ;; the conditional compare patterns would work. However this conflicts to
10546 ;; some extent with the conditional data operations, so we have to split them
10549 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10550 ;; conditional execution sufficient?
10553 [(set (match_operand:SI 0 "s_register_operand" "")
10554 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10555 [(match_operand 2 "" "") (match_operand 3 "" "")])
10557 (match_operand 4 "" "")))
10558 (clobber (reg:CC CC_REGNUM))]
10559 "TARGET_ARM && reload_completed"
10560 [(set (match_dup 5) (match_dup 6))
10561 (cond_exec (match_dup 7)
10562 (set (match_dup 0) (match_dup 4)))]
10565 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10566 operands[2], operands[3]);
10567 enum rtx_code rc = GET_CODE (operands[1]);
10569 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10570 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10571 if (mode == CCFPmode || mode == CCFPEmode)
10572 rc = reverse_condition_maybe_unordered (rc);
10574 rc = reverse_condition (rc);
10576 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10581 [(set (match_operand:SI 0 "s_register_operand" "")
10582 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10583 [(match_operand 2 "" "") (match_operand 3 "" "")])
10584 (match_operand 4 "" "")
10586 (clobber (reg:CC CC_REGNUM))]
10587 "TARGET_ARM && reload_completed"
10588 [(set (match_dup 5) (match_dup 6))
10589 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10590 (set (match_dup 0) (match_dup 4)))]
10593 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10594 operands[2], operands[3]);
10596 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10597 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10602 [(set (match_operand:SI 0 "s_register_operand" "")
10603 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10604 [(match_operand 2 "" "") (match_operand 3 "" "")])
10605 (match_operand 4 "" "")
10606 (match_operand 5 "" "")))
10607 (clobber (reg:CC CC_REGNUM))]
10608 "TARGET_ARM && reload_completed"
10609 [(set (match_dup 6) (match_dup 7))
10610 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10611 (set (match_dup 0) (match_dup 4)))
10612 (cond_exec (match_dup 8)
10613 (set (match_dup 0) (match_dup 5)))]
10616 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10617 operands[2], operands[3]);
10618 enum rtx_code rc = GET_CODE (operands[1]);
10620 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10621 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10622 if (mode == CCFPmode || mode == CCFPEmode)
10623 rc = reverse_condition_maybe_unordered (rc);
10625 rc = reverse_condition (rc);
10627 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10632 [(set (match_operand:SI 0 "s_register_operand" "")
10633 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10634 [(match_operand:SI 2 "s_register_operand" "")
10635 (match_operand:SI 3 "arm_add_operand" "")])
10636 (match_operand:SI 4 "arm_rhs_operand" "")
10638 (match_operand:SI 5 "s_register_operand" ""))))
10639 (clobber (reg:CC CC_REGNUM))]
10640 "TARGET_ARM && reload_completed"
10641 [(set (match_dup 6) (match_dup 7))
10642 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10643 (set (match_dup 0) (match_dup 4)))
10644 (cond_exec (match_dup 8)
10645 (set (match_dup 0) (not:SI (match_dup 5))))]
10648 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10649 operands[2], operands[3]);
10650 enum rtx_code rc = GET_CODE (operands[1]);
10652 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10653 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10654 if (mode == CCFPmode || mode == CCFPEmode)
10655 rc = reverse_condition_maybe_unordered (rc);
10657 rc = reverse_condition (rc);
10659 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10663 (define_insn "*cond_move_not"
10664 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10665 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10666 [(match_operand 3 "cc_register" "") (const_int 0)])
10667 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10669 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10673 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10674 [(set_attr "conds" "use")
10675 (set_attr "length" "4,8")]
10678 ;; The next two patterns occur when an AND operation is followed by a
10679 ;; scc insn sequence
10681 (define_insn "*sign_extract_onebit"
10682 [(set (match_operand:SI 0 "s_register_operand" "=r")
10683 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10685 (match_operand:SI 2 "const_int_operand" "n")))
10686 (clobber (reg:CC CC_REGNUM))]
10689 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10690 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10691 return \"mvnne\\t%0, #0\";
10693 [(set_attr "conds" "clob")
10694 (set_attr "length" "8")]
10697 (define_insn "*not_signextract_onebit"
10698 [(set (match_operand:SI 0 "s_register_operand" "=r")
10700 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10702 (match_operand:SI 2 "const_int_operand" "n"))))
10703 (clobber (reg:CC CC_REGNUM))]
10706 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10707 output_asm_insn (\"tst\\t%1, %2\", operands);
10708 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10709 return \"movne\\t%0, #0\";
10711 [(set_attr "conds" "clob")
10712 (set_attr "length" "12")]
10714 ;; ??? The above patterns need auditing for Thumb-2
10716 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10717 ;; expressions. For simplicity, the first register is also in the unspec
10719 (define_insn "*push_multi"
10720 [(match_parallel 2 "multi_register_push"
10721 [(set (match_operand:BLK 0 "memory_operand" "=m")
10722 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10723 UNSPEC_PUSH_MULT))])]
10727 int num_saves = XVECLEN (operands[2], 0);
10729 /* For the StrongARM at least it is faster to
10730 use STR to store only a single register.
10731 In Thumb mode always use push, and the assembler will pick
10732 something appropriate. */
10733 if (num_saves == 1 && TARGET_ARM)
10734 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10741 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10743 strcpy (pattern, \"push\\t{%1\");
10745 for (i = 1; i < num_saves; i++)
10747 strcat (pattern, \", %|\");
10749 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10752 strcat (pattern, \"}\");
10753 output_asm_insn (pattern, operands);
10758 [(set_attr "type" "store4")]
10761 (define_insn "stack_tie"
10762 [(set (mem:BLK (scratch))
10763 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10764 (match_operand:SI 1 "s_register_operand" "rk")]
10768 [(set_attr "length" "0")]
10771 ;; Similarly for the floating point registers
10772 (define_insn "*push_fp_multi"
10773 [(match_parallel 2 "multi_register_push"
10774 [(set (match_operand:BLK 0 "memory_operand" "=m")
10775 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10776 UNSPEC_PUSH_MULT))])]
10777 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10782 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10783 output_asm_insn (pattern, operands);
10786 [(set_attr "type" "f_store")]
10789 ;; Special patterns for dealing with the constant pool
10791 (define_insn "align_4"
10792 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10795 assemble_align (32);
10800 (define_insn "align_8"
10801 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10804 assemble_align (64);
10809 (define_insn "consttable_end"
10810 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10813 making_const_table = FALSE;
10818 (define_insn "consttable_1"
10819 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10822 making_const_table = TRUE;
10823 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10824 assemble_zeros (3);
10827 [(set_attr "length" "4")]
10830 (define_insn "consttable_2"
10831 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10834 making_const_table = TRUE;
10835 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10836 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10837 assemble_zeros (2);
10840 [(set_attr "length" "4")]
10843 (define_insn "consttable_4"
10844 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10848 rtx x = operands[0];
10849 making_const_table = TRUE;
10850 switch (GET_MODE_CLASS (GET_MODE (x)))
10853 if (GET_MODE (x) == HFmode)
10854 arm_emit_fp16_const (x);
10858 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10859 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10863 assemble_integer (x, 4, BITS_PER_WORD, 1);
10864 mark_symbol_refs_as_used (x);
10869 [(set_attr "length" "4")]
10872 (define_insn "consttable_8"
10873 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10877 making_const_table = TRUE;
10878 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10883 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10884 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10888 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10893 [(set_attr "length" "8")]
10896 (define_insn "consttable_16"
10897 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10901 making_const_table = TRUE;
10902 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10907 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10908 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10912 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10917 [(set_attr "length" "16")]
10920 ;; Miscellaneous Thumb patterns
10922 (define_expand "tablejump"
10923 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10924 (use (label_ref (match_operand 1 "" "")))])]
10929 /* Hopefully, CSE will eliminate this copy. */
10930 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10931 rtx reg2 = gen_reg_rtx (SImode);
10933 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10934 operands[0] = reg2;
10939 ;; NB never uses BX.
10940 (define_insn "*thumb1_tablejump"
10941 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10942 (use (label_ref (match_operand 1 "" "")))]
10945 [(set_attr "length" "2")]
10948 ;; V5 Instructions,
10950 (define_insn "clzsi2"
10951 [(set (match_operand:SI 0 "s_register_operand" "=r")
10952 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10953 "TARGET_32BIT && arm_arch5"
10955 [(set_attr "predicable" "yes")
10956 (set_attr "insn" "clz")])
10958 ;; V5E instructions.
10960 (define_insn "prefetch"
10961 [(prefetch (match_operand:SI 0 "address_operand" "p")
10962 (match_operand:SI 1 "" "")
10963 (match_operand:SI 2 "" ""))]
10964 "TARGET_32BIT && arm_arch5e"
10967 ;; General predication pattern
10970 [(match_operator 0 "arm_comparison_operator"
10971 [(match_operand 1 "cc_register" "")
10977 (define_insn "prologue_use"
10978 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10980 "%@ %0 needed for prologue"
10981 [(set_attr "length" "0")]
10985 ;; Patterns for exception handling
10987 (define_expand "eh_return"
10988 [(use (match_operand 0 "general_operand" ""))]
10993 emit_insn (gen_arm_eh_return (operands[0]));
10995 emit_insn (gen_thumb_eh_return (operands[0]));
11000 ;; We can't expand this before we know where the link register is stored.
11001 (define_insn_and_split "arm_eh_return"
11002 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11004 (clobber (match_scratch:SI 1 "=&r"))]
11007 "&& reload_completed"
11011 arm_set_return_address (operands[0], operands[1]);
11016 (define_insn_and_split "thumb_eh_return"
11017 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11019 (clobber (match_scratch:SI 1 "=&l"))]
11022 "&& reload_completed"
11026 thumb_set_return_address (operands[0], operands[1]);
11034 (define_insn "load_tp_hard"
11035 [(set (match_operand:SI 0 "register_operand" "=r")
11036 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11038 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11039 [(set_attr "predicable" "yes")]
11042 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11043 (define_insn "load_tp_soft"
11044 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11045 (clobber (reg:SI LR_REGNUM))
11046 (clobber (reg:SI IP_REGNUM))
11047 (clobber (reg:CC CC_REGNUM))]
11049 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11050 [(set_attr "conds" "clob")]
11053 ;; Load the FPA co-processor patterns
11055 ;; Load the Maverick co-processor patterns
11056 (include "cirrus.md")
11057 ;; Vector bits common to IWMMXT and Neon
11058 (include "vec-common.md")
11059 ;; Load the Intel Wireless Multimedia Extension patterns
11060 (include "iwmmxt.md")
11061 ;; Load the VFP co-processor patterns
11063 ;; Thumb-2 patterns
11064 (include "thumb2.md")
11066 (include "neon.md")