1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
109 ;; UNSPEC_VOLATILE Usage:
112 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
114 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
118 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
122 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
124 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
126 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
128 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
130 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
132 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
133 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
134 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
135 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
136 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
137 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
138 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
143 ;;---------------------------------------------------------------------------
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
151 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
152 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
154 ;; Operand number of an input operand that is shifted. Zero if the
155 ;; given instruction does not shift one of its input operands.
156 (define_attr "shift" "" (const_int 0))
158 ; Floating Point Unit. If we only have floating point emulation, then there
159 ; is no point in scheduling the floating point insns. (Well, for best
160 ; performance we should try and group them together).
161 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
162 (const (symbol_ref "arm_fpu_attr")))
164 ; LENGTH of an instruction (in bytes)
165 (define_attr "length" "" (const_int 4))
167 ; POOL_RANGE is how far away from a constant pool entry that this insn
168 ; can be placed. If the distance is zero, then this insn will never
169 ; reference the pool.
170 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
171 ; before its address.
172 (define_attr "pool_range" "" (const_int 0))
173 (define_attr "neg_pool_range" "" (const_int 0))
175 ; An assembler sequence may clobber the condition codes without us knowing.
176 ; If such an insn references the pool, then we have no way of knowing how,
177 ; so use the most conservative value for pool_range.
178 (define_asm_attributes
179 [(set_attr "conds" "clob")
180 (set_attr "length" "4")
181 (set_attr "pool_range" "250")])
183 ;; The instruction used to implement a particular pattern. This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
191 ; TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor can run in parallel with other, basic instructions
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
249 ; Load scheduling, set from the arm_ld_sched variable
250 ; initialized by arm_override_options()
251 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
253 ;; Classification of NEON instructions for scheduling purposes.
254 ;; Do not set this attribute and the "type" attribute together in
255 ;; any one instruction pattern.
256 (define_attr "neon_type"
267 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mul_qqq_8_16_32_ddd_32,\
269 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
270 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
272 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
273 neon_mla_qqq_32_qqd_32_scalar,\
274 neon_mul_ddd_16_scalar_32_16_long_scalar,\
275 neon_mul_qqd_32_scalar,\
276 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
281 neon_vqshl_vrshl_vqrshl_qqq,\
283 neon_fp_vadd_ddd_vabs_dd,\
284 neon_fp_vadd_qqq_vabs_qq,\
290 neon_fp_vmla_ddd_scalar,\
291 neon_fp_vmla_qqq_scalar,\
292 neon_fp_vrecps_vrsqrts_ddd,\
293 neon_fp_vrecps_vrsqrts_qqq,\
301 neon_vld2_2_regs_vld1_vld2_all_lanes,\
304 neon_vst1_1_2_regs_vst2_2_regs,\
306 neon_vst2_4_regs_vst3_vst4,\
308 neon_vld1_vld2_lane,\
309 neon_vld3_vld4_lane,\
310 neon_vst1_vst2_lane,\
311 neon_vst3_vst4_lane,\
312 neon_vld3_vld4_all_lanes,\
320 (const_string "none"))
322 ; condition codes: this one is used by final_prescan_insn to speed up
323 ; conditionalizing instructions. It saves having to scan the rtl to see if
324 ; it uses or alters the condition codes.
326 ; USE means that the condition codes are used by the insn in the process of
327 ; outputting code, this means (at present) that we can't use the insn in
330 ; SET means that the purpose of the insn is to set the condition codes in a
331 ; well defined manner.
333 ; CLOB means that the condition codes are altered in an undefined manner, if
334 ; they are altered at all
336 ; UNCONDITIONAL means the instions can not be conditionally executed.
338 ; NOCOND means that the condition codes are neither altered nor affect the
339 ; output of this insn
341 (define_attr "conds" "use,set,clob,unconditional,nocond"
342 (if_then_else (eq_attr "type" "call")
343 (const_string "clob")
344 (if_then_else (eq_attr "neon_type" "none")
345 (const_string "nocond")
346 (const_string "unconditional"))))
348 ; Predicable means that the insn can be conditionally executed based on
349 ; an automatically added predicate (additional patterns are generated by
350 ; gen...). We default to 'no' because no Thumb patterns match this rule
351 ; and not all ARM patterns do.
352 (define_attr "predicable" "no,yes" (const_string "no"))
354 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
355 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
356 ; suffer blockages enough to warrant modelling this (and it can adversely
357 ; affect the schedule).
358 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
360 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
361 ; to stall the processor. Used with model_wbuf above.
362 (define_attr "write_conflict" "no,yes"
363 (if_then_else (eq_attr "type"
364 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
366 (const_string "no")))
368 ; Classify the insns into those that take one cycle and those that take more
369 ; than one on the main cpu execution unit.
370 (define_attr "core_cycles" "single,multi"
371 (if_then_else (eq_attr "type"
372 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
373 (const_string "single")
374 (const_string "multi")))
376 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
377 ;; distant label. Only applicable to Thumb code.
378 (define_attr "far_jump" "yes,no" (const_string "no"))
381 ;; The number of machine instructions this pattern expands to.
382 ;; Used for Thumb-2 conditional execution.
383 (define_attr "ce_count" "" (const_int 1))
385 ;;---------------------------------------------------------------------------
388 ; A list of modes that are exactly 64 bits in size. We use this to expand
389 ; some splits that are the same for all modes when operating on ARM
391 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
393 ;; The integer modes up to word size
394 (define_mode_iterator QHSI [QI HI SI])
396 ;;---------------------------------------------------------------------------
399 (include "predicates.md")
400 (include "constraints.md")
402 ;;---------------------------------------------------------------------------
403 ;; Pipeline descriptions
405 ;; Processor type. This is created automatically from arm-cores.def.
406 (include "arm-tune.md")
408 (define_attr "tune_cortexr4" "yes,no"
410 (eq_attr "tune" "cortexr4,cortexr4f")
412 (const_string "no"))))
414 ;; True if the generic scheduling description should be used.
416 (define_attr "generic_sched" "yes,no"
418 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
419 (eq_attr "tune_cortexr4" "yes"))
421 (const_string "yes"))))
423 (define_attr "generic_vfp" "yes,no"
425 (and (eq_attr "fpu" "vfp")
426 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
427 (eq_attr "tune_cortexr4" "no"))
429 (const_string "no"))))
431 (include "arm-generic.md")
432 (include "arm926ejs.md")
433 (include "arm1020e.md")
434 (include "arm1026ejs.md")
435 (include "arm1136jfs.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-r4.md")
439 (include "cortex-r4f.md")
443 ;;---------------------------------------------------------------------------
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register, what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451 ;; Cirrus 64bit additions should not be split because we have a native
452 ;; 64bit addition instructions.
454 (define_expand "adddi3"
456 [(set (match_operand:DI 0 "s_register_operand" "")
457 (plus:DI (match_operand:DI 1 "s_register_operand" "")
458 (match_operand:DI 2 "s_register_operand" "")))
459 (clobber (reg:CC CC_REGNUM))])]
462 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
464 if (!cirrus_fp_register (operands[0], DImode))
465 operands[0] = force_reg (DImode, operands[0]);
466 if (!cirrus_fp_register (operands[1], DImode))
467 operands[1] = force_reg (DImode, operands[1]);
468 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
474 if (GET_CODE (operands[1]) != REG)
475 operands[1] = force_reg (DImode, operands[1]);
476 if (GET_CODE (operands[2]) != REG)
477 operands[2] = force_reg (DImode, operands[2]);
482 (define_insn "*thumb1_adddi3"
483 [(set (match_operand:DI 0 "register_operand" "=l")
484 (plus:DI (match_operand:DI 1 "register_operand" "%0")
485 (match_operand:DI 2 "register_operand" "l")))
486 (clobber (reg:CC CC_REGNUM))
489 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
490 [(set_attr "length" "4")]
493 (define_insn_and_split "*arm_adddi3"
494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
495 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
496 (match_operand:DI 2 "s_register_operand" "r, 0")))
497 (clobber (reg:CC CC_REGNUM))]
498 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
500 "TARGET_32BIT && reload_completed
501 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
502 [(parallel [(set (reg:CC_C CC_REGNUM)
503 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
505 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
506 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
507 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
510 operands[3] = gen_highpart (SImode, operands[0]);
511 operands[0] = gen_lowpart (SImode, operands[0]);
512 operands[4] = gen_highpart (SImode, operands[1]);
513 operands[1] = gen_lowpart (SImode, operands[1]);
514 operands[5] = gen_highpart (SImode, operands[2]);
515 operands[2] = gen_lowpart (SImode, operands[2]);
517 [(set_attr "conds" "clob")
518 (set_attr "length" "8")]
521 (define_insn_and_split "*adddi_sesidi_di"
522 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
523 (plus:DI (sign_extend:DI
524 (match_operand:SI 2 "s_register_operand" "r,r"))
525 (match_operand:DI 1 "s_register_operand" "0,r")))
526 (clobber (reg:CC CC_REGNUM))]
527 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
529 "TARGET_32BIT && reload_completed"
530 [(parallel [(set (reg:CC_C CC_REGNUM)
531 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
533 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
534 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
537 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
540 operands[3] = gen_highpart (SImode, operands[0]);
541 operands[0] = gen_lowpart (SImode, operands[0]);
542 operands[4] = gen_highpart (SImode, operands[1]);
543 operands[1] = gen_lowpart (SImode, operands[1]);
544 operands[2] = gen_lowpart (SImode, operands[2]);
546 [(set_attr "conds" "clob")
547 (set_attr "length" "8")]
550 (define_insn_and_split "*adddi_zesidi_di"
551 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
552 (plus:DI (zero_extend:DI
553 (match_operand:SI 2 "s_register_operand" "r,r"))
554 (match_operand:DI 1 "s_register_operand" "0,r")))
555 (clobber (reg:CC CC_REGNUM))]
556 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
558 "TARGET_32BIT && reload_completed"
559 [(parallel [(set (reg:CC_C CC_REGNUM)
560 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
562 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
563 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
564 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
567 operands[3] = gen_highpart (SImode, operands[0]);
568 operands[0] = gen_lowpart (SImode, operands[0]);
569 operands[4] = gen_highpart (SImode, operands[1]);
570 operands[1] = gen_lowpart (SImode, operands[1]);
571 operands[2] = gen_lowpart (SImode, operands[2]);
573 [(set_attr "conds" "clob")
574 (set_attr "length" "8")]
577 (define_expand "addsi3"
578 [(set (match_operand:SI 0 "s_register_operand" "")
579 (plus:SI (match_operand:SI 1 "s_register_operand" "")
580 (match_operand:SI 2 "reg_or_int_operand" "")))]
583 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
585 arm_split_constant (PLUS, SImode, NULL_RTX,
586 INTVAL (operands[2]), operands[0], operands[1],
587 optimize && can_create_pseudo_p ());
593 ; If there is a scratch available, this will be faster than synthesizing the
596 [(match_scratch:SI 3 "r")
597 (set (match_operand:SI 0 "arm_general_register_operand" "")
598 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
599 (match_operand:SI 2 "const_int_operand" "")))]
601 !(const_ok_for_arm (INTVAL (operands[2]))
602 || const_ok_for_arm (-INTVAL (operands[2])))
603 && const_ok_for_arm (~INTVAL (operands[2]))"
604 [(set (match_dup 3) (match_dup 2))
605 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
609 ;; The r/r/k alternative is required when reloading the address
610 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
611 ;; put the duplicated register first, and not try the commutative version.
612 (define_insn_and_split "*arm_addsi3"
613 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
614 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
615 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
625 && GET_CODE (operands[2]) == CONST_INT
626 && !(const_ok_for_arm (INTVAL (operands[2]))
627 || const_ok_for_arm (-INTVAL (operands[2])))
628 && (reload_completed || !arm_eliminable_register (operands[1]))"
629 [(clobber (const_int 0))]
631 arm_split_constant (PLUS, SImode, curr_insn,
632 INTVAL (operands[2]), operands[0],
636 [(set_attr "length" "4,4,4,4,4,16")
637 (set_attr "predicable" "yes")]
640 ;; Register group 'k' is a single register group containing only the stack
641 ;; register. Trying to reload it will always fail catastrophically,
642 ;; so never allow those alternatives to match if reloading is needed.
644 (define_insn_and_split "*thumb1_addsi3"
645 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
646 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
647 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
650 static const char * const asms[] =
652 \"add\\t%0, %0, %2\",
653 \"sub\\t%0, %0, #%n2\",
654 \"add\\t%0, %1, %2\",
655 \"add\\t%0, %0, %2\",
656 \"add\\t%0, %0, %2\",
657 \"add\\t%0, %1, %2\",
658 \"add\\t%0, %1, %2\",
662 if ((which_alternative == 2 || which_alternative == 6)
663 && GET_CODE (operands[2]) == CONST_INT
664 && INTVAL (operands[2]) < 0)
665 return \"sub\\t%0, %1, #%n2\";
666 return asms[which_alternative];
668 "&& reload_completed && CONST_INT_P (operands[2])
669 && operands[1] != stack_pointer_rtx
670 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
671 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
672 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
674 HOST_WIDE_INT offset = INTVAL (operands[2]);
677 else if (offset < -255)
680 operands[3] = GEN_INT (offset);
681 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
683 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
686 ;; Reloading and elimination of the frame pointer can
687 ;; sometimes cause this optimization to be missed.
689 [(set (match_operand:SI 0 "arm_general_register_operand" "")
690 (match_operand:SI 1 "const_int_operand" ""))
692 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
694 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
695 && (INTVAL (operands[1]) & 3) == 0"
696 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
700 (define_insn "*addsi3_compare0"
701 [(set (reg:CC_NOOV CC_REGNUM)
703 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
704 (match_operand:SI 2 "arm_add_operand" "rI,L"))
706 (set (match_operand:SI 0 "s_register_operand" "=r,r")
707 (plus:SI (match_dup 1) (match_dup 2)))]
711 sub%.\\t%0, %1, #%n2"
712 [(set_attr "conds" "set")]
715 (define_insn "*addsi3_compare0_scratch"
716 [(set (reg:CC_NOOV CC_REGNUM)
718 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
719 (match_operand:SI 1 "arm_add_operand" "rI,L"))
725 [(set_attr "conds" "set")]
728 (define_insn "*compare_negsi_si"
729 [(set (reg:CC_Z CC_REGNUM)
731 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
732 (match_operand:SI 1 "s_register_operand" "r")))]
735 [(set_attr "conds" "set")]
738 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
739 ;; addend is a constant.
740 (define_insn "*cmpsi2_addneg"
741 [(set (reg:CC CC_REGNUM)
743 (match_operand:SI 1 "s_register_operand" "r,r")
744 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
745 (set (match_operand:SI 0 "s_register_operand" "=r,r")
746 (plus:SI (match_dup 1)
747 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
748 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
751 sub%.\\t%0, %1, #%n3"
752 [(set_attr "conds" "set")]
755 ;; Convert the sequence
757 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
761 ;; bcs dest ((unsigned)rn >= 1)
762 ;; similarly for the beq variant using bcc.
763 ;; This is a common looping idiom (while (n--))
765 [(set (match_operand:SI 0 "arm_general_register_operand" "")
766 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
768 (set (match_operand 2 "cc_register" "")
769 (compare (match_dup 0) (const_int -1)))
771 (if_then_else (match_operator 3 "equality_operator"
772 [(match_dup 2) (const_int 0)])
773 (match_operand 4 "" "")
774 (match_operand 5 "" "")))]
775 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
779 (match_dup 1) (const_int 1)))
780 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
782 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
785 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
786 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
789 operands[2], const0_rtx);"
792 ;; The next four insns work because they compare the result with one of
793 ;; the operands, and we know that the use of the condition code is
794 ;; either GEU or LTU, so we can use the carry flag from the addition
795 ;; instead of doing the compare a second time.
796 (define_insn "*addsi3_compare_op1"
797 [(set (reg:CC_C CC_REGNUM)
799 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
800 (match_operand:SI 2 "arm_add_operand" "rI,L"))
802 (set (match_operand:SI 0 "s_register_operand" "=r,r")
803 (plus:SI (match_dup 1) (match_dup 2)))]
807 sub%.\\t%0, %1, #%n2"
808 [(set_attr "conds" "set")]
811 (define_insn "*addsi3_compare_op2"
812 [(set (reg:CC_C CC_REGNUM)
814 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
815 (match_operand:SI 2 "arm_add_operand" "rI,L"))
817 (set (match_operand:SI 0 "s_register_operand" "=r,r")
818 (plus:SI (match_dup 1) (match_dup 2)))]
822 sub%.\\t%0, %1, #%n2"
823 [(set_attr "conds" "set")]
826 (define_insn "*compare_addsi2_op0"
827 [(set (reg:CC_C CC_REGNUM)
829 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
830 (match_operand:SI 1 "arm_add_operand" "rI,L"))
836 [(set_attr "conds" "set")]
839 (define_insn "*compare_addsi2_op1"
840 [(set (reg:CC_C CC_REGNUM)
842 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
843 (match_operand:SI 1 "arm_add_operand" "rI,L"))
849 [(set_attr "conds" "set")]
852 (define_code_iterator LTUGEU [ltu geu])
853 (define_code_attr cnb [(ltu "CC_C") (geu "CC")])
854 (define_code_attr optab [(ltu "ltu") (geu "geu")])
856 (define_insn "*addsi3_carryin_<optab>"
857 [(set (match_operand:SI 0 "s_register_operand" "=r")
858 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
859 (match_operand:SI 2 "arm_rhs_operand" "rI"))
860 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
863 [(set_attr "conds" "use")]
866 (define_insn "*addsi3_carryin_alt2_<optab>"
867 [(set (match_operand:SI 0 "s_register_operand" "=r")
868 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
869 (match_operand:SI 1 "s_register_operand" "%r"))
870 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
873 [(set_attr "conds" "use")]
876 (define_insn "*addsi3_carryin_shift_<optab>"
877 [(set (match_operand:SI 0 "s_register_operand" "=r")
879 (match_operator:SI 2 "shift_operator"
880 [(match_operand:SI 3 "s_register_operand" "r")
881 (match_operand:SI 4 "reg_or_int_operand" "rM")])
882 (match_operand:SI 1 "s_register_operand" "r"))
883 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
885 "adc%?\\t%0, %1, %3%S2"
886 [(set_attr "conds" "use")
887 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
888 (const_string "alu_shift")
889 (const_string "alu_shift_reg")))]
892 (define_expand "incscc"
893 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
894 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
895 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
896 (match_operand:SI 1 "s_register_operand" "0,?r")))]
901 (define_insn "*arm_incscc"
902 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
903 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
904 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
905 (match_operand:SI 1 "s_register_operand" "0,?r")))]
909 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
910 [(set_attr "conds" "use")
911 (set_attr "length" "4,8")]
914 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
916 [(set (match_operand:SI 0 "s_register_operand" "")
917 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
918 (match_operand:SI 2 "s_register_operand" ""))
920 (clobber (match_operand:SI 3 "s_register_operand" ""))]
922 [(set (match_dup 3) (match_dup 1))
923 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
925 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
928 (define_expand "addsf3"
929 [(set (match_operand:SF 0 "s_register_operand" "")
930 (plus:SF (match_operand:SF 1 "s_register_operand" "")
931 (match_operand:SF 2 "arm_float_add_operand" "")))]
932 "TARGET_32BIT && TARGET_HARD_FLOAT"
935 && !cirrus_fp_register (operands[2], SFmode))
936 operands[2] = force_reg (SFmode, operands[2]);
939 (define_expand "adddf3"
940 [(set (match_operand:DF 0 "s_register_operand" "")
941 (plus:DF (match_operand:DF 1 "s_register_operand" "")
942 (match_operand:DF 2 "arm_float_add_operand" "")))]
943 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
946 && !cirrus_fp_register (operands[2], DFmode))
947 operands[2] = force_reg (DFmode, operands[2]);
950 (define_expand "subdi3"
952 [(set (match_operand:DI 0 "s_register_operand" "")
953 (minus:DI (match_operand:DI 1 "s_register_operand" "")
954 (match_operand:DI 2 "s_register_operand" "")))
955 (clobber (reg:CC CC_REGNUM))])]
958 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
960 && cirrus_fp_register (operands[0], DImode)
961 && cirrus_fp_register (operands[1], DImode))
963 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
969 if (GET_CODE (operands[1]) != REG)
970 operands[1] = force_reg (DImode, operands[1]);
971 if (GET_CODE (operands[2]) != REG)
972 operands[2] = force_reg (DImode, operands[2]);
977 (define_insn "*arm_subdi3"
978 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
979 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
980 (match_operand:DI 2 "s_register_operand" "r,0,0")))
981 (clobber (reg:CC CC_REGNUM))]
982 "TARGET_32BIT && !TARGET_NEON"
983 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
984 [(set_attr "conds" "clob")
985 (set_attr "length" "8")]
988 (define_insn "*thumb_subdi3"
989 [(set (match_operand:DI 0 "register_operand" "=l")
990 (minus:DI (match_operand:DI 1 "register_operand" "0")
991 (match_operand:DI 2 "register_operand" "l")))
992 (clobber (reg:CC CC_REGNUM))]
994 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
995 [(set_attr "length" "4")]
998 (define_insn "*subdi_di_zesidi"
999 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1000 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1002 (match_operand:SI 2 "s_register_operand" "r,r"))))
1003 (clobber (reg:CC CC_REGNUM))]
1005 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1006 [(set_attr "conds" "clob")
1007 (set_attr "length" "8")]
1010 (define_insn "*subdi_di_sesidi"
1011 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1012 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1014 (match_operand:SI 2 "s_register_operand" "r,r"))))
1015 (clobber (reg:CC CC_REGNUM))]
1017 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1018 [(set_attr "conds" "clob")
1019 (set_attr "length" "8")]
1022 (define_insn "*subdi_zesidi_di"
1023 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1024 (minus:DI (zero_extend:DI
1025 (match_operand:SI 2 "s_register_operand" "r,r"))
1026 (match_operand:DI 1 "s_register_operand" "0,r")))
1027 (clobber (reg:CC CC_REGNUM))]
1029 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1030 [(set_attr "conds" "clob")
1031 (set_attr "length" "8")]
1034 (define_insn "*subdi_sesidi_di"
1035 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1036 (minus:DI (sign_extend:DI
1037 (match_operand:SI 2 "s_register_operand" "r,r"))
1038 (match_operand:DI 1 "s_register_operand" "0,r")))
1039 (clobber (reg:CC CC_REGNUM))]
1041 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1042 [(set_attr "conds" "clob")
1043 (set_attr "length" "8")]
1046 (define_insn "*subdi_zesidi_zesidi"
1047 [(set (match_operand:DI 0 "s_register_operand" "=r")
1048 (minus:DI (zero_extend:DI
1049 (match_operand:SI 1 "s_register_operand" "r"))
1051 (match_operand:SI 2 "s_register_operand" "r"))))
1052 (clobber (reg:CC CC_REGNUM))]
1054 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1055 [(set_attr "conds" "clob")
1056 (set_attr "length" "8")]
1059 (define_expand "subsi3"
1060 [(set (match_operand:SI 0 "s_register_operand" "")
1061 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1062 (match_operand:SI 2 "s_register_operand" "")))]
1065 if (GET_CODE (operands[1]) == CONST_INT)
1069 arm_split_constant (MINUS, SImode, NULL_RTX,
1070 INTVAL (operands[1]), operands[0],
1071 operands[2], optimize && can_create_pseudo_p ());
1074 else /* TARGET_THUMB1 */
1075 operands[1] = force_reg (SImode, operands[1]);
1080 (define_insn "*thumb1_subsi3_insn"
1081 [(set (match_operand:SI 0 "register_operand" "=l")
1082 (minus:SI (match_operand:SI 1 "register_operand" "l")
1083 (match_operand:SI 2 "register_operand" "l")))]
1086 [(set_attr "length" "2")]
1089 ; ??? Check Thumb-2 split length
1090 (define_insn_and_split "*arm_subsi3_insn"
1091 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1092 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,!k,?n,r")
1093 (match_operand:SI 2 "reg_or_int_operand" "r,rI, r, r,?n")))]
1101 "&& ((GET_CODE (operands[1]) == CONST_INT
1102 && !const_ok_for_arm (INTVAL (operands[1])))
1103 || (GET_CODE (operands[2]) == CONST_INT
1104 && !const_ok_for_arm (INTVAL (operands[2]))))"
1105 [(clobber (const_int 0))]
1107 arm_split_constant (MINUS, SImode, curr_insn,
1108 INTVAL (operands[1]), operands[0], operands[2], 0);
1111 [(set_attr "length" "4,4,4,16,16")
1112 (set_attr "predicable" "yes")]
1116 [(match_scratch:SI 3 "r")
1117 (set (match_operand:SI 0 "arm_general_register_operand" "")
1118 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1119 (match_operand:SI 2 "arm_general_register_operand" "")))]
1121 && !const_ok_for_arm (INTVAL (operands[1]))
1122 && const_ok_for_arm (~INTVAL (operands[1]))"
1123 [(set (match_dup 3) (match_dup 1))
1124 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1128 (define_insn "*subsi3_compare0"
1129 [(set (reg:CC_NOOV CC_REGNUM)
1131 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1132 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1134 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1135 (minus:SI (match_dup 1) (match_dup 2)))]
1140 [(set_attr "conds" "set")]
1143 (define_insn "*subsi3_compare"
1144 [(set (reg:CC CC_REGNUM)
1145 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1146 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1147 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1148 (minus:SI (match_dup 1) (match_dup 2)))]
1153 [(set_attr "conds" "set")]
1156 (define_expand "decscc"
1157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1158 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1159 (match_operator:SI 2 "arm_comparison_operator"
1160 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1165 (define_insn "*arm_decscc"
1166 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1167 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1168 (match_operator:SI 2 "arm_comparison_operator"
1169 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1173 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1174 [(set_attr "conds" "use")
1175 (set_attr "length" "*,8")]
1178 (define_expand "subsf3"
1179 [(set (match_operand:SF 0 "s_register_operand" "")
1180 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1181 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1182 "TARGET_32BIT && TARGET_HARD_FLOAT"
1184 if (TARGET_MAVERICK)
1186 if (!cirrus_fp_register (operands[1], SFmode))
1187 operands[1] = force_reg (SFmode, operands[1]);
1188 if (!cirrus_fp_register (operands[2], SFmode))
1189 operands[2] = force_reg (SFmode, operands[2]);
1193 (define_expand "subdf3"
1194 [(set (match_operand:DF 0 "s_register_operand" "")
1195 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1196 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1197 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1199 if (TARGET_MAVERICK)
1201 if (!cirrus_fp_register (operands[1], DFmode))
1202 operands[1] = force_reg (DFmode, operands[1]);
1203 if (!cirrus_fp_register (operands[2], DFmode))
1204 operands[2] = force_reg (DFmode, operands[2]);
1209 ;; Multiplication insns
1211 (define_expand "mulsi3"
1212 [(set (match_operand:SI 0 "s_register_operand" "")
1213 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1214 (match_operand:SI 1 "s_register_operand" "")))]
1219 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1220 (define_insn "*arm_mulsi3"
1221 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1222 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1223 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1224 "TARGET_32BIT && !arm_arch6"
1225 "mul%?\\t%0, %2, %1"
1226 [(set_attr "insn" "mul")
1227 (set_attr "predicable" "yes")]
1230 (define_insn "*arm_mulsi3_v6"
1231 [(set (match_operand:SI 0 "s_register_operand" "=r")
1232 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1233 (match_operand:SI 2 "s_register_operand" "r")))]
1234 "TARGET_32BIT && arm_arch6"
1235 "mul%?\\t%0, %1, %2"
1236 [(set_attr "insn" "mul")
1237 (set_attr "predicable" "yes")]
1240 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1241 ; 1 and 2; are the same, because reload will make operand 0 match
1242 ; operand 1 without realizing that this conflicts with operand 2. We fix
1243 ; this by adding another alternative to match this case, and then `reload'
1244 ; it ourselves. This alternative must come first.
1245 (define_insn "*thumb_mulsi3"
1246 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1247 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1248 (match_operand:SI 2 "register_operand" "l,l,l")))]
1249 "TARGET_THUMB1 && !arm_arch6"
1251 if (which_alternative < 2)
1252 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1254 return \"mul\\t%0, %2\";
1256 [(set_attr "length" "4,4,2")
1257 (set_attr "insn" "mul")]
1260 (define_insn "*thumb_mulsi3_v6"
1261 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1262 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1263 (match_operand:SI 2 "register_operand" "l,0,0")))]
1264 "TARGET_THUMB1 && arm_arch6"
1269 [(set_attr "length" "2")
1270 (set_attr "insn" "mul")]
1273 (define_insn "*mulsi3_compare0"
1274 [(set (reg:CC_NOOV CC_REGNUM)
1275 (compare:CC_NOOV (mult:SI
1276 (match_operand:SI 2 "s_register_operand" "r,r")
1277 (match_operand:SI 1 "s_register_operand" "%0,r"))
1279 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1280 (mult:SI (match_dup 2) (match_dup 1)))]
1281 "TARGET_ARM && !arm_arch6"
1282 "mul%.\\t%0, %2, %1"
1283 [(set_attr "conds" "set")
1284 (set_attr "insn" "muls")]
1287 (define_insn "*mulsi3_compare0_v6"
1288 [(set (reg:CC_NOOV CC_REGNUM)
1289 (compare:CC_NOOV (mult:SI
1290 (match_operand:SI 2 "s_register_operand" "r")
1291 (match_operand:SI 1 "s_register_operand" "r"))
1293 (set (match_operand:SI 0 "s_register_operand" "=r")
1294 (mult:SI (match_dup 2) (match_dup 1)))]
1295 "TARGET_ARM && arm_arch6 && optimize_size"
1296 "mul%.\\t%0, %2, %1"
1297 [(set_attr "conds" "set")
1298 (set_attr "insn" "muls")]
1301 (define_insn "*mulsi_compare0_scratch"
1302 [(set (reg:CC_NOOV CC_REGNUM)
1303 (compare:CC_NOOV (mult:SI
1304 (match_operand:SI 2 "s_register_operand" "r,r")
1305 (match_operand:SI 1 "s_register_operand" "%0,r"))
1307 (clobber (match_scratch:SI 0 "=&r,&r"))]
1308 "TARGET_ARM && !arm_arch6"
1309 "mul%.\\t%0, %2, %1"
1310 [(set_attr "conds" "set")
1311 (set_attr "insn" "muls")]
1314 (define_insn "*mulsi_compare0_scratch_v6"
1315 [(set (reg:CC_NOOV CC_REGNUM)
1316 (compare:CC_NOOV (mult:SI
1317 (match_operand:SI 2 "s_register_operand" "r")
1318 (match_operand:SI 1 "s_register_operand" "r"))
1320 (clobber (match_scratch:SI 0 "=r"))]
1321 "TARGET_ARM && arm_arch6 && optimize_size"
1322 "mul%.\\t%0, %2, %1"
1323 [(set_attr "conds" "set")
1324 (set_attr "insn" "muls")]
1327 ;; Unnamed templates to match MLA instruction.
1329 (define_insn "*mulsi3addsi"
1330 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1332 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1333 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1334 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1335 "TARGET_32BIT && !arm_arch6"
1336 "mla%?\\t%0, %2, %1, %3"
1337 [(set_attr "insn" "mla")
1338 (set_attr "predicable" "yes")]
1341 (define_insn "*mulsi3addsi_v6"
1342 [(set (match_operand:SI 0 "s_register_operand" "=r")
1344 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1345 (match_operand:SI 1 "s_register_operand" "r"))
1346 (match_operand:SI 3 "s_register_operand" "r")))]
1347 "TARGET_32BIT && arm_arch6"
1348 "mla%?\\t%0, %2, %1, %3"
1349 [(set_attr "insn" "mla")
1350 (set_attr "predicable" "yes")]
1353 (define_insn "*mulsi3addsi_compare0"
1354 [(set (reg:CC_NOOV CC_REGNUM)
1357 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1358 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1359 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1361 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1362 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1364 "TARGET_ARM && arm_arch6"
1365 "mla%.\\t%0, %2, %1, %3"
1366 [(set_attr "conds" "set")
1367 (set_attr "insn" "mlas")]
1370 (define_insn "*mulsi3addsi_compare0_v6"
1371 [(set (reg:CC_NOOV CC_REGNUM)
1374 (match_operand:SI 2 "s_register_operand" "r")
1375 (match_operand:SI 1 "s_register_operand" "r"))
1376 (match_operand:SI 3 "s_register_operand" "r"))
1378 (set (match_operand:SI 0 "s_register_operand" "=r")
1379 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1381 "TARGET_ARM && arm_arch6 && optimize_size"
1382 "mla%.\\t%0, %2, %1, %3"
1383 [(set_attr "conds" "set")
1384 (set_attr "insn" "mlas")]
1387 (define_insn "*mulsi3addsi_compare0_scratch"
1388 [(set (reg:CC_NOOV CC_REGNUM)
1391 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1392 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1393 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1395 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1396 "TARGET_ARM && !arm_arch6"
1397 "mla%.\\t%0, %2, %1, %3"
1398 [(set_attr "conds" "set")
1399 (set_attr "insn" "mlas")]
1402 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1403 [(set (reg:CC_NOOV CC_REGNUM)
1406 (match_operand:SI 2 "s_register_operand" "r")
1407 (match_operand:SI 1 "s_register_operand" "r"))
1408 (match_operand:SI 3 "s_register_operand" "r"))
1410 (clobber (match_scratch:SI 0 "=r"))]
1411 "TARGET_ARM && arm_arch6 && optimize_size"
1412 "mla%.\\t%0, %2, %1, %3"
1413 [(set_attr "conds" "set")
1414 (set_attr "insn" "mlas")]
1417 (define_insn "*mulsi3subsi"
1418 [(set (match_operand:SI 0 "s_register_operand" "=r")
1420 (match_operand:SI 3 "s_register_operand" "r")
1421 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1422 (match_operand:SI 1 "s_register_operand" "r"))))]
1423 "TARGET_32BIT && arm_arch_thumb2"
1424 "mls%?\\t%0, %2, %1, %3"
1425 [(set_attr "insn" "mla")
1426 (set_attr "predicable" "yes")]
1429 (define_expand "maddsidi4"
1430 [(set (match_operand:DI 0 "s_register_operand" "")
1433 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1434 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1435 (match_operand:DI 3 "s_register_operand" "")))]
1436 "TARGET_32BIT && arm_arch3m"
1439 (define_insn "*mulsidi3adddi"
1440 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1443 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1444 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1445 (match_operand:DI 1 "s_register_operand" "0")))]
1446 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1447 "smlal%?\\t%Q0, %R0, %3, %2"
1448 [(set_attr "insn" "smlal")
1449 (set_attr "predicable" "yes")]
1452 (define_insn "*mulsidi3adddi_v6"
1453 [(set (match_operand:DI 0 "s_register_operand" "=r")
1456 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1457 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1458 (match_operand:DI 1 "s_register_operand" "0")))]
1459 "TARGET_32BIT && arm_arch6"
1460 "smlal%?\\t%Q0, %R0, %3, %2"
1461 [(set_attr "insn" "smlal")
1462 (set_attr "predicable" "yes")]
1465 ;; 32x32->64 widening multiply.
1466 ;; As with mulsi3, the only difference between the v3-5 and v6+
1467 ;; versions of these patterns is the requirement that the output not
1468 ;; overlap the inputs, but that still means we have to have a named
1469 ;; expander and two different starred insns.
1471 (define_expand "mulsidi3"
1472 [(set (match_operand:DI 0 "s_register_operand" "")
1474 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1475 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1476 "TARGET_32BIT && arm_arch3m"
1480 (define_insn "*mulsidi3_nov6"
1481 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1483 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1484 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1485 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1486 "smull%?\\t%Q0, %R0, %1, %2"
1487 [(set_attr "insn" "smull")
1488 (set_attr "predicable" "yes")]
1491 (define_insn "*mulsidi3_v6"
1492 [(set (match_operand:DI 0 "s_register_operand" "=r")
1494 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1495 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1496 "TARGET_32BIT && arm_arch6"
1497 "smull%?\\t%Q0, %R0, %1, %2"
1498 [(set_attr "insn" "smull")
1499 (set_attr "predicable" "yes")]
1502 (define_expand "umulsidi3"
1503 [(set (match_operand:DI 0 "s_register_operand" "")
1505 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1506 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1507 "TARGET_32BIT && arm_arch3m"
1511 (define_insn "*umulsidi3_nov6"
1512 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1514 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1515 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1516 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1517 "umull%?\\t%Q0, %R0, %1, %2"
1518 [(set_attr "insn" "umull")
1519 (set_attr "predicable" "yes")]
1522 (define_insn "*umulsidi3_v6"
1523 [(set (match_operand:DI 0 "s_register_operand" "=r")
1525 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1526 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1527 "TARGET_32BIT && arm_arch6"
1528 "umull%?\\t%Q0, %R0, %1, %2"
1529 [(set_attr "insn" "umull")
1530 (set_attr "predicable" "yes")]
1533 (define_expand "umaddsidi4"
1534 [(set (match_operand:DI 0 "s_register_operand" "")
1537 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1538 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1539 (match_operand:DI 3 "s_register_operand" "")))]
1540 "TARGET_32BIT && arm_arch3m"
1543 (define_insn "*umulsidi3adddi"
1544 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1547 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1548 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1549 (match_operand:DI 1 "s_register_operand" "0")))]
1550 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1551 "umlal%?\\t%Q0, %R0, %3, %2"
1552 [(set_attr "insn" "umlal")
1553 (set_attr "predicable" "yes")]
1556 (define_insn "*umulsidi3adddi_v6"
1557 [(set (match_operand:DI 0 "s_register_operand" "=r")
1560 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1561 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1562 (match_operand:DI 1 "s_register_operand" "0")))]
1563 "TARGET_32BIT && arm_arch6"
1564 "umlal%?\\t%Q0, %R0, %3, %2"
1565 [(set_attr "insn" "umlal")
1566 (set_attr "predicable" "yes")]
1569 (define_expand "smulsi3_highpart"
1571 [(set (match_operand:SI 0 "s_register_operand" "")
1575 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1576 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1578 (clobber (match_scratch:SI 3 ""))])]
1579 "TARGET_32BIT && arm_arch3m"
1583 (define_insn "*smulsi3_highpart_nov6"
1584 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1588 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1589 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1591 (clobber (match_scratch:SI 3 "=&r,&r"))]
1592 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1593 "smull%?\\t%3, %0, %2, %1"
1594 [(set_attr "insn" "smull")
1595 (set_attr "predicable" "yes")]
1598 (define_insn "*smulsi3_highpart_v6"
1599 [(set (match_operand:SI 0 "s_register_operand" "=r")
1603 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1604 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1606 (clobber (match_scratch:SI 3 "=r"))]
1607 "TARGET_32BIT && arm_arch6"
1608 "smull%?\\t%3, %0, %2, %1"
1609 [(set_attr "insn" "smull")
1610 (set_attr "predicable" "yes")]
1613 (define_expand "umulsi3_highpart"
1615 [(set (match_operand:SI 0 "s_register_operand" "")
1619 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1620 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1622 (clobber (match_scratch:SI 3 ""))])]
1623 "TARGET_32BIT && arm_arch3m"
1627 (define_insn "*umulsi3_highpart_nov6"
1628 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1632 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1633 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1635 (clobber (match_scratch:SI 3 "=&r,&r"))]
1636 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1637 "umull%?\\t%3, %0, %2, %1"
1638 [(set_attr "insn" "umull")
1639 (set_attr "predicable" "yes")]
1642 (define_insn "*umulsi3_highpart_v6"
1643 [(set (match_operand:SI 0 "s_register_operand" "=r")
1647 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1648 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1650 (clobber (match_scratch:SI 3 "=r"))]
1651 "TARGET_32BIT && arm_arch6"
1652 "umull%?\\t%3, %0, %2, %1"
1653 [(set_attr "insn" "umull")
1654 (set_attr "predicable" "yes")]
1657 (define_insn "mulhisi3"
1658 [(set (match_operand:SI 0 "s_register_operand" "=r")
1659 (mult:SI (sign_extend:SI
1660 (match_operand:HI 1 "s_register_operand" "%r"))
1662 (match_operand:HI 2 "s_register_operand" "r"))))]
1663 "TARGET_DSP_MULTIPLY"
1664 "smulbb%?\\t%0, %1, %2"
1665 [(set_attr "insn" "smulxy")
1666 (set_attr "predicable" "yes")]
1669 (define_insn "*mulhisi3tb"
1670 [(set (match_operand:SI 0 "s_register_operand" "=r")
1671 (mult:SI (ashiftrt:SI
1672 (match_operand:SI 1 "s_register_operand" "r")
1675 (match_operand:HI 2 "s_register_operand" "r"))))]
1676 "TARGET_DSP_MULTIPLY"
1677 "smultb%?\\t%0, %1, %2"
1678 [(set_attr "insn" "smulxy")
1679 (set_attr "predicable" "yes")]
1682 (define_insn "*mulhisi3bt"
1683 [(set (match_operand:SI 0 "s_register_operand" "=r")
1684 (mult:SI (sign_extend:SI
1685 (match_operand:HI 1 "s_register_operand" "r"))
1687 (match_operand:SI 2 "s_register_operand" "r")
1689 "TARGET_DSP_MULTIPLY"
1690 "smulbt%?\\t%0, %1, %2"
1691 [(set_attr "insn" "smulxy")
1692 (set_attr "predicable" "yes")]
1695 (define_insn "*mulhisi3tt"
1696 [(set (match_operand:SI 0 "s_register_operand" "=r")
1697 (mult:SI (ashiftrt:SI
1698 (match_operand:SI 1 "s_register_operand" "r")
1701 (match_operand:SI 2 "s_register_operand" "r")
1703 "TARGET_DSP_MULTIPLY"
1704 "smultt%?\\t%0, %1, %2"
1705 [(set_attr "insn" "smulxy")
1706 (set_attr "predicable" "yes")]
1709 (define_insn "maddhisi4"
1710 [(set (match_operand:SI 0 "s_register_operand" "=r")
1711 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1712 (mult:SI (sign_extend:SI
1713 (match_operand:HI 1 "s_register_operand" "%r"))
1715 (match_operand:HI 2 "s_register_operand" "r")))))]
1716 "TARGET_DSP_MULTIPLY"
1717 "smlabb%?\\t%0, %1, %2, %3"
1718 [(set_attr "insn" "smlaxy")
1719 (set_attr "predicable" "yes")]
1722 (define_insn "*maddhidi4"
1723 [(set (match_operand:DI 0 "s_register_operand" "=r")
1725 (match_operand:DI 3 "s_register_operand" "0")
1726 (mult:DI (sign_extend:DI
1727 (match_operand:HI 1 "s_register_operand" "%r"))
1729 (match_operand:HI 2 "s_register_operand" "r")))))]
1730 "TARGET_DSP_MULTIPLY"
1731 "smlalbb%?\\t%Q0, %R0, %1, %2"
1732 [(set_attr "insn" "smlalxy")
1733 (set_attr "predicable" "yes")])
1735 (define_expand "mulsf3"
1736 [(set (match_operand:SF 0 "s_register_operand" "")
1737 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1738 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1739 "TARGET_32BIT && TARGET_HARD_FLOAT"
1742 && !cirrus_fp_register (operands[2], SFmode))
1743 operands[2] = force_reg (SFmode, operands[2]);
1746 (define_expand "muldf3"
1747 [(set (match_operand:DF 0 "s_register_operand" "")
1748 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1749 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1750 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1753 && !cirrus_fp_register (operands[2], DFmode))
1754 operands[2] = force_reg (DFmode, operands[2]);
1759 (define_expand "divsf3"
1760 [(set (match_operand:SF 0 "s_register_operand" "")
1761 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1762 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1763 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1766 (define_expand "divdf3"
1767 [(set (match_operand:DF 0 "s_register_operand" "")
1768 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1769 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1770 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1775 (define_expand "modsf3"
1776 [(set (match_operand:SF 0 "s_register_operand" "")
1777 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1778 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1779 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1782 (define_expand "moddf3"
1783 [(set (match_operand:DF 0 "s_register_operand" "")
1784 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1785 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1786 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1789 ;; Boolean and,ior,xor insns
1791 ;; Split up double word logical operations
1793 ;; Split up simple DImode logical operations. Simply perform the logical
1794 ;; operation on the upper and lower halves of the registers.
1796 [(set (match_operand:DI 0 "s_register_operand" "")
1797 (match_operator:DI 6 "logical_binary_operator"
1798 [(match_operand:DI 1 "s_register_operand" "")
1799 (match_operand:DI 2 "s_register_operand" "")]))]
1800 "TARGET_32BIT && reload_completed
1801 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1802 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1803 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1804 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1807 operands[3] = gen_highpart (SImode, operands[0]);
1808 operands[0] = gen_lowpart (SImode, operands[0]);
1809 operands[4] = gen_highpart (SImode, operands[1]);
1810 operands[1] = gen_lowpart (SImode, operands[1]);
1811 operands[5] = gen_highpart (SImode, operands[2]);
1812 operands[2] = gen_lowpart (SImode, operands[2]);
1817 [(set (match_operand:DI 0 "s_register_operand" "")
1818 (match_operator:DI 6 "logical_binary_operator"
1819 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1820 (match_operand:DI 1 "s_register_operand" "")]))]
1821 "TARGET_32BIT && reload_completed"
1822 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1823 (set (match_dup 3) (match_op_dup:SI 6
1824 [(ashiftrt:SI (match_dup 2) (const_int 31))
1828 operands[3] = gen_highpart (SImode, operands[0]);
1829 operands[0] = gen_lowpart (SImode, operands[0]);
1830 operands[4] = gen_highpart (SImode, operands[1]);
1831 operands[1] = gen_lowpart (SImode, operands[1]);
1832 operands[5] = gen_highpart (SImode, operands[2]);
1833 operands[2] = gen_lowpart (SImode, operands[2]);
1837 ;; The zero extend of operand 2 means we can just copy the high part of
1838 ;; operand1 into operand0.
1840 [(set (match_operand:DI 0 "s_register_operand" "")
1842 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1843 (match_operand:DI 1 "s_register_operand" "")))]
1844 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1845 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1846 (set (match_dup 3) (match_dup 4))]
1849 operands[4] = gen_highpart (SImode, operands[1]);
1850 operands[3] = gen_highpart (SImode, operands[0]);
1851 operands[0] = gen_lowpart (SImode, operands[0]);
1852 operands[1] = gen_lowpart (SImode, operands[1]);
1856 ;; The zero extend of operand 2 means we can just copy the high part of
1857 ;; operand1 into operand0.
1859 [(set (match_operand:DI 0 "s_register_operand" "")
1861 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1862 (match_operand:DI 1 "s_register_operand" "")))]
1863 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1864 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1865 (set (match_dup 3) (match_dup 4))]
1868 operands[4] = gen_highpart (SImode, operands[1]);
1869 operands[3] = gen_highpart (SImode, operands[0]);
1870 operands[0] = gen_lowpart (SImode, operands[0]);
1871 operands[1] = gen_lowpart (SImode, operands[1]);
1875 (define_expand "anddi3"
1876 [(set (match_operand:DI 0 "s_register_operand" "")
1877 (and:DI (match_operand:DI 1 "s_register_operand" "")
1878 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1883 (define_insn "*anddi3_insn"
1884 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1885 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1886 (match_operand:DI 2 "s_register_operand" "r,r")))]
1887 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1889 [(set_attr "length" "8")]
1892 (define_insn_and_split "*anddi_zesidi_di"
1893 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1894 (and:DI (zero_extend:DI
1895 (match_operand:SI 2 "s_register_operand" "r,r"))
1896 (match_operand:DI 1 "s_register_operand" "0,r")))]
1899 "TARGET_32BIT && reload_completed"
1900 ; The zero extend of operand 2 clears the high word of the output
1902 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1903 (set (match_dup 3) (const_int 0))]
1906 operands[3] = gen_highpart (SImode, operands[0]);
1907 operands[0] = gen_lowpart (SImode, operands[0]);
1908 operands[1] = gen_lowpart (SImode, operands[1]);
1910 [(set_attr "length" "8")]
1913 (define_insn "*anddi_sesdi_di"
1914 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1915 (and:DI (sign_extend:DI
1916 (match_operand:SI 2 "s_register_operand" "r,r"))
1917 (match_operand:DI 1 "s_register_operand" "0,r")))]
1920 [(set_attr "length" "8")]
1923 (define_expand "andsi3"
1924 [(set (match_operand:SI 0 "s_register_operand" "")
1925 (and:SI (match_operand:SI 1 "s_register_operand" "")
1926 (match_operand:SI 2 "reg_or_int_operand" "")))]
1931 if (GET_CODE (operands[2]) == CONST_INT)
1933 arm_split_constant (AND, SImode, NULL_RTX,
1934 INTVAL (operands[2]), operands[0],
1935 operands[1], optimize && can_create_pseudo_p ());
1940 else /* TARGET_THUMB1 */
1942 if (GET_CODE (operands[2]) != CONST_INT)
1944 rtx tmp = force_reg (SImode, operands[2]);
1945 if (rtx_equal_p (operands[0], operands[1]))
1949 operands[2] = operands[1];
1957 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1959 operands[2] = force_reg (SImode,
1960 GEN_INT (~INTVAL (operands[2])));
1962 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1967 for (i = 9; i <= 31; i++)
1969 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1971 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1975 else if ((((HOST_WIDE_INT) 1) << i) - 1
1976 == ~INTVAL (operands[2]))
1978 rtx shift = GEN_INT (i);
1979 rtx reg = gen_reg_rtx (SImode);
1981 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1982 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1988 operands[2] = force_reg (SImode, operands[2]);
1994 ; ??? Check split length for Thumb-2
1995 (define_insn_and_split "*arm_andsi3_insn"
1996 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1997 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1998 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2002 bic%?\\t%0, %1, #%B2
2005 && GET_CODE (operands[2]) == CONST_INT
2006 && !(const_ok_for_arm (INTVAL (operands[2]))
2007 || const_ok_for_arm (~INTVAL (operands[2])))"
2008 [(clobber (const_int 0))]
2010 arm_split_constant (AND, SImode, curr_insn,
2011 INTVAL (operands[2]), operands[0], operands[1], 0);
2014 [(set_attr "length" "4,4,16")
2015 (set_attr "predicable" "yes")]
2018 (define_insn "*thumb1_andsi3_insn"
2019 [(set (match_operand:SI 0 "register_operand" "=l")
2020 (and:SI (match_operand:SI 1 "register_operand" "%0")
2021 (match_operand:SI 2 "register_operand" "l")))]
2024 [(set_attr "length" "2")]
2027 (define_insn "*andsi3_compare0"
2028 [(set (reg:CC_NOOV CC_REGNUM)
2030 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2031 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2033 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2034 (and:SI (match_dup 1) (match_dup 2)))]
2038 bic%.\\t%0, %1, #%B2"
2039 [(set_attr "conds" "set")]
2042 (define_insn "*andsi3_compare0_scratch"
2043 [(set (reg:CC_NOOV CC_REGNUM)
2045 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2046 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2048 (clobber (match_scratch:SI 2 "=X,r"))]
2052 bic%.\\t%2, %0, #%B1"
2053 [(set_attr "conds" "set")]
2056 (define_insn "*zeroextractsi_compare0_scratch"
2057 [(set (reg:CC_NOOV CC_REGNUM)
2058 (compare:CC_NOOV (zero_extract:SI
2059 (match_operand:SI 0 "s_register_operand" "r")
2060 (match_operand 1 "const_int_operand" "n")
2061 (match_operand 2 "const_int_operand" "n"))
2064 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2065 && INTVAL (operands[1]) > 0
2066 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2067 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2069 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2070 << INTVAL (operands[2]));
2071 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2074 [(set_attr "conds" "set")]
2077 (define_insn_and_split "*ne_zeroextractsi"
2078 [(set (match_operand:SI 0 "s_register_operand" "=r")
2079 (ne:SI (zero_extract:SI
2080 (match_operand:SI 1 "s_register_operand" "r")
2081 (match_operand:SI 2 "const_int_operand" "n")
2082 (match_operand:SI 3 "const_int_operand" "n"))
2084 (clobber (reg:CC CC_REGNUM))]
2086 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2087 && INTVAL (operands[2]) > 0
2088 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2089 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2092 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2093 && INTVAL (operands[2]) > 0
2094 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2095 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2096 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2097 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2099 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2101 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2102 (match_dup 0) (const_int 1)))]
2104 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2105 << INTVAL (operands[3]));
2107 [(set_attr "conds" "clob")
2108 (set (attr "length")
2109 (if_then_else (eq_attr "is_thumb" "yes")
2114 (define_insn_and_split "*ne_zeroextractsi_shifted"
2115 [(set (match_operand:SI 0 "s_register_operand" "=r")
2116 (ne:SI (zero_extract:SI
2117 (match_operand:SI 1 "s_register_operand" "r")
2118 (match_operand:SI 2 "const_int_operand" "n")
2121 (clobber (reg:CC CC_REGNUM))]
2125 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2126 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2128 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2130 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2131 (match_dup 0) (const_int 1)))]
2133 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2135 [(set_attr "conds" "clob")
2136 (set_attr "length" "8")]
2139 (define_insn_and_split "*ite_ne_zeroextractsi"
2140 [(set (match_operand:SI 0 "s_register_operand" "=r")
2141 (if_then_else:SI (ne (zero_extract:SI
2142 (match_operand:SI 1 "s_register_operand" "r")
2143 (match_operand:SI 2 "const_int_operand" "n")
2144 (match_operand:SI 3 "const_int_operand" "n"))
2146 (match_operand:SI 4 "arm_not_operand" "rIK")
2148 (clobber (reg:CC CC_REGNUM))]
2150 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2151 && INTVAL (operands[2]) > 0
2152 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2153 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2154 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2157 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2158 && INTVAL (operands[2]) > 0
2159 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2160 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2161 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2162 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2163 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2165 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2167 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2168 (match_dup 0) (match_dup 4)))]
2170 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2171 << INTVAL (operands[3]));
2173 [(set_attr "conds" "clob")
2174 (set_attr "length" "8")]
2177 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2178 [(set (match_operand:SI 0 "s_register_operand" "=r")
2179 (if_then_else:SI (ne (zero_extract:SI
2180 (match_operand:SI 1 "s_register_operand" "r")
2181 (match_operand:SI 2 "const_int_operand" "n")
2184 (match_operand:SI 3 "arm_not_operand" "rIK")
2186 (clobber (reg:CC CC_REGNUM))]
2187 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2189 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2190 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2191 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2193 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2195 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2196 (match_dup 0) (match_dup 3)))]
2198 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2200 [(set_attr "conds" "clob")
2201 (set_attr "length" "8")]
2205 [(set (match_operand:SI 0 "s_register_operand" "")
2206 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2207 (match_operand:SI 2 "const_int_operand" "")
2208 (match_operand:SI 3 "const_int_operand" "")))
2209 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2211 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2212 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2214 HOST_WIDE_INT temp = INTVAL (operands[2]);
2216 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2217 operands[3] = GEN_INT (32 - temp);
2221 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2223 [(set (match_operand:SI 0 "s_register_operand" "")
2224 (match_operator:SI 1 "shiftable_operator"
2225 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2226 (match_operand:SI 3 "const_int_operand" "")
2227 (match_operand:SI 4 "const_int_operand" ""))
2228 (match_operand:SI 5 "s_register_operand" "")]))
2229 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2231 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2234 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2237 HOST_WIDE_INT temp = INTVAL (operands[3]);
2239 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2240 operands[4] = GEN_INT (32 - temp);
2245 [(set (match_operand:SI 0 "s_register_operand" "")
2246 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2247 (match_operand:SI 2 "const_int_operand" "")
2248 (match_operand:SI 3 "const_int_operand" "")))]
2250 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2251 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2253 HOST_WIDE_INT temp = INTVAL (operands[2]);
2255 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2256 operands[3] = GEN_INT (32 - temp);
2261 [(set (match_operand:SI 0 "s_register_operand" "")
2262 (match_operator:SI 1 "shiftable_operator"
2263 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2264 (match_operand:SI 3 "const_int_operand" "")
2265 (match_operand:SI 4 "const_int_operand" ""))
2266 (match_operand:SI 5 "s_register_operand" "")]))
2267 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2269 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2272 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2275 HOST_WIDE_INT temp = INTVAL (operands[3]);
2277 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2278 operands[4] = GEN_INT (32 - temp);
2282 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2283 ;;; represented by the bitfield, then this will produce incorrect results.
2284 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2285 ;;; which have a real bit-field insert instruction, the truncation happens
2286 ;;; in the bit-field insert instruction itself. Since arm does not have a
2287 ;;; bit-field insert instruction, we would have to emit code here to truncate
2288 ;;; the value before we insert. This loses some of the advantage of having
2289 ;;; this insv pattern, so this pattern needs to be reevalutated.
2291 (define_expand "insv"
2292 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2293 (match_operand:SI 1 "general_operand" "")
2294 (match_operand:SI 2 "general_operand" ""))
2295 (match_operand:SI 3 "reg_or_int_operand" ""))]
2296 "TARGET_ARM || arm_arch_thumb2"
2299 int start_bit = INTVAL (operands[2]);
2300 int width = INTVAL (operands[1]);
2301 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2302 rtx target, subtarget;
2304 if (arm_arch_thumb2)
2306 bool use_bfi = TRUE;
2308 if (GET_CODE (operands[3]) == CONST_INT)
2310 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2314 emit_insn (gen_insv_zero (operands[0], operands[1],
2319 /* See if the set can be done with a single orr instruction. */
2320 if (val == mask && const_ok_for_arm (val << start_bit))
2326 if (GET_CODE (operands[3]) != REG)
2327 operands[3] = force_reg (SImode, operands[3]);
2329 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2335 target = copy_rtx (operands[0]);
2336 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2337 subreg as the final target. */
2338 if (GET_CODE (target) == SUBREG)
2340 subtarget = gen_reg_rtx (SImode);
2341 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2342 < GET_MODE_SIZE (SImode))
2343 target = SUBREG_REG (target);
2348 if (GET_CODE (operands[3]) == CONST_INT)
2350 /* Since we are inserting a known constant, we may be able to
2351 reduce the number of bits that we have to clear so that
2352 the mask becomes simple. */
2353 /* ??? This code does not check to see if the new mask is actually
2354 simpler. It may not be. */
2355 rtx op1 = gen_reg_rtx (SImode);
2356 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2357 start of this pattern. */
2358 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2359 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2361 emit_insn (gen_andsi3 (op1, operands[0],
2362 gen_int_mode (~mask2, SImode)));
2363 emit_insn (gen_iorsi3 (subtarget, op1,
2364 gen_int_mode (op3_value << start_bit, SImode)));
2366 else if (start_bit == 0
2367 && !(const_ok_for_arm (mask)
2368 || const_ok_for_arm (~mask)))
2370 /* A Trick, since we are setting the bottom bits in the word,
2371 we can shift operand[3] up, operand[0] down, OR them together
2372 and rotate the result back again. This takes 3 insns, and
2373 the third might be mergeable into another op. */
2374 /* The shift up copes with the possibility that operand[3] is
2375 wider than the bitfield. */
2376 rtx op0 = gen_reg_rtx (SImode);
2377 rtx op1 = gen_reg_rtx (SImode);
2379 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2380 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2381 emit_insn (gen_iorsi3 (op1, op1, op0));
2382 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2384 else if ((width + start_bit == 32)
2385 && !(const_ok_for_arm (mask)
2386 || const_ok_for_arm (~mask)))
2388 /* Similar trick, but slightly less efficient. */
2390 rtx op0 = gen_reg_rtx (SImode);
2391 rtx op1 = gen_reg_rtx (SImode);
2393 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2394 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2395 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2396 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2400 rtx op0 = gen_int_mode (mask, SImode);
2401 rtx op1 = gen_reg_rtx (SImode);
2402 rtx op2 = gen_reg_rtx (SImode);
2404 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2406 rtx tmp = gen_reg_rtx (SImode);
2408 emit_insn (gen_movsi (tmp, op0));
2412 /* Mask out any bits in operand[3] that are not needed. */
2413 emit_insn (gen_andsi3 (op1, operands[3], op0));
2415 if (GET_CODE (op0) == CONST_INT
2416 && (const_ok_for_arm (mask << start_bit)
2417 || const_ok_for_arm (~(mask << start_bit))))
2419 op0 = gen_int_mode (~(mask << start_bit), SImode);
2420 emit_insn (gen_andsi3 (op2, operands[0], op0));
2424 if (GET_CODE (op0) == CONST_INT)
2426 rtx tmp = gen_reg_rtx (SImode);
2428 emit_insn (gen_movsi (tmp, op0));
2433 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2435 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2439 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2441 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2444 if (subtarget != target)
2446 /* If TARGET is still a SUBREG, then it must be wider than a word,
2447 so we must be careful only to set the subword we were asked to. */
2448 if (GET_CODE (target) == SUBREG)
2449 emit_move_insn (target, subtarget);
2451 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2458 (define_insn "insv_zero"
2459 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2460 (match_operand:SI 1 "const_int_operand" "M")
2461 (match_operand:SI 2 "const_int_operand" "M"))
2465 [(set_attr "length" "4")
2466 (set_attr "predicable" "yes")]
2469 (define_insn "insv_t2"
2470 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2471 (match_operand:SI 1 "const_int_operand" "M")
2472 (match_operand:SI 2 "const_int_operand" "M"))
2473 (match_operand:SI 3 "s_register_operand" "r"))]
2475 "bfi%?\t%0, %3, %2, %1"
2476 [(set_attr "length" "4")
2477 (set_attr "predicable" "yes")]
2480 ; constants for op 2 will never be given to these patterns.
2481 (define_insn_and_split "*anddi_notdi_di"
2482 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2483 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2484 (match_operand:DI 2 "s_register_operand" "r,0")))]
2487 "TARGET_32BIT && reload_completed
2488 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2489 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2490 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2491 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2494 operands[3] = gen_highpart (SImode, operands[0]);
2495 operands[0] = gen_lowpart (SImode, operands[0]);
2496 operands[4] = gen_highpart (SImode, operands[1]);
2497 operands[1] = gen_lowpart (SImode, operands[1]);
2498 operands[5] = gen_highpart (SImode, operands[2]);
2499 operands[2] = gen_lowpart (SImode, operands[2]);
2501 [(set_attr "length" "8")
2502 (set_attr "predicable" "yes")]
2505 (define_insn_and_split "*anddi_notzesidi_di"
2506 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2507 (and:DI (not:DI (zero_extend:DI
2508 (match_operand:SI 2 "s_register_operand" "r,r")))
2509 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2512 bic%?\\t%Q0, %Q1, %2
2514 ; (not (zero_extend ...)) allows us to just copy the high word from
2515 ; operand1 to operand0.
2518 && operands[0] != operands[1]"
2519 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2520 (set (match_dup 3) (match_dup 4))]
2523 operands[3] = gen_highpart (SImode, operands[0]);
2524 operands[0] = gen_lowpart (SImode, operands[0]);
2525 operands[4] = gen_highpart (SImode, operands[1]);
2526 operands[1] = gen_lowpart (SImode, operands[1]);
2528 [(set_attr "length" "4,8")
2529 (set_attr "predicable" "yes")]
2532 (define_insn_and_split "*anddi_notsesidi_di"
2533 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2534 (and:DI (not:DI (sign_extend:DI
2535 (match_operand:SI 2 "s_register_operand" "r,r")))
2536 (match_operand:DI 1 "s_register_operand" "0,r")))]
2539 "TARGET_32BIT && reload_completed"
2540 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2541 (set (match_dup 3) (and:SI (not:SI
2542 (ashiftrt:SI (match_dup 2) (const_int 31)))
2546 operands[3] = gen_highpart (SImode, operands[0]);
2547 operands[0] = gen_lowpart (SImode, operands[0]);
2548 operands[4] = gen_highpart (SImode, operands[1]);
2549 operands[1] = gen_lowpart (SImode, operands[1]);
2551 [(set_attr "length" "8")
2552 (set_attr "predicable" "yes")]
2555 (define_insn "andsi_notsi_si"
2556 [(set (match_operand:SI 0 "s_register_operand" "=r")
2557 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2558 (match_operand:SI 1 "s_register_operand" "r")))]
2560 "bic%?\\t%0, %1, %2"
2561 [(set_attr "predicable" "yes")]
2564 (define_insn "bicsi3"
2565 [(set (match_operand:SI 0 "register_operand" "=l")
2566 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2567 (match_operand:SI 2 "register_operand" "0")))]
2570 [(set_attr "length" "2")]
2573 (define_insn "andsi_not_shiftsi_si"
2574 [(set (match_operand:SI 0 "s_register_operand" "=r")
2575 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2576 [(match_operand:SI 2 "s_register_operand" "r")
2577 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2578 (match_operand:SI 1 "s_register_operand" "r")))]
2580 "bic%?\\t%0, %1, %2%S4"
2581 [(set_attr "predicable" "yes")
2582 (set_attr "shift" "2")
2583 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2584 (const_string "alu_shift")
2585 (const_string "alu_shift_reg")))]
2588 (define_insn "*andsi_notsi_si_compare0"
2589 [(set (reg:CC_NOOV CC_REGNUM)
2591 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2592 (match_operand:SI 1 "s_register_operand" "r"))
2594 (set (match_operand:SI 0 "s_register_operand" "=r")
2595 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2597 "bic%.\\t%0, %1, %2"
2598 [(set_attr "conds" "set")]
2601 (define_insn "*andsi_notsi_si_compare0_scratch"
2602 [(set (reg:CC_NOOV CC_REGNUM)
2604 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2605 (match_operand:SI 1 "s_register_operand" "r"))
2607 (clobber (match_scratch:SI 0 "=r"))]
2609 "bic%.\\t%0, %1, %2"
2610 [(set_attr "conds" "set")]
2613 (define_expand "iordi3"
2614 [(set (match_operand:DI 0 "s_register_operand" "")
2615 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2616 (match_operand:DI 2 "neon_logic_op2" "")))]
2621 (define_insn "*iordi3_insn"
2622 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2623 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2624 (match_operand:DI 2 "s_register_operand" "r,r")))]
2625 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2627 [(set_attr "length" "8")
2628 (set_attr "predicable" "yes")]
2631 (define_insn "*iordi_zesidi_di"
2632 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2633 (ior:DI (zero_extend:DI
2634 (match_operand:SI 2 "s_register_operand" "r,r"))
2635 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2638 orr%?\\t%Q0, %Q1, %2
2640 [(set_attr "length" "4,8")
2641 (set_attr "predicable" "yes")]
2644 (define_insn "*iordi_sesidi_di"
2645 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2646 (ior:DI (sign_extend:DI
2647 (match_operand:SI 2 "s_register_operand" "r,r"))
2648 (match_operand:DI 1 "s_register_operand" "0,r")))]
2651 [(set_attr "length" "8")
2652 (set_attr "predicable" "yes")]
2655 (define_expand "iorsi3"
2656 [(set (match_operand:SI 0 "s_register_operand" "")
2657 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2658 (match_operand:SI 2 "reg_or_int_operand" "")))]
2661 if (GET_CODE (operands[2]) == CONST_INT)
2665 arm_split_constant (IOR, SImode, NULL_RTX,
2666 INTVAL (operands[2]), operands[0], operands[1],
2667 optimize && can_create_pseudo_p ());
2670 else /* TARGET_THUMB1 */
2672 rtx tmp = force_reg (SImode, operands[2]);
2673 if (rtx_equal_p (operands[0], operands[1]))
2677 operands[2] = operands[1];
2685 (define_insn_and_split "*arm_iorsi3"
2686 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2687 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2688 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2694 && GET_CODE (operands[2]) == CONST_INT
2695 && !const_ok_for_arm (INTVAL (operands[2]))"
2696 [(clobber (const_int 0))]
2698 arm_split_constant (IOR, SImode, curr_insn,
2699 INTVAL (operands[2]), operands[0], operands[1], 0);
2702 [(set_attr "length" "4,16")
2703 (set_attr "predicable" "yes")]
2706 (define_insn "*thumb1_iorsi3"
2707 [(set (match_operand:SI 0 "register_operand" "=l")
2708 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2709 (match_operand:SI 2 "register_operand" "l")))]
2712 [(set_attr "length" "2")]
2716 [(match_scratch:SI 3 "r")
2717 (set (match_operand:SI 0 "arm_general_register_operand" "")
2718 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2719 (match_operand:SI 2 "const_int_operand" "")))]
2721 && !const_ok_for_arm (INTVAL (operands[2]))
2722 && const_ok_for_arm (~INTVAL (operands[2]))"
2723 [(set (match_dup 3) (match_dup 2))
2724 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2728 (define_insn "*iorsi3_compare0"
2729 [(set (reg:CC_NOOV CC_REGNUM)
2730 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2731 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2733 (set (match_operand:SI 0 "s_register_operand" "=r")
2734 (ior:SI (match_dup 1) (match_dup 2)))]
2736 "orr%.\\t%0, %1, %2"
2737 [(set_attr "conds" "set")]
2740 (define_insn "*iorsi3_compare0_scratch"
2741 [(set (reg:CC_NOOV CC_REGNUM)
2742 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2743 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2745 (clobber (match_scratch:SI 0 "=r"))]
2747 "orr%.\\t%0, %1, %2"
2748 [(set_attr "conds" "set")]
2751 (define_expand "xordi3"
2752 [(set (match_operand:DI 0 "s_register_operand" "")
2753 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2754 (match_operand:DI 2 "s_register_operand" "")))]
2759 (define_insn "*xordi3_insn"
2760 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2761 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2762 (match_operand:DI 2 "s_register_operand" "r,r")))]
2763 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2765 [(set_attr "length" "8")
2766 (set_attr "predicable" "yes")]
2769 (define_insn "*xordi_zesidi_di"
2770 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2771 (xor:DI (zero_extend:DI
2772 (match_operand:SI 2 "s_register_operand" "r,r"))
2773 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2776 eor%?\\t%Q0, %Q1, %2
2778 [(set_attr "length" "4,8")
2779 (set_attr "predicable" "yes")]
2782 (define_insn "*xordi_sesidi_di"
2783 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2784 (xor:DI (sign_extend:DI
2785 (match_operand:SI 2 "s_register_operand" "r,r"))
2786 (match_operand:DI 1 "s_register_operand" "0,r")))]
2789 [(set_attr "length" "8")
2790 (set_attr "predicable" "yes")]
2793 (define_expand "xorsi3"
2794 [(set (match_operand:SI 0 "s_register_operand" "")
2795 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2796 (match_operand:SI 2 "reg_or_int_operand" "")))]
2798 "if (GET_CODE (operands[2]) == CONST_INT)
2802 arm_split_constant (XOR, SImode, NULL_RTX,
2803 INTVAL (operands[2]), operands[0], operands[1],
2804 optimize && can_create_pseudo_p ());
2807 else /* TARGET_THUMB1 */
2809 rtx tmp = force_reg (SImode, operands[2]);
2810 if (rtx_equal_p (operands[0], operands[1]))
2814 operands[2] = operands[1];
2821 (define_insn "*arm_xorsi3"
2822 [(set (match_operand:SI 0 "s_register_operand" "=r")
2823 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2824 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2826 "eor%?\\t%0, %1, %2"
2827 [(set_attr "predicable" "yes")]
2830 (define_insn "*thumb1_xorsi3"
2831 [(set (match_operand:SI 0 "register_operand" "=l")
2832 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2833 (match_operand:SI 2 "register_operand" "l")))]
2836 [(set_attr "length" "2")]
2839 (define_insn "*xorsi3_compare0"
2840 [(set (reg:CC_NOOV CC_REGNUM)
2841 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2842 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2844 (set (match_operand:SI 0 "s_register_operand" "=r")
2845 (xor:SI (match_dup 1) (match_dup 2)))]
2847 "eor%.\\t%0, %1, %2"
2848 [(set_attr "conds" "set")]
2851 (define_insn "*xorsi3_compare0_scratch"
2852 [(set (reg:CC_NOOV CC_REGNUM)
2853 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2854 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2858 [(set_attr "conds" "set")]
2861 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2862 ; (NOT D) we can sometimes merge the final NOT into one of the following
2866 [(set (match_operand:SI 0 "s_register_operand" "")
2867 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2868 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2869 (match_operand:SI 3 "arm_rhs_operand" "")))
2870 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2872 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2873 (not:SI (match_dup 3))))
2874 (set (match_dup 0) (not:SI (match_dup 4)))]
2878 (define_insn "*andsi_iorsi3_notsi"
2879 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2880 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2881 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2882 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2884 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2885 [(set_attr "length" "8")
2886 (set_attr "ce_count" "2")
2887 (set_attr "predicable" "yes")]
2890 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2891 ; insns are available?
2893 [(set (match_operand:SI 0 "s_register_operand" "")
2894 (match_operator:SI 1 "logical_binary_operator"
2895 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2896 (match_operand:SI 3 "const_int_operand" "")
2897 (match_operand:SI 4 "const_int_operand" ""))
2898 (match_operator:SI 9 "logical_binary_operator"
2899 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2900 (match_operand:SI 6 "const_int_operand" ""))
2901 (match_operand:SI 7 "s_register_operand" "")])]))
2902 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2904 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2905 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2908 [(ashift:SI (match_dup 2) (match_dup 4))
2912 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2915 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2919 [(set (match_operand:SI 0 "s_register_operand" "")
2920 (match_operator:SI 1 "logical_binary_operator"
2921 [(match_operator:SI 9 "logical_binary_operator"
2922 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2923 (match_operand:SI 6 "const_int_operand" ""))
2924 (match_operand:SI 7 "s_register_operand" "")])
2925 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2926 (match_operand:SI 3 "const_int_operand" "")
2927 (match_operand:SI 4 "const_int_operand" ""))]))
2928 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2930 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2931 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2934 [(ashift:SI (match_dup 2) (match_dup 4))
2938 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2941 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2945 [(set (match_operand:SI 0 "s_register_operand" "")
2946 (match_operator:SI 1 "logical_binary_operator"
2947 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2948 (match_operand:SI 3 "const_int_operand" "")
2949 (match_operand:SI 4 "const_int_operand" ""))
2950 (match_operator:SI 9 "logical_binary_operator"
2951 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2952 (match_operand:SI 6 "const_int_operand" ""))
2953 (match_operand:SI 7 "s_register_operand" "")])]))
2954 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2956 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2957 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2960 [(ashift:SI (match_dup 2) (match_dup 4))
2964 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2967 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2971 [(set (match_operand:SI 0 "s_register_operand" "")
2972 (match_operator:SI 1 "logical_binary_operator"
2973 [(match_operator:SI 9 "logical_binary_operator"
2974 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2975 (match_operand:SI 6 "const_int_operand" ""))
2976 (match_operand:SI 7 "s_register_operand" "")])
2977 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2978 (match_operand:SI 3 "const_int_operand" "")
2979 (match_operand:SI 4 "const_int_operand" ""))]))
2980 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2982 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2983 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2986 [(ashift:SI (match_dup 2) (match_dup 4))
2990 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2993 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2997 ;; Minimum and maximum insns
2999 (define_expand "smaxsi3"
3001 (set (match_operand:SI 0 "s_register_operand" "")
3002 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3003 (match_operand:SI 2 "arm_rhs_operand" "")))
3004 (clobber (reg:CC CC_REGNUM))])]
3007 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3009 /* No need for a clobber of the condition code register here. */
3010 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3011 gen_rtx_SMAX (SImode, operands[1],
3017 (define_insn "*smax_0"
3018 [(set (match_operand:SI 0 "s_register_operand" "=r")
3019 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3022 "bic%?\\t%0, %1, %1, asr #31"
3023 [(set_attr "predicable" "yes")]
3026 (define_insn "*smax_m1"
3027 [(set (match_operand:SI 0 "s_register_operand" "=r")
3028 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3031 "orr%?\\t%0, %1, %1, asr #31"
3032 [(set_attr "predicable" "yes")]
3035 (define_insn "*arm_smax_insn"
3036 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3037 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3038 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3039 (clobber (reg:CC CC_REGNUM))]
3042 cmp\\t%1, %2\;movlt\\t%0, %2
3043 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3044 [(set_attr "conds" "clob")
3045 (set_attr "length" "8,12")]
3048 (define_expand "sminsi3"
3050 (set (match_operand:SI 0 "s_register_operand" "")
3051 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3052 (match_operand:SI 2 "arm_rhs_operand" "")))
3053 (clobber (reg:CC CC_REGNUM))])]
3056 if (operands[2] == const0_rtx)
3058 /* No need for a clobber of the condition code register here. */
3059 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3060 gen_rtx_SMIN (SImode, operands[1],
3066 (define_insn "*smin_0"
3067 [(set (match_operand:SI 0 "s_register_operand" "=r")
3068 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3071 "and%?\\t%0, %1, %1, asr #31"
3072 [(set_attr "predicable" "yes")]
3075 (define_insn "*arm_smin_insn"
3076 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3077 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3078 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3079 (clobber (reg:CC CC_REGNUM))]
3082 cmp\\t%1, %2\;movge\\t%0, %2
3083 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3084 [(set_attr "conds" "clob")
3085 (set_attr "length" "8,12")]
3088 (define_expand "umaxsi3"
3090 (set (match_operand:SI 0 "s_register_operand" "")
3091 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3092 (match_operand:SI 2 "arm_rhs_operand" "")))
3093 (clobber (reg:CC CC_REGNUM))])]
3098 (define_insn "*arm_umaxsi3"
3099 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3100 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3101 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3102 (clobber (reg:CC CC_REGNUM))]
3105 cmp\\t%1, %2\;movcc\\t%0, %2
3106 cmp\\t%1, %2\;movcs\\t%0, %1
3107 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3108 [(set_attr "conds" "clob")
3109 (set_attr "length" "8,8,12")]
3112 (define_expand "uminsi3"
3114 (set (match_operand:SI 0 "s_register_operand" "")
3115 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3116 (match_operand:SI 2 "arm_rhs_operand" "")))
3117 (clobber (reg:CC CC_REGNUM))])]
3122 (define_insn "*arm_uminsi3"
3123 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3124 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3125 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3126 (clobber (reg:CC CC_REGNUM))]
3129 cmp\\t%1, %2\;movcs\\t%0, %2
3130 cmp\\t%1, %2\;movcc\\t%0, %1
3131 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3132 [(set_attr "conds" "clob")
3133 (set_attr "length" "8,8,12")]
3136 (define_insn "*store_minmaxsi"
3137 [(set (match_operand:SI 0 "memory_operand" "=m")
3138 (match_operator:SI 3 "minmax_operator"
3139 [(match_operand:SI 1 "s_register_operand" "r")
3140 (match_operand:SI 2 "s_register_operand" "r")]))
3141 (clobber (reg:CC CC_REGNUM))]
3144 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3145 operands[1], operands[2]);
3146 output_asm_insn (\"cmp\\t%1, %2\", operands);
3148 output_asm_insn (\"ite\t%d3\", operands);
3149 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3150 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3153 [(set_attr "conds" "clob")
3154 (set (attr "length")
3155 (if_then_else (eq_attr "is_thumb" "yes")
3158 (set_attr "type" "store1")]
3161 ; Reject the frame pointer in operand[1], since reloading this after
3162 ; it has been eliminated can cause carnage.
3163 (define_insn "*minmax_arithsi"
3164 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3165 (match_operator:SI 4 "shiftable_operator"
3166 [(match_operator:SI 5 "minmax_operator"
3167 [(match_operand:SI 2 "s_register_operand" "r,r")
3168 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3169 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3170 (clobber (reg:CC CC_REGNUM))]
3171 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3174 enum rtx_code code = GET_CODE (operands[4]);
3177 if (which_alternative != 0 || operands[3] != const0_rtx
3178 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3183 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3184 operands[2], operands[3]);
3185 output_asm_insn (\"cmp\\t%2, %3\", operands);
3189 output_asm_insn (\"ite\\t%d5\", operands);
3191 output_asm_insn (\"it\\t%d5\", operands);
3193 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3195 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3198 [(set_attr "conds" "clob")
3199 (set (attr "length")
3200 (if_then_else (eq_attr "is_thumb" "yes")
3206 ;; Shift and rotation insns
3208 (define_expand "ashldi3"
3209 [(set (match_operand:DI 0 "s_register_operand" "")
3210 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3211 (match_operand:SI 2 "reg_or_int_operand" "")))]
3214 if (GET_CODE (operands[2]) == CONST_INT)
3216 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3218 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3221 /* Ideally we shouldn't fail here if we could know that operands[1]
3222 ends up already living in an iwmmxt register. Otherwise it's
3223 cheaper to have the alternate code being generated than moving
3224 values to iwmmxt regs and back. */
3227 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3232 (define_insn "arm_ashldi3_1bit"
3233 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3234 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3236 (clobber (reg:CC CC_REGNUM))]
3238 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3239 [(set_attr "conds" "clob")
3240 (set_attr "length" "8")]
3243 (define_expand "ashlsi3"
3244 [(set (match_operand:SI 0 "s_register_operand" "")
3245 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3246 (match_operand:SI 2 "arm_rhs_operand" "")))]
3249 if (GET_CODE (operands[2]) == CONST_INT
3250 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3252 emit_insn (gen_movsi (operands[0], const0_rtx));
3258 (define_insn "*thumb1_ashlsi3"
3259 [(set (match_operand:SI 0 "register_operand" "=l,l")
3260 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3261 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3264 [(set_attr "length" "2")]
3267 (define_expand "ashrdi3"
3268 [(set (match_operand:DI 0 "s_register_operand" "")
3269 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3270 (match_operand:SI 2 "reg_or_int_operand" "")))]
3273 if (GET_CODE (operands[2]) == CONST_INT)
3275 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3277 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3280 /* Ideally we shouldn't fail here if we could know that operands[1]
3281 ends up already living in an iwmmxt register. Otherwise it's
3282 cheaper to have the alternate code being generated than moving
3283 values to iwmmxt regs and back. */
3286 else if (!TARGET_REALLY_IWMMXT)
3291 (define_insn "arm_ashrdi3_1bit"
3292 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3293 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3295 (clobber (reg:CC CC_REGNUM))]
3297 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3298 [(set_attr "conds" "clob")
3299 (set_attr "length" "8")]
3302 (define_expand "ashrsi3"
3303 [(set (match_operand:SI 0 "s_register_operand" "")
3304 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3305 (match_operand:SI 2 "arm_rhs_operand" "")))]
3308 if (GET_CODE (operands[2]) == CONST_INT
3309 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3310 operands[2] = GEN_INT (31);
3314 (define_insn "*thumb1_ashrsi3"
3315 [(set (match_operand:SI 0 "register_operand" "=l,l")
3316 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3317 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3320 [(set_attr "length" "2")]
3323 (define_expand "lshrdi3"
3324 [(set (match_operand:DI 0 "s_register_operand" "")
3325 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3326 (match_operand:SI 2 "reg_or_int_operand" "")))]
3329 if (GET_CODE (operands[2]) == CONST_INT)
3331 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3333 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3336 /* Ideally we shouldn't fail here if we could know that operands[1]
3337 ends up already living in an iwmmxt register. Otherwise it's
3338 cheaper to have the alternate code being generated than moving
3339 values to iwmmxt regs and back. */
3342 else if (!TARGET_REALLY_IWMMXT)
3347 (define_insn "arm_lshrdi3_1bit"
3348 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3349 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3351 (clobber (reg:CC CC_REGNUM))]
3353 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3354 [(set_attr "conds" "clob")
3355 (set_attr "length" "8")]
3358 (define_expand "lshrsi3"
3359 [(set (match_operand:SI 0 "s_register_operand" "")
3360 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3361 (match_operand:SI 2 "arm_rhs_operand" "")))]
3364 if (GET_CODE (operands[2]) == CONST_INT
3365 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3367 emit_insn (gen_movsi (operands[0], const0_rtx));
3373 (define_insn "*thumb1_lshrsi3"
3374 [(set (match_operand:SI 0 "register_operand" "=l,l")
3375 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3376 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3379 [(set_attr "length" "2")]
3382 (define_expand "rotlsi3"
3383 [(set (match_operand:SI 0 "s_register_operand" "")
3384 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3385 (match_operand:SI 2 "reg_or_int_operand" "")))]
3388 if (GET_CODE (operands[2]) == CONST_INT)
3389 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3392 rtx reg = gen_reg_rtx (SImode);
3393 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3399 (define_expand "rotrsi3"
3400 [(set (match_operand:SI 0 "s_register_operand" "")
3401 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3402 (match_operand:SI 2 "arm_rhs_operand" "")))]
3407 if (GET_CODE (operands[2]) == CONST_INT
3408 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3409 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3411 else /* TARGET_THUMB1 */
3413 if (GET_CODE (operands [2]) == CONST_INT)
3414 operands [2] = force_reg (SImode, operands[2]);
3419 (define_insn "*thumb1_rotrsi3"
3420 [(set (match_operand:SI 0 "register_operand" "=l")
3421 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3422 (match_operand:SI 2 "register_operand" "l")))]
3425 [(set_attr "length" "2")]
3428 (define_insn "*arm_shiftsi3"
3429 [(set (match_operand:SI 0 "s_register_operand" "=r")
3430 (match_operator:SI 3 "shift_operator"
3431 [(match_operand:SI 1 "s_register_operand" "r")
3432 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3434 "* return arm_output_shift(operands, 0);"
3435 [(set_attr "predicable" "yes")
3436 (set_attr "shift" "1")
3437 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3438 (const_string "alu_shift")
3439 (const_string "alu_shift_reg")))]
3442 (define_insn "*shiftsi3_compare0"
3443 [(set (reg:CC_NOOV CC_REGNUM)
3444 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3445 [(match_operand:SI 1 "s_register_operand" "r")
3446 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3448 (set (match_operand:SI 0 "s_register_operand" "=r")
3449 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3451 "* return arm_output_shift(operands, 1);"
3452 [(set_attr "conds" "set")
3453 (set_attr "shift" "1")
3454 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3455 (const_string "alu_shift")
3456 (const_string "alu_shift_reg")))]
3459 (define_insn "*shiftsi3_compare0_scratch"
3460 [(set (reg:CC_NOOV CC_REGNUM)
3461 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3462 [(match_operand:SI 1 "s_register_operand" "r")
3463 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3465 (clobber (match_scratch:SI 0 "=r"))]
3467 "* return arm_output_shift(operands, 1);"
3468 [(set_attr "conds" "set")
3469 (set_attr "shift" "1")]
3472 (define_insn "*arm_notsi_shiftsi"
3473 [(set (match_operand:SI 0 "s_register_operand" "=r")
3474 (not:SI (match_operator:SI 3 "shift_operator"
3475 [(match_operand:SI 1 "s_register_operand" "r")
3476 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3479 [(set_attr "predicable" "yes")
3480 (set_attr "shift" "1")
3481 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3482 (const_string "alu_shift")
3483 (const_string "alu_shift_reg")))]
3486 (define_insn "*arm_notsi_shiftsi_compare0"
3487 [(set (reg:CC_NOOV CC_REGNUM)
3488 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3489 [(match_operand:SI 1 "s_register_operand" "r")
3490 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3492 (set (match_operand:SI 0 "s_register_operand" "=r")
3493 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3496 [(set_attr "conds" "set")
3497 (set_attr "shift" "1")
3498 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3499 (const_string "alu_shift")
3500 (const_string "alu_shift_reg")))]
3503 (define_insn "*arm_not_shiftsi_compare0_scratch"
3504 [(set (reg:CC_NOOV CC_REGNUM)
3505 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3506 [(match_operand:SI 1 "s_register_operand" "r")
3507 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3509 (clobber (match_scratch:SI 0 "=r"))]
3512 [(set_attr "conds" "set")
3513 (set_attr "shift" "1")
3514 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3515 (const_string "alu_shift")
3516 (const_string "alu_shift_reg")))]
3519 ;; We don't really have extzv, but defining this using shifts helps
3520 ;; to reduce register pressure later on.
3522 (define_expand "extzv"
3524 (ashift:SI (match_operand:SI 1 "register_operand" "")
3525 (match_operand:SI 2 "const_int_operand" "")))
3526 (set (match_operand:SI 0 "register_operand" "")
3527 (lshiftrt:SI (match_dup 4)
3528 (match_operand:SI 3 "const_int_operand" "")))]
3529 "TARGET_THUMB1 || arm_arch_thumb2"
3532 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3533 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3535 if (arm_arch_thumb2)
3537 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3542 operands[3] = GEN_INT (rshift);
3546 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3550 operands[2] = GEN_INT (lshift);
3551 operands[4] = gen_reg_rtx (SImode);
3556 [(set (match_operand:SI 0 "s_register_operand" "=r")
3557 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3558 (match_operand:SI 2 "const_int_operand" "M")
3559 (match_operand:SI 3 "const_int_operand" "M")))]
3561 "sbfx%?\t%0, %1, %3, %2"
3562 [(set_attr "length" "4")
3563 (set_attr "predicable" "yes")]
3566 (define_insn "extzv_t2"
3567 [(set (match_operand:SI 0 "s_register_operand" "=r")
3568 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3569 (match_operand:SI 2 "const_int_operand" "M")
3570 (match_operand:SI 3 "const_int_operand" "M")))]
3572 "ubfx%?\t%0, %1, %3, %2"
3573 [(set_attr "length" "4")
3574 (set_attr "predicable" "yes")]
3578 ;; Unary arithmetic insns
3580 (define_expand "negdi2"
3582 [(set (match_operand:DI 0 "s_register_operand" "")
3583 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3584 (clobber (reg:CC CC_REGNUM))])]
3589 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3590 ;; The first alternative allows the common case of a *full* overlap.
3591 (define_insn "*arm_negdi2"
3592 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3593 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3594 (clobber (reg:CC CC_REGNUM))]
3596 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3597 [(set_attr "conds" "clob")
3598 (set_attr "length" "8")]
3601 (define_insn "*thumb1_negdi2"
3602 [(set (match_operand:DI 0 "register_operand" "=&l")
3603 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3604 (clobber (reg:CC CC_REGNUM))]
3606 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3607 [(set_attr "length" "6")]
3610 (define_expand "negsi2"
3611 [(set (match_operand:SI 0 "s_register_operand" "")
3612 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3617 (define_insn "*arm_negsi2"
3618 [(set (match_operand:SI 0 "s_register_operand" "=r")
3619 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3621 "rsb%?\\t%0, %1, #0"
3622 [(set_attr "predicable" "yes")]
3625 (define_insn "*thumb1_negsi2"
3626 [(set (match_operand:SI 0 "register_operand" "=l")
3627 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3630 [(set_attr "length" "2")]
3633 (define_expand "negsf2"
3634 [(set (match_operand:SF 0 "s_register_operand" "")
3635 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3636 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3640 (define_expand "negdf2"
3641 [(set (match_operand:DF 0 "s_register_operand" "")
3642 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3643 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3646 ;; abssi2 doesn't really clobber the condition codes if a different register
3647 ;; is being set. To keep things simple, assume during rtl manipulations that
3648 ;; it does, but tell the final scan operator the truth. Similarly for
3651 (define_expand "abssi2"
3653 [(set (match_operand:SI 0 "s_register_operand" "")
3654 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3655 (clobber (match_dup 2))])]
3659 operands[2] = gen_rtx_SCRATCH (SImode);
3661 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3664 (define_insn "*arm_abssi2"
3665 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3666 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3667 (clobber (reg:CC CC_REGNUM))]
3670 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3671 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3672 [(set_attr "conds" "clob,*")
3673 (set_attr "shift" "1")
3674 ;; predicable can't be set based on the variant, so left as no
3675 (set_attr "length" "8")]
3678 (define_insn_and_split "*thumb1_abssi2"
3679 [(set (match_operand:SI 0 "s_register_operand" "=l")
3680 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3681 (clobber (match_scratch:SI 2 "=&l"))]
3684 "TARGET_THUMB1 && reload_completed"
3685 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3686 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3687 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3689 [(set_attr "length" "6")]
3692 (define_insn "*arm_neg_abssi2"
3693 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3694 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3695 (clobber (reg:CC CC_REGNUM))]
3698 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3699 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3700 [(set_attr "conds" "clob,*")
3701 (set_attr "shift" "1")
3702 ;; predicable can't be set based on the variant, so left as no
3703 (set_attr "length" "8")]
3706 (define_insn_and_split "*thumb1_neg_abssi2"
3707 [(set (match_operand:SI 0 "s_register_operand" "=l")
3708 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3709 (clobber (match_scratch:SI 2 "=&l"))]
3712 "TARGET_THUMB1 && reload_completed"
3713 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3714 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3715 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3717 [(set_attr "length" "6")]
3720 (define_expand "abssf2"
3721 [(set (match_operand:SF 0 "s_register_operand" "")
3722 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3723 "TARGET_32BIT && TARGET_HARD_FLOAT"
3726 (define_expand "absdf2"
3727 [(set (match_operand:DF 0 "s_register_operand" "")
3728 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3729 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3732 (define_expand "sqrtsf2"
3733 [(set (match_operand:SF 0 "s_register_operand" "")
3734 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3735 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3738 (define_expand "sqrtdf2"
3739 [(set (match_operand:DF 0 "s_register_operand" "")
3740 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3741 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3744 (define_insn_and_split "one_cmpldi2"
3745 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3746 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3749 "TARGET_32BIT && reload_completed"
3750 [(set (match_dup 0) (not:SI (match_dup 1)))
3751 (set (match_dup 2) (not:SI (match_dup 3)))]
3754 operands[2] = gen_highpart (SImode, operands[0]);
3755 operands[0] = gen_lowpart (SImode, operands[0]);
3756 operands[3] = gen_highpart (SImode, operands[1]);
3757 operands[1] = gen_lowpart (SImode, operands[1]);
3759 [(set_attr "length" "8")
3760 (set_attr "predicable" "yes")]
3763 (define_expand "one_cmplsi2"
3764 [(set (match_operand:SI 0 "s_register_operand" "")
3765 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3770 (define_insn "*arm_one_cmplsi2"
3771 [(set (match_operand:SI 0 "s_register_operand" "=r")
3772 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3775 [(set_attr "predicable" "yes")]
3778 (define_insn "*thumb1_one_cmplsi2"
3779 [(set (match_operand:SI 0 "register_operand" "=l")
3780 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3783 [(set_attr "length" "2")]
3786 (define_insn "*notsi_compare0"
3787 [(set (reg:CC_NOOV CC_REGNUM)
3788 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3790 (set (match_operand:SI 0 "s_register_operand" "=r")
3791 (not:SI (match_dup 1)))]
3794 [(set_attr "conds" "set")]
3797 (define_insn "*notsi_compare0_scratch"
3798 [(set (reg:CC_NOOV CC_REGNUM)
3799 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3801 (clobber (match_scratch:SI 0 "=r"))]
3804 [(set_attr "conds" "set")]
3807 ;; Fixed <--> Floating conversion insns
3809 (define_expand "floatsihf2"
3810 [(set (match_operand:HF 0 "general_operand" "")
3811 (float:HF (match_operand:SI 1 "general_operand" "")))]
3815 rtx op1 = gen_reg_rtx (SFmode);
3816 expand_float (op1, operands[1], 0);
3817 op1 = convert_to_mode (HFmode, op1, 0);
3818 emit_move_insn (operands[0], op1);
3823 (define_expand "floatdihf2"
3824 [(set (match_operand:HF 0 "general_operand" "")
3825 (float:HF (match_operand:DI 1 "general_operand" "")))]
3829 rtx op1 = gen_reg_rtx (SFmode);
3830 expand_float (op1, operands[1], 0);
3831 op1 = convert_to_mode (HFmode, op1, 0);
3832 emit_move_insn (operands[0], op1);
3837 (define_expand "floatsisf2"
3838 [(set (match_operand:SF 0 "s_register_operand" "")
3839 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3840 "TARGET_32BIT && TARGET_HARD_FLOAT"
3842 if (TARGET_MAVERICK)
3844 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3849 (define_expand "floatsidf2"
3850 [(set (match_operand:DF 0 "s_register_operand" "")
3851 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3852 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3854 if (TARGET_MAVERICK)
3856 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3861 (define_expand "fix_trunchfsi2"
3862 [(set (match_operand:SI 0 "general_operand" "")
3863 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3867 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3868 expand_fix (operands[0], op1, 0);
3873 (define_expand "fix_trunchfdi2"
3874 [(set (match_operand:DI 0 "general_operand" "")
3875 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3879 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3880 expand_fix (operands[0], op1, 0);
3885 (define_expand "fix_truncsfsi2"
3886 [(set (match_operand:SI 0 "s_register_operand" "")
3887 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3888 "TARGET_32BIT && TARGET_HARD_FLOAT"
3890 if (TARGET_MAVERICK)
3892 if (!cirrus_fp_register (operands[0], SImode))
3893 operands[0] = force_reg (SImode, operands[0]);
3894 if (!cirrus_fp_register (operands[1], SFmode))
3895 operands[1] = force_reg (SFmode, operands[0]);
3896 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3901 (define_expand "fix_truncdfsi2"
3902 [(set (match_operand:SI 0 "s_register_operand" "")
3903 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3904 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3906 if (TARGET_MAVERICK)
3908 if (!cirrus_fp_register (operands[1], DFmode))
3909 operands[1] = force_reg (DFmode, operands[0]);
3910 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3917 (define_expand "truncdfsf2"
3918 [(set (match_operand:SF 0 "s_register_operand" "")
3920 (match_operand:DF 1 "s_register_operand" "")))]
3921 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3925 /* DFmode -> HFmode conversions have to go through SFmode. */
3926 (define_expand "truncdfhf2"
3927 [(set (match_operand:HF 0 "general_operand" "")
3929 (match_operand:DF 1 "general_operand" "")))]
3934 op1 = convert_to_mode (SFmode, operands[1], 0);
3935 op1 = convert_to_mode (HFmode, op1, 0);
3936 emit_move_insn (operands[0], op1);
3941 ;; Zero and sign extension instructions.
3943 (define_expand "zero_extendsidi2"
3944 [(set (match_operand:DI 0 "s_register_operand" "")
3945 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3950 (define_insn "*arm_zero_extendsidi2"
3951 [(set (match_operand:DI 0 "s_register_operand" "=r")
3952 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3955 if (REGNO (operands[1])
3956 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3957 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3958 return \"mov%?\\t%R0, #0\";
3960 [(set_attr "length" "8")
3961 (set_attr "predicable" "yes")]
3964 (define_expand "zero_extendqidi2"
3965 [(set (match_operand:DI 0 "s_register_operand" "")
3966 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3971 (define_insn "*arm_zero_extendqidi2"
3972 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3973 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3976 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3977 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3978 [(set_attr "length" "8")
3979 (set_attr "predicable" "yes")
3980 (set_attr "type" "*,load_byte")
3981 (set_attr "pool_range" "*,4092")
3982 (set_attr "neg_pool_range" "*,4084")]
3985 (define_expand "extendsidi2"
3986 [(set (match_operand:DI 0 "s_register_operand" "")
3987 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3992 (define_insn "*arm_extendsidi2"
3993 [(set (match_operand:DI 0 "s_register_operand" "=r")
3994 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3997 if (REGNO (operands[1])
3998 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3999 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4000 return \"mov%?\\t%R0, %Q0, asr #31\";
4002 [(set_attr "length" "8")
4003 (set_attr "shift" "1")
4004 (set_attr "predicable" "yes")]
4007 (define_expand "zero_extendhisi2"
4008 [(set (match_operand:SI 0 "s_register_operand" "")
4009 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4012 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4014 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4017 if (!arm_arch6 && !MEM_P (operands[1]))
4019 rtx t = gen_lowpart (SImode, operands[1]);
4020 rtx tmp = gen_reg_rtx (SImode);
4021 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4022 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4028 [(set (match_operand:SI 0 "register_operand" "")
4029 (zero_extend:SI (match_operand:HI 1 "register_operand" "l,m")))]
4030 "!TARGET_THUMB2 && !arm_arch6"
4031 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4032 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4034 operands[2] = gen_lowpart (SImode, operands[1]);
4037 (define_insn "*thumb1_zero_extendhisi2"
4038 [(set (match_operand:SI 0 "register_operand" "=l,l")
4039 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4044 if (which_alternative == 0 && arm_arch6)
4045 return \"uxth\\t%0, %1\";
4046 if (which_alternative == 0)
4049 mem = XEXP (operands[1], 0);
4051 if (GET_CODE (mem) == CONST)
4052 mem = XEXP (mem, 0);
4054 if (GET_CODE (mem) == LABEL_REF)
4055 return \"ldr\\t%0, %1\";
4057 if (GET_CODE (mem) == PLUS)
4059 rtx a = XEXP (mem, 0);
4060 rtx b = XEXP (mem, 1);
4062 /* This can happen due to bugs in reload. */
4063 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4066 ops[0] = operands[0];
4069 output_asm_insn (\"mov %0, %1\", ops);
4071 XEXP (mem, 0) = operands[0];
4074 else if ( GET_CODE (a) == LABEL_REF
4075 && GET_CODE (b) == CONST_INT)
4076 return \"ldr\\t%0, %1\";
4079 return \"ldrh\\t%0, %1\";
4081 [(set_attr_alternative "length"
4082 [(if_then_else (eq_attr "is_arch6" "yes")
4083 (const_int 2) (const_int 4))
4085 (set_attr "type" "alu_shift,load_byte")
4086 (set_attr "pool_range" "*,60")]
4089 (define_insn "*arm_zero_extendhisi2"
4090 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4091 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4092 "TARGET_ARM && arm_arch4 && !arm_arch6"
4096 [(set_attr "type" "alu_shift,load_byte")
4097 (set_attr "predicable" "yes")
4098 (set_attr "pool_range" "*,256")
4099 (set_attr "neg_pool_range" "*,244")]
4102 (define_insn "*arm_zero_extendhisi2_v6"
4103 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4104 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4105 "TARGET_ARM && arm_arch6"
4109 [(set_attr "type" "alu_shift,load_byte")
4110 (set_attr "predicable" "yes")
4111 (set_attr "pool_range" "*,256")
4112 (set_attr "neg_pool_range" "*,244")]
4115 (define_insn "*arm_zero_extendhisi2addsi"
4116 [(set (match_operand:SI 0 "s_register_operand" "=r")
4117 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4118 (match_operand:SI 2 "s_register_operand" "r")))]
4120 "uxtah%?\\t%0, %2, %1"
4121 [(set_attr "type" "alu_shift")
4122 (set_attr "predicable" "yes")]
4125 (define_expand "zero_extendqisi2"
4126 [(set (match_operand:SI 0 "s_register_operand" "")
4127 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4130 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4132 emit_insn (gen_andsi3 (operands[0],
4133 gen_lowpart (SImode, operands[1]),
4137 if (!arm_arch6 && !MEM_P (operands[1]))
4139 rtx t = gen_lowpart (SImode, operands[1]);
4140 rtx tmp = gen_reg_rtx (SImode);
4141 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4142 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4148 [(set (match_operand:SI 0 "register_operand" "")
4149 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4151 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4152 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4154 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4157 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4162 (define_insn "*thumb1_zero_extendqisi2"
4163 [(set (match_operand:SI 0 "register_operand" "=l,l")
4164 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4165 "TARGET_THUMB1 && !arm_arch6"
4169 [(set_attr "length" "4,2")
4170 (set_attr "type" "alu_shift,load_byte")
4171 (set_attr "pool_range" "*,32")]
4174 (define_insn "*thumb1_zero_extendqisi2_v6"
4175 [(set (match_operand:SI 0 "register_operand" "=l,l")
4176 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4177 "TARGET_THUMB1 && arm_arch6"
4181 [(set_attr "length" "2,2")
4182 (set_attr "type" "alu_shift,load_byte")
4183 (set_attr "pool_range" "*,32")]
4186 (define_insn "*arm_zero_extendqisi2"
4187 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4188 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4189 "TARGET_ARM && !arm_arch6"
4192 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4193 [(set_attr "length" "8,4")
4194 (set_attr "type" "alu_shift,load_byte")
4195 (set_attr "predicable" "yes")
4196 (set_attr "pool_range" "*,4096")
4197 (set_attr "neg_pool_range" "*,4084")]
4200 (define_insn "*arm_zero_extendqisi2_v6"
4201 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4202 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4203 "TARGET_ARM && arm_arch6"
4206 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4207 [(set_attr "type" "alu_shift,load_byte")
4208 (set_attr "predicable" "yes")
4209 (set_attr "pool_range" "*,4096")
4210 (set_attr "neg_pool_range" "*,4084")]
4213 (define_insn "*arm_zero_extendqisi2addsi"
4214 [(set (match_operand:SI 0 "s_register_operand" "=r")
4215 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4216 (match_operand:SI 2 "s_register_operand" "r")))]
4218 "uxtab%?\\t%0, %2, %1"
4219 [(set_attr "predicable" "yes")
4220 (set_attr "insn" "xtab")
4221 (set_attr "type" "alu_shift")]
4225 [(set (match_operand:SI 0 "s_register_operand" "")
4226 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4227 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4228 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4229 [(set (match_dup 2) (match_dup 1))
4230 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4235 [(set (match_operand:SI 0 "s_register_operand" "")
4236 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4237 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4238 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4239 [(set (match_dup 2) (match_dup 1))
4240 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4244 (define_code_iterator ior_xor [ior xor])
4247 [(set (match_operand:SI 0 "s_register_operand" "")
4248 (ior_xor:SI (and:SI (ashift:SI
4249 (match_operand:SI 1 "s_register_operand" "")
4250 (match_operand:SI 2 "const_int_operand" ""))
4251 (match_operand:SI 3 "const_int_operand" ""))
4253 (match_operator 5 "subreg_lowpart_operator"
4254 [(match_operand:SI 4 "s_register_operand" "")]))))]
4256 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4257 == (GET_MODE_MASK (GET_MODE (operands[5]))
4258 & (GET_MODE_MASK (GET_MODE (operands[5]))
4259 << (INTVAL (operands[2])))))"
4260 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4262 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4263 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4266 (define_insn "*compareqi_eq0"
4267 [(set (reg:CC_Z CC_REGNUM)
4268 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4272 [(set_attr "conds" "set")]
4275 (define_expand "extendhisi2"
4276 [(set (match_operand:SI 0 "s_register_operand" "")
4277 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4282 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4285 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4287 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4291 if (!arm_arch6 && !MEM_P (operands[1]))
4293 rtx t = gen_lowpart (SImode, operands[1]);
4294 rtx tmp = gen_reg_rtx (SImode);
4295 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4296 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4303 [(set (match_operand:SI 0 "register_operand" "")
4304 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4305 (clobber (match_scratch:SI 2 ""))])]
4307 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4308 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4310 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4313 ;; We used to have an early-clobber on the scratch register here.
4314 ;; However, there's a bug somewhere in reload which means that this
4315 ;; can be partially ignored during spill allocation if the memory
4316 ;; address also needs reloading; this causes us to die later on when
4317 ;; we try to verify the operands. Fortunately, we don't really need
4318 ;; the early-clobber: we can always use operand 0 if operand 2
4319 ;; overlaps the address.
4320 (define_insn "thumb1_extendhisi2"
4321 [(set (match_operand:SI 0 "register_operand" "=l,l")
4322 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4323 (clobber (match_scratch:SI 2 "=X,l"))]
4330 if (which_alternative == 0 && !arm_arch6)
4332 if (which_alternative == 0)
4333 return \"sxth\\t%0, %1\";
4335 mem = XEXP (operands[1], 0);
4337 /* This code used to try to use 'V', and fix the address only if it was
4338 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4339 range of QImode offsets, and offsettable_address_p does a QImode
4342 if (GET_CODE (mem) == CONST)
4343 mem = XEXP (mem, 0);
4345 if (GET_CODE (mem) == LABEL_REF)
4346 return \"ldr\\t%0, %1\";
4348 if (GET_CODE (mem) == PLUS)
4350 rtx a = XEXP (mem, 0);
4351 rtx b = XEXP (mem, 1);
4353 if (GET_CODE (a) == LABEL_REF
4354 && GET_CODE (b) == CONST_INT)
4355 return \"ldr\\t%0, %1\";
4357 if (GET_CODE (b) == REG)
4358 return \"ldrsh\\t%0, %1\";
4366 ops[2] = const0_rtx;
4369 gcc_assert (GET_CODE (ops[1]) == REG);
4371 ops[0] = operands[0];
4372 if (reg_mentioned_p (operands[2], ops[1]))
4375 ops[3] = operands[2];
4376 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4379 [(set_attr_alternative "length"
4380 [(if_then_else (eq_attr "is_arch6" "yes")
4381 (const_int 2) (const_int 4))
4383 (set_attr "type" "alu_shift,load_byte")
4384 (set_attr "pool_range" "*,1020")]
4387 ;; This pattern will only be used when ldsh is not available
4388 (define_expand "extendhisi2_mem"
4389 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4391 (zero_extend:SI (match_dup 7)))
4392 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4393 (set (match_operand:SI 0 "" "")
4394 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4399 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4401 mem1 = change_address (operands[1], QImode, addr);
4402 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4403 operands[0] = gen_lowpart (SImode, operands[0]);
4405 operands[2] = gen_reg_rtx (SImode);
4406 operands[3] = gen_reg_rtx (SImode);
4407 operands[6] = gen_reg_rtx (SImode);
4410 if (BYTES_BIG_ENDIAN)
4412 operands[4] = operands[2];
4413 operands[5] = operands[3];
4417 operands[4] = operands[3];
4418 operands[5] = operands[2];
4424 [(set (match_operand:SI 0 "register_operand" "")
4425 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4427 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4428 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4430 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4433 (define_insn "*arm_extendhisi2"
4434 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4435 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4436 "TARGET_ARM && arm_arch4 && !arm_arch6"
4440 [(set_attr "length" "8,4")
4441 (set_attr "type" "alu_shift,load_byte")
4442 (set_attr "predicable" "yes")
4443 (set_attr "pool_range" "*,256")
4444 (set_attr "neg_pool_range" "*,244")]
4447 ;; ??? Check Thumb-2 pool range
4448 (define_insn "*arm_extendhisi2_v6"
4449 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4450 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4451 "TARGET_32BIT && arm_arch6"
4455 [(set_attr "type" "alu_shift,load_byte")
4456 (set_attr "predicable" "yes")
4457 (set_attr "pool_range" "*,256")
4458 (set_attr "neg_pool_range" "*,244")]
4461 (define_insn "*arm_extendhisi2addsi"
4462 [(set (match_operand:SI 0 "s_register_operand" "=r")
4463 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4464 (match_operand:SI 2 "s_register_operand" "r")))]
4466 "sxtah%?\\t%0, %2, %1"
4469 (define_expand "extendqihi2"
4471 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4473 (set (match_operand:HI 0 "s_register_operand" "")
4474 (ashiftrt:SI (match_dup 2)
4479 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4481 emit_insn (gen_rtx_SET (VOIDmode,
4483 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4486 if (!s_register_operand (operands[1], QImode))
4487 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4488 operands[0] = gen_lowpart (SImode, operands[0]);
4489 operands[1] = gen_lowpart (SImode, operands[1]);
4490 operands[2] = gen_reg_rtx (SImode);
4494 (define_insn "*arm_extendqihi_insn"
4495 [(set (match_operand:HI 0 "s_register_operand" "=r")
4496 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4497 "TARGET_ARM && arm_arch4"
4498 "ldr%(sb%)\\t%0, %1"
4499 [(set_attr "type" "load_byte")
4500 (set_attr "predicable" "yes")
4501 (set_attr "pool_range" "256")
4502 (set_attr "neg_pool_range" "244")]
4505 (define_expand "extendqisi2"
4506 [(set (match_operand:SI 0 "s_register_operand" "")
4507 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4510 if (!arm_arch4 && MEM_P (operands[1]))
4511 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4513 if (!arm_arch6 && !MEM_P (operands[1]))
4515 rtx t = gen_lowpart (SImode, operands[1]);
4516 rtx tmp = gen_reg_rtx (SImode);
4517 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4518 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4524 [(set (match_operand:SI 0 "register_operand" "")
4525 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4527 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4528 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4530 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4533 (define_insn "*arm_extendqisi"
4534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4535 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4536 "TARGET_ARM && arm_arch4 && !arm_arch6"
4540 [(set_attr "length" "8,4")
4541 (set_attr "type" "alu_shift,load_byte")
4542 (set_attr "predicable" "yes")
4543 (set_attr "pool_range" "*,256")
4544 (set_attr "neg_pool_range" "*,244")]
4547 (define_insn "*arm_extendqisi_v6"
4548 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4550 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4551 "TARGET_ARM && arm_arch6"
4555 [(set_attr "type" "alu_shift,load_byte")
4556 (set_attr "predicable" "yes")
4557 (set_attr "pool_range" "*,256")
4558 (set_attr "neg_pool_range" "*,244")]
4561 (define_insn "*arm_extendqisi2addsi"
4562 [(set (match_operand:SI 0 "s_register_operand" "=r")
4563 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4564 (match_operand:SI 2 "s_register_operand" "r")))]
4566 "sxtab%?\\t%0, %2, %1"
4567 [(set_attr "type" "alu_shift")
4568 (set_attr "insn" "xtab")
4569 (set_attr "predicable" "yes")]
4573 [(set (match_operand:SI 0 "register_operand" "")
4574 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4575 "TARGET_THUMB1 && reload_completed"
4576 [(set (match_dup 0) (match_dup 2))
4577 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4579 rtx addr = XEXP (operands[1], 0);
4581 if (GET_CODE (addr) == CONST)
4582 addr = XEXP (addr, 0);
4584 if (GET_CODE (addr) == PLUS
4585 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4586 /* No split necessary. */
4589 if (GET_CODE (addr) == PLUS
4590 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4593 if (reg_overlap_mentioned_p (operands[0], addr))
4595 rtx t = gen_lowpart (QImode, operands[0]);
4596 emit_move_insn (t, operands[1]);
4597 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4603 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4604 operands[2] = const0_rtx;
4606 else if (GET_CODE (addr) != PLUS)
4608 else if (REG_P (XEXP (addr, 0)))
4610 operands[2] = XEXP (addr, 1);
4611 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4615 operands[2] = XEXP (addr, 0);
4616 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4619 operands[3] = change_address (operands[1], QImode, addr);
4622 (define_insn "thumb1_extendqisi2"
4623 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4624 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4629 if (which_alternative == 0 && arm_arch6)
4630 return "sxtb\\t%0, %1";
4631 if (which_alternative == 0)
4634 addr = XEXP (operands[1], 0);
4635 if (GET_CODE (addr) == PLUS
4636 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4637 return "ldrsb\\t%0, %1";
4641 [(set_attr_alternative "length"
4642 [(if_then_else (eq_attr "is_arch6" "yes")
4643 (const_int 2) (const_int 4))
4645 (if_then_else (eq_attr "is_arch6" "yes")
4646 (const_int 4) (const_int 6))])
4647 (set_attr "type" "alu_shift,load_byte,load_byte")]
4650 (define_expand "extendsfdf2"
4651 [(set (match_operand:DF 0 "s_register_operand" "")
4652 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4653 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4657 /* HFmode -> DFmode conversions have to go through SFmode. */
4658 (define_expand "extendhfdf2"
4659 [(set (match_operand:DF 0 "general_operand" "")
4660 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4665 op1 = convert_to_mode (SFmode, operands[1], 0);
4666 op1 = convert_to_mode (DFmode, op1, 0);
4667 emit_insn (gen_movdf (operands[0], op1));
4672 ;; Move insns (including loads and stores)
4674 ;; XXX Just some ideas about movti.
4675 ;; I don't think these are a good idea on the arm, there just aren't enough
4677 ;;(define_expand "loadti"
4678 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4679 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4682 ;;(define_expand "storeti"
4683 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4684 ;; (match_operand:TI 1 "s_register_operand" ""))]
4687 ;;(define_expand "movti"
4688 ;; [(set (match_operand:TI 0 "general_operand" "")
4689 ;; (match_operand:TI 1 "general_operand" ""))]
4695 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4696 ;; operands[1] = copy_to_reg (operands[1]);
4697 ;; if (GET_CODE (operands[0]) == MEM)
4698 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4699 ;; else if (GET_CODE (operands[1]) == MEM)
4700 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4704 ;; emit_insn (insn);
4708 ;; Recognize garbage generated above.
4711 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4712 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4716 ;; register mem = (which_alternative < 3);
4717 ;; register const char *template;
4719 ;; operands[mem] = XEXP (operands[mem], 0);
4720 ;; switch (which_alternative)
4722 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4723 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4724 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4725 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4726 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4727 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4729 ;; output_asm_insn (template, operands);
4733 (define_expand "movdi"
4734 [(set (match_operand:DI 0 "general_operand" "")
4735 (match_operand:DI 1 "general_operand" ""))]
4738 if (can_create_pseudo_p ())
4740 if (GET_CODE (operands[0]) != REG)
4741 operands[1] = force_reg (DImode, operands[1]);
4746 (define_insn "*arm_movdi"
4747 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4748 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4750 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4752 && ( register_operand (operands[0], DImode)
4753 || register_operand (operands[1], DImode))"
4755 switch (which_alternative)
4762 return output_move_double (operands);
4765 [(set_attr "length" "8,12,16,8,8")
4766 (set_attr "type" "*,*,*,load2,store2")
4767 (set_attr "pool_range" "*,*,*,1020,*")
4768 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4772 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4773 (match_operand:ANY64 1 "const_double_operand" ""))]
4776 && (arm_const_double_inline_cost (operands[1])
4777 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4780 arm_split_constant (SET, SImode, curr_insn,
4781 INTVAL (gen_lowpart (SImode, operands[1])),
4782 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4783 arm_split_constant (SET, SImode, curr_insn,
4784 INTVAL (gen_highpart_mode (SImode,
4785 GET_MODE (operands[0]),
4787 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4792 ; If optimizing for size, or if we have load delay slots, then
4793 ; we want to split the constant into two separate operations.
4794 ; In both cases this may split a trivial part into a single data op
4795 ; leaving a single complex constant to load. We can also get longer
4796 ; offsets in a LDR which means we get better chances of sharing the pool
4797 ; entries. Finally, we can normally do a better job of scheduling
4798 ; LDR instructions than we can with LDM.
4799 ; This pattern will only match if the one above did not.
4801 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4802 (match_operand:ANY64 1 "const_double_operand" ""))]
4803 "TARGET_ARM && reload_completed
4804 && arm_const_double_by_parts (operands[1])"
4805 [(set (match_dup 0) (match_dup 1))
4806 (set (match_dup 2) (match_dup 3))]
4808 operands[2] = gen_highpart (SImode, operands[0]);
4809 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4811 operands[0] = gen_lowpart (SImode, operands[0]);
4812 operands[1] = gen_lowpart (SImode, operands[1]);
4817 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4818 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4819 "TARGET_EITHER && reload_completed"
4820 [(set (match_dup 0) (match_dup 1))
4821 (set (match_dup 2) (match_dup 3))]
4823 operands[2] = gen_highpart (SImode, operands[0]);
4824 operands[3] = gen_highpart (SImode, operands[1]);
4825 operands[0] = gen_lowpart (SImode, operands[0]);
4826 operands[1] = gen_lowpart (SImode, operands[1]);
4828 /* Handle a partial overlap. */
4829 if (rtx_equal_p (operands[0], operands[3]))
4831 rtx tmp0 = operands[0];
4832 rtx tmp1 = operands[1];
4834 operands[0] = operands[2];
4835 operands[1] = operands[3];
4842 ;; We can't actually do base+index doubleword loads if the index and
4843 ;; destination overlap. Split here so that we at least have chance to
4846 [(set (match_operand:DI 0 "s_register_operand" "")
4847 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4848 (match_operand:SI 2 "s_register_operand" ""))))]
4850 && reg_overlap_mentioned_p (operands[0], operands[1])
4851 && reg_overlap_mentioned_p (operands[0], operands[2])"
4853 (plus:SI (match_dup 1)
4856 (mem:DI (match_dup 4)))]
4858 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4862 ;;; ??? This should have alternatives for constants.
4863 ;;; ??? This was originally identical to the movdf_insn pattern.
4864 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4865 ;;; thumb_reorg with a memory reference.
4866 (define_insn "*thumb1_movdi_insn"
4867 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4868 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4870 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4871 && ( register_operand (operands[0], DImode)
4872 || register_operand (operands[1], DImode))"
4875 switch (which_alternative)
4879 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4880 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4881 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4883 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4885 operands[1] = GEN_INT (- INTVAL (operands[1]));
4886 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4888 return \"ldmia\\t%1, {%0, %H0}\";
4890 return \"stmia\\t%0, {%1, %H1}\";
4892 return thumb_load_double_from_address (operands);
4894 operands[2] = gen_rtx_MEM (SImode,
4895 plus_constant (XEXP (operands[0], 0), 4));
4896 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4899 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4900 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4901 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4904 [(set_attr "length" "4,4,6,2,2,6,4,4")
4905 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4906 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4909 (define_expand "movsi"
4910 [(set (match_operand:SI 0 "general_operand" "")
4911 (match_operand:SI 1 "general_operand" ""))]
4915 rtx base, offset, tmp;
4919 /* Everything except mem = const or mem = mem can be done easily. */
4920 if (GET_CODE (operands[0]) == MEM)
4921 operands[1] = force_reg (SImode, operands[1]);
4922 if (arm_general_register_operand (operands[0], SImode)
4923 && GET_CODE (operands[1]) == CONST_INT
4924 && !(const_ok_for_arm (INTVAL (operands[1]))
4925 || const_ok_for_arm (~INTVAL (operands[1]))))
4927 arm_split_constant (SET, SImode, NULL_RTX,
4928 INTVAL (operands[1]), operands[0], NULL_RTX,
4929 optimize && can_create_pseudo_p ());
4933 if (TARGET_USE_MOVT && !target_word_relocations
4934 && GET_CODE (operands[1]) == SYMBOL_REF
4935 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4937 arm_emit_movpair (operands[0], operands[1]);
4941 else /* TARGET_THUMB1... */
4943 if (can_create_pseudo_p ())
4945 if (GET_CODE (operands[0]) != REG)
4946 operands[1] = force_reg (SImode, operands[1]);
4950 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4952 split_const (operands[1], &base, &offset);
4953 if (GET_CODE (base) == SYMBOL_REF
4954 && !offset_within_block_p (base, INTVAL (offset)))
4956 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4957 emit_move_insn (tmp, base);
4958 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4963 /* Recognize the case where operand[1] is a reference to thread-local
4964 data and load its address to a register. */
4965 if (arm_tls_referenced_p (operands[1]))
4967 rtx tmp = operands[1];
4970 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4972 addend = XEXP (XEXP (tmp, 0), 1);
4973 tmp = XEXP (XEXP (tmp, 0), 0);
4976 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4977 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4979 tmp = legitimize_tls_address (tmp,
4980 !can_create_pseudo_p () ? operands[0] : 0);
4983 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4984 tmp = force_operand (tmp, operands[0]);
4989 && (CONSTANT_P (operands[1])
4990 || symbol_mentioned_p (operands[1])
4991 || label_mentioned_p (operands[1])))
4992 operands[1] = legitimize_pic_address (operands[1], SImode,
4993 (!can_create_pseudo_p ()
5000 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5001 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5002 ;; so this does not matter.
5003 (define_insn "*arm_movt"
5004 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5005 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5006 (match_operand:SI 2 "general_operand" "i")))]
5008 "movt%?\t%0, #:upper16:%c2"
5009 [(set_attr "predicable" "yes")
5010 (set_attr "length" "4")]
5013 (define_insn "*arm_movsi_insn"
5014 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5015 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5016 "TARGET_ARM && ! TARGET_IWMMXT
5017 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5018 && ( register_operand (operands[0], SImode)
5019 || register_operand (operands[1], SImode))"
5027 [(set_attr "type" "*,*,*,*,load1,store1")
5028 (set_attr "predicable" "yes")
5029 (set_attr "pool_range" "*,*,*,*,4096,*")
5030 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5034 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5035 (match_operand:SI 1 "const_int_operand" ""))]
5037 && (!(const_ok_for_arm (INTVAL (operands[1]))
5038 || const_ok_for_arm (~INTVAL (operands[1]))))"
5039 [(clobber (const_int 0))]
5041 arm_split_constant (SET, SImode, NULL_RTX,
5042 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5047 (define_insn "*thumb1_movsi_insn"
5048 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5049 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5051 && ( register_operand (operands[0], SImode)
5052 || register_operand (operands[1], SImode))"
5063 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5064 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5065 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5069 [(set (match_operand:SI 0 "register_operand" "")
5070 (match_operand:SI 1 "const_int_operand" ""))]
5071 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5072 [(set (match_dup 0) (match_dup 1))
5073 (set (match_dup 0) (neg:SI (match_dup 0)))]
5074 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5078 [(set (match_operand:SI 0 "register_operand" "")
5079 (match_operand:SI 1 "const_int_operand" ""))]
5080 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5081 [(set (match_dup 0) (match_dup 1))
5082 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5085 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5086 unsigned HOST_WIDE_INT mask = 0xff;
5089 for (i = 0; i < 25; i++)
5090 if ((val & (mask << i)) == val)
5093 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5097 operands[1] = GEN_INT (val >> i);
5098 operands[2] = GEN_INT (i);
5102 ;; When generating pic, we need to load the symbol offset into a register.
5103 ;; So that the optimizer does not confuse this with a normal symbol load
5104 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5105 ;; since that is the only type of relocation we can use.
5107 ;; The rather odd constraints on the following are to force reload to leave
5108 ;; the insn alone, and to force the minipool generation pass to then move
5109 ;; the GOT symbol to memory.
5111 (define_insn "pic_load_addr_32bit"
5112 [(set (match_operand:SI 0 "s_register_operand" "=r")
5113 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5114 "TARGET_32BIT && flag_pic"
5116 [(set_attr "type" "load1")
5117 (set_attr "pool_range" "4096")
5118 (set (attr "neg_pool_range")
5119 (if_then_else (eq_attr "is_thumb" "no")
5124 (define_insn "pic_load_addr_thumb1"
5125 [(set (match_operand:SI 0 "s_register_operand" "=l")
5126 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5127 "TARGET_THUMB1 && flag_pic"
5129 [(set_attr "type" "load1")
5130 (set (attr "pool_range") (const_int 1024))]
5133 (define_insn "pic_add_dot_plus_four"
5134 [(set (match_operand:SI 0 "register_operand" "=r")
5135 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5137 (match_operand 2 "" "")]
5141 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5142 INTVAL (operands[2]));
5143 return \"add\\t%0, %|pc\";
5145 [(set_attr "length" "2")]
5148 (define_insn "pic_add_dot_plus_eight"
5149 [(set (match_operand:SI 0 "register_operand" "=r")
5150 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5152 (match_operand 2 "" "")]
5156 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5157 INTVAL (operands[2]));
5158 return \"add%?\\t%0, %|pc, %1\";
5160 [(set_attr "predicable" "yes")]
5163 (define_insn "tls_load_dot_plus_eight"
5164 [(set (match_operand:SI 0 "register_operand" "=r")
5165 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5167 (match_operand 2 "" "")]
5171 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5172 INTVAL (operands[2]));
5173 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5175 [(set_attr "predicable" "yes")]
5178 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5179 ;; followed by a load. These sequences can be crunched down to
5180 ;; tls_load_dot_plus_eight by a peephole.
5183 [(set (match_operand:SI 0 "register_operand" "")
5184 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5186 (match_operand 1 "" "")]
5188 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5189 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5191 (mem:SI (unspec:SI [(match_dup 3)
5198 (define_insn "pic_offset_arm"
5199 [(set (match_operand:SI 0 "register_operand" "=r")
5200 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5201 (unspec:SI [(match_operand:SI 2 "" "X")]
5202 UNSPEC_PIC_OFFSET))))]
5203 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5204 "ldr%?\\t%0, [%1,%2]"
5205 [(set_attr "type" "load1")]
5208 (define_expand "builtin_setjmp_receiver"
5209 [(label_ref (match_operand 0 "" ""))]
5213 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5215 if (arm_pic_register != INVALID_REGNUM)
5216 arm_load_pic_register (1UL << 3);
5220 ;; If copying one reg to another we can set the condition codes according to
5221 ;; its value. Such a move is common after a return from subroutine and the
5222 ;; result is being tested against zero.
5224 (define_insn "*movsi_compare0"
5225 [(set (reg:CC CC_REGNUM)
5226 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5228 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5234 [(set_attr "conds" "set")]
5237 ;; Subroutine to store a half word from a register into memory.
5238 ;; Operand 0 is the source register (HImode)
5239 ;; Operand 1 is the destination address in a register (SImode)
5241 ;; In both this routine and the next, we must be careful not to spill
5242 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5243 ;; can generate unrecognizable rtl.
5245 (define_expand "storehi"
5246 [;; store the low byte
5247 (set (match_operand 1 "" "") (match_dup 3))
5248 ;; extract the high byte
5250 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5251 ;; store the high byte
5252 (set (match_dup 4) (match_dup 5))]
5256 rtx op1 = operands[1];
5257 rtx addr = XEXP (op1, 0);
5258 enum rtx_code code = GET_CODE (addr);
5260 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5262 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5264 operands[4] = adjust_address (op1, QImode, 1);
5265 operands[1] = adjust_address (operands[1], QImode, 0);
5266 operands[3] = gen_lowpart (QImode, operands[0]);
5267 operands[0] = gen_lowpart (SImode, operands[0]);
5268 operands[2] = gen_reg_rtx (SImode);
5269 operands[5] = gen_lowpart (QImode, operands[2]);
5273 (define_expand "storehi_bigend"
5274 [(set (match_dup 4) (match_dup 3))
5276 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5277 (set (match_operand 1 "" "") (match_dup 5))]
5281 rtx op1 = operands[1];
5282 rtx addr = XEXP (op1, 0);
5283 enum rtx_code code = GET_CODE (addr);
5285 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5287 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5289 operands[4] = adjust_address (op1, QImode, 1);
5290 operands[1] = adjust_address (operands[1], QImode, 0);
5291 operands[3] = gen_lowpart (QImode, operands[0]);
5292 operands[0] = gen_lowpart (SImode, operands[0]);
5293 operands[2] = gen_reg_rtx (SImode);
5294 operands[5] = gen_lowpart (QImode, operands[2]);
5298 ;; Subroutine to store a half word integer constant into memory.
5299 (define_expand "storeinthi"
5300 [(set (match_operand 0 "" "")
5301 (match_operand 1 "" ""))
5302 (set (match_dup 3) (match_dup 2))]
5306 HOST_WIDE_INT value = INTVAL (operands[1]);
5307 rtx addr = XEXP (operands[0], 0);
5308 rtx op0 = operands[0];
5309 enum rtx_code code = GET_CODE (addr);
5311 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5313 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5315 operands[1] = gen_reg_rtx (SImode);
5316 if (BYTES_BIG_ENDIAN)
5318 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5319 if ((value & 255) == ((value >> 8) & 255))
5320 operands[2] = operands[1];
5323 operands[2] = gen_reg_rtx (SImode);
5324 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5329 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5330 if ((value & 255) == ((value >> 8) & 255))
5331 operands[2] = operands[1];
5334 operands[2] = gen_reg_rtx (SImode);
5335 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5339 operands[3] = adjust_address (op0, QImode, 1);
5340 operands[0] = adjust_address (operands[0], QImode, 0);
5341 operands[2] = gen_lowpart (QImode, operands[2]);
5342 operands[1] = gen_lowpart (QImode, operands[1]);
5346 (define_expand "storehi_single_op"
5347 [(set (match_operand:HI 0 "memory_operand" "")
5348 (match_operand:HI 1 "general_operand" ""))]
5349 "TARGET_32BIT && arm_arch4"
5351 if (!s_register_operand (operands[1], HImode))
5352 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5356 (define_expand "movhi"
5357 [(set (match_operand:HI 0 "general_operand" "")
5358 (match_operand:HI 1 "general_operand" ""))]
5363 if (can_create_pseudo_p ())
5365 if (GET_CODE (operands[0]) == MEM)
5369 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5372 if (GET_CODE (operands[1]) == CONST_INT)
5373 emit_insn (gen_storeinthi (operands[0], operands[1]));
5376 if (GET_CODE (operands[1]) == MEM)
5377 operands[1] = force_reg (HImode, operands[1]);
5378 if (BYTES_BIG_ENDIAN)
5379 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5381 emit_insn (gen_storehi (operands[1], operands[0]));
5385 /* Sign extend a constant, and keep it in an SImode reg. */
5386 else if (GET_CODE (operands[1]) == CONST_INT)
5388 rtx reg = gen_reg_rtx (SImode);
5389 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5391 /* If the constant is already valid, leave it alone. */
5392 if (!const_ok_for_arm (val))
5394 /* If setting all the top bits will make the constant
5395 loadable in a single instruction, then set them.
5396 Otherwise, sign extend the number. */
5398 if (const_ok_for_arm (~(val | ~0xffff)))
5400 else if (val & 0x8000)
5404 emit_insn (gen_movsi (reg, GEN_INT (val)));
5405 operands[1] = gen_lowpart (HImode, reg);
5407 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5408 && GET_CODE (operands[1]) == MEM)
5410 rtx reg = gen_reg_rtx (SImode);
5412 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5413 operands[1] = gen_lowpart (HImode, reg);
5415 else if (!arm_arch4)
5417 if (GET_CODE (operands[1]) == MEM)
5420 rtx offset = const0_rtx;
5421 rtx reg = gen_reg_rtx (SImode);
5423 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5424 || (GET_CODE (base) == PLUS
5425 && (GET_CODE (offset = XEXP (base, 1))
5427 && ((INTVAL(offset) & 1) != 1)
5428 && GET_CODE (base = XEXP (base, 0)) == REG))
5429 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5433 new_rtx = widen_memory_access (operands[1], SImode,
5434 ((INTVAL (offset) & ~3)
5435 - INTVAL (offset)));
5436 emit_insn (gen_movsi (reg, new_rtx));
5437 if (((INTVAL (offset) & 2) != 0)
5438 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5440 rtx reg2 = gen_reg_rtx (SImode);
5442 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5447 emit_insn (gen_movhi_bytes (reg, operands[1]));
5449 operands[1] = gen_lowpart (HImode, reg);
5453 /* Handle loading a large integer during reload. */
5454 else if (GET_CODE (operands[1]) == CONST_INT
5455 && !const_ok_for_arm (INTVAL (operands[1]))
5456 && !const_ok_for_arm (~INTVAL (operands[1])))
5458 /* Writing a constant to memory needs a scratch, which should
5459 be handled with SECONDARY_RELOADs. */
5460 gcc_assert (GET_CODE (operands[0]) == REG);
5462 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5463 emit_insn (gen_movsi (operands[0], operands[1]));
5467 else if (TARGET_THUMB2)
5469 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5470 if (can_create_pseudo_p ())
5472 if (GET_CODE (operands[0]) != REG)
5473 operands[1] = force_reg (HImode, operands[1]);
5474 /* Zero extend a constant, and keep it in an SImode reg. */
5475 else if (GET_CODE (operands[1]) == CONST_INT)
5477 rtx reg = gen_reg_rtx (SImode);
5478 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5480 emit_insn (gen_movsi (reg, GEN_INT (val)));
5481 operands[1] = gen_lowpart (HImode, reg);
5485 else /* TARGET_THUMB1 */
5487 if (can_create_pseudo_p ())
5489 if (GET_CODE (operands[1]) == CONST_INT)
5491 rtx reg = gen_reg_rtx (SImode);
5493 emit_insn (gen_movsi (reg, operands[1]));
5494 operands[1] = gen_lowpart (HImode, reg);
5497 /* ??? We shouldn't really get invalid addresses here, but this can
5498 happen if we are passed a SP (never OK for HImode/QImode) or
5499 virtual register (also rejected as illegitimate for HImode/QImode)
5500 relative address. */
5501 /* ??? This should perhaps be fixed elsewhere, for instance, in
5502 fixup_stack_1, by checking for other kinds of invalid addresses,
5503 e.g. a bare reference to a virtual register. This may confuse the
5504 alpha though, which must handle this case differently. */
5505 if (GET_CODE (operands[0]) == MEM
5506 && !memory_address_p (GET_MODE (operands[0]),
5507 XEXP (operands[0], 0)))
5509 = replace_equiv_address (operands[0],
5510 copy_to_reg (XEXP (operands[0], 0)));
5512 if (GET_CODE (operands[1]) == MEM
5513 && !memory_address_p (GET_MODE (operands[1]),
5514 XEXP (operands[1], 0)))
5516 = replace_equiv_address (operands[1],
5517 copy_to_reg (XEXP (operands[1], 0)));
5519 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5521 rtx reg = gen_reg_rtx (SImode);
5523 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5524 operands[1] = gen_lowpart (HImode, reg);
5527 if (GET_CODE (operands[0]) == MEM)
5528 operands[1] = force_reg (HImode, operands[1]);
5530 else if (GET_CODE (operands[1]) == CONST_INT
5531 && !satisfies_constraint_I (operands[1]))
5533 /* Handle loading a large integer during reload. */
5535 /* Writing a constant to memory needs a scratch, which should
5536 be handled with SECONDARY_RELOADs. */
5537 gcc_assert (GET_CODE (operands[0]) == REG);
5539 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5540 emit_insn (gen_movsi (operands[0], operands[1]));
5547 (define_insn "*thumb1_movhi_insn"
5548 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5549 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5551 && ( register_operand (operands[0], HImode)
5552 || register_operand (operands[1], HImode))"
5554 switch (which_alternative)
5556 case 0: return \"add %0, %1, #0\";
5557 case 2: return \"strh %1, %0\";
5558 case 3: return \"mov %0, %1\";
5559 case 4: return \"mov %0, %1\";
5560 case 5: return \"mov %0, %1\";
5561 default: gcc_unreachable ();
5563 /* The stack pointer can end up being taken as an index register.
5564 Catch this case here and deal with it. */
5565 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5566 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5567 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5570 ops[0] = operands[0];
5571 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5573 output_asm_insn (\"mov %0, %1\", ops);
5575 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5578 return \"ldrh %0, %1\";
5580 [(set_attr "length" "2,4,2,2,2,2")
5581 (set_attr "type" "*,load1,store1,*,*,*")]
5585 (define_expand "movhi_bytes"
5586 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5588 (zero_extend:SI (match_dup 6)))
5589 (set (match_operand:SI 0 "" "")
5590 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5595 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5597 mem1 = change_address (operands[1], QImode, addr);
5598 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5599 operands[0] = gen_lowpart (SImode, operands[0]);
5601 operands[2] = gen_reg_rtx (SImode);
5602 operands[3] = gen_reg_rtx (SImode);
5605 if (BYTES_BIG_ENDIAN)
5607 operands[4] = operands[2];
5608 operands[5] = operands[3];
5612 operands[4] = operands[3];
5613 operands[5] = operands[2];
5618 (define_expand "movhi_bigend"
5620 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5623 (ashiftrt:SI (match_dup 2) (const_int 16)))
5624 (set (match_operand:HI 0 "s_register_operand" "")
5628 operands[2] = gen_reg_rtx (SImode);
5629 operands[3] = gen_reg_rtx (SImode);
5630 operands[4] = gen_lowpart (HImode, operands[3]);
5634 ;; Pattern to recognize insn generated default case above
5635 (define_insn "*movhi_insn_arch4"
5636 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5637 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5640 && (GET_CODE (operands[1]) != CONST_INT
5641 || const_ok_for_arm (INTVAL (operands[1]))
5642 || const_ok_for_arm (~INTVAL (operands[1])))"
5644 mov%?\\t%0, %1\\t%@ movhi
5645 mvn%?\\t%0, #%B1\\t%@ movhi
5646 str%(h%)\\t%1, %0\\t%@ movhi
5647 ldr%(h%)\\t%0, %1\\t%@ movhi"
5648 [(set_attr "type" "*,*,store1,load1")
5649 (set_attr "predicable" "yes")
5650 (set_attr "pool_range" "*,*,*,256")
5651 (set_attr "neg_pool_range" "*,*,*,244")]
5654 (define_insn "*movhi_bytes"
5655 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5656 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5659 mov%?\\t%0, %1\\t%@ movhi
5660 mvn%?\\t%0, #%B1\\t%@ movhi"
5661 [(set_attr "predicable" "yes")]
5664 (define_expand "thumb_movhi_clobber"
5665 [(set (match_operand:HI 0 "memory_operand" "")
5666 (match_operand:HI 1 "register_operand" ""))
5667 (clobber (match_operand:DI 2 "register_operand" ""))]
5670 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5671 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5673 emit_insn (gen_movhi (operands[0], operands[1]));
5676 /* XXX Fixme, need to handle other cases here as well. */
5681 ;; We use a DImode scratch because we may occasionally need an additional
5682 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5683 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5684 (define_expand "reload_outhi"
5685 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5686 (match_operand:HI 1 "s_register_operand" "r")
5687 (match_operand:DI 2 "s_register_operand" "=&l")])]
5690 arm_reload_out_hi (operands);
5692 thumb_reload_out_hi (operands);
5697 (define_expand "reload_inhi"
5698 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5699 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5700 (match_operand:DI 2 "s_register_operand" "=&r")])]
5704 arm_reload_in_hi (operands);
5706 thumb_reload_out_hi (operands);
5710 (define_expand "movqi"
5711 [(set (match_operand:QI 0 "general_operand" "")
5712 (match_operand:QI 1 "general_operand" ""))]
5715 /* Everything except mem = const or mem = mem can be done easily */
5717 if (can_create_pseudo_p ())
5719 if (GET_CODE (operands[1]) == CONST_INT)
5721 rtx reg = gen_reg_rtx (SImode);
5723 /* For thumb we want an unsigned immediate, then we are more likely
5724 to be able to use a movs insn. */
5726 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5728 emit_insn (gen_movsi (reg, operands[1]));
5729 operands[1] = gen_lowpart (QImode, reg);
5734 /* ??? We shouldn't really get invalid addresses here, but this can
5735 happen if we are passed a SP (never OK for HImode/QImode) or
5736 virtual register (also rejected as illegitimate for HImode/QImode)
5737 relative address. */
5738 /* ??? This should perhaps be fixed elsewhere, for instance, in
5739 fixup_stack_1, by checking for other kinds of invalid addresses,
5740 e.g. a bare reference to a virtual register. This may confuse the
5741 alpha though, which must handle this case differently. */
5742 if (GET_CODE (operands[0]) == MEM
5743 && !memory_address_p (GET_MODE (operands[0]),
5744 XEXP (operands[0], 0)))
5746 = replace_equiv_address (operands[0],
5747 copy_to_reg (XEXP (operands[0], 0)));
5748 if (GET_CODE (operands[1]) == MEM
5749 && !memory_address_p (GET_MODE (operands[1]),
5750 XEXP (operands[1], 0)))
5752 = replace_equiv_address (operands[1],
5753 copy_to_reg (XEXP (operands[1], 0)));
5756 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5758 rtx reg = gen_reg_rtx (SImode);
5760 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5761 operands[1] = gen_lowpart (QImode, reg);
5764 if (GET_CODE (operands[0]) == MEM)
5765 operands[1] = force_reg (QImode, operands[1]);
5767 else if (TARGET_THUMB
5768 && GET_CODE (operands[1]) == CONST_INT
5769 && !satisfies_constraint_I (operands[1]))
5771 /* Handle loading a large integer during reload. */
5773 /* Writing a constant to memory needs a scratch, which should
5774 be handled with SECONDARY_RELOADs. */
5775 gcc_assert (GET_CODE (operands[0]) == REG);
5777 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5778 emit_insn (gen_movsi (operands[0], operands[1]));
5785 (define_insn "*arm_movqi_insn"
5786 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5787 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5789 && ( register_operand (operands[0], QImode)
5790 || register_operand (operands[1], QImode))"
5796 [(set_attr "type" "*,*,load1,store1")
5797 (set_attr "predicable" "yes")]
5800 (define_insn "*thumb1_movqi_insn"
5801 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5802 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5804 && ( register_operand (operands[0], QImode)
5805 || register_operand (operands[1], QImode))"
5813 [(set_attr "length" "2")
5814 (set_attr "type" "*,load1,store1,*,*,*")
5815 (set_attr "pool_range" "*,32,*,*,*,*")]
5819 (define_expand "movhf"
5820 [(set (match_operand:HF 0 "general_operand" "")
5821 (match_operand:HF 1 "general_operand" ""))]
5826 if (GET_CODE (operands[0]) == MEM)
5827 operands[1] = force_reg (HFmode, operands[1]);
5829 else /* TARGET_THUMB1 */
5831 if (can_create_pseudo_p ())
5833 if (GET_CODE (operands[0]) != REG)
5834 operands[1] = force_reg (HFmode, operands[1]);
5840 (define_insn "*arm32_movhf"
5841 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5842 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5843 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5844 && ( s_register_operand (operands[0], HFmode)
5845 || s_register_operand (operands[1], HFmode))"
5847 switch (which_alternative)
5849 case 0: /* ARM register from memory */
5850 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5851 case 1: /* memory from ARM register */
5852 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5853 case 2: /* ARM register from ARM register */
5854 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5855 case 3: /* ARM register from constant */
5861 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5862 bits = real_to_target (NULL, &r, HFmode);
5863 ops[0] = operands[0];
5864 ops[1] = GEN_INT (bits);
5865 ops[2] = GEN_INT (bits & 0xff00);
5866 ops[3] = GEN_INT (bits & 0x00ff);
5868 if (arm_arch_thumb2)
5869 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5871 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5878 [(set_attr "conds" "unconditional")
5879 (set_attr "type" "load1,store1,*,*")
5880 (set_attr "length" "4,4,4,8")
5881 (set_attr "predicable" "yes")
5885 (define_insn "*thumb1_movhf"
5886 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
5887 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
5889 && ( s_register_operand (operands[0], HFmode)
5890 || s_register_operand (operands[1], HFmode))"
5892 switch (which_alternative)
5897 gcc_assert (GET_CODE(operands[1]) == MEM);
5898 addr = XEXP (operands[1], 0);
5899 if (GET_CODE (addr) == LABEL_REF
5900 || (GET_CODE (addr) == CONST
5901 && GET_CODE (XEXP (addr, 0)) == PLUS
5902 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
5903 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
5905 /* Constant pool entry. */
5906 return \"ldr\\t%0, %1\";
5908 return \"ldrh\\t%0, %1\";
5910 case 2: return \"strh\\t%1, %0\";
5911 default: return \"mov\\t%0, %1\";
5914 [(set_attr "length" "2")
5915 (set_attr "type" "*,load1,store1,*,*")
5916 (set_attr "pool_range" "*,1020,*,*,*")]
5919 (define_expand "movsf"
5920 [(set (match_operand:SF 0 "general_operand" "")
5921 (match_operand:SF 1 "general_operand" ""))]
5926 if (GET_CODE (operands[0]) == MEM)
5927 operands[1] = force_reg (SFmode, operands[1]);
5929 else /* TARGET_THUMB1 */
5931 if (can_create_pseudo_p ())
5933 if (GET_CODE (operands[0]) != REG)
5934 operands[1] = force_reg (SFmode, operands[1]);
5940 ;; Transform a floating-point move of a constant into a core register into
5941 ;; an SImode operation.
5943 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5944 (match_operand:SF 1 "immediate_operand" ""))]
5947 && GET_CODE (operands[1]) == CONST_DOUBLE"
5948 [(set (match_dup 2) (match_dup 3))]
5950 operands[2] = gen_lowpart (SImode, operands[0]);
5951 operands[3] = gen_lowpart (SImode, operands[1]);
5952 if (operands[2] == 0 || operands[3] == 0)
5957 (define_insn "*arm_movsf_soft_insn"
5958 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5959 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5961 && TARGET_SOFT_FLOAT
5962 && (GET_CODE (operands[0]) != MEM
5963 || register_operand (operands[1], SFmode))"
5966 ldr%?\\t%0, %1\\t%@ float
5967 str%?\\t%1, %0\\t%@ float"
5968 [(set_attr "length" "4,4,4")
5969 (set_attr "predicable" "yes")
5970 (set_attr "type" "*,load1,store1")
5971 (set_attr "pool_range" "*,4096,*")
5972 (set_attr "neg_pool_range" "*,4084,*")]
5975 ;;; ??? This should have alternatives for constants.
5976 (define_insn "*thumb1_movsf_insn"
5977 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5978 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5980 && ( register_operand (operands[0], SFmode)
5981 || register_operand (operands[1], SFmode))"
5990 [(set_attr "length" "2")
5991 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5992 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5995 (define_expand "movdf"
5996 [(set (match_operand:DF 0 "general_operand" "")
5997 (match_operand:DF 1 "general_operand" ""))]
6002 if (GET_CODE (operands[0]) == MEM)
6003 operands[1] = force_reg (DFmode, operands[1]);
6005 else /* TARGET_THUMB */
6007 if (can_create_pseudo_p ())
6009 if (GET_CODE (operands[0]) != REG)
6010 operands[1] = force_reg (DFmode, operands[1]);
6016 ;; Reloading a df mode value stored in integer regs to memory can require a
6018 (define_expand "reload_outdf"
6019 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6020 (match_operand:DF 1 "s_register_operand" "r")
6021 (match_operand:SI 2 "s_register_operand" "=&r")]
6025 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6028 operands[2] = XEXP (operands[0], 0);
6029 else if (code == POST_INC || code == PRE_DEC)
6031 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6032 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6033 emit_insn (gen_movdi (operands[0], operands[1]));
6036 else if (code == PRE_INC)
6038 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6040 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6043 else if (code == POST_DEC)
6044 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6046 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6047 XEXP (XEXP (operands[0], 0), 1)));
6049 emit_insn (gen_rtx_SET (VOIDmode,
6050 replace_equiv_address (operands[0], operands[2]),
6053 if (code == POST_DEC)
6054 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6060 (define_insn "*movdf_soft_insn"
6061 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6062 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6063 "TARGET_ARM && TARGET_SOFT_FLOAT
6064 && ( register_operand (operands[0], DFmode)
6065 || register_operand (operands[1], DFmode))"
6067 switch (which_alternative)
6074 return output_move_double (operands);
6077 [(set_attr "length" "8,12,16,8,8")
6078 (set_attr "type" "*,*,*,load2,store2")
6079 (set_attr "pool_range" "1020")
6080 (set_attr "neg_pool_range" "1008")]
6083 ;;; ??? This should have alternatives for constants.
6084 ;;; ??? This was originally identical to the movdi_insn pattern.
6085 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6086 ;;; thumb_reorg with a memory reference.
6087 (define_insn "*thumb_movdf_insn"
6088 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6089 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6091 && ( register_operand (operands[0], DFmode)
6092 || register_operand (operands[1], DFmode))"
6094 switch (which_alternative)
6098 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6099 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6100 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6102 return \"ldmia\\t%1, {%0, %H0}\";
6104 return \"stmia\\t%0, {%1, %H1}\";
6106 return thumb_load_double_from_address (operands);
6108 operands[2] = gen_rtx_MEM (SImode,
6109 plus_constant (XEXP (operands[0], 0), 4));
6110 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6113 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6114 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6115 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6118 [(set_attr "length" "4,2,2,6,4,4")
6119 (set_attr "type" "*,load2,store2,load2,store2,*")
6120 (set_attr "pool_range" "*,*,*,1020,*,*")]
6123 (define_expand "movxf"
6124 [(set (match_operand:XF 0 "general_operand" "")
6125 (match_operand:XF 1 "general_operand" ""))]
6126 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6128 if (GET_CODE (operands[0]) == MEM)
6129 operands[1] = force_reg (XFmode, operands[1]);
6135 ;; load- and store-multiple insns
6136 ;; The arm can load/store any set of registers, provided that they are in
6137 ;; ascending order; but that is beyond GCC so stick with what it knows.
6139 (define_expand "load_multiple"
6140 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6141 (match_operand:SI 1 "" ""))
6142 (use (match_operand:SI 2 "" ""))])]
6145 HOST_WIDE_INT offset = 0;
6147 /* Support only fixed point registers. */
6148 if (GET_CODE (operands[2]) != CONST_INT
6149 || INTVAL (operands[2]) > 14
6150 || INTVAL (operands[2]) < 2
6151 || GET_CODE (operands[1]) != MEM
6152 || GET_CODE (operands[0]) != REG
6153 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6154 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6158 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6159 force_reg (SImode, XEXP (operands[1], 0)),
6160 TRUE, FALSE, operands[1], &offset);
6163 ;; Load multiple with write-back
6165 (define_insn "*ldmsi_postinc4"
6166 [(match_parallel 0 "load_multiple_operation"
6167 [(set (match_operand:SI 1 "s_register_operand" "=r")
6168 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6170 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6171 (mem:SI (match_dup 2)))
6172 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6173 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6174 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6175 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6176 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6177 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6178 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6179 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6180 [(set_attr "type" "load4")
6181 (set_attr "predicable" "yes")]
6184 (define_insn "*ldmsi_postinc4_thumb1"
6185 [(match_parallel 0 "load_multiple_operation"
6186 [(set (match_operand:SI 1 "s_register_operand" "=l")
6187 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6189 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6190 (mem:SI (match_dup 2)))
6191 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6192 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6193 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6194 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6195 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6196 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6197 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6198 "ldmia\\t%1!, {%3, %4, %5, %6}"
6199 [(set_attr "type" "load4")]
6202 (define_insn "*ldmsi_postinc3"
6203 [(match_parallel 0 "load_multiple_operation"
6204 [(set (match_operand:SI 1 "s_register_operand" "=r")
6205 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6207 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6208 (mem:SI (match_dup 2)))
6209 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6210 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6211 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6212 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6213 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6214 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6215 [(set_attr "type" "load3")
6216 (set_attr "predicable" "yes")]
6219 (define_insn "*ldmsi_postinc2"
6220 [(match_parallel 0 "load_multiple_operation"
6221 [(set (match_operand:SI 1 "s_register_operand" "=r")
6222 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6224 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6225 (mem:SI (match_dup 2)))
6226 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6227 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6228 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6229 "ldm%(ia%)\\t%1!, {%3, %4}"
6230 [(set_attr "type" "load2")
6231 (set_attr "predicable" "yes")]
6234 ;; Ordinary load multiple
6236 (define_insn "*ldmsi4"
6237 [(match_parallel 0 "load_multiple_operation"
6238 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6239 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6240 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6241 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6242 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6243 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6244 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6245 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6246 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6247 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6248 [(set_attr "type" "load4")
6249 (set_attr "predicable" "yes")]
6252 (define_insn "*ldmsi3"
6253 [(match_parallel 0 "load_multiple_operation"
6254 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6255 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6256 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6257 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6258 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6259 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6260 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6261 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6262 [(set_attr "type" "load3")
6263 (set_attr "predicable" "yes")]
6266 (define_insn "*ldmsi2"
6267 [(match_parallel 0 "load_multiple_operation"
6268 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6269 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6270 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6271 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6272 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6273 "ldm%(ia%)\\t%1, {%2, %3}"
6274 [(set_attr "type" "load2")
6275 (set_attr "predicable" "yes")]
6278 (define_expand "store_multiple"
6279 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6280 (match_operand:SI 1 "" ""))
6281 (use (match_operand:SI 2 "" ""))])]
6284 HOST_WIDE_INT offset = 0;
6286 /* Support only fixed point registers. */
6287 if (GET_CODE (operands[2]) != CONST_INT
6288 || INTVAL (operands[2]) > 14
6289 || INTVAL (operands[2]) < 2
6290 || GET_CODE (operands[1]) != REG
6291 || GET_CODE (operands[0]) != MEM
6292 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6293 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6297 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6298 force_reg (SImode, XEXP (operands[0], 0)),
6299 TRUE, FALSE, operands[0], &offset);
6302 ;; Store multiple with write-back
6304 (define_insn "*stmsi_postinc4"
6305 [(match_parallel 0 "store_multiple_operation"
6306 [(set (match_operand:SI 1 "s_register_operand" "=r")
6307 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6309 (set (mem:SI (match_dup 2))
6310 (match_operand:SI 3 "arm_hard_register_operand" ""))
6311 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6312 (match_operand:SI 4 "arm_hard_register_operand" ""))
6313 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6314 (match_operand:SI 5 "arm_hard_register_operand" ""))
6315 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6316 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6317 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6318 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6319 [(set_attr "predicable" "yes")
6320 (set_attr "type" "store4")]
6323 (define_insn "*stmsi_postinc4_thumb1"
6324 [(match_parallel 0 "store_multiple_operation"
6325 [(set (match_operand:SI 1 "s_register_operand" "=l")
6326 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6328 (set (mem:SI (match_dup 2))
6329 (match_operand:SI 3 "arm_hard_register_operand" ""))
6330 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6331 (match_operand:SI 4 "arm_hard_register_operand" ""))
6332 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6333 (match_operand:SI 5 "arm_hard_register_operand" ""))
6334 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6335 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6336 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6337 "stmia\\t%1!, {%3, %4, %5, %6}"
6338 [(set_attr "type" "store4")]
6341 (define_insn "*stmsi_postinc3"
6342 [(match_parallel 0 "store_multiple_operation"
6343 [(set (match_operand:SI 1 "s_register_operand" "=r")
6344 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6346 (set (mem:SI (match_dup 2))
6347 (match_operand:SI 3 "arm_hard_register_operand" ""))
6348 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6349 (match_operand:SI 4 "arm_hard_register_operand" ""))
6350 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6351 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6352 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6353 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6354 [(set_attr "predicable" "yes")
6355 (set_attr "type" "store3")]
6358 (define_insn "*stmsi_postinc2"
6359 [(match_parallel 0 "store_multiple_operation"
6360 [(set (match_operand:SI 1 "s_register_operand" "=r")
6361 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6363 (set (mem:SI (match_dup 2))
6364 (match_operand:SI 3 "arm_hard_register_operand" ""))
6365 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6366 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6367 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6368 "stm%(ia%)\\t%1!, {%3, %4}"
6369 [(set_attr "predicable" "yes")
6370 (set_attr "type" "store2")]
6373 ;; Ordinary store multiple
6375 (define_insn "*stmsi4"
6376 [(match_parallel 0 "store_multiple_operation"
6377 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6378 (match_operand:SI 2 "arm_hard_register_operand" ""))
6379 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6380 (match_operand:SI 3 "arm_hard_register_operand" ""))
6381 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6382 (match_operand:SI 4 "arm_hard_register_operand" ""))
6383 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6384 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6385 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6386 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6387 [(set_attr "predicable" "yes")
6388 (set_attr "type" "store4")]
6391 (define_insn "*stmsi3"
6392 [(match_parallel 0 "store_multiple_operation"
6393 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6394 (match_operand:SI 2 "arm_hard_register_operand" ""))
6395 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6396 (match_operand:SI 3 "arm_hard_register_operand" ""))
6397 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6398 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6399 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6400 "stm%(ia%)\\t%1, {%2, %3, %4}"
6401 [(set_attr "predicable" "yes")
6402 (set_attr "type" "store3")]
6405 (define_insn "*stmsi2"
6406 [(match_parallel 0 "store_multiple_operation"
6407 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6408 (match_operand:SI 2 "arm_hard_register_operand" ""))
6409 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6410 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6411 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6412 "stm%(ia%)\\t%1, {%2, %3}"
6413 [(set_attr "predicable" "yes")
6414 (set_attr "type" "store2")]
6417 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6418 ;; We could let this apply for blocks of less than this, but it clobbers so
6419 ;; many registers that there is then probably a better way.
6421 (define_expand "movmemqi"
6422 [(match_operand:BLK 0 "general_operand" "")
6423 (match_operand:BLK 1 "general_operand" "")
6424 (match_operand:SI 2 "const_int_operand" "")
6425 (match_operand:SI 3 "const_int_operand" "")]
6430 if (arm_gen_movmemqi (operands))
6434 else /* TARGET_THUMB1 */
6436 if ( INTVAL (operands[3]) != 4
6437 || INTVAL (operands[2]) > 48)
6440 thumb_expand_movmemqi (operands);
6446 ;; Thumb block-move insns
6448 (define_insn "movmem12b"
6449 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6450 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6451 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6452 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6453 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6454 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6455 (set (match_operand:SI 0 "register_operand" "=l")
6456 (plus:SI (match_dup 2) (const_int 12)))
6457 (set (match_operand:SI 1 "register_operand" "=l")
6458 (plus:SI (match_dup 3) (const_int 12)))
6459 (clobber (match_scratch:SI 4 "=&l"))
6460 (clobber (match_scratch:SI 5 "=&l"))
6461 (clobber (match_scratch:SI 6 "=&l"))]
6463 "* return thumb_output_move_mem_multiple (3, operands);"
6464 [(set_attr "length" "4")
6465 ; This isn't entirely accurate... It loads as well, but in terms of
6466 ; scheduling the following insn it is better to consider it as a store
6467 (set_attr "type" "store3")]
6470 (define_insn "movmem8b"
6471 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6472 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6473 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6474 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6475 (set (match_operand:SI 0 "register_operand" "=l")
6476 (plus:SI (match_dup 2) (const_int 8)))
6477 (set (match_operand:SI 1 "register_operand" "=l")
6478 (plus:SI (match_dup 3) (const_int 8)))
6479 (clobber (match_scratch:SI 4 "=&l"))
6480 (clobber (match_scratch:SI 5 "=&l"))]
6482 "* return thumb_output_move_mem_multiple (2, operands);"
6483 [(set_attr "length" "4")
6484 ; This isn't entirely accurate... It loads as well, but in terms of
6485 ; scheduling the following insn it is better to consider it as a store
6486 (set_attr "type" "store2")]
6491 ;; Compare & branch insns
6492 ;; The range calculations are based as follows:
6493 ;; For forward branches, the address calculation returns the address of
6494 ;; the next instruction. This is 2 beyond the branch instruction.
6495 ;; For backward branches, the address calculation returns the address of
6496 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6497 ;; instruction for the shortest sequence, and 4 before the branch instruction
6498 ;; if we have to jump around an unconditional branch.
6499 ;; To the basic branch range the PC offset must be added (this is +4).
6500 ;; So for forward branches we have
6501 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6502 ;; And for backward branches we have
6503 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6505 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6506 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6508 (define_expand "cbranchsi4"
6509 [(set (pc) (if_then_else
6510 (match_operator 0 "arm_comparison_operator"
6511 [(match_operand:SI 1 "s_register_operand" "")
6512 (match_operand:SI 2 "nonmemory_operand" "")])
6513 (label_ref (match_operand 3 "" ""))
6515 "TARGET_THUMB1 || TARGET_32BIT"
6519 if (!arm_add_operand (operands[2], SImode))
6520 operands[2] = force_reg (SImode, operands[2]);
6521 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6525 if (thumb1_cmpneg_operand (operands[2], SImode))
6527 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6528 operands[3], operands[0]));
6531 if (!thumb1_cmp_operand (operands[2], SImode))
6532 operands[2] = force_reg (SImode, operands[2]);
6535 ;; A pattern to recognize a special situation and optimize for it.
6536 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6537 ;; due to the available addressing modes. Hence, convert a signed comparison
6538 ;; with zero into an unsigned comparison with 127 if possible.
6539 (define_expand "cbranchqi4"
6540 [(set (pc) (if_then_else
6541 (match_operator 0 "lt_ge_comparison_operator"
6542 [(match_operand:QI 1 "memory_operand" "")
6543 (match_operand:QI 2 "const0_operand" "")])
6544 (label_ref (match_operand 3 "" ""))
6549 xops[1] = gen_reg_rtx (SImode);
6550 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6551 xops[2] = GEN_INT (127);
6552 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6553 VOIDmode, xops[1], xops[2]);
6554 xops[3] = operands[3];
6555 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6559 (define_expand "cbranchsf4"
6560 [(set (pc) (if_then_else
6561 (match_operator 0 "arm_comparison_operator"
6562 [(match_operand:SF 1 "s_register_operand" "")
6563 (match_operand:SF 2 "arm_float_compare_operand" "")])
6564 (label_ref (match_operand 3 "" ""))
6566 "TARGET_32BIT && TARGET_HARD_FLOAT"
6567 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6568 operands[3])); DONE;"
6571 (define_expand "cbranchdf4"
6572 [(set (pc) (if_then_else
6573 (match_operator 0 "arm_comparison_operator"
6574 [(match_operand:DF 1 "s_register_operand" "")
6575 (match_operand:DF 2 "arm_float_compare_operand" "")])
6576 (label_ref (match_operand 3 "" ""))
6578 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6579 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6580 operands[3])); DONE;"
6583 (define_expand "cbranchdi4"
6584 [(set (pc) (if_then_else
6585 (match_operator 0 "arm_comparison_operator"
6586 [(match_operand:DI 1 "cmpdi_operand" "")
6587 (match_operand:DI 2 "cmpdi_operand" "")])
6588 (label_ref (match_operand 3 "" ""))
6592 rtx swap = NULL_RTX;
6593 enum rtx_code code = GET_CODE (operands[0]);
6595 /* We should not have two constants. */
6596 gcc_assert (GET_MODE (operands[1]) == DImode
6597 || GET_MODE (operands[2]) == DImode);
6599 /* Flip unimplemented DImode comparisons to a form that
6600 arm_gen_compare_reg can handle. */
6604 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6606 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6608 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6610 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6615 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6618 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6624 (define_insn "cbranchsi4_insn"
6625 [(set (pc) (if_then_else
6626 (match_operator 0 "arm_comparison_operator"
6627 [(match_operand:SI 1 "s_register_operand" "l,*h")
6628 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6629 (label_ref (match_operand 3 "" ""))
6633 rtx t = prev_nonnote_insn (insn);
6636 && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
6638 t = XEXP (SET_SRC (PATTERN (t)), 0);
6639 if (!rtx_equal_p (XEXP (t, 0), operands[1])
6640 || !rtx_equal_p (XEXP (t, 1), operands[2]))
6646 output_asm_insn (\"cmp\\t%1, %2\", operands);
6648 switch (get_attr_length (insn))
6650 case 4: return \"b%d0\\t%l3\";
6651 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6652 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6655 [(set (attr "far_jump")
6657 (eq_attr "length" "8")
6658 (const_string "yes")
6659 (const_string "no")))
6660 (set (attr "length")
6662 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6663 (le (minus (match_dup 3) (pc)) (const_int 256)))
6666 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6667 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6672 (define_insn "cbranchsi4_scratch"
6673 [(set (pc) (if_then_else
6674 (match_operator 4 "arm_comparison_operator"
6675 [(match_operand:SI 1 "s_register_operand" "l,0")
6676 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6677 (label_ref (match_operand 3 "" ""))
6679 (clobber (match_scratch:SI 0 "=l,l"))]
6682 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6684 switch (get_attr_length (insn))
6686 case 4: return \"b%d4\\t%l3\";
6687 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6688 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6691 [(set (attr "far_jump")
6693 (eq_attr "length" "8")
6694 (const_string "yes")
6695 (const_string "no")))
6696 (set (attr "length")
6698 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6699 (le (minus (match_dup 3) (pc)) (const_int 256)))
6702 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6703 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6708 (define_insn "*movsi_cbranchsi4"
6711 (match_operator 3 "arm_comparison_operator"
6712 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6714 (label_ref (match_operand 2 "" ""))
6716 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6720 if (which_alternative == 0)
6721 output_asm_insn (\"cmp\t%0, #0\", operands);
6722 else if (which_alternative == 1)
6723 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6726 output_asm_insn (\"cmp\t%1, #0\", operands);
6727 if (which_alternative == 2)
6728 output_asm_insn (\"mov\t%0, %1\", operands);
6730 output_asm_insn (\"str\t%1, %0\", operands);
6732 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6734 case 4: return \"b%d3\\t%l2\";
6735 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6736 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6739 [(set (attr "far_jump")
6741 (ior (and (gt (symbol_ref ("which_alternative"))
6743 (eq_attr "length" "8"))
6744 (eq_attr "length" "10"))
6745 (const_string "yes")
6746 (const_string "no")))
6747 (set (attr "length")
6749 (le (symbol_ref ("which_alternative"))
6752 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6753 (le (minus (match_dup 2) (pc)) (const_int 256)))
6756 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6757 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6761 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6762 (le (minus (match_dup 2) (pc)) (const_int 256)))
6765 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6766 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6772 [(set (match_operand:SI 0 "low_register_operand" "")
6773 (match_operand:SI 1 "low_register_operand" ""))
6775 (if_then_else (match_operator 2 "arm_comparison_operator"
6776 [(match_dup 1) (const_int 0)])
6777 (label_ref (match_operand 3 "" ""))
6782 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6783 (label_ref (match_dup 3))
6785 (set (match_dup 0) (match_dup 1))])]
6789 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6790 ;; merge cases like this because the op1 is a hard register in
6791 ;; CLASS_LIKELY_SPILLED_P.
6793 [(set (match_operand:SI 0 "low_register_operand" "")
6794 (match_operand:SI 1 "low_register_operand" ""))
6796 (if_then_else (match_operator 2 "arm_comparison_operator"
6797 [(match_dup 0) (const_int 0)])
6798 (label_ref (match_operand 3 "" ""))
6803 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6804 (label_ref (match_dup 3))
6806 (set (match_dup 0) (match_dup 1))])]
6810 (define_insn "*negated_cbranchsi4"
6813 (match_operator 0 "equality_operator"
6814 [(match_operand:SI 1 "s_register_operand" "l")
6815 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6816 (label_ref (match_operand 3 "" ""))
6820 output_asm_insn (\"cmn\\t%1, %2\", operands);
6821 switch (get_attr_length (insn))
6823 case 4: return \"b%d0\\t%l3\";
6824 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6825 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6828 [(set (attr "far_jump")
6830 (eq_attr "length" "8")
6831 (const_string "yes")
6832 (const_string "no")))
6833 (set (attr "length")
6835 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6836 (le (minus (match_dup 3) (pc)) (const_int 256)))
6839 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6840 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6845 (define_insn "*tbit_cbranch"
6848 (match_operator 0 "equality_operator"
6849 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6851 (match_operand:SI 2 "const_int_operand" "i"))
6853 (label_ref (match_operand 3 "" ""))
6855 (clobber (match_scratch:SI 4 "=l"))]
6860 op[0] = operands[4];
6861 op[1] = operands[1];
6862 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6864 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6865 switch (get_attr_length (insn))
6867 case 4: return \"b%d0\\t%l3\";
6868 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6869 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6872 [(set (attr "far_jump")
6874 (eq_attr "length" "8")
6875 (const_string "yes")
6876 (const_string "no")))
6877 (set (attr "length")
6879 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6880 (le (minus (match_dup 3) (pc)) (const_int 256)))
6883 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6884 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6889 (define_insn "*tlobits_cbranch"
6892 (match_operator 0 "equality_operator"
6893 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6894 (match_operand:SI 2 "const_int_operand" "i")
6897 (label_ref (match_operand 3 "" ""))
6899 (clobber (match_scratch:SI 4 "=l"))]
6904 op[0] = operands[4];
6905 op[1] = operands[1];
6906 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6908 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6909 switch (get_attr_length (insn))
6911 case 4: return \"b%d0\\t%l3\";
6912 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6913 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6916 [(set (attr "far_jump")
6918 (eq_attr "length" "8")
6919 (const_string "yes")
6920 (const_string "no")))
6921 (set (attr "length")
6923 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6924 (le (minus (match_dup 3) (pc)) (const_int 256)))
6927 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6928 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6933 (define_insn "*tstsi3_cbranch"
6936 (match_operator 3 "equality_operator"
6937 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6938 (match_operand:SI 1 "s_register_operand" "l"))
6940 (label_ref (match_operand 2 "" ""))
6945 output_asm_insn (\"tst\\t%0, %1\", operands);
6946 switch (get_attr_length (insn))
6948 case 4: return \"b%d3\\t%l2\";
6949 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6950 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6953 [(set (attr "far_jump")
6955 (eq_attr "length" "8")
6956 (const_string "yes")
6957 (const_string "no")))
6958 (set (attr "length")
6960 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6961 (le (minus (match_dup 2) (pc)) (const_int 256)))
6964 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6965 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6970 (define_insn "*andsi3_cbranch"
6973 (match_operator 5 "equality_operator"
6974 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6975 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6977 (label_ref (match_operand 4 "" ""))
6979 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6980 (and:SI (match_dup 2) (match_dup 3)))
6981 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6985 if (which_alternative == 0)
6986 output_asm_insn (\"and\\t%0, %3\", operands);
6987 else if (which_alternative == 1)
6989 output_asm_insn (\"and\\t%1, %3\", operands);
6990 output_asm_insn (\"mov\\t%0, %1\", operands);
6994 output_asm_insn (\"and\\t%1, %3\", operands);
6995 output_asm_insn (\"str\\t%1, %0\", operands);
6998 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7000 case 4: return \"b%d5\\t%l4\";
7001 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7002 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7005 [(set (attr "far_jump")
7007 (ior (and (eq (symbol_ref ("which_alternative"))
7009 (eq_attr "length" "8"))
7010 (eq_attr "length" "10"))
7011 (const_string "yes")
7012 (const_string "no")))
7013 (set (attr "length")
7015 (eq (symbol_ref ("which_alternative"))
7018 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7019 (le (minus (match_dup 4) (pc)) (const_int 256)))
7022 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7023 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7027 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7028 (le (minus (match_dup 4) (pc)) (const_int 256)))
7031 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7032 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7037 (define_insn "*orrsi3_cbranch_scratch"
7040 (match_operator 4 "equality_operator"
7041 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7042 (match_operand:SI 2 "s_register_operand" "l"))
7044 (label_ref (match_operand 3 "" ""))
7046 (clobber (match_scratch:SI 0 "=l"))]
7050 output_asm_insn (\"orr\\t%0, %2\", operands);
7051 switch (get_attr_length (insn))
7053 case 4: return \"b%d4\\t%l3\";
7054 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7055 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7058 [(set (attr "far_jump")
7060 (eq_attr "length" "8")
7061 (const_string "yes")
7062 (const_string "no")))
7063 (set (attr "length")
7065 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7066 (le (minus (match_dup 3) (pc)) (const_int 256)))
7069 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7070 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7075 (define_insn "*orrsi3_cbranch"
7078 (match_operator 5 "equality_operator"
7079 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7080 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7082 (label_ref (match_operand 4 "" ""))
7084 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7085 (ior:SI (match_dup 2) (match_dup 3)))
7086 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7090 if (which_alternative == 0)
7091 output_asm_insn (\"orr\\t%0, %3\", operands);
7092 else if (which_alternative == 1)
7094 output_asm_insn (\"orr\\t%1, %3\", operands);
7095 output_asm_insn (\"mov\\t%0, %1\", operands);
7099 output_asm_insn (\"orr\\t%1, %3\", operands);
7100 output_asm_insn (\"str\\t%1, %0\", operands);
7103 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7105 case 4: return \"b%d5\\t%l4\";
7106 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7107 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7110 [(set (attr "far_jump")
7112 (ior (and (eq (symbol_ref ("which_alternative"))
7114 (eq_attr "length" "8"))
7115 (eq_attr "length" "10"))
7116 (const_string "yes")
7117 (const_string "no")))
7118 (set (attr "length")
7120 (eq (symbol_ref ("which_alternative"))
7123 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7124 (le (minus (match_dup 4) (pc)) (const_int 256)))
7127 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7128 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7132 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7133 (le (minus (match_dup 4) (pc)) (const_int 256)))
7136 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7137 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7142 (define_insn "*xorsi3_cbranch_scratch"
7145 (match_operator 4 "equality_operator"
7146 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7147 (match_operand:SI 2 "s_register_operand" "l"))
7149 (label_ref (match_operand 3 "" ""))
7151 (clobber (match_scratch:SI 0 "=l"))]
7155 output_asm_insn (\"eor\\t%0, %2\", operands);
7156 switch (get_attr_length (insn))
7158 case 4: return \"b%d4\\t%l3\";
7159 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7160 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7163 [(set (attr "far_jump")
7165 (eq_attr "length" "8")
7166 (const_string "yes")
7167 (const_string "no")))
7168 (set (attr "length")
7170 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7171 (le (minus (match_dup 3) (pc)) (const_int 256)))
7174 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7175 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7180 (define_insn "*xorsi3_cbranch"
7183 (match_operator 5 "equality_operator"
7184 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7185 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7187 (label_ref (match_operand 4 "" ""))
7189 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7190 (xor:SI (match_dup 2) (match_dup 3)))
7191 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7195 if (which_alternative == 0)
7196 output_asm_insn (\"eor\\t%0, %3\", operands);
7197 else if (which_alternative == 1)
7199 output_asm_insn (\"eor\\t%1, %3\", operands);
7200 output_asm_insn (\"mov\\t%0, %1\", operands);
7204 output_asm_insn (\"eor\\t%1, %3\", operands);
7205 output_asm_insn (\"str\\t%1, %0\", operands);
7208 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7210 case 4: return \"b%d5\\t%l4\";
7211 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7212 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7215 [(set (attr "far_jump")
7217 (ior (and (eq (symbol_ref ("which_alternative"))
7219 (eq_attr "length" "8"))
7220 (eq_attr "length" "10"))
7221 (const_string "yes")
7222 (const_string "no")))
7223 (set (attr "length")
7225 (eq (symbol_ref ("which_alternative"))
7228 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7229 (le (minus (match_dup 4) (pc)) (const_int 256)))
7232 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7233 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7237 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7238 (le (minus (match_dup 4) (pc)) (const_int 256)))
7241 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7242 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7247 (define_insn "*bicsi3_cbranch_scratch"
7250 (match_operator 4 "equality_operator"
7251 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7252 (match_operand:SI 1 "s_register_operand" "0"))
7254 (label_ref (match_operand 3 "" ""))
7256 (clobber (match_scratch:SI 0 "=l"))]
7260 output_asm_insn (\"bic\\t%0, %2\", operands);
7261 switch (get_attr_length (insn))
7263 case 4: return \"b%d4\\t%l3\";
7264 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7265 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7268 [(set (attr "far_jump")
7270 (eq_attr "length" "8")
7271 (const_string "yes")
7272 (const_string "no")))
7273 (set (attr "length")
7275 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7276 (le (minus (match_dup 3) (pc)) (const_int 256)))
7279 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7280 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7285 (define_insn "*bicsi3_cbranch"
7288 (match_operator 5 "equality_operator"
7289 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7290 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7292 (label_ref (match_operand 4 "" ""))
7294 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7295 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7296 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7300 if (which_alternative == 0)
7301 output_asm_insn (\"bic\\t%0, %3\", operands);
7302 else if (which_alternative <= 2)
7304 output_asm_insn (\"bic\\t%1, %3\", operands);
7305 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7306 conditions again, since we're only testing for equality. */
7307 output_asm_insn (\"mov\\t%0, %1\", operands);
7311 output_asm_insn (\"bic\\t%1, %3\", operands);
7312 output_asm_insn (\"str\\t%1, %0\", operands);
7315 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7317 case 4: return \"b%d5\\t%l4\";
7318 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7319 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7322 [(set (attr "far_jump")
7324 (ior (and (eq (symbol_ref ("which_alternative"))
7326 (eq_attr "length" "8"))
7327 (eq_attr "length" "10"))
7328 (const_string "yes")
7329 (const_string "no")))
7330 (set (attr "length")
7332 (eq (symbol_ref ("which_alternative"))
7335 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7336 (le (minus (match_dup 4) (pc)) (const_int 256)))
7339 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7340 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7344 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7345 (le (minus (match_dup 4) (pc)) (const_int 256)))
7348 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7349 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7354 (define_insn "*cbranchne_decr1"
7356 (if_then_else (match_operator 3 "equality_operator"
7357 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7359 (label_ref (match_operand 4 "" ""))
7361 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7362 (plus:SI (match_dup 2) (const_int -1)))
7363 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7368 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7370 VOIDmode, operands[2], const1_rtx);
7371 cond[1] = operands[4];
7373 if (which_alternative == 0)
7374 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7375 else if (which_alternative == 1)
7377 /* We must provide an alternative for a hi reg because reload
7378 cannot handle output reloads on a jump instruction, but we
7379 can't subtract into that. Fortunately a mov from lo to hi
7380 does not clobber the condition codes. */
7381 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7382 output_asm_insn (\"mov\\t%0, %1\", operands);
7386 /* Similarly, but the target is memory. */
7387 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7388 output_asm_insn (\"str\\t%1, %0\", operands);
7391 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7394 output_asm_insn (\"b%d0\\t%l1\", cond);
7397 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7398 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7400 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7401 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7405 [(set (attr "far_jump")
7407 (ior (and (eq (symbol_ref ("which_alternative"))
7409 (eq_attr "length" "8"))
7410 (eq_attr "length" "10"))
7411 (const_string "yes")
7412 (const_string "no")))
7413 (set_attr_alternative "length"
7417 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7418 (le (minus (match_dup 4) (pc)) (const_int 256)))
7421 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7422 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7427 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7428 (le (minus (match_dup 4) (pc)) (const_int 256)))
7431 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7432 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7437 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7438 (le (minus (match_dup 4) (pc)) (const_int 256)))
7441 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7442 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7447 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7448 (le (minus (match_dup 4) (pc)) (const_int 256)))
7451 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7452 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7457 (define_insn "*addsi3_cbranch"
7460 (match_operator 4 "arm_comparison_operator"
7462 (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
7463 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
7465 (label_ref (match_operand 5 "" ""))
7468 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7469 (plus:SI (match_dup 2) (match_dup 3)))
7470 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7472 && (GET_CODE (operands[4]) == EQ
7473 || GET_CODE (operands[4]) == NE
7474 || GET_CODE (operands[4]) == GE
7475 || GET_CODE (operands[4]) == LT)"
7480 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7481 cond[1] = operands[2];
7482 cond[2] = operands[3];
7484 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7485 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7487 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7489 if (which_alternative >= 2
7490 && which_alternative < 4)
7491 output_asm_insn (\"mov\\t%0, %1\", operands);
7492 else if (which_alternative >= 4)
7493 output_asm_insn (\"str\\t%1, %0\", operands);
7495 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7498 return \"b%d4\\t%l5\";
7500 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7502 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7506 [(set (attr "far_jump")
7508 (ior (and (lt (symbol_ref ("which_alternative"))
7510 (eq_attr "length" "8"))
7511 (eq_attr "length" "10"))
7512 (const_string "yes")
7513 (const_string "no")))
7514 (set (attr "length")
7516 (lt (symbol_ref ("which_alternative"))
7519 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7520 (le (minus (match_dup 5) (pc)) (const_int 256)))
7523 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7524 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7528 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7529 (le (minus (match_dup 5) (pc)) (const_int 256)))
7532 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7533 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7538 (define_insn "*addsi3_cbranch_scratch"
7541 (match_operator 3 "arm_comparison_operator"
7543 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7544 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7546 (label_ref (match_operand 4 "" ""))
7548 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7550 && (GET_CODE (operands[3]) == EQ
7551 || GET_CODE (operands[3]) == NE
7552 || GET_CODE (operands[3]) == GE
7553 || GET_CODE (operands[3]) == LT)"
7556 switch (which_alternative)
7559 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7562 output_asm_insn (\"cmn\t%1, %2\", operands);
7565 if (INTVAL (operands[2]) < 0)
7566 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7568 output_asm_insn (\"add\t%0, %1, %2\", operands);
7571 if (INTVAL (operands[2]) < 0)
7572 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7574 output_asm_insn (\"add\t%0, %0, %2\", operands);
7578 switch (get_attr_length (insn))
7581 return \"b%d3\\t%l4\";
7583 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7585 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7589 [(set (attr "far_jump")
7591 (eq_attr "length" "8")
7592 (const_string "yes")
7593 (const_string "no")))
7594 (set (attr "length")
7596 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7597 (le (minus (match_dup 4) (pc)) (const_int 256)))
7600 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7601 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7606 (define_insn "*subsi3_cbranch"
7609 (match_operator 4 "arm_comparison_operator"
7611 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7612 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7614 (label_ref (match_operand 5 "" ""))
7616 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7617 (minus:SI (match_dup 2) (match_dup 3)))
7618 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7620 && (GET_CODE (operands[4]) == EQ
7621 || GET_CODE (operands[4]) == NE
7622 || GET_CODE (operands[4]) == GE
7623 || GET_CODE (operands[4]) == LT)"
7626 if (which_alternative == 0)
7627 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7628 else if (which_alternative == 1)
7630 /* We must provide an alternative for a hi reg because reload
7631 cannot handle output reloads on a jump instruction, but we
7632 can't subtract into that. Fortunately a mov from lo to hi
7633 does not clobber the condition codes. */
7634 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7635 output_asm_insn (\"mov\\t%0, %1\", operands);
7639 /* Similarly, but the target is memory. */
7640 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7641 output_asm_insn (\"str\\t%1, %0\", operands);
7644 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7647 return \"b%d4\\t%l5\";
7649 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7651 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7655 [(set (attr "far_jump")
7657 (ior (and (eq (symbol_ref ("which_alternative"))
7659 (eq_attr "length" "8"))
7660 (eq_attr "length" "10"))
7661 (const_string "yes")
7662 (const_string "no")))
7663 (set (attr "length")
7665 (eq (symbol_ref ("which_alternative"))
7668 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7669 (le (minus (match_dup 5) (pc)) (const_int 256)))
7672 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7673 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7677 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7678 (le (minus (match_dup 5) (pc)) (const_int 256)))
7681 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7682 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7687 (define_insn "*subsi3_cbranch_scratch"
7690 (match_operator 0 "arm_comparison_operator"
7691 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7692 (match_operand:SI 2 "nonmemory_operand" "l"))
7694 (label_ref (match_operand 3 "" ""))
7697 && (GET_CODE (operands[0]) == EQ
7698 || GET_CODE (operands[0]) == NE
7699 || GET_CODE (operands[0]) == GE
7700 || GET_CODE (operands[0]) == LT)"
7702 output_asm_insn (\"cmp\\t%1, %2\", operands);
7703 switch (get_attr_length (insn))
7705 case 4: return \"b%d0\\t%l3\";
7706 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7707 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7710 [(set (attr "far_jump")
7712 (eq_attr "length" "8")
7713 (const_string "yes")
7714 (const_string "no")))
7715 (set (attr "length")
7717 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7718 (le (minus (match_dup 3) (pc)) (const_int 256)))
7721 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7722 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7727 ;; Comparison and test insns
7729 (define_insn "*arm_cmpsi_insn"
7730 [(set (reg:CC CC_REGNUM)
7731 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7732 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7737 [(set_attr "conds" "set")]
7740 (define_insn "*arm_cmpsi_shiftsi"
7741 [(set (reg:CC CC_REGNUM)
7742 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7743 (match_operator:SI 3 "shift_operator"
7744 [(match_operand:SI 1 "s_register_operand" "r")
7745 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7748 [(set_attr "conds" "set")
7749 (set_attr "shift" "1")
7750 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7751 (const_string "alu_shift")
7752 (const_string "alu_shift_reg")))]
7755 (define_insn "*arm_cmpsi_shiftsi_swp"
7756 [(set (reg:CC_SWP CC_REGNUM)
7757 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7758 [(match_operand:SI 1 "s_register_operand" "r")
7759 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7760 (match_operand:SI 0 "s_register_operand" "r")))]
7763 [(set_attr "conds" "set")
7764 (set_attr "shift" "1")
7765 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7766 (const_string "alu_shift")
7767 (const_string "alu_shift_reg")))]
7770 (define_insn "*arm_cmpsi_negshiftsi_si"
7771 [(set (reg:CC_Z CC_REGNUM)
7773 (neg:SI (match_operator:SI 1 "shift_operator"
7774 [(match_operand:SI 2 "s_register_operand" "r")
7775 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7776 (match_operand:SI 0 "s_register_operand" "r")))]
7779 [(set_attr "conds" "set")
7780 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7781 (const_string "alu_shift")
7782 (const_string "alu_shift_reg")))]
7785 ;; DImode comparisons. The generic code generates branches that
7786 ;; if-conversion can not reduce to a conditional compare, so we do
7789 (define_insn "*arm_cmpdi_insn"
7790 [(set (reg:CC_NCV CC_REGNUM)
7791 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7792 (match_operand:DI 1 "arm_di_operand" "rDi")))
7793 (clobber (match_scratch:SI 2 "=r"))]
7794 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7795 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7796 [(set_attr "conds" "set")
7797 (set_attr "length" "8")]
7800 (define_insn "*arm_cmpdi_unsigned"
7801 [(set (reg:CC_CZ CC_REGNUM)
7802 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7803 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7805 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7806 [(set_attr "conds" "set")
7807 (set_attr "length" "8")]
7810 (define_insn "*arm_cmpdi_zero"
7811 [(set (reg:CC_Z CC_REGNUM)
7812 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7814 (clobber (match_scratch:SI 1 "=r"))]
7816 "orr%.\\t%1, %Q0, %R0"
7817 [(set_attr "conds" "set")]
7820 (define_insn "*thumb_cmpdi_zero"
7821 [(set (reg:CC_Z CC_REGNUM)
7822 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7824 (clobber (match_scratch:SI 1 "=l"))]
7826 "orr\\t%1, %Q0, %R0"
7827 [(set_attr "conds" "set")
7828 (set_attr "length" "2")]
7831 ;; Cirrus SF compare instruction
7832 (define_insn "*cirrus_cmpsf"
7833 [(set (reg:CCFP CC_REGNUM)
7834 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7835 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7836 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7837 "cfcmps%?\\tr15, %V0, %V1"
7838 [(set_attr "type" "mav_farith")
7839 (set_attr "cirrus" "compare")]
7842 ;; Cirrus DF compare instruction
7843 (define_insn "*cirrus_cmpdf"
7844 [(set (reg:CCFP CC_REGNUM)
7845 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7846 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7847 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7848 "cfcmpd%?\\tr15, %V0, %V1"
7849 [(set_attr "type" "mav_farith")
7850 (set_attr "cirrus" "compare")]
7853 (define_insn "*cirrus_cmpdi"
7854 [(set (reg:CC CC_REGNUM)
7855 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7856 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7857 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7858 "cfcmp64%?\\tr15, %V0, %V1"
7859 [(set_attr "type" "mav_farith")
7860 (set_attr "cirrus" "compare")]
7863 ; This insn allows redundant compares to be removed by cse, nothing should
7864 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7865 ; is deleted later on. The match_dup will match the mode here, so that
7866 ; mode changes of the condition codes aren't lost by this even though we don't
7867 ; specify what they are.
7869 (define_insn "*deleted_compare"
7870 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7872 "\\t%@ deleted compare"
7873 [(set_attr "conds" "set")
7874 (set_attr "length" "0")]
7878 ;; Conditional branch insns
7880 (define_expand "cbranch_cc"
7882 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7883 (match_operand 2 "" "")])
7884 (label_ref (match_operand 3 "" ""))
7887 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7888 operands[1], operands[2]);
7889 operands[2] = const0_rtx;"
7893 ;; Patterns to match conditional branch insns.
7896 (define_insn "*arm_cond_branch"
7898 (if_then_else (match_operator 1 "arm_comparison_operator"
7899 [(match_operand 2 "cc_register" "") (const_int 0)])
7900 (label_ref (match_operand 0 "" ""))
7904 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7906 arm_ccfsm_state += 2;
7909 return \"b%d1\\t%l0\";
7911 [(set_attr "conds" "use")
7912 (set_attr "type" "branch")]
7915 (define_insn "*arm_cond_branch_reversed"
7917 (if_then_else (match_operator 1 "arm_comparison_operator"
7918 [(match_operand 2 "cc_register" "") (const_int 0)])
7920 (label_ref (match_operand 0 "" ""))))]
7923 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7925 arm_ccfsm_state += 2;
7928 return \"b%D1\\t%l0\";
7930 [(set_attr "conds" "use")
7931 (set_attr "type" "branch")]
7938 (define_expand "cstore_cc"
7939 [(set (match_operand:SI 0 "s_register_operand" "")
7940 (match_operator:SI 1 "" [(match_operand 2 "" "")
7941 (match_operand 3 "" "")]))]
7943 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7944 operands[2], operands[3]);
7945 operands[3] = const0_rtx;"
7948 (define_insn "*mov_scc"
7949 [(set (match_operand:SI 0 "s_register_operand" "=r")
7950 (match_operator:SI 1 "arm_comparison_operator"
7951 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7953 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7954 [(set_attr "conds" "use")
7955 (set_attr "length" "8")]
7958 (define_insn "*mov_negscc"
7959 [(set (match_operand:SI 0 "s_register_operand" "=r")
7960 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7961 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7963 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7964 [(set_attr "conds" "use")
7965 (set_attr "length" "8")]
7968 (define_insn "*mov_notscc"
7969 [(set (match_operand:SI 0 "s_register_operand" "=r")
7970 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7971 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7973 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7974 [(set_attr "conds" "use")
7975 (set_attr "length" "8")]
7978 (define_expand "cstoresi4"
7979 [(set (match_operand:SI 0 "s_register_operand" "")
7980 (match_operator:SI 1 "arm_comparison_operator"
7981 [(match_operand:SI 2 "s_register_operand" "")
7982 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7983 "TARGET_32BIT || TARGET_THUMB1"
7985 rtx op3, scratch, scratch2;
7989 if (!arm_add_operand (operands[3], SImode))
7990 operands[3] = force_reg (SImode, operands[3]);
7991 emit_insn (gen_cstore_cc (operands[0], operands[1],
7992 operands[2], operands[3]));
7996 if (operands[3] == const0_rtx)
7998 switch (GET_CODE (operands[1]))
8001 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8005 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8009 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8010 NULL_RTX, 0, OPTAB_WIDEN);
8011 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8012 NULL_RTX, 0, OPTAB_WIDEN);
8013 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8014 operands[0], 1, OPTAB_WIDEN);
8018 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8020 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8021 NULL_RTX, 1, OPTAB_WIDEN);
8025 scratch = expand_binop (SImode, ashr_optab, operands[2],
8026 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8027 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8028 NULL_RTX, 0, OPTAB_WIDEN);
8029 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8033 /* LT is handled by generic code. No need for unsigned with 0. */
8040 switch (GET_CODE (operands[1]))
8043 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8044 NULL_RTX, 0, OPTAB_WIDEN);
8045 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8049 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8050 NULL_RTX, 0, OPTAB_WIDEN);
8051 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8055 op3 = force_reg (SImode, operands[3]);
8057 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8058 NULL_RTX, 1, OPTAB_WIDEN);
8059 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8060 NULL_RTX, 0, OPTAB_WIDEN);
8061 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8067 if (!thumb1_cmp_operand (op3, SImode))
8068 op3 = force_reg (SImode, op3);
8069 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8070 NULL_RTX, 0, OPTAB_WIDEN);
8071 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8072 NULL_RTX, 1, OPTAB_WIDEN);
8073 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8078 op3 = force_reg (SImode, operands[3]);
8079 scratch = force_reg (SImode, const0_rtx);
8080 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8086 if (!thumb1_cmp_operand (op3, SImode))
8087 op3 = force_reg (SImode, op3);
8088 scratch = force_reg (SImode, const0_rtx);
8089 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8095 if (!thumb1_cmp_operand (op3, SImode))
8096 op3 = force_reg (SImode, op3);
8097 scratch = gen_reg_rtx (SImode);
8098 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8102 op3 = force_reg (SImode, operands[3]);
8103 scratch = gen_reg_rtx (SImode);
8104 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8107 /* No good sequences for GT, LT. */
8114 (define_expand "cstoresf4"
8115 [(set (match_operand:SI 0 "s_register_operand" "")
8116 (match_operator:SI 1 "arm_comparison_operator"
8117 [(match_operand:SF 2 "s_register_operand" "")
8118 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8119 "TARGET_32BIT && TARGET_HARD_FLOAT"
8120 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8121 operands[2], operands[3])); DONE;"
8124 (define_expand "cstoredf4"
8125 [(set (match_operand:SI 0 "s_register_operand" "")
8126 (match_operator:SI 1 "arm_comparison_operator"
8127 [(match_operand:DF 2 "s_register_operand" "")
8128 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8129 "TARGET_32BIT && TARGET_HARD_FLOAT"
8130 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8131 operands[2], operands[3])); DONE;"
8134 (define_expand "cstoredi4"
8135 [(set (match_operand:SI 0 "s_register_operand" "")
8136 (match_operator:SI 1 "arm_comparison_operator"
8137 [(match_operand:DI 2 "cmpdi_operand" "")
8138 (match_operand:DI 3 "cmpdi_operand" "")]))]
8141 rtx swap = NULL_RTX;
8142 enum rtx_code code = GET_CODE (operands[1]);
8144 /* We should not have two constants. */
8145 gcc_assert (GET_MODE (operands[2]) == DImode
8146 || GET_MODE (operands[3]) == DImode);
8148 /* Flip unimplemented DImode comparisons to a form that
8149 arm_gen_compare_reg can handle. */
8153 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
8155 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
8157 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
8159 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
8164 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
8167 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8173 (define_expand "cstoresi_eq0_thumb1"
8175 [(set (match_operand:SI 0 "s_register_operand" "")
8176 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8178 (clobber (match_dup:SI 2))])]
8180 "operands[2] = gen_reg_rtx (SImode);"
8183 (define_expand "cstoresi_ne0_thumb1"
8185 [(set (match_operand:SI 0 "s_register_operand" "")
8186 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8188 (clobber (match_dup:SI 2))])]
8190 "operands[2] = gen_reg_rtx (SImode);"
8193 (define_insn "*cstoresi_eq0_thumb1_insn"
8194 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8195 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8197 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8200 neg\\t%0, %1\;adc\\t%0, %0, %1
8201 neg\\t%2, %1\;adc\\t%0, %1, %2"
8202 [(set_attr "length" "4")]
8205 (define_insn "*cstoresi_ne0_thumb1_insn"
8206 [(set (match_operand:SI 0 "s_register_operand" "=l")
8207 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8209 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8211 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8212 [(set_attr "length" "4")]
8215 ;; Used as part of the expansion of thumb ltu and gtu sequences
8216 (define_insn "cstoresi_nltu_thumb1"
8217 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8218 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8219 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8221 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8222 [(set_attr "length" "4")]
8225 (define_insn_and_split "cstoresi_ltu_thumb1"
8226 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8227 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8228 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8233 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8234 (set (match_dup 0) (neg:SI (match_dup 3)))]
8235 "operands[3] = gen_reg_rtx (SImode);"
8236 [(set_attr "length" "4")]
8239 ;; Used as part of the expansion of thumb les sequence.
8240 (define_insn "thumb1_addsi3_addgeu"
8241 [(set (match_operand:SI 0 "s_register_operand" "=l")
8242 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8243 (match_operand:SI 2 "s_register_operand" "l"))
8244 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8245 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8247 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8248 [(set_attr "length" "4")]
8252 ;; Conditional move insns
8254 (define_expand "movsicc"
8255 [(set (match_operand:SI 0 "s_register_operand" "")
8256 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8257 (match_operand:SI 2 "arm_not_operand" "")
8258 (match_operand:SI 3 "arm_not_operand" "")))]
8262 enum rtx_code code = GET_CODE (operands[1]);
8265 if (code == UNEQ || code == LTGT)
8268 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8269 XEXP (operands[1], 1));
8270 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8274 (define_expand "movsfcc"
8275 [(set (match_operand:SF 0 "s_register_operand" "")
8276 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8277 (match_operand:SF 2 "s_register_operand" "")
8278 (match_operand:SF 3 "nonmemory_operand" "")))]
8279 "TARGET_32BIT && TARGET_HARD_FLOAT"
8282 enum rtx_code code = GET_CODE (operands[1]);
8285 if (code == UNEQ || code == LTGT)
8288 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8289 Otherwise, ensure it is a valid FP add operand */
8290 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8291 || (!arm_float_add_operand (operands[3], SFmode)))
8292 operands[3] = force_reg (SFmode, operands[3]);
8294 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8295 XEXP (operands[1], 1));
8296 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8300 (define_expand "movdfcc"
8301 [(set (match_operand:DF 0 "s_register_operand" "")
8302 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8303 (match_operand:DF 2 "s_register_operand" "")
8304 (match_operand:DF 3 "arm_float_add_operand" "")))]
8305 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8308 enum rtx_code code = GET_CODE (operands[1]);
8311 if (code == UNEQ || code == LTGT)
8314 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8315 XEXP (operands[1], 1));
8316 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8320 (define_insn "*movsicc_insn"
8321 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8323 (match_operator 3 "arm_comparison_operator"
8324 [(match_operand 4 "cc_register" "") (const_int 0)])
8325 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8326 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8333 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8334 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8335 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8336 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8337 [(set_attr "length" "4,4,4,4,8,8,8,8")
8338 (set_attr "conds" "use")]
8341 (define_insn "*movsfcc_soft_insn"
8342 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8343 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8344 [(match_operand 4 "cc_register" "") (const_int 0)])
8345 (match_operand:SF 1 "s_register_operand" "0,r")
8346 (match_operand:SF 2 "s_register_operand" "r,0")))]
8347 "TARGET_ARM && TARGET_SOFT_FLOAT"
8351 [(set_attr "conds" "use")]
8355 ;; Jump and linkage insns
8357 (define_expand "jump"
8359 (label_ref (match_operand 0 "" "")))]
8364 (define_insn "*arm_jump"
8366 (label_ref (match_operand 0 "" "")))]
8370 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8372 arm_ccfsm_state += 2;
8375 return \"b%?\\t%l0\";
8378 [(set_attr "predicable" "yes")]
8381 (define_insn "*thumb_jump"
8383 (label_ref (match_operand 0 "" "")))]
8386 if (get_attr_length (insn) == 2)
8388 return \"bl\\t%l0\\t%@ far jump\";
8390 [(set (attr "far_jump")
8392 (eq_attr "length" "4")
8393 (const_string "yes")
8394 (const_string "no")))
8395 (set (attr "length")
8397 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8398 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8403 (define_expand "call"
8404 [(parallel [(call (match_operand 0 "memory_operand" "")
8405 (match_operand 1 "general_operand" ""))
8406 (use (match_operand 2 "" ""))
8407 (clobber (reg:SI LR_REGNUM))])]
8413 /* In an untyped call, we can get NULL for operand 2. */
8414 if (operands[2] == NULL_RTX)
8415 operands[2] = const0_rtx;
8417 /* Decide if we should generate indirect calls by loading the
8418 32-bit address of the callee into a register before performing the
8420 callee = XEXP (operands[0], 0);
8421 if (GET_CODE (callee) == SYMBOL_REF
8422 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8424 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8426 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8427 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8432 (define_expand "call_internal"
8433 [(parallel [(call (match_operand 0 "memory_operand" "")
8434 (match_operand 1 "general_operand" ""))
8435 (use (match_operand 2 "" ""))
8436 (clobber (reg:SI LR_REGNUM))])])
8438 (define_insn "*call_reg_armv5"
8439 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8440 (match_operand 1 "" ""))
8441 (use (match_operand 2 "" ""))
8442 (clobber (reg:SI LR_REGNUM))]
8443 "TARGET_ARM && arm_arch5"
8445 [(set_attr "type" "call")]
8448 (define_insn "*call_reg_arm"
8449 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8450 (match_operand 1 "" ""))
8451 (use (match_operand 2 "" ""))
8452 (clobber (reg:SI LR_REGNUM))]
8453 "TARGET_ARM && !arm_arch5"
8455 return output_call (operands);
8457 ;; length is worst case, normally it is only two
8458 [(set_attr "length" "12")
8459 (set_attr "type" "call")]
8463 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8464 ;; considered a function call by the branch predictor of some cores (PR40887).
8465 ;; Falls back to blx rN (*call_reg_armv5).
8467 (define_insn "*call_mem"
8468 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8469 (match_operand 1 "" ""))
8470 (use (match_operand 2 "" ""))
8471 (clobber (reg:SI LR_REGNUM))]
8472 "TARGET_ARM && !arm_arch5"
8474 return output_call_mem (operands);
8476 [(set_attr "length" "12")
8477 (set_attr "type" "call")]
8480 (define_insn "*call_reg_thumb1_v5"
8481 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8482 (match_operand 1 "" ""))
8483 (use (match_operand 2 "" ""))
8484 (clobber (reg:SI LR_REGNUM))]
8485 "TARGET_THUMB1 && arm_arch5"
8487 [(set_attr "length" "2")
8488 (set_attr "type" "call")]
8491 (define_insn "*call_reg_thumb1"
8492 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8493 (match_operand 1 "" ""))
8494 (use (match_operand 2 "" ""))
8495 (clobber (reg:SI LR_REGNUM))]
8496 "TARGET_THUMB1 && !arm_arch5"
8499 if (!TARGET_CALLER_INTERWORKING)
8500 return thumb_call_via_reg (operands[0]);
8501 else if (operands[1] == const0_rtx)
8502 return \"bl\\t%__interwork_call_via_%0\";
8503 else if (frame_pointer_needed)
8504 return \"bl\\t%__interwork_r7_call_via_%0\";
8506 return \"bl\\t%__interwork_r11_call_via_%0\";
8508 [(set_attr "type" "call")]
8511 (define_expand "call_value"
8512 [(parallel [(set (match_operand 0 "" "")
8513 (call (match_operand 1 "memory_operand" "")
8514 (match_operand 2 "general_operand" "")))
8515 (use (match_operand 3 "" ""))
8516 (clobber (reg:SI LR_REGNUM))])]
8522 /* In an untyped call, we can get NULL for operand 2. */
8523 if (operands[3] == 0)
8524 operands[3] = const0_rtx;
8526 /* Decide if we should generate indirect calls by loading the
8527 32-bit address of the callee into a register before performing the
8529 callee = XEXP (operands[1], 0);
8530 if (GET_CODE (callee) == SYMBOL_REF
8531 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8533 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8535 pat = gen_call_value_internal (operands[0], operands[1],
8536 operands[2], operands[3]);
8537 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8542 (define_expand "call_value_internal"
8543 [(parallel [(set (match_operand 0 "" "")
8544 (call (match_operand 1 "memory_operand" "")
8545 (match_operand 2 "general_operand" "")))
8546 (use (match_operand 3 "" ""))
8547 (clobber (reg:SI LR_REGNUM))])])
8549 (define_insn "*call_value_reg_armv5"
8550 [(set (match_operand 0 "" "")
8551 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8552 (match_operand 2 "" "")))
8553 (use (match_operand 3 "" ""))
8554 (clobber (reg:SI LR_REGNUM))]
8555 "TARGET_ARM && arm_arch5"
8557 [(set_attr "type" "call")]
8560 (define_insn "*call_value_reg_arm"
8561 [(set (match_operand 0 "" "")
8562 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8563 (match_operand 2 "" "")))
8564 (use (match_operand 3 "" ""))
8565 (clobber (reg:SI LR_REGNUM))]
8566 "TARGET_ARM && !arm_arch5"
8568 return output_call (&operands[1]);
8570 [(set_attr "length" "12")
8571 (set_attr "type" "call")]
8574 ;; Note: see *call_mem
8576 (define_insn "*call_value_mem"
8577 [(set (match_operand 0 "" "")
8578 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8579 (match_operand 2 "" "")))
8580 (use (match_operand 3 "" ""))
8581 (clobber (reg:SI LR_REGNUM))]
8582 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8584 return output_call_mem (&operands[1]);
8586 [(set_attr "length" "12")
8587 (set_attr "type" "call")]
8590 (define_insn "*call_value_reg_thumb1_v5"
8591 [(set (match_operand 0 "" "")
8592 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8593 (match_operand 2 "" "")))
8594 (use (match_operand 3 "" ""))
8595 (clobber (reg:SI LR_REGNUM))]
8596 "TARGET_THUMB1 && arm_arch5"
8598 [(set_attr "length" "2")
8599 (set_attr "type" "call")]
8602 (define_insn "*call_value_reg_thumb1"
8603 [(set (match_operand 0 "" "")
8604 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8605 (match_operand 2 "" "")))
8606 (use (match_operand 3 "" ""))
8607 (clobber (reg:SI LR_REGNUM))]
8608 "TARGET_THUMB1 && !arm_arch5"
8611 if (!TARGET_CALLER_INTERWORKING)
8612 return thumb_call_via_reg (operands[1]);
8613 else if (operands[2] == const0_rtx)
8614 return \"bl\\t%__interwork_call_via_%1\";
8615 else if (frame_pointer_needed)
8616 return \"bl\\t%__interwork_r7_call_via_%1\";
8618 return \"bl\\t%__interwork_r11_call_via_%1\";
8620 [(set_attr "type" "call")]
8623 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8624 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8626 (define_insn "*call_symbol"
8627 [(call (mem:SI (match_operand:SI 0 "" ""))
8628 (match_operand 1 "" ""))
8629 (use (match_operand 2 "" ""))
8630 (clobber (reg:SI LR_REGNUM))]
8632 && (GET_CODE (operands[0]) == SYMBOL_REF)
8633 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8636 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8638 [(set_attr "type" "call")]
8641 (define_insn "*call_value_symbol"
8642 [(set (match_operand 0 "" "")
8643 (call (mem:SI (match_operand:SI 1 "" ""))
8644 (match_operand:SI 2 "" "")))
8645 (use (match_operand 3 "" ""))
8646 (clobber (reg:SI LR_REGNUM))]
8648 && (GET_CODE (operands[1]) == SYMBOL_REF)
8649 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8652 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8654 [(set_attr "type" "call")]
8657 (define_insn "*call_insn"
8658 [(call (mem:SI (match_operand:SI 0 "" ""))
8659 (match_operand:SI 1 "" ""))
8660 (use (match_operand 2 "" ""))
8661 (clobber (reg:SI LR_REGNUM))]
8663 && GET_CODE (operands[0]) == SYMBOL_REF
8664 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8666 [(set_attr "length" "4")
8667 (set_attr "type" "call")]
8670 (define_insn "*call_value_insn"
8671 [(set (match_operand 0 "" "")
8672 (call (mem:SI (match_operand 1 "" ""))
8673 (match_operand 2 "" "")))
8674 (use (match_operand 3 "" ""))
8675 (clobber (reg:SI LR_REGNUM))]
8677 && GET_CODE (operands[1]) == SYMBOL_REF
8678 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8680 [(set_attr "length" "4")
8681 (set_attr "type" "call")]
8684 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8685 (define_expand "sibcall"
8686 [(parallel [(call (match_operand 0 "memory_operand" "")
8687 (match_operand 1 "general_operand" ""))
8689 (use (match_operand 2 "" ""))])]
8693 if (operands[2] == NULL_RTX)
8694 operands[2] = const0_rtx;
8698 (define_expand "sibcall_value"
8699 [(parallel [(set (match_operand 0 "" "")
8700 (call (match_operand 1 "memory_operand" "")
8701 (match_operand 2 "general_operand" "")))
8703 (use (match_operand 3 "" ""))])]
8707 if (operands[3] == NULL_RTX)
8708 operands[3] = const0_rtx;
8712 (define_insn "*sibcall_insn"
8713 [(call (mem:SI (match_operand:SI 0 "" "X"))
8714 (match_operand 1 "" ""))
8716 (use (match_operand 2 "" ""))]
8717 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8719 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8721 [(set_attr "type" "call")]
8724 (define_insn "*sibcall_value_insn"
8725 [(set (match_operand 0 "" "")
8726 (call (mem:SI (match_operand:SI 1 "" "X"))
8727 (match_operand 2 "" "")))
8729 (use (match_operand 3 "" ""))]
8730 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8732 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8734 [(set_attr "type" "call")]
8737 (define_expand "return"
8739 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8742 ;; Often the return insn will be the same as loading from memory, so set attr
8743 (define_insn "*arm_return"
8745 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8748 if (arm_ccfsm_state == 2)
8750 arm_ccfsm_state += 2;
8753 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8755 [(set_attr "type" "load1")
8756 (set_attr "length" "12")
8757 (set_attr "predicable" "yes")]
8760 (define_insn "*cond_return"
8762 (if_then_else (match_operator 0 "arm_comparison_operator"
8763 [(match_operand 1 "cc_register" "") (const_int 0)])
8766 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8769 if (arm_ccfsm_state == 2)
8771 arm_ccfsm_state += 2;
8774 return output_return_instruction (operands[0], TRUE, FALSE);
8776 [(set_attr "conds" "use")
8777 (set_attr "length" "12")
8778 (set_attr "type" "load1")]
8781 (define_insn "*cond_return_inverted"
8783 (if_then_else (match_operator 0 "arm_comparison_operator"
8784 [(match_operand 1 "cc_register" "") (const_int 0)])
8787 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8790 if (arm_ccfsm_state == 2)
8792 arm_ccfsm_state += 2;
8795 return output_return_instruction (operands[0], TRUE, TRUE);
8797 [(set_attr "conds" "use")
8798 (set_attr "length" "12")
8799 (set_attr "type" "load1")]
8802 ;; Generate a sequence of instructions to determine if the processor is
8803 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8806 (define_expand "return_addr_mask"
8808 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8810 (set (match_operand:SI 0 "s_register_operand" "")
8811 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8813 (const_int 67108860)))] ; 0x03fffffc
8816 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8819 (define_insn "*check_arch2"
8820 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8821 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8824 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8825 [(set_attr "length" "8")
8826 (set_attr "conds" "set")]
8829 ;; Call subroutine returning any type.
8831 (define_expand "untyped_call"
8832 [(parallel [(call (match_operand 0 "" "")
8834 (match_operand 1 "" "")
8835 (match_operand 2 "" "")])]
8840 rtx par = gen_rtx_PARALLEL (VOIDmode,
8841 rtvec_alloc (XVECLEN (operands[2], 0)));
8842 rtx addr = gen_reg_rtx (Pmode);
8846 emit_move_insn (addr, XEXP (operands[1], 0));
8847 mem = change_address (operands[1], BLKmode, addr);
8849 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8851 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8853 /* Default code only uses r0 as a return value, but we could
8854 be using anything up to 4 registers. */
8855 if (REGNO (src) == R0_REGNUM)
8856 src = gen_rtx_REG (TImode, R0_REGNUM);
8858 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8860 size += GET_MODE_SIZE (GET_MODE (src));
8863 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8868 for (i = 0; i < XVECLEN (par, 0); i++)
8870 HOST_WIDE_INT offset = 0;
8871 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8874 emit_move_insn (addr, plus_constant (addr, size));
8876 mem = change_address (mem, GET_MODE (reg), NULL);
8877 if (REGNO (reg) == R0_REGNUM)
8879 /* On thumb we have to use a write-back instruction. */
8880 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8881 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8882 size = TARGET_ARM ? 16 : 0;
8886 emit_move_insn (mem, reg);
8887 size = GET_MODE_SIZE (GET_MODE (reg));
8891 /* The optimizer does not know that the call sets the function value
8892 registers we stored in the result block. We avoid problems by
8893 claiming that all hard registers are used and clobbered at this
8895 emit_insn (gen_blockage ());
8901 (define_expand "untyped_return"
8902 [(match_operand:BLK 0 "memory_operand" "")
8903 (match_operand 1 "" "")]
8908 rtx addr = gen_reg_rtx (Pmode);
8912 emit_move_insn (addr, XEXP (operands[0], 0));
8913 mem = change_address (operands[0], BLKmode, addr);
8915 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8917 HOST_WIDE_INT offset = 0;
8918 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8921 emit_move_insn (addr, plus_constant (addr, size));
8923 mem = change_address (mem, GET_MODE (reg), NULL);
8924 if (REGNO (reg) == R0_REGNUM)
8926 /* On thumb we have to use a write-back instruction. */
8927 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8928 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8929 size = TARGET_ARM ? 16 : 0;
8933 emit_move_insn (reg, mem);
8934 size = GET_MODE_SIZE (GET_MODE (reg));
8938 /* Emit USE insns before the return. */
8939 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8940 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8942 /* Construct the return. */
8943 expand_naked_return ();
8949 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8950 ;; all of memory. This blocks insns from being moved across this point.
8952 (define_insn "blockage"
8953 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8956 [(set_attr "length" "0")
8957 (set_attr "type" "block")]
8960 (define_expand "casesi"
8961 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8962 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8963 (match_operand:SI 2 "const_int_operand" "") ; total range
8964 (match_operand:SI 3 "" "") ; table label
8965 (match_operand:SI 4 "" "")] ; Out of range label
8966 "TARGET_32BIT || optimize_size || flag_pic"
8969 enum insn_code code;
8970 if (operands[1] != const0_rtx)
8972 rtx reg = gen_reg_rtx (SImode);
8974 emit_insn (gen_addsi3 (reg, operands[0],
8975 GEN_INT (-INTVAL (operands[1]))));
8980 code = CODE_FOR_arm_casesi_internal;
8981 else if (TARGET_THUMB1)
8982 code = CODE_FOR_thumb1_casesi_internal_pic;
8984 code = CODE_FOR_thumb2_casesi_internal_pic;
8986 code = CODE_FOR_thumb2_casesi_internal;
8988 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8989 operands[2] = force_reg (SImode, operands[2]);
8991 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8992 operands[3], operands[4]));
8997 ;; The USE in this pattern is needed to tell flow analysis that this is
8998 ;; a CASESI insn. It has no other purpose.
8999 (define_insn "arm_casesi_internal"
9000 [(parallel [(set (pc)
9002 (leu (match_operand:SI 0 "s_register_operand" "r")
9003 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9004 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9005 (label_ref (match_operand 2 "" ""))))
9006 (label_ref (match_operand 3 "" ""))))
9007 (clobber (reg:CC CC_REGNUM))
9008 (use (label_ref (match_dup 2)))])]
9012 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9013 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9015 [(set_attr "conds" "clob")
9016 (set_attr "length" "12")]
9019 (define_expand "thumb1_casesi_internal_pic"
9020 [(match_operand:SI 0 "s_register_operand" "")
9021 (match_operand:SI 1 "thumb1_cmp_operand" "")
9022 (match_operand 2 "" "")
9023 (match_operand 3 "" "")]
9027 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9028 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9030 reg0 = gen_rtx_REG (SImode, 0);
9031 emit_move_insn (reg0, operands[0]);
9032 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9037 (define_insn "thumb1_casesi_dispatch"
9038 [(parallel [(set (pc) (unspec [(reg:SI 0)
9039 (label_ref (match_operand 0 "" ""))
9040 ;; (label_ref (match_operand 1 "" ""))
9042 UNSPEC_THUMB1_CASESI))
9043 (clobber (reg:SI IP_REGNUM))
9044 (clobber (reg:SI LR_REGNUM))])]
9046 "* return thumb1_output_casesi(operands);"
9047 [(set_attr "length" "4")]
9050 (define_expand "indirect_jump"
9052 (match_operand:SI 0 "s_register_operand" ""))]
9055 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9056 address and use bx. */
9060 tmp = gen_reg_rtx (SImode);
9061 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9067 ;; NB Never uses BX.
9068 (define_insn "*arm_indirect_jump"
9070 (match_operand:SI 0 "s_register_operand" "r"))]
9072 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9073 [(set_attr "predicable" "yes")]
9076 (define_insn "*load_indirect_jump"
9078 (match_operand:SI 0 "memory_operand" "m"))]
9080 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9081 [(set_attr "type" "load1")
9082 (set_attr "pool_range" "4096")
9083 (set_attr "neg_pool_range" "4084")
9084 (set_attr "predicable" "yes")]
9087 ;; NB Never uses BX.
9088 (define_insn "*thumb1_indirect_jump"
9090 (match_operand:SI 0 "register_operand" "l*r"))]
9093 [(set_attr "conds" "clob")
9094 (set_attr "length" "2")]
9104 if (TARGET_UNIFIED_ASM)
9107 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9108 return \"mov\\tr8, r8\";
9110 [(set (attr "length")
9111 (if_then_else (eq_attr "is_thumb" "yes")
9117 ;; Patterns to allow combination of arithmetic, cond code and shifts
9119 (define_insn "*arith_shiftsi"
9120 [(set (match_operand:SI 0 "s_register_operand" "=r")
9121 (match_operator:SI 1 "shiftable_operator"
9122 [(match_operator:SI 3 "shift_operator"
9123 [(match_operand:SI 4 "s_register_operand" "r")
9124 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9125 (match_operand:SI 2 "s_register_operand" "rk")]))]
9127 "%i1%?\\t%0, %2, %4%S3"
9128 [(set_attr "predicable" "yes")
9129 (set_attr "shift" "4")
9130 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9131 (const_string "alu_shift")
9132 (const_string "alu_shift_reg")))]
9136 [(set (match_operand:SI 0 "s_register_operand" "")
9137 (match_operator:SI 1 "shiftable_operator"
9138 [(match_operator:SI 2 "shiftable_operator"
9139 [(match_operator:SI 3 "shift_operator"
9140 [(match_operand:SI 4 "s_register_operand" "")
9141 (match_operand:SI 5 "reg_or_int_operand" "")])
9142 (match_operand:SI 6 "s_register_operand" "")])
9143 (match_operand:SI 7 "arm_rhs_operand" "")]))
9144 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9147 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9150 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9153 (define_insn "*arith_shiftsi_compare0"
9154 [(set (reg:CC_NOOV CC_REGNUM)
9155 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9156 [(match_operator:SI 3 "shift_operator"
9157 [(match_operand:SI 4 "s_register_operand" "r")
9158 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9159 (match_operand:SI 2 "s_register_operand" "r")])
9161 (set (match_operand:SI 0 "s_register_operand" "=r")
9162 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9165 "%i1%.\\t%0, %2, %4%S3"
9166 [(set_attr "conds" "set")
9167 (set_attr "shift" "4")
9168 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9169 (const_string "alu_shift")
9170 (const_string "alu_shift_reg")))]
9173 (define_insn "*arith_shiftsi_compare0_scratch"
9174 [(set (reg:CC_NOOV CC_REGNUM)
9175 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9176 [(match_operator:SI 3 "shift_operator"
9177 [(match_operand:SI 4 "s_register_operand" "r")
9178 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9179 (match_operand:SI 2 "s_register_operand" "r")])
9181 (clobber (match_scratch:SI 0 "=r"))]
9183 "%i1%.\\t%0, %2, %4%S3"
9184 [(set_attr "conds" "set")
9185 (set_attr "shift" "4")
9186 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9187 (const_string "alu_shift")
9188 (const_string "alu_shift_reg")))]
9191 (define_insn "*sub_shiftsi"
9192 [(set (match_operand:SI 0 "s_register_operand" "=r")
9193 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9194 (match_operator:SI 2 "shift_operator"
9195 [(match_operand:SI 3 "s_register_operand" "r")
9196 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9198 "sub%?\\t%0, %1, %3%S2"
9199 [(set_attr "predicable" "yes")
9200 (set_attr "shift" "3")
9201 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9202 (const_string "alu_shift")
9203 (const_string "alu_shift_reg")))]
9206 (define_insn "*sub_shiftsi_compare0"
9207 [(set (reg:CC_NOOV CC_REGNUM)
9209 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9210 (match_operator:SI 2 "shift_operator"
9211 [(match_operand:SI 3 "s_register_operand" "r")
9212 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9214 (set (match_operand:SI 0 "s_register_operand" "=r")
9215 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9218 "sub%.\\t%0, %1, %3%S2"
9219 [(set_attr "conds" "set")
9220 (set_attr "shift" "3")
9221 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9222 (const_string "alu_shift")
9223 (const_string "alu_shift_reg")))]
9226 (define_insn "*sub_shiftsi_compare0_scratch"
9227 [(set (reg:CC_NOOV CC_REGNUM)
9229 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9230 (match_operator:SI 2 "shift_operator"
9231 [(match_operand:SI 3 "s_register_operand" "r")
9232 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9234 (clobber (match_scratch:SI 0 "=r"))]
9236 "sub%.\\t%0, %1, %3%S2"
9237 [(set_attr "conds" "set")
9238 (set_attr "shift" "3")
9239 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9240 (const_string "alu_shift")
9241 (const_string "alu_shift_reg")))]
9246 (define_insn "*and_scc"
9247 [(set (match_operand:SI 0 "s_register_operand" "=r")
9248 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9249 [(match_operand 3 "cc_register" "") (const_int 0)])
9250 (match_operand:SI 2 "s_register_operand" "r")))]
9252 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9253 [(set_attr "conds" "use")
9254 (set_attr "length" "8")]
9257 (define_insn "*ior_scc"
9258 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9259 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9260 [(match_operand 3 "cc_register" "") (const_int 0)])
9261 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9265 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9266 [(set_attr "conds" "use")
9267 (set_attr "length" "4,8")]
9270 ; A series of splitters for the compare_scc pattern below. Note that
9271 ; order is important.
9273 [(set (match_operand:SI 0 "s_register_operand" "")
9274 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9276 (clobber (reg:CC CC_REGNUM))]
9277 "TARGET_32BIT && reload_completed"
9278 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9281 [(set (match_operand:SI 0 "s_register_operand" "")
9282 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9284 (clobber (reg:CC CC_REGNUM))]
9285 "TARGET_32BIT && reload_completed"
9286 [(set (match_dup 0) (not:SI (match_dup 1)))
9287 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9290 [(set (match_operand:SI 0 "s_register_operand" "")
9291 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9293 (clobber (reg:CC CC_REGNUM))]
9294 "TARGET_32BIT && reload_completed"
9296 [(set (reg:CC CC_REGNUM)
9297 (compare:CC (const_int 1) (match_dup 1)))
9299 (minus:SI (const_int 1) (match_dup 1)))])
9300 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9301 (set (match_dup 0) (const_int 0)))])
9304 [(set (match_operand:SI 0 "s_register_operand" "")
9305 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9306 (match_operand:SI 2 "const_int_operand" "")))
9307 (clobber (reg:CC CC_REGNUM))]
9308 "TARGET_32BIT && reload_completed"
9310 [(set (reg:CC CC_REGNUM)
9311 (compare:CC (match_dup 1) (match_dup 2)))
9312 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9313 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9314 (set (match_dup 0) (const_int 1)))]
9316 operands[3] = GEN_INT (-INTVAL (operands[2]));
9320 [(set (match_operand:SI 0 "s_register_operand" "")
9321 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9322 (match_operand:SI 2 "arm_add_operand" "")))
9323 (clobber (reg:CC CC_REGNUM))]
9324 "TARGET_32BIT && reload_completed"
9326 [(set (reg:CC_NOOV CC_REGNUM)
9327 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9329 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9330 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9331 (set (match_dup 0) (const_int 1)))])
9333 (define_insn_and_split "*compare_scc"
9334 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9335 (match_operator:SI 1 "arm_comparison_operator"
9336 [(match_operand:SI 2 "s_register_operand" "r,r")
9337 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9338 (clobber (reg:CC CC_REGNUM))]
9341 "&& reload_completed"
9342 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9343 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9344 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9347 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9348 operands[2], operands[3]);
9349 enum rtx_code rc = GET_CODE (operands[1]);
9351 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9353 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9354 if (mode == CCFPmode || mode == CCFPEmode)
9355 rc = reverse_condition_maybe_unordered (rc);
9357 rc = reverse_condition (rc);
9358 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9361 ;; Attempt to improve the sequence generated by the compare_scc splitters
9362 ;; not to use conditional execution.
9364 [(set (reg:CC CC_REGNUM)
9365 (compare:CC (match_operand:SI 1 "register_operand" "")
9366 (match_operand:SI 2 "arm_rhs_operand" "")))
9367 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9368 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9369 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9370 (set (match_dup 0) (const_int 1)))
9371 (match_scratch:SI 3 "r")]
9373 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
9375 [(set (reg:CC CC_REGNUM)
9376 (compare:CC (const_int 0) (match_dup 3)))
9377 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9379 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9380 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
9382 (define_insn "*cond_move"
9383 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9384 (if_then_else:SI (match_operator 3 "equality_operator"
9385 [(match_operator 4 "arm_comparison_operator"
9386 [(match_operand 5 "cc_register" "") (const_int 0)])
9388 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9389 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9392 if (GET_CODE (operands[3]) == NE)
9394 if (which_alternative != 1)
9395 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9396 if (which_alternative != 0)
9397 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9400 if (which_alternative != 0)
9401 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9402 if (which_alternative != 1)
9403 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9406 [(set_attr "conds" "use")
9407 (set_attr "length" "4,4,8")]
9410 (define_insn "*cond_arith"
9411 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9412 (match_operator:SI 5 "shiftable_operator"
9413 [(match_operator:SI 4 "arm_comparison_operator"
9414 [(match_operand:SI 2 "s_register_operand" "r,r")
9415 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9416 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9417 (clobber (reg:CC CC_REGNUM))]
9420 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9421 return \"%i5\\t%0, %1, %2, lsr #31\";
9423 output_asm_insn (\"cmp\\t%2, %3\", operands);
9424 if (GET_CODE (operands[5]) == AND)
9425 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9426 else if (GET_CODE (operands[5]) == MINUS)
9427 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9428 else if (which_alternative != 0)
9429 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9430 return \"%i5%d4\\t%0, %1, #1\";
9432 [(set_attr "conds" "clob")
9433 (set_attr "length" "12")]
9436 (define_insn "*cond_sub"
9437 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9438 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9439 (match_operator:SI 4 "arm_comparison_operator"
9440 [(match_operand:SI 2 "s_register_operand" "r,r")
9441 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9442 (clobber (reg:CC CC_REGNUM))]
9445 output_asm_insn (\"cmp\\t%2, %3\", operands);
9446 if (which_alternative != 0)
9447 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9448 return \"sub%d4\\t%0, %1, #1\";
9450 [(set_attr "conds" "clob")
9451 (set_attr "length" "8,12")]
9454 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9455 (define_insn "*cmp_ite0"
9456 [(set (match_operand 6 "dominant_cc_register" "")
9459 (match_operator 4 "arm_comparison_operator"
9460 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9461 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9462 (match_operator:SI 5 "arm_comparison_operator"
9463 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9464 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9470 static const char * const opcodes[4][2] =
9472 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9473 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9474 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9475 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9476 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9477 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9478 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9479 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9482 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9484 return opcodes[which_alternative][swap];
9486 [(set_attr "conds" "set")
9487 (set_attr "length" "8")]
9490 (define_insn "*cmp_ite1"
9491 [(set (match_operand 6 "dominant_cc_register" "")
9494 (match_operator 4 "arm_comparison_operator"
9495 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9496 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9497 (match_operator:SI 5 "arm_comparison_operator"
9498 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9499 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9505 static const char * const opcodes[4][2] =
9507 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9508 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9509 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9510 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9511 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9512 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9513 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9514 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9517 comparison_dominates_p (GET_CODE (operands[5]),
9518 reverse_condition (GET_CODE (operands[4])));
9520 return opcodes[which_alternative][swap];
9522 [(set_attr "conds" "set")
9523 (set_attr "length" "8")]
9526 (define_insn "*cmp_and"
9527 [(set (match_operand 6 "dominant_cc_register" "")
9530 (match_operator 4 "arm_comparison_operator"
9531 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9532 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9533 (match_operator:SI 5 "arm_comparison_operator"
9534 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9535 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9540 static const char *const opcodes[4][2] =
9542 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9543 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9544 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9545 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9546 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9547 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9548 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9549 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9552 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9554 return opcodes[which_alternative][swap];
9556 [(set_attr "conds" "set")
9557 (set_attr "predicable" "no")
9558 (set_attr "length" "8")]
9561 (define_insn "*cmp_ior"
9562 [(set (match_operand 6 "dominant_cc_register" "")
9565 (match_operator 4 "arm_comparison_operator"
9566 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9567 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9568 (match_operator:SI 5 "arm_comparison_operator"
9569 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9570 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9575 static const char *const opcodes[4][2] =
9577 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9578 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9579 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9580 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9581 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9582 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9583 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9584 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9587 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9589 return opcodes[which_alternative][swap];
9592 [(set_attr "conds" "set")
9593 (set_attr "length" "8")]
9596 (define_insn_and_split "*ior_scc_scc"
9597 [(set (match_operand:SI 0 "s_register_operand" "=r")
9598 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9599 [(match_operand:SI 1 "s_register_operand" "r")
9600 (match_operand:SI 2 "arm_add_operand" "rIL")])
9601 (match_operator:SI 6 "arm_comparison_operator"
9602 [(match_operand:SI 4 "s_register_operand" "r")
9603 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9604 (clobber (reg:CC CC_REGNUM))]
9606 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9609 "TARGET_ARM && reload_completed"
9613 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9614 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9616 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9618 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9621 [(set_attr "conds" "clob")
9622 (set_attr "length" "16")])
9624 ; If the above pattern is followed by a CMP insn, then the compare is
9625 ; redundant, since we can rework the conditional instruction that follows.
9626 (define_insn_and_split "*ior_scc_scc_cmp"
9627 [(set (match_operand 0 "dominant_cc_register" "")
9628 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9629 [(match_operand:SI 1 "s_register_operand" "r")
9630 (match_operand:SI 2 "arm_add_operand" "rIL")])
9631 (match_operator:SI 6 "arm_comparison_operator"
9632 [(match_operand:SI 4 "s_register_operand" "r")
9633 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9635 (set (match_operand:SI 7 "s_register_operand" "=r")
9636 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9637 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9640 "TARGET_ARM && reload_completed"
9644 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9645 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9647 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9649 [(set_attr "conds" "set")
9650 (set_attr "length" "16")])
9652 (define_insn_and_split "*and_scc_scc"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r")
9654 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9655 [(match_operand:SI 1 "s_register_operand" "r")
9656 (match_operand:SI 2 "arm_add_operand" "rIL")])
9657 (match_operator:SI 6 "arm_comparison_operator"
9658 [(match_operand:SI 4 "s_register_operand" "r")
9659 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9660 (clobber (reg:CC CC_REGNUM))]
9662 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9665 "TARGET_ARM && reload_completed
9666 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9671 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9672 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9674 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9676 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9679 [(set_attr "conds" "clob")
9680 (set_attr "length" "16")])
9682 ; If the above pattern is followed by a CMP insn, then the compare is
9683 ; redundant, since we can rework the conditional instruction that follows.
9684 (define_insn_and_split "*and_scc_scc_cmp"
9685 [(set (match_operand 0 "dominant_cc_register" "")
9686 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9687 [(match_operand:SI 1 "s_register_operand" "r")
9688 (match_operand:SI 2 "arm_add_operand" "rIL")])
9689 (match_operator:SI 6 "arm_comparison_operator"
9690 [(match_operand:SI 4 "s_register_operand" "r")
9691 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9693 (set (match_operand:SI 7 "s_register_operand" "=r")
9694 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9695 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9698 "TARGET_ARM && reload_completed"
9702 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9703 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9705 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9707 [(set_attr "conds" "set")
9708 (set_attr "length" "16")])
9710 ;; If there is no dominance in the comparison, then we can still save an
9711 ;; instruction in the AND case, since we can know that the second compare
9712 ;; need only zero the value if false (if true, then the value is already
9714 (define_insn_and_split "*and_scc_scc_nodom"
9715 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9716 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9717 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9718 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9719 (match_operator:SI 6 "arm_comparison_operator"
9720 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9721 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9722 (clobber (reg:CC CC_REGNUM))]
9724 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9727 "TARGET_ARM && reload_completed"
9728 [(parallel [(set (match_dup 0)
9729 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9730 (clobber (reg:CC CC_REGNUM))])
9731 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9733 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9736 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9737 operands[4], operands[5]),
9739 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9741 [(set_attr "conds" "clob")
9742 (set_attr "length" "20")])
9745 [(set (reg:CC_NOOV CC_REGNUM)
9746 (compare:CC_NOOV (ior:SI
9747 (and:SI (match_operand:SI 0 "s_register_operand" "")
9749 (match_operator:SI 1 "arm_comparison_operator"
9750 [(match_operand:SI 2 "s_register_operand" "")
9751 (match_operand:SI 3 "arm_add_operand" "")]))
9753 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9756 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9758 (set (reg:CC_NOOV CC_REGNUM)
9759 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9764 [(set (reg:CC_NOOV CC_REGNUM)
9765 (compare:CC_NOOV (ior:SI
9766 (match_operator:SI 1 "arm_comparison_operator"
9767 [(match_operand:SI 2 "s_register_operand" "")
9768 (match_operand:SI 3 "arm_add_operand" "")])
9769 (and:SI (match_operand:SI 0 "s_register_operand" "")
9772 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9775 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9777 (set (reg:CC_NOOV CC_REGNUM)
9778 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9781 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9783 (define_insn "*negscc"
9784 [(set (match_operand:SI 0 "s_register_operand" "=r")
9785 (neg:SI (match_operator 3 "arm_comparison_operator"
9786 [(match_operand:SI 1 "s_register_operand" "r")
9787 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9788 (clobber (reg:CC CC_REGNUM))]
9791 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9792 return \"mov\\t%0, %1, asr #31\";
9794 if (GET_CODE (operands[3]) == NE)
9795 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9797 output_asm_insn (\"cmp\\t%1, %2\", operands);
9798 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9799 return \"mvn%d3\\t%0, #0\";
9801 [(set_attr "conds" "clob")
9802 (set_attr "length" "12")]
9805 (define_insn "movcond"
9806 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9808 (match_operator 5 "arm_comparison_operator"
9809 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9810 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9811 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9812 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9813 (clobber (reg:CC CC_REGNUM))]
9816 if (GET_CODE (operands[5]) == LT
9817 && (operands[4] == const0_rtx))
9819 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9821 if (operands[2] == const0_rtx)
9822 return \"and\\t%0, %1, %3, asr #31\";
9823 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9825 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9827 if (operands[1] == const0_rtx)
9828 return \"bic\\t%0, %2, %3, asr #31\";
9829 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9831 /* The only case that falls through to here is when both ops 1 & 2
9835 if (GET_CODE (operands[5]) == GE
9836 && (operands[4] == const0_rtx))
9838 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9840 if (operands[2] == const0_rtx)
9841 return \"bic\\t%0, %1, %3, asr #31\";
9842 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9844 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9846 if (operands[1] == const0_rtx)
9847 return \"and\\t%0, %2, %3, asr #31\";
9848 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9850 /* The only case that falls through to here is when both ops 1 & 2
9853 if (GET_CODE (operands[4]) == CONST_INT
9854 && !const_ok_for_arm (INTVAL (operands[4])))
9855 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9857 output_asm_insn (\"cmp\\t%3, %4\", operands);
9858 if (which_alternative != 0)
9859 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9860 if (which_alternative != 1)
9861 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9864 [(set_attr "conds" "clob")
9865 (set_attr "length" "8,8,12")]
9868 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9870 (define_insn "*ifcompare_plus_move"
9871 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9872 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9873 [(match_operand:SI 4 "s_register_operand" "r,r")
9874 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9876 (match_operand:SI 2 "s_register_operand" "r,r")
9877 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9878 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9879 (clobber (reg:CC CC_REGNUM))]
9882 [(set_attr "conds" "clob")
9883 (set_attr "length" "8,12")]
9886 (define_insn "*if_plus_move"
9887 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9889 (match_operator 4 "arm_comparison_operator"
9890 [(match_operand 5 "cc_register" "") (const_int 0)])
9892 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9893 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9894 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9898 sub%d4\\t%0, %2, #%n3
9899 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9900 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9901 [(set_attr "conds" "use")
9902 (set_attr "length" "4,4,8,8")
9903 (set_attr "type" "*,*,*,*")]
9906 (define_insn "*ifcompare_move_plus"
9907 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9908 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9909 [(match_operand:SI 4 "s_register_operand" "r,r")
9910 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9911 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9913 (match_operand:SI 2 "s_register_operand" "r,r")
9914 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9915 (clobber (reg:CC CC_REGNUM))]
9918 [(set_attr "conds" "clob")
9919 (set_attr "length" "8,12")]
9922 (define_insn "*if_move_plus"
9923 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9925 (match_operator 4 "arm_comparison_operator"
9926 [(match_operand 5 "cc_register" "") (const_int 0)])
9927 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9929 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9930 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9934 sub%D4\\t%0, %2, #%n3
9935 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9936 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9937 [(set_attr "conds" "use")
9938 (set_attr "length" "4,4,8,8")
9939 (set_attr "type" "*,*,*,*")]
9942 (define_insn "*ifcompare_arith_arith"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r")
9944 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9945 [(match_operand:SI 5 "s_register_operand" "r")
9946 (match_operand:SI 6 "arm_add_operand" "rIL")])
9947 (match_operator:SI 8 "shiftable_operator"
9948 [(match_operand:SI 1 "s_register_operand" "r")
9949 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9950 (match_operator:SI 7 "shiftable_operator"
9951 [(match_operand:SI 3 "s_register_operand" "r")
9952 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9953 (clobber (reg:CC CC_REGNUM))]
9956 [(set_attr "conds" "clob")
9957 (set_attr "length" "12")]
9960 (define_insn "*if_arith_arith"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r")
9962 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9963 [(match_operand 8 "cc_register" "") (const_int 0)])
9964 (match_operator:SI 6 "shiftable_operator"
9965 [(match_operand:SI 1 "s_register_operand" "r")
9966 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9967 (match_operator:SI 7 "shiftable_operator"
9968 [(match_operand:SI 3 "s_register_operand" "r")
9969 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9971 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9972 [(set_attr "conds" "use")
9973 (set_attr "length" "8")]
9976 (define_insn "*ifcompare_arith_move"
9977 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9978 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9979 [(match_operand:SI 2 "s_register_operand" "r,r")
9980 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9981 (match_operator:SI 7 "shiftable_operator"
9982 [(match_operand:SI 4 "s_register_operand" "r,r")
9983 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9984 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9985 (clobber (reg:CC CC_REGNUM))]
9988 /* If we have an operation where (op x 0) is the identity operation and
9989 the conditional operator is LT or GE and we are comparing against zero and
9990 everything is in registers then we can do this in two instructions. */
9991 if (operands[3] == const0_rtx
9992 && GET_CODE (operands[7]) != AND
9993 && GET_CODE (operands[5]) == REG
9994 && GET_CODE (operands[1]) == REG
9995 && REGNO (operands[1]) == REGNO (operands[4])
9996 && REGNO (operands[4]) != REGNO (operands[0]))
9998 if (GET_CODE (operands[6]) == LT)
9999 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10000 else if (GET_CODE (operands[6]) == GE)
10001 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10003 if (GET_CODE (operands[3]) == CONST_INT
10004 && !const_ok_for_arm (INTVAL (operands[3])))
10005 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10007 output_asm_insn (\"cmp\\t%2, %3\", operands);
10008 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10009 if (which_alternative != 0)
10010 return \"mov%D6\\t%0, %1\";
10013 [(set_attr "conds" "clob")
10014 (set_attr "length" "8,12")]
10017 (define_insn "*if_arith_move"
10018 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10019 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10020 [(match_operand 6 "cc_register" "") (const_int 0)])
10021 (match_operator:SI 5 "shiftable_operator"
10022 [(match_operand:SI 2 "s_register_operand" "r,r")
10023 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10024 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10027 %I5%d4\\t%0, %2, %3
10028 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10029 [(set_attr "conds" "use")
10030 (set_attr "length" "4,8")
10031 (set_attr "type" "*,*")]
10034 (define_insn "*ifcompare_move_arith"
10035 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10036 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10037 [(match_operand:SI 4 "s_register_operand" "r,r")
10038 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10039 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10040 (match_operator:SI 7 "shiftable_operator"
10041 [(match_operand:SI 2 "s_register_operand" "r,r")
10042 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10043 (clobber (reg:CC CC_REGNUM))]
10046 /* If we have an operation where (op x 0) is the identity operation and
10047 the conditional operator is LT or GE and we are comparing against zero and
10048 everything is in registers then we can do this in two instructions */
10049 if (operands[5] == const0_rtx
10050 && GET_CODE (operands[7]) != AND
10051 && GET_CODE (operands[3]) == REG
10052 && GET_CODE (operands[1]) == REG
10053 && REGNO (operands[1]) == REGNO (operands[2])
10054 && REGNO (operands[2]) != REGNO (operands[0]))
10056 if (GET_CODE (operands[6]) == GE)
10057 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10058 else if (GET_CODE (operands[6]) == LT)
10059 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10062 if (GET_CODE (operands[5]) == CONST_INT
10063 && !const_ok_for_arm (INTVAL (operands[5])))
10064 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10066 output_asm_insn (\"cmp\\t%4, %5\", operands);
10068 if (which_alternative != 0)
10069 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10070 return \"%I7%D6\\t%0, %2, %3\";
10072 [(set_attr "conds" "clob")
10073 (set_attr "length" "8,12")]
10076 (define_insn "*if_move_arith"
10077 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10079 (match_operator 4 "arm_comparison_operator"
10080 [(match_operand 6 "cc_register" "") (const_int 0)])
10081 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10082 (match_operator:SI 5 "shiftable_operator"
10083 [(match_operand:SI 2 "s_register_operand" "r,r")
10084 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10087 %I5%D4\\t%0, %2, %3
10088 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10089 [(set_attr "conds" "use")
10090 (set_attr "length" "4,8")
10091 (set_attr "type" "*,*")]
10094 (define_insn "*ifcompare_move_not"
10095 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10097 (match_operator 5 "arm_comparison_operator"
10098 [(match_operand:SI 3 "s_register_operand" "r,r")
10099 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10100 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10102 (match_operand:SI 2 "s_register_operand" "r,r"))))
10103 (clobber (reg:CC CC_REGNUM))]
10106 [(set_attr "conds" "clob")
10107 (set_attr "length" "8,12")]
10110 (define_insn "*if_move_not"
10111 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10113 (match_operator 4 "arm_comparison_operator"
10114 [(match_operand 3 "cc_register" "") (const_int 0)])
10115 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10116 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10120 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10121 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10122 [(set_attr "conds" "use")
10123 (set_attr "length" "4,8,8")]
10126 (define_insn "*ifcompare_not_move"
10127 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10129 (match_operator 5 "arm_comparison_operator"
10130 [(match_operand:SI 3 "s_register_operand" "r,r")
10131 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10133 (match_operand:SI 2 "s_register_operand" "r,r"))
10134 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10135 (clobber (reg:CC CC_REGNUM))]
10138 [(set_attr "conds" "clob")
10139 (set_attr "length" "8,12")]
10142 (define_insn "*if_not_move"
10143 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10145 (match_operator 4 "arm_comparison_operator"
10146 [(match_operand 3 "cc_register" "") (const_int 0)])
10147 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10148 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10152 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10153 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10154 [(set_attr "conds" "use")
10155 (set_attr "length" "4,8,8")]
10158 (define_insn "*ifcompare_shift_move"
10159 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10161 (match_operator 6 "arm_comparison_operator"
10162 [(match_operand:SI 4 "s_register_operand" "r,r")
10163 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10164 (match_operator:SI 7 "shift_operator"
10165 [(match_operand:SI 2 "s_register_operand" "r,r")
10166 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10167 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10168 (clobber (reg:CC CC_REGNUM))]
10171 [(set_attr "conds" "clob")
10172 (set_attr "length" "8,12")]
10175 (define_insn "*if_shift_move"
10176 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10178 (match_operator 5 "arm_comparison_operator"
10179 [(match_operand 6 "cc_register" "") (const_int 0)])
10180 (match_operator:SI 4 "shift_operator"
10181 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10182 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10183 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10187 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10188 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10189 [(set_attr "conds" "use")
10190 (set_attr "shift" "2")
10191 (set_attr "length" "4,8,8")
10192 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10193 (const_string "alu_shift")
10194 (const_string "alu_shift_reg")))]
10197 (define_insn "*ifcompare_move_shift"
10198 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10200 (match_operator 6 "arm_comparison_operator"
10201 [(match_operand:SI 4 "s_register_operand" "r,r")
10202 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10203 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10204 (match_operator:SI 7 "shift_operator"
10205 [(match_operand:SI 2 "s_register_operand" "r,r")
10206 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10207 (clobber (reg:CC CC_REGNUM))]
10210 [(set_attr "conds" "clob")
10211 (set_attr "length" "8,12")]
10214 (define_insn "*if_move_shift"
10215 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10217 (match_operator 5 "arm_comparison_operator"
10218 [(match_operand 6 "cc_register" "") (const_int 0)])
10219 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10220 (match_operator:SI 4 "shift_operator"
10221 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10222 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10226 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10227 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10228 [(set_attr "conds" "use")
10229 (set_attr "shift" "2")
10230 (set_attr "length" "4,8,8")
10231 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10232 (const_string "alu_shift")
10233 (const_string "alu_shift_reg")))]
10236 (define_insn "*ifcompare_shift_shift"
10237 [(set (match_operand:SI 0 "s_register_operand" "=r")
10239 (match_operator 7 "arm_comparison_operator"
10240 [(match_operand:SI 5 "s_register_operand" "r")
10241 (match_operand:SI 6 "arm_add_operand" "rIL")])
10242 (match_operator:SI 8 "shift_operator"
10243 [(match_operand:SI 1 "s_register_operand" "r")
10244 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10245 (match_operator:SI 9 "shift_operator"
10246 [(match_operand:SI 3 "s_register_operand" "r")
10247 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10248 (clobber (reg:CC CC_REGNUM))]
10251 [(set_attr "conds" "clob")
10252 (set_attr "length" "12")]
10255 (define_insn "*if_shift_shift"
10256 [(set (match_operand:SI 0 "s_register_operand" "=r")
10258 (match_operator 5 "arm_comparison_operator"
10259 [(match_operand 8 "cc_register" "") (const_int 0)])
10260 (match_operator:SI 6 "shift_operator"
10261 [(match_operand:SI 1 "s_register_operand" "r")
10262 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10263 (match_operator:SI 7 "shift_operator"
10264 [(match_operand:SI 3 "s_register_operand" "r")
10265 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10267 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10268 [(set_attr "conds" "use")
10269 (set_attr "shift" "1")
10270 (set_attr "length" "8")
10271 (set (attr "type") (if_then_else
10272 (and (match_operand 2 "const_int_operand" "")
10273 (match_operand 4 "const_int_operand" ""))
10274 (const_string "alu_shift")
10275 (const_string "alu_shift_reg")))]
10278 (define_insn "*ifcompare_not_arith"
10279 [(set (match_operand:SI 0 "s_register_operand" "=r")
10281 (match_operator 6 "arm_comparison_operator"
10282 [(match_operand:SI 4 "s_register_operand" "r")
10283 (match_operand:SI 5 "arm_add_operand" "rIL")])
10284 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10285 (match_operator:SI 7 "shiftable_operator"
10286 [(match_operand:SI 2 "s_register_operand" "r")
10287 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10288 (clobber (reg:CC CC_REGNUM))]
10291 [(set_attr "conds" "clob")
10292 (set_attr "length" "12")]
10295 (define_insn "*if_not_arith"
10296 [(set (match_operand:SI 0 "s_register_operand" "=r")
10298 (match_operator 5 "arm_comparison_operator"
10299 [(match_operand 4 "cc_register" "") (const_int 0)])
10300 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10301 (match_operator:SI 6 "shiftable_operator"
10302 [(match_operand:SI 2 "s_register_operand" "r")
10303 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10305 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10306 [(set_attr "conds" "use")
10307 (set_attr "length" "8")]
10310 (define_insn "*ifcompare_arith_not"
10311 [(set (match_operand:SI 0 "s_register_operand" "=r")
10313 (match_operator 6 "arm_comparison_operator"
10314 [(match_operand:SI 4 "s_register_operand" "r")
10315 (match_operand:SI 5 "arm_add_operand" "rIL")])
10316 (match_operator:SI 7 "shiftable_operator"
10317 [(match_operand:SI 2 "s_register_operand" "r")
10318 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10319 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10320 (clobber (reg:CC CC_REGNUM))]
10323 [(set_attr "conds" "clob")
10324 (set_attr "length" "12")]
10327 (define_insn "*if_arith_not"
10328 [(set (match_operand:SI 0 "s_register_operand" "=r")
10330 (match_operator 5 "arm_comparison_operator"
10331 [(match_operand 4 "cc_register" "") (const_int 0)])
10332 (match_operator:SI 6 "shiftable_operator"
10333 [(match_operand:SI 2 "s_register_operand" "r")
10334 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10335 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10337 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10338 [(set_attr "conds" "use")
10339 (set_attr "length" "8")]
10342 (define_insn "*ifcompare_neg_move"
10343 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10345 (match_operator 5 "arm_comparison_operator"
10346 [(match_operand:SI 3 "s_register_operand" "r,r")
10347 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10348 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10349 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10350 (clobber (reg:CC CC_REGNUM))]
10353 [(set_attr "conds" "clob")
10354 (set_attr "length" "8,12")]
10357 (define_insn "*if_neg_move"
10358 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10360 (match_operator 4 "arm_comparison_operator"
10361 [(match_operand 3 "cc_register" "") (const_int 0)])
10362 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10363 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10366 rsb%d4\\t%0, %2, #0
10367 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10368 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10369 [(set_attr "conds" "use")
10370 (set_attr "length" "4,8,8")]
10373 (define_insn "*ifcompare_move_neg"
10374 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10376 (match_operator 5 "arm_comparison_operator"
10377 [(match_operand:SI 3 "s_register_operand" "r,r")
10378 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10379 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10380 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10381 (clobber (reg:CC CC_REGNUM))]
10384 [(set_attr "conds" "clob")
10385 (set_attr "length" "8,12")]
10388 (define_insn "*if_move_neg"
10389 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10391 (match_operator 4 "arm_comparison_operator"
10392 [(match_operand 3 "cc_register" "") (const_int 0)])
10393 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10394 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10397 rsb%D4\\t%0, %2, #0
10398 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10399 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10400 [(set_attr "conds" "use")
10401 (set_attr "length" "4,8,8")]
10404 (define_insn "*arith_adjacentmem"
10405 [(set (match_operand:SI 0 "s_register_operand" "=r")
10406 (match_operator:SI 1 "shiftable_operator"
10407 [(match_operand:SI 2 "memory_operand" "m")
10408 (match_operand:SI 3 "memory_operand" "m")]))
10409 (clobber (match_scratch:SI 4 "=r"))]
10410 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10416 HOST_WIDE_INT val1 = 0, val2 = 0;
10418 if (REGNO (operands[0]) > REGNO (operands[4]))
10420 ldm[1] = operands[4];
10421 ldm[2] = operands[0];
10425 ldm[1] = operands[0];
10426 ldm[2] = operands[4];
10429 base_reg = XEXP (operands[2], 0);
10431 if (!REG_P (base_reg))
10433 val1 = INTVAL (XEXP (base_reg, 1));
10434 base_reg = XEXP (base_reg, 0);
10437 if (!REG_P (XEXP (operands[3], 0)))
10438 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10440 arith[0] = operands[0];
10441 arith[3] = operands[1];
10455 if (val1 !=0 && val2 != 0)
10459 if (val1 == 4 || val2 == 4)
10460 /* Other val must be 8, since we know they are adjacent and neither
10462 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10463 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10465 ldm[0] = ops[0] = operands[4];
10467 ops[2] = GEN_INT (val1);
10468 output_add_immediate (ops);
10470 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10472 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10476 /* Offset is out of range for a single add, so use two ldr. */
10479 ops[2] = GEN_INT (val1);
10480 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10482 ops[2] = GEN_INT (val2);
10483 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10486 else if (val1 != 0)
10489 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10491 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10496 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10498 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10500 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10503 [(set_attr "length" "12")
10504 (set_attr "predicable" "yes")
10505 (set_attr "type" "load1")]
10508 ; This pattern is never tried by combine, so do it as a peephole
10511 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10512 (match_operand:SI 1 "arm_general_register_operand" ""))
10513 (set (reg:CC CC_REGNUM)
10514 (compare:CC (match_dup 1) (const_int 0)))]
10516 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10517 (set (match_dup 0) (match_dup 1))])]
10521 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10522 ; reversed, check that the memory references aren't volatile.
10525 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10526 (match_operand:SI 4 "memory_operand" "m"))
10527 (set (match_operand:SI 1 "s_register_operand" "=rk")
10528 (match_operand:SI 5 "memory_operand" "m"))
10529 (set (match_operand:SI 2 "s_register_operand" "=rk")
10530 (match_operand:SI 6 "memory_operand" "m"))
10531 (set (match_operand:SI 3 "s_register_operand" "=rk")
10532 (match_operand:SI 7 "memory_operand" "m"))]
10533 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10535 return emit_ldm_seq (operands, 4);
10540 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10541 (match_operand:SI 3 "memory_operand" "m"))
10542 (set (match_operand:SI 1 "s_register_operand" "=rk")
10543 (match_operand:SI 4 "memory_operand" "m"))
10544 (set (match_operand:SI 2 "s_register_operand" "=rk")
10545 (match_operand:SI 5 "memory_operand" "m"))]
10546 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10548 return emit_ldm_seq (operands, 3);
10553 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10554 (match_operand:SI 2 "memory_operand" "m"))
10555 (set (match_operand:SI 1 "s_register_operand" "=rk")
10556 (match_operand:SI 3 "memory_operand" "m"))]
10557 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10559 return emit_ldm_seq (operands, 2);
10564 [(set (match_operand:SI 4 "memory_operand" "=m")
10565 (match_operand:SI 0 "s_register_operand" "rk"))
10566 (set (match_operand:SI 5 "memory_operand" "=m")
10567 (match_operand:SI 1 "s_register_operand" "rk"))
10568 (set (match_operand:SI 6 "memory_operand" "=m")
10569 (match_operand:SI 2 "s_register_operand" "rk"))
10570 (set (match_operand:SI 7 "memory_operand" "=m")
10571 (match_operand:SI 3 "s_register_operand" "rk"))]
10572 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10574 return emit_stm_seq (operands, 4);
10579 [(set (match_operand:SI 3 "memory_operand" "=m")
10580 (match_operand:SI 0 "s_register_operand" "rk"))
10581 (set (match_operand:SI 4 "memory_operand" "=m")
10582 (match_operand:SI 1 "s_register_operand" "rk"))
10583 (set (match_operand:SI 5 "memory_operand" "=m")
10584 (match_operand:SI 2 "s_register_operand" "rk"))]
10585 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10587 return emit_stm_seq (operands, 3);
10592 [(set (match_operand:SI 2 "memory_operand" "=m")
10593 (match_operand:SI 0 "s_register_operand" "rk"))
10594 (set (match_operand:SI 3 "memory_operand" "=m")
10595 (match_operand:SI 1 "s_register_operand" "rk"))]
10596 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10598 return emit_stm_seq (operands, 2);
10603 [(set (match_operand:SI 0 "s_register_operand" "")
10604 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10606 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10607 [(match_operand:SI 3 "s_register_operand" "")
10608 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10609 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10611 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10612 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10617 ;; This split can be used because CC_Z mode implies that the following
10618 ;; branch will be an equality, or an unsigned inequality, so the sign
10619 ;; extension is not needed.
10622 [(set (reg:CC_Z CC_REGNUM)
10624 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10626 (match_operand 1 "const_int_operand" "")))
10627 (clobber (match_scratch:SI 2 ""))]
10629 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10630 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10631 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10632 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10634 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10637 ;; ??? Check the patterns above for Thumb-2 usefulness
10639 (define_expand "prologue"
10640 [(clobber (const_int 0))]
10643 arm_expand_prologue ();
10645 thumb1_expand_prologue ();
10650 (define_expand "epilogue"
10651 [(clobber (const_int 0))]
10654 if (crtl->calls_eh_return)
10655 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10657 thumb1_expand_epilogue ();
10658 else if (USE_RETURN_INSN (FALSE))
10660 emit_jump_insn (gen_return ());
10663 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10665 gen_rtx_RETURN (VOIDmode)),
10666 VUNSPEC_EPILOGUE));
10671 ;; Note - although unspec_volatile's USE all hard registers,
10672 ;; USEs are ignored after relaod has completed. Thus we need
10673 ;; to add an unspec of the link register to ensure that flow
10674 ;; does not think that it is unused by the sibcall branch that
10675 ;; will replace the standard function epilogue.
10676 (define_insn "sibcall_epilogue"
10677 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10678 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10681 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10682 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10683 return arm_output_epilogue (next_nonnote_insn (insn));
10685 ;; Length is absolute worst case
10686 [(set_attr "length" "44")
10687 (set_attr "type" "block")
10688 ;; We don't clobber the conditions, but the potential length of this
10689 ;; operation is sufficient to make conditionalizing the sequence
10690 ;; unlikely to be profitable.
10691 (set_attr "conds" "clob")]
10694 (define_insn "*epilogue_insns"
10695 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10699 return arm_output_epilogue (NULL);
10700 else /* TARGET_THUMB1 */
10701 return thumb_unexpanded_epilogue ();
10703 ; Length is absolute worst case
10704 [(set_attr "length" "44")
10705 (set_attr "type" "block")
10706 ;; We don't clobber the conditions, but the potential length of this
10707 ;; operation is sufficient to make conditionalizing the sequence
10708 ;; unlikely to be profitable.
10709 (set_attr "conds" "clob")]
10712 (define_expand "eh_epilogue"
10713 [(use (match_operand:SI 0 "register_operand" ""))
10714 (use (match_operand:SI 1 "register_operand" ""))
10715 (use (match_operand:SI 2 "register_operand" ""))]
10719 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10720 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10722 rtx ra = gen_rtx_REG (Pmode, 2);
10724 emit_move_insn (ra, operands[2]);
10727 /* This is a hack -- we may have crystalized the function type too
10729 cfun->machine->func_type = 0;
10733 ;; This split is only used during output to reduce the number of patterns
10734 ;; that need assembler instructions adding to them. We allowed the setting
10735 ;; of the conditions to be implicit during rtl generation so that
10736 ;; the conditional compare patterns would work. However this conflicts to
10737 ;; some extent with the conditional data operations, so we have to split them
10740 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10741 ;; conditional execution sufficient?
10744 [(set (match_operand:SI 0 "s_register_operand" "")
10745 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10746 [(match_operand 2 "" "") (match_operand 3 "" "")])
10748 (match_operand 4 "" "")))
10749 (clobber (reg:CC CC_REGNUM))]
10750 "TARGET_ARM && reload_completed"
10751 [(set (match_dup 5) (match_dup 6))
10752 (cond_exec (match_dup 7)
10753 (set (match_dup 0) (match_dup 4)))]
10756 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10757 operands[2], operands[3]);
10758 enum rtx_code rc = GET_CODE (operands[1]);
10760 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10761 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10762 if (mode == CCFPmode || mode == CCFPEmode)
10763 rc = reverse_condition_maybe_unordered (rc);
10765 rc = reverse_condition (rc);
10767 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10772 [(set (match_operand:SI 0 "s_register_operand" "")
10773 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10774 [(match_operand 2 "" "") (match_operand 3 "" "")])
10775 (match_operand 4 "" "")
10777 (clobber (reg:CC CC_REGNUM))]
10778 "TARGET_ARM && reload_completed"
10779 [(set (match_dup 5) (match_dup 6))
10780 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10781 (set (match_dup 0) (match_dup 4)))]
10784 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10785 operands[2], operands[3]);
10787 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10788 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10793 [(set (match_operand:SI 0 "s_register_operand" "")
10794 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10795 [(match_operand 2 "" "") (match_operand 3 "" "")])
10796 (match_operand 4 "" "")
10797 (match_operand 5 "" "")))
10798 (clobber (reg:CC CC_REGNUM))]
10799 "TARGET_ARM && reload_completed"
10800 [(set (match_dup 6) (match_dup 7))
10801 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10802 (set (match_dup 0) (match_dup 4)))
10803 (cond_exec (match_dup 8)
10804 (set (match_dup 0) (match_dup 5)))]
10807 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10808 operands[2], operands[3]);
10809 enum rtx_code rc = GET_CODE (operands[1]);
10811 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10812 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10813 if (mode == CCFPmode || mode == CCFPEmode)
10814 rc = reverse_condition_maybe_unordered (rc);
10816 rc = reverse_condition (rc);
10818 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10823 [(set (match_operand:SI 0 "s_register_operand" "")
10824 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10825 [(match_operand:SI 2 "s_register_operand" "")
10826 (match_operand:SI 3 "arm_add_operand" "")])
10827 (match_operand:SI 4 "arm_rhs_operand" "")
10829 (match_operand:SI 5 "s_register_operand" ""))))
10830 (clobber (reg:CC CC_REGNUM))]
10831 "TARGET_ARM && reload_completed"
10832 [(set (match_dup 6) (match_dup 7))
10833 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10834 (set (match_dup 0) (match_dup 4)))
10835 (cond_exec (match_dup 8)
10836 (set (match_dup 0) (not:SI (match_dup 5))))]
10839 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10840 operands[2], operands[3]);
10841 enum rtx_code rc = GET_CODE (operands[1]);
10843 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10844 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10845 if (mode == CCFPmode || mode == CCFPEmode)
10846 rc = reverse_condition_maybe_unordered (rc);
10848 rc = reverse_condition (rc);
10850 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10854 (define_insn "*cond_move_not"
10855 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10856 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10857 [(match_operand 3 "cc_register" "") (const_int 0)])
10858 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10860 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10864 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10865 [(set_attr "conds" "use")
10866 (set_attr "length" "4,8")]
10869 ;; The next two patterns occur when an AND operation is followed by a
10870 ;; scc insn sequence
10872 (define_insn "*sign_extract_onebit"
10873 [(set (match_operand:SI 0 "s_register_operand" "=r")
10874 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10876 (match_operand:SI 2 "const_int_operand" "n")))
10877 (clobber (reg:CC CC_REGNUM))]
10880 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10881 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10882 return \"mvnne\\t%0, #0\";
10884 [(set_attr "conds" "clob")
10885 (set_attr "length" "8")]
10888 (define_insn "*not_signextract_onebit"
10889 [(set (match_operand:SI 0 "s_register_operand" "=r")
10891 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10893 (match_operand:SI 2 "const_int_operand" "n"))))
10894 (clobber (reg:CC CC_REGNUM))]
10897 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10898 output_asm_insn (\"tst\\t%1, %2\", operands);
10899 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10900 return \"movne\\t%0, #0\";
10902 [(set_attr "conds" "clob")
10903 (set_attr "length" "12")]
10905 ;; ??? The above patterns need auditing for Thumb-2
10907 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10908 ;; expressions. For simplicity, the first register is also in the unspec
10910 (define_insn "*push_multi"
10911 [(match_parallel 2 "multi_register_push"
10912 [(set (match_operand:BLK 0 "memory_operand" "=m")
10913 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10914 UNSPEC_PUSH_MULT))])]
10918 int num_saves = XVECLEN (operands[2], 0);
10920 /* For the StrongARM at least it is faster to
10921 use STR to store only a single register.
10922 In Thumb mode always use push, and the assembler will pick
10923 something appropriate. */
10924 if (num_saves == 1 && TARGET_ARM)
10925 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10932 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10934 strcpy (pattern, \"push\\t{%1\");
10936 for (i = 1; i < num_saves; i++)
10938 strcat (pattern, \", %|\");
10940 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10943 strcat (pattern, \"}\");
10944 output_asm_insn (pattern, operands);
10949 [(set_attr "type" "store4")]
10952 (define_insn "stack_tie"
10953 [(set (mem:BLK (scratch))
10954 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10955 (match_operand:SI 1 "s_register_operand" "rk")]
10959 [(set_attr "length" "0")]
10962 ;; Similarly for the floating point registers
10963 (define_insn "*push_fp_multi"
10964 [(match_parallel 2 "multi_register_push"
10965 [(set (match_operand:BLK 0 "memory_operand" "=m")
10966 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10967 UNSPEC_PUSH_MULT))])]
10968 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10973 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10974 output_asm_insn (pattern, operands);
10977 [(set_attr "type" "f_store")]
10980 ;; Special patterns for dealing with the constant pool
10982 (define_insn "align_4"
10983 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10986 assemble_align (32);
10991 (define_insn "align_8"
10992 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10995 assemble_align (64);
11000 (define_insn "consttable_end"
11001 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11004 making_const_table = FALSE;
11009 (define_insn "consttable_1"
11010 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11013 making_const_table = TRUE;
11014 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11015 assemble_zeros (3);
11018 [(set_attr "length" "4")]
11021 (define_insn "consttable_2"
11022 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11025 making_const_table = TRUE;
11026 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
11027 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11028 assemble_zeros (2);
11031 [(set_attr "length" "4")]
11034 (define_insn "consttable_4"
11035 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11039 rtx x = operands[0];
11040 making_const_table = TRUE;
11041 switch (GET_MODE_CLASS (GET_MODE (x)))
11044 if (GET_MODE (x) == HFmode)
11045 arm_emit_fp16_const (x);
11049 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
11050 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
11054 /* XXX: Sometimes gcc does something really dumb and ends up with
11055 a HIGH in a constant pool entry, usually because it's trying to
11056 load into a VFP register. We know this will always be used in
11057 combination with a LO_SUM which ignores the high bits, so just
11058 strip off the HIGH. */
11059 if (GET_CODE (x) == HIGH)
11061 assemble_integer (x, 4, BITS_PER_WORD, 1);
11062 mark_symbol_refs_as_used (x);
11067 [(set_attr "length" "4")]
11070 (define_insn "consttable_8"
11071 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11075 making_const_table = TRUE;
11076 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11081 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11082 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11086 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11091 [(set_attr "length" "8")]
11094 (define_insn "consttable_16"
11095 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11099 making_const_table = TRUE;
11100 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11105 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11106 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11110 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11115 [(set_attr "length" "16")]
11118 ;; Miscellaneous Thumb patterns
11120 (define_expand "tablejump"
11121 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11122 (use (label_ref (match_operand 1 "" "")))])]
11127 /* Hopefully, CSE will eliminate this copy. */
11128 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11129 rtx reg2 = gen_reg_rtx (SImode);
11131 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11132 operands[0] = reg2;
11137 ;; NB never uses BX.
11138 (define_insn "*thumb1_tablejump"
11139 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11140 (use (label_ref (match_operand 1 "" "")))]
11143 [(set_attr "length" "2")]
11146 ;; V5 Instructions,
11148 (define_insn "clzsi2"
11149 [(set (match_operand:SI 0 "s_register_operand" "=r")
11150 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11151 "TARGET_32BIT && arm_arch5"
11153 [(set_attr "predicable" "yes")
11154 (set_attr "insn" "clz")])
11156 (define_insn "rbitsi2"
11157 [(set (match_operand:SI 0 "s_register_operand" "=r")
11158 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11159 "TARGET_32BIT && arm_arch_thumb2"
11161 [(set_attr "predicable" "yes")
11162 (set_attr "insn" "clz")])
11164 (define_expand "ctzsi2"
11165 [(set (match_operand:SI 0 "s_register_operand" "")
11166 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11167 "TARGET_32BIT && arm_arch_thumb2"
11170 rtx tmp = gen_reg_rtx (SImode);
11171 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11172 emit_insn (gen_clzsi2 (operands[0], tmp));
11178 ;; V5E instructions.
11180 (define_insn "prefetch"
11181 [(prefetch (match_operand:SI 0 "address_operand" "p")
11182 (match_operand:SI 1 "" "")
11183 (match_operand:SI 2 "" ""))]
11184 "TARGET_32BIT && arm_arch5e"
11187 ;; General predication pattern
11190 [(match_operator 0 "arm_comparison_operator"
11191 [(match_operand 1 "cc_register" "")
11197 (define_insn "prologue_use"
11198 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11200 "%@ %0 needed for prologue"
11201 [(set_attr "length" "0")]
11205 ;; Patterns for exception handling
11207 (define_expand "eh_return"
11208 [(use (match_operand 0 "general_operand" ""))]
11213 emit_insn (gen_arm_eh_return (operands[0]));
11215 emit_insn (gen_thumb_eh_return (operands[0]));
11220 ;; We can't expand this before we know where the link register is stored.
11221 (define_insn_and_split "arm_eh_return"
11222 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11224 (clobber (match_scratch:SI 1 "=&r"))]
11227 "&& reload_completed"
11231 arm_set_return_address (operands[0], operands[1]);
11236 (define_insn_and_split "thumb_eh_return"
11237 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11239 (clobber (match_scratch:SI 1 "=&l"))]
11242 "&& reload_completed"
11246 thumb_set_return_address (operands[0], operands[1]);
11254 (define_insn "load_tp_hard"
11255 [(set (match_operand:SI 0 "register_operand" "=r")
11256 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11258 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11259 [(set_attr "predicable" "yes")]
11262 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11263 (define_insn "load_tp_soft"
11264 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11265 (clobber (reg:SI LR_REGNUM))
11266 (clobber (reg:SI IP_REGNUM))
11267 (clobber (reg:CC CC_REGNUM))]
11269 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11270 [(set_attr "conds" "clob")]
11273 (define_insn "*arm_movtas_ze"
11274 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11277 (match_operand:SI 1 "const_int_operand" ""))]
11280 [(set_attr "predicable" "yes")
11281 (set_attr "length" "4")]
11284 (define_insn "arm_rev"
11285 [(set (match_operand:SI 0 "s_register_operand" "=r")
11286 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11287 "TARGET_EITHER && arm_arch6"
11289 [(set (attr "length")
11290 (if_then_else (eq_attr "is_thumb" "yes")
11295 (define_expand "arm_legacy_rev"
11296 [(set (match_operand:SI 2 "s_register_operand" "")
11297 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11301 (lshiftrt:SI (match_dup 2)
11303 (set (match_operand:SI 3 "s_register_operand" "")
11304 (rotatert:SI (match_dup 1)
11307 (and:SI (match_dup 2)
11308 (const_int -65281)))
11309 (set (match_operand:SI 0 "s_register_operand" "")
11310 (xor:SI (match_dup 3)
11316 ;; Reuse temporaries to keep register pressure down.
11317 (define_expand "thumb_legacy_rev"
11318 [(set (match_operand:SI 2 "s_register_operand" "")
11319 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11321 (set (match_operand:SI 3 "s_register_operand" "")
11322 (lshiftrt:SI (match_dup 1)
11325 (ior:SI (match_dup 3)
11327 (set (match_operand:SI 4 "s_register_operand" "")
11329 (set (match_operand:SI 5 "s_register_operand" "")
11330 (rotatert:SI (match_dup 1)
11333 (ashift:SI (match_dup 5)
11336 (lshiftrt:SI (match_dup 5)
11339 (ior:SI (match_dup 5)
11342 (rotatert:SI (match_dup 5)
11344 (set (match_operand:SI 0 "s_register_operand" "")
11345 (ior:SI (match_dup 5)
11351 (define_expand "bswapsi2"
11352 [(set (match_operand:SI 0 "s_register_operand" "=r")
11353 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11358 if (!optimize_size)
11360 rtx op2 = gen_reg_rtx (SImode);
11361 rtx op3 = gen_reg_rtx (SImode);
11365 rtx op4 = gen_reg_rtx (SImode);
11366 rtx op5 = gen_reg_rtx (SImode);
11368 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11369 op2, op3, op4, op5));
11373 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11385 ;; Load the FPA co-processor patterns
11387 ;; Load the Maverick co-processor patterns
11388 (include "cirrus.md")
11389 ;; Vector bits common to IWMMXT and Neon
11390 (include "vec-common.md")
11391 ;; Load the Intel Wireless Multimedia Extension patterns
11392 (include "iwmmxt.md")
11393 ;; Load the VFP co-processor patterns
11395 ;; Thumb-2 patterns
11396 (include "thumb2.md")
11398 (include "neon.md")