1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
105 ;; UNSPEC_VOLATILE Usage:
108 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
110 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
111 ; instruction epilogue sequence that isn't expanded
112 ; into normal RTL. Used for both normal and sibcall
114 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
115 ; for inlined constants.
116 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
118 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
120 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
122 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
124 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
126 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
128 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
129 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
130 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
131 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
132 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
133 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
134 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
139 ;;---------------------------------------------------------------------------
; IS_THUMB is "yes" when Thumb code is being generated and "no" for
; ARM code; patterns whose RTL is shared between the two instruction
; sets use it to choose the correct insn length.
(define_attr "is_thumb" "no,yes"
  (const (symbol_ref "thumb_code")))
; IS_STRONGARM is "yes" when tuning for StrongARM; scheduling
; decisions for the load unit and the multiplier depend on it.
(define_attr "is_strongarm" "no,yes"
  (const (symbol_ref "arm_tune_strongarm")))
; IS_XSCALE is "yes" when tuning for XScale.
(define_attr "is_xscale" "no,yes"
  (const (symbol_ref "arm_tune_xscale")))
;; SHIFT holds the operand number of the input operand that is
;; shifted, or zero when the instruction shifts none of its inputs.
(define_attr "shift" "" (const_int 0))
; FPU: the floating point unit in use. When we only have floating
; point emulation there is no point in scheduling the FP insns
; (although for best performance grouping them together still helps).
(define_attr "fpu"
  "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon"
  (const (symbol_ref "arm_fpu_attr")))
; LENGTH of an instruction, in bytes.
(define_attr "length" "" (const_int 4))
; POOL_RANGE is the maximum distance from a constant pool entry at
; which this insn may be placed; zero means the insn never references
; the pool.
; NEG_POOL_RANGE is nonzero for insns that can reference a constant
; pool entry that appears before the insn's own address.
(define_attr "pool_range" "" (const_int 0))
(define_attr "neg_pool_range" "" (const_int 0))
; An inline assembler sequence may clobber the condition codes without
; our knowledge; and if such a sequence references the pool we cannot
; tell how, so assume the most conservative pool_range as well.
(define_asm_attributes
 [(set_attr "conds" "clob")
  (set_attr "length" "4")
  (set_attr "pool_range" "250")])
183 ;; The instruction used to implement a particular pattern. This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
191 ; TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor can run in parallel with other, basic instructions
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
; LDSCHED: load scheduling; taken from the arm_ld_sched variable,
; which arm_override_options() initializes.
(define_attr "ldsched" "no,yes"
  (const (symbol_ref "arm_ld_sched")))
253 ;; Classification of NEON instructions for scheduling purposes.
254 ;; Do not set this attribute and the "type" attribute together in
255 ;; any one instruction pattern.
256 (define_attr "neon_type"
267 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mul_qqq_8_16_32_ddd_32,\
269 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
270 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
272 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
273 neon_mla_qqq_32_qqd_32_scalar,\
274 neon_mul_ddd_16_scalar_32_16_long_scalar,\
275 neon_mul_qqd_32_scalar,\
276 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
281 neon_vqshl_vrshl_vqrshl_qqq,\
283 neon_fp_vadd_ddd_vabs_dd,\
284 neon_fp_vadd_qqq_vabs_qq,\
290 neon_fp_vmla_ddd_scalar,\
291 neon_fp_vmla_qqq_scalar,\
292 neon_fp_vrecps_vrsqrts_ddd,\
293 neon_fp_vrecps_vrsqrts_qqq,\
301 neon_vld2_2_regs_vld1_vld2_all_lanes,\
304 neon_vst1_1_2_regs_vst2_2_regs,\
306 neon_vst2_4_regs_vst3_vst4,\
308 neon_vld1_vld2_lane,\
309 neon_vld3_vld4_lane,\
310 neon_vst1_vst2_lane,\
311 neon_vst3_vst4_lane,\
312 neon_vld3_vld4_all_lanes,\
320 (const_string "none"))
322 ; condition codes: this one is used by final_prescan_insn to speed up
323 ; conditionalizing instructions. It saves having to scan the rtl to see if
324 ; it uses or alters the condition codes.
326 ; USE means that the condition codes are used by the insn in the process of
327 ; outputting code, this means (at present) that we can't use the insn in
330 ; SET means that the purpose of the insn is to set the condition codes in a
331 ; well defined manner.
333 ; CLOB means that the condition codes are altered in an undefined manner, if
334 ; they are altered at all
336 ; JUMP_CLOB is used when the condition cannot be represented by a single
337 ; instruction (UNEQ and LTGT). These cannot be predicated.
339 ;; UNCONDITIONAL means the insns cannot be conditionally executed.
341 ; NOCOND means that the condition codes are neither altered nor affect the
342 ; output of this insn
(define_attr "conds" "use,set,clob,jump_clob,unconditional,nocond"
  (cond [(eq_attr "type" "call")
	   (const_string "clob")
	 (eq_attr "neon_type" "none")
	   (const_string "nocond")]
	(const_string "unconditional")))
; PREDICABLE means the insn can be conditionally executed under an
; automatically added predicate (the extra patterns are generated by
; gen...). The default is "no" because no Thumb pattern matches this
; rule and not every ARM pattern does either.
(define_attr "predicable" "no,yes" (const_string "no"))
; Model the write buffer only for ARM6 and ARM7. Earlier processors
; have none; later ones such as StrongARM have write-back caches and
; do not suffer blockages enough to warrant modelling this (which can
; even hurt the schedule).
(define_attr "model_wbuf" "no,yes"
  (const (symbol_ref "arm_tune_wbuf")))
363 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
364 ; to stall the processor. Used with model_wbuf above.
365 (define_attr "write_conflict" "no,yes"
366 (if_then_else (eq_attr "type"
367 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
369 (const_string "no")))
; CORE_CYCLES classifies insns into those taking a single cycle on the
; main CPU execution unit and those taking more than one.
(define_attr "core_cycles" "single,multi"
  (if_then_else
    (eq_attr "type"
	     "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
    (const_string "single")
    (const_string "multi")))
;; FAR_JUMP is "yes" when a BL instruction is used to branch to a
;; distant label. Applicable to Thumb code only.
(define_attr "far_jump" "yes,no" (const_string "no"))
;; CE_COUNT: how many machine instructions this pattern expands to.
;; Used for Thumb-2 conditional execution.
(define_attr "ce_count" "" (const_int 1))
388 ;;---------------------------------------------------------------------------
; ANY64: the modes that are exactly 64 bits in size. Used to expand
; some splits that are the same for all of these modes when operating
; on ARM registers.
(define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
396 ;;---------------------------------------------------------------------------
399 (include "predicates.md")
400 (include "constraints.md")
402 ;;---------------------------------------------------------------------------
403 ;; Pipeline descriptions
405 ;; Processor type. This is created automatically from arm-cores.def.
406 (include "arm-tune.md")
408 (define_attr "tune_cortexr4" "yes,no"
410 (eq_attr "tune" "cortexr4,cortexr4f")
412 (const_string "no"))))
414 ;; True if the generic scheduling description should be used.
416 (define_attr "generic_sched" "yes,no"
418 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
419 (eq_attr "tune_cortexr4" "yes"))
421 (const_string "yes"))))
423 (define_attr "generic_vfp" "yes,no"
425 (and (eq_attr "fpu" "vfp")
426 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
427 (eq_attr "tune_cortexr4" "no"))
429 (const_string "no"))))
431 (include "arm-generic.md")
432 (include "arm926ejs.md")
433 (include "arm1020e.md")
434 (include "arm1026ejs.md")
435 (include "arm1136jfs.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-r4.md")
439 (include "cortex-r4f.md")
443 ;;---------------------------------------------------------------------------
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register, what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451 ;; Cirrus 64bit additions should not be split because we have a native
452 ;; 64bit addition instruction.
454 (define_expand "adddi3"
456 [(set (match_operand:DI 0 "s_register_operand" "")
457 (plus:DI (match_operand:DI 1 "s_register_operand" "")
458 (match_operand:DI 2 "s_register_operand" "")))
459 (clobber (reg:CC CC_REGNUM))])]
462 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
464 if (!cirrus_fp_register (operands[0], DImode))
465 operands[0] = force_reg (DImode, operands[0]);
466 if (!cirrus_fp_register (operands[1], DImode))
467 operands[1] = force_reg (DImode, operands[1]);
468 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
474 if (GET_CODE (operands[1]) != REG)
475 operands[1] = force_reg (DImode, operands[1]);
476 if (GET_CODE (operands[2]) != REG)
477 operands[2] = force_reg (DImode, operands[2]);
482 (define_insn "*thumb1_adddi3"
483 [(set (match_operand:DI 0 "register_operand" "=l")
484 (plus:DI (match_operand:DI 1 "register_operand" "%0")
485 (match_operand:DI 2 "register_operand" "l")))
486 (clobber (reg:CC CC_REGNUM))
489 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
490 [(set_attr "length" "4")]
493 (define_insn_and_split "*arm_adddi3"
494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
495 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
496 (match_operand:DI 2 "s_register_operand" "r, 0")))
497 (clobber (reg:CC CC_REGNUM))]
498 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
500 "TARGET_32BIT && reload_completed"
501 [(parallel [(set (reg:CC_C CC_REGNUM)
502 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
504 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
505 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
506 (plus:SI (match_dup 4) (match_dup 5))))]
509 operands[3] = gen_highpart (SImode, operands[0]);
510 operands[0] = gen_lowpart (SImode, operands[0]);
511 operands[4] = gen_highpart (SImode, operands[1]);
512 operands[1] = gen_lowpart (SImode, operands[1]);
513 operands[5] = gen_highpart (SImode, operands[2]);
514 operands[2] = gen_lowpart (SImode, operands[2]);
516 [(set_attr "conds" "clob")
517 (set_attr "length" "8")]
520 (define_insn_and_split "*adddi_sesidi_di"
521 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
522 (plus:DI (sign_extend:DI
523 (match_operand:SI 2 "s_register_operand" "r,r"))
524 (match_operand:DI 1 "s_register_operand" "r,0")))
525 (clobber (reg:CC CC_REGNUM))]
526 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
528 "TARGET_32BIT && reload_completed"
529 [(parallel [(set (reg:CC_C CC_REGNUM)
530 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
532 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
533 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
534 (plus:SI (ashiftrt:SI (match_dup 2)
539 operands[3] = gen_highpart (SImode, operands[0]);
540 operands[0] = gen_lowpart (SImode, operands[0]);
541 operands[4] = gen_highpart (SImode, operands[1]);
542 operands[1] = gen_lowpart (SImode, operands[1]);
543 operands[2] = gen_lowpart (SImode, operands[2]);
545 [(set_attr "conds" "clob")
546 (set_attr "length" "8")]
549 (define_insn_and_split "*adddi_zesidi_di"
550 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
551 (plus:DI (zero_extend:DI
552 (match_operand:SI 2 "s_register_operand" "r,r"))
553 (match_operand:DI 1 "s_register_operand" "r,0")))
554 (clobber (reg:CC CC_REGNUM))]
555 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
557 "TARGET_32BIT && reload_completed"
558 [(parallel [(set (reg:CC_C CC_REGNUM)
559 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
561 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
562 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
563 (plus:SI (match_dup 4) (const_int 0))))]
566 operands[3] = gen_highpart (SImode, operands[0]);
567 operands[0] = gen_lowpart (SImode, operands[0]);
568 operands[4] = gen_highpart (SImode, operands[1]);
569 operands[1] = gen_lowpart (SImode, operands[1]);
570 operands[2] = gen_lowpart (SImode, operands[2]);
572 [(set_attr "conds" "clob")
573 (set_attr "length" "8")]
576 (define_expand "addsi3"
577 [(set (match_operand:SI 0 "s_register_operand" "")
578 (plus:SI (match_operand:SI 1 "s_register_operand" "")
579 (match_operand:SI 2 "reg_or_int_operand" "")))]
582 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
584 arm_split_constant (PLUS, SImode, NULL_RTX,
585 INTVAL (operands[2]), operands[0], operands[1],
586 optimize && can_create_pseudo_p ());
592 ; If there is a scratch available, this will be faster than synthesizing the
595 [(match_scratch:SI 3 "r")
596 (set (match_operand:SI 0 "arm_general_register_operand" "")
597 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
598 (match_operand:SI 2 "const_int_operand" "")))]
600 !(const_ok_for_arm (INTVAL (operands[2]))
601 || const_ok_for_arm (-INTVAL (operands[2])))
602 && const_ok_for_arm (~INTVAL (operands[2]))"
603 [(set (match_dup 3) (match_dup 2))
604 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
608 ;; The r/r/k alternative is required when reloading the address
609 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
610 ;; put the duplicated register first, and not try the commutative version.
611 (define_insn_and_split "*arm_addsi3"
612 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
613 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
614 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
624 GET_CODE (operands[2]) == CONST_INT
625 && !(const_ok_for_arm (INTVAL (operands[2]))
626 || const_ok_for_arm (-INTVAL (operands[2])))"
627 [(clobber (const_int 0))]
629 arm_split_constant (PLUS, SImode, curr_insn,
630 INTVAL (operands[2]), operands[0],
634 [(set_attr "length" "4,4,4,4,4,16")
635 (set_attr "predicable" "yes")]
638 ;; Register group 'k' is a single register group containing only the stack
639 ;; register. Trying to reload it will always fail catastrophically,
640 ;; so never allow those alternatives to match if reloading is needed.
642 (define_insn "*thumb1_addsi3"
643 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
644 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
645 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
648 static const char * const asms[] =
650 \"add\\t%0, %0, %2\",
651 \"sub\\t%0, %0, #%n2\",
652 \"add\\t%0, %1, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %1, %2\",
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
664 [(set_attr "length" "2")]
667 ;; Reloading and elimination of the frame pointer can
668 ;; sometimes cause this optimization to be missed.
670 [(set (match_operand:SI 0 "arm_general_register_operand" "")
671 (match_operand:SI 1 "const_int_operand" ""))
673 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
675 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
676 && (INTVAL (operands[1]) & 3) == 0"
677 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
681 ;; ??? Make Thumb-2 variants which prefer low regs
682 (define_insn "*addsi3_compare0"
683 [(set (reg:CC_NOOV CC_REGNUM)
685 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
686 (match_operand:SI 2 "arm_add_operand" "rI,L"))
688 (set (match_operand:SI 0 "s_register_operand" "=r,r")
689 (plus:SI (match_dup 1) (match_dup 2)))]
693 sub%.\\t%0, %1, #%n2"
694 [(set_attr "conds" "set")]
697 (define_insn "*addsi3_compare0_scratch"
698 [(set (reg:CC_NOOV CC_REGNUM)
700 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
701 (match_operand:SI 1 "arm_add_operand" "rI,L"))
707 [(set_attr "conds" "set")]
710 (define_insn "*compare_negsi_si"
711 [(set (reg:CC_Z CC_REGNUM)
713 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
714 (match_operand:SI 1 "s_register_operand" "r")))]
717 [(set_attr "conds" "set")]
720 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
721 ;; addend is a constant.
722 (define_insn "*cmpsi2_addneg"
723 [(set (reg:CC CC_REGNUM)
725 (match_operand:SI 1 "s_register_operand" "r,r")
726 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
727 (set (match_operand:SI 0 "s_register_operand" "=r,r")
728 (plus:SI (match_dup 1)
729 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
730 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
733 add%.\\t%0, %1, #%n2"
734 [(set_attr "conds" "set")]
737 ;; Convert the sequence
739 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
743 ;; bcs dest ((unsigned)rn >= 1)
744 ;; similarly for the beq variant using bcc.
745 ;; This is a common looping idiom (while (n--))
747 [(set (match_operand:SI 0 "arm_general_register_operand" "")
748 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
750 (set (match_operand 2 "cc_register" "")
751 (compare (match_dup 0) (const_int -1)))
753 (if_then_else (match_operator 3 "equality_operator"
754 [(match_dup 2) (const_int 0)])
755 (match_operand 4 "" "")
756 (match_operand 5 "" "")))]
757 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
761 (match_dup 1) (const_int 1)))
762 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
764 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
767 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
768 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
771 operands[2], const0_rtx);"
774 ;; The next four insns work because they compare the result with one of
775 ;; the operands, and we know that the use of the condition code is
776 ;; either GEU or LTU, so we can use the carry flag from the addition
777 ;; instead of doing the compare a second time.
778 (define_insn "*addsi3_compare_op1"
779 [(set (reg:CC_C CC_REGNUM)
781 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
782 (match_operand:SI 2 "arm_add_operand" "rI,L"))
784 (set (match_operand:SI 0 "s_register_operand" "=r,r")
785 (plus:SI (match_dup 1) (match_dup 2)))]
789 sub%.\\t%0, %1, #%n2"
790 [(set_attr "conds" "set")]
793 (define_insn "*addsi3_compare_op2"
794 [(set (reg:CC_C CC_REGNUM)
796 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
797 (match_operand:SI 2 "arm_add_operand" "rI,L"))
799 (set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_dup 1) (match_dup 2)))]
804 sub%.\\t%0, %1, #%n2"
805 [(set_attr "conds" "set")]
808 (define_insn "*compare_addsi2_op0"
809 [(set (reg:CC_C CC_REGNUM)
811 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
812 (match_operand:SI 1 "arm_add_operand" "rI,L"))
818 [(set_attr "conds" "set")]
821 (define_insn "*compare_addsi2_op1"
822 [(set (reg:CC_C CC_REGNUM)
824 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
825 (match_operand:SI 1 "arm_add_operand" "rI,L"))
831 [(set_attr "conds" "set")]
834 (define_insn "*addsi3_carryin"
835 [(set (match_operand:SI 0 "s_register_operand" "=r")
836 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
837 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
838 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
841 [(set_attr "conds" "use")]
844 (define_insn "*addsi3_carryin_shift"
845 [(set (match_operand:SI 0 "s_register_operand" "=r")
846 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
848 (match_operator:SI 2 "shift_operator"
849 [(match_operand:SI 3 "s_register_operand" "r")
850 (match_operand:SI 4 "reg_or_int_operand" "rM")])
851 (match_operand:SI 1 "s_register_operand" "r"))))]
853 "adc%?\\t%0, %1, %3%S2"
854 [(set_attr "conds" "use")
855 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
856 (const_string "alu_shift")
857 (const_string "alu_shift_reg")))]
860 (define_insn "*addsi3_carryin_alt1"
861 [(set (match_operand:SI 0 "s_register_operand" "=r")
862 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
863 (match_operand:SI 2 "arm_rhs_operand" "rI"))
864 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
867 [(set_attr "conds" "use")]
870 (define_insn "*addsi3_carryin_alt2"
871 [(set (match_operand:SI 0 "s_register_operand" "=r")
872 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
873 (match_operand:SI 1 "s_register_operand" "r"))
874 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
877 [(set_attr "conds" "use")]
880 (define_insn "*addsi3_carryin_alt3"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
883 (match_operand:SI 2 "arm_rhs_operand" "rI"))
884 (match_operand:SI 1 "s_register_operand" "r")))]
887 [(set_attr "conds" "use")]
890 (define_expand "incscc"
891 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
892 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
893 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
894 (match_operand:SI 1 "s_register_operand" "0,?r")))]
899 (define_insn "*arm_incscc"
900 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
901 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
902 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
903 (match_operand:SI 1 "s_register_operand" "0,?r")))]
907 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
908 [(set_attr "conds" "use")
909 (set_attr "length" "4,8")]
912 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
914 [(set (match_operand:SI 0 "s_register_operand" "")
915 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
916 (match_operand:SI 2 "s_register_operand" ""))
918 (clobber (match_operand:SI 3 "s_register_operand" ""))]
920 [(set (match_dup 3) (match_dup 1))
921 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
923 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
926 (define_expand "addsf3"
927 [(set (match_operand:SF 0 "s_register_operand" "")
928 (plus:SF (match_operand:SF 1 "s_register_operand" "")
929 (match_operand:SF 2 "arm_float_add_operand" "")))]
930 "TARGET_32BIT && TARGET_HARD_FLOAT"
933 && !cirrus_fp_register (operands[2], SFmode))
934 operands[2] = force_reg (SFmode, operands[2]);
937 (define_expand "adddf3"
938 [(set (match_operand:DF 0 "s_register_operand" "")
939 (plus:DF (match_operand:DF 1 "s_register_operand" "")
940 (match_operand:DF 2 "arm_float_add_operand" "")))]
941 "TARGET_32BIT && TARGET_HARD_FLOAT"
944 && !cirrus_fp_register (operands[2], DFmode))
945 operands[2] = force_reg (DFmode, operands[2]);
;; 64-bit subtract expander.  Dispatches to the Cirrus (Maverick)
;; subdi3 insn when the FP-register tests succeed; otherwise operands
;; that are not registers are forced into registers.
948 (define_expand "subdi3"
950 [(set (match_operand:DI 0 "s_register_operand" "")
951 (minus:DI (match_operand:DI 1 "s_register_operand" "")
952 (match_operand:DI 2 "s_register_operand" "")))
953 (clobber (reg:CC CC_REGNUM))])]
956 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
958 && cirrus_fp_register (operands[0], DImode)
959 && cirrus_fp_register (operands[1], DImode))
961 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
;; NOTE(review): DImode operands are forced with SImode force_reg below;
;; this looks suspicious -- confirm against the full file / history.
967 if (GET_CODE (operands[1]) != REG)
968 operands[1] = force_reg (SImode, operands[1]);
969 if (GET_CODE (operands[2]) != REG)
970 operands[2] = force_reg (SImode, operands[2]);
;; 64-bit subtract: low words with SUBS (sets carry/borrow), high words
;; with SBC so the borrow propagates.  %Q = low word, %R = high word.
975 (define_insn "*arm_subdi3"
976 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
977 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
978 (match_operand:DI 2 "s_register_operand" "r,0,0")))
979 (clobber (reg:CC CC_REGNUM))]
981 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
982 [(set_attr "conds" "clob")
983 (set_attr "length" "8")]
;; Thumb-1 variant: two 16-bit insns (length 4); the destination is
;; tied to operand 1 ("0" constraint).
986 (define_insn "*thumb_subdi3"
987 [(set (match_operand:DI 0 "register_operand" "=l")
988 (minus:DI (match_operand:DI 1 "register_operand" "0")
989 (match_operand:DI 2 "register_operand" "l")))
990 (clobber (reg:CC CC_REGNUM))]
992 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
993 [(set_attr "length" "4")]
;; DI minus zero-extended SI: the high word only subtracts the borrow
;; ("sbc ..., #0").
996 (define_insn "*subdi_di_zesidi"
997 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
998 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
1000 (match_operand:SI 2 "s_register_operand" "r,r"))))
1001 (clobber (reg:CC CC_REGNUM))]
1003 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1004 [(set_attr "conds" "clob")
1005 (set_attr "length" "8")]
;; DI minus sign-extended SI: the high word subtracts the sign word,
;; obtained on the fly as "%2, asr #31".
1008 (define_insn "*subdi_di_sesidi"
1009 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1010 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1012 (match_operand:SI 2 "s_register_operand" "r,r"))))
1013 (clobber (reg:CC CC_REGNUM))]
1015 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1016 [(set_attr "conds" "clob")
1017 (set_attr "length" "8")]
;; Zero-extended SI minus DI: implemented as a reversed subtract
;; (RSBS/RSC) so the DI operand can be used directly as %1.
1020 (define_insn "*subdi_zesidi_di"
1021 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1022 (minus:DI (zero_extend:DI
1023 (match_operand:SI 2 "s_register_operand" "r,r"))
1024 (match_operand:DI 1 "s_register_operand" "?r,0")))
1025 (clobber (reg:CC CC_REGNUM))]
1027 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1028 [(set_attr "conds" "clob")
1029 (set_attr "length" "8")]
;; Sign-extended SI minus DI: as above, but the high word reverses the
;; subtract against the sign word "%2, asr #31".
1032 (define_insn "*subdi_sesidi_di"
1033 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1034 (minus:DI (sign_extend:DI
1035 (match_operand:SI 2 "s_register_operand" "r,r"))
1036 (match_operand:DI 1 "s_register_operand" "?r,0")))
1037 (clobber (reg:CC CC_REGNUM))]
1039 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1040 [(set_attr "conds" "clob")
1041 (set_attr "length" "8")]
;; Zero-extended minus zero-extended: the low word is a plain SUBS;
;; "sbc %R0, %1, %1" computes %1 - %1 - borrow, i.e. 0 or -1, which is
;; exactly the required high word.
1044 (define_insn "*subdi_zesidi_zesidi"
1045 [(set (match_operand:DI 0 "s_register_operand" "=r")
1046 (minus:DI (zero_extend:DI
1047 (match_operand:SI 1 "s_register_operand" "r"))
1049 (match_operand:SI 2 "s_register_operand" "r"))))
1050 (clobber (reg:CC CC_REGNUM))]
1052 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1053 [(set_attr "conds" "clob")
1054 (set_attr "length" "8")]
;; 32-bit subtract expander.  A constant minuend is synthesised with
;; arm_split_constant on 32-bit targets; Thumb-1 simply forces it into
;; a register.
1057 (define_expand "subsi3"
1058 [(set (match_operand:SI 0 "s_register_operand" "")
1059 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1060 (match_operand:SI 2 "s_register_operand" "")))]
1063 if (GET_CODE (operands[1]) == CONST_INT)
1067 arm_split_constant (MINUS, SImode, NULL_RTX,
1068 INTVAL (operands[1]), operands[0],
1069 operands[2], optimize && can_create_pseudo_p ());
1072 else /* TARGET_THUMB1 */
1073 operands[1] = force_reg (SImode, operands[1]);
;; Thumb-1 register-register subtract (single 16-bit insn).
1078 (define_insn "*thumb1_subsi3_insn"
1079 [(set (match_operand:SI 0 "register_operand" "=l")
1080 (minus:SI (match_operand:SI 1 "register_operand" "l")
1081 (match_operand:SI 2 "register_operand" "l")))]
1084 [(set_attr "length" "2")]
1087 ; ??? Check Thumb-2 split length
;; 32-bit subtract insn.  The third alternative (?n) accepts any
;; constant minuend and is split after matching via arm_split_constant
;; when the value is not encodable as an immediate.
1088 (define_insn_and_split "*arm_subsi3_insn"
1089 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1090 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1091 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1098 && GET_CODE (operands[1]) == CONST_INT
1099 && !const_ok_for_arm (INTVAL (operands[1]))"
1100 [(clobber (const_int 0))]
1102 arm_split_constant (MINUS, SImode, curr_insn,
1103 INTVAL (operands[1]), operands[0], operands[2], 0);
1106 [(set_attr "length" "4,4,16")
1107 (set_attr "predicable" "yes")]
;; Split: when the constant minuend is not encodable but its bitwise
;; NOT is, load it into a scratch (operand 3) first, then subtract.
1111 [(match_scratch:SI 3 "r")
1112 (set (match_operand:SI 0 "arm_general_register_operand" "")
1113 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1114 (match_operand:SI 2 "arm_general_register_operand" "")))]
1116 && !const_ok_for_arm (INTVAL (operands[1]))
1117 && const_ok_for_arm (~INTVAL (operands[1]))"
1118 [(set (match_dup 3) (match_dup 1))
1119 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract that also sets the condition codes (CC_NOOV: overflow flag
;; not meaningful to consumers).
1123 (define_insn "*subsi3_compare0"
1124 [(set (reg:CC_NOOV CC_REGNUM)
1126 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1127 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1129 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1130 (minus:SI (match_dup 1) (match_dup 2)))]
1135 [(set_attr "conds" "set")]
;; decscc: operand 0 = operand 1 - (comparison result 0/1).
1138 (define_expand "decscc"
1139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1140 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1141 (match_operator:SI 2 "arm_comparison_operator"
1142 [(match_operand 3 "cc_register" "") (const_int 0)])))]
;; Matching insn: conditional mov/sub pair, mirroring *arm_incscc.
1147 (define_insn "*arm_decscc"
1148 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1149 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1150 (match_operator:SI 2 "arm_comparison_operator"
1151 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1155 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1156 [(set_attr "conds" "use")
1157 (set_attr "length" "*,8")]
;; FP subtract expanders.  Under Maverick (Cirrus) both inputs must be
;; Cirrus FP registers, so non-conforming operands are forced into
;; registers.
1160 (define_expand "subsf3"
1161 [(set (match_operand:SF 0 "s_register_operand" "")
1162 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1163 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1164 "TARGET_32BIT && TARGET_HARD_FLOAT"
1166 if (TARGET_MAVERICK)
1168 if (!cirrus_fp_register (operands[1], SFmode))
1169 operands[1] = force_reg (SFmode, operands[1]);
1170 if (!cirrus_fp_register (operands[2], SFmode))
1171 operands[2] = force_reg (SFmode, operands[2]);
;; Double-precision counterpart of subsf3.
1175 (define_expand "subdf3"
1176 [(set (match_operand:DF 0 "s_register_operand" "")
1177 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1178 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT"
1181 if (TARGET_MAVERICK)
1183 if (!cirrus_fp_register (operands[1], DFmode))
1184 operands[1] = force_reg (DFmode, operands[1]);
1185 if (!cirrus_fp_register (operands[2], DFmode))
1186 operands[2] = force_reg (DFmode, operands[2]);
1191 ;; Multiplication insns
1193 (define_expand "mulsi3"
1194 [(set (match_operand:SI 0 "s_register_operand" "")
1195 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1196 (match_operand:SI 1 "s_register_operand" "")))]
1201 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-v6 MUL forbids the destination matching the first source; the
;; earlyclobber plus the "0" tie encode that for the allocator.
1202 (define_insn "*arm_mulsi3"
1203 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1204 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1205 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1206 "TARGET_32BIT && !arm_arch6"
1207 "mul%?\\t%0, %2, %1"
1208 [(set_attr "insn" "mul")
1209 (set_attr "predicable" "yes")]
;; From v6 onwards the restriction is lifted: no earlyclobber needed.
1212 (define_insn "*arm_mulsi3_v6"
1213 [(set (match_operand:SI 0 "s_register_operand" "=r")
1214 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1215 (match_operand:SI 2 "s_register_operand" "r")))]
1216 "TARGET_32BIT && arm_arch6"
1217 "mul%?\\t%0, %1, %2"
1218 [(set_attr "insn" "mul")
1219 (set_attr "predicable" "yes")]
1222 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1223 ; 1 and 2 are the same, because reload will make operand 0 match
1224 ; operand 1 without realizing that this conflicts with operand 2. We fix
1225 ; this by adding another alternative to match this case, and then `reload'
1226 ; it ourselves. This alternative must come first.
;; Thumb-1 multiply.  The first two alternatives copy operand 1 into
;; the destination first (mov+mul, length 4); the third reuses the
;; tied destination (single 16-bit mul, length 2).
1227 (define_insn "*thumb_mulsi3"
1228 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1229 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1230 (match_operand:SI 2 "register_operand" "l,l,l")))]
1231 "TARGET_THUMB1 && !arm_arch6"
1233 if (which_alternative < 2)
1234 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1236 return \"mul\\t%0, %2\";
1238 [(set_attr "length" "4,4,2")
1239 (set_attr "insn" "mul")]
;; v6 Thumb-1 variant: destination tied to one source, single mul.
1242 (define_insn "*thumb_mulsi3_v6"
1243 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1244 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1245 (match_operand:SI 2 "register_operand" "l,0,0")))]
1246 "TARGET_THUMB1 && arm_arch6"
1251 [(set_attr "length" "2")
1252 (set_attr "insn" "mul")]
;; Multiply that also sets the condition codes (MULS); pre-v6 form with
;; the earlyclobber/tie operand trick described above.
1255 (define_insn "*mulsi3_compare0"
1256 [(set (reg:CC_NOOV CC_REGNUM)
1257 (compare:CC_NOOV (mult:SI
1258 (match_operand:SI 2 "s_register_operand" "r,r")
1259 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1261 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1262 (mult:SI (match_dup 2) (match_dup 1)))]
1263 "TARGET_ARM && !arm_arch6"
1264 "mul%.\\t%0, %2, %1"
1265 [(set_attr "conds" "set")
1266 (set_attr "insn" "muls")]
;; v6 form; enabled only when optimizing for size.
1269 (define_insn "*mulsi3_compare0_v6"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1271 (compare:CC_NOOV (mult:SI
1272 (match_operand:SI 2 "s_register_operand" "r")
1273 (match_operand:SI 1 "s_register_operand" "r"))
1275 (set (match_operand:SI 0 "s_register_operand" "=r")
1276 (mult:SI (match_dup 2) (match_dup 1)))]
1277 "TARGET_ARM && arm_arch6 && optimize_size"
1278 "mul%.\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "insn" "muls")]
;; As *mulsi3_compare0 but the product itself is discarded: only the
;; flags are wanted, so operand 0 is just a scratch.
1283 (define_insn "*mulsi_compare0_scratch"
1284 [(set (reg:CC_NOOV CC_REGNUM)
1285 (compare:CC_NOOV (mult:SI
1286 (match_operand:SI 2 "s_register_operand" "r,r")
1287 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1289 (clobber (match_scratch:SI 0 "=&r,&r"))]
1290 "TARGET_ARM && !arm_arch6"
1291 "mul%.\\t%0, %2, %1"
1292 [(set_attr "conds" "set")
1293 (set_attr "insn" "muls")]
;; v6 scratch variant; size-optimization only, like its siblings.
1296 (define_insn "*mulsi_compare0_scratch_v6"
1297 [(set (reg:CC_NOOV CC_REGNUM)
1298 (compare:CC_NOOV (mult:SI
1299 (match_operand:SI 2 "s_register_operand" "r")
1300 (match_operand:SI 1 "s_register_operand" "r"))
1302 (clobber (match_scratch:SI 0 "=r"))]
1303 "TARGET_ARM && arm_arch6 && optimize_size"
1304 "mul%.\\t%0, %2, %1"
1305 [(set_attr "conds" "set")
1306 (set_attr "insn" "muls")]
1309 ;; Unnamed templates to match MLA instruction.
;; MLA: operand 0 = operand 2 * operand 1 + operand 3.  Pre-v6 register
;; restrictions are expressed through the earlyclobbers and ties.
1311 (define_insn "*mulsi3addsi"
1312 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1314 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1315 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1316 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1317 "TARGET_32BIT && !arm_arch6"
1318 "mla%?\\t%0, %2, %1, %3"
1319 [(set_attr "insn" "mla")
1320 (set_attr "predicable" "yes")]
;; v6 form: no register restrictions.
1323 (define_insn "*mulsi3addsi_v6"
1324 [(set (match_operand:SI 0 "s_register_operand" "=r")
1326 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1327 (match_operand:SI 1 "s_register_operand" "r"))
1328 (match_operand:SI 3 "s_register_operand" "r")))]
1329 "TARGET_32BIT && arm_arch6"
1330 "mla%?\\t%0, %2, %1, %3"
1331 [(set_attr "insn" "mla")
1332 (set_attr "predicable" "yes")]
;; MLAS (multiply-accumulate, setting the condition codes), pre-v6 form
;; with the earlyclobber/tie register restrictions.  The guard must be
;; !arm_arch6: the constraint shape matches the other pre-v6 patterns,
;; and the v6 form follows below guarded by arm_arch6 && optimize_size.
;; (Fixed: the condition previously read "TARGET_ARM && arm_arch6",
;; which made this pattern dead on pre-v6 and overlap the v6 pattern.)
1335 (define_insn "*mulsi3addsi_compare0"
1336 [(set (reg:CC_NOOV CC_REGNUM)
1339 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1340 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1341 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1343 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1344 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1346 "TARGET_ARM && !arm_arch6"
1347 "mla%.\\t%0, %2, %1, %3"
1348 [(set_attr "conds" "set")
1349 (set_attr "insn" "mlas")]
;; v6 MLAS; enabled only when optimizing for size.
1352 (define_insn "*mulsi3addsi_compare0_v6"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1356 (match_operand:SI 2 "s_register_operand" "r")
1357 (match_operand:SI 1 "s_register_operand" "r"))
1358 (match_operand:SI 3 "s_register_operand" "r"))
1360 (set (match_operand:SI 0 "s_register_operand" "=r")
1361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1363 "TARGET_ARM && arm_arch6 && optimize_size"
1364 "mla%.\\t%0, %2, %1, %3"
1365 [(set_attr "conds" "set")
1366 (set_attr "insn" "mlas")]
;; As above but only the flags are wanted: the accumulate result goes
;; to a scratch.  Pre-v6 form.
1369 (define_insn "*mulsi3addsi_compare0_scratch"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1373 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1374 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1375 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1377 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1378 "TARGET_ARM && !arm_arch6"
1379 "mla%.\\t%0, %2, %1, %3"
1380 [(set_attr "conds" "set")
1381 (set_attr "insn" "mlas")]
;; v6 scratch variant.
1384 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1388 (match_operand:SI 2 "s_register_operand" "r")
1389 (match_operand:SI 1 "s_register_operand" "r"))
1390 (match_operand:SI 3 "s_register_operand" "r"))
1392 (clobber (match_scratch:SI 0 "=r"))]
1393 "TARGET_ARM && arm_arch6 && optimize_size"
1394 "mla%.\\t%0, %2, %1, %3"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "mlas")]
;; MLS (multiply-and-subtract): operand 0 = operand 3 - op2 * op1.
;; MLS was introduced with Thumb-2, hence arm_arch_thumb2.
1399 (define_insn "*mulsi3subsi"
1400 [(set (match_operand:SI 0 "s_register_operand" "=r")
1402 (match_operand:SI 3 "s_register_operand" "r")
1403 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1404 (match_operand:SI 1 "s_register_operand" "r"))))]
1405 "TARGET_32BIT && arm_arch_thumb2"
1406 "mls%?\\t%0, %2, %1, %3"
1407 [(set_attr "insn" "mla")
1408 (set_attr "predicable" "yes")]
1411 ;; Unnamed template to match long long multiply-accumulate (smlal)
;; Signed 32x32->64 multiply accumulated into operand 1, which is tied
;; to the output.  Pre-v6 form: earlyclobbered destination.
1413 (define_insn "*mulsidi3adddi"
1414 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1417 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1418 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1419 (match_operand:DI 1 "s_register_operand" "0")))]
1420 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1421 "smlal%?\\t%Q0, %R0, %3, %2"
1422 [(set_attr "insn" "smlal")
1423 (set_attr "predicable" "yes")]
;; v6 form: no earlyclobber needed.
1426 (define_insn "*mulsidi3adddi_v6"
1427 [(set (match_operand:DI 0 "s_register_operand" "=r")
1430 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1431 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1432 (match_operand:DI 1 "s_register_operand" "0")))]
1433 "TARGET_32BIT && arm_arch6"
1434 "smlal%?\\t%Q0, %R0, %3, %2"
1435 [(set_attr "insn" "smlal")
1436 (set_attr "predicable" "yes")]
1439 ;; 32x32->64 widening multiply.
1440 ;; As with mulsi3, the only difference between the v3-5 and v6+
1441 ;; versions of these patterns is the requirement that the output not
1442 ;; overlap the inputs, but that still means we have to have a named
1443 ;; expander and two different starred insns.
1445 (define_expand "mulsidi3"
1446 [(set (match_operand:DI 0 "s_register_operand" "")
1448 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1449 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1450 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 SMULL: destination earlyclobbered so it cannot overlap inputs.
1454 (define_insn "*mulsidi3_nov6"
1455 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1457 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1458 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1459 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1460 "smull%?\\t%Q0, %R0, %1, %2"
1461 [(set_attr "insn" "smull")
1462 (set_attr "predicable" "yes")]
;; v6 SMULL: overlap allowed.
1465 (define_insn "*mulsidi3_v6"
1466 [(set (match_operand:DI 0 "s_register_operand" "=r")
1468 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1469 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1470 "TARGET_32BIT && arm_arch6"
1471 "smull%?\\t%Q0, %R0, %1, %2"
1472 [(set_attr "insn" "smull")
1473 (set_attr "predicable" "yes")]
;; Unsigned 32x32->64 widening multiply (UMULL); structure parallels
;; mulsidi3 above.
1476 (define_expand "umulsidi3"
1477 [(set (match_operand:DI 0 "s_register_operand" "")
1479 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1480 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1481 "TARGET_32BIT && arm_arch3m"
;; Pre-v6: earlyclobbered destination.
1485 (define_insn "*umulsidi3_nov6"
1486 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1488 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1489 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1490 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1491 "umull%?\\t%Q0, %R0, %1, %2"
1492 [(set_attr "insn" "umull")
1493 (set_attr "predicable" "yes")]
;; v6: overlap allowed.
1496 (define_insn "*umulsidi3_v6"
1497 [(set (match_operand:DI 0 "s_register_operand" "=r")
1499 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1500 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1501 "TARGET_32BIT && arm_arch6"
1502 "umull%?\\t%Q0, %R0, %1, %2"
1503 [(set_attr "insn" "umull")
1504 (set_attr "predicable" "yes")]
1507 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
;; Unsigned 32x32->64 multiply accumulated into operand 1 (tied to the
;; output); pre-v6 form with an earlyclobbered destination.
1509 (define_insn "*umulsidi3adddi"
1510 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1513 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1514 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1515 (match_operand:DI 1 "s_register_operand" "0")))]
1516 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1517 "umlal%?\\t%Q0, %R0, %3, %2"
1518 [(set_attr "insn" "umlal")
1519 (set_attr "predicable" "yes")]
;; v6 form.
1522 (define_insn "*umulsidi3adddi_v6"
1523 [(set (match_operand:DI 0 "s_register_operand" "=r")
1526 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1527 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1528 (match_operand:DI 1 "s_register_operand" "0")))]
1529 "TARGET_32BIT && arm_arch6"
1530 "umlal%?\\t%Q0, %R0, %3, %2"
1531 [(set_attr "insn" "umlal")
1532 (set_attr "predicable" "yes")]
;; High 32 bits of a signed 64-bit product; the low half of the SMULL
;; result goes to a scratch register (operand 3 clobber).
1535 (define_expand "smulsi3_highpart"
1537 [(set (match_operand:SI 0 "s_register_operand" "")
1541 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1542 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1544 (clobber (match_scratch:SI 3 ""))])]
1545 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 form: earlyclobbered output and scratch.
1549 (define_insn "*smulsi3_highpart_nov6"
1550 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1554 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1555 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1557 (clobber (match_scratch:SI 3 "=&r,&r"))]
1558 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1559 "smull%?\\t%3, %0, %2, %1"
1560 [(set_attr "insn" "smull")
1561 (set_attr "predicable" "yes")]
;; v6 form.
1564 (define_insn "*smulsi3_highpart_v6"
1565 [(set (match_operand:SI 0 "s_register_operand" "=r")
1569 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1570 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1572 (clobber (match_scratch:SI 3 "=r"))]
1573 "TARGET_32BIT && arm_arch6"
1574 "smull%?\\t%3, %0, %2, %1"
1575 [(set_attr "insn" "smull")
1576 (set_attr "predicable" "yes")]
;; Unsigned high-part multiply (UMULL), same structure as above.
1579 (define_expand "umulsi3_highpart"
1581 [(set (match_operand:SI 0 "s_register_operand" "")
1585 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1586 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1588 (clobber (match_scratch:SI 3 ""))])]
1589 "TARGET_32BIT && arm_arch3m"
1593 (define_insn "*umulsi3_highpart_nov6"
1594 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1598 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1599 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1601 (clobber (match_scratch:SI 3 "=&r,&r"))]
1602 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1603 "umull%?\\t%3, %0, %2, %1"
1604 [(set_attr "insn" "umull")
1605 (set_attr "predicable" "yes")]
1608 (define_insn "*umulsi3_highpart_v6"
1609 [(set (match_operand:SI 0 "s_register_operand" "=r")
1613 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1614 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1616 (clobber (match_scratch:SI 3 "=r"))]
1617 "TARGET_32BIT && arm_arch6"
1618 "umull%?\\t%3, %0, %2, %1"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
;; DSP 16x16->32 signed multiplies.  Suffix letters in the mnemonics:
;; b = bottom halfword, t = top halfword of the corresponding source.
1623 (define_insn "mulhisi3"
1624 [(set (match_operand:SI 0 "s_register_operand" "=r")
1625 (mult:SI (sign_extend:SI
1626 (match_operand:HI 1 "s_register_operand" "%r"))
1628 (match_operand:HI 2 "s_register_operand" "r"))))]
1629 "TARGET_DSP_MULTIPLY"
1630 "smulbb%?\\t%0, %1, %2"
1631 [(set_attr "insn" "smulxy")
1632 (set_attr "predicable" "yes")]
;; Top half of operand 1 (expressed via ashiftrt) times bottom half of
;; operand 2.
1635 (define_insn "*mulhisi3tb"
1636 [(set (match_operand:SI 0 "s_register_operand" "=r")
1637 (mult:SI (ashiftrt:SI
1638 (match_operand:SI 1 "s_register_operand" "r")
1641 (match_operand:HI 2 "s_register_operand" "r"))))]
1642 "TARGET_DSP_MULTIPLY"
1643 "smultb%?\\t%0, %1, %2"
1644 [(set_attr "insn" "smulxy")
1645 (set_attr "predicable" "yes")]
;; Bottom half of operand 1 times top half of operand 2.
1648 (define_insn "*mulhisi3bt"
1649 [(set (match_operand:SI 0 "s_register_operand" "=r")
1650 (mult:SI (sign_extend:SI
1651 (match_operand:HI 1 "s_register_operand" "r"))
1653 (match_operand:SI 2 "s_register_operand" "r")
1655 "TARGET_DSP_MULTIPLY"
1656 "smulbt%?\\t%0, %1, %2"
1657 [(set_attr "insn" "smulxy")
1658 (set_attr "predicable" "yes")]
;; Top halves of both operands.
1661 (define_insn "*mulhisi3tt"
1662 [(set (match_operand:SI 0 "s_register_operand" "=r")
1663 (mult:SI (ashiftrt:SI
1664 (match_operand:SI 1 "s_register_operand" "r")
1667 (match_operand:SI 2 "s_register_operand" "r")
1669 "TARGET_DSP_MULTIPLY"
1670 "smultt%?\\t%0, %1, %2"
1671 [(set_attr "insn" "smulxy")
1672 (set_attr "predicable" "yes")]
;; SMLABB: 16x16 multiply accumulated into a 32-bit operand (operand 1).
1675 (define_insn "*mulhisi3addsi"
1676 [(set (match_operand:SI 0 "s_register_operand" "=r")
1677 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1678 (mult:SI (sign_extend:SI
1679 (match_operand:HI 2 "s_register_operand" "%r"))
1681 (match_operand:HI 3 "s_register_operand" "r")))))]
1682 "TARGET_DSP_MULTIPLY"
1683 "smlabb%?\\t%0, %2, %3, %1"
1684 [(set_attr "insn" "smlaxy")
1685 (set_attr "predicable" "yes")]
;; SMLALBB: 16x16 multiply accumulated into a 64-bit operand, which is
;; tied to the output.
1688 (define_insn "*mulhidi3adddi"
1689 [(set (match_operand:DI 0 "s_register_operand" "=r")
1691 (match_operand:DI 1 "s_register_operand" "0")
1692 (mult:DI (sign_extend:DI
1693 (match_operand:HI 2 "s_register_operand" "%r"))
1695 (match_operand:HI 3 "s_register_operand" "r")))))]
1696 "TARGET_DSP_MULTIPLY"
1697 "smlalbb%?\\t%Q0, %R0, %2, %3"
1698 [(set_attr "insn" "smlalxy")
1699 (set_attr "predicable" "yes")])
;; FP multiply expanders; operand 2 is forced into a register when it
;; is not a Cirrus FP register (the guard is not visible in this
;; excerpt -- presumably TARGET_MAVERICK; confirm in the full file).
1701 (define_expand "mulsf3"
1702 [(set (match_operand:SF 0 "s_register_operand" "")
1703 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1704 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1705 "TARGET_32BIT && TARGET_HARD_FLOAT"
1708 && !cirrus_fp_register (operands[2], SFmode))
1709 operands[2] = force_reg (SFmode, operands[2]);
;; Double-precision counterpart.
1712 (define_expand "muldf3"
1713 [(set (match_operand:DF 0 "s_register_operand" "")
1714 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1715 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1716 "TARGET_32BIT && TARGET_HARD_FLOAT"
1719 && !cirrus_fp_register (operands[2], DFmode))
1720 operands[2] = force_reg (DFmode, operands[2]);
;; FP division: only enabled for FPA or VFP.
1725 (define_expand "divsf3"
1726 [(set (match_operand:SF 0 "s_register_operand" "")
1727 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1728 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1729 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1732 (define_expand "divdf3"
1733 [(set (match_operand:DF 0 "s_register_operand" "")
1734 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1735 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1736 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; FP modulus: FPA only.
1741 (define_expand "modsf3"
1742 [(set (match_operand:SF 0 "s_register_operand" "")
1743 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1744 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1745 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1748 (define_expand "moddf3"
1749 [(set (match_operand:DF 0 "s_register_operand" "")
1750 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1751 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1752 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1755 ;; Boolean and,ior,xor insns
1757 ;; Split up double word logical operations
1759 ;; Split up simple DImode logical operations. Simply perform the logical
1760 ;; operation on the upper and lower halves of the registers.
;; After reload, a DImode and/ior/xor becomes two independent SImode
;; operations: low halves (operands 0/1/2) and high halves (3/4/5),
;; computed by the preparation statements below.
1762 [(set (match_operand:DI 0 "s_register_operand" "")
1763 (match_operator:DI 6 "logical_binary_operator"
1764 [(match_operand:DI 1 "s_register_operand" "")
1765 (match_operand:DI 2 "s_register_operand" "")]))]
1766 "TARGET_32BIT && reload_completed
1767 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1768 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1769 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1772 operands[3] = gen_highpart (SImode, operands[0]);
1773 operands[0] = gen_lowpart (SImode, operands[0]);
1774 operands[4] = gen_highpart (SImode, operands[1]);
1775 operands[1] = gen_lowpart (SImode, operands[1]);
1776 operands[5] = gen_highpart (SImode, operands[2]);
1777 operands[2] = gen_lowpart (SImode, operands[2]);
;; DImode logical op with a sign-extended SImode second input: the high
;; half of the operation uses the sign word (operand 2 >> 31).
1782 [(set (match_operand:DI 0 "s_register_operand" "")
1783 (match_operator:DI 6 "logical_binary_operator"
1784 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1785 (match_operand:DI 1 "s_register_operand" "")]))]
1786 "TARGET_32BIT && reload_completed"
1787 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1788 (set (match_dup 3) (match_op_dup:SI 6
1789 [(ashiftrt:SI (match_dup 2) (const_int 31))
1793 operands[3] = gen_highpart (SImode, operands[0]);
1794 operands[0] = gen_lowpart (SImode, operands[0]);
1795 operands[4] = gen_highpart (SImode, operands[1]);
1796 operands[1] = gen_lowpart (SImode, operands[1]);
1797 operands[5] = gen_highpart (SImode, operands[2]);
1798 operands[2] = gen_lowpart (SImode, operands[2]);
1802 ;; The zero extend of operand 2 means we can just copy the high part of
1803 ;; operand1 into operand0.
;; IOR with a zero-extended SI operand: low halves are OR'd, the high
;; half is a plain copy of operand 1's high word.
1805 [(set (match_operand:DI 0 "s_register_operand" "")
1807 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1808 (match_operand:DI 1 "s_register_operand" "")))]
1809 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1810 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1811 (set (match_dup 3) (match_dup 4))]
1814 operands[4] = gen_highpart (SImode, operands[1]);
1815 operands[3] = gen_highpart (SImode, operands[0]);
1816 operands[0] = gen_lowpart (SImode, operands[0]);
1817 operands[1] = gen_lowpart (SImode, operands[1]);
1821 ;; The zero extend of operand 2 means we can just copy the high part of
1822 ;; operand1 into operand0.
;; XOR counterpart of the IOR split above.
1824 [(set (match_operand:DI 0 "s_register_operand" "")
1826 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1827 (match_operand:DI 1 "s_register_operand" "")))]
1828 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1829 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1830 (set (match_dup 3) (match_dup 4))]
1833 operands[4] = gen_highpart (SImode, operands[1]);
1834 operands[3] = gen_highpart (SImode, operands[0]);
1835 operands[0] = gen_lowpart (SImode, operands[0]);
1836 operands[1] = gen_lowpart (SImode, operands[1]);
;; DImode AND performed on the two register halves (length 8 = two
;; 32-bit insns).
1840 (define_insn "anddi3"
1841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1842 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1843 (match_operand:DI 2 "s_register_operand" "r,r")))]
1844 "TARGET_32BIT && ! TARGET_IWMMXT"
1846 [(set_attr "length" "8")]
;; AND with a zero-extended SImode operand: after reload this splits
;; into an SImode AND of the low halves plus zeroing the high half.
1849 (define_insn_and_split "*anddi_zesidi_di"
1850 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1851 (and:DI (zero_extend:DI
1852 (match_operand:SI 2 "s_register_operand" "r,r"))
1853 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1856 "TARGET_32BIT && reload_completed"
1857 ; The zero extend of operand 2 clears the high word of the output
1859 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1860 (set (match_dup 3) (const_int 0))]
1863 operands[3] = gen_highpart (SImode, operands[0]);
1864 operands[0] = gen_lowpart (SImode, operands[0]);
1865 operands[1] = gen_lowpart (SImode, operands[1]);
1867 [(set_attr "length" "8")]
;; AND with a sign-extended SImode operand (two-insn sequence).
1870 (define_insn "*anddi_sesdi_di"
1871 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1872 (and:DI (sign_extend:DI
1873 (match_operand:SI 2 "s_register_operand" "r,r"))
1874 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1877 [(set_attr "length" "8")]
;; 32-bit AND expander.  Constants are handled specially: split via
;; arm_split_constant on 32-bit targets; on the Thumb-1 path, try BIC
;; of the complement, a bitfield extract (mask 2^i - 1), or a shift
;; pair (mask ~(2^i - 1)) before falling back to a register AND.
1880 (define_expand "andsi3"
1881 [(set (match_operand:SI 0 "s_register_operand" "")
1882 (and:SI (match_operand:SI 1 "s_register_operand" "")
1883 (match_operand:SI 2 "reg_or_int_operand" "")))]
1888 if (GET_CODE (operands[2]) == CONST_INT)
1890 arm_split_constant (AND, SImode, NULL_RTX,
1891 INTVAL (operands[2]), operands[0],
1892 operands[1], optimize && can_create_pseudo_p ());
1897 else /* TARGET_THUMB1 */
1899 if (GET_CODE (operands[2]) != CONST_INT)
1900 operands[2] = force_reg (SImode, operands[2]);
;; ~constant fits in 8 bits: use BIC with the complemented constant.
1905 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1907 operands[2] = force_reg (SImode,
1908 GEN_INT (~INTVAL (operands[2])));
1910 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
;; Search for masks of the form 2^i - 1 (extract the low bits) or
;; ~(2^i - 1) (clear the low bits with an lsr/lsl pair).
1915 for (i = 9; i <= 31; i++)
1917 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1919 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1923 else if ((((HOST_WIDE_INT) 1) << i) - 1
1924 == ~INTVAL (operands[2]))
1926 rtx shift = GEN_INT (i);
1927 rtx reg = gen_reg_rtx (SImode);
1929 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1930 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1936 operands[2] = force_reg (SImode, operands[2]);
1942 ; ??? Check split length for Thumb-2
;; 32-bit AND insn: immediate (I), BIC-able complement (K), or any
;; constant (?n, split after matching via arm_split_constant when
;; neither the constant nor its complement is encodable).
1943 (define_insn_and_split "*arm_andsi3_insn"
1944 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1945 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1946 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1950 bic%?\\t%0, %1, #%B2
1953 && GET_CODE (operands[2]) == CONST_INT
1954 && !(const_ok_for_arm (INTVAL (operands[2]))
1955 || const_ok_for_arm (~INTVAL (operands[2])))"
1956 [(clobber (const_int 0))]
1958 arm_split_constant (AND, SImode, curr_insn,
1959 INTVAL (operands[2]), operands[0], operands[1], 0);
1962 [(set_attr "length" "4,4,16")
1963 (set_attr "predicable" "yes")]
;; Thumb-1 AND: destination tied to operand 1 ("%0" constraint).
1966 (define_insn "*thumb1_andsi3_insn"
1967 [(set (match_operand:SI 0 "register_operand" "=l")
1968 (and:SI (match_operand:SI 1 "register_operand" "%0")
1969 (match_operand:SI 2 "register_operand" "l")))]
1972 [(set_attr "length" "2")]
;; AND that also sets the condition codes; the K alternative emits BIC
;; of the complemented immediate (#%B2).
1975 (define_insn "*andsi3_compare0"
1976 [(set (reg:CC_NOOV CC_REGNUM)
1978 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1979 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1981 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1982 (and:SI (match_dup 1) (match_dup 2)))]
1986 bic%.\\t%0, %1, #%B2"
1987 [(set_attr "conds" "set")]
;; Flags-only AND: the result is discarded.  Operand 0 here is the
;; first input; operand 2 is a scratch used only by the BIC (K)
;; alternative ("X" = no scratch needed for the first alternative).
1990 (define_insn "*andsi3_compare0_scratch"
1991 [(set (reg:CC_NOOV CC_REGNUM)
1993 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1994 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1996 (clobber (match_scratch:SI 2 "=X,r"))]
2000 bic%.\\t%2, %0, #%B1"
2001 [(set_attr "conds" "set")]
;; Compare a zero_extract (bitfield of length operand 1 at position
;; operand 2) against zero: emitted as TST with the mask
;; ((2^len - 1) << start), built in the output statement.
2004 (define_insn "*zeroextractsi_compare0_scratch"
2005 [(set (reg:CC_NOOV CC_REGNUM)
2006 (compare:CC_NOOV (zero_extract:SI
2007 (match_operand:SI 0 "s_register_operand" "r")
2008 (match_operand 1 "const_int_operand" "n")
2009 (match_operand 2 "const_int_operand" "n"))
2012 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2013 && INTVAL (operands[1]) > 0
2014 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2015 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2017 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2018 << INTVAL (operands[2]));
2019 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2022 [(set_attr "conds" "set")]
;; Set operand 0 to (bitfield != 0).  Splits into an ANDS with the mask
;; ((2^len - 1) << start) followed by a conditional move of 1 over the
;; nonzero result (the if_then_else keeps the zero ANDS result on EQ).
2025 (define_insn_and_split "*ne_zeroextractsi"
2026 [(set (match_operand:SI 0 "s_register_operand" "=r")
2027 (ne:SI (zero_extract:SI
2028 (match_operand:SI 1 "s_register_operand" "r")
2029 (match_operand:SI 2 "const_int_operand" "n")
2030 (match_operand:SI 3 "const_int_operand" "n"))
2032 (clobber (reg:CC CC_REGNUM))]
2034 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2035 && INTVAL (operands[2]) > 0
2036 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2037 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2040 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2041 && INTVAL (operands[2]) > 0
2042 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2043 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2044 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2045 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2047 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2049 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2050 (match_dup 0) (const_int 1)))]
2052 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2053 << INTVAL (operands[3]));
2055 [(set_attr "conds" "clob")
2056 (set (attr "length")
2057 (if_then_else (eq_attr "is_thumb" "yes")
;; Variant for a bitfield at bit position 0: left-shifting by
;; (32 - len) discards everything but those bits, then the result is
;; tested the same way.
2062 (define_insn_and_split "*ne_zeroextractsi_shifted"
2063 [(set (match_operand:SI 0 "s_register_operand" "=r")
2064 (ne:SI (zero_extract:SI
2065 (match_operand:SI 1 "s_register_operand" "r")
2066 (match_operand:SI 2 "const_int_operand" "n")
2069 (clobber (reg:CC CC_REGNUM))]
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2076 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2081 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2083 [(set_attr "conds" "clob")
2084 (set_attr "length" "8")]
2087 (define_insn_and_split "*ite_ne_zeroextractsi"
2088 [(set (match_operand:SI 0 "s_register_operand" "=r")
2089 (if_then_else:SI (ne (zero_extract:SI
2090 (match_operand:SI 1 "s_register_operand" "r")
2091 (match_operand:SI 2 "const_int_operand" "n")
2092 (match_operand:SI 3 "const_int_operand" "n"))
2094 (match_operand:SI 4 "arm_not_operand" "rIK")
2096 (clobber (reg:CC CC_REGNUM))]
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2102 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2105 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2106 && INTVAL (operands[2]) > 0
2107 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2108 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2109 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2110 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2111 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2113 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2115 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2116 (match_dup 0) (match_dup 4)))]
2118 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2119 << INTVAL (operands[3]));
2121 [(set_attr "conds" "clob")
2122 (set_attr "length" "8")]
2125 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2126 [(set (match_operand:SI 0 "s_register_operand" "=r")
2127 (if_then_else:SI (ne (zero_extract:SI
2128 (match_operand:SI 1 "s_register_operand" "r")
2129 (match_operand:SI 2 "const_int_operand" "n")
2132 (match_operand:SI 3 "arm_not_operand" "rIK")
2134 (clobber (reg:CC CC_REGNUM))]
2135 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2137 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2138 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2139 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2141 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2143 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2144 (match_dup 0) (match_dup 3)))]
2146 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2148 [(set_attr "conds" "clob")
2149 (set_attr "length" "8")]
2153 [(set (match_operand:SI 0 "s_register_operand" "")
2154 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2155 (match_operand:SI 2 "const_int_operand" "")
2156 (match_operand:SI 3 "const_int_operand" "")))
2157 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2159 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2160 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2162 HOST_WIDE_INT temp = INTVAL (operands[2]);
2164 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2165 operands[3] = GEN_INT (32 - temp);
2169 ;; ??? Thumb-2 has bitfield insert/extract instructions; these splitters could use them.
2171 [(set (match_operand:SI 0 "s_register_operand" "")
2172 (match_operator:SI 1 "shiftable_operator"
2173 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2174 (match_operand:SI 3 "const_int_operand" "")
2175 (match_operand:SI 4 "const_int_operand" ""))
2176 (match_operand:SI 5 "s_register_operand" "")]))
2177 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2179 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2182 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2185 HOST_WIDE_INT temp = INTVAL (operands[3]);
2187 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2188 operands[4] = GEN_INT (32 - temp);
2193 [(set (match_operand:SI 0 "s_register_operand" "")
2194 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2195 (match_operand:SI 2 "const_int_operand" "")
2196 (match_operand:SI 3 "const_int_operand" "")))]
2198 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2199 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2201 HOST_WIDE_INT temp = INTVAL (operands[2]);
2203 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2204 operands[3] = GEN_INT (32 - temp);
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (match_operator:SI 1 "shiftable_operator"
2211 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")
2213 (match_operand:SI 4 "const_int_operand" ""))
2214 (match_operand:SI 5 "s_register_operand" "")]))
2215 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2217 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2220 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2223 HOST_WIDE_INT temp = INTVAL (operands[3]);
2225 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2226 operands[4] = GEN_INT (32 - temp);
2230 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2231 ;;; represented by the bitfield, then this will produce incorrect results.
2232 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2233 ;;; which have a real bit-field insert instruction, the truncation happens
2234 ;;; in the bit-field insert instruction itself. Since arm does not have a
2235 ;;; bit-field insert instruction, we would have to emit code here to truncate
2236 ;;; the value before we insert. This loses some of the advantage of having
2237 ;;; this insv pattern, so this pattern needs to be reevaluated.
2239 (define_expand "insv"
2240 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2241 (match_operand:SI 1 "general_operand" "")
2242 (match_operand:SI 2 "general_operand" ""))
2243 (match_operand:SI 3 "reg_or_int_operand" ""))]
2244 "TARGET_ARM || arm_arch_thumb2"
2247 int start_bit = INTVAL (operands[2]);
2248 int width = INTVAL (operands[1]);
2249 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2250 rtx target, subtarget;
2252 if (arm_arch_thumb2)
2254 bool use_bfi = TRUE;
2256 if (GET_CODE (operands[3]) == CONST_INT)
2258 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2262 emit_insn (gen_insv_zero (operands[0], operands[1],
2267 /* See if the set can be done with a single orr instruction. */
2268 if (val == mask && const_ok_for_arm (val << start_bit))
2274 if (GET_CODE (operands[3]) != REG)
2275 operands[3] = force_reg (SImode, operands[3]);
2277 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2283 target = copy_rtx (operands[0]);
2284 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2285 subreg as the final target. */
2286 if (GET_CODE (target) == SUBREG)
2288 subtarget = gen_reg_rtx (SImode);
2289 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2290 < GET_MODE_SIZE (SImode))
2291 target = SUBREG_REG (target);
2296 if (GET_CODE (operands[3]) == CONST_INT)
2298 /* Since we are inserting a known constant, we may be able to
2299 reduce the number of bits that we have to clear so that
2300 the mask becomes simple. */
2301 /* ??? This code does not check to see if the new mask is actually
2302 simpler. It may not be. */
2303 rtx op1 = gen_reg_rtx (SImode);
2304 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2305 start of this pattern. */
2306 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2307 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2309 emit_insn (gen_andsi3 (op1, operands[0],
2310 gen_int_mode (~mask2, SImode)));
2311 emit_insn (gen_iorsi3 (subtarget, op1,
2312 gen_int_mode (op3_value << start_bit, SImode)));
2314 else if (start_bit == 0
2315 && !(const_ok_for_arm (mask)
2316 || const_ok_for_arm (~mask)))
2318 /* A Trick, since we are setting the bottom bits in the word,
2319 we can shift operand[3] up, operand[0] down, OR them together
2320 and rotate the result back again. This takes 3 insns, and
2321 the third might be mergeable into another op. */
2322 /* The shift up copes with the possibility that operand[3] is
2323 wider than the bitfield. */
2324 rtx op0 = gen_reg_rtx (SImode);
2325 rtx op1 = gen_reg_rtx (SImode);
2327 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2328 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2329 emit_insn (gen_iorsi3 (op1, op1, op0));
2330 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2332 else if ((width + start_bit == 32)
2333 && !(const_ok_for_arm (mask)
2334 || const_ok_for_arm (~mask)))
2336 /* Similar trick, but slightly less efficient. */
2338 rtx op0 = gen_reg_rtx (SImode);
2339 rtx op1 = gen_reg_rtx (SImode);
2341 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2342 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2343 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2344 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2348 rtx op0 = gen_int_mode (mask, SImode);
2349 rtx op1 = gen_reg_rtx (SImode);
2350 rtx op2 = gen_reg_rtx (SImode);
2352 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2354 rtx tmp = gen_reg_rtx (SImode);
2356 emit_insn (gen_movsi (tmp, op0));
2360 /* Mask out any bits in operand[3] that are not needed. */
2361 emit_insn (gen_andsi3 (op1, operands[3], op0));
2363 if (GET_CODE (op0) == CONST_INT
2364 && (const_ok_for_arm (mask << start_bit)
2365 || const_ok_for_arm (~(mask << start_bit))))
2367 op0 = gen_int_mode (~(mask << start_bit), SImode);
2368 emit_insn (gen_andsi3 (op2, operands[0], op0));
2372 if (GET_CODE (op0) == CONST_INT)
2374 rtx tmp = gen_reg_rtx (SImode);
2376 emit_insn (gen_movsi (tmp, op0));
2381 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2383 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2387 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2389 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2392 if (subtarget != target)
2394 /* If TARGET is still a SUBREG, then it must be wider than a word,
2395 so we must be careful only to set the subword we were asked to. */
2396 if (GET_CODE (target) == SUBREG)
2397 emit_move_insn (target, subtarget);
2399 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2406 (define_insn "insv_zero"
2407 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2408 (match_operand:SI 1 "const_int_operand" "M")
2409 (match_operand:SI 2 "const_int_operand" "M"))
2413 [(set_attr "length" "4")
2414 (set_attr "predicable" "yes")]
2417 (define_insn "insv_t2"
2418 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2419 (match_operand:SI 1 "const_int_operand" "M")
2420 (match_operand:SI 2 "const_int_operand" "M"))
2421 (match_operand:SI 3 "s_register_operand" "r"))]
2423 "bfi%?\t%0, %3, %2, %1"
2424 [(set_attr "length" "4")
2425 (set_attr "predicable" "yes")]
2428 ; constants for op 2 will never be given to these patterns.
2429 (define_insn_and_split "*anddi_notdi_di"
2430 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2431 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2432 (match_operand:DI 2 "s_register_operand" "0,r")))]
2435 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2436 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2437 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2440 operands[3] = gen_highpart (SImode, operands[0]);
2441 operands[0] = gen_lowpart (SImode, operands[0]);
2442 operands[4] = gen_highpart (SImode, operands[1]);
2443 operands[1] = gen_lowpart (SImode, operands[1]);
2444 operands[5] = gen_highpart (SImode, operands[2]);
2445 operands[2] = gen_lowpart (SImode, operands[2]);
2447 [(set_attr "length" "8")
2448 (set_attr "predicable" "yes")]
2451 (define_insn_and_split "*anddi_notzesidi_di"
2452 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2453 (and:DI (not:DI (zero_extend:DI
2454 (match_operand:SI 2 "s_register_operand" "r,r")))
2455 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2458 bic%?\\t%Q0, %Q1, %2
2460 ; (not (zero_extend ...)) allows us to just copy the high word from
2461 ; operand1 to operand0.
2464 && operands[0] != operands[1]"
2465 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2466 (set (match_dup 3) (match_dup 4))]
2469 operands[3] = gen_highpart (SImode, operands[0]);
2470 operands[0] = gen_lowpart (SImode, operands[0]);
2471 operands[4] = gen_highpart (SImode, operands[1]);
2472 operands[1] = gen_lowpart (SImode, operands[1]);
2474 [(set_attr "length" "4,8")
2475 (set_attr "predicable" "yes")]
2478 (define_insn_and_split "*anddi_notsesidi_di"
2479 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2480 (and:DI (not:DI (sign_extend:DI
2481 (match_operand:SI 2 "s_register_operand" "r,r")))
2482 (match_operand:DI 1 "s_register_operand" "0,r")))]
2485 "TARGET_32BIT && reload_completed"
2486 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2487 (set (match_dup 3) (and:SI (not:SI
2488 (ashiftrt:SI (match_dup 2) (const_int 31)))
2492 operands[3] = gen_highpart (SImode, operands[0]);
2493 operands[0] = gen_lowpart (SImode, operands[0]);
2494 operands[4] = gen_highpart (SImode, operands[1]);
2495 operands[1] = gen_lowpart (SImode, operands[1]);
2497 [(set_attr "length" "8")
2498 (set_attr "predicable" "yes")]
;; andsi_notsi_si: SImode "and with complement" — operand 0 = operand 1 & ~operand 2,
;; emitted as a single BIC instruction.  The "predicable" attribute marks it as
;; eligible for conditional execution (the %? predication suffix in the template).
;; NOTE(review): the extraction dropped the insn-condition line between the RTL
;; template and the output string (and the pattern's closing paren) — confirm the
;; guard (likely a TARGET_* test) against the original arm.md.
2501 (define_insn "andsi_notsi_si"
2502 [(set (match_operand:SI 0 "s_register_operand" "=r")
2503 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2504 (match_operand:SI 1 "s_register_operand" "r")))]
2506 "bic%?\\t%0, %1, %2"
2507 [(set_attr "predicable" "yes")]
;; bicsi3: and-with-complement for Thumb-1 — operand 0 = operand 2 & ~operand 1.
;; The "l" constraints restrict all registers to the low registers, operand 2 is
;; tied to operand 0 (constraint "0"), and the 2-byte length is the 16-bit Thumb
;; encoding of BIC.
;; NOTE(review): the insn-condition line and the assembler-template line were
;; dropped by the extraction — confirm against the original arm.md.
2510 (define_insn "bicsi3"
2511 [(set (match_operand:SI 0 "register_operand" "=l")
2512 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2513 (match_operand:SI 2 "register_operand" "0")))]
2516 [(set_attr "length" "2")]
2519 (define_insn "andsi_not_shiftsi_si"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2522 [(match_operand:SI 2 "s_register_operand" "r")
2523 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2524 (match_operand:SI 1 "s_register_operand" "r")))]
2526 "bic%?\\t%0, %1, %2%S4"
2527 [(set_attr "predicable" "yes")
2528 (set_attr "shift" "2")
2529 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2530 (const_string "alu_shift")
2531 (const_string "alu_shift_reg")))]
2534 (define_insn "*andsi_notsi_si_compare0"
2535 [(set (reg:CC_NOOV CC_REGNUM)
2537 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2538 (match_operand:SI 1 "s_register_operand" "r"))
2540 (set (match_operand:SI 0 "s_register_operand" "=r")
2541 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2543 "bic%.\\t%0, %1, %2"
2544 [(set_attr "conds" "set")]
2547 (define_insn "*andsi_notsi_si_compare0_scratch"
2548 [(set (reg:CC_NOOV CC_REGNUM)
2550 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2551 (match_operand:SI 1 "s_register_operand" "r"))
2553 (clobber (match_scratch:SI 0 "=r"))]
2555 "bic%.\\t%0, %1, %2"
2556 [(set_attr "conds" "set")]
2559 (define_insn "iordi3"
2560 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2561 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2562 (match_operand:DI 2 "s_register_operand" "r,r")))]
2563 "TARGET_32BIT && ! TARGET_IWMMXT"
2565 [(set_attr "length" "8")
2566 (set_attr "predicable" "yes")]
2569 (define_insn "*iordi_zesidi_di"
2570 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2571 (ior:DI (zero_extend:DI
2572 (match_operand:SI 2 "s_register_operand" "r,r"))
2573 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2576 orr%?\\t%Q0, %Q1, %2
2578 [(set_attr "length" "4,8")
2579 (set_attr "predicable" "yes")]
2582 (define_insn "*iordi_sesidi_di"
2583 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2584 (ior:DI (sign_extend:DI
2585 (match_operand:SI 2 "s_register_operand" "r,r"))
2586 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2589 [(set_attr "length" "8")
2590 (set_attr "predicable" "yes")]
2593 (define_expand "iorsi3"
2594 [(set (match_operand:SI 0 "s_register_operand" "")
2595 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2596 (match_operand:SI 2 "reg_or_int_operand" "")))]
2599 if (GET_CODE (operands[2]) == CONST_INT)
2603 arm_split_constant (IOR, SImode, NULL_RTX,
2604 INTVAL (operands[2]), operands[0], operands[1],
2605 optimize && can_create_pseudo_p ());
2608 else /* TARGET_THUMB1 */
2609 operands [2] = force_reg (SImode, operands [2]);
2614 (define_insn_and_split "*arm_iorsi3"
2615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2616 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2617 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2623 && GET_CODE (operands[2]) == CONST_INT
2624 && !const_ok_for_arm (INTVAL (operands[2]))"
2625 [(clobber (const_int 0))]
2627 arm_split_constant (IOR, SImode, curr_insn,
2628 INTVAL (operands[2]), operands[0], operands[1], 0);
2631 [(set_attr "length" "4,16")
2632 (set_attr "predicable" "yes")]
2635 (define_insn "*thumb1_iorsi3"
2636 [(set (match_operand:SI 0 "register_operand" "=l")
2637 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2638 (match_operand:SI 2 "register_operand" "l")))]
2641 [(set_attr "length" "2")]
2645 [(match_scratch:SI 3 "r")
2646 (set (match_operand:SI 0 "arm_general_register_operand" "")
2647 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2648 (match_operand:SI 2 "const_int_operand" "")))]
2650 && !const_ok_for_arm (INTVAL (operands[2]))
2651 && const_ok_for_arm (~INTVAL (operands[2]))"
2652 [(set (match_dup 3) (match_dup 2))
2653 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2657 (define_insn "*iorsi3_compare0"
2658 [(set (reg:CC_NOOV CC_REGNUM)
2659 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2660 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2662 (set (match_operand:SI 0 "s_register_operand" "=r")
2663 (ior:SI (match_dup 1) (match_dup 2)))]
2665 "orr%.\\t%0, %1, %2"
2666 [(set_attr "conds" "set")]
2669 (define_insn "*iorsi3_compare0_scratch"
2670 [(set (reg:CC_NOOV CC_REGNUM)
2671 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2672 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2674 (clobber (match_scratch:SI 0 "=r"))]
2676 "orr%.\\t%0, %1, %2"
2677 [(set_attr "conds" "set")]
2680 (define_insn "xordi3"
2681 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2682 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2683 (match_operand:DI 2 "s_register_operand" "r,r")))]
2684 "TARGET_32BIT && !TARGET_IWMMXT"
2686 [(set_attr "length" "8")
2687 (set_attr "predicable" "yes")]
2690 (define_insn "*xordi_zesidi_di"
2691 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2692 (xor:DI (zero_extend:DI
2693 (match_operand:SI 2 "s_register_operand" "r,r"))
2694 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2697 eor%?\\t%Q0, %Q1, %2
2699 [(set_attr "length" "4,8")
2700 (set_attr "predicable" "yes")]
2703 (define_insn "*xordi_sesidi_di"
2704 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2705 (xor:DI (sign_extend:DI
2706 (match_operand:SI 2 "s_register_operand" "r,r"))
2707 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2710 [(set_attr "length" "8")
2711 (set_attr "predicable" "yes")]
2714 (define_expand "xorsi3"
2715 [(set (match_operand:SI 0 "s_register_operand" "")
2716 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2717 (match_operand:SI 2 "arm_rhs_operand" "")))]
2720 if (GET_CODE (operands[2]) == CONST_INT)
2721 operands[2] = force_reg (SImode, operands[2]);
2725 (define_insn "*arm_xorsi3"
2726 [(set (match_operand:SI 0 "s_register_operand" "=r")
2727 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2728 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2730 "eor%?\\t%0, %1, %2"
2731 [(set_attr "predicable" "yes")]
2734 (define_insn "*thumb1_xorsi3"
2735 [(set (match_operand:SI 0 "register_operand" "=l")
2736 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2737 (match_operand:SI 2 "register_operand" "l")))]
2740 [(set_attr "length" "2")]
2743 (define_insn "*xorsi3_compare0"
2744 [(set (reg:CC_NOOV CC_REGNUM)
2745 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2746 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2748 (set (match_operand:SI 0 "s_register_operand" "=r")
2749 (xor:SI (match_dup 1) (match_dup 2)))]
2751 "eor%.\\t%0, %1, %2"
2752 [(set_attr "conds" "set")]
2755 (define_insn "*xorsi3_compare0_scratch"
2756 [(set (reg:CC_NOOV CC_REGNUM)
2757 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2758 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2762 [(set_attr "conds" "set")]
2765 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2766 ; (NOT D) we can sometimes merge the final NOT into one of the following
2770 [(set (match_operand:SI 0 "s_register_operand" "")
2771 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2772 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2773 (match_operand:SI 3 "arm_rhs_operand" "")))
2774 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2776 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2777 (not:SI (match_dup 3))))
2778 (set (match_dup 0) (not:SI (match_dup 4)))]
2782 (define_insn "*andsi_iorsi3_notsi"
2783 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2784 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2785 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2786 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2788 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2789 [(set_attr "length" "8")
2790 (set_attr "ce_count" "2")
2791 (set_attr "predicable" "yes")]
2794 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2795 ; insns are available?
2797 [(set (match_operand:SI 0 "s_register_operand" "")
2798 (match_operator:SI 1 "logical_binary_operator"
2799 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2800 (match_operand:SI 3 "const_int_operand" "")
2801 (match_operand:SI 4 "const_int_operand" ""))
2802 (match_operator:SI 9 "logical_binary_operator"
2803 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2804 (match_operand:SI 6 "const_int_operand" ""))
2805 (match_operand:SI 7 "s_register_operand" "")])]))
2806 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2808 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2809 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2812 [(ashift:SI (match_dup 2) (match_dup 4))
2816 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2819 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2823 [(set (match_operand:SI 0 "s_register_operand" "")
2824 (match_operator:SI 1 "logical_binary_operator"
2825 [(match_operator:SI 9 "logical_binary_operator"
2826 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2827 (match_operand:SI 6 "const_int_operand" ""))
2828 (match_operand:SI 7 "s_register_operand" "")])
2829 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2830 (match_operand:SI 3 "const_int_operand" "")
2831 (match_operand:SI 4 "const_int_operand" ""))]))
2832 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2834 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2835 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2838 [(ashift:SI (match_dup 2) (match_dup 4))
2842 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2845 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2849 [(set (match_operand:SI 0 "s_register_operand" "")
2850 (match_operator:SI 1 "logical_binary_operator"
2851 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2852 (match_operand:SI 3 "const_int_operand" "")
2853 (match_operand:SI 4 "const_int_operand" ""))
2854 (match_operator:SI 9 "logical_binary_operator"
2855 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2856 (match_operand:SI 6 "const_int_operand" ""))
2857 (match_operand:SI 7 "s_register_operand" "")])]))
2858 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2860 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2861 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2864 [(ashift:SI (match_dup 2) (match_dup 4))
2868 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2871 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2875 [(set (match_operand:SI 0 "s_register_operand" "")
2876 (match_operator:SI 1 "logical_binary_operator"
2877 [(match_operator:SI 9 "logical_binary_operator"
2878 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2879 (match_operand:SI 6 "const_int_operand" ""))
2880 (match_operand:SI 7 "s_register_operand" "")])
2881 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2882 (match_operand:SI 3 "const_int_operand" "")
2883 (match_operand:SI 4 "const_int_operand" ""))]))
2884 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2886 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2887 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2890 [(ashift:SI (match_dup 2) (match_dup 4))
2894 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2897 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2901 ;; Minimum and maximum insns
2903 (define_expand "smaxsi3"
2905 (set (match_operand:SI 0 "s_register_operand" "")
2906 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2907 (match_operand:SI 2 "arm_rhs_operand" "")))
2908 (clobber (reg:CC CC_REGNUM))])]
2911 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2913 /* No need for a clobber of the condition code register here. */
2914 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2915 gen_rtx_SMAX (SImode, operands[1],
;; *smax_0: signed max(x, 0) in one instruction and without touching the flags:
;; "%1, asr #31" is all-ones when %1 is negative, so BIC clears every bit
;; (result 0); when %1 >= 0 the mask is zero and the result is %1 unchanged.
;; NOTE(review): the second smax operand (presumably (const_int 0), given the
;; pattern name) was dropped by the extraction, as was the insn condition —
;; confirm against the original arm.md.
2921 (define_insn "*smax_0"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r")
2923 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2926 "bic%?\\t%0, %1, %1, asr #31"
2927 [(set_attr "predicable" "yes")]
;; *smax_m1: signed max(x, -1) in one flag-preserving instruction:
;; "%1, asr #31" is all-ones when %1 is negative, so ORR forces the result
;; to -1; when %1 >= 0 the mask is zero and the result is %1 unchanged.
;; NOTE(review): the second smax operand (presumably (const_int -1), given the
;; pattern name) and the insn condition were dropped by the extraction —
;; confirm against the original arm.md.
2930 (define_insn "*smax_m1"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r")
2932 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2935 "orr%?\\t%0, %1, %1, asr #31"
2936 [(set_attr "predicable" "yes")]
;; *arm_smax_insn: general signed max via compare + conditional moves; clobbers
;; the condition codes ("conds" "clob").
;; Alternative 0 (op1 tied to op0, "%0"): cmp then a single movlt — 2 insns, 8 bytes.
;; Alternative 1 (op0 distinct): cmp, movge, movlt — 3 insns, 12 bytes.
;; NOTE(review): the insn-condition line and the "@"/opening of the multi-
;; alternative template were dropped by the extraction — confirm against the
;; original arm.md.
2939 (define_insn "*arm_smax_insn"
2940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2941 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2942 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2943 (clobber (reg:CC CC_REGNUM))]
2946 cmp\\t%1, %2\;movlt\\t%0, %2
2947 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2948 [(set_attr "conds" "clob")
2949 (set_attr "length" "8,12")]
2952 (define_expand "sminsi3"
2954 (set (match_operand:SI 0 "s_register_operand" "")
2955 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2956 (match_operand:SI 2 "arm_rhs_operand" "")))
2957 (clobber (reg:CC CC_REGNUM))])]
2960 if (operands[2] == const0_rtx)
2962 /* No need for a clobber of the condition code register here. */
2963 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2964 gen_rtx_SMIN (SImode, operands[1],
;; *smin_0: signed min(x, 0) in one flag-preserving instruction:
;; "%1, asr #31" is all-ones when %1 is negative (AND keeps %1) and zero
;; when %1 >= 0 (AND yields 0).
;; NOTE(review): the second smin operand (presumably (const_int 0), given the
;; pattern name) and the insn condition were dropped by the extraction —
;; confirm against the original arm.md.
2970 (define_insn "*smin_0"
2971 [(set (match_operand:SI 0 "s_register_operand" "=r")
2972 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2975 "and%?\\t%0, %1, %1, asr #31"
2976 [(set_attr "predicable" "yes")]
;; *arm_smin_insn: general signed min via compare + conditional moves; clobbers
;; the condition codes ("conds" "clob").
;; Alternative 0 (op1 tied to op0, "%0"): cmp then a single movge — 2 insns, 8 bytes.
;; Alternative 1 (op0 distinct): cmp, movlt, movge — 3 insns, 12 bytes.
;; NOTE(review): the insn-condition line and the "@"/opening of the multi-
;; alternative template were dropped by the extraction — confirm against the
;; original arm.md.
2979 (define_insn "*arm_smin_insn"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2981 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2982 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2983 (clobber (reg:CC CC_REGNUM))]
2986 cmp\\t%1, %2\;movge\\t%0, %2
2987 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2988 [(set_attr "conds" "clob")
2989 (set_attr "length" "8,12")]
2992 (define_expand "umaxsi3"
2994 (set (match_operand:SI 0 "s_register_operand" "")
2995 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2996 (match_operand:SI 2 "arm_rhs_operand" "")))
2997 (clobber (reg:CC CC_REGNUM))])]
;; *arm_umaxsi3: unsigned max via compare + conditional moves using the
;; unsigned condition codes (cc = carry-clear = below, cs = carry-set =
;; above-or-equal); clobbers the condition codes.
;; Alternative 0 (op1 tied to op0): cmp; movcc — 8 bytes.
;; Alternative 1 (op2 tied to op0): cmp; movcs — 8 bytes.
;; Alternative 2 (op0 distinct):    cmp; movcs; movcc — 12 bytes.
;; NOTE(review): the insn-condition line and the "@"/opening of the template
;; were dropped by the extraction — confirm against the original arm.md.
3002 (define_insn "*arm_umaxsi3"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3004 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3005 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3006 (clobber (reg:CC CC_REGNUM))]
3009 cmp\\t%1, %2\;movcc\\t%0, %2
3010 cmp\\t%1, %2\;movcs\\t%0, %1
3011 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3012 [(set_attr "conds" "clob")
3013 (set_attr "length" "8,8,12")]
3016 (define_expand "uminsi3"
3018 (set (match_operand:SI 0 "s_register_operand" "")
3019 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3020 (match_operand:SI 2 "arm_rhs_operand" "")))
3021 (clobber (reg:CC CC_REGNUM))])]
;; *arm_uminsi3: unsigned min — mirror image of *arm_umaxsi3, with the cc/cs
;; condition suffixes swapped; clobbers the condition codes.
;; Alternative 0 (op1 tied to op0): cmp; movcs — 8 bytes.
;; Alternative 1 (op2 tied to op0): cmp; movcc — 8 bytes.
;; Alternative 2 (op0 distinct):    cmp; movcc; movcs — 12 bytes.
;; NOTE(review): the insn-condition line and the "@"/opening of the template
;; were dropped by the extraction — confirm against the original arm.md.
3026 (define_insn "*arm_uminsi3"
3027 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3028 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3029 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3030 (clobber (reg:CC CC_REGNUM))]
3033 cmp\\t%1, %2\;movcs\\t%0, %2
3034 cmp\\t%1, %2\;movcc\\t%0, %1
3035 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3036 [(set_attr "conds" "clob")
3037 (set_attr "length" "8,8,12")]
3040 (define_insn "*store_minmaxsi"
3041 [(set (match_operand:SI 0 "memory_operand" "=m")
3042 (match_operator:SI 3 "minmax_operator"
3043 [(match_operand:SI 1 "s_register_operand" "r")
3044 (match_operand:SI 2 "s_register_operand" "r")]))
3045 (clobber (reg:CC CC_REGNUM))]
3048 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3049 operands[1], operands[2]);
3050 output_asm_insn (\"cmp\\t%1, %2\", operands);
3052 output_asm_insn (\"ite\t%d3\", operands);
3053 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3054 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3057 [(set_attr "conds" "clob")
3058 (set (attr "length")
3059 (if_then_else (eq_attr "is_thumb" "yes")
3062 (set_attr "type" "store1")]
3065 ; Reject the frame pointer in operand[1], since reloading this after
3066 ; it has been eliminated can cause carnage.
3067 (define_insn "*minmax_arithsi"
3068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3069 (match_operator:SI 4 "shiftable_operator"
3070 [(match_operator:SI 5 "minmax_operator"
3071 [(match_operand:SI 2 "s_register_operand" "r,r")
3072 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3073 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3074 (clobber (reg:CC CC_REGNUM))]
3075 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3078 enum rtx_code code = GET_CODE (operands[4]);
3081 if (which_alternative != 0 || operands[3] != const0_rtx
3082 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3087 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3088 operands[2], operands[3]);
3089 output_asm_insn (\"cmp\\t%2, %3\", operands);
3093 output_asm_insn (\"ite\\t%d5\", operands);
3095 output_asm_insn (\"it\\t%d5\", operands);
3097 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3099 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3102 [(set_attr "conds" "clob")
3103 (set (attr "length")
3104 (if_then_else (eq_attr "is_thumb" "yes")
3110 ;; Shift and rotation insns
;; DImode left shift.  A shift by exactly one bit uses the dedicated
;; two-insn movs/adc pattern below; other amounts fall through to the
;; generic expansion unless iWMMXt (or Maverick hard-float) handles DI
;; shifts natively.
3112 (define_expand "ashldi3"
3113 [(set (match_operand:DI 0 "s_register_operand" "")
3114 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3115 (match_operand:SI 2 "reg_or_int_operand" "")))]
3118 if (GET_CODE (operands[2]) == CONST_INT)
3120 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3122 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3125 /* Ideally we shouldn't fail here if we could know that operands[1]
3126 ends up already living in an iwmmxt register. Otherwise it's
3127 cheaper to have the alternate code being generated than moving
3128 values to iwmmxt regs and back. */
3131 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift the low word setting carry, then add-with-carry the
;; high word to itself.  Earlyclobber (=&r) stops a partial overlap of
;; input and output register pairs; the second alternative allows full
;; overlap.  Clobbers CC.
3136 (define_insn "arm_ashldi3_1bit"
3137 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3138 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3140 (clobber (reg:CC CC_REGNUM))]
3142 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3143 [(set_attr "conds" "clob")
3144 (set_attr "length" "8")]
;; SImode left shift: a constant shift of more than 31 yields zero, so
;; just emit a move of 0.
3147 (define_expand "ashlsi3"
3148 [(set (match_operand:SI 0 "s_register_operand" "")
3149 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3150 (match_operand:SI 2 "arm_rhs_operand" "")))]
3153 if (GET_CODE (operands[2]) == CONST_INT
3154 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3156 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 SImode left shift (immediate or register count).
3162 (define_insn "*thumb1_ashlsi3"
3163 [(set (match_operand:SI 0 "register_operand" "=l,l")
3164 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3165 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3168 [(set_attr "length" "2")]
;; DImode arithmetic right shift; 1-bit case handled by the pattern
;; below, the rest follows the same iWMMXt logic as ashldi3.
3171 (define_expand "ashrdi3"
3172 [(set (match_operand:DI 0 "s_register_operand" "")
3173 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3174 (match_operand:SI 2 "reg_or_int_operand" "")))]
3177 if (GET_CODE (operands[2]) == CONST_INT)
3179 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3181 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3184 /* Ideally we shouldn't fail here if we could know that operands[1]
3185 ends up already living in an iwmmxt register. Otherwise it's
3186 cheaper to have the alternate code being generated than moving
3187 values to iwmmxt regs and back. */
3190 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): asr the high word setting carry, rotate the
;; carry into the low word with rrx.  Clobbers CC.
3195 (define_insn "arm_ashrdi3_1bit"
3196 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3197 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3199 (clobber (reg:CC CC_REGNUM))]
3201 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3202 [(set_attr "conds" "clob")
3203 (set_attr "length" "8")]
;; SImode arithmetic right shift: counts above 31 are clamped to 31
;; (the result is then all copies of the sign bit).
3206 (define_expand "ashrsi3"
3207 [(set (match_operand:SI 0 "s_register_operand" "")
3208 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3209 (match_operand:SI 2 "arm_rhs_operand" "")))]
3212 if (GET_CODE (operands[2]) == CONST_INT
3213 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3214 operands[2] = GEN_INT (31);
;; Thumb-1 SImode arithmetic right shift.
3218 (define_insn "*thumb1_ashrsi3"
3219 [(set (match_operand:SI 0 "register_operand" "=l,l")
3220 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3221 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3224 [(set_attr "length" "2")]
;; DImode logical right shift; mirrors ashrdi3 above.
3227 (define_expand "lshrdi3"
3228 [(set (match_operand:DI 0 "s_register_operand" "")
3229 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3230 (match_operand:SI 2 "reg_or_int_operand" "")))]
3233 if (GET_CODE (operands[2]) == CONST_INT)
3235 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3237 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3240 /* Ideally we shouldn't fail here if we could know that operands[1]
3241 ends up already living in an iwmmxt register. Otherwise it's
3242 cheaper to have the alternate code being generated than moving
3243 values to iwmmxt regs and back. */
3246 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (logical): lsr high word setting carry, rrx into low word.
3251 (define_insn "arm_lshrdi3_1bit"
3252 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3253 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3255 (clobber (reg:CC CC_REGNUM))]
3257 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3258 [(set_attr "conds" "clob")
3259 (set_attr "length" "8")]
;; SImode logical right shift: a constant count above 31 yields zero.
3262 (define_expand "lshrsi3"
3263 [(set (match_operand:SI 0 "s_register_operand" "")
3264 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3265 (match_operand:SI 2 "arm_rhs_operand" "")))]
3268 if (GET_CODE (operands[2]) == CONST_INT
3269 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3271 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 SImode logical right shift.
3277 (define_insn "*thumb1_lshrsi3"
3278 [(set (match_operand:SI 0 "register_operand" "=l,l")
3279 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3280 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3283 [(set_attr "length" "2")]
;; ARM has only rotate-right, so rotate-left by N is rewritten as
;; rotate-right by (32 - N) % 32; a variable count is converted with a
;; runtime subtraction from 32.
3286 (define_expand "rotlsi3"
3287 [(set (match_operand:SI 0 "s_register_operand" "")
3288 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3289 (match_operand:SI 2 "reg_or_int_operand" "")))]
3292 if (GET_CODE (operands[2]) == CONST_INT)
3293 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3296 rtx reg = gen_reg_rtx (SImode);
3297 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts are reduced mod 32; Thumb-1 has no
;; immediate rotate, so constants are forced into a register there.
3303 (define_expand "rotrsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "")
3305 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "arm_rhs_operand" "")))]
3311 if (GET_CODE (operands[2]) == CONST_INT
3312 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3313 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3315 else /* TARGET_THUMB1 */
3317 if (GET_CODE (operands [2]) == CONST_INT)
3318 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 register-count rotate right (2-byte encoding; destination
;; must be the same register as the first input).
3323 (define_insn "*thumb1_rotrsi3"
3324 [(set (match_operand:SI 0 "register_operand" "=l")
3325 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3326 (match_operand:SI 2 "register_operand" "l")))]
3329 [(set_attr "length" "2")]
;; Generic SImode shift by register or immediate; assembly text comes
;; from arm_output_shift().  The "type" attribute distinguishes
;; immediate-count (alu_shift) from register-count (alu_shift_reg).
3332 (define_insn "*arm_shiftsi3"
3333 [(set (match_operand:SI 0 "s_register_operand" "=r")
3334 (match_operator:SI 3 "shift_operator"
3335 [(match_operand:SI 1 "s_register_operand" "r")
3336 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3338 "* return arm_output_shift(operands, 0);"
3339 [(set_attr "predicable" "yes")
3340 (set_attr "shift" "1")
3341 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3342 (const_string "alu_shift")
3343 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (flag-setting form,
;; arm_output_shift(..., 1)) and keeps the shifted result.
3346 (define_insn "*shiftsi3_compare0"
3347 [(set (reg:CC_NOOV CC_REGNUM)
3348 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3349 [(match_operand:SI 1 "s_register_operand" "r")
3350 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3352 (set (match_operand:SI 0 "s_register_operand" "=r")
3353 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3355 "* return arm_output_shift(operands, 1);"
3356 [(set_attr "conds" "set")
3357 (set_attr "shift" "1")
3358 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3359 (const_string "alu_shift")
3360 (const_string "alu_shift_reg")))]
;; As above but the shifted value itself is dead: only the flags are
;; wanted, the result register is a scratch.
3363 (define_insn "*shiftsi3_compare0_scratch"
3364 [(set (reg:CC_NOOV CC_REGNUM)
3365 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3366 [(match_operand:SI 1 "s_register_operand" "r")
3367 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3369 (clobber (match_scratch:SI 0 "=r"))]
3371 "* return arm_output_shift(operands, 1);"
3372 [(set_attr "conds" "set")
3373 (set_attr "shift" "1")]
;; MVN of a shifted operand (one-insn NOT-of-shift on ARM).
3376 (define_insn "*arm_notsi_shiftsi"
3377 [(set (match_operand:SI 0 "s_register_operand" "=r")
3378 (not:SI (match_operator:SI 3 "shift_operator"
3379 [(match_operand:SI 1 "s_register_operand" "r")
3380 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3383 [(set_attr "predicable" "yes")
3384 (set_attr "shift" "1")
3385 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3386 (const_string "alu_shift")
3387 (const_string "alu_shift_reg")))]
;; Flag-setting MVN-of-shift that keeps the result.
3390 (define_insn "*arm_notsi_shiftsi_compare0"
3391 [(set (reg:CC_NOOV CC_REGNUM)
3392 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3393 [(match_operand:SI 1 "s_register_operand" "r")
3394 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3396 (set (match_operand:SI 0 "s_register_operand" "=r")
3397 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3400 [(set_attr "conds" "set")
3401 (set_attr "shift" "1")
3402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3403 (const_string "alu_shift")
3404 (const_string "alu_shift_reg")))]
;; Flag-setting MVN-of-shift with the result discarded (scratch dest).
3407 (define_insn "*arm_not_shiftsi_compare0_scratch"
3408 [(set (reg:CC_NOOV CC_REGNUM)
3409 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3410 [(match_operand:SI 1 "s_register_operand" "r")
3411 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3413 (clobber (match_scratch:SI 0 "=r"))]
3416 [(set_attr "conds" "set")
3417 (set_attr "shift" "1")
3418 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3419 (const_string "alu_shift")
3420 (const_string "alu_shift_reg")))]
3423 ;; We don't really have extzv, but defining this using shifts helps
3424 ;; to reduce register pressure later on.
;; Zero-extract expander: on Thumb-2 use the ubfx insn (extzv_t2);
;; otherwise synthesize the extract as shift-left then logical
;; shift-right through a temporary.
3426 (define_expand "extzv"
3428 (ashift:SI (match_operand:SI 1 "register_operand" "")
3429 (match_operand:SI 2 "const_int_operand" "")))
3430 (set (match_operand:SI 0 "register_operand" "")
3431 (lshiftrt:SI (match_dup 4)
3432 (match_operand:SI 3 "const_int_operand" "")))]
3433 "TARGET_THUMB1 || arm_arch_thumb2"
3436 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3437 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3439 if (arm_arch_thumb2)
3441 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3446 operands[3] = GEN_INT (rshift);
3450 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3454 operands[2] = GEN_INT (lshift);
3455 operands[4] = gen_reg_rtx (SImode);
;; Signed bitfield extract (sbfx): operand 2 = width, operand 3 = lsb.
;; NOTE(review): the "(define_insn ...)" header line for this sbfx
;; pattern is not visible in this fragment.
3460 [(set (match_operand:SI 0 "s_register_operand" "=r")
3461 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3462 (match_operand:SI 2 "const_int_operand" "M")
3463 (match_operand:SI 3 "const_int_operand" "M")))]
3465 "sbfx%?\t%0, %1, %3, %2"
3466 [(set_attr "length" "4")
3467 (set_attr "predicable" "yes")]
;; Unsigned bitfield extract (ubfx) for Thumb-2 / v6T2.
3470 (define_insn "extzv_t2"
3471 [(set (match_operand:SI 0 "s_register_operand" "=r")
3472 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3473 (match_operand:SI 2 "const_int_operand" "M")
3474 (match_operand:SI 3 "const_int_operand" "M")))]
3476 "ubfx%?\t%0, %1, %3, %2"
3477 [(set_attr "length" "4")
3478 (set_attr "predicable" "yes")]
3482 ;; Unary arithmetic insns
;; DImode negate: just emits the set+CC-clobber parallel; a non-REG
;; operand is forced into a register first.
3484 (define_expand "negdi2"
3486 [(set (match_operand:DI 0 "s_register_operand" "")
3487 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3488 (clobber (reg:CC CC_REGNUM))])]
3493 if (GET_CODE (operands[1]) != REG)
3494 operands[1] = force_reg (SImode, operands[1]);
3499 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3500 ;; The second alternative is to allow the common case of a *full* overlap.
;; ARM DImode negate: rsbs low word then rsc high word (borrow chain).
3501 (define_insn "*arm_negdi2"
3502 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3503 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3504 (clobber (reg:CC CC_REGNUM))]
3506 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3507 [(set_attr "conds" "clob")
3508 (set_attr "length" "8")]
;; Thumb-1 DImode negate: zero the high word, negate the low word
;; (setting borrow), then subtract-with-carry the original high word.
3511 (define_insn "*thumb1_negdi2"
3512 [(set (match_operand:DI 0 "register_operand" "=&l")
3513 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3514 (clobber (reg:CC CC_REGNUM))]
3516 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3517 [(set_attr "length" "6")]
;; SImode negate expander.
3520 (define_expand "negsi2"
3521 [(set (match_operand:SI 0 "s_register_operand" "")
3522 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; ARM SImode negate: reverse-subtract from zero.
3527 (define_insn "*arm_negsi2"
3528 [(set (match_operand:SI 0 "s_register_operand" "=r")
3529 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3531 "rsb%?\\t%0, %1, #0"
3532 [(set_attr "predicable" "yes")]
;; Thumb-1 SImode negate.
3535 (define_insn "*thumb1_negsi2"
3536 [(set (match_operand:SI 0 "register_operand" "=l")
3537 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3540 [(set_attr "length" "2")]
;; FP negate expanders, handled by the FPA or VFP patterns.
3543 (define_expand "negsf2"
3544 [(set (match_operand:SF 0 "s_register_operand" "")
3545 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3546 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3550 (define_expand "negdf2"
3551 [(set (match_operand:DF 0 "s_register_operand" "")
3552 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3553 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3556 ;; abssi2 doesn't really clobber the condition codes if a different register
3557 ;; is being set. To keep things simple, assume during rtl manipulations that
3558 ;; it does, but tell the final scan operator the truth. Similarly for
;; SImode abs: clobber is a SCRATCH or the CC reg depending on context
;; (see comment above).
3561 (define_expand "abssi2"
3563 [(set (match_operand:SI 0 "s_register_operand" "")
3564 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3565 (clobber (match_dup 2))])]
3569 operands[2] = gen_rtx_SCRATCH (SImode);
3571 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; ARM abs: either cmp/rsblt (clobbers CC) or the branchless
;; eor/sub-with-asr-#31 sequence (CC untouched).
3574 (define_insn "*arm_abssi2"
3575 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3576 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3577 (clobber (reg:CC CC_REGNUM))]
3580 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3581 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3582 [(set_attr "conds" "clob,*")
3583 (set_attr "shift" "1")
3584 ;; predicable can't be set based on the variant, so left as no
3585 (set_attr "length" "8")]
;; Thumb-1 abs: split after reload into asr/add/xor (branchless
;; sign-mask idiom) using a scratch register.
3588 (define_insn_and_split "*thumb1_abssi2"
3589 [(set (match_operand:SI 0 "s_register_operand" "=l")
3590 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3591 (clobber (match_scratch:SI 2 "=&l"))]
3594 "TARGET_THUMB1 && reload_completed"
3595 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3596 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3597 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3599 [(set_attr "length" "6")]
;; ARM -abs(x): cmp/rsbgt or the mirrored eor/rsb sequence.
3602 (define_insn "*arm_neg_abssi2"
3603 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3604 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3605 (clobber (reg:CC CC_REGNUM))]
3608 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3609 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3610 [(set_attr "conds" "clob,*")
3611 (set_attr "shift" "1")
3612 ;; predicable can't be set based on the variant, so left as no
3613 (set_attr "length" "8")]
;; Thumb-1 -abs(x): split into asr/minus/xor after reload.
3616 (define_insn_and_split "*thumb1_neg_abssi2"
3617 [(set (match_operand:SI 0 "s_register_operand" "=l")
3618 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3619 (clobber (match_scratch:SI 2 "=&l"))]
3622 "TARGET_THUMB1 && reload_completed"
3623 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3624 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3625 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3627 [(set_attr "length" "6")]
;; FP absolute-value and square-root expanders; the actual insns are
;; provided by the hard-float coprocessor patterns (FPA/VFP).
3630 (define_expand "abssf2"
3631 [(set (match_operand:SF 0 "s_register_operand" "")
3632 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3633 "TARGET_32BIT && TARGET_HARD_FLOAT"
3636 (define_expand "absdf2"
3637 [(set (match_operand:DF 0 "s_register_operand" "")
3638 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3639 "TARGET_32BIT && TARGET_HARD_FLOAT"
3642 (define_expand "sqrtsf2"
3643 [(set (match_operand:SF 0 "s_register_operand" "")
3644 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3645 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3648 (define_expand "sqrtdf2"
3649 [(set (match_operand:DF 0 "s_register_operand" "")
3650 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3651 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; DImode bitwise NOT: split after reload into two SImode NOTs over
;; the low and high halves.
3654 (define_insn_and_split "one_cmpldi2"
3655 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3656 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3659 "TARGET_32BIT && reload_completed"
3660 [(set (match_dup 0) (not:SI (match_dup 1)))
3661 (set (match_dup 2) (not:SI (match_dup 3)))]
3664 operands[2] = gen_highpart (SImode, operands[0]);
3665 operands[0] = gen_lowpart (SImode, operands[0]);
3666 operands[3] = gen_highpart (SImode, operands[1]);
3667 operands[1] = gen_lowpart (SImode, operands[1]);
3669 [(set_attr "length" "8")
3670 (set_attr "predicable" "yes")]
;; SImode bitwise NOT expander and its ARM / Thumb-1 insns (mvn).
3673 (define_expand "one_cmplsi2"
3674 [(set (match_operand:SI 0 "s_register_operand" "")
3675 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3680 (define_insn "*arm_one_cmplsi2"
3681 [(set (match_operand:SI 0 "s_register_operand" "=r")
3682 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3685 [(set_attr "predicable" "yes")]
3688 (define_insn "*thumb1_one_cmplsi2"
3689 [(set (match_operand:SI 0 "register_operand" "=l")
3690 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3693 [(set_attr "length" "2")]
;; Flag-setting NOT, keeping the result.
3696 (define_insn "*notsi_compare0"
3697 [(set (reg:CC_NOOV CC_REGNUM)
3698 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3700 (set (match_operand:SI 0 "s_register_operand" "=r")
3701 (not:SI (match_dup 1)))]
3704 [(set_attr "conds" "set")]
;; Flag-setting NOT with the result discarded (scratch destination).
3707 (define_insn "*notsi_compare0_scratch"
3708 [(set (reg:CC_NOOV CC_REGNUM)
3709 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3711 (clobber (match_scratch:SI 0 "=r"))]
3714 [(set_attr "conds" "set")]
3717 ;; Fixed <--> Floating conversion insns
;; int -> float conversions: Cirrus Maverick has dedicated patterns;
;; otherwise the generic hard-float route is used.
3719 (define_expand "floatsisf2"
3720 [(set (match_operand:SF 0 "s_register_operand" "")
3721 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3722 "TARGET_32BIT && TARGET_HARD_FLOAT"
3724 if (TARGET_MAVERICK)
3726 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3731 (define_expand "floatsidf2"
3732 [(set (match_operand:DF 0 "s_register_operand" "")
3733 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3734 "TARGET_32BIT && TARGET_HARD_FLOAT"
3736 if (TARGET_MAVERICK)
3738 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
</define>
;; Truncating SF -> SI conversion.  On Cirrus Maverick both operands
;; must live in Cirrus FP registers, so force each into a register of
;; its own mode when necessary before emitting the Cirrus pattern.
3743 (define_expand "fix_truncsfsi2"
3744 [(set (match_operand:SI 0 "s_register_operand" "")
3745 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3746 "TARGET_32BIT && TARGET_HARD_FLOAT"
3748 if (TARGET_MAVERICK)
3750 if (!cirrus_fp_register (operands[0], SImode))
3751 operands[0] = force_reg (SImode, operands[0]);
3752 if (!cirrus_fp_register (operands[1], SFmode))
3753 operands[1] = force_reg (SFmode, operands[1]); /* Bug fix: was
	 force_reg (SFmode, operands[0]) -- copying the SImode
	 destination into the SFmode source operand.  */
3754 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; Truncating DF -> SI conversion; same Maverick handling as
;; fix_truncsfsi2 above: the DFmode source must be in a register.
3759 (define_expand "fix_truncdfsi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "")
3761 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3762 "TARGET_32BIT && TARGET_HARD_FLOAT"
3764 if (TARGET_MAVERICK)
3766 if (!cirrus_fp_register (operands[1], DFmode))
3767 operands[1] = force_reg (DFmode, operands[1]); /* Bug fix: was
	 force_reg (DFmode, operands[0]) -- forcing the SImode
	 destination into the DFmode source slot.  */
3768 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF narrowing conversion expander.
3775 (define_expand "truncdfsf2"
3776 [(set (match_operand:SF 0 "s_register_operand" "")
3778 (match_operand:DF 1 "s_register_operand" "")))]
3779 "TARGET_32BIT && TARGET_HARD_FLOAT"
3783 ;; Zero and sign extension instructions.
;; SI -> DI zero extension expander and ARM insn: copy the low word if
;; needed, then zero the high word.
3785 (define_expand "zero_extendsidi2"
3786 [(set (match_operand:DI 0 "s_register_operand" "")
3787 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3792 (define_insn "*arm_zero_extendsidi2"
3793 [(set (match_operand:DI 0 "s_register_operand" "=r")
3794 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3797 if (REGNO (operands[1])
3798 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3799 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3800 return \"mov%?\\t%R0, #0\";
3802 [(set_attr "length" "8")
3803 (set_attr "predicable" "yes")]
;; QI -> DI zero extension: either and-with-255 of a register source or
;; ldrb from memory, then zero the high word.
3806 (define_expand "zero_extendqidi2"
3807 [(set (match_operand:DI 0 "s_register_operand" "")
3808 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3813 (define_insn "*arm_zero_extendqidi2"
3814 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3815 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3818 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3819 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3820 [(set_attr "length" "8")
3821 (set_attr "predicable" "yes")
3822 (set_attr "type" "*,load_byte")
3823 (set_attr "pool_range" "*,4092")
3824 (set_attr "neg_pool_range" "*,4084")]
;; SI -> DI sign extension: copy the low word if needed, then fill the
;; high word with the sign via asr #31.
3827 (define_expand "extendsidi2"
3828 [(set (match_operand:DI 0 "s_register_operand" "")
3829 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3834 (define_insn "*arm_extendsidi2"
3835 [(set (match_operand:DI 0 "s_register_operand" "=r")
3836 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3839 if (REGNO (operands[1])
3840 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3841 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3842 return \"mov%?\\t%R0, %Q0, asr #31\";
3844 [(set_attr "length" "8")
3845 (set_attr "shift" "1")
3846 (set_attr "predicable" "yes")]
;; HI -> SI zero extension.  Uses ldrh/uxth when the architecture has
;; them; pre-v4 ARM loads the halfword bytewise (movhi_bytes); the
;; fallback is a shift-up/shift-down pair through a temporary.
3849 (define_expand "zero_extendhisi2"
3851 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3853 (set (match_operand:SI 0 "s_register_operand" "")
3854 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3858 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3860 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3861 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3865 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3867 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3871 if (!s_register_operand (operands[1], HImode))
3872 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3876 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3877 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3881 operands[1] = gen_lowpart (SImode, operands[1]);
3882 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 (pre-v6) ldrh from memory; special-cases constant-pool
;; (LABEL_REF) addresses and works around reload handing us an
;; SP-relative address ldrh cannot encode.
3886 (define_insn "*thumb1_zero_extendhisi2"
3887 [(set (match_operand:SI 0 "register_operand" "=l")
3888 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3889 "TARGET_THUMB1 && !arm_arch6"
3891 rtx mem = XEXP (operands[1], 0);
3893 if (GET_CODE (mem) == CONST)
3894 mem = XEXP (mem, 0);
3896 if (GET_CODE (mem) == LABEL_REF)
3897 return \"ldr\\t%0, %1\";
3899 if (GET_CODE (mem) == PLUS)
3901 rtx a = XEXP (mem, 0);
3902 rtx b = XEXP (mem, 1);
3904 /* This can happen due to bugs in reload. */
3905 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3908 ops[0] = operands[0];
3911 output_asm_insn (\"mov %0, %1\", ops);
3913 XEXP (mem, 0) = operands[0];
3916 else if ( GET_CODE (a) == LABEL_REF
3917 && GET_CODE (b) == CONST_INT)
3918 return \"ldr\\t%0, %1\";
3921 return \"ldrh\\t%0, %1\";
3923 [(set_attr "length" "4")
3924 (set_attr "type" "load_byte")
3925 (set_attr "pool_range" "60")]
;; Thumb-1 v6: uxth for a register source, otherwise the same ldrh
;; logic as the pre-v6 pattern.
3928 (define_insn "*thumb1_zero_extendhisi2_v6"
3929 [(set (match_operand:SI 0 "register_operand" "=l,l")
3930 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3931 "TARGET_THUMB1 && arm_arch6"
3935 if (which_alternative == 0)
3936 return \"uxth\\t%0, %1\";
3938 mem = XEXP (operands[1], 0);
3940 if (GET_CODE (mem) == CONST)
3941 mem = XEXP (mem, 0);
3943 if (GET_CODE (mem) == LABEL_REF)
3944 return \"ldr\\t%0, %1\";
3946 if (GET_CODE (mem) == PLUS)
3948 rtx a = XEXP (mem, 0);
3949 rtx b = XEXP (mem, 1);
3951 /* This can happen due to bugs in reload. */
3952 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3955 ops[0] = operands[0];
3958 output_asm_insn (\"mov %0, %1\", ops);
3960 XEXP (mem, 0) = operands[0];
3963 else if ( GET_CODE (a) == LABEL_REF
3964 && GET_CODE (b) == CONST_INT)
3965 return \"ldr\\t%0, %1\";
3968 return \"ldrh\\t%0, %1\";
3970 [(set_attr "length" "2,4")
3971 (set_attr "type" "alu_shift,load_byte")
3972 (set_attr "pool_range" "*,60")]
;; ARM (v4, pre-v6) ldrh from memory.
3975 (define_insn "*arm_zero_extendhisi2"
3976 [(set (match_operand:SI 0 "s_register_operand" "=r")
3977 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3978 "TARGET_ARM && arm_arch4 && !arm_arch6"
3980 [(set_attr "type" "load_byte")
3981 (set_attr "predicable" "yes")
3982 (set_attr "pool_range" "256")
3983 (set_attr "neg_pool_range" "244")]
;; ARM v6: uxth for register source, ldrh for memory.
3986 (define_insn "*arm_zero_extendhisi2_v6"
3987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3988 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3989 "TARGET_ARM && arm_arch6"
3993 [(set_attr "type" "alu_shift,load_byte")
3994 (set_attr "predicable" "yes")
3995 (set_attr "pool_range" "*,256")
3996 (set_attr "neg_pool_range" "*,244")]
;; Fused zero-extend-halfword plus add (uxtah).
3999 (define_insn "*arm_zero_extendhisi2addsi"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4002 (match_operand:SI 2 "s_register_operand" "r")))]
4004 "uxtah%?\\t%0, %2, %1"
4005 [(set_attr "type" "alu_shift")
4006 (set_attr "predicable" "yes")]
;; QI -> SI zero extension.  Pre-v6 register sources: on ARM an AND
;; with 255, on Thumb a shift-up-24/shift-down-24 pair through a
;; temporary.
4009 (define_expand "zero_extendqisi2"
4010 [(set (match_operand:SI 0 "s_register_operand" "")
4011 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4014 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4018 emit_insn (gen_andsi3 (operands[0],
4019 gen_lowpart (SImode, operands[1]),
4022 else /* TARGET_THUMB */
4024 rtx temp = gen_reg_rtx (SImode);
4027 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4028 operands[1] = gen_lowpart (SImode, operands[1]);
4031 ops[1] = operands[1];
4032 ops[2] = GEN_INT (24);
4034 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4035 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4037 ops[0] = operands[0];
4039 ops[2] = GEN_INT (24);
4041 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4042 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
;; Thumb-1 (pre-v6) ldrb from memory.
4049 (define_insn "*thumb1_zero_extendqisi2"
4050 [(set (match_operand:SI 0 "register_operand" "=l")
4051 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4052 "TARGET_THUMB1 && !arm_arch6"
4054 [(set_attr "length" "2")
4055 (set_attr "type" "load_byte")
4056 (set_attr "pool_range" "32")]
;; Thumb-1 v6: uxtb for register source, ldrb for memory.
4059 (define_insn "*thumb1_zero_extendqisi2_v6"
4060 [(set (match_operand:SI 0 "register_operand" "=l,l")
4061 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4062 "TARGET_THUMB1 && arm_arch6"
4066 [(set_attr "length" "2,2")
4067 (set_attr "type" "alu_shift,load_byte")
4068 (set_attr "pool_range" "*,32")]
;; ARM (pre-v6) ldrb from memory.
4071 (define_insn "*arm_zero_extendqisi2"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r")
4073 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4074 "TARGET_ARM && !arm_arch6"
4075 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4076 [(set_attr "type" "load_byte")
4077 (set_attr "predicable" "yes")
4078 (set_attr "pool_range" "4096")
4079 (set_attr "neg_pool_range" "4084")]
;; ARM v6: uxtb for register source, ldrb for memory.
4082 (define_insn "*arm_zero_extendqisi2_v6"
4083 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4084 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4085 "TARGET_ARM && arm_arch6"
4088 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4089 [(set_attr "type" "alu_shift,load_byte")
4090 (set_attr "predicable" "yes")
4091 (set_attr "pool_range" "*,4096")
4092 (set_attr "neg_pool_range" "*,4084")]
;; Fused zero-extend-byte plus add (uxtab).
4095 (define_insn "*arm_zero_extendqisi2addsi"
4096 [(set (match_operand:SI 0 "s_register_operand" "=r")
4097 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4098 (match_operand:SI 2 "s_register_operand" "r")))]
4100 "uxtab%?\\t%0, %2, %1"
4101 [(set_attr "predicable" "yes")
4102 (set_attr "insn" "xtab")
4103 (set_attr "type" "alu_shift")]
;; Splits: rewrite zero-extension of the low byte of an SImode value
;; (subreg byte 0 little-endian, byte 3 big-endian) as move + AND 255.
4107 [(set (match_operand:SI 0 "s_register_operand" "")
4108 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4109 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4110 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4111 [(set (match_dup 2) (match_dup 1))
4112 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4117 [(set (match_operand:SI 0 "s_register_operand" "")
4118 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4119 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4120 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4121 [(set (match_dup 2) (match_dup 1))
4122 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Compare a QImode value against zero, setting only the Z flag.
4126 (define_insn "*compareqi_eq0"
4127 [(set (reg:CC_Z CC_REGNUM)
4128 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4132 [(set_attr "conds" "set")]
;; HI -> SI sign extension.  Memory sources use ldrsh (via the Thumb-1
;; pattern or extendhisi2_mem when ldrsh is unavailable); register
;; sources fall back to a shift-up-16/asr-down pair via a temporary.
4135 (define_expand "extendhisi2"
4137 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4139 (set (match_operand:SI 0 "s_register_operand" "")
4140 (ashiftrt:SI (match_dup 2)
4145 if (GET_CODE (operands[1]) == MEM)
4149 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4154 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4155 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4160 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4162 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4166 if (!s_register_operand (operands[1], HImode))
4167 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4172 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4174 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4175 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4180 operands[1] = gen_lowpart (SImode, operands[1]);
4181 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 (pre-v6) ldrsh; needs a scratch because Thumb-1 ldrsh only
;; takes a reg+reg address, so constant offsets are materialized into
;; the scratch first.  Constant-pool addresses use plain ldr.
4185 (define_insn "thumb1_extendhisi2"
4186 [(set (match_operand:SI 0 "register_operand" "=l")
4187 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4188 (clobber (match_scratch:SI 2 "=&l"))]
4189 "TARGET_THUMB1 && !arm_arch6"
4193 rtx mem = XEXP (operands[1], 0);
4195 /* This code used to try to use 'V', and fix the address only if it was
4196 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4197 range of QImode offsets, and offsettable_address_p does a QImode
4200 if (GET_CODE (mem) == CONST)
4201 mem = XEXP (mem, 0);
4203 if (GET_CODE (mem) == LABEL_REF)
4204 return \"ldr\\t%0, %1\";
4206 if (GET_CODE (mem) == PLUS)
4208 rtx a = XEXP (mem, 0);
4209 rtx b = XEXP (mem, 1);
4211 if (GET_CODE (a) == LABEL_REF
4212 && GET_CODE (b) == CONST_INT)
4213 return \"ldr\\t%0, %1\";
4215 if (GET_CODE (b) == REG)
4216 return \"ldrsh\\t%0, %1\";
4224 ops[2] = const0_rtx;
4227 gcc_assert (GET_CODE (ops[1]) == REG);
4229 ops[0] = operands[0];
4230 ops[3] = operands[2];
4231 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4234 [(set_attr "length" "4")
4235 (set_attr "type" "load_byte")
4236 (set_attr "pool_range" "1020")]
4239 ;; We used to have an early-clobber on the scratch register here.
4240 ;; However, there's a bug somewhere in reload which means that this
4241 ;; can be partially ignored during spill allocation if the memory
4242 ;; address also needs reloading; this causes us to die later on when
4243 ;; we try to verify the operands. Fortunately, we don't really need
4244 ;; the early-clobber: we can always use operand 0 if operand 2
4245 ;; overlaps the address.
;; Thumb-1 v6: sxth for register source, otherwise the same ldrsh
;; logic; falls back to operand 0 as the index temporary when the
;; scratch overlaps the address (see comment above).
4246 (define_insn "*thumb1_extendhisi2_insn_v6"
4247 [(set (match_operand:SI 0 "register_operand" "=l,l")
4248 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4249 (clobber (match_scratch:SI 2 "=X,l"))]
4250 "TARGET_THUMB1 && arm_arch6"
4256 if (which_alternative == 0)
4257 return \"sxth\\t%0, %1\";
4259 mem = XEXP (operands[1], 0);
4261 /* This code used to try to use 'V', and fix the address only if it was
4262 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4263 range of QImode offsets, and offsettable_address_p does a QImode
4266 if (GET_CODE (mem) == CONST)
4267 mem = XEXP (mem, 0);
4269 if (GET_CODE (mem) == LABEL_REF)
4270 return \"ldr\\t%0, %1\";
4272 if (GET_CODE (mem) == PLUS)
4274 rtx a = XEXP (mem, 0);
4275 rtx b = XEXP (mem, 1);
4277 if (GET_CODE (a) == LABEL_REF
4278 && GET_CODE (b) == CONST_INT)
4279 return \"ldr\\t%0, %1\";
4281 if (GET_CODE (b) == REG)
4282 return \"ldrsh\\t%0, %1\";
4290 ops[2] = const0_rtx;
4293 gcc_assert (GET_CODE (ops[1]) == REG);
4295 ops[0] = operands[0];
4296 if (reg_mentioned_p (operands[2], ops[1]))
4299 ops[3] = operands[2];
4300 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4303 [(set_attr "length" "2,4")
4304 (set_attr "type" "alu_shift,load_byte")
4305 (set_attr "pool_range" "*,1020")]
4308 ;; This pattern will only be used when ldsh is not available
;; Synthesize an HI->SI sign-extending load from two byte loads plus
;; shift/or, for targets without LDRSH (pre-ARMv4).  Byte order of the
;; two loads is swapped for big-endian via operands[4]/[5].
4309 (define_expand "extendhisi2_mem"
4310 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4312 (zero_extend:SI (match_dup 7)))
4313 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4314 (set (match_operand:SI 0 "" "")
4315 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4320 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4322 mem1 = change_address (operands[1], QImode, addr);
4323 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4324 operands[0] = gen_lowpart (SImode, operands[0]);
4326 operands[2] = gen_reg_rtx (SImode);
4327 operands[3] = gen_reg_rtx (SImode);
4328 operands[6] = gen_reg_rtx (SImode);
4331 if (BYTES_BIG_ENDIAN)
4333 operands[4] = operands[2];
4334 operands[5] = operands[3];
4338 operands[4] = operands[3];
4339 operands[5] = operands[2];
;; ARM-mode HI->SI sign-extending load (LDRSH), for v4 cores that lack
;; the v6 SXTH alternative handled by the pattern below.
4344 (define_insn "*arm_extendhisi2"
4345 [(set (match_operand:SI 0 "s_register_operand" "=r")
4346 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4347 "TARGET_ARM && arm_arch4 && !arm_arch6"
4348 "ldr%(sh%)\\t%0, %1"
4349 [(set_attr "type" "load_byte")
4350 (set_attr "predicable" "yes")
4351 (set_attr "pool_range" "256")
4352 (set_attr "neg_pool_range" "244")]
4355 ;; ??? Check Thumb-2 pool range
;; 32-bit (ARM/Thumb-2) v6 HI->SI sign extend: register (SXTH) or
;; memory (LDRSH) alternative.  Output template lines are elided in
;; this listing (original 4360-4362).
4356 (define_insn "*arm_extendhisi2_v6"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4358 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4359 "TARGET_32BIT && arm_arch6"
4363 [(set_attr "type" "alu_shift,load_byte")
4364 (set_attr "predicable" "yes")
4365 (set_attr "pool_range" "*,256")
4366 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-halfword-and-add: SXTAH %0, %2, %1
;; (condition line elided in this listing; presumably TARGET_INT_SIMD
;; or similar — confirm against full arm.md).
4369 (define_insn "*arm_extendhisi2addsi"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4372 (match_operand:SI 2 "s_register_operand" "r")))]
4374 "sxtah%?\\t%0, %2, %1"
;; QI->HI sign extend.  With v4+ and a memory source, emit LDRSB
;; directly; otherwise synthesize via shift-left-24/arith-shift-right
;; on SImode subregs.
4377 (define_expand "extendqihi2"
4379 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4381 (set (match_operand:HI 0 "s_register_operand" "")
4382 (ashiftrt:SI (match_dup 2)
4387 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4389 emit_insn (gen_rtx_SET (VOIDmode,
4391 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4394 if (!s_register_operand (operands[1], QImode))
4395 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4396 operands[0] = gen_lowpart (SImode, operands[0]);
4397 operands[1] = gen_lowpart (SImode, operands[1]);
4398 operands[2] = gen_reg_rtx (SImode);
;; ARM-mode QI->HI sign-extending load (LDRSB).  'Uq' restricts the
;; address to forms valid for ldrsb.
4402 (define_insn "*arm_extendqihi_insn"
4403 [(set (match_operand:HI 0 "s_register_operand" "=r")
4404 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4405 "TARGET_ARM && arm_arch4"
4406 "ldr%(sb%)\\t%0, %1"
4407 [(set_attr "type" "load_byte")
4408 (set_attr "predicable" "yes")
4409 (set_attr "pool_range" "256")
4410 (set_attr "neg_pool_range" "244")]
;; QI->SI sign extend.  Memory sources on Thumb or v4+ use a native
;; sign-extending load; otherwise fall back to the shift-pair expansion.
4413 (define_expand "extendqisi2"
4415 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4417 (set (match_operand:SI 0 "s_register_operand" "")
4418 (ashiftrt:SI (match_dup 2)
4423 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4425 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4426 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4430 if (!s_register_operand (operands[1], QImode))
4431 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4435 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4436 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4440 operands[1] = gen_lowpart (SImode, operands[1]);
4441 operands[2] = gen_reg_rtx (SImode);
;; ARM-mode QI->SI sign-extending load (LDRSB) for v4, pre-v6 cores.
4445 (define_insn "*arm_extendqisi"
4446 [(set (match_operand:SI 0 "s_register_operand" "=r")
4447 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4448 "TARGET_ARM && arm_arch4 && !arm_arch6"
4449 "ldr%(sb%)\\t%0, %1"
4450 [(set_attr "type" "load_byte")
4451 (set_attr "predicable" "yes")
4452 (set_attr "pool_range" "256")
4453 (set_attr "neg_pool_range" "244")]
;; ARM-mode v6 QI->SI sign extend: SXTB for register source, LDRSB for
;; memory.  Output template lines elided in this listing (4461-4463).
4456 (define_insn "*arm_extendqisi_v6"
4457 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4459 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4460 "TARGET_ARM && arm_arch6"
4464 [(set_attr "type" "alu_shift,load_byte")
4465 (set_attr "predicable" "yes")
4466 (set_attr "pool_range" "*,256")
4467 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-byte-and-add: SXTAB %0, %2, %1
;; (condition line elided in this listing — confirm against full arm.md).
4470 (define_insn "*arm_extendqisi2addsi"
4471 [(set (match_operand:SI 0 "s_register_operand" "=r")
4472 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4473 (match_operand:SI 2 "s_register_operand" "r")))]
4475 "sxtab%?\\t%0, %2, %1"
4476 [(set_attr "type" "alu_shift")
4477 (set_attr "insn" "xtab")
4478 (set_attr "predicable" "yes")]
;; Thumb-1 (pre-v6) QI->SI sign-extending load.  Thumb-1 LDRSB only
;; allows [reg, reg] addressing, so other address shapes are handled by
;; materializing an index, or by LDRB followed by LSL/ASR #24 when the
;; destination can serve as scratch (it overlaps the address register).
;; Label/pool references use plain LDR (word-aligned pool data).
4481 (define_insn "*thumb1_extendqisi2"
4482 [(set (match_operand:SI 0 "register_operand" "=l,l")
4483 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4484 "TARGET_THUMB1 && !arm_arch6"
4488 rtx mem = XEXP (operands[1], 0);
4490 if (GET_CODE (mem) == CONST)
4491 mem = XEXP (mem, 0);
4493 if (GET_CODE (mem) == LABEL_REF)
4494 return \"ldr\\t%0, %1\";
4496 if (GET_CODE (mem) == PLUS
4497 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4498 return \"ldr\\t%0, %1\";
4500 if (which_alternative == 0)
4501 return \"ldrsb\\t%0, %1\";
4503 ops[0] = operands[0];
4505 if (GET_CODE (mem) == PLUS)
4507 rtx a = XEXP (mem, 0);
4508 rtx b = XEXP (mem, 1);
4513 if (GET_CODE (a) == REG)
4515 if (GET_CODE (b) == REG)
4516 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4517 else if (REGNO (a) == REGNO (ops[0]))
4519 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4520 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4521 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4524 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4528 gcc_assert (GET_CODE (b) == REG);
4529 if (REGNO (b) == REGNO (ops[0]))
4531 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4532 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4533 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4536 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4539 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4541 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4542 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4543 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4548 ops[2] = const0_rtx;
4550 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4554 [(set_attr "length" "2,6")
4555 (set_attr "type" "load_byte,load_byte")
4556 (set_attr "pool_range" "32,32")]
;; Thumb-1 (v6) QI->SI sign extend.  Same address-shape gymnastics as
;; the pre-v6 pattern above, but the LDRB fallback is followed by a
;; single SXTB instead of the LSL/ASR #24 pair, and a register source
;; (alt 0) is just SXTB.
4559 (define_insn "*thumb1_extendqisi2_v6"
4560 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4561 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4562 "TARGET_THUMB1 && arm_arch6"
4568 if (which_alternative == 0)
4569 return \"sxtb\\t%0, %1\";
4571 mem = XEXP (operands[1], 0);
4573 if (GET_CODE (mem) == CONST)
4574 mem = XEXP (mem, 0);
4576 if (GET_CODE (mem) == LABEL_REF)
4577 return \"ldr\\t%0, %1\";
4579 if (GET_CODE (mem) == PLUS
4580 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4581 return \"ldr\\t%0, %1\";
4583 if (which_alternative == 0)
4584 return \"ldrsb\\t%0, %1\";
4586 ops[0] = operands[0];
4588 if (GET_CODE (mem) == PLUS)
4590 rtx a = XEXP (mem, 0);
4591 rtx b = XEXP (mem, 1);
4596 if (GET_CODE (a) == REG)
4598 if (GET_CODE (b) == REG)
4599 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4600 else if (REGNO (a) == REGNO (ops[0]))
4602 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4603 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4606 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4610 gcc_assert (GET_CODE (b) == REG);
4611 if (REGNO (b) == REGNO (ops[0]))
4613 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4614 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4617 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4620 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4622 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4623 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4628 ops[2] = const0_rtx;
4630 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4634 [(set_attr "length" "2,2,4")
4635 (set_attr "type" "alu_shift,load_byte,load_byte")
4636 (set_attr "pool_range" "*,32,32")]
;; SF->DF float extension; only available with a hard-float 32-bit target
;; (matched by the FPA/VFP/Maverick insns elsewhere in the backend).
4639 (define_expand "extendsfdf2"
4640 [(set (match_operand:DF 0 "s_register_operand" "")
4641 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4642 "TARGET_32BIT && TARGET_HARD_FLOAT"
4646 ;; Move insns (including loads and stores)
4648 ;; XXX Just some ideas about movti.
4649 ;; I don't think these are a good idea on the arm, there just aren't enough
4651 ;;(define_expand "loadti"
4652 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4653 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4656 ;;(define_expand "storeti"
4657 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4658 ;; (match_operand:TI 1 "s_register_operand" ""))]
4661 ;;(define_expand "movti"
4662 ;; [(set (match_operand:TI 0 "general_operand" "")
4663 ;; (match_operand:TI 1 "general_operand" ""))]
4669 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4670 ;; operands[1] = copy_to_reg (operands[1]);
4671 ;; if (GET_CODE (operands[0]) == MEM)
4672 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4673 ;; else if (GET_CODE (operands[1]) == MEM)
4674 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4678 ;; emit_insn (insn);
4682 ;; Recognize garbage generated above.
4685 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4686 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4690 ;; register mem = (which_alternative < 3);
4691 ;; register const char *template;
4693 ;; operands[mem] = XEXP (operands[mem], 0);
4694 ;; switch (which_alternative)
4696 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4697 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4698 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4699 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4700 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4701 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4703 ;; output_asm_insn (template, operands);
;; DImode move expander.  Before reload, force a non-register destination's
;; source into a register so the move insns below only ever see at most
;; one memory operand.
4707 (define_expand "movdi"
4708 [(set (match_operand:DI 0 "general_operand" "")
4709 (match_operand:DI 1 "general_operand" ""))]
4712 if (can_create_pseudo_p ())
4714 if (GET_CODE (operands[0]) != REG)
4715 operands[1] = force_reg (DImode, operands[1]);
;; Core-register DImode move (no VFP/Maverick).  Constant alternatives
;; Da/Db/Dc cost 8/12/16 bytes; memory alternatives use
;; output_move_double for LDM/STM or pairs of LDR/STR.
4720 (define_insn "*arm_movdi"
4721 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4722 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4724 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4726 && ( register_operand (operands[0], DImode)
4727 || register_operand (operands[1], DImode))"
4729 switch (which_alternative)
4736 return output_move_double (operands);
4739 [(set_attr "length" "8,12,16,8,8")
4740 (set_attr "type" "*,*,*,load2,store2")
4741 (set_attr "pool_range" "*,*,*,1020,*")
4742 (set_attr "neg_pool_range" "*,*,*,1008,*")]
;; Split a cheap 64-bit constant move into two arm_split_constant calls,
;; one per 32-bit half, when inlining costs no more than a pool load
;; (the define_split header line is elided in this listing).
4746 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4747 (match_operand:ANY64 1 "const_double_operand" ""))]
4750 && (arm_const_double_inline_cost (operands[1])
4751 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4754 arm_split_constant (SET, SImode, curr_insn,
4755 INTVAL (gen_lowpart (SImode, operands[1])),
4756 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4757 arm_split_constant (SET, SImode, curr_insn,
4758 INTVAL (gen_highpart_mode (SImode,
4759 GET_MODE (operands[0]),
4761 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4766 ; If optimizing for size, or if we have load delay slots, then
4767 ; we want to split the constant into two separate operations.
4768 ; In both cases this may split a trivial part into a single data op
4769 ; leaving a single complex constant to load. We can also get longer
4770 ; offsets in a LDR which means we get better chances of sharing the pool
4771 ; entries. Finally, we can normally do a better job of scheduling
4772 ; LDR instructions than we can with LDM.
4773 ; This pattern will only match if the one above did not.
;; Fallback split for 64-bit constants that the inline-cost split above
;; rejected: two independent SImode sets, letting each half become either
;; a data op or its own pool load (define_split header elided here).
4775 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4776 (match_operand:ANY64 1 "const_double_operand" ""))]
4777 "TARGET_ARM && reload_completed
4778 && arm_const_double_by_parts (operands[1])"
4779 [(set (match_dup 0) (match_dup 1))
4780 (set (match_dup 2) (match_dup 3))]
4782 operands[2] = gen_highpart (SImode, operands[0]);
4783 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4785 operands[0] = gen_lowpart (SImode, operands[0]);
4786 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit register-to-register move into two SImode moves after
;; reload, ordering the halves so a partial overlap (dest low == src
;; high) is copied high-half-first and nothing is clobbered early.
4791 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4792 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4793 "TARGET_EITHER && reload_completed"
4794 [(set (match_dup 0) (match_dup 1))
4795 (set (match_dup 2) (match_dup 3))]
4797 operands[2] = gen_highpart (SImode, operands[0]);
4798 operands[3] = gen_highpart (SImode, operands[1]);
4799 operands[0] = gen_lowpart (SImode, operands[0]);
4800 operands[1] = gen_lowpart (SImode, operands[1]);
4802 /* Handle a partial overlap. */
4803 if (rtx_equal_p (operands[0], operands[3]))
4805 rtx tmp0 = operands[0];
4806 rtx tmp1 = operands[1];
4808 operands[0] = operands[2];
4809 operands[1] = operands[3];
4816 ;; We can't actually do base+index doubleword loads if the index and
4817 ;; destination overlap. Split here so that we at least have chance to
;; Base+index DI loads cannot be done as LDRD/LDM when the destination
;; overlaps both address registers; pre-compute the address into the low
;; half of the destination, then load through it (see comment above).
4820 [(set (match_operand:DI 0 "s_register_operand" "")
4821 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4822 (match_operand:SI 2 "s_register_operand" ""))))]
4824 && reg_overlap_mentioned_p (operands[0], operands[1])
4825 && reg_overlap_mentioned_p (operands[0], operands[2])"
4827 (plus:SI (match_dup 1)
4830 (mem:DI (match_dup 4)))]
4832 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4836 ;;; ??? This should have alternatives for constants.
4837 ;;; ??? This was originally identical to the movdf_insn pattern.
4838 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4839 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move.  Register-pair copies pick the order that avoids
;; clobbering an overlapping source; small constants are built with
;; mov/neg/asr; memory uses LDMIA/STMIA or two STRs (offset built via
;; operands[2]); the 'i' alternative is a pool reference resolved by
;; thumb_load_double_from_address.
4840 (define_insn "*thumb1_movdi_insn"
4841 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4842 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4844 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4845 && ( register_operand (operands[0], DImode)
4846 || register_operand (operands[1], DImode))"
4849 switch (which_alternative)
4853 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4854 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4855 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4857 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4859 operands[1] = GEN_INT (- INTVAL (operands[1]));
4860 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4862 return \"ldmia\\t%1, {%0, %H0}\";
4864 return \"stmia\\t%0, {%1, %H1}\";
4866 return thumb_load_double_from_address (operands);
4868 operands[2] = gen_rtx_MEM (SImode,
4869 plus_constant (XEXP (operands[0], 0), 4));
4870 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4873 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4874 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4875 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4878 [(set_attr "length" "4,4,6,2,2,6,4,4")
4879 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4880 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles, in order: forcing mem=const/mem=mem
;; into reg form; splitting ARM-unencodable constants; MOVW/MOVT pairs
;; for symbols (when -mword-relocations is not in effect and not PIC);
;; section-anchor offsets that escape their block; TLS symbol
;; legitimization; and PIC address legitimization.  Several branch lines
;; are elided in this listing (e.g. 4890-4892, 4912-4916).
4883 (define_expand "movsi"
4884 [(set (match_operand:SI 0 "general_operand" "")
4885 (match_operand:SI 1 "general_operand" ""))]
4889 rtx base, offset, tmp;
4893 /* Everything except mem = const or mem = mem can be done easily. */
4894 if (GET_CODE (operands[0]) == MEM)
4895 operands[1] = force_reg (SImode, operands[1]);
4896 if (arm_general_register_operand (operands[0], SImode)
4897 && GET_CODE (operands[1]) == CONST_INT
4898 && !(const_ok_for_arm (INTVAL (operands[1]))
4899 || const_ok_for_arm (~INTVAL (operands[1]))))
4901 arm_split_constant (SET, SImode, NULL_RTX,
4902 INTVAL (operands[1]), operands[0], NULL_RTX,
4903 optimize && can_create_pseudo_p ());
4907 if (TARGET_USE_MOVT && !target_word_relocations
4908 && GET_CODE (operands[1]) == SYMBOL_REF
4909 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4911 arm_emit_movpair (operands[0], operands[1]);
4915 else /* TARGET_THUMB1... */
4917 if (can_create_pseudo_p ())
4919 if (GET_CODE (operands[0]) != REG)
4920 operands[1] = force_reg (SImode, operands[1]);
4924 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4926 split_const (operands[1], &base, &offset);
4927 if (GET_CODE (base) == SYMBOL_REF
4928 && !offset_within_block_p (base, INTVAL (offset)))
4930 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4931 emit_move_insn (tmp, base);
4932 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4937 /* Recognize the case where operand[1] is a reference to thread-local
4938 data and load its address to a register. */
4939 if (arm_tls_referenced_p (operands[1]))
4941 rtx tmp = operands[1];
4944 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4946 addend = XEXP (XEXP (tmp, 0), 1);
4947 tmp = XEXP (XEXP (tmp, 0), 0);
4950 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4951 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4953 tmp = legitimize_tls_address (tmp,
4954 !can_create_pseudo_p () ? operands[0] : 0);
4957 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4958 tmp = force_operand (tmp, operands[0]);
4963 && (CONSTANT_P (operands[1])
4964 || symbol_mentioned_p (operands[1])
4965 || label_mentioned_p (operands[1])))
4966 operands[1] = legitimize_pic_address (operands[1], SImode,
4967 (!can_create_pseudo_p ()
4974 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
4975 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
4976 ;; so this does not matter.
;; MOVT: write the upper 16 bits of a symbol address, keeping the low
;; half already in operand 0 (matched by LO_SUM — note the backwards
;; HIGH/LO_SUM convention documented above).
4977 (define_insn "*arm_movt"
4978 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4979 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
4980 (match_operand:SI 2 "general_operand" "i")))]
4982 "movt%?\t%0, #:upper16:%c2"
4983 [(set_attr "predicable" "yes")
4984 (set_attr "length" "4")]
;; MOVW: load the lower 16 bits of a symbol address (HIGH rtx — see the
;; backwards HIGH/LO_SUM note above).
4987 (define_insn "*arm_movw"
4988 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4989 (high:SI (match_operand:SI 1 "general_operand" "i")))]
4991 "movw%?\t%0, #:lower16:%c1"
4992 [(set_attr "predicable" "yes")
4993 (set_attr "length" "4")]
;; ARM-mode SImode move: MOV/MVN/MOVW immediates (I/K/N), LDR/STR for
;; memory.  Output templates elided in this listing (5003-5009).
4996 (define_insn "*arm_movsi_insn"
4997 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4998 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4999 "TARGET_ARM && ! TARGET_IWMMXT
5000 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5001 && ( register_operand (operands[0], SImode)
5002 || register_operand (operands[1], SImode))"
5010 [(set_attr "type" "*,*,*,*,load1,store1")
5011 (set_attr "predicable" "yes")
5012 (set_attr "pool_range" "*,*,*,*,4096,*")
5013 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split an SImode constant that neither MOV nor MVN can encode into an
;; arm_split_constant sequence (define_split header elided here).
5017 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5018 (match_operand:SI 1 "const_int_operand" ""))]
5020 && (!(const_ok_for_arm (INTVAL (operands[1]))
5021 || const_ok_for_arm (~INTVAL (operands[1]))))"
5022 [(clobber (const_int 0))]
5024 arm_split_constant (SET, SImode, NULL_RTX,
5025 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move; low regs plus hi-reg MOVs ('*lhk'), pool and
;; memory alternatives.  Output templates elided in this listing
;; (5036-5045).
5030 (define_insn "*thumb1_movsi_insn"
5031 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5032 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5034 && ( register_operand (operands[0], SImode)
5035 || register_operand (operands[1], SImode))"
5046 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5047 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5048 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
;; Thumb-1: a 'J' (negative-range) constant becomes mov of the negated
;; value followed by NEG (header line of this split elided here).
5052 [(set (match_operand:SI 0 "register_operand" "")
5053 (match_operand:SI 1 "const_int_operand" ""))]
5054 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5055 [(set (match_dup 0) (match_dup 1))
5056 (set (match_dup 0) (neg:SI (match_dup 0)))]
5057 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
;; Thumb-1: a 'K' constant (8-bit value shifted left) becomes mov of the
;; unshifted byte followed by LSL.  The loop finds the smallest shift i
;; (1..24) such that val == (val >> i) << i with (val >> i) <= 0xff.
5061 [(set (match_operand:SI 0 "register_operand" "")
5062 (match_operand:SI 1 "const_int_operand" ""))]
5063 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5064 [(set (match_dup 0) (match_dup 1))
5065 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5068 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
5069 unsigned HOST_WIDE_INT mask = 0xff;
5072 for (i = 0; i < 25; i++)
5073 if ((val & (mask << i)) == val)
5076 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5080 operands[1] = GEN_INT (val >> i);
5081 operands[2] = GEN_INT (i);
5085 ;; When generating pic, we need to load the symbol offset into a register.
5086 ;; So that the optimizer does not confuse this with a normal symbol load
5087 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5088 ;; since that is the only type of relocation we can use.
5090 ;; The rather odd constraints on the following are to force reload to leave
5091 ;; the insn alone, and to force the minipool generation pass to then move
5092 ;; the GOT symbol to memory.
;; Load a PIC symbol offset from the constant pool (ARM mode); wrapped
;; in an unspec so the optimizers don't treat it as an ordinary symbol
;; load — see the block comment above.
5094 (define_insn "pic_load_addr_arm"
5095 [(set (match_operand:SI 0 "s_register_operand" "=r")
5096 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5097 "TARGET_ARM && flag_pic"
5099 [(set_attr "type" "load1")
5100 (set (attr "pool_range") (const_int 4096))
5101 (set (attr "neg_pool_range") (const_int 4084))]
;; Thumb-1 variant of pic_load_addr_arm (low registers, 1 KB pool range).
5104 (define_insn "pic_load_addr_thumb1"
5105 [(set (match_operand:SI 0 "s_register_operand" "=l")
5106 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5107 "TARGET_THUMB1 && flag_pic"
5109 [(set_attr "type" "load1")
5110 (set (attr "pool_range") (const_int 1024))]
;; Thumb form of "add pc": emits the LPICn label used by the matching
;; pool entry, then adds pc (+4 in Thumb) to the PIC offset in %0.
5113 (define_insn "pic_add_dot_plus_four"
5114 [(set (match_operand:SI 0 "register_operand" "=r")
5115 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5117 (match_operand 2 "" "")]
5121 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5122 INTVAL (operands[2]));
5123 return \"add\\t%0, %|pc\";
5125 [(set_attr "length" "2")]
;; ARM form: label + "add %0, pc, %1" (+8 pipeline offset in ARM state).
5128 (define_insn "pic_add_dot_plus_eight"
5129 [(set (match_operand:SI 0 "register_operand" "=r")
5130 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5132 (match_operand 2 "" "")]
5136 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5137 INTVAL (operands[2]));
5138 return \"add%?\\t%0, %|pc, %1\";
5140 [(set_attr "predicable" "yes")]
;; Fused pc-relative TLS load: label + "ldr %0, [pc, %1]".  Produced by
;; the peephole below from pic_add_dot_plus_eight followed by a load.
5143 (define_insn "tls_load_dot_plus_eight"
5144 [(set (match_operand:SI 0 "register_operand" "+r")
5145 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5147 (match_operand 2 "" "")]
5151 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5152 INTVAL (operands[2]));
5153 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5155 [(set_attr "predicable" "yes")]
5158 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5159 ;; followed by a load. These sequences can be crunched down to
5160 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole: crunch pic_add_dot_plus_eight + dependent load into
;; tls_load_dot_plus_eight when the intermediate register dies
;; (define_peephole2 header line elided in this listing).
5163 [(set (match_operand:SI 0 "register_operand" "")
5164 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5166 (match_operand 1 "" "")]
5168 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5169 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5171 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load through base register plus an unspec-wrapped PIC
;; offset ("ldr %0, [%1, %2]").
5178 (define_insn "pic_offset_arm"
5179 [(set (match_operand:SI 0 "register_operand" "=r")
5180 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5181 (unspec:SI [(match_operand:SI 2 "" "X")]
5182 UNSPEC_PIC_OFFSET))))]
5183 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5184 "ldr%?\\t%0, [%1,%2]"
5185 [(set_attr "type" "load1")]
;; Re-establish the PIC register after a builtin setjmp; r3 (mask 1<<3)
;; is free here because set/longjmp clobber it anyway.
5188 (define_expand "builtin_setjmp_receiver"
5189 [(label_ref (match_operand 0 "" ""))]
5193 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5195 if (arm_pic_register != INVALID_REGNUM)
5196 arm_load_pic_register (1UL << 3);
5200 ;; If copying one reg to another we can set the condition codes according to
5201 ;; its value. Such a move is common after a return from subroutine and the
5202 ;; result is being tested against zero.
;; Reg-to-reg move that also sets the condition codes against zero
;; (SUBS/compare forms — output templates elided in this listing).
5204 (define_insn "*movsi_compare0"
5205 [(set (reg:CC CC_REGNUM)
5206 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5208 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5214 [(set_attr "conds" "set")]
5217 ;; Subroutine to store a half word from a register into memory.
5218 ;; Operand 0 is the source register (HImode)
5219 ;; Operand 1 is the destination address in a register (SImode)
5221 ;; In both this routine and the next, we must be careful not to spill
5222 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5223 ;; can generate unrecognizable rtl.
;; Store an HImode register to memory as two byte stores (little-endian
;; order): low byte at offset 0, high byte (value >> 8) at offset 1.
;; The address is forced into a register first when it is reg+non-const,
;; to avoid spilling an unrecognizable PLUS (see comment above).
5225 (define_expand "storehi"
5226 [;; store the low byte
5227 (set (match_operand 1 "" "") (match_dup 3))
5228 ;; extract the high byte
5230 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5231 ;; store the high byte
5232 (set (match_dup 4) (match_dup 5))]
5236 rtx op1 = operands[1];
5237 rtx addr = XEXP (op1, 0);
5238 enum rtx_code code = GET_CODE (addr);
5240 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5242 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5244 operands[4] = adjust_address (op1, QImode, 1);
5245 operands[1] = adjust_address (operands[1], QImode, 0);
5246 operands[3] = gen_lowpart (QImode, operands[0]);
5247 operands[0] = gen_lowpart (SImode, operands[0]);
5248 operands[2] = gen_reg_rtx (SImode);
5249 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: high byte goes to offset 1, low byte
;; to offset 0, with the store order reversed relative to storehi.
5253 (define_expand "storehi_bigend"
5254 [(set (match_dup 4) (match_dup 3))
5256 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5257 (set (match_operand 1 "" "") (match_dup 5))]
5261 rtx op1 = operands[1];
5262 rtx addr = XEXP (op1, 0);
5263 enum rtx_code code = GET_CODE (addr);
5265 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5267 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5269 operands[4] = adjust_address (op1, QImode, 1);
5270 operands[1] = adjust_address (operands[1], QImode, 0);
5271 operands[3] = gen_lowpart (QImode, operands[0]);
5272 operands[0] = gen_lowpart (SImode, operands[0]);
5273 operands[2] = gen_reg_rtx (SImode);
5274 operands[5] = gen_lowpart (QImode, operands[2]);
5278 ;; Subroutine to store a half word integer constant into memory.
;; Store an HImode constant as two byte stores.  Each byte is
;; materialized in its own register (shared when both bytes are equal),
;; with byte selection swapped for big-endian.
5279 (define_expand "storeinthi"
5280 [(set (match_operand 0 "" "")
5281 (match_operand 1 "" ""))
5282 (set (match_dup 3) (match_dup 2))]
5286 HOST_WIDE_INT value = INTVAL (operands[1]);
5287 rtx addr = XEXP (operands[0], 0);
5288 rtx op0 = operands[0];
5289 enum rtx_code code = GET_CODE (addr);
5291 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5293 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5295 operands[1] = gen_reg_rtx (SImode);
5296 if (BYTES_BIG_ENDIAN)
5298 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5299 if ((value & 255) == ((value >> 8) & 255))
5300 operands[2] = operands[1];
5303 operands[2] = gen_reg_rtx (SImode);
5304 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5309 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5310 if ((value & 255) == ((value >> 8) & 255))
5311 operands[2] = operands[1];
5314 operands[2] = gen_reg_rtx (SImode);
5315 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5319 operands[3] = adjust_address (op0, QImode, 1);
5320 operands[0] = adjust_address (operands[0], QImode, 0);
5321 operands[2] = gen_lowpart (QImode, operands[2]);
5322 operands[1] = gen_lowpart (QImode, operands[1]);
;; v4+ single-instruction HImode store (STRH); force the source into a
;; register first.
5326 (define_expand "storehi_single_op"
5327 [(set (match_operand:HI 0 "memory_operand" "")
5328 (match_operand:HI 1 "general_operand" ""))]
5329 "TARGET_32BIT && arm_arch4"
5331 if (!s_register_operand (operands[1], HImode))
5332 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; HImode move expander.  Branches (ARM / Thumb-2 / Thumb-1) route
;; stores through storehi_single_op / storeinthi / storehi(_bigend),
;; widen loads to SImode where that is cheaper or required (pre-v4 has
;; no halfword loads: aligned word load + shift, or movhi_bytes), and
;; keep large constants in SImode registers during reload.  Several
;; lines are elided in this listing; treat line-number gaps as missing
;; code, not as adjacent statements.
5336 (define_expand "movhi"
5337 [(set (match_operand:HI 0 "general_operand" "")
5338 (match_operand:HI 1 "general_operand" ""))]
5343 if (can_create_pseudo_p ())
5345 if (GET_CODE (operands[0]) == MEM)
5349 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5352 if (GET_CODE (operands[1]) == CONST_INT)
5353 emit_insn (gen_storeinthi (operands[0], operands[1]));
5356 if (GET_CODE (operands[1]) == MEM)
5357 operands[1] = force_reg (HImode, operands[1]);
5358 if (BYTES_BIG_ENDIAN)
5359 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5361 emit_insn (gen_storehi (operands[1], operands[0]));
5365 /* Sign extend a constant, and keep it in an SImode reg. */
5366 else if (GET_CODE (operands[1]) == CONST_INT)
5368 rtx reg = gen_reg_rtx (SImode);
5369 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5371 /* If the constant is already valid, leave it alone. */
5372 if (!const_ok_for_arm (val))
5374 /* If setting all the top bits will make the constant
5375 loadable in a single instruction, then set them.
5376 Otherwise, sign extend the number. */
5378 if (const_ok_for_arm (~(val | ~0xffff)))
5380 else if (val & 0x8000)
5384 emit_insn (gen_movsi (reg, GEN_INT (val)));
5385 operands[1] = gen_lowpart (HImode, reg);
5387 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5388 && GET_CODE (operands[1]) == MEM)
5390 rtx reg = gen_reg_rtx (SImode);
5392 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5393 operands[1] = gen_lowpart (HImode, reg);
5395 else if (!arm_arch4)
5397 if (GET_CODE (operands[1]) == MEM)
5400 rtx offset = const0_rtx;
5401 rtx reg = gen_reg_rtx (SImode);
5403 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5404 || (GET_CODE (base) == PLUS
5405 && (GET_CODE (offset = XEXP (base, 1))
5407 && ((INTVAL(offset) & 1) != 1)
5408 && GET_CODE (base = XEXP (base, 0)) == REG))
5409 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5413 new_rtx = widen_memory_access (operands[1], SImode,
5414 ((INTVAL (offset) & ~3)
5415 - INTVAL (offset)));
5416 emit_insn (gen_movsi (reg, new_rtx));
5417 if (((INTVAL (offset) & 2) != 0)
5418 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5420 rtx reg2 = gen_reg_rtx (SImode);
5422 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5427 emit_insn (gen_movhi_bytes (reg, operands[1]));
5429 operands[1] = gen_lowpart (HImode, reg);
5433 /* Handle loading a large integer during reload. */
5434 else if (GET_CODE (operands[1]) == CONST_INT
5435 && !const_ok_for_arm (INTVAL (operands[1]))
5436 && !const_ok_for_arm (~INTVAL (operands[1])))
5438 /* Writing a constant to memory needs a scratch, which should
5439 be handled with SECONDARY_RELOADs. */
5440 gcc_assert (GET_CODE (operands[0]) == REG);
5442 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5443 emit_insn (gen_movsi (operands[0], operands[1]));
5447 else if (TARGET_THUMB2)
5449 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5450 if (can_create_pseudo_p ())
5452 if (GET_CODE (operands[0]) != REG)
5453 operands[1] = force_reg (HImode, operands[1]);
5454 /* Zero extend a constant, and keep it in an SImode reg. */
5455 else if (GET_CODE (operands[1]) == CONST_INT)
5457 rtx reg = gen_reg_rtx (SImode);
5458 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5460 emit_insn (gen_movsi (reg, GEN_INT (val)));
5461 operands[1] = gen_lowpart (HImode, reg);
5465 else /* TARGET_THUMB1 */
5467 if (can_create_pseudo_p ())
5469 if (GET_CODE (operands[1]) == CONST_INT)
5471 rtx reg = gen_reg_rtx (SImode);
5473 emit_insn (gen_movsi (reg, operands[1]));
5474 operands[1] = gen_lowpart (HImode, reg);
5477 /* ??? We shouldn't really get invalid addresses here, but this can
5478 happen if we are passed a SP (never OK for HImode/QImode) or
5479 virtual register (also rejected as illegitimate for HImode/QImode)
5480 relative address. */
5481 /* ??? This should perhaps be fixed elsewhere, for instance, in
5482 fixup_stack_1, by checking for other kinds of invalid addresses,
5483 e.g. a bare reference to a virtual register. This may confuse the
5484 alpha though, which must handle this case differently. */
5485 if (GET_CODE (operands[0]) == MEM
5486 && !memory_address_p (GET_MODE (operands[0]),
5487 XEXP (operands[0], 0)))
5489 = replace_equiv_address (operands[0],
5490 copy_to_reg (XEXP (operands[0], 0)));
5492 if (GET_CODE (operands[1]) == MEM
5493 && !memory_address_p (GET_MODE (operands[1]),
5494 XEXP (operands[1], 0)))
5496 = replace_equiv_address (operands[1],
5497 copy_to_reg (XEXP (operands[1], 0)));
5499 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5501 rtx reg = gen_reg_rtx (SImode);
5503 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5504 operands[1] = gen_lowpart (HImode, reg);
5507 if (GET_CODE (operands[0]) == MEM)
5508 operands[1] = force_reg (HImode, operands[1]);
5510 else if (GET_CODE (operands[1]) == CONST_INT
5511 && !satisfies_constraint_I (operands[1]))
5513 /* Handle loading a large integer during reload. */
5515 /* Writing a constant to memory needs a scratch, which should
5516 be handled with SECONDARY_RELOADs. */
5517 gcc_assert (GET_CODE (operands[0]) == REG);
5519 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5520 emit_insn (gen_movsi (operands[0], operands[1]));
;; *thumb1_movhi_insn: Thumb-1 HImode move.  Alternatives: lo-reg copy
;; (via "add %0, %1, #0"), ldrh load, strh store, hi/lo register moves,
;; and a small immediate.  The C fragment special-cases a load whose
;; address is (PLUS SP ...): SP is not usable as an index register for
;; ldrh, so SP is first copied into the destination register and the
;; address rewritten to use it before emitting the ldrh.
5527 (define_insn "*thumb1_movhi_insn"
5528 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5529 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5531 && ( register_operand (operands[0], HImode)
5532 || register_operand (operands[1], HImode))"
5534 switch (which_alternative)
5536 case 0: return \"add %0, %1, #0\";
5537 case 2: return \"strh %1, %0\";
5538 case 3: return \"mov %0, %1\";
5539 case 4: return \"mov %0, %1\";
5540 case 5: return \"mov %0, %1\";
5541 default: gcc_unreachable ();
5543 /* The stack pointer can end up being taken as an index register.
5544 Catch this case here and deal with it. */
5545 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5546 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5547 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5550 ops[0] = operands[0];
5551 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5553 output_asm_insn (\"mov %0, %1\", ops);
5555 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5558 return \"ldrh %0, %1\";
5560 [(set_attr "length" "2,4,2,2,2,2")
5561 (set_attr "type" "*,load1,store1,*,*,*")]
;; movhi_bytes: expand an HImode load as two QImode loads from addr and
;; addr+1, each zero-extended to SImode, then combined with
;; (ior (ashift high 8) low).  BYTES_BIG_ENDIAN selects which of the two
;; loaded bytes (operands[2]/operands[3]) supplies the high half.
5565 (define_expand "movhi_bytes"
5566 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5568 (zero_extend:SI (match_dup 6)))
5569 (set (match_operand:SI 0 "" "")
5570 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5575 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5577 mem1 = change_address (operands[1], QImode, addr);
5578 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5579 operands[0] = gen_lowpart (SImode, operands[0]);
5581 operands[2] = gen_reg_rtx (SImode);
5582 operands[3] = gen_reg_rtx (SImode);
5585 if (BYTES_BIG_ENDIAN)
5587 operands[4] = operands[2];
5588 operands[5] = operands[3];
5592 operands[4] = operands[3];
5593 operands[5] = operands[2];
;; movhi_bigend: big-endian HImode load, performed as an SImode access:
;; the HI memory operand is read as an SI subreg, rotated, then shifted
;; arithmetically right by 16; the final HImode value is the lowpart of
;; that SImode temporary (operands[4]).
5598 (define_expand "movhi_bigend"
5600 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5603 (ashiftrt:SI (match_dup 2) (const_int 16)))
5604 (set (match_operand:HI 0 "s_register_operand" "")
5608 operands[2] = gen_reg_rtx (SImode);
5609 operands[3] = gen_reg_rtx (SImode);
5610 operands[4] = gen_lowpart (HImode, operands[3]);
5614 ;; Pattern to recognize insn generated default case above
;; *movhi_insn_arch4: HImode move using real halfword loads/stores
;; (ldrh/strh), plus mov for valid immediates and mvn for constants
;; whose bitwise complement is valid.  The insn condition rejects any
;; CONST_INT usable by neither mov nor mvn.  Alternative 3 (ldrh) may
;; reference the literal pool, hence the pool_range/neg_pool_range
;; attributes on that alternative only.
5615 (define_insn "*movhi_insn_arch4"
5616 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5617 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5620 && (GET_CODE (operands[1]) != CONST_INT
5621 || const_ok_for_arm (INTVAL (operands[1]))
5622 || const_ok_for_arm (~INTVAL (operands[1])))"
5624 mov%?\\t%0, %1\\t%@ movhi
5625 mvn%?\\t%0, #%B1\\t%@ movhi
5626 str%(h%)\\t%1, %0\\t%@ movhi
5627 ldr%(h%)\\t%0, %1\\t%@ movhi"
5628 [(set_attr "type" "*,*,store1,load1")
5629 (set_attr "predicable" "yes")
5630 (set_attr "pool_range" "*,*,*,256")
5631 (set_attr "neg_pool_range" "*,*,*,244")]
;; *movhi_bytes: register/immediate HImode move only (no memory forms):
;; mov for an 'I' constant or register, mvn for a 'K' (complementable)
;; constant.  Both alternatives are predicable.
5634 (define_insn "*movhi_bytes"
5635 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5636 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5639 mov%?\\t%0, %1\\t%@ movhi
5640 mvn%?\\t%0, #%B1\\t%@ movhi"
5641 [(set_attr "predicable" "yes")]
;; thumb_movhi_clobber: HImode store with a DImode scratch clobber.
;; When the destination address is already strictly valid and the source
;; is a lo register, this degenerates to a plain movhi; the remaining
;; cases are unhandled (see the XXX note in the body).
5644 (define_expand "thumb_movhi_clobber"
5645 [(set (match_operand:HI 0 "memory_operand" "")
5646 (match_operand:HI 1 "register_operand" ""))
5647 (clobber (match_operand:DI 2 "register_operand" ""))]
5650 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5651 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5653 emit_insn (gen_movhi (operands[0], operands[1]))
5656 /* XXX Fixme, need to handle other cases here as well. */
5661 ;; We use a DImode scratch because we may occasionally need an additional
5662 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5663 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; reload_outhi: output reload of an HImode store; dispatches to the
;; target-specific helper (arm_reload_out_hi or thumb_reload_out_hi),
;; which consumes the DImode scratch in operand 2.
5664 (define_expand "reload_outhi"
5665 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5666 (match_operand:HI 1 "s_register_operand" "r")
5667 (match_operand:DI 2 "s_register_operand" "=&l")])]
5670 arm_reload_out_hi (operands);
5672 thumb_reload_out_hi (operands);
;; reload_inhi: input reload of an HImode load with a DImode scratch.
;; NOTE(review): the non-ARM branch calls thumb_reload_out_hi, the same
;; helper used by reload_outhi above — confirm against the helper's
;; implementation that it also handles the load direction.
5677 (define_expand "reload_inhi"
5678 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5679 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5680 (match_operand:DI 2 "s_register_operand" "=&r")])]
5684 arm_reload_in_hi (operands);
5686 thumb_reload_out_hi (operands);
;; movqi: QImode move expander.  While pseudos are available:
;;  - CONST_INT sources are loaded through an SImode pseudo and the
;;    QImode lowpart is used;
;;  - MEM operands with invalid addresses are legitimized by copying the
;;    address into a register (see the ??? notes below);
;;  - when optimizing, MEM sources are loaded via zero_extendqisi2 so the
;;    value is kept zero-extended in an SImode register;
;;  - mem = reg is forced so mem = const / mem = mem never reach here.
;; During reload, Thumb large constants are moved as SImode subregs of
;; the (register) destination.
5690 (define_expand "movqi"
5691 [(set (match_operand:QI 0 "general_operand" "")
5692 (match_operand:QI 1 "general_operand" ""))]
5695 /* Everything except mem = const or mem = mem can be done easily */
5697 if (can_create_pseudo_p ())
5699 if (GET_CODE (operands[1]) == CONST_INT)
5701 rtx reg = gen_reg_rtx (SImode);
5703 emit_insn (gen_movsi (reg, operands[1]));
5704 operands[1] = gen_lowpart (QImode, reg);
5709 /* ??? We shouldn't really get invalid addresses here, but this can
5710 happen if we are passed a SP (never OK for HImode/QImode) or
5711 virtual register (also rejected as illegitimate for HImode/QImode)
5712 relative address. */
5713 /* ??? This should perhaps be fixed elsewhere, for instance, in
5714 fixup_stack_1, by checking for other kinds of invalid addresses,
5715 e.g. a bare reference to a virtual register. This may confuse the
5716 alpha though, which must handle this case differently. */
5717 if (GET_CODE (operands[0]) == MEM
5718 && !memory_address_p (GET_MODE (operands[0]),
5719 XEXP (operands[0], 0)))
5721 = replace_equiv_address (operands[0],
5722 copy_to_reg (XEXP (operands[0], 0)));
5723 if (GET_CODE (operands[1]) == MEM
5724 && !memory_address_p (GET_MODE (operands[1]),
5725 XEXP (operands[1], 0)))
5727 = replace_equiv_address (operands[1],
5728 copy_to_reg (XEXP (operands[1], 0)));
5731 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5733 rtx reg = gen_reg_rtx (SImode);
5735 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5736 operands[1] = gen_lowpart (QImode, reg);
5739 if (GET_CODE (operands[0]) == MEM)
5740 operands[1] = force_reg (QImode, operands[1]);
5742 else if (TARGET_THUMB
5743 && GET_CODE (operands[1]) == CONST_INT
5744 && !satisfies_constraint_I (operands[1]))
5746 /* Handle loading a large integer during reload. */
5748 /* Writing a constant to memory needs a scratch, which should
5749 be handled with SECONDARY_RELOADs. */
5750 gcc_assert (GET_CODE (operands[0]) == REG);
5752 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5753 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn: QImode move for 32-bit targets.  Per the constraints
;; and "type" attribute: alternatives 0/1 are register/immediate moves
;; ('I' direct, 'K' inverted constant), alternative 2 is a byte load and
;; alternative 3 a byte store.  (Output templates are not visible in
;; this extract.)
5760 (define_insn "*arm_movqi_insn"
5761 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5762 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5764 && ( register_operand (operands[0], QImode)
5765 || register_operand (operands[1], QImode))"
5771 [(set_attr "type" "*,*,load1,store1")
5772 (set_attr "predicable" "yes")]
;; *thumb1_movqi_insn: Thumb-1 QImode move.  Per the attributes,
;; alternative 1 is a load (with a 32-byte literal-pool range) and
;; alternative 2 a store; every alternative is 2 bytes long.  (Output
;; templates are not visible in this extract.)
5775 (define_insn "*thumb1_movqi_insn"
5776 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5777 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5779 && ( register_operand (operands[0], QImode)
5780 || register_operand (operands[1], QImode))"
5788 [(set_attr "length" "2")
5789 (set_attr "type" "*,load1,store1,*,*,*")
5790 (set_attr "pool_range" "*,32,*,*,*,*")]
;; movsf: SFmode move expander.  Memory destinations take a registered
;; source; on Thumb-1, while pseudos are available, any non-REG
;; destination forces the source into a register as well.
5793 (define_expand "movsf"
5794 [(set (match_operand:SF 0 "general_operand" "")
5795 (match_operand:SF 1 "general_operand" ""))]
5800 if (GET_CODE (operands[0]) == MEM)
5801 operands[1] = force_reg (SFmode, operands[1]);
5803 else /* TARGET_THUMB1 */
5805 if (can_create_pseudo_p ())
5807 if (GET_CODE (operands[0]) != REG)
5808 operands[1] = force_reg (SFmode, operands[1]);
5814 ;; Transform a floating-point move of a constant into a core register into
5815 ;; an SImode operation.
;; NOTE(review): the define_split header line itself is not visible in
;; this extract.  The split rewrites a CONST_DOUBLE -> general-register
;; SF move as an SImode move of the same bit pattern, and FAILs if
;; either SImode lowpart cannot be formed.
5817 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5818 (match_operand:SF 1 "immediate_operand" ""))]
5821 && GET_CODE (operands[1]) == CONST_DOUBLE"
5822 [(set (match_dup 2) (match_dup 3))]
5824 operands[2] = gen_lowpart (SImode, operands[0]);
5825 operands[3] = gen_lowpart (SImode, operands[1]);
5826 if (operands[2] == 0 || operands[3] == 0)
;; *arm_movsf_soft_insn: soft-float SFmode move in core registers:
;; reg-reg mov, ldr from memory or the literal pool ('E' constant),
;; and str to memory.  mem = mem / mem = const are excluded by the insn
;; condition.  Pool range 4096/-4084 applies to the load alternative.
5831 (define_insn "*arm_movsf_soft_insn"
5832 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5833 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5835 && TARGET_SOFT_FLOAT
5836 && (GET_CODE (operands[0]) != MEM
5837 || register_operand (operands[1], SFmode))"
5840 ldr%?\\t%0, %1\\t%@ float
5841 str%?\\t%1, %0\\t%@ float"
5842 [(set_attr "length" "4,4,4")
5843 (set_attr "predicable" "yes")
5844 (set_attr "type" "*,load1,store1")
5845 (set_attr "pool_range" "*,4096,*")
5846 (set_attr "neg_pool_range" "*,4084,*")]
5849 ;;; ??? This should have alternatives for constants.
;; *thumb1_movsf_insn: Thumb-1 SFmode move.  Per the constraints and
;; "type" attribute: lo-reg copies, ldmia/stmia-style accesses ('>'),
;; plain memory load/store ('mF'/'m'), and hi/lo register moves; all
;; 2 bytes.  (Output templates are not visible in this extract.)
5850 (define_insn "*thumb1_movsf_insn"
5851 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5852 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5854 && ( register_operand (operands[0], SFmode)
5855 || register_operand (operands[1], SFmode))"
5864 [(set_attr "length" "2")
5865 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5866 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
;; movdf: DFmode move expander; mirrors movsf.  Memory destinations take
;; a registered source; on Thumb, while pseudos are available, non-REG
;; destinations force the source to a register.
5869 (define_expand "movdf"
5870 [(set (match_operand:DF 0 "general_operand" "")
5871 (match_operand:DF 1 "general_operand" ""))]
5876 if (GET_CODE (operands[0]) == MEM)
5877 operands[1] = force_reg (DFmode, operands[1]);
5879 else /* TARGET_THUMB */
5881 if (can_create_pseudo_p ())
5883 if (GET_CODE (operands[0]) != REG)
5884 operands[1] = force_reg (DFmode, operands[1]);
5890 ;; Reloading a df mode value stored in integer regs to memory can require a
;; reload_outdf: output reload of a DFmode store via an SImode scratch
;; (operand 2).  Address-code cases:
;;  - plain REG: used as the store base directly;
;;  - POST_INC / PRE_DEC: re-emitted as a DImode move (ldm/stm-capable);
;;  - PRE_INC: the base register is bumped by 8 up front;
;;  - POST_DEC: the base is used, then decremented by 8 afterwards;
;;  - other PLUS forms: the effective address is computed into the
;;    scratch first, and the store made through the scratch.
5892 (define_expand "reload_outdf"
5893 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5894 (match_operand:DF 1 "s_register_operand" "r")
5895 (match_operand:SI 2 "s_register_operand" "=&r")]
5899 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5902 operands[2] = XEXP (operands[0], 0);
5903 else if (code == POST_INC || code == PRE_DEC)
5905 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5906 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5907 emit_insn (gen_movdi (operands[0], operands[1]));
5910 else if (code == PRE_INC)
5912 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5914 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5917 else if (code == POST_DEC)
5918 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5920 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5921 XEXP (XEXP (operands[0], 0), 1)));
5923 emit_insn (gen_rtx_SET (VOIDmode,
5924 replace_equiv_address (operands[0], operands[2]),
5927 if (code == POST_DEC)
5928 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; *movdf_soft_insn: soft-float DFmode move held in core register pairs.
;; Register/constant alternatives (Da/Db/Dc) take 8-16 bytes; memory
;; alternatives are double-word load/store.  The register cases fall
;; through to output_move_double, which emits the word pair.
5934 (define_insn "*movdf_soft_insn"
5935 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5936 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5937 "TARGET_ARM && TARGET_SOFT_FLOAT
5938 && ( register_operand (operands[0], DFmode)
5939 || register_operand (operands[1], DFmode))"
5941 switch (which_alternative)
5948 return output_move_double (operands);
5951 [(set_attr "length" "8,12,16,8,8")
5952 (set_attr "type" "*,*,*,load2,store2")
5953 (set_attr "pool_range" "1020")
5954 (set_attr "neg_pool_range" "1008")]
5957 ;;; ??? This should have alternatives for constants.
5958 ;;; ??? This was originally identical to the movdi_insn pattern.
5959 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5960 ;;; thumb_reorg with a memory reference.
;; *thumb_movdf_insn: Thumb-1 DFmode move as a pair of word moves.
;; Register-pair copies order the two halves so the destination never
;; clobbers a source word that is still needed (checked via REGNO
;; overlap); memory forms use ldmia/stmia or two str's; the pool load
;; goes through thumb_load_double_from_address.
5961 (define_insn "*thumb_movdf_insn"
5962 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5963 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5965 && ( register_operand (operands[0], DFmode)
5966 || register_operand (operands[1], DFmode))"
5968 switch (which_alternative)
5972 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5973 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5974 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5976 return \"ldmia\\t%1, {%0, %H0}\";
5978 return \"stmia\\t%0, {%1, %H1}\";
5980 return thumb_load_double_from_address (operands);
5982 operands[2] = gen_rtx_MEM (SImode,
5983 plus_constant (XEXP (operands[0], 0), 4));
5984 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5987 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5988 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5989 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5992 [(set_attr "length" "4,2,2,6,4,4")
5993 (set_attr "type" "*,load2,store2,load2,store2,*")
5994 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; movxf: XFmode (FPA extended-precision) move expander, only enabled
;; for hard-float FPA targets; memory destinations take a registered
;; source.
5997 (define_expand "movxf"
5998 [(set (match_operand:XF 0 "general_operand" "")
5999 (match_operand:XF 1 "general_operand" ""))]
6000 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6002 if (GET_CODE (operands[0]) == MEM
6003 operands[1] = force_reg (XFmode, operands[1]);
6009 ;; load- and store-multiple insns
6010 ;; The arm can load/store any set of registers, provided that they are in
6011 ;; ascending order; but that is beyond GCC so stick with what it knows.
;; load_multiple: expander for ldm.  Only consecutive core registers are
;; supported: the count must be a CONST_INT in [2,14], the base a MEM,
;; the first destination a REG, and the whole register run must stay at
;; or below LAST_ARM_REGNUM.  Emission is delegated to
;; arm_gen_load_multiple with the address forced into a register.
6013 (define_expand "load_multiple"
6014 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6015 (match_operand:SI 1 "" ""))
6016 (use (match_operand:SI 2 "" ""))])]
6019 HOST_WIDE_INT offset = 0;
6021 /* Support only fixed point registers. */
6022 if (GET_CODE (operands[2]) != CONST_INT
6023 || INTVAL (operands[2]) > 14
6024 || INTVAL (operands[2]) < 2
6025 || GET_CODE (operands[1]) != MEM
6026 || GET_CODE (operands[0]) != REG
6027 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6028 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6032 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6033 force_reg (SImode, XEXP (operands[1], 0)),
6034 TRUE, FALSE, operands[1], &offset);
6037 ;; Load multiple with write-back
;; These patterns match a PARALLEL whose first element updates the base
;; register and whose remaining elements load consecutive words; the
;; XVECLEN check ties each pattern to its exact register count.
6039 (define_insn "*ldmsi_postinc4"
6040 [(match_parallel 0 "load_multiple_operation"
6041 [(set (match_operand:SI 1 "s_register_operand" "=r")
6042 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6044 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6045 (mem:SI (match_dup 2)))
6046 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6047 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6048 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6049 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6050 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6051 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6052 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6053 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6054 [(set_attr "type" "load4")
6055 (set_attr "predicable" "yes")]
;; Thumb-1 variant of the 4-register write-back load (lo base register,
;; not predicable).
6058 (define_insn "*ldmsi_postinc4_thumb1"
6059 [(match_parallel 0 "load_multiple_operation"
6060 [(set (match_operand:SI 1 "s_register_operand" "=l")
6061 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6063 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6064 (mem:SI (match_dup 2)))
6065 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6066 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6067 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6068 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6069 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6070 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6071 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6072 "ldmia\\t%1!, {%3, %4, %5, %6}"
6073 [(set_attr "type" "load4")]
;; Three-register write-back load.
6076 (define_insn "*ldmsi_postinc3"
6077 [(match_parallel 0 "load_multiple_operation"
6078 [(set (match_operand:SI 1 "s_register_operand" "=r")
6079 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6081 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6082 (mem:SI (match_dup 2)))
6083 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6084 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6085 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6086 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6087 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6088 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6089 [(set_attr "type" "load3")
6090 (set_attr "predicable" "yes")]
;; Two-register write-back load.
6093 (define_insn "*ldmsi_postinc2"
6094 [(match_parallel 0 "load_multiple_operation"
6095 [(set (match_operand:SI 1 "s_register_operand" "=r")
6096 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6098 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6099 (mem:SI (match_dup 2)))
6100 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6101 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6102 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6103 "ldm%(ia%)\\t%1!, {%3, %4}"
6104 [(set_attr "type" "load2")
6105 (set_attr "predicable" "yes")]
6108 ;; Ordinary load multiple
;; Same as the write-back patterns above but without the base update;
;; again XVECLEN pins each pattern to its register count.
6110 (define_insn "*ldmsi4"
6111 [(match_parallel 0 "load_multiple_operation"
6112 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6113 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6114 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6115 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6116 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6117 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6118 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6119 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6120 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6121 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6122 [(set_attr "type" "load4")
6123 (set_attr "predicable" "yes")]
;; Three-register plain load multiple.
6126 (define_insn "*ldmsi3"
6127 [(match_parallel 0 "load_multiple_operation"
6128 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6129 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6130 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6131 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6132 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6133 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6134 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6135 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6136 [(set_attr "type" "load3")
6137 (set_attr "predicable" "yes")]
;; Two-register plain load multiple.
6140 (define_insn "*ldmsi2"
6141 [(match_parallel 0 "load_multiple_operation"
6142 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6143 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6144 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6145 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6146 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6147 "ldm%(ia%)\\t%1, {%2, %3}"
6148 [(set_attr "type" "load2")
6149 (set_attr "predicable" "yes")]
;; store_multiple: expander for stm; mirror image of load_multiple.
;; Count must be a CONST_INT in [2,14], the source a REG, the
;; destination a MEM, and the register run must not pass
;; LAST_ARM_REGNUM.  Delegates to arm_gen_store_multiple.
6152 (define_expand "store_multiple"
6153 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6154 (match_operand:SI 1 "" ""))
6155 (use (match_operand:SI 2 "" ""))])]
6158 HOST_WIDE_INT offset = 0;
6160 /* Support only fixed point registers. */
6161 if (GET_CODE (operands[2]) != CONST_INT
6162 || INTVAL (operands[2]) > 14
6163 || INTVAL (operands[2]) < 2
6164 || GET_CODE (operands[1]) != REG
6165 || GET_CODE (operands[0]) != MEM
6166 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6167 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6171 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6172 force_reg (SImode, XEXP (operands[0], 0)),
6173 TRUE, FALSE, operands[0], &offset);
6176 ;; Store multiple with write-back
;; Mirror of the ldm write-back family: the first PARALLEL element
;; updates the base, the rest store consecutive words.
6178 (define_insn "*stmsi_postinc4"
6179 [(match_parallel 0 "store_multiple_operation"
6180 [(set (match_operand:SI 1 "s_register_operand" "=r")
6181 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6183 (set (mem:SI (match_dup 2))
6184 (match_operand:SI 3 "arm_hard_register_operand" ""))
6185 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6186 (match_operand:SI 4 "arm_hard_register_operand" ""))
6187 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6188 (match_operand:SI 5 "arm_hard_register_operand" ""))
6189 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6190 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6191 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6192 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6193 [(set_attr "predicable" "yes")
6194 (set_attr "type" "store4")]
;; Thumb-1 variant of the 4-register write-back store.
6197 (define_insn "*stmsi_postinc4_thumb1"
6198 [(match_parallel 0 "store_multiple_operation"
6199 [(set (match_operand:SI 1 "s_register_operand" "=l")
6200 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6202 (set (mem:SI (match_dup 2))
6203 (match_operand:SI 3 "arm_hard_register_operand" ""))
6204 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6205 (match_operand:SI 4 "arm_hard_register_operand" ""))
6206 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6207 (match_operand:SI 5 "arm_hard_register_operand" ""))
6208 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6209 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6210 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6211 "stmia\\t%1!, {%3, %4, %5, %6}"
6212 [(set_attr "type" "store4")]
;; Three-register write-back store.
6215 (define_insn "*stmsi_postinc3"
6216 [(match_parallel 0 "store_multiple_operation"
6217 [(set (match_operand:SI 1 "s_register_operand" "=r")
6218 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6220 (set (mem:SI (match_dup 2))
6221 (match_operand:SI 3 "arm_hard_register_operand" ""))
6222 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6223 (match_operand:SI 4 "arm_hard_register_operand" ""))
6224 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6225 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6226 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6227 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6228 [(set_attr "predicable" "yes")
6229 (set_attr "type" "store3")]
;; Two-register write-back store.
6232 (define_insn "*stmsi_postinc2"
6233 [(match_parallel 0 "store_multiple_operation"
6234 [(set (match_operand:SI 1 "s_register_operand" "=r")
6235 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6237 (set (mem:SI (match_dup 2))
6238 (match_operand:SI 3 "arm_hard_register_operand" ""))
6239 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6240 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6241 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6242 "stm%(ia%)\\t%1!, {%3, %4}"
6243 [(set_attr "predicable" "yes")
6244 (set_attr "type" "store2")]
6247 ;; Ordinary store multiple
;; Plain stm without base-register update; XVECLEN pins the register
;; count for each pattern.
6249 (define_insn "*stmsi4"
6250 [(match_parallel 0 "store_multiple_operation"
6251 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6252 (match_operand:SI 2 "arm_hard_register_operand" ""))
6253 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6254 (match_operand:SI 3 "arm_hard_register_operand" ""))
6255 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6256 (match_operand:SI 4 "arm_hard_register_operand" ""))
6257 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6258 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6259 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6260 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6261 [(set_attr "predicable" "yes")
6262 (set_attr "type" "store4")]
;; Three-register plain store multiple.
6265 (define_insn "*stmsi3"
6266 [(match_parallel 0 "store_multiple_operation"
6267 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6268 (match_operand:SI 2 "arm_hard_register_operand" ""))
6269 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6270 (match_operand:SI 3 "arm_hard_register_operand" ""))
6271 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6272 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6273 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6274 "stm%(ia%)\\t%1, {%2, %3, %4}"
6275 [(set_attr "predicable" "yes")
6276 (set_attr "type" "store3")]
;; Two-register plain store multiple.
6279 (define_insn "*stmsi2"
6280 [(match_parallel 0 "store_multiple_operation"
6281 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6282 (match_operand:SI 2 "arm_hard_register_operand" ""))
6283 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6284 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6285 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6286 "stm%(ia%)\\t%1, {%2, %3}"
6287 [(set_attr "predicable" "yes")
6288 (set_attr "type" "store2")]
6291 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6292 ;; We could let this apply for blocks of less than this, but it clobbers so
6293 ;; many registers that there is then probably a better way.
;; movmemqi: block-move expander.  Operand 2 is the byte count, operand
;; 3 the alignment.  The 32-bit path defers to arm_gen_movmemqi; the
;; Thumb-1 path only handles word-aligned copies of at most 48 bytes,
;; via thumb_expand_movmemqi, and FAILs otherwise.
6295 (define_expand "movmemqi"
6296 [(match_operand:BLK 0 "general_operand" "")
6297 (match_operand:BLK 1 "general_operand" "")
6298 (match_operand:SI 2 "const_int_operand" "")
6299 (match_operand:SI 3 "const_int_operand" "")]
6304 if (arm_gen_movmemqi (operands))
6308 else /* TARGET_THUMB1 */
6310 if ( INTVAL (operands[3]) != 4
6311 || INTVAL (operands[2]) > 48)
6314 thumb_expand_movmemqi (operands);
6320 ;; Thumb block-move insns
;; movmem12b / movmem8b copy 3 or 2 words respectively, advancing both
;; pointers past the copied region and using lo-register scratches; the
;; actual ldmia/stmia sequence comes from thumb_output_move_mem_multiple.
6322 (define_insn "movmem12b"
6323 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6324 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6325 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6326 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6327 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6328 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6329 (set (match_operand:SI 0 "register_operand" "=l")
6330 (plus:SI (match_dup 2) (const_int 12)))
6331 (set (match_operand:SI 1 "register_operand" "=l")
6332 (plus:SI (match_dup 3) (const_int 12)))
6333 (clobber (match_scratch:SI 4 "=&l"))
6334 (clobber (match_scratch:SI 5 "=&l"))
6335 (clobber (match_scratch:SI 6 "=&l"))]
6337 "* return thumb_output_move_mem_multiple (3, operands);"
6338 [(set_attr "length" "4")
6339 ; This isn't entirely accurate... It loads as well, but in terms of
6340 ; scheduling the following insn it is better to consider it as a store
6341 (set_attr "type" "store3")]
;; Two-word variant of the above.
6344 (define_insn "movmem8b"
6345 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6346 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6347 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6348 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6349 (set (match_operand:SI 0 "register_operand" "=l")
6350 (plus:SI (match_dup 2) (const_int 8)))
6351 (set (match_operand:SI 1 "register_operand" "=l")
6352 (plus:SI (match_dup 3) (const_int 8)))
6353 (clobber (match_scratch:SI 4 "=&l"))
6354 (clobber (match_scratch:SI 5 "=&l"))]
6356 "* return thumb_output_move_mem_multiple (2, operands);"
6357 [(set_attr "length" "4")
6358 ; This isn't entirely accurate... It loads as well, but in terms of
6359 ; scheduling the following insn it is better to consider it as a store
6360 (set_attr "type" "store2")]
6365 ;; Compare & branch insns
6366 ;; The range calculations are based as follows:
6367 ;; For forward branches, the address calculation returns the address of
6368 ;; the next instruction. This is 2 beyond the branch instruction.
6369 ;; For backward branches, the address calculation returns the address of
6370 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6371 ;; instruction for the shortest sequence, and 4 before the branch instruction
6372 ;; if we have to jump around an unconditional branch.
6373 ;; To the basic branch range the PC offset must be added (this is +4).
6374 ;; So for forward branches we have
6375 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6376 ;; And for backward branches we have
6377 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6379 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6380 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; cbranchsi4: SImode compare-and-branch expander.  The non-Thumb-1 path
;; forces operand 2 into an arm_add_operand and emits cbranch_cc.  The
;; Thumb-1 path first tries the negated-immediate form (handled by
;; cbranchsi4_scratch via add #-imm), otherwise forces operand 2 to a
;; thumb1_cmp_operand.
6382 (define_expand "cbranchsi4"
6383 [(set (pc) (if_then_else
6384 (match_operator 0 "arm_comparison_operator"
6385 [(match_operand:SI 1 "s_register_operand" "")
6386 (match_operand:SI 2 "nonmemory_operand" "")])
6387 (label_ref (match_operand 3 "" ""))
6389 "TARGET_THUMB1 || TARGET_32BIT"
6393 if (!arm_add_operand (operands[2], SImode))
6394 operands[2] = force_reg (SImode, operands[2]);
6395 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6399 if (thumb1_cmpneg_operand (operands[2], SImode))
6401 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6402 operands[3], operands[0]));
6405 if (!thumb1_cmp_operand (operands[2], SImode))
6406 operands[2] = force_reg (SImode, operands[2]);
;; cbranchsf4: SFmode compare-and-branch for hard-float 32-bit targets;
;; simply defers to cbranch_cc.
6409 (define_expand "cbranchsf4"
6410 [(set (pc) (if_then_else
6411 (match_operator 0 "arm_comparison_operator"
6412 [(match_operand:SF 1 "s_register_operand" "")
6413 (match_operand:SF 2 "arm_float_compare_operand" "")])
6414 (label_ref (match_operand 3 "" ""))
6416 "TARGET_32BIT && TARGET_HARD_FLOAT"
6417 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6418 operands[3])); DONE;"
;; cbranchdf4: DFmode analogue of cbranchsf4.
6421 (define_expand "cbranchdf4"
6422 [(set (pc) (if_then_else
6423 (match_operator 0 "arm_comparison_operator"
6424 [(match_operand:DF 1 "s_register_operand" "")
6425 (match_operand:DF 2 "arm_float_compare_operand" "")])
6426 (label_ref (match_operand 3 "" ""))
6428 "TARGET_32BIT && TARGET_HARD_FLOAT"
6429 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6430 operands[3])); DONE;"
6433 ;; this uses the Cirrus DI compare instruction
6434 (define_expand "cbranchdi4"
6435 [(set (pc) (if_then_else
6436 (match_operator 0 "arm_comparison_operator"
6437 [(match_operand:DI 1 "cirrus_fp_register" "")
6438 (match_operand:DI 2 "cirrus_fp_register" "")])
6439 (label_ref (match_operand 3 "" ""))
6441 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6442 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6443 operands[3])); DONE;"
;; *cbranchsi4_insn: Thumb-1 cmp + conditional branch.  The computed
;; "length" attribute selects the emitted form: 4 = short conditional
;; branch, 6 = inverted branch around an unconditional 'b', 8 = inverted
;; branch around a 'bl' far jump (which also sets far_jump = yes).
6446 (define_insn "*cbranchsi4_insn"
6447 [(set (pc) (if_then_else
6448 (match_operator 0 "arm_comparison_operator"
6449 [(match_operand:SI 1 "s_register_operand" "l,*h")
6450 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6451 (label_ref (match_operand 3 "" ""))
6455 output_asm_insn (\"cmp\\t%1, %2\", operands);
6457 switch (get_attr_length (insn))
6459 case 4: return \"b%d0\\t%l3\";
6460 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6461 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6464 [(set (attr "far_jump")
6466 (eq_attr "length" "8")
6467 (const_string "yes")
6468 (const_string "no")))
6469 (set (attr "length")
6471 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6472 (le (minus (match_dup 3) (pc)) (const_int 256)))
6475 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6476 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; cbranchsi4_scratch: Thumb-1 compare-with-negated-immediate and
;; branch.  Instead of cmp, it adds the negated constant (#%n2) into a
;; scratch lo register, setting the flags; branch-length selection is
;; identical to *cbranchsi4_insn above.
6481 (define_insn "cbranchsi4_scratch"
6482 [(set (pc) (if_then_else
6483 (match_operator 4 "arm_comparison_operator"
6484 [(match_operand:SI 1 "s_register_operand" "l,0")
6485 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6486 (label_ref (match_operand 3 "" ""))
6488 (clobber (match_scratch:SI 0 "=l,l"))]
6491 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6493 switch (get_attr_length (insn))
6495 case 4: return \"b%d4\\t%l3\";
6496 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6497 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6500 [(set (attr "far_jump")
6502 (eq_attr "length" "8")
6503 (const_string "yes")
6504 (const_string "no")))
6505 (set (attr "length")
6507 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6508 (le (minus (match_dup 3) (pc)) (const_int 256)))
6511 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6512 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *movsi_cbranchsi4: combined move-and-branch-on-zero.  Alternatives
;; 0/1 compare in place (cmp #0 / sub #0); alternatives 2/3 first cmp,
;; then emit the extra mov-to-hi-reg or str, which is why the length
;; computation subtracts 2 for which_alternative > 1 and the attribute
;; ranges for alternatives 2-3 are tightened by 2 bytes.
6516 (define_insn "*movsi_cbranchsi4"
6519 (match_operator 3 "arm_comparison_operator"
6520 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6522 (label_ref (match_operand 2 "" ""))
6524 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6528 if (which_alternative == 0)
6529 output_asm_insn (\"cmp\t%0, #0\", operands);
6530 else if (which_alternative == 1)
6531 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6534 output_asm_insn (\"cmp\t%1, #0\", operands);
6535 if (which_alternative == 2)
6536 output_asm_insn (\"mov\t%0, %1\", operands);
6538 output_asm_insn (\"str\t%1, %0\", operands);
6540 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6542 case 4: return \"b%d3\\t%l2\";
6543 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6544 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6547 [(set (attr "far_jump")
6549 (ior (and (gt (symbol_ref ("which_alternative"))
6551 (eq_attr "length" "8"))
6552 (eq_attr "length" "10"))
6553 (const_string "yes")
6554 (const_string "no")))
6555 (set (attr "length")
6557 (le (symbol_ref ("which_alternative"))
6560 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6561 (le (minus (match_dup 2) (pc)) (const_int 256)))
6564 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6565 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6569 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6570 (le (minus (match_dup 2) (pc)) (const_int 256)))
6573 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6574 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; *negated_cbranchsi4: Thumb-1 equality branch of a register against a
;; negated register.  Uses CMN (compare-negative) so no scratch register
;; is needed, then the length-selected short/long/far branch.
;; (Extract: some original source lines are elided here.)
6579 (define_insn "*negated_cbranchsi4"
6582 (match_operator 0 "equality_operator"
6583 [(match_operand:SI 1 "s_register_operand" "l")
6584 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6585 (label_ref (match_operand 3 "" ""))
6589 output_asm_insn (\"cmn\\t%1, %2\", operands);
6590 switch (get_attr_length (insn))
6592 case 4: return \"b%d0\\t%l3\";
6593 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6594 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form; "length" selected
;; from the displacement ranges below.
6597 [(set (attr "far_jump")
6599 (eq_attr "length" "8")
6600 (const_string "yes")
6601 (const_string "no")))
6602 (set (attr "length")
6604 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6605 (le (minus (match_dup 3) (pc)) (const_int 256)))
6608 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6609 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tbit_cbranch: Thumb-1 single-bit test and branch.  Shifts the
;; tested bit into the sign position (LSL by 31 - bitpos, computed in
;; the C fragment) so the following conditional branch keys off the N
;; flag; needs one scratch register for the shifted value.
;; (Extract: some original source lines are elided here.)
6614 (define_insn "*tbit_cbranch"
6617 (match_operator 0 "equality_operator"
6618 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6620 (match_operand:SI 2 "const_int_operand" "i"))
6622 (label_ref (match_operand 3 "" ""))
6624 (clobber (match_scratch:SI 4 "=l"))]
6629 op[0] = operands[4];
6630 op[1] = operands[1];
6631 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6633 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6634 switch (get_attr_length (insn))
6636 case 4: return \"b%d0\\t%l3\";
6637 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6638 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
6641 [(set (attr "far_jump")
6643 (eq_attr "length" "8")
6644 (const_string "yes")
6645 (const_string "no")))
6646 (set (attr "length")
6648 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6649 (le (minus (match_dup 3) (pc)) (const_int 256)))
6652 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6653 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tlobits_cbranch: Thumb-1 test of the low N bits and branch.  Shifts
;; the low bitfield to the top of the word (LSL by 32 - width, computed
;; in the C fragment), which sets Z iff all tested bits are zero; needs
;; one scratch register.  (Extract: some source lines are elided.)
6658 (define_insn "*tlobits_cbranch"
6661 (match_operator 0 "equality_operator"
6662 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6663 (match_operand:SI 2 "const_int_operand" "i")
6666 (label_ref (match_operand 3 "" ""))
6668 (clobber (match_scratch:SI 4 "=l"))]
6673 op[0] = operands[4];
6674 op[1] = operands[1];
6675 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6677 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6678 switch (get_attr_length (insn))
6680 case 4: return \"b%d0\\t%l3\";
6681 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6682 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
6685 [(set (attr "far_jump")
6687 (eq_attr "length" "8")
6688 (const_string "yes")
6689 (const_string "no")))
6690 (set (attr "length")
6692 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6693 (le (minus (match_dup 3) (pc)) (const_int 256)))
6696 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6697 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tstsi3_cbranch: Thumb-1 AND-test (TST) of two registers and branch
;; on the equality result; no scratch needed since TST discards the
;; AND result.  (Extract: some original source lines are elided here.)
6702 (define_insn "*tstsi3_cbranch"
6705 (match_operator 3 "equality_operator"
6706 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6707 (match_operand:SI 1 "s_register_operand" "l"))
6709 (label_ref (match_operand 2 "" ""))
6714 output_asm_insn (\"tst\\t%0, %1\", operands);
6715 switch (get_attr_length (insn))
6717 case 4: return \"b%d3\\t%l2\";
6718 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6719 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
6722 [(set (attr "far_jump")
6724 (eq_attr "length" "8")
6725 (const_string "yes")
6726 (const_string "no")))
6727 (set (attr "length")
6729 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6730 (le (minus (match_dup 2) (pc)) (const_int 256)))
6733 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6734 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; *andsi3_cbranch: Thumb-1 AND writing its result (to a lo reg, hi reg
;; via mov, or memory via str) fused with an equality branch on that
;; result.  Alternatives beyond the first emit one extra 2-byte insn,
;; hence the "- 2" length adjustment in the switch.
;; (Extract: some original source lines are elided here.)
6739 (define_insn "*andsi3_cbranch"
6742 (match_operator 5 "equality_operator"
6743 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6744 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6746 (label_ref (match_operand 4 "" ""))
6748 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6749 (and:SI (match_dup 2) (match_dup 3)))
6750 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6754 if (which_alternative == 0)
6755 output_asm_insn (\"and\\t%0, %3\", operands);
6756 else if (which_alternative == 1)
6758 output_asm_insn (\"and\\t%1, %3\", operands);
6759 output_asm_insn (\"mov\\t%0, %1\", operands);
6763 output_asm_insn (\"and\\t%1, %3\", operands);
6764 output_asm_insn (\"str\\t%1, %0\", operands);
6767 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6769 case 4: return \"b%d5\\t%l4\";
6770 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6771 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms (length 8, or 10 for the wider
;; alternatives); displacement ranges below are tightened by 2 for them.
6774 [(set (attr "far_jump")
6776 (ior (and (eq (symbol_ref ("which_alternative"))
6778 (eq_attr "length" "8"))
6779 (eq_attr "length" "10"))
6780 (const_string "yes")
6781 (const_string "no")))
6782 (set (attr "length")
6784 (eq (symbol_ref ("which_alternative"))
6787 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6788 (le (minus (match_dup 4) (pc)) (const_int 256)))
6791 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6792 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6796 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6797 (le (minus (match_dup 4) (pc)) (const_int 256)))
6800 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6801 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *orrsi3_cbranch_scratch: Thumb-1 OR into a scratch register fused
;; with an equality branch on the result (result itself is discarded).
;; (Extract: some original source lines are elided here.)
6806 (define_insn "*orrsi3_cbranch_scratch"
6809 (match_operator 4 "equality_operator"
6810 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6811 (match_operand:SI 2 "s_register_operand" "l"))
6813 (label_ref (match_operand 3 "" ""))
6815 (clobber (match_scratch:SI 0 "=l"))]
6819 output_asm_insn (\"orr\\t%0, %2\", operands);
6820 switch (get_attr_length (insn))
6822 case 4: return \"b%d4\\t%l3\";
6823 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6824 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
6827 [(set (attr "far_jump")
6829 (eq_attr "length" "8")
6830 (const_string "yes")
6831 (const_string "no")))
6832 (set (attr "length")
6834 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6835 (le (minus (match_dup 3) (pc)) (const_int 256)))
6838 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6839 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *orrsi3_cbranch: Thumb-1 OR writing its result (lo reg, hi reg via
;; mov, or memory via str) fused with an equality branch on that
;; result.  Structure parallels *andsi3_cbranch: non-first alternatives
;; cost 2 extra bytes, adjusted for in the length switch.
;; (Extract: some original source lines are elided here.)
6844 (define_insn "*orrsi3_cbranch"
6847 (match_operator 5 "equality_operator"
6848 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6849 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6851 (label_ref (match_operand 4 "" ""))
6853 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6854 (ior:SI (match_dup 2) (match_dup 3)))
6855 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6859 if (which_alternative == 0)
6860 output_asm_insn (\"orr\\t%0, %3\", operands);
6861 else if (which_alternative == 1)
6863 output_asm_insn (\"orr\\t%1, %3\", operands);
6864 output_asm_insn (\"mov\\t%0, %1\", operands);
6868 output_asm_insn (\"orr\\t%1, %3\", operands);
6869 output_asm_insn (\"str\\t%1, %0\", operands);
6872 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6874 case 4: return \"b%d5\\t%l4\";
6875 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6876 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; ranges tightened by 2 for the
;; alternatives that emit an extra insn.
6879 [(set (attr "far_jump")
6881 (ior (and (eq (symbol_ref ("which_alternative"))
6883 (eq_attr "length" "8"))
6884 (eq_attr "length" "10"))
6885 (const_string "yes")
6886 (const_string "no")))
6887 (set (attr "length")
6889 (eq (symbol_ref ("which_alternative"))
6892 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6893 (le (minus (match_dup 4) (pc)) (const_int 256)))
6896 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6897 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6901 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6902 (le (minus (match_dup 4) (pc)) (const_int 256)))
6905 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6906 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *xorsi3_cbranch_scratch: Thumb-1 EOR into a scratch register fused
;; with an equality branch on the result (result discarded).
;; (Extract: some original source lines are elided here.)
6911 (define_insn "*xorsi3_cbranch_scratch"
6914 (match_operator 4 "equality_operator"
6915 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6916 (match_operand:SI 2 "s_register_operand" "l"))
6918 (label_ref (match_operand 3 "" ""))
6920 (clobber (match_scratch:SI 0 "=l"))]
6924 output_asm_insn (\"eor\\t%0, %2\", operands);
6925 switch (get_attr_length (insn))
6927 case 4: return \"b%d4\\t%l3\";
6928 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6929 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
6932 [(set (attr "far_jump")
6934 (eq_attr "length" "8")
6935 (const_string "yes")
6936 (const_string "no")))
6937 (set (attr "length")
6939 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6940 (le (minus (match_dup 3) (pc)) (const_int 256)))
6943 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6944 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *xorsi3_cbranch: Thumb-1 EOR writing its result (lo reg, hi reg via
;; mov, or memory via str) fused with an equality branch on that
;; result.  Same shape as *andsi3_cbranch / *orrsi3_cbranch.
;; (Extract: some original source lines are elided here.)
6949 (define_insn "*xorsi3_cbranch"
6952 (match_operator 5 "equality_operator"
6953 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6954 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6956 (label_ref (match_operand 4 "" ""))
6958 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6959 (xor:SI (match_dup 2) (match_dup 3)))
6960 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6964 if (which_alternative == 0)
6965 output_asm_insn (\"eor\\t%0, %3\", operands);
6966 else if (which_alternative == 1)
6968 output_asm_insn (\"eor\\t%1, %3\", operands);
6969 output_asm_insn (\"mov\\t%0, %1\", operands);
6973 output_asm_insn (\"eor\\t%1, %3\", operands);
6974 output_asm_insn (\"str\\t%1, %0\", operands);
6977 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6979 case 4: return \"b%d5\\t%l4\";
6980 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6981 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; ranges tightened by 2 for the
;; alternatives that emit an extra insn.
6984 [(set (attr "far_jump")
6986 (ior (and (eq (symbol_ref ("which_alternative"))
6988 (eq_attr "length" "8"))
6989 (eq_attr "length" "10"))
6990 (const_string "yes")
6991 (const_string "no")))
6992 (set (attr "length")
6994 (eq (symbol_ref ("which_alternative"))
6997 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6998 (le (minus (match_dup 4) (pc)) (const_int 256)))
7001 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7002 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7006 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7007 (le (minus (match_dup 4) (pc)) (const_int 256)))
7010 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7011 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *bicsi3_cbranch_scratch: Thumb-1 BIC (and-with-complement) into a
;; scratch register fused with an equality branch on the result.
;; (Extract: some original source lines are elided here.)
7016 (define_insn "*bicsi3_cbranch_scratch"
7019 (match_operator 4 "equality_operator"
7020 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7021 (match_operand:SI 1 "s_register_operand" "0"))
7023 (label_ref (match_operand 3 "" ""))
7025 (clobber (match_scratch:SI 0 "=l"))]
7029 output_asm_insn (\"bic\\t%0, %2\", operands);
7030 switch (get_attr_length (insn))
7032 case 4: return \"b%d4\\t%l3\";
7033 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7034 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
7037 [(set (attr "far_jump")
7039 (eq_attr "length" "8")
7040 (const_string "yes")
7041 (const_string "no")))
7042 (set (attr "length")
7044 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7045 (le (minus (match_dup 3) (pc)) (const_int 256)))
7048 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7049 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *bicsi3_cbranch: Thumb-1 BIC writing its result (lo reg, hi reg via
;; mov, or memory via str) fused with an equality branch.  Five
;; alternatives (note the extra "!l" fast-path); non-first ones emit an
;; extra 2-byte insn, adjusted for in the length switch.
;; (Extract: some original source lines are elided here.)
7054 (define_insn "*bicsi3_cbranch"
7057 (match_operator 5 "equality_operator"
7058 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7059 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7061 (label_ref (match_operand 4 "" ""))
7063 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7064 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7065 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7069 if (which_alternative == 0)
7070 output_asm_insn (\"bic\\t%0, %3\", operands);
7071 else if (which_alternative <= 2)
7073 output_asm_insn (\"bic\\t%1, %3\", operands);
7074 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7075 conditions again, since we're only testing for equality. */
7076 output_asm_insn (\"mov\\t%0, %1\", operands);
7080 output_asm_insn (\"bic\\t%1, %3\", operands);
7081 output_asm_insn (\"str\\t%1, %0\", operands);
7084 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7086 case 4: return \"b%d5\\t%l4\";
7087 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7088 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; ranges tightened by 2 for the
;; alternatives that emit an extra insn.
7091 [(set (attr "far_jump")
7093 (ior (and (eq (symbol_ref ("which_alternative"))
7095 (eq_attr "length" "8"))
7096 (eq_attr "length" "10"))
7097 (const_string "yes")
7098 (const_string "no")))
7099 (set (attr "length")
7101 (eq (symbol_ref ("which_alternative"))
7104 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7105 (le (minus (match_dup 4) (pc)) (const_int 256)))
7108 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7109 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7113 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7114 (le (minus (match_dup 4) (pc)) (const_int 256)))
7117 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7118 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *cbranchne_decr1: Thumb-1 decrement-and-branch.  Subtracts 1 from
;; operand 2 (into a lo reg, a hi reg via mov, or memory via str) and
;; branches on the pre-decrement value; the C fragment rebuilds the
;; comparison against const1_rtx so the SUB's flags can be reused.
;; Uses set_attr_alternative for per-alternative length computation.
;; (Extract: some original source lines are elided here.)
7123 (define_insn "*cbranchne_decr1"
7125 (if_then_else (match_operator 3 "equality_operator"
7126 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7128 (label_ref (match_operand 4 "" ""))
7130 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7131 (plus:SI (match_dup 2) (const_int -1)))
7132 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7137 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7139 VOIDmode, operands[2], const1_rtx);
7140 cond[1] = operands[4];
7142 if (which_alternative == 0)
7143 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7144 else if (which_alternative == 1)
7146 /* We must provide an alternative for a hi reg because reload
7147 cannot handle output reloads on a jump instruction, but we
7148 can't subtract into that. Fortunately a mov from lo to hi
7149 does not clobber the condition codes. */
7150 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7151 output_asm_insn (\"mov\\t%0, %1\", operands);
7155 /* Similarly, but the target is memory. */
7156 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7157 output_asm_insn (\"str\\t%1, %0\", operands);
7160 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7163 output_asm_insn (\"b%d0\\t%l1\", cond);
7166 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7167 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7169 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7170 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; each alternative gets its own
;; displacement-range length expression below.
7174 [(set (attr "far_jump")
7176 (ior (and (eq (symbol_ref ("which_alternative"))
7178 (eq_attr "length" "8"))
7179 (eq_attr "length" "10"))
7180 (const_string "yes")
7181 (const_string "no")))
7182 (set_attr_alternative "length"
7186 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7187 (le (minus (match_dup 4) (pc)) (const_int 256)))
7190 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7191 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7196 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7197 (le (minus (match_dup 4) (pc)) (const_int 256)))
7200 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7201 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7206 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7207 (le (minus (match_dup 4) (pc)) (const_int 256)))
7210 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7211 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7216 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7217 (le (minus (match_dup 4) (pc)) (const_int 256)))
7220 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7221 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *addsi3_cbranch: Thumb-1 add (or sub of a negated immediate) whose
;; result is written and branched on.  Restricted to EQ/NE/GE/LT so the
;; ADD/SUB flag results are usable.  Six alternatives; those >= 3 emit
;; an extra mov/str, adjusted for in the length switch.
;; (Extract: some original source lines are elided here.)
7226 (define_insn "*addsi3_cbranch"
7229 (match_operator 4 "comparison_operator"
7231 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7232 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7234 (label_ref (match_operand 5 "" ""))
7237 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7238 (plus:SI (match_dup 2) (match_dup 3)))
7239 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7241 && (GET_CODE (operands[4]) == EQ
7242 || GET_CODE (operands[4]) == NE
7243 || GET_CODE (operands[4]) == GE
7244 || GET_CODE (operands[4]) == LT)"
7250 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7251 cond[1] = operands[2];
7252 cond[2] = operands[3];
7254 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7255 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7257 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7259 if (which_alternative >= 3
7260 && which_alternative < 4)
7261 output_asm_insn (\"mov\\t%0, %1\", operands);
7262 else if (which_alternative >= 4)
7263 output_asm_insn (\"str\\t%1, %0\", operands);
7265 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7268 return \"b%d4\\t%l5\";
7270 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7272 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; displacement ranges below are
;; tightened by 2 for the wider alternatives.
7276 [(set (attr "far_jump")
7278 (ior (and (lt (symbol_ref ("which_alternative"))
7280 (eq_attr "length" "8"))
7281 (eq_attr "length" "10"))
7282 (const_string "yes")
7283 (const_string "no")))
7284 (set (attr "length")
7286 (lt (symbol_ref ("which_alternative"))
7289 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7290 (le (minus (match_dup 5) (pc)) (const_int 256)))
7293 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7294 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7298 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7299 (le (minus (match_dup 5) (pc)) (const_int 256)))
7302 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7303 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; *addsi3_cbranch_scratch: like *addsi3_cbranch but the sum is
;; discarded; per-alternative emits cmp (negated imm), cmn (register),
;; or add/sub into a scratch.  Restricted to EQ/NE/GE/LT.
;; (Extract: some original source lines are elided here.)
7308 (define_insn "*addsi3_cbranch_scratch"
7311 (match_operator 3 "comparison_operator"
7313 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7314 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7316 (label_ref (match_operand 4 "" ""))
7318 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7320 && (GET_CODE (operands[3]) == EQ
7321 || GET_CODE (operands[3]) == NE
7322 || GET_CODE (operands[3]) == GE
7323 || GET_CODE (operands[3]) == LT)"
7326 switch (which_alternative)
7329 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7332 output_asm_insn (\"cmn\t%1, %2\", operands);
7335 if (INTVAL (operands[2]) < 0)
7336 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7338 output_asm_insn (\"add\t%0, %1, %2\", operands);
7341 if (INTVAL (operands[2]) < 0)
7342 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7344 output_asm_insn (\"add\t%0, %0, %2\", operands);
7348 switch (get_attr_length (insn))
7351 return \"b%d3\\t%l4\";
7353 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7355 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
7359 [(set (attr "far_jump")
7361 (eq_attr "length" "8")
7362 (const_string "yes")
7363 (const_string "no")))
7364 (set (attr "length")
7366 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7367 (le (minus (match_dup 4) (pc)) (const_int 256)))
7370 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7371 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *subsi3_cbranch: Thumb-1 subtract whose difference is written (lo
;; reg, hi reg via mov, or memory via str) and branched on.  Restricted
;; to EQ/NE/GE/LT; the hi-reg/memory alternatives exist because reload
;; cannot do output reloads on a jump insn (see inline comments).
;; (Extract: some original source lines are elided here.)
7376 (define_insn "*subsi3_cbranch"
7379 (match_operator 4 "comparison_operator"
7381 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7382 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7384 (label_ref (match_operand 5 "" ""))
7386 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7387 (minus:SI (match_dup 2) (match_dup 3)))
7388 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7390 && (GET_CODE (operands[4]) == EQ
7391 || GET_CODE (operands[4]) == NE
7392 || GET_CODE (operands[4]) == GE
7393 || GET_CODE (operands[4]) == LT)"
7396 if (which_alternative == 0)
7397 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7398 else if (which_alternative == 1)
7400 /* We must provide an alternative for a hi reg because reload
7401 cannot handle output reloads on a jump instruction, but we
7402 can't subtract into that. Fortunately a mov from lo to hi
7403 does not clobber the condition codes. */
7404 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7405 output_asm_insn (\"mov\\t%0, %1\", operands);
7409 /* Similarly, but the target is memory. */
7410 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7411 output_asm_insn (\"str\\t%1, %0\", operands);
7414 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7417 return \"b%d4\\t%l5\";
7419 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7421 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
;; "far_jump" covers the bl-based forms; displacement ranges tightened
;; by 2 for the wider alternatives.
7425 [(set (attr "far_jump")
7427 (ior (and (eq (symbol_ref ("which_alternative"))
7429 (eq_attr "length" "8"))
7430 (eq_attr "length" "10"))
7431 (const_string "yes")
7432 (const_string "no")))
7433 (set (attr "length")
7435 (eq (symbol_ref ("which_alternative"))
7438 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7439 (le (minus (match_dup 5) (pc)) (const_int 256)))
7442 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7443 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7447 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7448 (le (minus (match_dup 5) (pc)) (const_int 256)))
7451 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7452 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; *subsi3_cbranch_scratch: Thumb-1 subtract used only for its flags —
;; emitted as CMP — followed by the length-selected branch.  Restricted
;; to EQ/NE/GE/LT (the conditions CMP computes the same way SUB would).
;; (Extract: some original source lines are elided here.)
7457 (define_insn "*subsi3_cbranch_scratch"
7460 (match_operator 0 "arm_comparison_operator"
7461 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7462 (match_operand:SI 2 "nonmemory_operand" "l"))
7464 (label_ref (match_operand 3 "" ""))
7467 && (GET_CODE (operands[0]) == EQ
7468 || GET_CODE (operands[0]) == NE
7469 || GET_CODE (operands[0]) == GE
7470 || GET_CODE (operands[0]) == LT)"
7472 output_asm_insn (\"cmp\\t%1, %2\", operands);
7473 switch (get_attr_length (insn))
7475 case 4: return \"b%d0\\t%l3\";
7476 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7477 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
;; "far_jump" is "yes" only for the 8-byte bl form.
7480 [(set (attr "far_jump")
7482 (eq_attr "length" "8")
7483 (const_string "yes")
7484 (const_string "no")))
7485 (set (attr "length")
7487 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7488 (le (minus (match_dup 3) (pc)) (const_int 256)))
7491 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7492 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7497 ;; Comparison and test insns
;; *arm_cmpsi_insn: 32-bit integer compare of a register against a
;; register/add-operand immediate, setting the CC register.
;; (Extract: the output template line is elided here.)
7499 (define_insn "*arm_cmpsi_insn"
7500 [(set (reg:CC CC_REGNUM)
7501 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7502 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7507 [(set_attr "conds" "set")]
;; *arm_cmpsi_shiftsi: compare a register against a shifted register,
;; setting CC.  "type" is alu_shift for an immediate shift count and
;; alu_shift_reg for a register-held count (scheduling cost differs).
;; (Extract: the output template line is elided here.)
7510 (define_insn "*arm_cmpsi_shiftsi"
7511 [(set (reg:CC CC_REGNUM)
7512 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7513 (match_operator:SI 3 "shift_operator"
7514 [(match_operand:SI 1 "s_register_operand" "r")
7515 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7518 [(set_attr "conds" "set")
7519 (set_attr "shift" "1")
7520 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7521 (const_string "alu_shift")
7522 (const_string "alu_shift_reg")))]
;; *arm_cmpsi_shiftsi_swp: as *arm_cmpsi_shiftsi but with operands
;; swapped, producing a CC_SWP-mode result (condition senses reversed).
;; (Extract: the output template line is elided here.)
7525 (define_insn "*arm_cmpsi_shiftsi_swp"
7526 [(set (reg:CC_SWP CC_REGNUM)
7527 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7528 [(match_operand:SI 1 "s_register_operand" "r")
7529 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7530 (match_operand:SI 0 "s_register_operand" "r")))]
7533 [(set_attr "conds" "set")
7534 (set_attr "shift" "1")
7535 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7536 (const_string "alu_shift")
7537 (const_string "alu_shift_reg")))]
;; *arm_cmpsi_negshiftsi_si: compare a register against the negation of
;; a shifted register; only the Z result is valid (CC_Z mode).
;; (Extract: the output template line is elided here.)
7540 (define_insn "*arm_cmpsi_negshiftsi_si"
7541 [(set (reg:CC_Z CC_REGNUM)
7543 (neg:SI (match_operator:SI 1 "shift_operator"
7544 [(match_operand:SI 2 "s_register_operand" "r")
7545 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7546 (match_operand:SI 0 "s_register_operand" "r")))]
7549 [(set_attr "conds" "set")
7550 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7551 (const_string "alu_shift")
7552 (const_string "alu_shift_reg")))]
7555 ;; Cirrus SF compare instruction
;; *cirrus_cmpsf: Cirrus Maverick single-float compare; writes the
;; result to r15 per the Maverick convention (cfcmps).
7556 (define_insn "*cirrus_cmpsf"
7557 [(set (reg:CCFP CC_REGNUM)
7558 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7559 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7560 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7561 "cfcmps%?\\tr15, %V0, %V1"
7562 [(set_attr "type" "mav_farith")
7563 (set_attr "cirrus" "compare")]
7566 ;; Cirrus DF compare instruction
;; *cirrus_cmpdf: Cirrus Maverick double-float compare (cfcmpd);
;; mirrors *cirrus_cmpsf for DFmode.
7567 (define_insn "*cirrus_cmpdf"
7568 [(set (reg:CCFP CC_REGNUM)
7569 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7570 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7571 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7572 "cfcmpd%?\\tr15, %V0, %V1"
7573 [(set_attr "type" "mav_farith")
7574 (set_attr "cirrus" "compare")]
;; *cirrus_cmpdi: Cirrus Maverick 64-bit integer compare (cfcmp64)
;; between two Maverick registers, setting integer CC.
7577 (define_insn "*cirrus_cmpdi"
7578 [(set (reg:CC CC_REGNUM)
7579 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7580 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7581 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7582 "cfcmp64%?\\tr15, %V0, %V1"
7583 [(set_attr "type" "mav_farith")
7584 (set_attr "cirrus" "compare")]
7587 ; This insn allows redundant compares to be removed by cse, nothing should
7588 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7589 ; is deleted later on. The match_dup will match the mode here, so that
7590 ; mode changes of the condition codes aren't lost by this even though we don't
7591 ; specify what they are.
;; *deleted_compare: no-op (set cc cc) kept so CSE can remove redundant
;; compares; emits only a comment and has zero length (see the block
;; comment above this pattern in the file).
7593 (define_insn "*deleted_compare"
7594 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7596 "\\t%@ deleted compare"
7597 [(set_attr "conds" "set")
7598 (set_attr "length" "0")]
7602 ;; Conditional branch insns
;; cbranch_cc: expander that turns a general comparison-and-branch into
;; a compare against a CC register: arm_gen_compare_reg emits the
;; compare, then operand 2 is rewritten to const0_rtx for the branch.
;; (Extract: some original source lines are elided here.)
7604 (define_expand "cbranch_cc"
7606 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7607 (match_operand 2 "" "")])
7608 (label_ref (match_operand 3 "" ""))
7611 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7612 operands[1], operands[2]);
7613 operands[2] = const0_rtx;"
7617 ;; Patterns to match conditional branch insns.
7620 ; Special pattern to match UNEQ.
;; *arm_buneq: branch on UNEQ (unordered-or-equal) — no single ARM
;; condition covers it, so emit bvs (unordered) plus beq; 8 bytes,
;; and the ccfsm conditional-execution optimizer must not be active.
7621 (define_insn "*arm_buneq"
7623 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7624 (label_ref (match_operand 0 "" ""))
7626 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7628 gcc_assert (!arm_ccfsm_state);
7630 return \"bvs\\t%l0\;beq\\t%l0\";
7632 [(set_attr "conds" "jump_clob")
7633 (set_attr "length" "8")]
7636 ; Special pattern to match LTGT.
;; *arm_bltgt: branch on LTGT (ordered and not equal) — emitted as the
;; two-branch sequence bmi plus bgt; 8 bytes, ccfsm must be inactive.
7637 (define_insn "*arm_bltgt"
7639 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7640 (label_ref (match_operand 0 "" ""))
7642 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7644 gcc_assert (!arm_ccfsm_state);
7646 return \"bmi\\t%l0\;bgt\\t%l0\";
7648 [(set_attr "conds" "jump_clob")
7649 (set_attr "length" "8")]
;; *arm_cond_branch: generic conditional branch on a CC-register test.
;; If the ccfsm state machine has begun conditionalizing (state 1/2),
;; advance it instead of emitting a branch (the branch is elided).
;; (Extract: some original source lines are elided here.)
7652 (define_insn "*arm_cond_branch"
7654 (if_then_else (match_operator 1 "arm_comparison_operator"
7655 [(match_operand 2 "cc_register" "") (const_int 0)])
7656 (label_ref (match_operand 0 "" ""))
7660 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7662 arm_ccfsm_state += 2;
7665 return \"b%d1\\t%l0\";
7667 [(set_attr "conds" "use")
7668 (set_attr "type" "branch")]
7671 ; Special pattern to match reversed UNEQ.
;; *arm_buneq_reversed: UNEQ with branch arms swapped (branch taken
;; when the condition is false) — the inverse condition is LTGT, so
;; emit bmi plus bgt; 8 bytes, ccfsm must be inactive.
7672 (define_insn "*arm_buneq_reversed"
7674 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7676 (label_ref (match_operand 0 "" ""))))]
7677 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7679 gcc_assert (!arm_ccfsm_state);
7681 return \"bmi\\t%l0\;bgt\\t%l0\";
7683 [(set_attr "conds" "jump_clob")
7684 (set_attr "length" "8")]
7687 ; Special pattern to match reversed LTGT.
;; *arm_bltgt_reversed: LTGT with branch arms swapped — the inverse
;; condition is UNEQ, so emit bvs plus beq; 8 bytes, ccfsm inactive.
7688 (define_insn "*arm_bltgt_reversed"
7690 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7692 (label_ref (match_operand 0 "" ""))))]
7693 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7695 gcc_assert (!arm_ccfsm_state);
7697 return \"bvs\\t%l0\;beq\\t%l0\";
7699 [(set_attr "conds" "jump_clob")
7700 (set_attr "length" "8")]
;; *arm_cond_branch_reversed: as *arm_cond_branch but with the branch
;; arms swapped, so the inverted condition code (%D1) is emitted; the
;; ccfsm elision path is the same.
;; (Extract: some original source lines are elided here.)
7703 (define_insn "*arm_cond_branch_reversed"
7705 (if_then_else (match_operator 1 "arm_comparison_operator"
7706 [(match_operand 2 "cc_register" "") (const_int 0)])
7708 (label_ref (match_operand 0 "" ""))))]
7711 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7713 arm_ccfsm_state += 2;
7716 return \"b%D1\\t%l0\";
7718 [(set_attr "conds" "use")
7719 (set_attr "type" "branch")]
;; cstore_cc: expander that turns a general store-condition into a test
;; of a CC register: arm_gen_compare_reg emits the compare, then
;; operand 3 is rewritten to const0_rtx.
7726 (define_expand "cstore_cc"
7727 [(set (match_operand:SI 0 "s_register_operand" "")
7728 (match_operator:SI 1 "" [(match_operand 2 "" "")
7729 (match_operand 3 "" "")]))]
7731 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7732 operands[2], operands[3]);
7733 operands[3] = const0_rtx;"
;; *mov_scc: materialize a condition as 0/1 using a conditional mov
;; pair (mov%D1 #0 then mov%d1 #1); 8 bytes, reads the CC register.
7736 (define_insn "*mov_scc"
7737 [(set (match_operand:SI 0 "s_register_operand" "=r")
7738 (match_operator:SI 1 "arm_comparison_operator"
7739 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7741 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7742 [(set_attr "conds" "use")
7743 (set_attr "length" "8")]
;; *mov_negscc: materialize the negated condition as 0/-1 via
;; mov%D1 #0 then mvn%d1 #0 (mvn #0 = all-ones = -1); 8 bytes.
7746 (define_insn "*mov_negscc"
7747 [(set (match_operand:SI 0 "s_register_operand" "=r")
7748 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7749 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7751 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7752 [(set_attr "conds" "use")
7753 (set_attr "length" "8")]
;; *mov_notscc: materialize the bitwise-not of the condition value via
;; mov%D1 #0 then mvn%d1 #1 (mvn #1 = ~1 = -2); 8 bytes.
7756 (define_insn "*mov_notscc"
7757 [(set (match_operand:SI 0 "s_register_operand" "=r")
7758 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7759 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7761 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7762 [(set_attr "conds" "use")
7763 (set_attr "length" "8")]
;; Expand an SImode compare-and-store (cstore).  One branch (presumably
;; TARGET_32BIT -- the guard lines are not visible here) legitimizes
;; operand 3 and defers to cstore_cc; the rest open-codes each comparison
;; for Thumb-1, which has no conditional execution, using shift/sub/adc
;; identities (e.g. NE(x,0) as ((x | (x-1)) >> 31), GE via sign bits,
;; and the gen_cstoresi_*_thumb1 / thumb1_addsi3_addgeu helpers below).
7766 (define_expand "cstoresi4"
7767 [(set (match_operand:SI 0 "s_register_operand" "")
7768 (match_operator:SI 1 "arm_comparison_operator"
7769 [(match_operand:SI 2 "s_register_operand" "")
7770 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7771 "TARGET_32BIT || TARGET_THUMB1"
7773 rtx op3, scratch, scratch2;
7777 if (!arm_add_operand (operands[3], SImode))
7778 operands[3] = force_reg (SImode, operands[3]);
7779 emit_insn (gen_cstore_cc (operands[0], operands[1],
7780 operands[2], operands[3]));
7784 if (operands[3] == const0_rtx)
7786 switch (GET_CODE (operands[1]))
7789 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7793 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7797 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7798 NULL_RTX, 0, OPTAB_WIDEN);
7799 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7800 NULL_RTX, 0, OPTAB_WIDEN);
7801 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7802 operands[0], 1, OPTAB_WIDEN);
7806 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7808 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7809 NULL_RTX, 1, OPTAB_WIDEN);
7813 scratch = expand_binop (SImode, ashr_optab, operands[2],
7814 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7815 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7816 NULL_RTX, 0, OPTAB_WIDEN);
7817 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7821 /* LT is handled by generic code. No need for unsigned with 0. */
7828 switch (GET_CODE (operands[1]))
7831 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7832 NULL_RTX, 0, OPTAB_WIDEN);
7833 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7837 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7838 NULL_RTX, 0, OPTAB_WIDEN);
7839 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7843 op3 = force_reg (SImode, operands[3]);
7845 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7846 NULL_RTX, 1, OPTAB_WIDEN);
7847 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7848 NULL_RTX, 0, OPTAB_WIDEN);
7849 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7855 if (!thumb1_cmp_operand (op3, SImode))
7856 op3 = force_reg (SImode, op3);
7857 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7858 NULL_RTX, 0, OPTAB_WIDEN);
7859 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7860 NULL_RTX, 1, OPTAB_WIDEN);
7861 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7866 op3 = force_reg (SImode, operands[3]);
7867 scratch = force_reg (SImode, const0_rtx);
7868 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7874 if (!thumb1_cmp_operand (op3, SImode))
7875 op3 = force_reg (SImode, op3);
7876 scratch = force_reg (SImode, const0_rtx);
7877 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7883 if (!thumb1_cmp_operand (op3, SImode))
7884 op3 = force_reg (SImode, op3);
7885 scratch = gen_reg_rtx (SImode);
7886 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
7887 emit_insn (gen_negsi2 (operands[0], scratch));
7891 op3 = force_reg (SImode, operands[3]);
7892 scratch = gen_reg_rtx (SImode);
7893 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
7894 emit_insn (gen_negsi2 (operands[0], scratch));
7897 /* No good sequences for GT, LT. */
;; Floating-point compare-and-store: emit the comparison into the CC
;; register via cstore_cc, then store the flag.
7904 (define_expand "cstoresf4"
7905 [(set (match_operand:SI 0 "s_register_operand" "")
7906 (match_operator:SI 1 "arm_comparison_operator"
7907 [(match_operand:SF 2 "s_register_operand" "")
7908 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7909 "TARGET_32BIT && TARGET_HARD_FLOAT"
7910 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7911 operands[2], operands[3])); DONE;"
;; DFmode variant of the above.
7914 (define_expand "cstoredf4"
7915 [(set (match_operand:SI 0 "s_register_operand" "")
7916 (match_operator:SI 1 "arm_comparison_operator"
7917 [(match_operand:DF 2 "s_register_operand" "")
7918 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7919 "TARGET_32BIT && TARGET_HARD_FLOAT"
7920 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7921 operands[2], operands[3])); DONE;"
7924 ;; This pattern uses the Cirrus (Maverick) DI-mode compare instruction.
7925 (define_expand "cstoredi4"
7926 [(set (match_operand:SI 0 "s_register_operand" "")
7927 (match_operator:SI 1 "arm_comparison_operator"
7928 [(match_operand:DI 2 "cirrus_fp_register" "")
7929 (match_operand:DI 3 "cirrus_fp_register" "")]))]
7930 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7931 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7932 operands[2], operands[3])); DONE;"
;; Thumb-1 (x == 0) as a 0/1 value; allocates the scratch the insn
;; pattern below may need.
7936 (define_expand "cstoresi_eq0_thumb1"
7938 [(set (match_operand:SI 0 "s_register_operand" "")
7939 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7941 (clobber (match_dup:SI 2))])]
7943 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 (x != 0) as a 0/1 value; likewise allocates a scratch.
7946 (define_expand "cstoresi_ne0_thumb1"
7948 [(set (match_operand:SI 0 "s_register_operand" "")
7949 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7951 (clobber (match_dup:SI 2))])]
7953 "operands[2] = gen_reg_rtx (SImode);"
;; neg sets C iff its operand is zero (0 - x borrows unless x == 0),
;; then adc folds the carry into a 0/1 result without a branch.
7956 (define_insn "*cstoresi_eq0_thumb1_insn"
7957 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7958 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7960 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7963 neg\\t%0, %1\;adc\\t%0, %0, %1
7964 neg\\t%2, %1\;adc\\t%0, %1, %2"
7965 [(set_attr "length" "4")]
;; x - (x - 1) - !C: sub borrows only when x == 0, so sbc leaves 0/1 = (x != 0).
7968 (define_insn "*cstoresi_ne0_thumb1_insn"
7969 [(set (match_operand:SI 0 "s_register_operand" "=l")
7970 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7972 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7974 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7975 [(set_attr "length" "4")]
;; Despite the name, this computes -(op1 <u op2): cmp sets C = no-borrow,
;; and sbc of a register with itself yields 0 or -1 from !C.
7978 (define_insn "cstoresi_nltu_thumb1"
7979 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7980 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7981 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7983 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7984 [(set_attr "length" "4")]
7987 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): cmp sets C exactly for GEU, adc adds it in.
7988 (define_insn "thumb1_addsi3_addgeu"
7989 [(set (match_operand:SI 0 "s_register_operand" "=l")
7990 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7991 (match_operand:SI 2 "s_register_operand" "l"))
7992 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7993 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7995 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7996 [(set_attr "length" "4")]
8000 ;; Conditional move insns
;; movMcc expanders: reject UNEQ/LTGT (tested first; the FAIL is in a
;; line not visible here), compute the comparison into a CC register,
;; and rewrite operand 1 as that-register-vs-zero.
8002 (define_expand "movsicc"
8003 [(set (match_operand:SI 0 "s_register_operand" "")
8004 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8005 (match_operand:SI 2 "arm_not_operand" "")
8006 (match_operand:SI 3 "arm_not_operand" "")))]
8010 enum rtx_code code = GET_CODE (operands[1]);
8013 if (code == UNEQ || code == LTGT)
8016 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8017 XEXP (operands[1], 1));
8018 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move; additionally forces operand 3 into a
;; register unless FPA hard float accepts it as an FP add operand.
8022 (define_expand "movsfcc"
8023 [(set (match_operand:SF 0 "s_register_operand" "")
8024 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8025 (match_operand:SF 2 "s_register_operand" "")
8026 (match_operand:SF 3 "nonmemory_operand" "")))]
8027 "TARGET_32BIT && TARGET_HARD_FLOAT"
8030 enum rtx_code code = GET_CODE (operands[1]);
8033 if (code == UNEQ || code == LTGT)
8036 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8037 Otherwise, ensure it is a valid FP add operand */
8038 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8039 || (!arm_float_add_operand (operands[3], SFmode)))
8040 operands[3] = force_reg (SFmode, operands[3]);
8042 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8043 XEXP (operands[1], 1));
8044 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move (FPA or VFP only).
8048 (define_expand "movdfcc"
8049 [(set (match_operand:DF 0 "s_register_operand" "")
8050 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8051 (match_operand:DF 2 "s_register_operand" "")
8052 (match_operand:DF 3 "arm_float_add_operand" "")))]
8053 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8056 enum rtx_code code = GET_CODE (operands[1]);
8059 if (code == UNEQ || code == LTGT)
8062 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8063 XEXP (operands[1], 1));
8064 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Core conditional move: one predicated mov/mvn when one arm is
;; already in the destination (length 4), otherwise a pair (length 8).
;; %B prints the bitwise complement of a constant, pairing with mvn.
8068 (define_insn "*movsicc_insn"
8069 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8071 (match_operator 3 "arm_comparison_operator"
8072 [(match_operand 4 "cc_register" "") (const_int 0)])
8073 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8074 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8081 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8082 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8083 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8084 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8085 [(set_attr "length" "4,4,4,4,8,8,8,8")
8086 (set_attr "conds" "use")]
;; SFmode conditional move for soft-float: values live in core
;; registers, so plain (predicated) register moves suffice.
8089 (define_insn "*movsfcc_soft_insn"
8090 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8091 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8092 [(match_operand 4 "cc_register" "") (const_int 0)])
8093 (match_operand:SF 1 "s_register_operand" "0,r")
8094 (match_operand:SF 2 "s_register_operand" "r,0")))]
8095 "TARGET_ARM && TARGET_SOFT_FLOAT"
8099 [(set_attr "conds" "use")]
8103 ;; Jump and linkage insns
;; Generic unconditional jump expander.
8105 (define_expand "jump"
8107 (label_ref (match_operand 0 "" "")))]
;; ARM-state branch.  The arm_ccfsm_state machinery detects branches
;; that can be absorbed into conditionally-executed sequences and
;; suppresses the output in that case.
8112 (define_insn "*arm_jump"
8114 (label_ref (match_operand 0 "" "")))]
8118 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8120 arm_ccfsm_state += 2;
8123 return \"b%?\\t%l0\";
8126 [(set_attr "predicable" "yes")]
;; Thumb branch: a 2-byte B if the target is within roughly +-2 KB,
;; otherwise a BL used as a far jump (see the far_jump/length attrs).
8129 (define_insn "*thumb_jump"
8131 (label_ref (match_operand 0 "" "")))]
8134 if (get_attr_length (insn) == 2)
8136 return \"bl\\t%l0\\t%@ far jump\";
8138 [(set (attr "far_jump")
8140 (eq_attr "length" "4")
8141 (const_string "yes")
8142 (const_string "no")))
8143 (set (attr "length")
8145 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8146 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Call expander: normalizes a NULL operand 2 to const0_rtx, forces
;; long calls to SYMBOL_REFs through a register, then emits
;; call_internal via arm_emit_call_insn.
8151 (define_expand "call"
8152 [(parallel [(call (match_operand 0 "memory_operand" "")
8153 (match_operand 1 "general_operand" ""))
8154 (use (match_operand 2 "" ""))
8155 (clobber (reg:SI LR_REGNUM))])]
8161 /* In an untyped call, we can get NULL for operand 2. */
8162 if (operands[2] == NULL_RTX)
8163 operands[2] = const0_rtx;
8165 /* Decide if we should generate indirect calls by loading the
8166 32-bit address of the callee into a register before performing the
8168 callee = XEXP (operands[0], 0);
8169 if (GET_CODE (callee) == SYMBOL_REF
8170 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8172 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8174 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8175 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Internal form matched by the insn patterns below.
8180 (define_expand "call_internal"
8181 [(parallel [(call (match_operand 0 "memory_operand" "")
8182 (match_operand 1 "general_operand" ""))
8183 (use (match_operand 2 "" ""))
8184 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call on ARMv5+ (BLX available).
8186 (define_insn "*call_reg_armv5"
8187 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8188 (match_operand 1 "" ""))
8189 (use (match_operand 2 "" ""))
8190 (clobber (reg:SI LR_REGNUM))]
8191 "TARGET_ARM && arm_arch5"
8193 [(set_attr "type" "call")]
;; Pre-v5 indirect call: output_call emits a mov lr,pc / branch
;; sequence; length 12 is the worst case.
8196 (define_insn "*call_reg_arm"
8197 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8198 (match_operand 1 "" ""))
8199 (use (match_operand 2 "" ""))
8200 (clobber (reg:SI LR_REGNUM))]
8201 "TARGET_ARM && !arm_arch5"
8203 return output_call (operands);
8205 ;; length is worst case, normally it is only two
8206 [(set_attr "length" "12")
8207 (set_attr "type" "call")]
;; Call through a memory operand (address loaded from memory).
8210 (define_insn "*call_mem"
8211 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8212 (match_operand 1 "" ""))
8213 (use (match_operand 2 "" ""))
8214 (clobber (reg:SI LR_REGNUM))]
8217 return output_call_mem (operands);
8219 [(set_attr "length" "12")
8220 (set_attr "type" "call")]
;; Thumb-1 indirect call on v5 (BLX reg, 2 bytes).
8223 (define_insn "*call_reg_thumb1_v5"
8224 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8225 (match_operand 1 "" ""))
8226 (use (match_operand 2 "" ""))
8227 (clobber (reg:SI LR_REGNUM))]
8228 "TARGET_THUMB1 && arm_arch5"
8230 [(set_attr "length" "2")
8231 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 indirect call: goes via a call_via_<reg> helper, or
;; an interworking stub (r7/r11 variant depends on the frame pointer).
8234 (define_insn "*call_reg_thumb1"
8235 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8236 (match_operand 1 "" ""))
8237 (use (match_operand 2 "" ""))
8238 (clobber (reg:SI LR_REGNUM))]
8239 "TARGET_THUMB1 && !arm_arch5"
8242 if (!TARGET_CALLER_INTERWORKING)
8243 return thumb_call_via_reg (operands[0]);
8244 else if (operands[1] == const0_rtx)
8245 return \"bl\\t%__interwork_call_via_%0\";
8246 else if (frame_pointer_needed)
8247 return \"bl\\t%__interwork_r7_call_via_%0\";
8249 return \"bl\\t%__interwork_r11_call_via_%0\";
8251 [(set_attr "type" "call")]
;; call_value: like "call" above but with a value destination in
;; operand 0; the USE/NULL normalization applies to operand 3 here.
8254 (define_expand "call_value"
8255 [(parallel [(set (match_operand 0 "" "")
8256 (call (match_operand 1 "memory_operand" "")
8257 (match_operand 2 "general_operand" "")))
8258 (use (match_operand 3 "" ""))
8259 (clobber (reg:SI LR_REGNUM))])]
8265 /* In an untyped call, we can get NULL for operand 3. */
8266 if (operands[3] == 0)
8267 operands[3] = const0_rtx;
8269 /* Decide if we should generate indirect calls by loading the
8270 32-bit address of the callee into a register before performing the
8272 callee = XEXP (operands[1], 0);
8273 if (GET_CODE (callee) == SYMBOL_REF
8274 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8276 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8278 pat = gen_call_value_internal (operands[0], operands[1],
8279 operands[2], operands[3]);
8280 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Internal form matched by the value-returning insn patterns below.
8285 (define_expand "call_value_internal"
8286 [(parallel [(set (match_operand 0 "" "")
8287 (call (match_operand 1 "memory_operand" "")
8288 (match_operand 2 "general_operand" "")))
8289 (use (match_operand 3 "" ""))
8290 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning indirect call, ARMv5+ (BLX).
8292 (define_insn "*call_value_reg_armv5"
8293 [(set (match_operand 0 "" "")
8294 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8295 (match_operand 2 "" "")))
8296 (use (match_operand 3 "" ""))
8297 (clobber (reg:SI LR_REGNUM))]
8298 "TARGET_ARM && arm_arch5"
8300 [(set_attr "type" "call")]
;; Pre-v5 value-returning indirect call (output_call sequence).
8303 (define_insn "*call_value_reg_arm"
8304 [(set (match_operand 0 "" "")
8305 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8306 (match_operand 2 "" "")))
8307 (use (match_operand 3 "" ""))
8308 (clobber (reg:SI LR_REGNUM))]
8309 "TARGET_ARM && !arm_arch5"
8311 return output_call (&operands[1]);
8313 [(set_attr "length" "12")
8314 (set_attr "type" "call")]
;; Value-returning call through memory (non-constant address only).
8317 (define_insn "*call_value_mem"
8318 [(set (match_operand 0 "" "")
8319 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8320 (match_operand 2 "" "")))
8321 (use (match_operand 3 "" ""))
8322 (clobber (reg:SI LR_REGNUM))]
8323 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8325 return output_call_mem (&operands[1]);
8327 [(set_attr "length" "12")
8328 (set_attr "type" "call")]
;; Thumb-1 v5 value-returning indirect call (BLX reg).
8331 (define_insn "*call_value_reg_thumb1_v5"
8332 [(set (match_operand 0 "" "")
8333 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8334 (match_operand 2 "" "")))
8335 (use (match_operand 3 "" ""))
8336 (clobber (reg:SI LR_REGNUM))]
8337 "TARGET_THUMB1 && arm_arch5"
8339 [(set_attr "length" "2")
8340 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 value call via helper/interworking stubs, mirroring
;; *call_reg_thumb1 above.
8343 (define_insn "*call_value_reg_thumb1"
8344 [(set (match_operand 0 "" "")
8345 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8346 (match_operand 2 "" "")))
8347 (use (match_operand 3 "" ""))
8348 (clobber (reg:SI LR_REGNUM))]
8349 "TARGET_THUMB1 && !arm_arch5"
8352 if (!TARGET_CALLER_INTERWORKING)
8353 return thumb_call_via_reg (operands[1]);
8354 else if (operands[2] == const0_rtx)
8355 return \"bl\\t%__interwork_call_via_%1\";
8356 else if (frame_pointer_needed)
8357 return \"bl\\t%__interwork_r7_call_via_%1\";
8359 return \"bl\\t%__interwork_r11_call_via_%1\";
8361 [(set_attr "type" "call")]
8364 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8365 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct BL to a symbol, with (PLT) suffix when PIC requires it.
8367 (define_insn "*call_symbol"
8368 [(call (mem:SI (match_operand:SI 0 "" ""))
8369 (match_operand 1 "" ""))
8370 (use (match_operand 2 "" ""))
8371 (clobber (reg:SI LR_REGNUM))]
8373 && (GET_CODE (operands[0]) == SYMBOL_REF)
8374 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8377 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8379 [(set_attr "type" "call")]
;; Value-returning direct BL to a symbol.
8382 (define_insn "*call_value_symbol"
8383 [(set (match_operand 0 "" "")
8384 (call (mem:SI (match_operand:SI 1 "" ""))
8385 (match_operand:SI 2 "" "")))
8386 (use (match_operand 3 "" ""))
8387 (clobber (reg:SI LR_REGNUM))]
8389 && (GET_CODE (operands[1]) == SYMBOL_REF)
8390 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8393 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8395 [(set_attr "type" "call")]
;; Direct-call variant for another subtarget (guard line not visible);
;; fixed 4-byte length.
8398 (define_insn "*call_insn"
8399 [(call (mem:SI (match_operand:SI 0 "" ""))
8400 (match_operand:SI 1 "" ""))
8401 (use (match_operand 2 "" ""))
8402 (clobber (reg:SI LR_REGNUM))]
8404 && GET_CODE (operands[0]) == SYMBOL_REF
8405 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8407 [(set_attr "length" "4")
8408 (set_attr "type" "call")]
;; Value-returning counterpart of *call_insn.
8411 (define_insn "*call_value_insn"
8412 [(set (match_operand 0 "" "")
8413 (call (mem:SI (match_operand 1 "" ""))
8414 (match_operand 2 "" "")))
8415 (use (match_operand 3 "" ""))
8416 (clobber (reg:SI LR_REGNUM))]
8418 && GET_CODE (operands[1]) == SYMBOL_REF
8419 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8421 [(set_attr "length" "4")
8422 (set_attr "type" "call")]
8425 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail (sibling) call expanders: only normalize the NULL use-operand.
8426 (define_expand "sibcall"
8427 [(parallel [(call (match_operand 0 "memory_operand" "")
8428 (match_operand 1 "general_operand" ""))
8430 (use (match_operand 2 "" ""))])]
8434 if (operands[2] == NULL_RTX)
8435 operands[2] = const0_rtx;
;; Value-returning sibcall expander.
8439 (define_expand "sibcall_value"
8440 [(parallel [(set (match_operand 0 "" "")
8441 (call (match_operand 1 "memory_operand" "")
8442 (match_operand 2 "general_operand" "")))
8444 (use (match_operand 3 "" ""))])]
8448 if (operands[3] == NULL_RTX)
8449 operands[3] = const0_rtx;
;; Sibcall is a plain B to the symbol -- LR is NOT clobbered.
8453 (define_insn "*sibcall_insn"
8454 [(call (mem:SI (match_operand:SI 0 "" "X"))
8455 (match_operand 1 "" ""))
8457 (use (match_operand 2 "" ""))]
8458 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8460 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8462 [(set_attr "type" "call")]
;; Value-returning sibcall (B to symbol).
8465 (define_insn "*sibcall_value_insn"
8466 [(set (match_operand 0 "" "")
8467 (call (mem:SI (match_operand:SI 1 "" "X"))
8468 (match_operand 2 "" "")))
8470 (use (match_operand 3 "" ""))]
8471 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8473 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8475 [(set_attr "type" "call")]
8478 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional function return; the ccfsm state check lets a pending
;; conditional-execution sequence swallow the return.
8479 (define_insn "return"
8481 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8484 if (arm_ccfsm_state == 2)
8486 arm_ccfsm_state += 2;
8489 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8491 [(set_attr "type" "load1")
8492 (set_attr "length" "12")
8493 (set_attr "predicable" "yes")]
;; Conditional return: return taken when operator 0 holds.
8496 (define_insn "*cond_return"
8498 (if_then_else (match_operator 0 "arm_comparison_operator"
8499 [(match_operand 1 "cc_register" "") (const_int 0)])
8502 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8505 if (arm_ccfsm_state == 2)
8507 arm_ccfsm_state += 2;
8510 return output_return_instruction (operands[0], TRUE, FALSE);
8512 [(set_attr "conds" "use")
8513 (set_attr "length" "12")
8514 (set_attr "type" "load1")]
;; As above, but the return is on the else-arm, so the condition is
;; inverted (final TRUE argument to output_return_instruction).
8517 (define_insn "*cond_return_inverted"
8519 (if_then_else (match_operator 0 "arm_comparison_operator"
8520 [(match_operand 1 "cc_register" "") (const_int 0)])
8523 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8526 if (arm_ccfsm_state == 2)
8528 arm_ccfsm_state += 2;
8531 return output_return_instruction (operands[0], TRUE, TRUE);
8533 [(set_attr "conds" "use")
8534 (set_attr "length" "12")
8535 (set_attr "type" "load1")]
8538 ;; Generate a sequence of instructions to determine if the processor is
8539 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8542 (define_expand "return_addr_mask"
8544 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8546 (set (match_operand:SI 0 "s_register_operand" "")
8547 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8549 (const_int 67108860)))] ; 0x03fffffc
8552 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; teq pc,pc sets Z only in 32-bit mode (PC reads without flag bits),
;; distinguishing 26-bit from 32-bit program counters.
8555 (define_insn "*check_arch2"
8556 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8557 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8560 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8561 [(set_attr "length" "8")
8562 (set_attr "conds" "set")]
8565 ;; Call subroutine returning any type.
;; Builds a parallel of all possible return registers (r0 widened to a
;; TImode group of four), performs the call, then stores each returned
;; register into the result block; gen_blockage stops the optimizer
;; moving code across the sequence.
8567 (define_expand "untyped_call"
8568 [(parallel [(call (match_operand 0 "" "")
8570 (match_operand 1 "" "")
8571 (match_operand 2 "" "")])]
8576 rtx par = gen_rtx_PARALLEL (VOIDmode,
8577 rtvec_alloc (XVECLEN (operands[2], 0)));
8578 rtx addr = gen_reg_rtx (Pmode);
8582 emit_move_insn (addr, XEXP (operands[1], 0));
8583 mem = change_address (operands[1], BLKmode, addr);
8585 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8587 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8589 /* Default code only uses r0 as a return value, but we could
8590 be using anything up to 4 registers. */
8591 if (REGNO (src) == R0_REGNUM)
8592 src = gen_rtx_REG (TImode, R0_REGNUM);
8594 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8596 size += GET_MODE_SIZE (GET_MODE (src));
8599 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8604 for (i = 0; i < XVECLEN (par, 0); i++)
8606 HOST_WIDE_INT offset = 0;
8607 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8610 emit_move_insn (addr, plus_constant (addr, size));
8612 mem = change_address (mem, GET_MODE (reg), NULL);
8613 if (REGNO (reg) == R0_REGNUM)
8615 /* On thumb we have to use a write-back instruction. */
8616 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8617 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8618 size = TARGET_ARM ? 16 : 0;
8622 emit_move_insn (mem, reg);
8623 size = GET_MODE_SIZE (GET_MODE (reg));
8627 /* The optimizer does not know that the call sets the function value
8628 registers we stored in the result block. We avoid problems by
8629 claiming that all hard registers are used and clobbered at this
8631 emit_insn (gen_blockage ());
;; Reverse of untyped_call's store-out: reload each saved return
;; register from the result block, emit USEs so they stay live, then
;; perform a naked return.
8637 (define_expand "untyped_return"
8638 [(match_operand:BLK 0 "memory_operand" "")
8639 (match_operand 1 "" "")]
8644 rtx addr = gen_reg_rtx (Pmode);
8648 emit_move_insn (addr, XEXP (operands[0], 0));
8649 mem = change_address (operands[0], BLKmode, addr);
8651 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8653 HOST_WIDE_INT offset = 0;
8654 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8657 emit_move_insn (addr, plus_constant (addr, size));
8659 mem = change_address (mem, GET_MODE (reg), NULL);
8660 if (REGNO (reg) == R0_REGNUM)
8662 /* On thumb we have to use a write-back instruction. */
8663 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8664 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8665 size = TARGET_ARM ? 16 : 0;
8669 emit_move_insn (reg, mem);
8670 size = GET_MODE_SIZE (GET_MODE (reg));
8674 /* Emit USE insns before the return. */
8675 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8676 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8678 /* Construct the return. */
8679 expand_naked_return ();
8685 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8686 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no code.
8688 (define_insn "blockage"
8689 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8692 [(set_attr "length" "0")
8693 (set_attr "type" "block")]
;; Switch-statement dispatch: bias the index by the lower bound when it
;; is non-zero, then emit the ARM or Thumb-2 internal casesi pattern.
8696 (define_expand "casesi"
8697 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8698 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8699 (match_operand:SI 2 "const_int_operand" "") ; total range
8700 (match_operand:SI 3 "" "") ; table label
8701 (match_operand:SI 4 "" "")] ; Out of range label
8706 if (operands[1] != const0_rtx)
8708 reg = gen_reg_rtx (SImode);
8710 emit_insn (gen_addsi3 (reg, operands[0],
8711 GEN_INT (-INTVAL (operands[1]))));
8715 if (!const_ok_for_arm (INTVAL (operands[2])))
8716 operands[2] = force_reg (SImode, operands[2]);
8720 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8721 operands[3], operands[4]));
8725 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8726 operands[2], operands[3], operands[4]));
8730 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8731 operands[3], operands[4]));
8737 ;; The USE in this pattern is needed to tell flow analysis that this is
8738 ;; a CASESI insn. It has no other purpose.
;; cmp; then a LS-conditional jump through the table (add-to-pc or
;; ldr-pc form, see the two return strings); out-of-range falls into
;; the unconditional branch to label 3.
8739 (define_insn "arm_casesi_internal"
8740 [(parallel [(set (pc)
8742 (leu (match_operand:SI 0 "s_register_operand" "r")
8743 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8744 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8745 (label_ref (match_operand 2 "" ""))))
8746 (label_ref (match_operand 3 "" ""))))
8747 (clobber (reg:CC CC_REGNUM))
8748 (use (label_ref (match_dup 2)))])]
8752 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8753 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8755 [(set_attr "conds" "clob")
8756 (set_attr "length" "12")]
;; Indirect jump expander.  For Thumb-2 (visible in the comment below)
;; the low bit of the target is forced on so BX stays in Thumb state.
8759 (define_expand "indirect_jump"
8761 (match_operand:SI 0 "s_register_operand" ""))]
8764 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8765 address and use bx. */
8769 tmp = gen_reg_rtx (SImode);
8770 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8776 ;; NB Never uses BX.
;; ARM-state indirect jump: plain mov pc, reg.
8777 (define_insn "*arm_indirect_jump"
8779 (match_operand:SI 0 "s_register_operand" "r"))]
8781 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8782 [(set_attr "predicable" "yes")]
;; Indirect jump with the target loaded straight from memory into pc.
8785 (define_insn "*load_indirect_jump"
8787 (match_operand:SI 0 "memory_operand" "m"))]
8789 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8790 [(set_attr "type" "load1")
8791 (set_attr "pool_range" "4096")
8792 (set_attr "neg_pool_range" "4084")
8793 (set_attr "predicable" "yes")]
8796 ;; NB Never uses BX.
;; Thumb-1 indirect jump (2 bytes, clobbers the condition codes).
8797 (define_insn "*thumb1_indirect_jump"
8799 (match_operand:SI 0 "register_operand" "l*r"))]
8802 [(set_attr "conds" "clob")
8803 (set_attr "length" "2")]
8813 if (TARGET_UNIFIED_ASM)
8816 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8817 return \"mov\\tr8, r8\";
8819 [(set (attr "length")
8820 (if_then_else (eq_attr "is_thumb" "yes")
8826 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; rd = rn OP (rm SHIFT amount): %i1 prints the operator's mnemonic and
;; %S3 the shift; type is alu_shift for an immediate shift amount,
;; alu_shift_reg (slower) for a register amount.
8828 (define_insn "*arith_shiftsi"
8829 [(set (match_operand:SI 0 "s_register_operand" "=r")
8830 (match_operator:SI 1 "shiftable_operator"
8831 [(match_operator:SI 3 "shift_operator"
8832 [(match_operand:SI 4 "s_register_operand" "r")
8833 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8834 (match_operand:SI 2 "s_register_operand" "r")]))]
8836 "%i1%?\\t%0, %2, %4%S3"
8837 [(set_attr "predicable" "yes")
8838 (set_attr "shift" "4")
8839 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8840 (const_string "alu_shift")
8841 (const_string "alu_shift_reg")))]
8845 [(set (match_operand:SI 0 "s_register_operand" "")
8846 (match_operator:SI 1 "shiftable_operator"
8847 [(match_operator:SI 2 "shiftable_operator"
8848 [(match_operator:SI 3 "shift_operator"
8849 [(match_operand:SI 4 "s_register_operand" "")
8850 (match_operand:SI 5 "reg_or_int_operand" "")])
8851 (match_operand:SI 6 "s_register_operand" "")])
8852 (match_operand:SI 7 "arm_rhs_operand" "")]))
8853 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8856 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8859 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi but also setting the condition codes (%. emits the
;; 's' suffix); compare is CC_NOOV since overflow is not meaningful here.
8862 (define_insn "*arith_shiftsi_compare0"
8863 [(set (reg:CC_NOOV CC_REGNUM)
8864 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8865 [(match_operator:SI 3 "shift_operator"
8866 [(match_operand:SI 4 "s_register_operand" "r")
8867 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8868 (match_operand:SI 2 "s_register_operand" "r")])
8870 (set (match_operand:SI 0 "s_register_operand" "=r")
8871 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8874 "%i1%.\\t%0, %2, %4%S3"
8875 [(set_attr "conds" "set")
8876 (set_attr "shift" "4")
8877 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8878 (const_string "alu_shift")
8879 (const_string "alu_shift_reg")))]
;; Compare-only form: the arithmetic result is discarded into a scratch.
8882 (define_insn "*arith_shiftsi_compare0_scratch"
8883 [(set (reg:CC_NOOV CC_REGNUM)
8884 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8885 [(match_operator:SI 3 "shift_operator"
8886 [(match_operand:SI 4 "s_register_operand" "r")
8887 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8888 (match_operand:SI 2 "s_register_operand" "r")])
8890 (clobber (match_scratch:SI 0 "=r"))]
8892 "%i1%.\\t%0, %2, %4%S3"
8893 [(set_attr "conds" "set")
8894 (set_attr "shift" "4")
8895 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8896 (const_string "alu_shift")
8897 (const_string "alu_shift_reg")))]
;; rd = rn - (rm SHIFT amount); subtraction is not commutative, so this
;; needs its own pattern rather than *arith_shiftsi.
8900 (define_insn "*sub_shiftsi"
8901 [(set (match_operand:SI 0 "s_register_operand" "=r")
8902 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8903 (match_operator:SI 2 "shift_operator"
8904 [(match_operand:SI 3 "s_register_operand" "r")
8905 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
8907 "sub%?\\t%0, %1, %3%S2"
8908 [(set_attr "predicable" "yes")
8909 (set_attr "shift" "3")
8910 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8911 (const_string "alu_shift")
8912 (const_string "alu_shift_reg")))]
;; subs variant of *sub_shiftsi: result kept and flags set.
8915 (define_insn "*sub_shiftsi_compare0"
8916 [(set (reg:CC_NOOV CC_REGNUM)
8918 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8919 (match_operator:SI 2 "shift_operator"
8920 [(match_operand:SI 3 "s_register_operand" "r")
8921 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8923 (set (match_operand:SI 0 "s_register_operand" "=r")
8924 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
8927 "sub%.\\t%0, %1, %3%S2"
8928 [(set_attr "conds" "set")
8929 (set_attr "shift" "3")
8930 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8931 (const_string "alu_shift")
8932 (const_string "alu_shift_reg")))]
;; subs variant with the result discarded into a scratch.
8935 (define_insn "*sub_shiftsi_compare0_scratch"
8936 [(set (reg:CC_NOOV CC_REGNUM)
8938 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8939 (match_operator:SI 2 "shift_operator"
8940 [(match_operand:SI 3 "s_register_operand" "r")
8941 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8943 (clobber (match_scratch:SI 0 "=r"))]
8945 "sub%.\\t%0, %1, %3%S2"
8946 [(set_attr "conds" "set")
8947 (set_attr "shift" "3")
8948 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8949 (const_string "alu_shift")
8950 (const_string "alu_shift_reg")))]
8955 (define_insn "*and_scc"
8956 [(set (match_operand:SI 0 "s_register_operand" "=r")
8957 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8958 [(match_operand 3 "cc_register" "") (const_int 0)])
8959 (match_operand:SI 2 "s_register_operand" "r")))]
8961 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8962 [(set_attr "conds" "use")
8963 (set_attr "length" "8")]
8966 (define_insn "*ior_scc"
8967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8968 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8969 [(match_operand 3 "cc_register" "") (const_int 0)])
8970 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8974 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8975 [(set_attr "conds" "use")
8976 (set_attr "length" "4,8")]
;; Store the result of a comparison (0 or 1) into a register, clobbering CC.
;; The C template special-cases comparisons against zero (LT via a logical
;; shift of the sign bit, GE via MVN+LSR, EQ via RSBS/MOVCC, NE via
;; SUBS/ADDS+MOVNE) before falling back to a generic cmp/cmn followed by a
;; conditional mov #0 / mov #1 pair.  Alternative 1 handles negated-constant
;; ("L") operands using CMN/ADDS with the %n modifier.
8979 (define_insn "*compare_scc"
8980 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8981 (match_operator:SI 1 "arm_comparison_operator"
8982 [(match_operand:SI 2 "s_register_operand" "r,r")
8983 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8984 (clobber (reg:CC CC_REGNUM))]
8987 if (operands[3] == const0_rtx)
8989 if (GET_CODE (operands[1]) == LT)
8990 return \"mov\\t%0, %2, lsr #31\";
8992 if (GET_CODE (operands[1]) == GE)
8993 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
8995 if (GET_CODE (operands[1]) == EQ)
8996 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
8999 if (GET_CODE (operands[1]) == NE)
9001 if (which_alternative == 1)
9002 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9003 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9005 if (which_alternative == 1)
9006 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9008 output_asm_insn (\"cmp\\t%2, %3\", operands);
9009 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9011 [(set_attr "conds" "clob")
9012 (set_attr "length" "12")]
;; Conditional move based on an equality test of an already-computed
;; comparison in the CC register.  The NE/EQ branches of the C template
;; simply choose which conditional mov(s) to emit; alternatives where one
;; source already matches the destination need only a single insn (length 4),
;; the general case needs two (length 8).
9015 (define_insn "*cond_move"
9016 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9017 (if_then_else:SI (match_operator 3 "equality_operator"
9018 [(match_operator 4 "arm_comparison_operator"
9019 [(match_operand 5 "cc_register" "") (const_int 0)])
9021 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9022 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9025 if (GET_CODE (operands[3]) == NE)
9027 if (which_alternative != 1)
9028 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9029 if (which_alternative != 0)
9030 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9033 if (which_alternative != 0)
9034 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9035 if (which_alternative != 1)
9036 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9039 [(set_attr "conds" "use")
9040 (set_attr "length" "4,4,8")]
;; Shiftable operation (%i5) applied to a comparison result and a register,
;; clobbering CC.  Special-cases LT against zero (operate on the sign bit via
;; lsr #31); otherwise emits cmp then a conditional fix-up chosen by the
;; operator (AND -> mov #0 on false, MINUS -> rsb #0) and finally the
;; conditional op with #1.
9043 (define_insn "*cond_arith"
9044 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9045 (match_operator:SI 5 "shiftable_operator"
9046 [(match_operator:SI 4 "arm_comparison_operator"
9047 [(match_operand:SI 2 "s_register_operand" "r,r")
9048 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9049 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9050 (clobber (reg:CC CC_REGNUM))]
9053 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9054 return \"%i5\\t%0, %1, %2, lsr #31\";
9056 output_asm_insn (\"cmp\\t%2, %3\", operands);
9057 if (GET_CODE (operands[5]) == AND)
9058 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9059 else if (GET_CODE (operands[5]) == MINUS)
9060 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9061 else if (which_alternative != 0)
9062 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9063 return \"%i5%d4\\t%0, %1, #1\";
9065 [(set_attr "conds" "clob")
9066 (set_attr "length" "12")]
;; Subtract a comparison result (0 or 1) from a register: cmp, optional mov
;; when the destination differs from operand 1, then a conditional
;; "sub ..., #1" (length 8 or 12 depending on the alternative).
9069 (define_insn "*cond_sub"
9070 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9071 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9072 (match_operator:SI 4 "arm_comparison_operator"
9073 [(match_operand:SI 2 "s_register_operand" "r,r")
9074 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9075 (clobber (reg:CC CC_REGNUM))]
9078 output_asm_insn (\"cmp\\t%2, %3\", operands);
9079 if (which_alternative != 0)
9080 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9081 return \"sub%d4\\t%0, %1, #1\";
9083 [(set_attr "conds" "clob")
9084 (set_attr "length" "8,12")]
9087 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; Combine two comparisons into one dominant CC result using a cmp/cmn pair
;; where the second compare is conditionally executed.  The 4x2 opcode table
;; is indexed by which_alternative (which operand pairs need CMN for negated
;; "L" constants) and by `swap', derived from comparison_dominates_p on
;; operators 5 and 4, which picks the evaluation order.
9088 (define_insn "*cmp_ite0"
9089 [(set (match_operand 6 "dominant_cc_register" "")
9092 (match_operator 4 "arm_comparison_operator"
9093 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9094 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9095 (match_operator:SI 5 "arm_comparison_operator"
9096 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9097 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9103 static const char * const opcodes[4][2] =
9105 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9106 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9107 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9108 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9109 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9110 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9111 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9112 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9115 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9117 return opcodes[which_alternative][swap];
9119 [(set_attr "conds" "set")
9120 (set_attr "length" "8")]
;; Variant of *cmp_ite0: same dominant-CC cmp/cmn pairing, but `swap' is
;; computed against the REVERSED condition of operator 4 (note the %D5
;; inverse-condition suffixes in the second column of the table).
9123 (define_insn "*cmp_ite1"
9124 [(set (match_operand 6 "dominant_cc_register" "")
9127 (match_operator 4 "arm_comparison_operator"
9128 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9129 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9130 (match_operator:SI 5 "arm_comparison_operator"
9131 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9132 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9138 static const char * const opcodes[4][2] =
9140 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9141 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9142 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9143 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9144 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9145 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9146 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9147 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9150 comparison_dominates_p (GET_CODE (operands[5]),
9151 reverse_condition (GET_CODE (operands[4])));
9153 return opcodes[which_alternative][swap];
9155 [(set_attr "conds" "set")
9156 (set_attr "length" "8")]
;; AND of two comparisons folded into a dominant CC register via a cmp
;; followed by a conditionally-executed cmp/cmn (same opcode table shape as
;; *cmp_ite0).  Marked non-predicable since both compares set the flags.
9159 (define_insn "*cmp_and"
9160 [(set (match_operand 6 "dominant_cc_register" "")
9163 (match_operator 4 "arm_comparison_operator"
9164 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9165 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9166 (match_operator:SI 5 "arm_comparison_operator"
9167 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9168 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9173 static const char *const opcodes[4][2] =
9175 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9176 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9177 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9178 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9179 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9180 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9181 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9182 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9185 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9187 return opcodes[which_alternative][swap];
9189 [(set_attr "conds" "set")
9190 (set_attr "predicable" "no")
9191 (set_attr "length" "8")]
;; IOR of two comparisons folded into a dominant CC register.  The second
;; compare runs only when the first FAILS (all %D4/%D5 inverse-condition
;; suffixes), so the flags end up reflecting "either comparison true".
9194 (define_insn "*cmp_ior"
9195 [(set (match_operand 6 "dominant_cc_register" "")
9198 (match_operator 4 "arm_comparison_operator"
9199 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9200 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9201 (match_operator:SI 5 "arm_comparison_operator"
9202 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9203 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9208 static const char *const opcodes[4][2] =
9210 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9211 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9212 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9213 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9214 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9215 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9216 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9217 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9220 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9222 return opcodes[which_alternative][swap];
9225 [(set_attr "conds" "set")
9226 (set_attr "length" "8")]
;; IOR of two scc values.  After reload this splits into a *cmp_ior-style
;; compare into a dominance CC register (operand 7, created in the split
;; preparation via arm_select_dominance_cc_mode with DOM_CC_X_OR_Y) followed
;; by an NE store of that flag into operand 0.
9229 (define_insn_and_split "*ior_scc_scc"
9230 [(set (match_operand:SI 0 "s_register_operand" "=r")
9231 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9232 [(match_operand:SI 1 "s_register_operand" "r")
9233 (match_operand:SI 2 "arm_add_operand" "rIL")])
9234 (match_operator:SI 6 "arm_comparison_operator"
9235 [(match_operand:SI 4 "s_register_operand" "r")
9236 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9237 (clobber (reg:CC CC_REGNUM))]
9239 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9242 "TARGET_ARM && reload_completed"
9246 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9247 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9249 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9251 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9254 [(set_attr "conds" "clob")
9255 (set_attr "length" "16")])
9257 ; If the above pattern is followed by a CMP insn, then the compare is
9258 ; redundant, since we can rework the conditional instruction that follows.
;; As *ior_scc_scc, but the pattern also captures the compare of the IOR
;; result against zero (operand 0 is the dominance CC register itself), so
;; the subsequent explicit CMP becomes redundant and is absorbed here.
9259 (define_insn_and_split "*ior_scc_scc_cmp"
9260 [(set (match_operand 0 "dominant_cc_register" "")
9261 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9262 [(match_operand:SI 1 "s_register_operand" "r")
9263 (match_operand:SI 2 "arm_add_operand" "rIL")])
9264 (match_operator:SI 6 "arm_comparison_operator"
9265 [(match_operand:SI 4 "s_register_operand" "r")
9266 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9268 (set (match_operand:SI 7 "s_register_operand" "=r")
9269 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9270 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9273 "TARGET_ARM && reload_completed"
9277 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9278 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9280 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9282 [(set_attr "conds" "set")
9283 (set_attr "length" "16")])
;; AND of two scc values; mirror of *ior_scc_scc using DOM_CC_X_AND_Y for
;; the dominance mode.  Splits after reload into a combined compare plus an
;; NE store of the dominance CC register (operand 7) into operand 0.
9285 (define_insn_and_split "*and_scc_scc"
9286 [(set (match_operand:SI 0 "s_register_operand" "=r")
9287 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9288 [(match_operand:SI 1 "s_register_operand" "r")
9289 (match_operand:SI 2 "arm_add_operand" "rIL")])
9290 (match_operator:SI 6 "arm_comparison_operator"
9291 [(match_operand:SI 4 "s_register_operand" "r")
9292 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9293 (clobber (reg:CC CC_REGNUM))]
9295 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9298 "TARGET_ARM && reload_completed
9299 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9304 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9305 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9307 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9309 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9312 [(set_attr "conds" "clob")
9313 (set_attr "length" "16")])
9315 ; If the above pattern is followed by a CMP insn, then the compare is
9316 ; redundant, since we can rework the conditional instruction that follows.
;; As *and_scc_scc, but additionally matches the compare of the AND result
;; against zero so a following CMP insn can be folded away (mirror of
;; *ior_scc_scc_cmp).
9317 (define_insn_and_split "*and_scc_scc_cmp"
9318 [(set (match_operand 0 "dominant_cc_register" "")
9319 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9320 [(match_operand:SI 1 "s_register_operand" "r")
9321 (match_operand:SI 2 "arm_add_operand" "rIL")])
9322 (match_operator:SI 6 "arm_comparison_operator"
9323 [(match_operand:SI 4 "s_register_operand" "r")
9324 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9326 (set (match_operand:SI 7 "s_register_operand" "=r")
9327 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9328 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9331 "TARGET_ARM && reload_completed"
9335 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9336 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9338 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9340 [(set_attr "conds" "set")
9341 (set_attr "length" "16")])
9343 ;; If there is no dominance in the comparison, then we can still save an
9344 ;; instruction in the AND case, since we can know that the second compare
9345 ;; need only zero the value if false (if true, then the value is already
;; AND of two scc values when no dominance relation exists between the two
;; comparisons.  Splits into: scc for the first comparison (clobbering CC),
;; a compare for the second (operands 7/8 built in the split preparation via
;; SELECT_CC_MODE / gen_rtx_COMPARE), then a conditional clear of the result
;; when the second test fails.  Earlyclobber "&r" keeps operand 0 distinct.
9347 (define_insn_and_split "*and_scc_scc_nodom"
9348 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9349 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9350 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9351 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9352 (match_operator:SI 6 "arm_comparison_operator"
9353 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9354 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9355 (clobber (reg:CC CC_REGNUM))]
9357 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9360 "TARGET_ARM && reload_completed"
9361 [(parallel [(set (match_dup 0)
9362 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9363 (clobber (reg:CC CC_REGNUM))])
9364 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9366 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9369 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9370 operands[4], operands[5]),
9372 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9374 [(set_attr "conds" "clob")
9375 (set_attr "length" "20")])
;; Split: a CC_NOOV compare of (ior (and reg ...) (comparison ...)) is
;; rewritten to store the IOR into scratch operand 4 first, then compare
;; (and scratch #1) against zero.  NOTE(review): the opening
;; "(define_split" line and several interior lines are elided from this view.
9378 [(set (reg:CC_NOOV CC_REGNUM)
9379 (compare:CC_NOOV (ior:SI
9380 (and:SI (match_operand:SI 0 "s_register_operand" "")
9382 (match_operator:SI 1 "comparison_operator"
9383 [(match_operand:SI 2 "s_register_operand" "")
9384 (match_operand:SI 3 "arm_add_operand" "")]))
9386 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9389 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9391 (set (reg:CC_NOOV CC_REGNUM)
9392 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Companion split with the IOR operands in the opposite order: comparison
;; first, (and reg ...) second.  Same rewrite — materialize the IOR into
;; scratch operand 4, then test (and scratch #1).  NOTE(review): the opening
;; "(define_split" line and several interior lines are elided from this view.
9397 [(set (reg:CC_NOOV CC_REGNUM)
9398 (compare:CC_NOOV (ior:SI
9399 (match_operator:SI 1 "comparison_operator"
9400 [(match_operand:SI 2 "s_register_operand" "")
9401 (match_operand:SI 3 "arm_add_operand" "")])
9402 (and:SI (match_operand:SI 0 "s_register_operand" "")
9405 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9408 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9410 (set (reg:CC_NOOV CC_REGNUM)
9411 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9414 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated scc: store 0 / -1 depending on the comparison.  Special cases:
;; LT against zero is an arithmetic shift of the sign bit; NE is SUBS plus a
;; conditional MVN.  General case: cmp, mov #0 on false, mvn #0 on true.
9416 (define_insn "*negscc"
9417 [(set (match_operand:SI 0 "s_register_operand" "=r")
9418 (neg:SI (match_operator 3 "arm_comparison_operator"
9419 [(match_operand:SI 1 "s_register_operand" "r")
9420 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9421 (clobber (reg:CC CC_REGNUM))]
9424 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9425 return \"mov\\t%0, %1, asr #31\";
9427 if (GET_CODE (operands[3]) == NE)
9428 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9430 output_asm_insn (\"cmp\\t%1, %2\", operands);
9431 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9432 return \"mvn%d3\\t%0, #0\";
9434 [(set_attr "conds" "clob")
9435 (set_attr "length" "12")]
;; General conditional move with its own comparison (clobbers CC).  The C
;; template first tries two-insn mask sequences for LT/GE against zero when
;; the true/false source is a register (AND/BIC with "asr #31", or the
;; flag-setting "asr #32" forms followed by movcc/movcs); otherwise it emits
;; cmn (for negated constants) or cmp, then up to two conditional movs.
9438 (define_insn "movcond"
9439 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9441 (match_operator 5 "arm_comparison_operator"
9442 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9443 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9444 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9445 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9446 (clobber (reg:CC CC_REGNUM))]
9449 if (GET_CODE (operands[5]) == LT
9450 && (operands[4] == const0_rtx))
9452 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9454 if (operands[2] == const0_rtx)
9455 return \"and\\t%0, %1, %3, asr #31\";
9456 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9458 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9460 if (operands[1] == const0_rtx)
9461 return \"bic\\t%0, %2, %3, asr #31\";
9462 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9464 /* The only case that falls through to here is when both ops 1 & 2
9468 if (GET_CODE (operands[5]) == GE
9469 && (operands[4] == const0_rtx))
9471 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9473 if (operands[2] == const0_rtx)
9474 return \"bic\\t%0, %1, %3, asr #31\";
9475 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9477 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9479 if (operands[1] == const0_rtx)
9480 return \"and\\t%0, %2, %3, asr #31\";
9481 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9483 /* The only case that falls through to here is when both ops 1 & 2
9486 if (GET_CODE (operands[4]) == CONST_INT
9487 && !const_ok_for_arm (INTVAL (operands[4])))
9488 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9490 output_asm_insn (\"cmp\\t%3, %4\", operands);
9491 if (which_alternative != 0)
9492 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9493 if (which_alternative != 1)
9494 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9497 [(set_attr "conds" "clob")
9498 (set_attr "length" "8,8,12")]
9501 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if-then-else of (plus reg, addend) versus a plain value, with the
;; comparison done inline (clobbers CC).  Output template is elided in this
;; view; attributes show the usual 8/12-byte compare+conditional sequence.
9503 (define_insn "*ifcompare_plus_move"
9504 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9505 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9506 [(match_operand:SI 4 "s_register_operand" "r,r")
9507 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9509 (match_operand:SI 2 "s_register_operand" "r,r")
9510 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9511 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9512 (clobber (reg:CC CC_REGNUM))]
9515 [(set_attr "conds" "clob")
9516 (set_attr "length" "8,12")]
;; Same selection as *ifcompare_plus_move but the flags are already set
;; (CC register input): conditional add/sub (using sub with %n3 for the
;; "L" negated-constant alternatives), plus a conditional mov of the
;; alternate value when operand 1 is not already in the destination.
9519 (define_insn "*if_plus_move"
9520 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9522 (match_operator 4 "arm_comparison_operator"
9523 [(match_operand 5 "cc_register" "") (const_int 0)])
9525 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9526 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9527 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9531 sub%d4\\t%0, %2, #%n3
9532 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9533 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9534 [(set_attr "conds" "use")
9535 (set_attr "length" "4,4,8,8")
9536 (set_attr "type" "*,*,*,*")]
;; Mirror of *ifcompare_plus_move with the plus in the ELSE arm; comparison
;; done inline, CC clobbered.  Output template elided in this view.
9539 (define_insn "*ifcompare_move_plus"
9540 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9541 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9542 [(match_operand:SI 4 "s_register_operand" "r,r")
9543 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9544 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9546 (match_operand:SI 2 "s_register_operand" "r,r")
9547 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9548 (clobber (reg:CC CC_REGNUM))]
9551 [(set_attr "conds" "clob")
9552 (set_attr "length" "8,12")]
;; Flags-already-set form with the plus in the ELSE arm: inverse-condition
;; add/sub (%D4), optionally preceded by a conditional mov of operand 1.
9555 (define_insn "*if_move_plus"
9556 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9558 (match_operator 4 "arm_comparison_operator"
9559 [(match_operand 5 "cc_register" "") (const_int 0)])
9560 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9562 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9563 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9567 sub%D4\\t%0, %2, #%n3
9568 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9569 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9570 [(set_attr "conds" "use")
9571 (set_attr "length" "4,4,8,8")
9572 (set_attr "type" "*,*,*,*")]
;; Select between two shiftable-operator results with an inline comparison
;; (CC clobbered, 12 bytes).  Output template elided in this view.
9575 (define_insn "*ifcompare_arith_arith"
9576 [(set (match_operand:SI 0 "s_register_operand" "=r")
9577 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9578 [(match_operand:SI 5 "s_register_operand" "r")
9579 (match_operand:SI 6 "arm_add_operand" "rIL")])
9580 (match_operator:SI 8 "shiftable_operator"
9581 [(match_operand:SI 1 "s_register_operand" "r")
9582 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9583 (match_operator:SI 7 "shiftable_operator"
9584 [(match_operand:SI 3 "s_register_operand" "r")
9585 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9586 (clobber (reg:CC CC_REGNUM))]
9589 [(set_attr "conds" "clob")
9590 (set_attr "length" "12")]
;; Flags-already-set version: one conditionally-executed shiftable op per
;; arm (%I6 on true, %I7 on the inverse condition), 8 bytes total.
9593 (define_insn "*if_arith_arith"
9594 [(set (match_operand:SI 0 "s_register_operand" "=r")
9595 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9596 [(match_operand 8 "cc_register" "") (const_int 0)])
9597 (match_operator:SI 6 "shiftable_operator"
9598 [(match_operand:SI 1 "s_register_operand" "r")
9599 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9600 (match_operator:SI 7 "shiftable_operator"
9601 [(match_operand:SI 3 "s_register_operand" "r")
9602 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9604 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9605 [(set_attr "conds" "use")
9606 (set_attr "length" "8")]
;; Select (shiftable op) vs a plain value with an inline comparison.  When
;; comparing against zero with LT/GE, a non-AND identity operator, and all
;; register operands (with operand 1 aliasing operand 4 but distinct from
;; the destination), a two-insn AND/BIC "asr #31" mask sequence is used;
;; otherwise cmn/cmp followed by the conditional op and an optional mov.
9609 (define_insn "*ifcompare_arith_move"
9610 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9611 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9612 [(match_operand:SI 2 "s_register_operand" "r,r")
9613 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9614 (match_operator:SI 7 "shiftable_operator"
9615 [(match_operand:SI 4 "s_register_operand" "r,r")
9616 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9617 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9618 (clobber (reg:CC CC_REGNUM))]
9621 /* If we have an operation where (op x 0) is the identity operation and
9622 the conditional operator is LT or GE and we are comparing against zero and
9623 everything is in registers then we can do this in two instructions.  */
9624 if (operands[3] == const0_rtx
9625 && GET_CODE (operands[7]) != AND
9626 && GET_CODE (operands[5]) == REG
9627 && GET_CODE (operands[1]) == REG
9628 && REGNO (operands[1]) == REGNO (operands[4])
9629 && REGNO (operands[4]) != REGNO (operands[0]))
9631 if (GET_CODE (operands[6]) == LT)
9632 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9633 else if (GET_CODE (operands[6]) == GE)
9634 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9636 if (GET_CODE (operands[3]) == CONST_INT
9637 && !const_ok_for_arm (INTVAL (operands[3])))
9638 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9640 output_asm_insn (\"cmp\\t%2, %3\", operands);
9641 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9642 if (which_alternative != 0)
9643 return \"mov%D6\\t%0, %1\";
9646 [(set_attr "conds" "clob")
9647 (set_attr "length" "8,12")]
;; Flags-already-set counterpart: conditional shiftable op, plus an
;; inverse-condition mov when operand 1 is not already the destination.
9650 (define_insn "*if_arith_move"
9651 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9652 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9653 [(match_operand 6 "cc_register" "") (const_int 0)])
9654 (match_operator:SI 5 "shiftable_operator"
9655 [(match_operand:SI 2 "s_register_operand" "r,r")
9656 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9657 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9661 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9662 [(set_attr "conds" "use")
9663 (set_attr "length" "4,8")
9664 (set_attr "type" "*,*")]
;; Mirror of *ifcompare_arith_move with the shiftable op in the ELSE arm.
;; Note the LT/GE roles swap (GE -> AND mask, LT -> BIC mask) because the
;; op is taken on the false path.  Falls back to cmn/cmp, a conditional mov
;; of operand 1, then the inverse-condition shiftable op.
9667 (define_insn "*ifcompare_move_arith"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9669 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9670 [(match_operand:SI 4 "s_register_operand" "r,r")
9671 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9672 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9673 (match_operator:SI 7 "shiftable_operator"
9674 [(match_operand:SI 2 "s_register_operand" "r,r")
9675 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9676 (clobber (reg:CC CC_REGNUM))]
9679 /* If we have an operation where (op x 0) is the identity operation and
9680 the conditional operator is LT or GE and we are comparing against zero and
9681 everything is in registers then we can do this in two instructions */
9682 if (operands[5] == const0_rtx
9683 && GET_CODE (operands[7]) != AND
9684 && GET_CODE (operands[3]) == REG
9685 && GET_CODE (operands[1]) == REG
9686 && REGNO (operands[1]) == REGNO (operands[2])
9687 && REGNO (operands[2]) != REGNO (operands[0]))
9689 if (GET_CODE (operands[6]) == GE)
9690 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9691 else if (GET_CODE (operands[6]) == LT)
9692 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9695 if (GET_CODE (operands[5]) == CONST_INT
9696 && !const_ok_for_arm (INTVAL (operands[5])))
9697 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9699 output_asm_insn (\"cmp\\t%4, %5\", operands);
9701 if (which_alternative != 0)
9702 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9703 return \"%I7%D6\\t%0, %2, %3\";
9705 [(set_attr "conds" "clob")
9706 (set_attr "length" "8,12")]
;; Flags-already-set mirror: inverse-condition shiftable op, preceded by a
;; conditional mov when operand 1 is not already the destination.
9709 (define_insn "*if_move_arith"
9710 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9712 (match_operator 4 "arm_comparison_operator"
9713 [(match_operand 6 "cc_register" "") (const_int 0)])
9714 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9715 (match_operator:SI 5 "shiftable_operator"
9716 [(match_operand:SI 2 "s_register_operand" "r,r")
9717 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9721 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9722 [(set_attr "conds" "use")
9723 (set_attr "length" "4,8")
9724 (set_attr "type" "*,*")]
;; Select a value vs the bitwise NOT of a register, comparison inline,
;; CC clobbered.  Output template elided in this view.
9727 (define_insn "*ifcompare_move_not"
9728 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9730 (match_operator 5 "arm_comparison_operator"
9731 [(match_operand:SI 3 "s_register_operand" "r,r")
9732 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9733 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9735 (match_operand:SI 2 "s_register_operand" "r,r"))))
9736 (clobber (reg:CC CC_REGNUM))]
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "8,12")]
;; Flags-already-set form: inverse-condition MVN for the NOT arm, with a
;; conditional mov (or mvn with %B1 for "K" inverted constants) for the
;; plain value when it is not already in the destination.
9743 (define_insn "*if_move_not"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9746 (match_operator 4 "arm_comparison_operator"
9747 [(match_operand 3 "cc_register" "") (const_int 0)])
9748 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9749 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9753 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9754 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9755 [(set_attr "conds" "use")
9756 (set_attr "length" "4,8,8")]
;; Mirror with the NOT in the THEN arm; comparison inline, CC clobbered.
;; Output template elided in this view.
9759 (define_insn "*ifcompare_not_move"
9760 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9762 (match_operator 5 "arm_comparison_operator"
9763 [(match_operand:SI 3 "s_register_operand" "r,r")
9764 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9766 (match_operand:SI 2 "s_register_operand" "r,r"))
9767 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9768 (clobber (reg:CC CC_REGNUM))]
9771 [(set_attr "conds" "clob")
9772 (set_attr "length" "8,12")]
;; Flags-already-set mirror of *if_move_not: conditional MVN on the true
;; path, mov/mvn of the plain value on the inverse condition.
9775 (define_insn "*if_not_move"
9776 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9778 (match_operator 4 "arm_comparison_operator"
9779 [(match_operand 3 "cc_register" "") (const_int 0)])
9780 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9781 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9785 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9786 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9787 [(set_attr "conds" "use")
9788 (set_attr "length" "4,8,8")]
;; Select a shifted register vs a plain value, comparison inline, CC
;; clobbered.  Output template elided in this view.
9791 (define_insn "*ifcompare_shift_move"
9792 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9794 (match_operator 6 "arm_comparison_operator"
9795 [(match_operand:SI 4 "s_register_operand" "r,r")
9796 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9797 (match_operator:SI 7 "shift_operator"
9798 [(match_operand:SI 2 "s_register_operand" "r,r")
9799 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9800 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9801 (clobber (reg:CC CC_REGNUM))]
9804 [(set_attr "conds" "clob")
9805 (set_attr "length" "8,12")]
;; Flags-already-set form: conditional mov with shift (%S4) on the true
;; path, mov/mvn of the plain value on the inverse condition.  The type
;; attribute distinguishes immediate-shift from register-shift forms.
9808 (define_insn "*if_shift_move"
9809 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9811 (match_operator 5 "arm_comparison_operator"
9812 [(match_operand 6 "cc_register" "") (const_int 0)])
9813 (match_operator:SI 4 "shift_operator"
9814 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9815 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9816 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9820 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9821 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9822 [(set_attr "conds" "use")
9823 (set_attr "shift" "2")
9824 (set_attr "length" "4,8,8")
9825 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9826 (const_string "alu_shift")
9827 (const_string "alu_shift_reg")))]
;; Mirror with the shifted register in the ELSE arm; comparison inline,
;; CC clobbered.  Output template elided in this view.
9830 (define_insn "*ifcompare_move_shift"
9831 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9833 (match_operator 6 "arm_comparison_operator"
9834 [(match_operand:SI 4 "s_register_operand" "r,r")
9835 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9836 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9837 (match_operator:SI 7 "shift_operator"
9838 [(match_operand:SI 2 "s_register_operand" "r,r")
9839 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9840 (clobber (reg:CC CC_REGNUM))]
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "8,12")]
;; Flags-already-set mirror of *if_shift_move: shifted mov on the inverse
;; condition (%D5), plain value mov/mvn on the true path.
9847 (define_insn "*if_move_shift"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9850 (match_operator 5 "arm_comparison_operator"
9851 [(match_operand 6 "cc_register" "") (const_int 0)])
9852 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9853 (match_operator:SI 4 "shift_operator"
9854 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9855 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9859 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9860 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9861 [(set_attr "conds" "use")
9862 (set_attr "shift" "2")
9863 (set_attr "length" "4,8,8")
9864 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9865 (const_string "alu_shift")
9866 (const_string "alu_shift_reg")))]
;; Select between two shifted registers, comparison inline, CC clobbered
;; (12 bytes).  Output template elided in this view.
9869 (define_insn "*ifcompare_shift_shift"
9870 [(set (match_operand:SI 0 "s_register_operand" "=r")
9872 (match_operator 7 "arm_comparison_operator"
9873 [(match_operand:SI 5 "s_register_operand" "r")
9874 (match_operand:SI 6 "arm_add_operand" "rIL")])
9875 (match_operator:SI 8 "shift_operator"
9876 [(match_operand:SI 1 "s_register_operand" "r")
9877 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9878 (match_operator:SI 9 "shift_operator"
9879 [(match_operand:SI 3 "s_register_operand" "r")
9880 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9881 (clobber (reg:CC CC_REGNUM))]
9884 [(set_attr "conds" "clob")
9885 (set_attr "length" "12")]
;; Flags-already-set version: one conditional shifted mov per arm.  The
;; type attribute is alu_shift only when BOTH shift amounts are immediates.
9888 (define_insn "*if_shift_shift"
9889 [(set (match_operand:SI 0 "s_register_operand" "=r")
9891 (match_operator 5 "arm_comparison_operator"
9892 [(match_operand 8 "cc_register" "") (const_int 0)])
9893 (match_operator:SI 6 "shift_operator"
9894 [(match_operand:SI 1 "s_register_operand" "r")
9895 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9896 (match_operator:SI 7 "shift_operator"
9897 [(match_operand:SI 3 "s_register_operand" "r")
9898 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9900 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9901 [(set_attr "conds" "use")
9902 (set_attr "shift" "1")
9903 (set_attr "length" "8")
9904 (set (attr "type") (if_then_else
9905 (and (match_operand 2 "const_int_operand" "")
9906 (match_operand 4 "const_int_operand" ""))
9907 (const_string "alu_shift")
9908 (const_string "alu_shift_reg")))]
;; Select NOT(reg) vs a shiftable-op result, comparison inline, CC
;; clobbered (12 bytes).  Output template elided in this view.
9911 (define_insn "*ifcompare_not_arith"
9912 [(set (match_operand:SI 0 "s_register_operand" "=r")
9914 (match_operator 6 "arm_comparison_operator"
9915 [(match_operand:SI 4 "s_register_operand" "r")
9916 (match_operand:SI 5 "arm_add_operand" "rIL")])
9917 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9918 (match_operator:SI 7 "shiftable_operator"
9919 [(match_operand:SI 2 "s_register_operand" "r")
9920 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9921 (clobber (reg:CC CC_REGNUM))]
9924 [(set_attr "conds" "clob")
9925 (set_attr "length" "12")]
;; Flags-already-set version: conditional MVN on the true path, inverse-
;; condition shiftable op (%I6) on the false path.
9928 (define_insn "*if_not_arith"
9929 [(set (match_operand:SI 0 "s_register_operand" "=r")
9931 (match_operator 5 "arm_comparison_operator"
9932 [(match_operand 4 "cc_register" "") (const_int 0)])
9933 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9934 (match_operator:SI 6 "shiftable_operator"
9935 [(match_operand:SI 2 "s_register_operand" "r")
9936 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9938 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9939 [(set_attr "conds" "use")
9940 (set_attr "length" "8")]
;; Mirror with the NOT in the ELSE arm; comparison inline, CC clobbered.
;; Output template elided in this view.
9943 (define_insn "*ifcompare_arith_not"
9944 [(set (match_operand:SI 0 "s_register_operand" "=r")
9946 (match_operator 6 "arm_comparison_operator"
9947 [(match_operand:SI 4 "s_register_operand" "r")
9948 (match_operand:SI 5 "arm_add_operand" "rIL")])
9949 (match_operator:SI 7 "shiftable_operator"
9950 [(match_operand:SI 2 "s_register_operand" "r")
9951 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9952 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9953 (clobber (reg:CC CC_REGNUM))]
9956 [(set_attr "conds" "clob")
9957 (set_attr "length" "12")]
;; Flags-already-set mirror of *if_not_arith: inverse-condition MVN, then
;; the shiftable op on the true condition.
9960 (define_insn "*if_arith_not"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r")
9963 (match_operator 5 "arm_comparison_operator"
9964 [(match_operand 4 "cc_register" "") (const_int 0)])
9965 (match_operator:SI 6 "shiftable_operator"
9966 [(match_operand:SI 2 "s_register_operand" "r")
9967 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9968 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9970 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9971 [(set_attr "conds" "use")
9972 (set_attr "length" "8")]
;; Select NEG(reg) vs a plain value, comparison inline, CC clobbered.
;; Output template elided in this view.
9975 (define_insn "*ifcompare_neg_move"
9976 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9978 (match_operator 5 "arm_comparison_operator"
9979 [(match_operand:SI 3 "s_register_operand" "r,r")
9980 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9981 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9982 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9983 (clobber (reg:CC CC_REGNUM))]
9986 [(set_attr "conds" "clob")
9987 (set_attr "length" "8,12")]
9990 (define_insn "*if_neg_move"
9991 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9993 (match_operator 4 "arm_comparison_operator"
9994 [(match_operand 3 "cc_register" "") (const_int 0)])
9995 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9996 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10000 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10001 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10002 [(set_attr "conds" "use")
10003 (set_attr "length" "4,8,8")]
10006 (define_insn "*ifcompare_move_neg"
10007 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10009 (match_operator 5 "arm_comparison_operator"
10010 [(match_operand:SI 3 "s_register_operand" "r,r")
10011 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10012 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10013 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10014 (clobber (reg:CC CC_REGNUM))]
10017 [(set_attr "conds" "clob")
10018 (set_attr "length" "8,12")]
10021 (define_insn "*if_move_neg"
10022 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10024 (match_operator 4 "arm_comparison_operator"
10025 [(match_operand 3 "cc_register" "") (const_int 0)])
10026 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10027 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10030 rsb%D4\\t%0, %2, #0
10031 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10032 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10033 [(set_attr "conds" "use")
10034 (set_attr "length" "4,8,8")]
;; Combine two loads from adjacent memory locations with a shiftable
;; operator.  The output code below tries to cover both words with a
;; single LDM (ib/ia/da depending on the offsets), adjusting the base
;; into the scratch register when necessary, and falls back to two LDRs
;; when the offset is outside add-immediate range.
10037 (define_insn "*arith_adjacentmem"
10038 [(set (match_operand:SI 0 "s_register_operand" "=r")
10039 (match_operator:SI 1 "shiftable_operator"
10040 [(match_operand:SI 2 "memory_operand" "m")
10041 (match_operand:SI 3 "memory_operand" "m")]))
10042 (clobber (match_scratch:SI 4 "=r"))]
10043 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10049 HOST_WIDE_INT val1 = 0, val2 = 0;
10051 if (REGNO (operands[0]) > REGNO (operands[4]))
10053 ldm[1] = operands[4];
10054 ldm[2] = operands[0];
10058 ldm[1] = operands[0];
10059 ldm[2] = operands[4];
10062 base_reg = XEXP (operands[2], 0);
10064 if (!REG_P (base_reg))
10066 val1 = INTVAL (XEXP (base_reg, 1));
10067 base_reg = XEXP (base_reg, 0);
10070 if (!REG_P (XEXP (operands[3], 0)))
10071 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10073 arith[0] = operands[0];
10074 arith[3] = operands[1];
10088 if (val1 !=0 && val2 != 0)
10092 if (val1 == 4 || val2 == 4)
10093 /* Other val must be 8, since we know they are adjacent and neither
10095 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10096 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10098 ldm[0] = ops[0] = operands[4];
10100 ops[2] = GEN_INT (val1);
10101 output_add_immediate (ops);
10103 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10105 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10109 /* Offset is out of range for a single add, so use two ldr.  */
10112 ops[2] = GEN_INT (val1);
10113 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10115 ops[2] = GEN_INT (val2);
10116 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10119 else if (val1 != 0)
10122 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10124 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10129 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10131 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10133 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10136 [(set_attr "length" "12")
10137 (set_attr "predicable" "yes")
10138 (set_attr "type" "load1")]
10141 ; This pattern is never tried by combine, so do it as a peephole
;; Fold a register move followed by a compare-with-zero of the source
;; into a single parallel (a flag-setting move).
10144 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10145 (match_operand:SI 1 "arm_general_register_operand" ""))
10146 (set (reg:CC CC_REGNUM)
10147 (compare:CC (match_dup 1) (const_int 0)))]
10149 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10150 (set (match_dup 0) (match_dup 1))])]
10154 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10155 ; reversed, check that the memory references aren't volatile.
;; Four consecutive loads -> candidate LDM (emitted by emit_ldm_seq).
10158 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10159 (match_operand:SI 4 "memory_operand" "m"))
10160 (set (match_operand:SI 1 "s_register_operand" "=rk")
10161 (match_operand:SI 5 "memory_operand" "m"))
10162 (set (match_operand:SI 2 "s_register_operand" "=rk")
10163 (match_operand:SI 6 "memory_operand" "m"))
10164 (set (match_operand:SI 3 "s_register_operand" "=rk")
10165 (match_operand:SI 7 "memory_operand" "m"))]
10166 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10168 return emit_ldm_seq (operands, 4);
;; Three consecutive loads -> candidate LDM.
10173 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10174 (match_operand:SI 3 "memory_operand" "m"))
10175 (set (match_operand:SI 1 "s_register_operand" "=rk")
10176 (match_operand:SI 4 "memory_operand" "m"))
10177 (set (match_operand:SI 2 "s_register_operand" "=rk")
10178 (match_operand:SI 5 "memory_operand" "m"))]
10179 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10181 return emit_ldm_seq (operands, 3);
;; Two consecutive loads -> candidate LDM.
10186 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10187 (match_operand:SI 2 "memory_operand" "m"))
10188 (set (match_operand:SI 1 "s_register_operand" "=rk")
10189 (match_operand:SI 3 "memory_operand" "m"))]
10190 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10192 return emit_ldm_seq (operands, 2);
;; Four consecutive stores -> candidate STM (emitted by emit_stm_seq).
10197 [(set (match_operand:SI 4 "memory_operand" "=m")
10198 (match_operand:SI 0 "s_register_operand" "rk"))
10199 (set (match_operand:SI 5 "memory_operand" "=m")
10200 (match_operand:SI 1 "s_register_operand" "rk"))
10201 (set (match_operand:SI 6 "memory_operand" "=m")
10202 (match_operand:SI 2 "s_register_operand" "rk"))
10203 (set (match_operand:SI 7 "memory_operand" "=m")
10204 (match_operand:SI 3 "s_register_operand" "rk"))]
10205 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10207 return emit_stm_seq (operands, 4);
;; Three consecutive stores -> candidate STM.
10212 [(set (match_operand:SI 3 "memory_operand" "=m")
10213 (match_operand:SI 0 "s_register_operand" "rk"))
10214 (set (match_operand:SI 4 "memory_operand" "=m")
10215 (match_operand:SI 1 "s_register_operand" "rk"))
10216 (set (match_operand:SI 5 "memory_operand" "=m")
10217 (match_operand:SI 2 "s_register_operand" "rk"))]
10218 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10220 return emit_stm_seq (operands, 3);
;; Two consecutive stores -> candidate STM.
10225 [(set (match_operand:SI 2 "memory_operand" "=m")
10226 (match_operand:SI 0 "s_register_operand" "rk"))
10227 (set (match_operand:SI 3 "memory_operand" "=m")
10228 (match_operand:SI 1 "s_register_operand" "rk"))]
10229 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10231 return emit_stm_seq (operands, 2);
;; Split an AND of (GE reg 0) with the negation of another comparison:
;; the GE half is computed branchlessly as ~(reg >> 31) in the scratch.
10236 [(set (match_operand:SI 0 "s_register_operand" "")
10237 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10239 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10240 [(match_operand:SI 3 "s_register_operand" "")
10241 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10242 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10244 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10245 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10250 ;; This split can be used because CC_Z mode implies that the following
10251 ;; branch will be an equality, or an unsigned inequality, so the sign
10252 ;; extension is not needed.
;; Rewrites a shifted QImode compare as a zero-extended load plus a
;; compare against the constant shifted down by 24 bits.
10255 [(set (reg:CC_Z CC_REGNUM)
10257 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10259 (match_operand 1 "const_int_operand" "")))
10260 (clobber (match_scratch:SI 2 ""))]
10262 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10263 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10264 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10265 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10267 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10270 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Prologue/epilogue expanders: the actual instruction sequences are
;; emitted from C (arm_expand_prologue / thumb1_expand_prologue etc.).
10272 (define_expand "prologue"
10273 [(clobber (const_int 0))]
10276 arm_expand_prologue ();
10278 thumb1_expand_prologue ();
;; For eh_return, r2 holds the stack adjustment and must be kept live
;; through the epilogue (gen_prologue_use below).
10283 (define_expand "epilogue"
10284 [(clobber (const_int 0))]
10287 if (crtl->calls_eh_return)
10288 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10290 thumb1_expand_epilogue ();
10291 else if (USE_RETURN_INSN (FALSE))
10293 emit_jump_insn (gen_return ());
10296 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10298 gen_rtx_RETURN (VOIDmode)),
10299 VUNSPEC_EPILOGUE));
10304 ;; Note - although unspec_volatile's USE all hard registers,
10305 ;; USEs are ignored after reload has completed.  Thus we need
10306 ;; to add an unspec of the link register to ensure that flow
10307 ;; does not think that it is unused by the sibcall branch that
10308 ;; will replace the standard function epilogue.
10309 (define_insn "sibcall_epilogue"
10310 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10311 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10314 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10315 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10316 return arm_output_epilogue (next_nonnote_insn (insn));
10318 ;; Length is absolute worst case
10319 [(set_attr "length" "44")
10320 (set_attr "type" "block")
10321 ;; We don't clobber the conditions, but the potential length of this
10322 ;; operation is sufficient to make conditionalizing the sequence
10323 ;; unlikely to be profitable.
10324 (set_attr "conds" "clob")]
;; Out-of-line epilogue body for ARM (arm_output_epilogue) or Thumb-1.
10327 (define_insn "*epilogue_insns"
10328 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10332 return arm_output_epilogue (NULL);
10333 else /* TARGET_THUMB1 */
10334 return thumb_unexpanded_epilogue ();
10336 ; Length is absolute worst case
10337 [(set_attr "length" "44")
10338 (set_attr "type" "block")
10339 ;; We don't clobber the conditions, but the potential length of this
10340 ;; operation is sufficient to make conditionalizing the sequence
10341 ;; unlikely to be profitable.
10342 (set_attr "conds" "clob")]
;; Exception-handler return: operand 1 is the stack adjustment, operand 2
;; the new return address (copied into r2 if not already there).
10345 (define_expand "eh_epilogue"
10346 [(use (match_operand:SI 0 "register_operand" ""))
10347 (use (match_operand:SI 1 "register_operand" ""))
10348 (use (match_operand:SI 2 "register_operand" ""))]
10352 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10353 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10355 rtx ra = gen_rtx_REG (Pmode, 2);
10357 emit_move_insn (ra, operands[2]);
10360 /* This is a hack -- we may have crystallized the function type too
10362 cfun->machine->func_type = 0;
10366 ;; This split is only used during output to reduce the number of patterns
10367 ;; that need assembler instructions adding to them.  We allowed the setting
10368 ;; of the conditions to be implicit during rtl generation so that
10369 ;; the conditional compare patterns would work.  However this conflicts to
10370 ;; some extent with the conditional data operations, so we have to split them
10373 ;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
10374 ;; conditional execution sufficient?
;; Emit the compare, then a conditional move of the else-arm guarded by
;; the reversed condition (built into operand 7 below).
10377 [(set (match_operand:SI 0 "s_register_operand" "")
10378 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10379 [(match_operand 2 "" "") (match_operand 3 "" "")])
10381 (match_operand 4 "" "")))
10382 (clobber (reg:CC CC_REGNUM))]
10383 "TARGET_ARM && reload_completed"
10384 [(set (match_dup 5) (match_dup 6))
10385 (cond_exec (match_dup 7)
10386 (set (match_dup 0) (match_dup 4)))]
10389 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10390 operands[2], operands[3]);
10391 enum rtx_code rc = GET_CODE (operands[1]);
10393 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10394 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; FP compares need the unordered-aware reversal.
10395 if (mode == CCFPmode || mode == CCFPEmode)
10396 rc = reverse_condition_maybe_unordered (rc);
10398 rc = reverse_condition (rc);
10400 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Emit the compare, then a conditional move of the then-arm guarded by
;; the original condition.
10405 [(set (match_operand:SI 0 "s_register_operand" "")
10406 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10407 [(match_operand 2 "" "") (match_operand 3 "" "")])
10408 (match_operand 4 "" "")
10410 (clobber (reg:CC CC_REGNUM))]
10411 "TARGET_ARM && reload_completed"
10412 [(set (match_dup 5) (match_dup 6))
10413 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10414 (set (match_dup 0) (match_dup 4)))]
10417 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10418 operands[2], operands[3]);
10420 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10421 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Both arms conditional: one move on the condition, one on its reverse.
10426 [(set (match_operand:SI 0 "s_register_operand" "")
10427 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10428 [(match_operand 2 "" "") (match_operand 3 "" "")])
10429 (match_operand 4 "" "")
10430 (match_operand 5 "" "")))
10431 (clobber (reg:CC CC_REGNUM))]
10432 "TARGET_ARM && reload_completed"
10433 [(set (match_dup 6) (match_dup 7))
10434 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10435 (set (match_dup 0) (match_dup 4)))
10436 (cond_exec (match_dup 8)
10437 (set (match_dup 0) (match_dup 5)))]
10440 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10441 operands[2], operands[3]);
10442 enum rtx_code rc = GET_CODE (operands[1]);
10444 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10445 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10446 if (mode == CCFPmode || mode == CCFPEmode)
10447 rc = reverse_condition_maybe_unordered (rc);
10449 rc = reverse_condition (rc);
10451 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; As above, but the else-arm is a conditional MVN (not:SI) on the
;; reversed condition.
10456 [(set (match_operand:SI 0 "s_register_operand" "")
10457 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10458 [(match_operand:SI 2 "s_register_operand" "")
10459 (match_operand:SI 3 "arm_add_operand" "")])
10460 (match_operand:SI 4 "arm_rhs_operand" "")
10462 (match_operand:SI 5 "s_register_operand" ""))))
10463 (clobber (reg:CC CC_REGNUM))]
10464 "TARGET_ARM && reload_completed"
10465 [(set (match_dup 6) (match_dup 7))
10466 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10467 (set (match_dup 0) (match_dup 4)))
10468 (cond_exec (match_dup 8)
10469 (set (match_dup 0) (not:SI (match_dup 5))))]
10472 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10473 operands[2], operands[3]);
10474 enum rtx_code rc = GET_CODE (operands[1]);
10476 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10477 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10478 if (mode == CCFPmode || mode == CCFPEmode)
10479 rc = reverse_condition_maybe_unordered (rc);
10481 rc = reverse_condition (rc);
10483 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional mov-or-mvn selection on an existing CC value.
10487 (define_insn "*cond_move_not"
10488 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10489 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10490 [(match_operand 3 "cc_register" "") (const_int 0)])
10491 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10493 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10497 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10498 [(set_attr "conds" "use")
10499 (set_attr "length" "4,8")]
10502 ;; The next two patterns occur when an AND operation is followed by a
10503 ;; scc insn sequence
;; Sign-extract of a single bit: AND with the bit mask (flag-setting),
;; then materialise -1 when the bit was set.
10505 (define_insn "*sign_extract_onebit"
10506 [(set (match_operand:SI 0 "s_register_operand" "=r")
10507 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10509 (match_operand:SI 2 "const_int_operand" "n")))
10510 (clobber (reg:CC CC_REGNUM))]
10513 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10514 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10515 return \"mvnne\\t%0, #0\";
10517 [(set_attr "conds" "clob")
10518 (set_attr "length" "8")]
;; Negated form: TST the bit, then produce -1 (bit clear) or 0 (bit set).
10521 (define_insn "*not_signextract_onebit"
10522 [(set (match_operand:SI 0 "s_register_operand" "=r")
10524 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10526 (match_operand:SI 2 "const_int_operand" "n"))))
10527 (clobber (reg:CC CC_REGNUM))]
10530 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10531 output_asm_insn (\"tst\\t%1, %2\", operands);
10532 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10533 return \"movne\\t%0, #0\";
10535 [(set_attr "conds" "clob")
10536 (set_attr "length" "12")]
10538 ;; ??? The above patterns need auditing for Thumb-2
10540 ;; Push multiple registers to the stack.  Registers are in parallel (use ...)
10541 ;; expressions.  For simplicity, the first register is also in the unspec
;; Builds the "stmfd sp!, {...}" (ARM) or "push {...}" (Thumb) register
;; list string at output time; a single register uses a plain STR on ARM.
10543 (define_insn "*push_multi"
10544 [(match_parallel 2 "multi_register_push"
10545 [(set (match_operand:BLK 0 "memory_operand" "=m")
10546 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10547 UNSPEC_PUSH_MULT))])]
10551 int num_saves = XVECLEN (operands[2], 0);
10553 /* For the StrongARM at least it is faster to
10554 use STR to store only a single register.
10555 In Thumb mode always use push, and the assembler will pick
10556 something appropriate.  */
10557 if (num_saves == 1 && TARGET_ARM)
10558 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10565 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10567 strcpy (pattern, \"push\\t{%1\");
10569 for (i = 1; i < num_saves; i++)
10571 strcat (pattern, \", %|\");
10573 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10576 strcat (pattern, \"}\");
10577 output_asm_insn (pattern, operands);
10582 [(set_attr "type" "store4")]
;; Zero-length scheduling barrier tying stack accesses together
;; (length 0, so no code is emitted for it).
10585 (define_insn "stack_tie"
10586 [(set (mem:BLK (scratch))
10587 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10588 (match_operand:SI 1 "s_register_operand" "rk")]
10592 [(set_attr "length" "0")]
10595 ;; Similarly for the floating point registers
;; FPA multi-register push via SFMFD; count comes from the parallel.
10596 (define_insn "*push_fp_multi"
10597 [(match_parallel 2 "multi_register_push"
10598 [(set (match_operand:BLK 0 "memory_operand" "=m")
10599 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10600 UNSPEC_PUSH_MULT))])]
10601 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10606 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10607 output_asm_insn (pattern, operands);
10610 [(set_attr "type" "f_store")]
10613 ;; Special patterns for dealing with the constant pool
;; Align the pool on a 32-bit boundary.
10615 (define_insn "align_4"
10616 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10619 assemble_align (32);
;; Align the pool on a 64-bit boundary.
10624 (define_insn "align_8"
10625 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10628 assemble_align (64);
;; Marks the end of a constant-table region.
10633 (define_insn "consttable_end"
10634 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10637 making_const_table = FALSE;
;; 1-byte pool entry, padded out to 4 bytes.
10642 (define_insn "consttable_1"
10643 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10646 making_const_table = TRUE;
10647 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10648 assemble_zeros (3);
10651 [(set_attr "length" "4")]
;; 2-byte pool entry, padded out to 4 bytes.
10654 (define_insn "consttable_2"
10655 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10658 making_const_table = TRUE;
10659 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10660 assemble_zeros (2);
10663 [(set_attr "length" "4")]
;; 4-byte pool entry; FP constants go through assemble_real.
10666 (define_insn "consttable_4"
10667 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10671 making_const_table = TRUE;
10672 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10677 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10678 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10682 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10683 mark_symbol_refs_as_used (operands[0]);
10688 [(set_attr "length" "4")]
;; 8-byte pool entry.
10691 (define_insn "consttable_8"
10692 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10696 making_const_table = TRUE;
10697 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10702 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10703 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10707 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10712 [(set_attr "length" "8")]
;; 16-byte pool entry (e.g. for vector constants).
10715 (define_insn "consttable_16"
10716 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10720 making_const_table = TRUE;
10721 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10726 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10727 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10731 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10736 [(set_attr "length" "16")]
10739 ;; Miscellaneous Thumb patterns
;; Table jump: on the Thumb path below, the table base (label address) is
;; added to the index register before the jump.
10741 (define_expand "tablejump"
10742 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10743 (use (label_ref (match_operand 1 "" "")))])]
10748 /* Hopefully, CSE will eliminate this copy.  */
10749 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10750 rtx reg2 = gen_reg_rtx (SImode);
10752 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10753 operands[0] = reg2;
10758 ;; NB never uses BX.
10759 (define_insn "*thumb1_tablejump"
10760 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10761 (use (label_ref (match_operand 1 "" "")))]
10764 [(set_attr "length" "2")]
10767 ;; V5 Instructions,
;; Count leading zeros (CLZ, ARMv5+).
10769 (define_insn "clzsi2"
10770 [(set (match_operand:SI 0 "s_register_operand" "=r")
10771 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10772 "TARGET_32BIT && arm_arch5"
10774 [(set_attr "predicable" "yes")
10775 (set_attr "insn" "clz")])
10777 ;; V5E instructions.
;; Data prefetch hint (ARMv5E+); operands 1/2 are the standard rw/locality
;; hints of the prefetch RTX.
10779 (define_insn "prefetch"
10780 [(prefetch (match_operand:SI 0 "address_operand" "p")
10781 (match_operand:SI 1 "" "")
10782 (match_operand:SI 2 "" ""))]
10783 "TARGET_32BIT && arm_arch5e"
10786 ;; General predication pattern
10789 [(match_operator 0 "arm_comparison_operator"
10790 [(match_operand 1 "cc_register" "")
;; Keeps a register live through the prologue; emits only a comment.
10796 (define_insn "prologue_use"
10797 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10799 "%@ %0 needed for prologue"
10803 ;; Patterns for exception handling
;; Dispatch to the ARM or Thumb eh_return implementation.
10805 (define_expand "eh_return"
10806 [(use (match_operand 0 "general_operand" ""))]
10811 emit_insn (gen_arm_eh_return (operands[0]));
10813 emit_insn (gen_thumb_eh_return (operands[0]));
10818 ;; We can't expand this before we know where the link register is stored.
;; After reload, rewrite the stored return address (ARM variant).
10819 (define_insn_and_split "arm_eh_return"
10820 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10822 (clobber (match_scratch:SI 1 "=&r"))]
10825 "&& reload_completed"
10829 arm_set_return_address (operands[0], operands[1]);
;; Thumb variant: restricted to low registers ("l").
10834 (define_insn_and_split "thumb_eh_return"
10835 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10837 (clobber (match_scratch:SI 1 "=&l"))]
10840 "&& reload_completed"
10844 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (hardware TLS register).
10852 (define_insn "load_tp_hard"
10853 [(set (match_operand:SI 0 "register_operand" "=r")
10854 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10856 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10857 [(set_attr "predicable" "yes")]
10860 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
;; Software TLS: call the __aeabi_read_tp helper (result in r0).
10861 (define_insn "load_tp_soft"
10862 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10863 (clobber (reg:SI LR_REGNUM))
10864 (clobber (reg:SI IP_REGNUM))
10865 (clobber (reg:CC CC_REGNUM))]
10867 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10868 [(set_attr "conds" "clob")]
10871 ;; Load the FPA co-processor patterns
10873 ;; Load the Maverick co-processor patterns
10874 (include "cirrus.md")
10875 ;; Vector bits common to IWMMXT and Neon
10876 (include "vec-common.md")
10877 ;; Load the Intel Wireless Multimedia Extension patterns
10878 (include "iwmmxt.md")
10879 ;; Load the VFP co-processor patterns
10881 ;; Thumb-2 patterns
10882 (include "thumb2.md")
10884 (include "neon.md")