1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
109 ;; UNSPEC_VOLATILE Usage:
112 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
114 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
118 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
122 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
124 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
126 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
128 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
130 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
132 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
133 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
134 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
135 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
136 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
137 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
138 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
143 ;;---------------------------------------------------------------------------
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
151 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
152 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
154 ;; Operand number of an input operand that is shifted. Zero if the
155 ;; given instruction does not shift one of its input operands.
156 (define_attr "shift" "" (const_int 0))
158 ; Floating Point Unit. If we only have floating point emulation, then there
159 ; is no point in scheduling the floating point insns. (Well, for best
160 ; performance we should try and group them together).
161 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
162 (const (symbol_ref "arm_fpu_attr")))
164 ; LENGTH of an instruction (in bytes)
165 (define_attr "length" "" (const_int 4))
167 ; POOL_RANGE is how far away from a constant pool entry that this insn
168 ; can be placed. If the distance is zero, then this insn will never
169 ; reference the pool.
170 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
171 ; before its address.
172 (define_attr "pool_range" "" (const_int 0))
173 (define_attr "neg_pool_range" "" (const_int 0))
175 ; An assembler sequence may clobber the condition codes without us knowing.
176 ; If such an insn references the pool, then we have no way of knowing how,
177 ; so use the most conservative value for pool_range.
178 (define_asm_attributes
179 [(set_attr "conds" "clob")
180 (set_attr "length" "4")
181 (set_attr "pool_range" "250")])
183 ;; The instruction used to implement a particular pattern. This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
191 ; TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor can run in parallel with other, basic instructions
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
249 ; Load scheduling, set from the arm_ld_sched variable
250 ; initialized by arm_override_options()
251 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
253 ;; Classification of NEON instructions for scheduling purposes.
254 ;; Do not set this attribute and the "type" attribute together in
255 ;; any one instruction pattern.
256 (define_attr "neon_type"
267 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mul_qqq_8_16_32_ddd_32,\
269 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
270 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
272 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
273 neon_mla_qqq_32_qqd_32_scalar,\
274 neon_mul_ddd_16_scalar_32_16_long_scalar,\
275 neon_mul_qqd_32_scalar,\
276 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
281 neon_vqshl_vrshl_vqrshl_qqq,\
283 neon_fp_vadd_ddd_vabs_dd,\
284 neon_fp_vadd_qqq_vabs_qq,\
290 neon_fp_vmla_ddd_scalar,\
291 neon_fp_vmla_qqq_scalar,\
292 neon_fp_vrecps_vrsqrts_ddd,\
293 neon_fp_vrecps_vrsqrts_qqq,\
301 neon_vld2_2_regs_vld1_vld2_all_lanes,\
304 neon_vst1_1_2_regs_vst2_2_regs,\
306 neon_vst2_4_regs_vst3_vst4,\
308 neon_vld1_vld2_lane,\
309 neon_vld3_vld4_lane,\
310 neon_vst1_vst2_lane,\
311 neon_vst3_vst4_lane,\
312 neon_vld3_vld4_all_lanes,\
320 (const_string "none"))
322 ; condition codes: this one is used by final_prescan_insn to speed up
323 ; conditionalizing instructions. It saves having to scan the rtl to see if
324 ; it uses or alters the condition codes.
326 ; USE means that the condition codes are used by the insn in the process of
327 ; outputting code, this means (at present) that we can't use the insn in
330 ; SET means that the purpose of the insn is to set the condition codes in a
331 ; well defined manner.
333 ; CLOB means that the condition codes are altered in an undefined manner, if
334 ; they are altered at all
336 ; UNCONDITIONAL means the instions can not be conditionally executed.
338 ; NOCOND means that the condition codes are neither altered nor affect the
339 ; output of this insn
341 (define_attr "conds" "use,set,clob,unconditional,nocond"
342 (if_then_else (eq_attr "type" "call")
343 (const_string "clob")
344 (if_then_else (eq_attr "neon_type" "none")
345 (const_string "nocond")
346 (const_string "unconditional"))))
348 ; Predicable means that the insn can be conditionally executed based on
349 ; an automatically added predicate (additional patterns are generated by
350 ; gen...). We default to 'no' because no Thumb patterns match this rule
351 ; and not all ARM patterns do.
352 (define_attr "predicable" "no,yes" (const_string "no"))
354 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
355 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
356 ; suffer blockages enough to warrant modelling this (and it can adversely
357 ; affect the schedule).
358 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
360 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
361 ; to stall the processor. Used with model_wbuf above.
362 (define_attr "write_conflict" "no,yes"
363 (if_then_else (eq_attr "type"
364 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
366 (const_string "no")))
368 ; Classify the insns into those that take one cycle and those that take more
369 ; than one on the main cpu execution unit.
370 (define_attr "core_cycles" "single,multi"
371 (if_then_else (eq_attr "type"
372 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
373 (const_string "single")
374 (const_string "multi")))
376 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
377 ;; distant label. Only applicable to Thumb code.
378 (define_attr "far_jump" "yes,no" (const_string "no"))
381 ;; The number of machine instructions this pattern expands to.
382 ;; Used for Thumb-2 conditional execution.
383 (define_attr "ce_count" "" (const_int 1))
385 ;;---------------------------------------------------------------------------
388 ; A list of modes that are exactly 64 bits in size. We use this to expand
389 ; some splits that are the same for all modes when operating on ARM
391 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
393 ;; The integer modes up to word size
394 (define_mode_iterator QHSI [QI HI SI])
396 ;;---------------------------------------------------------------------------
399 (include "predicates.md")
400 (include "constraints.md")
402 ;;---------------------------------------------------------------------------
403 ;; Pipeline descriptions
405 ;; Processor type. This is created automatically from arm-cores.def.
406 (include "arm-tune.md")
408 (define_attr "tune_cortexr4" "yes,no"
410 (eq_attr "tune" "cortexr4,cortexr4f")
412 (const_string "no"))))
414 ;; True if the generic scheduling description should be used.
416 (define_attr "generic_sched" "yes,no"
418 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
419 (eq_attr "tune_cortexr4" "yes"))
421 (const_string "yes"))))
423 (define_attr "generic_vfp" "yes,no"
425 (and (eq_attr "fpu" "vfp")
426 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
427 (eq_attr "tune_cortexr4" "no"))
429 (const_string "no"))))
431 (include "arm-generic.md")
432 (include "arm926ejs.md")
433 (include "arm1020e.md")
434 (include "arm1026ejs.md")
435 (include "arm1136jfs.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-r4.md")
439 (include "cortex-r4f.md")
443 ;;---------------------------------------------------------------------------
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register, what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451 ;; Cirrus 64bit additions should not be split because we have a native
452 ;; 64bit addition instructions.
454 (define_expand "adddi3"
456 [(set (match_operand:DI 0 "s_register_operand" "")
457 (plus:DI (match_operand:DI 1 "s_register_operand" "")
458 (match_operand:DI 2 "s_register_operand" "")))
459 (clobber (reg:CC CC_REGNUM))])]
462 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
464 if (!cirrus_fp_register (operands[0], DImode))
465 operands[0] = force_reg (DImode, operands[0]);
466 if (!cirrus_fp_register (operands[1], DImode))
467 operands[1] = force_reg (DImode, operands[1]);
468 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
474 if (GET_CODE (operands[1]) != REG)
475 operands[1] = force_reg (DImode, operands[1]);
476 if (GET_CODE (operands[2]) != REG)
477 operands[2] = force_reg (DImode, operands[2]);
482 (define_insn "*thumb1_adddi3"
483 [(set (match_operand:DI 0 "register_operand" "=l")
484 (plus:DI (match_operand:DI 1 "register_operand" "%0")
485 (match_operand:DI 2 "register_operand" "l")))
486 (clobber (reg:CC CC_REGNUM))
489 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
490 [(set_attr "length" "4")]
493 (define_insn_and_split "*arm_adddi3"
494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
495 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
496 (match_operand:DI 2 "s_register_operand" "r, 0")))
497 (clobber (reg:CC CC_REGNUM))]
498 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
500 "TARGET_32BIT && reload_completed"
501 [(parallel [(set (reg:CC_C CC_REGNUM)
502 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
504 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
505 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
506 (plus:SI (match_dup 4) (match_dup 5))))]
509 operands[3] = gen_highpart (SImode, operands[0]);
510 operands[0] = gen_lowpart (SImode, operands[0]);
511 operands[4] = gen_highpart (SImode, operands[1]);
512 operands[1] = gen_lowpart (SImode, operands[1]);
513 operands[5] = gen_highpart (SImode, operands[2]);
514 operands[2] = gen_lowpart (SImode, operands[2]);
516 [(set_attr "conds" "clob")
517 (set_attr "length" "8")]
520 (define_insn_and_split "*adddi_sesidi_di"
521 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
522 (plus:DI (sign_extend:DI
523 (match_operand:SI 2 "s_register_operand" "r,r"))
524 (match_operand:DI 1 "s_register_operand" "0,r")))
525 (clobber (reg:CC CC_REGNUM))]
526 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
528 "TARGET_32BIT && reload_completed"
529 [(parallel [(set (reg:CC_C CC_REGNUM)
530 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
532 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
533 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
534 (plus:SI (ashiftrt:SI (match_dup 2)
539 operands[3] = gen_highpart (SImode, operands[0]);
540 operands[0] = gen_lowpart (SImode, operands[0]);
541 operands[4] = gen_highpart (SImode, operands[1]);
542 operands[1] = gen_lowpart (SImode, operands[1]);
543 operands[2] = gen_lowpart (SImode, operands[2]);
545 [(set_attr "conds" "clob")
546 (set_attr "length" "8")]
549 (define_insn_and_split "*adddi_zesidi_di"
550 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
551 (plus:DI (zero_extend:DI
552 (match_operand:SI 2 "s_register_operand" "r,r"))
553 (match_operand:DI 1 "s_register_operand" "0,r")))
554 (clobber (reg:CC CC_REGNUM))]
555 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
557 "TARGET_32BIT && reload_completed"
558 [(parallel [(set (reg:CC_C CC_REGNUM)
559 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
561 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
562 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
563 (plus:SI (match_dup 4) (const_int 0))))]
566 operands[3] = gen_highpart (SImode, operands[0]);
567 operands[0] = gen_lowpart (SImode, operands[0]);
568 operands[4] = gen_highpart (SImode, operands[1]);
569 operands[1] = gen_lowpart (SImode, operands[1]);
570 operands[2] = gen_lowpart (SImode, operands[2]);
572 [(set_attr "conds" "clob")
573 (set_attr "length" "8")]
576 (define_expand "addsi3"
577 [(set (match_operand:SI 0 "s_register_operand" "")
578 (plus:SI (match_operand:SI 1 "s_register_operand" "")
579 (match_operand:SI 2 "reg_or_int_operand" "")))]
582 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
584 arm_split_constant (PLUS, SImode, NULL_RTX,
585 INTVAL (operands[2]), operands[0], operands[1],
586 optimize && can_create_pseudo_p ());
592 ; If there is a scratch available, this will be faster than synthesizing the
595 [(match_scratch:SI 3 "r")
596 (set (match_operand:SI 0 "arm_general_register_operand" "")
597 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
598 (match_operand:SI 2 "const_int_operand" "")))]
600 !(const_ok_for_arm (INTVAL (operands[2]))
601 || const_ok_for_arm (-INTVAL (operands[2])))
602 && const_ok_for_arm (~INTVAL (operands[2]))"
603 [(set (match_dup 3) (match_dup 2))
604 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
608 ;; The r/r/k alternative is required when reloading the address
609 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
610 ;; put the duplicated register first, and not try the commutative version.
611 (define_insn_and_split "*arm_addsi3"
612 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
613 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
614 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
624 && GET_CODE (operands[2]) == CONST_INT
625 && !(const_ok_for_arm (INTVAL (operands[2]))
626 || const_ok_for_arm (-INTVAL (operands[2])))
627 && (reload_completed || !arm_eliminable_register (operands[1]))"
628 [(clobber (const_int 0))]
630 arm_split_constant (PLUS, SImode, curr_insn,
631 INTVAL (operands[2]), operands[0],
635 [(set_attr "length" "4,4,4,4,4,16")
636 (set_attr "predicable" "yes")]
639 ;; Register group 'k' is a single register group containing only the stack
640 ;; register. Trying to reload it will always fail catastrophically,
641 ;; so never allow those alternatives to match if reloading is needed.
643 (define_insn_and_split "*thumb1_addsi3"
644 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
645 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
646 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
649 static const char * const asms[] =
651 \"add\\t%0, %0, %2\",
652 \"sub\\t%0, %0, #%n2\",
653 \"add\\t%0, %1, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %0, %2\",
656 \"add\\t%0, %1, %2\",
657 \"add\\t%0, %1, %2\",
661 if ((which_alternative == 2 || which_alternative == 6)
662 && GET_CODE (operands[2]) == CONST_INT
663 && INTVAL (operands[2]) < 0)
664 return \"sub\\t%0, %1, #%n2\";
665 return asms[which_alternative];
667 "&& reload_completed && CONST_INT_P (operands[2])
668 && operands[1] != stack_pointer_rtx
669 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
670 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
671 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
673 HOST_WIDE_INT offset = INTVAL (operands[2]);
676 else if (offset < -255)
679 operands[3] = GEN_INT (offset);
680 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
682 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
685 ;; Reloading and elimination of the frame pointer can
686 ;; sometimes cause this optimization to be missed.
688 [(set (match_operand:SI 0 "arm_general_register_operand" "")
689 (match_operand:SI 1 "const_int_operand" ""))
691 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
693 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
694 && (INTVAL (operands[1]) & 3) == 0"
695 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
699 (define_insn "*addsi3_compare0"
700 [(set (reg:CC_NOOV CC_REGNUM)
702 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
703 (match_operand:SI 2 "arm_add_operand" "rI,L"))
705 (set (match_operand:SI 0 "s_register_operand" "=r,r")
706 (plus:SI (match_dup 1) (match_dup 2)))]
710 sub%.\\t%0, %1, #%n2"
711 [(set_attr "conds" "set")]
714 (define_insn "*addsi3_compare0_scratch"
715 [(set (reg:CC_NOOV CC_REGNUM)
717 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
718 (match_operand:SI 1 "arm_add_operand" "rI,L"))
724 [(set_attr "conds" "set")]
727 (define_insn "*compare_negsi_si"
728 [(set (reg:CC_Z CC_REGNUM)
730 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
731 (match_operand:SI 1 "s_register_operand" "r")))]
734 [(set_attr "conds" "set")]
737 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
738 ;; addend is a constant.
739 (define_insn "*cmpsi2_addneg"
740 [(set (reg:CC CC_REGNUM)
742 (match_operand:SI 1 "s_register_operand" "r,r")
743 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
744 (set (match_operand:SI 0 "s_register_operand" "=r,r")
745 (plus:SI (match_dup 1)
746 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
747 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
750 sub%.\\t%0, %1, #%n3"
751 [(set_attr "conds" "set")]
754 ;; Convert the sequence
756 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
760 ;; bcs dest ((unsigned)rn >= 1)
761 ;; similarly for the beq variant using bcc.
762 ;; This is a common looping idiom (while (n--))
764 [(set (match_operand:SI 0 "arm_general_register_operand" "")
765 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
767 (set (match_operand 2 "cc_register" "")
768 (compare (match_dup 0) (const_int -1)))
770 (if_then_else (match_operator 3 "equality_operator"
771 [(match_dup 2) (const_int 0)])
772 (match_operand 4 "" "")
773 (match_operand 5 "" "")))]
774 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
778 (match_dup 1) (const_int 1)))
779 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
781 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
784 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
785 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
788 operands[2], const0_rtx);"
791 ;; The next four insns work because they compare the result with one of
792 ;; the operands, and we know that the use of the condition code is
793 ;; either GEU or LTU, so we can use the carry flag from the addition
794 ;; instead of doing the compare a second time.
795 (define_insn "*addsi3_compare_op1"
796 [(set (reg:CC_C CC_REGNUM)
798 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
799 (match_operand:SI 2 "arm_add_operand" "rI,L"))
801 (set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
806 sub%.\\t%0, %1, #%n2"
807 [(set_attr "conds" "set")]
810 (define_insn "*addsi3_compare_op2"
811 [(set (reg:CC_C CC_REGNUM)
813 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
814 (match_operand:SI 2 "arm_add_operand" "rI,L"))
816 (set (match_operand:SI 0 "s_register_operand" "=r,r")
817 (plus:SI (match_dup 1) (match_dup 2)))]
821 sub%.\\t%0, %1, #%n2"
822 [(set_attr "conds" "set")]
825 (define_insn "*compare_addsi2_op0"
826 [(set (reg:CC_C CC_REGNUM)
828 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
829 (match_operand:SI 1 "arm_add_operand" "rI,L"))
835 [(set_attr "conds" "set")]
838 (define_insn "*compare_addsi2_op1"
839 [(set (reg:CC_C CC_REGNUM)
841 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
842 (match_operand:SI 1 "arm_add_operand" "rI,L"))
848 [(set_attr "conds" "set")]
851 (define_insn "*addsi3_carryin"
852 [(set (match_operand:SI 0 "s_register_operand" "=r")
853 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
854 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
855 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
858 [(set_attr "conds" "use")]
861 (define_insn "*addsi3_carryin_shift"
862 [(set (match_operand:SI 0 "s_register_operand" "=r")
863 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
865 (match_operator:SI 2 "shift_operator"
866 [(match_operand:SI 3 "s_register_operand" "r")
867 (match_operand:SI 4 "reg_or_int_operand" "rM")])
868 (match_operand:SI 1 "s_register_operand" "r"))))]
870 "adc%?\\t%0, %1, %3%S2"
871 [(set_attr "conds" "use")
872 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
873 (const_string "alu_shift")
874 (const_string "alu_shift_reg")))]
877 (define_insn "*addsi3_carryin_alt1"
878 [(set (match_operand:SI 0 "s_register_operand" "=r")
879 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
880 (match_operand:SI 2 "arm_rhs_operand" "rI"))
881 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
884 [(set_attr "conds" "use")]
887 (define_insn "*addsi3_carryin_alt2"
888 [(set (match_operand:SI 0 "s_register_operand" "=r")
889 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
890 (match_operand:SI 1 "s_register_operand" "r"))
891 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
894 [(set_attr "conds" "use")]
897 (define_insn "*addsi3_carryin_alt3"
898 [(set (match_operand:SI 0 "s_register_operand" "=r")
899 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
900 (match_operand:SI 2 "arm_rhs_operand" "rI"))
901 (match_operand:SI 1 "s_register_operand" "r")))]
904 [(set_attr "conds" "use")]
907 (define_expand "incscc"
908 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
909 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
910 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
911 (match_operand:SI 1 "s_register_operand" "0,?r")))]
916 (define_insn "*arm_incscc"
917 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
918 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
919 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
920 (match_operand:SI 1 "s_register_operand" "0,?r")))]
924 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
925 [(set_attr "conds" "use")
926 (set_attr "length" "4,8")]
929 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
931 [(set (match_operand:SI 0 "s_register_operand" "")
932 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
933 (match_operand:SI 2 "s_register_operand" ""))
935 (clobber (match_operand:SI 3 "s_register_operand" ""))]
937 [(set (match_dup 3) (match_dup 1))
938 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
940 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
943 (define_expand "addsf3"
944 [(set (match_operand:SF 0 "s_register_operand" "")
945 (plus:SF (match_operand:SF 1 "s_register_operand" "")
946 (match_operand:SF 2 "arm_float_add_operand" "")))]
947 "TARGET_32BIT && TARGET_HARD_FLOAT"
950 && !cirrus_fp_register (operands[2], SFmode))
951 operands[2] = force_reg (SFmode, operands[2]);
954 (define_expand "adddf3"
955 [(set (match_operand:DF 0 "s_register_operand" "")
956 (plus:DF (match_operand:DF 1 "s_register_operand" "")
957 (match_operand:DF 2 "arm_float_add_operand" "")))]
958 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
961 && !cirrus_fp_register (operands[2], DFmode))
962 operands[2] = force_reg (DFmode, operands[2]);
965 (define_expand "subdi3"
967 [(set (match_operand:DI 0 "s_register_operand" "")
968 (minus:DI (match_operand:DI 1 "s_register_operand" "")
969 (match_operand:DI 2 "s_register_operand" "")))
970 (clobber (reg:CC CC_REGNUM))])]
973 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
975 && cirrus_fp_register (operands[0], DImode)
976 && cirrus_fp_register (operands[1], DImode))
978 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
984 if (GET_CODE (operands[1]) != REG)
985 operands[1] = force_reg (DImode, operands[1]);
986 if (GET_CODE (operands[2]) != REG)
987 operands[2] = force_reg (DImode, operands[2]);
992 (define_insn "*arm_subdi3"
993 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
994 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
995 (match_operand:DI 2 "s_register_operand" "r,0,0")))
996 (clobber (reg:CC CC_REGNUM))]
998 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
999 [(set_attr "conds" "clob")
1000 (set_attr "length" "8")]
1003 (define_insn "*thumb_subdi3"
1004 [(set (match_operand:DI 0 "register_operand" "=l")
1005 (minus:DI (match_operand:DI 1 "register_operand" "0")
1006 (match_operand:DI 2 "register_operand" "l")))
1007 (clobber (reg:CC CC_REGNUM))]
1009 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1010 [(set_attr "length" "4")]
1013 (define_insn "*subdi_di_zesidi"
1014 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1015 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1017 (match_operand:SI 2 "s_register_operand" "r,r"))))
1018 (clobber (reg:CC CC_REGNUM))]
1020 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1021 [(set_attr "conds" "clob")
1022 (set_attr "length" "8")]
1025 (define_insn "*subdi_di_sesidi"
1026 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1027 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1029 (match_operand:SI 2 "s_register_operand" "r,r"))))
1030 (clobber (reg:CC CC_REGNUM))]
1032 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1033 [(set_attr "conds" "clob")
1034 (set_attr "length" "8")]
1037 (define_insn "*subdi_zesidi_di"
1038 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1039 (minus:DI (zero_extend:DI
1040 (match_operand:SI 2 "s_register_operand" "r,r"))
1041 (match_operand:DI 1 "s_register_operand" "0,r")))
1042 (clobber (reg:CC CC_REGNUM))]
1044 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1045 [(set_attr "conds" "clob")
1046 (set_attr "length" "8")]
1049 (define_insn "*subdi_sesidi_di"
1050 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1051 (minus:DI (sign_extend:DI
1052 (match_operand:SI 2 "s_register_operand" "r,r"))
1053 (match_operand:DI 1 "s_register_operand" "0,r")))
1054 (clobber (reg:CC CC_REGNUM))]
1056 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1057 [(set_attr "conds" "clob")
1058 (set_attr "length" "8")]
1061 (define_insn "*subdi_zesidi_zesidi"
1062 [(set (match_operand:DI 0 "s_register_operand" "=r")
1063 (minus:DI (zero_extend:DI
1064 (match_operand:SI 1 "s_register_operand" "r"))
1066 (match_operand:SI 2 "s_register_operand" "r"))))
1067 (clobber (reg:CC CC_REGNUM))]
1069 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1070 [(set_attr "conds" "clob")
1071 (set_attr "length" "8")]
1074 (define_expand "subsi3"
1075 [(set (match_operand:SI 0 "s_register_operand" "")
1076 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1077 (match_operand:SI 2 "s_register_operand" "")))]
1080 if (GET_CODE (operands[1]) == CONST_INT)
1084 arm_split_constant (MINUS, SImode, NULL_RTX,
1085 INTVAL (operands[1]), operands[0],
1086 operands[2], optimize && can_create_pseudo_p ());
1089 else /* TARGET_THUMB1 */
1090 operands[1] = force_reg (SImode, operands[1]);
1095 (define_insn "*thumb1_subsi3_insn"
1096 [(set (match_operand:SI 0 "register_operand" "=l")
1097 (minus:SI (match_operand:SI 1 "register_operand" "l")
1098 (match_operand:SI 2 "register_operand" "l")))]
1101 [(set_attr "length" "2")]
1104 ; ??? Check Thumb-2 split length
1105 (define_insn_and_split "*arm_subsi3_insn"
1106 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1107 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1108 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1115 && GET_CODE (operands[1]) == CONST_INT
1116 && !const_ok_for_arm (INTVAL (operands[1]))"
1117 [(clobber (const_int 0))]
1119 arm_split_constant (MINUS, SImode, curr_insn,
1120 INTVAL (operands[1]), operands[0], operands[2], 0);
1123 [(set_attr "length" "4,4,16")
1124 (set_attr "predicable" "yes")]
1128 [(match_scratch:SI 3 "r")
1129 (set (match_operand:SI 0 "arm_general_register_operand" "")
1130 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1131 (match_operand:SI 2 "arm_general_register_operand" "")))]
1133 && !const_ok_for_arm (INTVAL (operands[1]))
1134 && const_ok_for_arm (~INTVAL (operands[1]))"
1135 [(set (match_dup 3) (match_dup 1))
1136 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1140 (define_insn "*subsi3_compare0"
1141 [(set (reg:CC_NOOV CC_REGNUM)
1143 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1144 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1146 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1147 (minus:SI (match_dup 1) (match_dup 2)))]
1152 [(set_attr "conds" "set")]
1155 (define_insn "*subsi3_compare0_c"
1156 [(set (reg:CC_NOTB CC_REGNUM)
1157 (compare:CC_NOTB (match_operand:SI 1 "arm_rhs_operand" "r,I")
1158 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1159 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1160 (minus:SI (match_dup 1) (match_dup 2)))]
1165 [(set_attr "conds" "set")]
1168 (define_expand "decscc"
1169 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1170 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1171 (match_operator:SI 2 "arm_comparison_operator"
1172 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1177 (define_insn "*arm_decscc"
1178 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1179 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1180 (match_operator:SI 2 "arm_comparison_operator"
1181 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1185 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1186 [(set_attr "conds" "use")
1187 (set_attr "length" "*,8")]
1190 (define_expand "subsf3"
1191 [(set (match_operand:SF 0 "s_register_operand" "")
1192 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1193 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1194 "TARGET_32BIT && TARGET_HARD_FLOAT"
1196 if (TARGET_MAVERICK)
1198 if (!cirrus_fp_register (operands[1], SFmode))
1199 operands[1] = force_reg (SFmode, operands[1]);
1200 if (!cirrus_fp_register (operands[2], SFmode))
1201 operands[2] = force_reg (SFmode, operands[2]);
1205 (define_expand "subdf3"
1206 [(set (match_operand:DF 0 "s_register_operand" "")
1207 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1208 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1209 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1211 if (TARGET_MAVERICK)
1213 if (!cirrus_fp_register (operands[1], DFmode))
1214 operands[1] = force_reg (DFmode, operands[1]);
1215 if (!cirrus_fp_register (operands[2], DFmode))
1216 operands[2] = force_reg (DFmode, operands[2]);
1221 ;; Multiplication insns
1223 (define_expand "mulsi3"
1224 [(set (match_operand:SI 0 "s_register_operand" "")
1225 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1226 (match_operand:SI 1 "s_register_operand" "")))]
1231 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1232 (define_insn "*arm_mulsi3"
1233 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1234 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1235 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1236 "TARGET_32BIT && !arm_arch6"
1237 "mul%?\\t%0, %2, %1"
1238 [(set_attr "insn" "mul")
1239 (set_attr "predicable" "yes")]
1242 (define_insn "*arm_mulsi3_v6"
1243 [(set (match_operand:SI 0 "s_register_operand" "=r")
1244 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1245 (match_operand:SI 2 "s_register_operand" "r")))]
1246 "TARGET_32BIT && arm_arch6"
1247 "mul%?\\t%0, %1, %2"
1248 [(set_attr "insn" "mul")
1249 (set_attr "predicable" "yes")]
1252 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1253 ; 1 and 2; are the same, because reload will make operand 0 match
1254 ; operand 1 without realizing that this conflicts with operand 2. We fix
1255 ; this by adding another alternative to match this case, and then `reload'
1256 ; it ourselves. This alternative must come first.
1257 (define_insn "*thumb_mulsi3"
1258 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1259 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1260 (match_operand:SI 2 "register_operand" "l,l,l")))]
1261 "TARGET_THUMB1 && !arm_arch6"
1263 if (which_alternative < 2)
1264 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1266 return \"mul\\t%0, %2\";
1268 [(set_attr "length" "4,4,2")
1269 (set_attr "insn" "mul")]
1272 (define_insn "*thumb_mulsi3_v6"
1273 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1274 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1275 (match_operand:SI 2 "register_operand" "l,0,0")))]
1276 "TARGET_THUMB1 && arm_arch6"
1281 [(set_attr "length" "2")
1282 (set_attr "insn" "mul")]
1285 (define_insn "*mulsi3_compare0"
1286 [(set (reg:CC_NOOV CC_REGNUM)
1287 (compare:CC_NOOV (mult:SI
1288 (match_operand:SI 2 "s_register_operand" "r,r")
1289 (match_operand:SI 1 "s_register_operand" "%0,r"))
1291 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1292 (mult:SI (match_dup 2) (match_dup 1)))]
1293 "TARGET_ARM && !arm_arch6"
1294 "mul%.\\t%0, %2, %1"
1295 [(set_attr "conds" "set")
1296 (set_attr "insn" "muls")]
1299 (define_insn "*mulsi3_compare0_v6"
1300 [(set (reg:CC_NOOV CC_REGNUM)
1301 (compare:CC_NOOV (mult:SI
1302 (match_operand:SI 2 "s_register_operand" "r")
1303 (match_operand:SI 1 "s_register_operand" "r"))
1305 (set (match_operand:SI 0 "s_register_operand" "=r")
1306 (mult:SI (match_dup 2) (match_dup 1)))]
1307 "TARGET_ARM && arm_arch6 && optimize_size"
1308 "mul%.\\t%0, %2, %1"
1309 [(set_attr "conds" "set")
1310 (set_attr "insn" "muls")]
1313 (define_insn "*mulsi_compare0_scratch"
1314 [(set (reg:CC_NOOV CC_REGNUM)
1315 (compare:CC_NOOV (mult:SI
1316 (match_operand:SI 2 "s_register_operand" "r,r")
1317 (match_operand:SI 1 "s_register_operand" "%0,r"))
1319 (clobber (match_scratch:SI 0 "=&r,&r"))]
1320 "TARGET_ARM && !arm_arch6"
1321 "mul%.\\t%0, %2, %1"
1322 [(set_attr "conds" "set")
1323 (set_attr "insn" "muls")]
1326 (define_insn "*mulsi_compare0_scratch_v6"
1327 [(set (reg:CC_NOOV CC_REGNUM)
1328 (compare:CC_NOOV (mult:SI
1329 (match_operand:SI 2 "s_register_operand" "r")
1330 (match_operand:SI 1 "s_register_operand" "r"))
1332 (clobber (match_scratch:SI 0 "=r"))]
1333 "TARGET_ARM && arm_arch6 && optimize_size"
1334 "mul%.\\t%0, %2, %1"
1335 [(set_attr "conds" "set")
1336 (set_attr "insn" "muls")]
1339 ;; Unnamed templates to match MLA instruction.
1341 (define_insn "*mulsi3addsi"
1342 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1344 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1345 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1346 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1347 "TARGET_32BIT && !arm_arch6"
1348 "mla%?\\t%0, %2, %1, %3"
1349 [(set_attr "insn" "mla")
1350 (set_attr "predicable" "yes")]
1353 (define_insn "*mulsi3addsi_v6"
1354 [(set (match_operand:SI 0 "s_register_operand" "=r")
1356 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1357 (match_operand:SI 1 "s_register_operand" "r"))
1358 (match_operand:SI 3 "s_register_operand" "r")))]
1359 "TARGET_32BIT && arm_arch6"
1360 "mla%?\\t%0, %2, %1, %3"
1361 [(set_attr "insn" "mla")
1362 (set_attr "predicable" "yes")]
1365 (define_insn "*mulsi3addsi_compare0"
1366 [(set (reg:CC_NOOV CC_REGNUM)
1369 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1370 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1371 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1373 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1374 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1376 "TARGET_ARM && arm_arch6"
1377 "mla%.\\t%0, %2, %1, %3"
1378 [(set_attr "conds" "set")
1379 (set_attr "insn" "mlas")]
1382 (define_insn "*mulsi3addsi_compare0_v6"
1383 [(set (reg:CC_NOOV CC_REGNUM)
1386 (match_operand:SI 2 "s_register_operand" "r")
1387 (match_operand:SI 1 "s_register_operand" "r"))
1388 (match_operand:SI 3 "s_register_operand" "r"))
1390 (set (match_operand:SI 0 "s_register_operand" "=r")
1391 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1393 "TARGET_ARM && arm_arch6 && optimize_size"
1394 "mla%.\\t%0, %2, %1, %3"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "mlas")]
1399 (define_insn "*mulsi3addsi_compare0_scratch"
1400 [(set (reg:CC_NOOV CC_REGNUM)
1403 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1404 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1405 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1407 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1408 "TARGET_ARM && !arm_arch6"
1409 "mla%.\\t%0, %2, %1, %3"
1410 [(set_attr "conds" "set")
1411 (set_attr "insn" "mlas")]
1414 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1415 [(set (reg:CC_NOOV CC_REGNUM)
1418 (match_operand:SI 2 "s_register_operand" "r")
1419 (match_operand:SI 1 "s_register_operand" "r"))
1420 (match_operand:SI 3 "s_register_operand" "r"))
1422 (clobber (match_scratch:SI 0 "=r"))]
1423 "TARGET_ARM && arm_arch6 && optimize_size"
1424 "mla%.\\t%0, %2, %1, %3"
1425 [(set_attr "conds" "set")
1426 (set_attr "insn" "mlas")]
1429 (define_insn "*mulsi3subsi"
1430 [(set (match_operand:SI 0 "s_register_operand" "=r")
1432 (match_operand:SI 3 "s_register_operand" "r")
1433 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1434 (match_operand:SI 1 "s_register_operand" "r"))))]
1435 "TARGET_32BIT && arm_arch_thumb2"
1436 "mls%?\\t%0, %2, %1, %3"
1437 [(set_attr "insn" "mla")
1438 (set_attr "predicable" "yes")]
1441 (define_expand "maddsidi4"
1442 [(set (match_operand:DI 0 "s_register_operand" "")
1445 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1446 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1447 (match_operand:DI 3 "s_register_operand" "")))]
1448 "TARGET_32BIT && arm_arch3m"
1451 (define_insn "*mulsidi3adddi"
1452 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1455 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1456 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1457 (match_operand:DI 1 "s_register_operand" "0")))]
1458 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1459 "smlal%?\\t%Q0, %R0, %3, %2"
1460 [(set_attr "insn" "smlal")
1461 (set_attr "predicable" "yes")]
1464 (define_insn "*mulsidi3adddi_v6"
1465 [(set (match_operand:DI 0 "s_register_operand" "=r")
1468 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1469 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1470 (match_operand:DI 1 "s_register_operand" "0")))]
1471 "TARGET_32BIT && arm_arch6"
1472 "smlal%?\\t%Q0, %R0, %3, %2"
1473 [(set_attr "insn" "smlal")
1474 (set_attr "predicable" "yes")]
1477 ;; 32x32->64 widening multiply.
1478 ;; As with mulsi3, the only difference between the v3-5 and v6+
1479 ;; versions of these patterns is the requirement that the output not
1480 ;; overlap the inputs, but that still means we have to have a named
1481 ;; expander and two different starred insns.
1483 (define_expand "mulsidi3"
1484 [(set (match_operand:DI 0 "s_register_operand" "")
1486 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1487 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1488 "TARGET_32BIT && arm_arch3m"
1492 (define_insn "*mulsidi3_nov6"
1493 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1495 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1496 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1497 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1498 "smull%?\\t%Q0, %R0, %1, %2"
1499 [(set_attr "insn" "smull")
1500 (set_attr "predicable" "yes")]
1503 (define_insn "*mulsidi3_v6"
1504 [(set (match_operand:DI 0 "s_register_operand" "=r")
1506 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1507 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1508 "TARGET_32BIT && arm_arch6"
1509 "smull%?\\t%Q0, %R0, %1, %2"
1510 [(set_attr "insn" "smull")
1511 (set_attr "predicable" "yes")]
1514 (define_expand "umulsidi3"
1515 [(set (match_operand:DI 0 "s_register_operand" "")
1517 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1518 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1519 "TARGET_32BIT && arm_arch3m"
1523 (define_insn "*umulsidi3_nov6"
1524 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1526 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1527 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1528 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1529 "umull%?\\t%Q0, %R0, %1, %2"
1530 [(set_attr "insn" "umull")
1531 (set_attr "predicable" "yes")]
1534 (define_insn "*umulsidi3_v6"
1535 [(set (match_operand:DI 0 "s_register_operand" "=r")
1537 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1538 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1539 "TARGET_32BIT && arm_arch6"
1540 "umull%?\\t%Q0, %R0, %1, %2"
1541 [(set_attr "insn" "umull")
1542 (set_attr "predicable" "yes")]
1545 (define_expand "umaddsidi4"
1546 [(set (match_operand:DI 0 "s_register_operand" "")
1549 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1550 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1551 (match_operand:DI 3 "s_register_operand" "")))]
1552 "TARGET_32BIT && arm_arch3m"
1555 (define_insn "*umulsidi3adddi"
1556 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1559 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1560 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1561 (match_operand:DI 1 "s_register_operand" "0")))]
1562 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1563 "umlal%?\\t%Q0, %R0, %3, %2"
1564 [(set_attr "insn" "umlal")
1565 (set_attr "predicable" "yes")]
1568 (define_insn "*umulsidi3adddi_v6"
1569 [(set (match_operand:DI 0 "s_register_operand" "=r")
1572 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1573 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1574 (match_operand:DI 1 "s_register_operand" "0")))]
1575 "TARGET_32BIT && arm_arch6"
1576 "umlal%?\\t%Q0, %R0, %3, %2"
1577 [(set_attr "insn" "umlal")
1578 (set_attr "predicable" "yes")]
1581 (define_expand "smulsi3_highpart"
1583 [(set (match_operand:SI 0 "s_register_operand" "")
1587 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1588 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1590 (clobber (match_scratch:SI 3 ""))])]
1591 "TARGET_32BIT && arm_arch3m"
1595 (define_insn "*smulsi3_highpart_nov6"
1596 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1600 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1601 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1603 (clobber (match_scratch:SI 3 "=&r,&r"))]
1604 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1605 "smull%?\\t%3, %0, %2, %1"
1606 [(set_attr "insn" "smull")
1607 (set_attr "predicable" "yes")]
1610 (define_insn "*smulsi3_highpart_v6"
1611 [(set (match_operand:SI 0 "s_register_operand" "=r")
1615 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1616 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1618 (clobber (match_scratch:SI 3 "=r"))]
1619 "TARGET_32BIT && arm_arch6"
1620 "smull%?\\t%3, %0, %2, %1"
1621 [(set_attr "insn" "smull")
1622 (set_attr "predicable" "yes")]
1625 (define_expand "umulsi3_highpart"
1627 [(set (match_operand:SI 0 "s_register_operand" "")
1631 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1632 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1634 (clobber (match_scratch:SI 3 ""))])]
1635 "TARGET_32BIT && arm_arch3m"
1639 (define_insn "*umulsi3_highpart_nov6"
1640 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1644 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1645 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1647 (clobber (match_scratch:SI 3 "=&r,&r"))]
1648 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1649 "umull%?\\t%3, %0, %2, %1"
1650 [(set_attr "insn" "umull")
1651 (set_attr "predicable" "yes")]
1654 (define_insn "*umulsi3_highpart_v6"
1655 [(set (match_operand:SI 0 "s_register_operand" "=r")
1659 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1660 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1662 (clobber (match_scratch:SI 3 "=r"))]
1663 "TARGET_32BIT && arm_arch6"
1664 "umull%?\\t%3, %0, %2, %1"
1665 [(set_attr "insn" "umull")
1666 (set_attr "predicable" "yes")]
1669 (define_insn "mulhisi3"
1670 [(set (match_operand:SI 0 "s_register_operand" "=r")
1671 (mult:SI (sign_extend:SI
1672 (match_operand:HI 1 "s_register_operand" "%r"))
1674 (match_operand:HI 2 "s_register_operand" "r"))))]
1675 "TARGET_DSP_MULTIPLY"
1676 "smulbb%?\\t%0, %1, %2"
1677 [(set_attr "insn" "smulxy")
1678 (set_attr "predicable" "yes")]
1681 (define_insn "*mulhisi3tb"
1682 [(set (match_operand:SI 0 "s_register_operand" "=r")
1683 (mult:SI (ashiftrt:SI
1684 (match_operand:SI 1 "s_register_operand" "r")
1687 (match_operand:HI 2 "s_register_operand" "r"))))]
1688 "TARGET_DSP_MULTIPLY"
1689 "smultb%?\\t%0, %1, %2"
1690 [(set_attr "insn" "smulxy")
1691 (set_attr "predicable" "yes")]
1694 (define_insn "*mulhisi3bt"
1695 [(set (match_operand:SI 0 "s_register_operand" "=r")
1696 (mult:SI (sign_extend:SI
1697 (match_operand:HI 1 "s_register_operand" "r"))
1699 (match_operand:SI 2 "s_register_operand" "r")
1701 "TARGET_DSP_MULTIPLY"
1702 "smulbt%?\\t%0, %1, %2"
1703 [(set_attr "insn" "smulxy")
1704 (set_attr "predicable" "yes")]
1707 (define_insn "*mulhisi3tt"
1708 [(set (match_operand:SI 0 "s_register_operand" "=r")
1709 (mult:SI (ashiftrt:SI
1710 (match_operand:SI 1 "s_register_operand" "r")
1713 (match_operand:SI 2 "s_register_operand" "r")
1715 "TARGET_DSP_MULTIPLY"
1716 "smultt%?\\t%0, %1, %2"
1717 [(set_attr "insn" "smulxy")
1718 (set_attr "predicable" "yes")]
1721 (define_insn "maddhisi4"
1722 [(set (match_operand:SI 0 "s_register_operand" "=r")
1723 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1724 (mult:SI (sign_extend:SI
1725 (match_operand:HI 1 "s_register_operand" "%r"))
1727 (match_operand:HI 2 "s_register_operand" "r")))))]
1728 "TARGET_DSP_MULTIPLY"
1729 "smlabb%?\\t%0, %1, %2, %3"
1730 [(set_attr "insn" "smlaxy")
1731 (set_attr "predicable" "yes")]
1734 (define_insn "*maddhidi4"
1735 [(set (match_operand:DI 0 "s_register_operand" "=r")
1737 (match_operand:DI 3 "s_register_operand" "0")
1738 (mult:DI (sign_extend:DI
1739 (match_operand:HI 1 "s_register_operand" "%r"))
1741 (match_operand:HI 2 "s_register_operand" "r")))))]
1742 "TARGET_DSP_MULTIPLY"
1743 "smlalbb%?\\t%Q0, %R0, %1, %2"
1744 [(set_attr "insn" "smlalxy")
1745 (set_attr "predicable" "yes")])
1747 (define_expand "mulsf3"
1748 [(set (match_operand:SF 0 "s_register_operand" "")
1749 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1750 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1751 "TARGET_32BIT && TARGET_HARD_FLOAT"
1754 && !cirrus_fp_register (operands[2], SFmode))
1755 operands[2] = force_reg (SFmode, operands[2]);
1758 (define_expand "muldf3"
1759 [(set (match_operand:DF 0 "s_register_operand" "")
1760 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1761 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1762 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1765 && !cirrus_fp_register (operands[2], DFmode))
1766 operands[2] = force_reg (DFmode, operands[2]);
1771 (define_expand "divsf3"
1772 [(set (match_operand:SF 0 "s_register_operand" "")
1773 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1774 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1775 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1778 (define_expand "divdf3"
1779 [(set (match_operand:DF 0 "s_register_operand" "")
1780 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1781 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1782 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1787 (define_expand "modsf3"
1788 [(set (match_operand:SF 0 "s_register_operand" "")
1789 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1790 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1791 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1794 (define_expand "moddf3"
1795 [(set (match_operand:DF 0 "s_register_operand" "")
1796 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1797 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1798 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1801 ;; Boolean and,ior,xor insns
1803 ;; Split up double word logical operations
1805 ;; Split up simple DImode logical operations. Simply perform the logical
1806 ;; operation on the upper and lower halves of the registers.
1808 [(set (match_operand:DI 0 "s_register_operand" "")
1809 (match_operator:DI 6 "logical_binary_operator"
1810 [(match_operand:DI 1 "s_register_operand" "")
1811 (match_operand:DI 2 "s_register_operand" "")]))]
1812 "TARGET_32BIT && reload_completed
1813 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1814 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1815 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1816 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1819 operands[3] = gen_highpart (SImode, operands[0]);
1820 operands[0] = gen_lowpart (SImode, operands[0]);
1821 operands[4] = gen_highpart (SImode, operands[1]);
1822 operands[1] = gen_lowpart (SImode, operands[1]);
1823 operands[5] = gen_highpart (SImode, operands[2]);
1824 operands[2] = gen_lowpart (SImode, operands[2]);
1829 [(set (match_operand:DI 0 "s_register_operand" "")
1830 (match_operator:DI 6 "logical_binary_operator"
1831 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1832 (match_operand:DI 1 "s_register_operand" "")]))]
1833 "TARGET_32BIT && reload_completed"
1834 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1835 (set (match_dup 3) (match_op_dup:SI 6
1836 [(ashiftrt:SI (match_dup 2) (const_int 31))
1840 operands[3] = gen_highpart (SImode, operands[0]);
1841 operands[0] = gen_lowpart (SImode, operands[0]);
1842 operands[4] = gen_highpart (SImode, operands[1]);
1843 operands[1] = gen_lowpart (SImode, operands[1]);
1844 operands[5] = gen_highpart (SImode, operands[2]);
1845 operands[2] = gen_lowpart (SImode, operands[2]);
1849 ;; The zero extend of operand 2 means we can just copy the high part of
1850 ;; operand1 into operand0.
1852 [(set (match_operand:DI 0 "s_register_operand" "")
1854 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1855 (match_operand:DI 1 "s_register_operand" "")))]
1856 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1857 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1858 (set (match_dup 3) (match_dup 4))]
1861 operands[4] = gen_highpart (SImode, operands[1]);
1862 operands[3] = gen_highpart (SImode, operands[0]);
1863 operands[0] = gen_lowpart (SImode, operands[0]);
1864 operands[1] = gen_lowpart (SImode, operands[1]);
1868 ;; The zero extend of operand 2 means we can just copy the high part of
1869 ;; operand1 into operand0.
1871 [(set (match_operand:DI 0 "s_register_operand" "")
1873 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1874 (match_operand:DI 1 "s_register_operand" "")))]
1875 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1876 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1877 (set (match_dup 3) (match_dup 4))]
1880 operands[4] = gen_highpart (SImode, operands[1]);
1881 operands[3] = gen_highpart (SImode, operands[0]);
1882 operands[0] = gen_lowpart (SImode, operands[0]);
1883 operands[1] = gen_lowpart (SImode, operands[1]);
1887 (define_expand "anddi3"
1888 [(set (match_operand:DI 0 "s_register_operand" "")
1889 (and:DI (match_operand:DI 1 "s_register_operand" "")
1890 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1895 (define_insn "*anddi3_insn"
1896 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1897 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1898 (match_operand:DI 2 "s_register_operand" "r,r")))]
1899 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1901 [(set_attr "length" "8")]
1904 (define_insn_and_split "*anddi_zesidi_di"
1905 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1906 (and:DI (zero_extend:DI
1907 (match_operand:SI 2 "s_register_operand" "r,r"))
1908 (match_operand:DI 1 "s_register_operand" "0,r")))]
1911 "TARGET_32BIT && reload_completed"
1912 ; The zero extend of operand 2 clears the high word of the output
1914 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1915 (set (match_dup 3) (const_int 0))]
1918 operands[3] = gen_highpart (SImode, operands[0]);
1919 operands[0] = gen_lowpart (SImode, operands[0]);
1920 operands[1] = gen_lowpart (SImode, operands[1]);
1922 [(set_attr "length" "8")]
1925 (define_insn "*anddi_sesdi_di"
1926 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1927 (and:DI (sign_extend:DI
1928 (match_operand:SI 2 "s_register_operand" "r,r"))
1929 (match_operand:DI 1 "s_register_operand" "0,r")))]
1932 [(set_attr "length" "8")]
1935 (define_expand "andsi3"
1936 [(set (match_operand:SI 0 "s_register_operand" "")
1937 (and:SI (match_operand:SI 1 "s_register_operand" "")
1938 (match_operand:SI 2 "reg_or_int_operand" "")))]
1943 if (GET_CODE (operands[2]) == CONST_INT)
1945 arm_split_constant (AND, SImode, NULL_RTX,
1946 INTVAL (operands[2]), operands[0],
1947 operands[1], optimize && can_create_pseudo_p ());
1952 else /* TARGET_THUMB1 */
1954 if (GET_CODE (operands[2]) != CONST_INT)
1956 rtx tmp = force_reg (SImode, operands[2]);
1957 if (rtx_equal_p (operands[0], operands[1]))
1961 operands[2] = operands[1];
1969 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1971 operands[2] = force_reg (SImode,
1972 GEN_INT (~INTVAL (operands[2])));
1974 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1979 for (i = 9; i <= 31; i++)
1981 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1983 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1987 else if ((((HOST_WIDE_INT) 1) << i) - 1
1988 == ~INTVAL (operands[2]))
1990 rtx shift = GEN_INT (i);
1991 rtx reg = gen_reg_rtx (SImode);
1993 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1994 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2000 operands[2] = force_reg (SImode, operands[2]);
2006 ; ??? Check split length for Thumb-2
2007 (define_insn_and_split "*arm_andsi3_insn"
2008 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2009 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2010 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2014 bic%?\\t%0, %1, #%B2
2017 && GET_CODE (operands[2]) == CONST_INT
2018 && !(const_ok_for_arm (INTVAL (operands[2]))
2019 || const_ok_for_arm (~INTVAL (operands[2])))"
2020 [(clobber (const_int 0))]
2022 arm_split_constant (AND, SImode, curr_insn,
2023 INTVAL (operands[2]), operands[0], operands[1], 0);
2026 [(set_attr "length" "4,4,16")
2027 (set_attr "predicable" "yes")]
2030 (define_insn "*thumb1_andsi3_insn"
2031 [(set (match_operand:SI 0 "register_operand" "=l")
2032 (and:SI (match_operand:SI 1 "register_operand" "%0")
2033 (match_operand:SI 2 "register_operand" "l")))]
2036 [(set_attr "length" "2")]
2039 (define_insn "*andsi3_compare0"
2040 [(set (reg:CC_NOOV CC_REGNUM)
2042 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2043 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2045 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2046 (and:SI (match_dup 1) (match_dup 2)))]
2050 bic%.\\t%0, %1, #%B2"
2051 [(set_attr "conds" "set")]
2054 (define_insn "*andsi3_compare0_scratch"
2055 [(set (reg:CC_NOOV CC_REGNUM)
2057 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2058 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2060 (clobber (match_scratch:SI 2 "=X,r"))]
2064 bic%.\\t%2, %0, #%B1"
2065 [(set_attr "conds" "set")]
2068 (define_insn "*zeroextractsi_compare0_scratch"
2069 [(set (reg:CC_NOOV CC_REGNUM)
2070 (compare:CC_NOOV (zero_extract:SI
2071 (match_operand:SI 0 "s_register_operand" "r")
2072 (match_operand 1 "const_int_operand" "n")
2073 (match_operand 2 "const_int_operand" "n"))
2076 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2077 && INTVAL (operands[1]) > 0
2078 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2079 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2081 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2082 << INTVAL (operands[2]));
2083 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2086 [(set_attr "conds" "set")]
2089 (define_insn_and_split "*ne_zeroextractsi"
2090 [(set (match_operand:SI 0 "s_register_operand" "=r")
2091 (ne:SI (zero_extract:SI
2092 (match_operand:SI 1 "s_register_operand" "r")
2093 (match_operand:SI 2 "const_int_operand" "n")
2094 (match_operand:SI 3 "const_int_operand" "n"))
2096 (clobber (reg:CC CC_REGNUM))]
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2104 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2105 && INTVAL (operands[2]) > 0
2106 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2107 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2108 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2109 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2111 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2113 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2114 (match_dup 0) (const_int 1)))]
2116 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2117 << INTVAL (operands[3]));
2119 [(set_attr "conds" "clob")
2120 (set (attr "length")
2121 (if_then_else (eq_attr "is_thumb" "yes")
2126 (define_insn_and_split "*ne_zeroextractsi_shifted"
2127 [(set (match_operand:SI 0 "s_register_operand" "=r")
2128 (ne:SI (zero_extract:SI
2129 (match_operand:SI 1 "s_register_operand" "r")
2130 (match_operand:SI 2 "const_int_operand" "n")
2133 (clobber (reg:CC CC_REGNUM))]
2137 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2138 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2140 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2142 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2143 (match_dup 0) (const_int 1)))]
2145 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2147 [(set_attr "conds" "clob")
2148 (set_attr "length" "8")]
2151 (define_insn_and_split "*ite_ne_zeroextractsi"
2152 [(set (match_operand:SI 0 "s_register_operand" "=r")
2153 (if_then_else:SI (ne (zero_extract:SI
2154 (match_operand:SI 1 "s_register_operand" "r")
2155 (match_operand:SI 2 "const_int_operand" "n")
2156 (match_operand:SI 3 "const_int_operand" "n"))
2158 (match_operand:SI 4 "arm_not_operand" "rIK")
2160 (clobber (reg:CC CC_REGNUM))]
2162 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2163 && INTVAL (operands[2]) > 0
2164 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2165 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2166 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2169 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2170 && INTVAL (operands[2]) > 0
2171 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2172 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2173 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2174 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2175 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2177 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2179 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2180 (match_dup 0) (match_dup 4)))]
2182 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2183 << INTVAL (operands[3]));
2185 [(set_attr "conds" "clob")
2186 (set_attr "length" "8")]
2189 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2190 [(set (match_operand:SI 0 "s_register_operand" "=r")
2191 (if_then_else:SI (ne (zero_extract:SI
2192 (match_operand:SI 1 "s_register_operand" "r")
2193 (match_operand:SI 2 "const_int_operand" "n")
2196 (match_operand:SI 3 "arm_not_operand" "rIK")
2198 (clobber (reg:CC CC_REGNUM))]
2199 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2201 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2202 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2203 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2205 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2207 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2208 (match_dup 0) (match_dup 3)))]
2210 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2212 [(set_attr "conds" "clob")
2213 (set_attr "length" "8")]
2217 [(set (match_operand:SI 0 "s_register_operand" "")
2218 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2219 (match_operand:SI 2 "const_int_operand" "")
2220 (match_operand:SI 3 "const_int_operand" "")))
2221 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2223 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2224 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2226 HOST_WIDE_INT temp = INTVAL (operands[2]);
2228 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2229 operands[3] = GEN_INT (32 - temp);
2233 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2235 [(set (match_operand:SI 0 "s_register_operand" "")
2236 (match_operator:SI 1 "shiftable_operator"
2237 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2238 (match_operand:SI 3 "const_int_operand" "")
2239 (match_operand:SI 4 "const_int_operand" ""))
2240 (match_operand:SI 5 "s_register_operand" "")]))
2241 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2243 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2246 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2249 HOST_WIDE_INT temp = INTVAL (operands[3]);
2251 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2252 operands[4] = GEN_INT (32 - temp);
2257 [(set (match_operand:SI 0 "s_register_operand" "")
2258 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2259 (match_operand:SI 2 "const_int_operand" "")
2260 (match_operand:SI 3 "const_int_operand" "")))]
2262 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2263 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2265 HOST_WIDE_INT temp = INTVAL (operands[2]);
2267 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2268 operands[3] = GEN_INT (32 - temp);
2273 [(set (match_operand:SI 0 "s_register_operand" "")
2274 (match_operator:SI 1 "shiftable_operator"
2275 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2276 (match_operand:SI 3 "const_int_operand" "")
2277 (match_operand:SI 4 "const_int_operand" ""))
2278 (match_operand:SI 5 "s_register_operand" "")]))
2279 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2281 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2284 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2287 HOST_WIDE_INT temp = INTVAL (operands[3]);
2289 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2290 operands[4] = GEN_INT (32 - temp);
2294 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2295 ;;; represented by the bitfield, then this will produce incorrect results.
2296 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2297 ;;; which have a real bit-field insert instruction, the truncation happens
2298 ;;; in the bit-field insert instruction itself. Since arm does not have a
2299 ;;; bit-field insert instruction, we would have to emit code here to truncate
2300 ;;; the value before we insert. This loses some of the advantage of having
2301 ;;; this insv pattern, so this pattern needs to be reevalutated.
2303 (define_expand "insv"
2304 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2305 (match_operand:SI 1 "general_operand" "")
2306 (match_operand:SI 2 "general_operand" ""))
2307 (match_operand:SI 3 "reg_or_int_operand" ""))]
2308 "TARGET_ARM || arm_arch_thumb2"
2311 int start_bit = INTVAL (operands[2]);
2312 int width = INTVAL (operands[1]);
2313 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2314 rtx target, subtarget;
2316 if (arm_arch_thumb2)
2318 bool use_bfi = TRUE;
2320 if (GET_CODE (operands[3]) == CONST_INT)
2322 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2326 emit_insn (gen_insv_zero (operands[0], operands[1],
2331 /* See if the set can be done with a single orr instruction. */
2332 if (val == mask && const_ok_for_arm (val << start_bit))
2338 if (GET_CODE (operands[3]) != REG)
2339 operands[3] = force_reg (SImode, operands[3]);
2341 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2347 target = copy_rtx (operands[0]);
2348 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2349 subreg as the final target. */
2350 if (GET_CODE (target) == SUBREG)
2352 subtarget = gen_reg_rtx (SImode);
2353 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2354 < GET_MODE_SIZE (SImode))
2355 target = SUBREG_REG (target);
2360 if (GET_CODE (operands[3]) == CONST_INT)
2362 /* Since we are inserting a known constant, we may be able to
2363 reduce the number of bits that we have to clear so that
2364 the mask becomes simple. */
2365 /* ??? This code does not check to see if the new mask is actually
2366 simpler. It may not be. */
2367 rtx op1 = gen_reg_rtx (SImode);
2368 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2369 start of this pattern. */
2370 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2371 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2373 emit_insn (gen_andsi3 (op1, operands[0],
2374 gen_int_mode (~mask2, SImode)));
2375 emit_insn (gen_iorsi3 (subtarget, op1,
2376 gen_int_mode (op3_value << start_bit, SImode)));
2378 else if (start_bit == 0
2379 && !(const_ok_for_arm (mask)
2380 || const_ok_for_arm (~mask)))
2382 /* A Trick, since we are setting the bottom bits in the word,
2383 we can shift operand[3] up, operand[0] down, OR them together
2384 and rotate the result back again. This takes 3 insns, and
2385 the third might be mergeable into another op. */
2386 /* The shift up copes with the possibility that operand[3] is
2387 wider than the bitfield. */
2388 rtx op0 = gen_reg_rtx (SImode);
2389 rtx op1 = gen_reg_rtx (SImode);
2391 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2392 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2393 emit_insn (gen_iorsi3 (op1, op1, op0));
2394 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2396 else if ((width + start_bit == 32)
2397 && !(const_ok_for_arm (mask)
2398 || const_ok_for_arm (~mask)))
2400 /* Similar trick, but slightly less efficient. */
2402 rtx op0 = gen_reg_rtx (SImode);
2403 rtx op1 = gen_reg_rtx (SImode);
2405 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2406 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2407 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2408 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2412 rtx op0 = gen_int_mode (mask, SImode);
2413 rtx op1 = gen_reg_rtx (SImode);
2414 rtx op2 = gen_reg_rtx (SImode);
2416 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2418 rtx tmp = gen_reg_rtx (SImode);
2420 emit_insn (gen_movsi (tmp, op0));
2424 /* Mask out any bits in operand[3] that are not needed. */
2425 emit_insn (gen_andsi3 (op1, operands[3], op0));
2427 if (GET_CODE (op0) == CONST_INT
2428 && (const_ok_for_arm (mask << start_bit)
2429 || const_ok_for_arm (~(mask << start_bit))))
2431 op0 = gen_int_mode (~(mask << start_bit), SImode);
2432 emit_insn (gen_andsi3 (op2, operands[0], op0));
2436 if (GET_CODE (op0) == CONST_INT)
2438 rtx tmp = gen_reg_rtx (SImode);
2440 emit_insn (gen_movsi (tmp, op0));
2445 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2447 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2451 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2453 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2456 if (subtarget != target)
2458 /* If TARGET is still a SUBREG, then it must be wider than a word,
2459 so we must be careful only to set the subword we were asked to. */
2460 if (GET_CODE (target) == SUBREG)
2461 emit_move_insn (target, subtarget);
2463 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2470 (define_insn "insv_zero"
2471 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2472 (match_operand:SI 1 "const_int_operand" "M")
2473 (match_operand:SI 2 "const_int_operand" "M"))
2477 [(set_attr "length" "4")
2478 (set_attr "predicable" "yes")]
2481 (define_insn "insv_t2"
2482 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2483 (match_operand:SI 1 "const_int_operand" "M")
2484 (match_operand:SI 2 "const_int_operand" "M"))
2485 (match_operand:SI 3 "s_register_operand" "r"))]
2487 "bfi%?\t%0, %3, %2, %1"
2488 [(set_attr "length" "4")
2489 (set_attr "predicable" "yes")]
2492 ; constants for op 2 will never be given to these patterns.
2493 (define_insn_and_split "*anddi_notdi_di"
2494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2495 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2496 (match_operand:DI 2 "s_register_operand" "r,0")))]
2499 "TARGET_32BIT && reload_completed
2500 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2501 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2502 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2503 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2506 operands[3] = gen_highpart (SImode, operands[0]);
2507 operands[0] = gen_lowpart (SImode, operands[0]);
2508 operands[4] = gen_highpart (SImode, operands[1]);
2509 operands[1] = gen_lowpart (SImode, operands[1]);
2510 operands[5] = gen_highpart (SImode, operands[2]);
2511 operands[2] = gen_lowpart (SImode, operands[2]);
2513 [(set_attr "length" "8")
2514 (set_attr "predicable" "yes")]
2517 (define_insn_and_split "*anddi_notzesidi_di"
2518 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2519 (and:DI (not:DI (zero_extend:DI
2520 (match_operand:SI 2 "s_register_operand" "r,r")))
2521 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2524 bic%?\\t%Q0, %Q1, %2
2526 ; (not (zero_extend ...)) allows us to just copy the high word from
2527 ; operand1 to operand0.
2530 && operands[0] != operands[1]"
2531 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2532 (set (match_dup 3) (match_dup 4))]
2535 operands[3] = gen_highpart (SImode, operands[0]);
2536 operands[0] = gen_lowpart (SImode, operands[0]);
2537 operands[4] = gen_highpart (SImode, operands[1]);
2538 operands[1] = gen_lowpart (SImode, operands[1]);
2540 [(set_attr "length" "4,8")
2541 (set_attr "predicable" "yes")]
2544 (define_insn_and_split "*anddi_notsesidi_di"
2545 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2546 (and:DI (not:DI (sign_extend:DI
2547 (match_operand:SI 2 "s_register_operand" "r,r")))
2548 (match_operand:DI 1 "s_register_operand" "0,r")))]
2551 "TARGET_32BIT && reload_completed"
2552 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2553 (set (match_dup 3) (and:SI (not:SI
2554 (ashiftrt:SI (match_dup 2) (const_int 31)))
2558 operands[3] = gen_highpart (SImode, operands[0]);
2559 operands[0] = gen_lowpart (SImode, operands[0]);
2560 operands[4] = gen_highpart (SImode, operands[1]);
2561 operands[1] = gen_lowpart (SImode, operands[1]);
2563 [(set_attr "length" "8")
2564 (set_attr "predicable" "yes")]
2567 (define_insn "andsi_notsi_si"
2568 [(set (match_operand:SI 0 "s_register_operand" "=r")
2569 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2570 (match_operand:SI 1 "s_register_operand" "r")))]
2572 "bic%?\\t%0, %1, %2"
2573 [(set_attr "predicable" "yes")]
2576 (define_insn "bicsi3"
2577 [(set (match_operand:SI 0 "register_operand" "=l")
2578 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2579 (match_operand:SI 2 "register_operand" "0")))]
2582 [(set_attr "length" "2")]
2585 (define_insn "andsi_not_shiftsi_si"
2586 [(set (match_operand:SI 0 "s_register_operand" "=r")
2587 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2588 [(match_operand:SI 2 "s_register_operand" "r")
2589 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2590 (match_operand:SI 1 "s_register_operand" "r")))]
2592 "bic%?\\t%0, %1, %2%S4"
2593 [(set_attr "predicable" "yes")
2594 (set_attr "shift" "2")
2595 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2596 (const_string "alu_shift")
2597 (const_string "alu_shift_reg")))]
2600 (define_insn "*andsi_notsi_si_compare0"
2601 [(set (reg:CC_NOOV CC_REGNUM)
2603 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2604 (match_operand:SI 1 "s_register_operand" "r"))
2606 (set (match_operand:SI 0 "s_register_operand" "=r")
2607 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2609 "bic%.\\t%0, %1, %2"
2610 [(set_attr "conds" "set")]
2613 (define_insn "*andsi_notsi_si_compare0_scratch"
2614 [(set (reg:CC_NOOV CC_REGNUM)
2616 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2617 (match_operand:SI 1 "s_register_operand" "r"))
2619 (clobber (match_scratch:SI 0 "=r"))]
2621 "bic%.\\t%0, %1, %2"
2622 [(set_attr "conds" "set")]
2625 (define_expand "iordi3"
2626 [(set (match_operand:DI 0 "s_register_operand" "")
2627 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2628 (match_operand:DI 2 "neon_logic_op2" "")))]
2633 (define_insn "*iordi3_insn"
2634 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2635 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2636 (match_operand:DI 2 "s_register_operand" "r,r")))]
2637 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2639 [(set_attr "length" "8")
2640 (set_attr "predicable" "yes")]
2643 (define_insn "*iordi_zesidi_di"
2644 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2645 (ior:DI (zero_extend:DI
2646 (match_operand:SI 2 "s_register_operand" "r,r"))
2647 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2650 orr%?\\t%Q0, %Q1, %2
2652 [(set_attr "length" "4,8")
2653 (set_attr "predicable" "yes")]
2656 (define_insn "*iordi_sesidi_di"
2657 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2658 (ior:DI (sign_extend:DI
2659 (match_operand:SI 2 "s_register_operand" "r,r"))
2660 (match_operand:DI 1 "s_register_operand" "0,r")))]
2663 [(set_attr "length" "8")
2664 (set_attr "predicable" "yes")]
2667 (define_expand "iorsi3"
2668 [(set (match_operand:SI 0 "s_register_operand" "")
2669 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2670 (match_operand:SI 2 "reg_or_int_operand" "")))]
2673 if (GET_CODE (operands[2]) == CONST_INT)
2677 arm_split_constant (IOR, SImode, NULL_RTX,
2678 INTVAL (operands[2]), operands[0], operands[1],
2679 optimize && can_create_pseudo_p ());
2682 else /* TARGET_THUMB1 */
2684 rtx tmp = force_reg (SImode, operands[2]);
2685 if (rtx_equal_p (operands[0], operands[1]))
2689 operands[2] = operands[1];
2697 (define_insn_and_split "*arm_iorsi3"
2698 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2699 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2700 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2706 && GET_CODE (operands[2]) == CONST_INT
2707 && !const_ok_for_arm (INTVAL (operands[2]))"
2708 [(clobber (const_int 0))]
2710 arm_split_constant (IOR, SImode, curr_insn,
2711 INTVAL (operands[2]), operands[0], operands[1], 0);
2714 [(set_attr "length" "4,16")
2715 (set_attr "predicable" "yes")]
2718 (define_insn "*thumb1_iorsi3"
2719 [(set (match_operand:SI 0 "register_operand" "=l")
2720 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2721 (match_operand:SI 2 "register_operand" "l")))]
2724 [(set_attr "length" "2")]
2728 [(match_scratch:SI 3 "r")
2729 (set (match_operand:SI 0 "arm_general_register_operand" "")
2730 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2731 (match_operand:SI 2 "const_int_operand" "")))]
2733 && !const_ok_for_arm (INTVAL (operands[2]))
2734 && const_ok_for_arm (~INTVAL (operands[2]))"
2735 [(set (match_dup 3) (match_dup 2))
2736 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2740 (define_insn "*iorsi3_compare0"
2741 [(set (reg:CC_NOOV CC_REGNUM)
2742 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2743 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2745 (set (match_operand:SI 0 "s_register_operand" "=r")
2746 (ior:SI (match_dup 1) (match_dup 2)))]
2748 "orr%.\\t%0, %1, %2"
2749 [(set_attr "conds" "set")]
2752 (define_insn "*iorsi3_compare0_scratch"
2753 [(set (reg:CC_NOOV CC_REGNUM)
2754 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2755 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2757 (clobber (match_scratch:SI 0 "=r"))]
2759 "orr%.\\t%0, %1, %2"
2760 [(set_attr "conds" "set")]
2763 (define_expand "xordi3"
2764 [(set (match_operand:DI 0 "s_register_operand" "")
2765 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2766 (match_operand:DI 2 "s_register_operand" "")))]
2771 (define_insn "*xordi3_insn"
2772 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2773 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2774 (match_operand:DI 2 "s_register_operand" "r,r")))]
2775 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2777 [(set_attr "length" "8")
2778 (set_attr "predicable" "yes")]
2781 (define_insn "*xordi_zesidi_di"
2782 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2783 (xor:DI (zero_extend:DI
2784 (match_operand:SI 2 "s_register_operand" "r,r"))
2785 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2788 eor%?\\t%Q0, %Q1, %2
2790 [(set_attr "length" "4,8")
2791 (set_attr "predicable" "yes")]
2794 (define_insn "*xordi_sesidi_di"
2795 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2796 (xor:DI (sign_extend:DI
2797 (match_operand:SI 2 "s_register_operand" "r,r"))
2798 (match_operand:DI 1 "s_register_operand" "0,r")))]
2801 [(set_attr "length" "8")
2802 (set_attr "predicable" "yes")]
2805 (define_expand "xorsi3"
2806 [(set (match_operand:SI 0 "s_register_operand" "")
2807 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2808 (match_operand:SI 2 "reg_or_int_operand" "")))]
2810 "if (GET_CODE (operands[2]) == CONST_INT)
2814 arm_split_constant (XOR, SImode, NULL_RTX,
2815 INTVAL (operands[2]), operands[0], operands[1],
2816 optimize && can_create_pseudo_p ());
2819 else /* TARGET_THUMB1 */
2821 rtx tmp = force_reg (SImode, operands[2]);
2822 if (rtx_equal_p (operands[0], operands[1]))
2826 operands[2] = operands[1];
2833 (define_insn "*arm_xorsi3"
2834 [(set (match_operand:SI 0 "s_register_operand" "=r")
2835 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2836 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2838 "eor%?\\t%0, %1, %2"
2839 [(set_attr "predicable" "yes")]
2842 (define_insn "*thumb1_xorsi3"
2843 [(set (match_operand:SI 0 "register_operand" "=l")
2844 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2845 (match_operand:SI 2 "register_operand" "l")))]
2848 [(set_attr "length" "2")]
2851 (define_insn "*xorsi3_compare0"
2852 [(set (reg:CC_NOOV CC_REGNUM)
2853 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2854 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2856 (set (match_operand:SI 0 "s_register_operand" "=r")
2857 (xor:SI (match_dup 1) (match_dup 2)))]
2859 "eor%.\\t%0, %1, %2"
2860 [(set_attr "conds" "set")]
2863 (define_insn "*xorsi3_compare0_scratch"
2864 [(set (reg:CC_NOOV CC_REGNUM)
2865 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2866 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2870 [(set_attr "conds" "set")]
2873 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2874 ; (NOT D) we can sometimes merge the final NOT into one of the following
2878 [(set (match_operand:SI 0 "s_register_operand" "")
2879 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2880 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2881 (match_operand:SI 3 "arm_rhs_operand" "")))
2882 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2884 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2885 (not:SI (match_dup 3))))
2886 (set (match_dup 0) (not:SI (match_dup 4)))]
2890 (define_insn "*andsi_iorsi3_notsi"
2891 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2892 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2893 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2894 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2896 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2897 [(set_attr "length" "8")
2898 (set_attr "ce_count" "2")
2899 (set_attr "predicable" "yes")]
2902 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2903 ; insns are available?
2905 [(set (match_operand:SI 0 "s_register_operand" "")
2906 (match_operator:SI 1 "logical_binary_operator"
2907 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2908 (match_operand:SI 3 "const_int_operand" "")
2909 (match_operand:SI 4 "const_int_operand" ""))
2910 (match_operator:SI 9 "logical_binary_operator"
2911 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2912 (match_operand:SI 6 "const_int_operand" ""))
2913 (match_operand:SI 7 "s_register_operand" "")])]))
2914 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2916 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2917 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2920 [(ashift:SI (match_dup 2) (match_dup 4))
2924 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2927 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2931 [(set (match_operand:SI 0 "s_register_operand" "")
2932 (match_operator:SI 1 "logical_binary_operator"
2933 [(match_operator:SI 9 "logical_binary_operator"
2934 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2935 (match_operand:SI 6 "const_int_operand" ""))
2936 (match_operand:SI 7 "s_register_operand" "")])
2937 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2938 (match_operand:SI 3 "const_int_operand" "")
2939 (match_operand:SI 4 "const_int_operand" ""))]))
2940 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2942 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2943 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2946 [(ashift:SI (match_dup 2) (match_dup 4))
2950 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2953 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2957 [(set (match_operand:SI 0 "s_register_operand" "")
2958 (match_operator:SI 1 "logical_binary_operator"
2959 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2960 (match_operand:SI 3 "const_int_operand" "")
2961 (match_operand:SI 4 "const_int_operand" ""))
2962 (match_operator:SI 9 "logical_binary_operator"
2963 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2964 (match_operand:SI 6 "const_int_operand" ""))
2965 (match_operand:SI 7 "s_register_operand" "")])]))
2966 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2968 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2969 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2972 [(ashift:SI (match_dup 2) (match_dup 4))
2976 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2979 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2983 [(set (match_operand:SI 0 "s_register_operand" "")
2984 (match_operator:SI 1 "logical_binary_operator"
2985 [(match_operator:SI 9 "logical_binary_operator"
2986 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2987 (match_operand:SI 6 "const_int_operand" ""))
2988 (match_operand:SI 7 "s_register_operand" "")])
2989 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2990 (match_operand:SI 3 "const_int_operand" "")
2991 (match_operand:SI 4 "const_int_operand" ""))]))
2992 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2994 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2995 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2998 [(ashift:SI (match_dup 2) (match_dup 4))
3002 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3005 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3009 ;; Minimum and maximum insns
3011 (define_expand "smaxsi3"
3013 (set (match_operand:SI 0 "s_register_operand" "")
3014 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3015 (match_operand:SI 2 "arm_rhs_operand" "")))
3016 (clobber (reg:CC CC_REGNUM))])]
3019 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3021 /* No need for a clobber of the condition code register here. */
3022 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3023 gen_rtx_SMAX (SImode, operands[1],
3029 (define_insn "*smax_0"
3030 [(set (match_operand:SI 0 "s_register_operand" "=r")
3031 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3034 "bic%?\\t%0, %1, %1, asr #31"
3035 [(set_attr "predicable" "yes")]
3038 (define_insn "*smax_m1"
3039 [(set (match_operand:SI 0 "s_register_operand" "=r")
3040 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3043 "orr%?\\t%0, %1, %1, asr #31"
3044 [(set_attr "predicable" "yes")]
3047 (define_insn "*arm_smax_insn"
3048 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3049 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3050 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3051 (clobber (reg:CC CC_REGNUM))]
3054 cmp\\t%1, %2\;movlt\\t%0, %2
3055 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3056 [(set_attr "conds" "clob")
3057 (set_attr "length" "8,12")]
3060 (define_expand "sminsi3"
3062 (set (match_operand:SI 0 "s_register_operand" "")
3063 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3064 (match_operand:SI 2 "arm_rhs_operand" "")))
3065 (clobber (reg:CC CC_REGNUM))])]
3068 if (operands[2] == const0_rtx)
3070 /* No need for a clobber of the condition code register here. */
3071 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3072 gen_rtx_SMIN (SImode, operands[1],
3078 (define_insn "*smin_0"
3079 [(set (match_operand:SI 0 "s_register_operand" "=r")
3080 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3083 "and%?\\t%0, %1, %1, asr #31"
3084 [(set_attr "predicable" "yes")]
3087 (define_insn "*arm_smin_insn"
3088 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3089 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3090 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3091 (clobber (reg:CC CC_REGNUM))]
3094 cmp\\t%1, %2\;movge\\t%0, %2
3095 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3096 [(set_attr "conds" "clob")
3097 (set_attr "length" "8,12")]
3100 (define_expand "umaxsi3"
3102 (set (match_operand:SI 0 "s_register_operand" "")
3103 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3104 (match_operand:SI 2 "arm_rhs_operand" "")))
3105 (clobber (reg:CC CC_REGNUM))])]
3110 (define_insn "*arm_umaxsi3"
3111 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3112 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3113 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3114 (clobber (reg:CC CC_REGNUM))]
3117 cmp\\t%1, %2\;movcc\\t%0, %2
3118 cmp\\t%1, %2\;movcs\\t%0, %1
3119 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3120 [(set_attr "conds" "clob")
3121 (set_attr "length" "8,8,12")]
3124 (define_expand "uminsi3"
3126 (set (match_operand:SI 0 "s_register_operand" "")
3127 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3128 (match_operand:SI 2 "arm_rhs_operand" "")))
3129 (clobber (reg:CC CC_REGNUM))])]
3134 (define_insn "*arm_uminsi3"
3135 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3136 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3137 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3138 (clobber (reg:CC CC_REGNUM))]
3141 cmp\\t%1, %2\;movcs\\t%0, %2
3142 cmp\\t%1, %2\;movcc\\t%0, %1
3143 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3144 [(set_attr "conds" "clob")
3145 (set_attr "length" "8,8,12")]
3148 (define_insn "*store_minmaxsi"
3149 [(set (match_operand:SI 0 "memory_operand" "=m")
3150 (match_operator:SI 3 "minmax_operator"
3151 [(match_operand:SI 1 "s_register_operand" "r")
3152 (match_operand:SI 2 "s_register_operand" "r")]))
3153 (clobber (reg:CC CC_REGNUM))]
3156 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3157 operands[1], operands[2]);
3158 output_asm_insn (\"cmp\\t%1, %2\", operands);
3160 output_asm_insn (\"ite\t%d3\", operands);
3161 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3162 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3165 [(set_attr "conds" "clob")
3166 (set (attr "length")
3167 (if_then_else (eq_attr "is_thumb" "yes")
3170 (set_attr "type" "store1")]
3173 ; Reject the frame pointer in operand[1], since reloading this after
3174 ; it has been eliminated can cause carnage.
3175 (define_insn "*minmax_arithsi"
3176 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3177 (match_operator:SI 4 "shiftable_operator"
3178 [(match_operator:SI 5 "minmax_operator"
3179 [(match_operand:SI 2 "s_register_operand" "r,r")
3180 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3181 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3182 (clobber (reg:CC CC_REGNUM))]
3183 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3186 enum rtx_code code = GET_CODE (operands[4]);
3189 if (which_alternative != 0 || operands[3] != const0_rtx
3190 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3195 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3196 operands[2], operands[3]);
3197 output_asm_insn (\"cmp\\t%2, %3\", operands);
3201 output_asm_insn (\"ite\\t%d5\", operands);
3203 output_asm_insn (\"it\\t%d5\", operands);
3205 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3207 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3210 [(set_attr "conds" "clob")
3211 (set (attr "length")
3212 (if_then_else (eq_attr "is_thumb" "yes")
3218 ;; Shift and rotation insns
3220 (define_expand "ashldi3"
3221 [(set (match_operand:DI 0 "s_register_operand" "")
3222 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3223 (match_operand:SI 2 "reg_or_int_operand" "")))]
3226 if (GET_CODE (operands[2]) == CONST_INT)
3228 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3230 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3233 /* Ideally we shouldn't fail here if we could know that operands[1]
3234 ends up already living in an iwmmxt register. Otherwise it's
3235 cheaper to have the alternate code being generated than moving
3236 values to iwmmxt regs and back. */
3239 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3244 (define_insn "arm_ashldi3_1bit"
3245 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3246 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3248 (clobber (reg:CC CC_REGNUM))]
3250 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3251 [(set_attr "conds" "clob")
3252 (set_attr "length" "8")]
3255 (define_expand "ashlsi3"
3256 [(set (match_operand:SI 0 "s_register_operand" "")
3257 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3258 (match_operand:SI 2 "arm_rhs_operand" "")))]
3261 if (GET_CODE (operands[2]) == CONST_INT
3262 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3264 emit_insn (gen_movsi (operands[0], const0_rtx));
3270 (define_insn "*thumb1_ashlsi3"
3271 [(set (match_operand:SI 0 "register_operand" "=l,l")
3272 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3273 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3276 [(set_attr "length" "2")]
3279 (define_expand "ashrdi3"
3280 [(set (match_operand:DI 0 "s_register_operand" "")
3281 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3282 (match_operand:SI 2 "reg_or_int_operand" "")))]
3285 if (GET_CODE (operands[2]) == CONST_INT)
3287 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3289 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3292 /* Ideally we shouldn't fail here if we could know that operands[1]
3293 ends up already living in an iwmmxt register. Otherwise it's
3294 cheaper to have the alternate code being generated than moving
3295 values to iwmmxt regs and back. */
3298 else if (!TARGET_REALLY_IWMMXT)
3303 (define_insn "arm_ashrdi3_1bit"
3304 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3305 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3307 (clobber (reg:CC CC_REGNUM))]
3309 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3310 [(set_attr "conds" "clob")
3311 (set_attr "length" "8")]
3314 (define_expand "ashrsi3"
3315 [(set (match_operand:SI 0 "s_register_operand" "")
3316 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3317 (match_operand:SI 2 "arm_rhs_operand" "")))]
3320 if (GET_CODE (operands[2]) == CONST_INT
3321 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3322 operands[2] = GEN_INT (31);
3326 (define_insn "*thumb1_ashrsi3"
3327 [(set (match_operand:SI 0 "register_operand" "=l,l")
3328 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3329 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3332 [(set_attr "length" "2")]
3335 (define_expand "lshrdi3"
3336 [(set (match_operand:DI 0 "s_register_operand" "")
3337 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3338 (match_operand:SI 2 "reg_or_int_operand" "")))]
3341 if (GET_CODE (operands[2]) == CONST_INT)
3343 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3345 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3348 /* Ideally we shouldn't fail here if we could know that operands[1]
3349 ends up already living in an iwmmxt register. Otherwise it's
3350 cheaper to have the alternate code being generated than moving
3351 values to iwmmxt regs and back. */
3354 else if (!TARGET_REALLY_IWMMXT)
3359 (define_insn "arm_lshrdi3_1bit"
3360 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3361 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3363 (clobber (reg:CC CC_REGNUM))]
3365 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3366 [(set_attr "conds" "clob")
3367 (set_attr "length" "8")]
3370 (define_expand "lshrsi3"
3371 [(set (match_operand:SI 0 "s_register_operand" "")
3372 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3373 (match_operand:SI 2 "arm_rhs_operand" "")))]
3376 if (GET_CODE (operands[2]) == CONST_INT
3377 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3379 emit_insn (gen_movsi (operands[0], const0_rtx));
3385 (define_insn "*thumb1_lshrsi3"
3386 [(set (match_operand:SI 0 "register_operand" "=l,l")
3387 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3388 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3391 [(set_attr "length" "2")]
3394 (define_expand "rotlsi3"
3395 [(set (match_operand:SI 0 "s_register_operand" "")
3396 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3397 (match_operand:SI 2 "reg_or_int_operand" "")))]
3400 if (GET_CODE (operands[2]) == CONST_INT)
3401 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3404 rtx reg = gen_reg_rtx (SImode);
3405 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3411 (define_expand "rotrsi3"
3412 [(set (match_operand:SI 0 "s_register_operand" "")
3413 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3414 (match_operand:SI 2 "arm_rhs_operand" "")))]
3419 if (GET_CODE (operands[2]) == CONST_INT
3420 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3421 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3423 else /* TARGET_THUMB1 */
3425 if (GET_CODE (operands [2]) == CONST_INT)
3426 operands [2] = force_reg (SImode, operands[2]);
3431 (define_insn "*thumb1_rotrsi3"
3432 [(set (match_operand:SI 0 "register_operand" "=l")
3433 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3434 (match_operand:SI 2 "register_operand" "l")))]
3437 [(set_attr "length" "2")]
3440 (define_insn "*arm_shiftsi3"
3441 [(set (match_operand:SI 0 "s_register_operand" "=r")
3442 (match_operator:SI 3 "shift_operator"
3443 [(match_operand:SI 1 "s_register_operand" "r")
3444 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3446 "* return arm_output_shift(operands, 0);"
3447 [(set_attr "predicable" "yes")
3448 (set_attr "shift" "1")
3449 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3450 (const_string "alu_shift")
3451 (const_string "alu_shift_reg")))]
3454 (define_insn "*shiftsi3_compare0"
3455 [(set (reg:CC_NOOV CC_REGNUM)
3456 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3457 [(match_operand:SI 1 "s_register_operand" "r")
3458 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3460 (set (match_operand:SI 0 "s_register_operand" "=r")
3461 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3463 "* return arm_output_shift(operands, 1);"
3464 [(set_attr "conds" "set")
3465 (set_attr "shift" "1")
3466 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3467 (const_string "alu_shift")
3468 (const_string "alu_shift_reg")))]
3471 (define_insn "*shiftsi3_compare0_scratch"
3472 [(set (reg:CC_NOOV CC_REGNUM)
3473 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3474 [(match_operand:SI 1 "s_register_operand" "r")
3475 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3477 (clobber (match_scratch:SI 0 "=r"))]
3479 "* return arm_output_shift(operands, 1);"
3480 [(set_attr "conds" "set")
3481 (set_attr "shift" "1")]
3484 (define_insn "*arm_notsi_shiftsi"
3485 [(set (match_operand:SI 0 "s_register_operand" "=r")
3486 (not:SI (match_operator:SI 3 "shift_operator"
3487 [(match_operand:SI 1 "s_register_operand" "r")
3488 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3491 [(set_attr "predicable" "yes")
3492 (set_attr "shift" "1")
3493 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3494 (const_string "alu_shift")
3495 (const_string "alu_shift_reg")))]
3498 (define_insn "*arm_notsi_shiftsi_compare0"
3499 [(set (reg:CC_NOOV CC_REGNUM)
3500 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3501 [(match_operand:SI 1 "s_register_operand" "r")
3502 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3504 (set (match_operand:SI 0 "s_register_operand" "=r")
3505 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3508 [(set_attr "conds" "set")
3509 (set_attr "shift" "1")
3510 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3511 (const_string "alu_shift")
3512 (const_string "alu_shift_reg")))]
3515 (define_insn "*arm_not_shiftsi_compare0_scratch"
3516 [(set (reg:CC_NOOV CC_REGNUM)
3517 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3518 [(match_operand:SI 1 "s_register_operand" "r")
3519 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3521 (clobber (match_scratch:SI 0 "=r"))]
3524 [(set_attr "conds" "set")
3525 (set_attr "shift" "1")
3526 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3527 (const_string "alu_shift")
3528 (const_string "alu_shift_reg")))]
3531 ;; We don't really have extzv, but defining this using shifts helps
3532 ;; to reduce register pressure later on.
3534 (define_expand "extzv"
3536 (ashift:SI (match_operand:SI 1 "register_operand" "")
3537 (match_operand:SI 2 "const_int_operand" "")))
3538 (set (match_operand:SI 0 "register_operand" "")
3539 (lshiftrt:SI (match_dup 4)
3540 (match_operand:SI 3 "const_int_operand" "")))]
3541 "TARGET_THUMB1 || arm_arch_thumb2"
3544 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3545 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3547 if (arm_arch_thumb2)
3549 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3554 operands[3] = GEN_INT (rshift);
3558 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3562 operands[2] = GEN_INT (lshift);
3563 operands[4] = gen_reg_rtx (SImode);
3568 [(set (match_operand:SI 0 "s_register_operand" "=r")
3569 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3570 (match_operand:SI 2 "const_int_operand" "M")
3571 (match_operand:SI 3 "const_int_operand" "M")))]
3573 "sbfx%?\t%0, %1, %3, %2"
3574 [(set_attr "length" "4")
3575 (set_attr "predicable" "yes")]
3578 (define_insn "extzv_t2"
3579 [(set (match_operand:SI 0 "s_register_operand" "=r")
3580 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3581 (match_operand:SI 2 "const_int_operand" "M")
3582 (match_operand:SI 3 "const_int_operand" "M")))]
3584 "ubfx%?\t%0, %1, %3, %2"
3585 [(set_attr "length" "4")
3586 (set_attr "predicable" "yes")]
3590 ;; Unary arithmetic insns
3592 (define_expand "negdi2"
3594 [(set (match_operand:DI 0 "s_register_operand" "")
3595 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3596 (clobber (reg:CC CC_REGNUM))])]
3601 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3602 ;; The first alternative allows the common case of a *full* overlap.
3603 (define_insn "*arm_negdi2"
3604 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3605 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3606 (clobber (reg:CC CC_REGNUM))]
3608 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3609 [(set_attr "conds" "clob")
3610 (set_attr "length" "8")]
3613 (define_insn "*thumb1_negdi2"
3614 [(set (match_operand:DI 0 "register_operand" "=&l")
3615 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3616 (clobber (reg:CC CC_REGNUM))]
3618 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3619 [(set_attr "length" "6")]
3622 (define_expand "negsi2"
3623 [(set (match_operand:SI 0 "s_register_operand" "")
3624 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3629 (define_insn "*arm_negsi2"
3630 [(set (match_operand:SI 0 "s_register_operand" "=r")
3631 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3633 "rsb%?\\t%0, %1, #0"
3634 [(set_attr "predicable" "yes")]
3637 (define_insn "*thumb1_negsi2"
3638 [(set (match_operand:SI 0 "register_operand" "=l")
3639 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3642 [(set_attr "length" "2")]
3645 (define_expand "negsf2"
3646 [(set (match_operand:SF 0 "s_register_operand" "")
3647 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3648 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3652 (define_expand "negdf2"
3653 [(set (match_operand:DF 0 "s_register_operand" "")
3654 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3655 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3658 ;; abssi2 doesn't really clobber the condition codes if a different register
3659 ;; is being set. To keep things simple, assume during rtl manipulations that
3660 ;; it does, but tell the final scan operator the truth. Similarly for
3663 (define_expand "abssi2"
3665 [(set (match_operand:SI 0 "s_register_operand" "")
3666 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3667 (clobber (match_dup 2))])]
3671 operands[2] = gen_rtx_SCRATCH (SImode);
3673 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3676 (define_insn "*arm_abssi2"
3677 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3678 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3679 (clobber (reg:CC CC_REGNUM))]
3682 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3683 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3684 [(set_attr "conds" "clob,*")
3685 (set_attr "shift" "1")
3686 ;; predicable can't be set based on the variant, so left as no
3687 (set_attr "length" "8")]
3690 (define_insn_and_split "*thumb1_abssi2"
3691 [(set (match_operand:SI 0 "s_register_operand" "=l")
3692 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3693 (clobber (match_scratch:SI 2 "=&l"))]
3696 "TARGET_THUMB1 && reload_completed"
3697 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3698 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3699 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3701 [(set_attr "length" "6")]
3704 (define_insn "*arm_neg_abssi2"
3705 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3706 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3707 (clobber (reg:CC CC_REGNUM))]
3710 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3711 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3712 [(set_attr "conds" "clob,*")
3713 (set_attr "shift" "1")
3714 ;; predicable can't be set based on the variant, so left as no
3715 (set_attr "length" "8")]
3718 (define_insn_and_split "*thumb1_neg_abssi2"
3719 [(set (match_operand:SI 0 "s_register_operand" "=l")
3720 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3721 (clobber (match_scratch:SI 2 "=&l"))]
3724 "TARGET_THUMB1 && reload_completed"
3725 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3726 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3727 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3729 [(set_attr "length" "6")]
3732 (define_expand "abssf2"
3733 [(set (match_operand:SF 0 "s_register_operand" "")
3734 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3735 "TARGET_32BIT && TARGET_HARD_FLOAT"
3738 (define_expand "absdf2"
3739 [(set (match_operand:DF 0 "s_register_operand" "")
3740 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3741 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3744 (define_expand "sqrtsf2"
3745 [(set (match_operand:SF 0 "s_register_operand" "")
3746 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3747 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3750 (define_expand "sqrtdf2"
3751 [(set (match_operand:DF 0 "s_register_operand" "")
3752 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3753 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3756 (define_insn_and_split "one_cmpldi2"
3757 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3758 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3761 "TARGET_32BIT && reload_completed"
3762 [(set (match_dup 0) (not:SI (match_dup 1)))
3763 (set (match_dup 2) (not:SI (match_dup 3)))]
3766 operands[2] = gen_highpart (SImode, operands[0]);
3767 operands[0] = gen_lowpart (SImode, operands[0]);
3768 operands[3] = gen_highpart (SImode, operands[1]);
3769 operands[1] = gen_lowpart (SImode, operands[1]);
3771 [(set_attr "length" "8")
3772 (set_attr "predicable" "yes")]
3775 (define_expand "one_cmplsi2"
3776 [(set (match_operand:SI 0 "s_register_operand" "")
3777 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3782 (define_insn "*arm_one_cmplsi2"
3783 [(set (match_operand:SI 0 "s_register_operand" "=r")
3784 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3787 [(set_attr "predicable" "yes")]
3790 (define_insn "*thumb1_one_cmplsi2"
3791 [(set (match_operand:SI 0 "register_operand" "=l")
3792 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3795 [(set_attr "length" "2")]
3798 (define_insn "*notsi_compare0"
3799 [(set (reg:CC_NOOV CC_REGNUM)
3800 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3802 (set (match_operand:SI 0 "s_register_operand" "=r")
3803 (not:SI (match_dup 1)))]
3806 [(set_attr "conds" "set")]
3809 (define_insn "*notsi_compare0_scratch"
3810 [(set (reg:CC_NOOV CC_REGNUM)
3811 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3813 (clobber (match_scratch:SI 0 "=r"))]
3816 [(set_attr "conds" "set")]
3819 ;; Fixed <--> Floating conversion insns
3821 (define_expand "floatsihf2"
3822 [(set (match_operand:HF 0 "general_operand" "")
3823 (float:HF (match_operand:SI 1 "general_operand" "")))]
3827 rtx op1 = gen_reg_rtx (SFmode);
3828 expand_float (op1, operands[1], 0);
3829 op1 = convert_to_mode (HFmode, op1, 0);
3830 emit_move_insn (operands[0], op1);
3835 (define_expand "floatdihf2"
3836 [(set (match_operand:HF 0 "general_operand" "")
3837 (float:HF (match_operand:DI 1 "general_operand" "")))]
3841 rtx op1 = gen_reg_rtx (SFmode);
3842 expand_float (op1, operands[1], 0);
3843 op1 = convert_to_mode (HFmode, op1, 0);
3844 emit_move_insn (operands[0], op1);
3849 (define_expand "floatsisf2"
3850 [(set (match_operand:SF 0 "s_register_operand" "")
3851 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3852 "TARGET_32BIT && TARGET_HARD_FLOAT"
3854 if (TARGET_MAVERICK)
3856 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3861 (define_expand "floatsidf2"
3862 [(set (match_operand:DF 0 "s_register_operand" "")
3863 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3864 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3866 if (TARGET_MAVERICK)
3868 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3873 (define_expand "fix_trunchfsi2"
3874 [(set (match_operand:SI 0 "general_operand" "")
3875 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3879 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3880 expand_fix (operands[0], op1, 0);
3885 (define_expand "fix_trunchfdi2"
3886 [(set (match_operand:DI 0 "general_operand" "")
3887 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3891 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3892 expand_fix (operands[0], op1, 0);
3897 (define_expand "fix_truncsfsi2"
3898 [(set (match_operand:SI 0 "s_register_operand" "")
3899 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3900 "TARGET_32BIT && TARGET_HARD_FLOAT"
3902 if (TARGET_MAVERICK)
3904 if (!cirrus_fp_register (operands[0], SImode))
3905 operands[0] = force_reg (SImode, operands[0]);
3906 if (!cirrus_fp_register (operands[1], SFmode))
3907 operands[1] = force_reg (SFmode, operands[0]);
3908 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3913 (define_expand "fix_truncdfsi2"
3914 [(set (match_operand:SI 0 "s_register_operand" "")
3915 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3916 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3918 if (TARGET_MAVERICK)
3920 if (!cirrus_fp_register (operands[1], DFmode))
3921 operands[1] = force_reg (DFmode, operands[0]);
3922 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3929 (define_expand "truncdfsf2"
3930 [(set (match_operand:SF 0 "s_register_operand" "")
3932 (match_operand:DF 1 "s_register_operand" "")))]
3933 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3937 /* DFmode -> HFmode conversions have to go through SFmode. */
3938 (define_expand "truncdfhf2"
3939 [(set (match_operand:HF 0 "general_operand" "")
3941 (match_operand:DF 1 "general_operand" "")))]
3946 op1 = convert_to_mode (SFmode, operands[1], 0);
3947 op1 = convert_to_mode (HFmode, op1, 0);
3948 emit_move_insn (operands[0], op1);
3953 ;; Zero and sign extension instructions.
3955 (define_expand "zero_extendsidi2"
3956 [(set (match_operand:DI 0 "s_register_operand" "")
3957 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3962 (define_insn "*arm_zero_extendsidi2"
3963 [(set (match_operand:DI 0 "s_register_operand" "=r")
3964 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3967 if (REGNO (operands[1])
3968 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3969 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3970 return \"mov%?\\t%R0, #0\";
3972 [(set_attr "length" "8")
3973 (set_attr "predicable" "yes")]
3976 (define_expand "zero_extendqidi2"
3977 [(set (match_operand:DI 0 "s_register_operand" "")
3978 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3983 (define_insn "*arm_zero_extendqidi2"
3984 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3985 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3988 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3989 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3990 [(set_attr "length" "8")
3991 (set_attr "predicable" "yes")
3992 (set_attr "type" "*,load_byte")
3993 (set_attr "pool_range" "*,4092")
3994 (set_attr "neg_pool_range" "*,4084")]
3997 (define_expand "extendsidi2"
3998 [(set (match_operand:DI 0 "s_register_operand" "")
3999 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
4004 (define_insn "*arm_extendsidi2"
4005 [(set (match_operand:DI 0 "s_register_operand" "=r")
4006 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
4009 if (REGNO (operands[1])
4010 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
4011 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4012 return \"mov%?\\t%R0, %Q0, asr #31\";
4014 [(set_attr "length" "8")
4015 (set_attr "shift" "1")
4016 (set_attr "predicable" "yes")]
4019 (define_expand "zero_extendhisi2"
4020 [(set (match_operand:SI 0 "s_register_operand" "")
4021 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4024 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4026 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4029 if (!arm_arch6 && !MEM_P (operands[1]))
4031 rtx t = gen_lowpart (SImode, operands[1]);
4032 rtx tmp = gen_reg_rtx (SImode);
4033 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4034 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4040 [(set (match_operand:SI 0 "register_operand" "")
4041 (zero_extend:SI (match_operand:HI 1 "register_operand" "l,m")))]
4042 "!TARGET_THUMB2 && !arm_arch6"
4043 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4044 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4046 operands[2] = gen_lowpart (SImode, operands[1]);
4049 (define_insn "*thumb1_zero_extendhisi2"
4050 [(set (match_operand:SI 0 "register_operand" "=l,l")
4051 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4056 if (which_alternative == 0 && arm_arch6)
4057 return \"uxth\\t%0, %1\";
4058 if (which_alternative == 0)
4061 mem = XEXP (operands[1], 0);
4063 if (GET_CODE (mem) == CONST)
4064 mem = XEXP (mem, 0);
4066 if (GET_CODE (mem) == LABEL_REF)
4067 return \"ldr\\t%0, %1\";
4069 if (GET_CODE (mem) == PLUS)
4071 rtx a = XEXP (mem, 0);
4072 rtx b = XEXP (mem, 1);
4074 /* This can happen due to bugs in reload. */
4075 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4078 ops[0] = operands[0];
4081 output_asm_insn (\"mov %0, %1\", ops);
4083 XEXP (mem, 0) = operands[0];
4086 else if ( GET_CODE (a) == LABEL_REF
4087 && GET_CODE (b) == CONST_INT)
4088 return \"ldr\\t%0, %1\";
4091 return \"ldrh\\t%0, %1\";
4093 [(set_attr_alternative "length"
4094 [(if_then_else (eq_attr "is_arch6" "yes")
4095 (const_int 2) (const_int 4))
4097 (set_attr "type" "alu_shift,load_byte")
4098 (set_attr "pool_range" "*,60")]
4101 (define_insn "*arm_zero_extendhisi2"
4102 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4103 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4104 "TARGET_ARM && arm_arch4 && !arm_arch6"
4108 [(set_attr "type" "alu_shift,load_byte")
4109 (set_attr "predicable" "yes")
4110 (set_attr "pool_range" "*,256")
4111 (set_attr "neg_pool_range" "*,244")]
4114 (define_insn "*arm_zero_extendhisi2_v6"
4115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4116 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4117 "TARGET_ARM && arm_arch6"
4121 [(set_attr "type" "alu_shift,load_byte")
4122 (set_attr "predicable" "yes")
4123 (set_attr "pool_range" "*,256")
4124 (set_attr "neg_pool_range" "*,244")]
4127 (define_insn "*arm_zero_extendhisi2addsi"
4128 [(set (match_operand:SI 0 "s_register_operand" "=r")
4129 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4130 (match_operand:SI 2 "s_register_operand" "r")))]
4132 "uxtah%?\\t%0, %2, %1"
4133 [(set_attr "type" "alu_shift")
4134 (set_attr "predicable" "yes")]
4137 (define_expand "zero_extendqisi2"
4138 [(set (match_operand:SI 0 "s_register_operand" "")
4139 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4142 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4144 emit_insn (gen_andsi3 (operands[0],
4145 gen_lowpart (SImode, operands[1]),
4149 if (!arm_arch6 && !MEM_P (operands[1]))
4151 rtx t = gen_lowpart (SImode, operands[1]);
4152 rtx tmp = gen_reg_rtx (SImode);
4153 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4154 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4160 [(set (match_operand:SI 0 "register_operand" "")
4161 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4163 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4164 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4166 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4169 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4174 (define_insn "*thumb1_zero_extendqisi2"
4175 [(set (match_operand:SI 0 "register_operand" "=l,l")
4176 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4177 "TARGET_THUMB1 && !arm_arch6"
4181 [(set_attr "length" "4,2")
4182 (set_attr "type" "alu_shift,load_byte")
4183 (set_attr "pool_range" "*,32")]
4186 (define_insn "*thumb1_zero_extendqisi2_v6"
4187 [(set (match_operand:SI 0 "register_operand" "=l,l")
4188 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4189 "TARGET_THUMB1 && arm_arch6"
4193 [(set_attr "length" "2,2")
4194 (set_attr "type" "alu_shift,load_byte")
4195 (set_attr "pool_range" "*,32")]
4198 (define_insn "*arm_zero_extendqisi2"
4199 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4200 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4201 "TARGET_ARM && !arm_arch6"
4204 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4205 [(set_attr "length" "8,4")
4206 (set_attr "type" "alu_shift,load_byte")
4207 (set_attr "predicable" "yes")
4208 (set_attr "pool_range" "*,4096")
4209 (set_attr "neg_pool_range" "*,4084")]
4212 (define_insn "*arm_zero_extendqisi2_v6"
4213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4214 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4215 "TARGET_ARM && arm_arch6"
4218 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4219 [(set_attr "type" "alu_shift,load_byte")
4220 (set_attr "predicable" "yes")
4221 (set_attr "pool_range" "*,4096")
4222 (set_attr "neg_pool_range" "*,4084")]
4225 (define_insn "*arm_zero_extendqisi2addsi"
4226 [(set (match_operand:SI 0 "s_register_operand" "=r")
4227 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4228 (match_operand:SI 2 "s_register_operand" "r")))]
4230 "uxtab%?\\t%0, %2, %1"
4231 [(set_attr "predicable" "yes")
4232 (set_attr "insn" "xtab")
4233 (set_attr "type" "alu_shift")]
4237 [(set (match_operand:SI 0 "s_register_operand" "")
4238 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4239 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4240 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4241 [(set (match_dup 2) (match_dup 1))
4242 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4247 [(set (match_operand:SI 0 "s_register_operand" "")
4248 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4249 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4250 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4251 [(set (match_dup 2) (match_dup 1))
4252 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4256 (define_code_iterator ior_xor [ior xor])
4259 [(set (match_operand:SI 0 "s_register_operand" "")
4260 (ior_xor:SI (and:SI (ashift:SI
4261 (match_operand:SI 1 "s_register_operand" "")
4262 (match_operand:SI 2 "const_int_operand" ""))
4263 (match_operand:SI 3 "const_int_operand" ""))
4265 (match_operator 5 "subreg_lowpart_operator"
4266 [(match_operand:SI 4 "s_register_operand" "")]))))]
4268 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4269 == (GET_MODE_MASK (GET_MODE (operands[5]))
4270 & (GET_MODE_MASK (GET_MODE (operands[5]))
4271 << (INTVAL (operands[2])))))"
4272 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4274 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4275 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4278 (define_insn "*compareqi_eq0"
4279 [(set (reg:CC_Z CC_REGNUM)
4280 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4284 [(set_attr "conds" "set")]
4287 (define_expand "extendhisi2"
4288 [(set (match_operand:SI 0 "s_register_operand" "")
4289 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4294 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4297 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4299 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4303 if (!arm_arch6 && !MEM_P (operands[1]))
4305 rtx t = gen_lowpart (SImode, operands[1]);
4306 rtx tmp = gen_reg_rtx (SImode);
4307 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4308 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4315 [(set (match_operand:SI 0 "register_operand" "")
4316 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4317 (clobber (match_scratch:SI 2 ""))])]
4319 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4320 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4322 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4325 ;; We used to have an early-clobber on the scratch register here.
4326 ;; However, there's a bug somewhere in reload which means that this
4327 ;; can be partially ignored during spill allocation if the memory
4328 ;; address also needs reloading; this causes us to die later on when
4329 ;; we try to verify the operands. Fortunately, we don't really need
4330 ;; the early-clobber: we can always use operand 0 if operand 2
4331 ;; overlaps the address.
4332 (define_insn "thumb1_extendhisi2"
4333 [(set (match_operand:SI 0 "register_operand" "=l,l")
4334 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4335 (clobber (match_scratch:SI 2 "=X,l"))]
4342 if (which_alternative == 0 && !arm_arch6)
4344 if (which_alternative == 0)
4345 return \"sxth\\t%0, %1\";
4347 mem = XEXP (operands[1], 0);
4349 /* This code used to try to use 'V', and fix the address only if it was
4350 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4351 range of QImode offsets, and offsettable_address_p does a QImode
4354 if (GET_CODE (mem) == CONST)
4355 mem = XEXP (mem, 0);
4357 if (GET_CODE (mem) == LABEL_REF)
4358 return \"ldr\\t%0, %1\";
4360 if (GET_CODE (mem) == PLUS)
4362 rtx a = XEXP (mem, 0);
4363 rtx b = XEXP (mem, 1);
4365 if (GET_CODE (a) == LABEL_REF
4366 && GET_CODE (b) == CONST_INT)
4367 return \"ldr\\t%0, %1\";
4369 if (GET_CODE (b) == REG)
4370 return \"ldrsh\\t%0, %1\";
4378 ops[2] = const0_rtx;
4381 gcc_assert (GET_CODE (ops[1]) == REG);
4383 ops[0] = operands[0];
4384 if (reg_mentioned_p (operands[2], ops[1]))
4387 ops[3] = operands[2];
4388 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4391 [(set_attr_alternative "length"
4392 [(if_then_else (eq_attr "is_arch6" "yes")
4393 (const_int 2) (const_int 4))
4395 (set_attr "type" "alu_shift,load_byte")
4396 (set_attr "pool_range" "*,1020")]
4399 ;; This pattern will only be used when ldsh is not available
4400 (define_expand "extendhisi2_mem"
4401 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4403 (zero_extend:SI (match_dup 7)))
4404 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4405 (set (match_operand:SI 0 "" "")
4406 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4411 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4413 mem1 = change_address (operands[1], QImode, addr);
4414 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4415 operands[0] = gen_lowpart (SImode, operands[0]);
4417 operands[2] = gen_reg_rtx (SImode);
4418 operands[3] = gen_reg_rtx (SImode);
4419 operands[6] = gen_reg_rtx (SImode);
4422 if (BYTES_BIG_ENDIAN)
4424 operands[4] = operands[2];
4425 operands[5] = operands[3];
4429 operands[4] = operands[3];
4430 operands[5] = operands[2];
4436 [(set (match_operand:SI 0 "register_operand" "")
4437 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4439 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4440 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4442 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4445 (define_insn "*arm_extendhisi2"
4446 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4447 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4448 "TARGET_ARM && arm_arch4 && !arm_arch6"
4452 [(set_attr "length" "8,4")
4453 (set_attr "type" "alu_shift,load_byte")
4454 (set_attr "predicable" "yes")
4455 (set_attr "pool_range" "*,256")
4456 (set_attr "neg_pool_range" "*,244")]
4459 ;; ??? Check Thumb-2 pool range
4460 (define_insn "*arm_extendhisi2_v6"
4461 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4462 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4463 "TARGET_32BIT && arm_arch6"
4467 [(set_attr "type" "alu_shift,load_byte")
4468 (set_attr "predicable" "yes")
4469 (set_attr "pool_range" "*,256")
4470 (set_attr "neg_pool_range" "*,244")]
4473 (define_insn "*arm_extendhisi2addsi"
4474 [(set (match_operand:SI 0 "s_register_operand" "=r")
4475 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4476 (match_operand:SI 2 "s_register_operand" "r")))]
4478 "sxtah%?\\t%0, %2, %1"
4481 (define_expand "extendqihi2"
4483 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4485 (set (match_operand:HI 0 "s_register_operand" "")
4486 (ashiftrt:SI (match_dup 2)
4491 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4493 emit_insn (gen_rtx_SET (VOIDmode,
4495 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4498 if (!s_register_operand (operands[1], QImode))
4499 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4500 operands[0] = gen_lowpart (SImode, operands[0]);
4501 operands[1] = gen_lowpart (SImode, operands[1]);
4502 operands[2] = gen_reg_rtx (SImode);
4506 (define_insn "*arm_extendqihi_insn"
4507 [(set (match_operand:HI 0 "s_register_operand" "=r")
4508 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4509 "TARGET_ARM && arm_arch4"
4510 "ldr%(sb%)\\t%0, %1"
4511 [(set_attr "type" "load_byte")
4512 (set_attr "predicable" "yes")
4513 (set_attr "pool_range" "256")
4514 (set_attr "neg_pool_range" "244")]
4517 (define_expand "extendqisi2"
4518 [(set (match_operand:SI 0 "s_register_operand" "")
4519 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4522 if (!arm_arch4 && MEM_P (operands[1]))
4523 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4525 if (!arm_arch6 && !MEM_P (operands[1]))
4527 rtx t = gen_lowpart (SImode, operands[1]);
4528 rtx tmp = gen_reg_rtx (SImode);
4529 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4530 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4536 [(set (match_operand:SI 0 "register_operand" "")
4537 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4539 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4540 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4542 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4545 (define_insn "*arm_extendqisi"
4546 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4547 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4548 "TARGET_ARM && arm_arch4 && !arm_arch6"
4552 [(set_attr "length" "8,4")
4553 (set_attr "type" "alu_shift,load_byte")
4554 (set_attr "predicable" "yes")
4555 (set_attr "pool_range" "*,256")
4556 (set_attr "neg_pool_range" "*,244")]
4559 (define_insn "*arm_extendqisi_v6"
4560 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4562 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4563 "TARGET_ARM && arm_arch6"
4567 [(set_attr "type" "alu_shift,load_byte")
4568 (set_attr "predicable" "yes")
4569 (set_attr "pool_range" "*,256")
4570 (set_attr "neg_pool_range" "*,244")]
4573 (define_insn "*arm_extendqisi2addsi"
4574 [(set (match_operand:SI 0 "s_register_operand" "=r")
4575 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4576 (match_operand:SI 2 "s_register_operand" "r")))]
4578 "sxtab%?\\t%0, %2, %1"
4579 [(set_attr "type" "alu_shift")
4580 (set_attr "insn" "xtab")
4581 (set_attr "predicable" "yes")]
4585 [(set (match_operand:SI 0 "register_operand" "")
4586 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4587 "TARGET_THUMB1 && reload_completed"
4588 [(set (match_dup 0) (match_dup 2))
4589 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4591 rtx addr = XEXP (operands[1], 0);
4593 if (GET_CODE (addr) == CONST)
4594 addr = XEXP (addr, 0);
4596 if (GET_CODE (addr) == PLUS
4597 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4598 /* No split necessary. */
4601 if (GET_CODE (addr) == PLUS
4602 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4605 if (reg_overlap_mentioned_p (operands[0], addr))
4607 rtx t = gen_lowpart (QImode, operands[0]);
4608 emit_move_insn (t, operands[1]);
4609 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4615 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4616 operands[2] = const0_rtx;
4618 else if (GET_CODE (addr) != PLUS)
4620 else if (REG_P (XEXP (addr, 0)))
4622 operands[2] = XEXP (addr, 1);
4623 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4627 operands[2] = XEXP (addr, 0);
4628 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4631 operands[3] = change_address (operands[1], QImode, addr);
4634 (define_insn "thumb1_extendqisi2"
4635 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4636 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4641 if (which_alternative == 0 && arm_arch6)
4642 return "sxtb\\t%0, %1";
4643 if (which_alternative == 0)
4646 addr = XEXP (operands[1], 0);
4647 if (GET_CODE (addr) == PLUS
4648 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4649 return "ldrsb\\t%0, %1";
4653 [(set_attr_alternative "length"
4654 [(if_then_else (eq_attr "is_arch6" "yes")
4655 (const_int 2) (const_int 4))
4657 (if_then_else (eq_attr "is_arch6" "yes")
4658 (const_int 4) (const_int 6))])
4659 (set_attr "type" "alu_shift,load_byte,load_byte")]
4662 (define_expand "extendsfdf2"
4663 [(set (match_operand:DF 0 "s_register_operand" "")
4664 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4665 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4669 /* HFmode -> DFmode conversions have to go through SFmode. */
4670 (define_expand "extendhfdf2"
4671 [(set (match_operand:DF 0 "general_operand" "")
4672 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4677 op1 = convert_to_mode (SFmode, operands[1], 0);
4678 op1 = convert_to_mode (DFmode, op1, 0);
4679 emit_insn (gen_movdf (operands[0], op1));
4684 ;; Move insns (including loads and stores)
4686 ;; XXX Just some ideas about movti.
4687 ;; I don't think these are a good idea on the arm, there just aren't enough
4689 ;;(define_expand "loadti"
4690 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4691 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4694 ;;(define_expand "storeti"
4695 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4696 ;; (match_operand:TI 1 "s_register_operand" ""))]
4699 ;;(define_expand "movti"
4700 ;; [(set (match_operand:TI 0 "general_operand" "")
4701 ;; (match_operand:TI 1 "general_operand" ""))]
4707 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4708 ;; operands[1] = copy_to_reg (operands[1]);
4709 ;; if (GET_CODE (operands[0]) == MEM)
4710 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4711 ;; else if (GET_CODE (operands[1]) == MEM)
4712 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4716 ;; emit_insn (insn);
4720 ;; Recognize garbage generated above.
4723 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4724 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4728 ;; register mem = (which_alternative < 3);
4729 ;; register const char *template;
4731 ;; operands[mem] = XEXP (operands[mem], 0);
4732 ;; switch (which_alternative)
4734 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4735 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4736 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4737 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4738 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4739 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4741 ;; output_asm_insn (template, operands);
4745 (define_expand "movdi"
4746 [(set (match_operand:DI 0 "general_operand" "")
4747 (match_operand:DI 1 "general_operand" ""))]
4750 if (can_create_pseudo_p ())
4752 if (GET_CODE (operands[0]) != REG)
4753 operands[1] = force_reg (DImode, operands[1]);
4758 (define_insn "*arm_movdi"
4759 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4760 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4762 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4764 && ( register_operand (operands[0], DImode)
4765 || register_operand (operands[1], DImode))"
4767 switch (which_alternative)
4774 return output_move_double (operands);
4777 [(set_attr "length" "8,12,16,8,8")
4778 (set_attr "type" "*,*,*,load2,store2")
4779 (set_attr "pool_range" "*,*,*,1020,*")
4780 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4784 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4785 (match_operand:ANY64 1 "const_double_operand" ""))]
4788 && (arm_const_double_inline_cost (operands[1])
4789 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4792 arm_split_constant (SET, SImode, curr_insn,
4793 INTVAL (gen_lowpart (SImode, operands[1])),
4794 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4795 arm_split_constant (SET, SImode, curr_insn,
4796 INTVAL (gen_highpart_mode (SImode,
4797 GET_MODE (operands[0]),
4799 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4804 ; If optimizing for size, or if we have load delay slots, then
4805 ; we want to split the constant into two separate operations.
4806 ; In both cases this may split a trivial part into a single data op
4807 ; leaving a single complex constant to load. We can also get longer
4808 ; offsets in a LDR which means we get better chances of sharing the pool
4809 ; entries. Finally, we can normally do a better job of scheduling
4810 ; LDR instructions than we can with LDM.
4811 ; This pattern will only match if the one above did not.
4813 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4814 (match_operand:ANY64 1 "const_double_operand" ""))]
4815 "TARGET_ARM && reload_completed
4816 && arm_const_double_by_parts (operands[1])"
4817 [(set (match_dup 0) (match_dup 1))
4818 (set (match_dup 2) (match_dup 3))]
4820 operands[2] = gen_highpart (SImode, operands[0]);
4821 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4823 operands[0] = gen_lowpart (SImode, operands[0]);
4824 operands[1] = gen_lowpart (SImode, operands[1]);
4829 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4830 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4831 "TARGET_EITHER && reload_completed"
4832 [(set (match_dup 0) (match_dup 1))
4833 (set (match_dup 2) (match_dup 3))]
4835 operands[2] = gen_highpart (SImode, operands[0]);
4836 operands[3] = gen_highpart (SImode, operands[1]);
4837 operands[0] = gen_lowpart (SImode, operands[0]);
4838 operands[1] = gen_lowpart (SImode, operands[1]);
4840 /* Handle a partial overlap. */
4841 if (rtx_equal_p (operands[0], operands[3]))
4843 rtx tmp0 = operands[0];
4844 rtx tmp1 = operands[1];
4846 operands[0] = operands[2];
4847 operands[1] = operands[3];
4854 ;; We can't actually do base+index doubleword loads if the index and
4855 ;; destination overlap. Split here so that we at least have chance to
4858 [(set (match_operand:DI 0 "s_register_operand" "")
4859 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4860 (match_operand:SI 2 "s_register_operand" ""))))]
4862 && reg_overlap_mentioned_p (operands[0], operands[1])
4863 && reg_overlap_mentioned_p (operands[0], operands[2])"
4865 (plus:SI (match_dup 1)
4868 (mem:DI (match_dup 4)))]
4870 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4874 ;;; ??? This should have alternatives for constants.
4875 ;;; ??? This was originally identical to the movdf_insn pattern.
4876 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4877 ;;; thumb_reorg with a memory reference.
4878 (define_insn "*thumb1_movdi_insn"
4879 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4880 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4882 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4883 && ( register_operand (operands[0], DImode)
4884 || register_operand (operands[1], DImode))"
4887 switch (which_alternative)
4891 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4892 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4893 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4895 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4897 operands[1] = GEN_INT (- INTVAL (operands[1]));
4898 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4900 return \"ldmia\\t%1, {%0, %H0}\";
4902 return \"stmia\\t%0, {%1, %H1}\";
4904 return thumb_load_double_from_address (operands);
4906 operands[2] = gen_rtx_MEM (SImode,
4907 plus_constant (XEXP (operands[0], 0), 4));
4908 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4911 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4912 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4913 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4916 [(set_attr "length" "4,4,6,2,2,6,4,4")
4917 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4918 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4921 (define_expand "movsi"
4922 [(set (match_operand:SI 0 "general_operand" "")
4923 (match_operand:SI 1 "general_operand" ""))]
4927 rtx base, offset, tmp;
4931 /* Everything except mem = const or mem = mem can be done easily. */
4932 if (GET_CODE (operands[0]) == MEM)
4933 operands[1] = force_reg (SImode, operands[1]);
4934 if (arm_general_register_operand (operands[0], SImode)
4935 && GET_CODE (operands[1]) == CONST_INT
4936 && !(const_ok_for_arm (INTVAL (operands[1]))
4937 || const_ok_for_arm (~INTVAL (operands[1]))))
4939 arm_split_constant (SET, SImode, NULL_RTX,
4940 INTVAL (operands[1]), operands[0], NULL_RTX,
4941 optimize && can_create_pseudo_p ());
4945 if (TARGET_USE_MOVT && !target_word_relocations
4946 && GET_CODE (operands[1]) == SYMBOL_REF
4947 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4949 arm_emit_movpair (operands[0], operands[1]);
4953 else /* TARGET_THUMB1... */
4955 if (can_create_pseudo_p ())
4957 if (GET_CODE (operands[0]) != REG)
4958 operands[1] = force_reg (SImode, operands[1]);
4962 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4964 split_const (operands[1], &base, &offset);
4965 if (GET_CODE (base) == SYMBOL_REF
4966 && !offset_within_block_p (base, INTVAL (offset)))
4968 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4969 emit_move_insn (tmp, base);
4970 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4975 /* Recognize the case where operand[1] is a reference to thread-local
4976 data and load its address to a register. */
4977 if (arm_tls_referenced_p (operands[1]))
4979 rtx tmp = operands[1];
4982 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4984 addend = XEXP (XEXP (tmp, 0), 1);
4985 tmp = XEXP (XEXP (tmp, 0), 0);
4988 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4989 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4991 tmp = legitimize_tls_address (tmp,
4992 !can_create_pseudo_p () ? operands[0] : 0);
4995 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4996 tmp = force_operand (tmp, operands[0]);
5001 && (CONSTANT_P (operands[1])
5002 || symbol_mentioned_p (operands[1])
5003 || label_mentioned_p (operands[1])))
5004 operands[1] = legitimize_pic_address (operands[1], SImode,
5005 (!can_create_pseudo_p ()
5012 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5013 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5014 ;; so this does not matter.
5015 (define_insn "*arm_movt"
5016 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5017 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5018 (match_operand:SI 2 "general_operand" "i")))]
5020 "movt%?\t%0, #:upper16:%c2"
5021 [(set_attr "predicable" "yes")
5022 (set_attr "length" "4")]
5025 (define_insn "*arm_movsi_insn"
5026 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5027 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5028 "TARGET_ARM && ! TARGET_IWMMXT
5029 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5030 && ( register_operand (operands[0], SImode)
5031 || register_operand (operands[1], SImode))"
5039 [(set_attr "type" "*,*,*,*,load1,store1")
5040 (set_attr "predicable" "yes")
5041 (set_attr "pool_range" "*,*,*,*,4096,*")
5042 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5046 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5047 (match_operand:SI 1 "const_int_operand" ""))]
5049 && (!(const_ok_for_arm (INTVAL (operands[1]))
5050 || const_ok_for_arm (~INTVAL (operands[1]))))"
5051 [(clobber (const_int 0))]
5053 arm_split_constant (SET, SImode, NULL_RTX,
5054 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5059 (define_insn "*thumb1_movsi_insn"
5060 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5061 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5063 && ( register_operand (operands[0], SImode)
5064 || register_operand (operands[1], SImode))"
5075 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5076 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5077 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5081 [(set (match_operand:SI 0 "register_operand" "")
5082 (match_operand:SI 1 "const_int_operand" ""))]
5083 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5084 [(set (match_dup 0) (match_dup 1))
5085 (set (match_dup 0) (neg:SI (match_dup 0)))]
5086 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5090 [(set (match_operand:SI 0 "register_operand" "")
5091 (match_operand:SI 1 "const_int_operand" ""))]
5092 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5093 [(set (match_dup 0) (match_dup 1))
5094 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5097 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5098 unsigned HOST_WIDE_INT mask = 0xff;
5101 for (i = 0; i < 25; i++)
5102 if ((val & (mask << i)) == val)
5105 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5109 operands[1] = GEN_INT (val >> i);
5110 operands[2] = GEN_INT (i);
5114 ;; When generating pic, we need to load the symbol offset into a register.
5115 ;; So that the optimizer does not confuse this with a normal symbol load
5116 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5117 ;; since that is the only type of relocation we can use.
5119 ;; The rather odd constraints on the following are to force reload to leave
5120 ;; the insn alone, and to force the minipool generation pass to then move
5121 ;; the GOT symbol to memory.
5123 (define_insn "pic_load_addr_32bit"
5124 [(set (match_operand:SI 0 "s_register_operand" "=r")
5125 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5126 "TARGET_32BIT && flag_pic"
5128 [(set_attr "type" "load1")
5129 (set_attr "pool_range" "4096")
5130 (set (attr "neg_pool_range")
5131 (if_then_else (eq_attr "is_thumb" "no")
5136 (define_insn "pic_load_addr_thumb1"
5137 [(set (match_operand:SI 0 "s_register_operand" "=l")
5138 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5139 "TARGET_THUMB1 && flag_pic"
5141 [(set_attr "type" "load1")
5142 (set (attr "pool_range") (const_int 1024))]
5145 (define_insn "pic_add_dot_plus_four"
5146 [(set (match_operand:SI 0 "register_operand" "=r")
5147 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5149 (match_operand 2 "" "")]
5153 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5154 INTVAL (operands[2]));
5155 return \"add\\t%0, %|pc\";
5157 [(set_attr "length" "2")]
5160 (define_insn "pic_add_dot_plus_eight"
5161 [(set (match_operand:SI 0 "register_operand" "=r")
5162 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5164 (match_operand 2 "" "")]
5168 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5169 INTVAL (operands[2]));
5170 return \"add%?\\t%0, %|pc, %1\";
5172 [(set_attr "predicable" "yes")]
5175 (define_insn "tls_load_dot_plus_eight"
5176 [(set (match_operand:SI 0 "register_operand" "=r")
5177 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5179 (match_operand 2 "" "")]
5183 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5184 INTVAL (operands[2]));
5185 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5187 [(set_attr "predicable" "yes")]
5190 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5191 ;; followed by a load. These sequences can be crunched down to
5192 ;; tls_load_dot_plus_eight by a peephole.
5195 [(set (match_operand:SI 0 "register_operand" "")
5196 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5198 (match_operand 1 "" "")]
5200 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5201 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5203 (mem:SI (unspec:SI [(match_dup 3)
5210 (define_insn "pic_offset_arm"
5211 [(set (match_operand:SI 0 "register_operand" "=r")
5212 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5213 (unspec:SI [(match_operand:SI 2 "" "X")]
5214 UNSPEC_PIC_OFFSET))))]
5215 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5216 "ldr%?\\t%0, [%1,%2]"
5217 [(set_attr "type" "load1")]
5220 (define_expand "builtin_setjmp_receiver"
5221 [(label_ref (match_operand 0 "" ""))]
5225 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5227 if (arm_pic_register != INVALID_REGNUM)
5228 arm_load_pic_register (1UL << 3);
5232 ;; If copying one reg to another we can set the condition codes according to
5233 ;; its value. Such a move is common after a return from subroutine and the
5234 ;; result is being tested against zero.
5236 (define_insn "*movsi_compare0"
5237 [(set (reg:CC CC_REGNUM)
5238 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5240 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5246 [(set_attr "conds" "set")]
5249 ;; Subroutine to store a half word from a register into memory.
5250 ;; Operand 0 is the source register (HImode)
5251 ;; Operand 1 is the destination address in a register (SImode)
5253 ;; In both this routine and the next, we must be careful not to spill
5254 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5255 ;; can generate unrecognizable rtl.
5257 (define_expand "storehi"
5258 [;; store the low byte
5259 (set (match_operand 1 "" "") (match_dup 3))
5260 ;; extract the high byte
5262 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5263 ;; store the high byte
5264 (set (match_dup 4) (match_dup 5))]
5268 rtx op1 = operands[1];
5269 rtx addr = XEXP (op1, 0);
5270 enum rtx_code code = GET_CODE (addr);
5272 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5274 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5276 operands[4] = adjust_address (op1, QImode, 1);
5277 operands[1] = adjust_address (operands[1], QImode, 0);
5278 operands[3] = gen_lowpart (QImode, operands[0]);
5279 operands[0] = gen_lowpart (SImode, operands[0]);
5280 operands[2] = gen_reg_rtx (SImode);
5281 operands[5] = gen_lowpart (QImode, operands[2]);
5285 (define_expand "storehi_bigend"
5286 [(set (match_dup 4) (match_dup 3))
5288 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5289 (set (match_operand 1 "" "") (match_dup 5))]
5293 rtx op1 = operands[1];
5294 rtx addr = XEXP (op1, 0);
5295 enum rtx_code code = GET_CODE (addr);
5297 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5299 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5301 operands[4] = adjust_address (op1, QImode, 1);
5302 operands[1] = adjust_address (operands[1], QImode, 0);
5303 operands[3] = gen_lowpart (QImode, operands[0]);
5304 operands[0] = gen_lowpart (SImode, operands[0]);
5305 operands[2] = gen_reg_rtx (SImode);
5306 operands[5] = gen_lowpart (QImode, operands[2]);
5310 ;; Subroutine to store a half word integer constant into memory.
5311 (define_expand "storeinthi"
5312 [(set (match_operand 0 "" "")
5313 (match_operand 1 "" ""))
5314 (set (match_dup 3) (match_dup 2))]
5318 HOST_WIDE_INT value = INTVAL (operands[1]);
5319 rtx addr = XEXP (operands[0], 0);
5320 rtx op0 = operands[0];
5321 enum rtx_code code = GET_CODE (addr);
5323 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5325 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5327 operands[1] = gen_reg_rtx (SImode);
5328 if (BYTES_BIG_ENDIAN)
5330 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5331 if ((value & 255) == ((value >> 8) & 255))
5332 operands[2] = operands[1];
5335 operands[2] = gen_reg_rtx (SImode);
5336 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5341 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5342 if ((value & 255) == ((value >> 8) & 255))
5343 operands[2] = operands[1];
5346 operands[2] = gen_reg_rtx (SImode);
5347 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5351 operands[3] = adjust_address (op0, QImode, 1);
5352 operands[0] = adjust_address (operands[0], QImode, 0);
5353 operands[2] = gen_lowpart (QImode, operands[2]);
5354 operands[1] = gen_lowpart (QImode, operands[1]);
5358 (define_expand "storehi_single_op"
5359 [(set (match_operand:HI 0 "memory_operand" "")
5360 (match_operand:HI 1 "general_operand" ""))]
5361 "TARGET_32BIT && arm_arch4"
5363 if (!s_register_operand (operands[1], HImode))
5364 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5368 (define_expand "movhi"
5369 [(set (match_operand:HI 0 "general_operand" "")
5370 (match_operand:HI 1 "general_operand" ""))]
5375 if (can_create_pseudo_p ())
5377 if (GET_CODE (operands[0]) == MEM)
5381 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5384 if (GET_CODE (operands[1]) == CONST_INT)
5385 emit_insn (gen_storeinthi (operands[0], operands[1]));
5388 if (GET_CODE (operands[1]) == MEM)
5389 operands[1] = force_reg (HImode, operands[1]);
5390 if (BYTES_BIG_ENDIAN)
5391 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5393 emit_insn (gen_storehi (operands[1], operands[0]));
5397 /* Sign extend a constant, and keep it in an SImode reg. */
5398 else if (GET_CODE (operands[1]) == CONST_INT)
5400 rtx reg = gen_reg_rtx (SImode);
5401 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5403 /* If the constant is already valid, leave it alone. */
5404 if (!const_ok_for_arm (val))
5406 /* If setting all the top bits will make the constant
5407 loadable in a single instruction, then set them.
5408 Otherwise, sign extend the number. */
5410 if (const_ok_for_arm (~(val | ~0xffff)))
5412 else if (val & 0x8000)
5416 emit_insn (gen_movsi (reg, GEN_INT (val)));
5417 operands[1] = gen_lowpart (HImode, reg);
5419 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5420 && GET_CODE (operands[1]) == MEM)
5422 rtx reg = gen_reg_rtx (SImode);
5424 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5425 operands[1] = gen_lowpart (HImode, reg);
5427 else if (!arm_arch4)
5429 if (GET_CODE (operands[1]) == MEM)
5432 rtx offset = const0_rtx;
5433 rtx reg = gen_reg_rtx (SImode);
5435 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5436 || (GET_CODE (base) == PLUS
5437 && (GET_CODE (offset = XEXP (base, 1))
5439 && ((INTVAL(offset) & 1) != 1)
5440 && GET_CODE (base = XEXP (base, 0)) == REG))
5441 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5445 new_rtx = widen_memory_access (operands[1], SImode,
5446 ((INTVAL (offset) & ~3)
5447 - INTVAL (offset)));
5448 emit_insn (gen_movsi (reg, new_rtx));
5449 if (((INTVAL (offset) & 2) != 0)
5450 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5452 rtx reg2 = gen_reg_rtx (SImode);
5454 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5459 emit_insn (gen_movhi_bytes (reg, operands[1]));
5461 operands[1] = gen_lowpart (HImode, reg);
5465 /* Handle loading a large integer during reload. */
5466 else if (GET_CODE (operands[1]) == CONST_INT
5467 && !const_ok_for_arm (INTVAL (operands[1]))
5468 && !const_ok_for_arm (~INTVAL (operands[1])))
5470 /* Writing a constant to memory needs a scratch, which should
5471 be handled with SECONDARY_RELOADs. */
5472 gcc_assert (GET_CODE (operands[0]) == REG);
5474 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5475 emit_insn (gen_movsi (operands[0], operands[1]));
5479 else if (TARGET_THUMB2)
5481 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5482 if (can_create_pseudo_p ())
5484 if (GET_CODE (operands[0]) != REG)
5485 operands[1] = force_reg (HImode, operands[1]);
5486 /* Zero extend a constant, and keep it in an SImode reg. */
5487 else if (GET_CODE (operands[1]) == CONST_INT)
5489 rtx reg = gen_reg_rtx (SImode);
5490 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5492 emit_insn (gen_movsi (reg, GEN_INT (val)));
5493 operands[1] = gen_lowpart (HImode, reg);
5497 else /* TARGET_THUMB1 */
5499 if (can_create_pseudo_p ())
5501 if (GET_CODE (operands[1]) == CONST_INT)
5503 rtx reg = gen_reg_rtx (SImode);
5505 emit_insn (gen_movsi (reg, operands[1]));
5506 operands[1] = gen_lowpart (HImode, reg);
5509 /* ??? We shouldn't really get invalid addresses here, but this can
5510 happen if we are passed a SP (never OK for HImode/QImode) or
5511 virtual register (also rejected as illegitimate for HImode/QImode)
5512 relative address. */
5513 /* ??? This should perhaps be fixed elsewhere, for instance, in
5514 fixup_stack_1, by checking for other kinds of invalid addresses,
5515 e.g. a bare reference to a virtual register. This may confuse the
5516 alpha though, which must handle this case differently. */
5517 if (GET_CODE (operands[0]) == MEM
5518 && !memory_address_p (GET_MODE (operands[0]),
5519 XEXP (operands[0], 0)))
5521 = replace_equiv_address (operands[0],
5522 copy_to_reg (XEXP (operands[0], 0)));
5524 if (GET_CODE (operands[1]) == MEM
5525 && !memory_address_p (GET_MODE (operands[1]),
5526 XEXP (operands[1], 0)))
5528 = replace_equiv_address (operands[1],
5529 copy_to_reg (XEXP (operands[1], 0)));
5531 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5533 rtx reg = gen_reg_rtx (SImode);
5535 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5536 operands[1] = gen_lowpart (HImode, reg);
5539 if (GET_CODE (operands[0]) == MEM)
5540 operands[1] = force_reg (HImode, operands[1]);
5542 else if (GET_CODE (operands[1]) == CONST_INT
5543 && !satisfies_constraint_I (operands[1]))
5545 /* Handle loading a large integer during reload. */
5547 /* Writing a constant to memory needs a scratch, which should
5548 be handled with SECONDARY_RELOADs. */
5549 gcc_assert (GET_CODE (operands[0]) == REG);
5551 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5552 emit_insn (gen_movsi (operands[0], operands[1]));
5559 (define_insn "*thumb1_movhi_insn"
5560 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5561 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5563 && ( register_operand (operands[0], HImode)
5564 || register_operand (operands[1], HImode))"
5566 switch (which_alternative)
5568 case 0: return \"add %0, %1, #0\";
5569 case 2: return \"strh %1, %0\";
5570 case 3: return \"mov %0, %1\";
5571 case 4: return \"mov %0, %1\";
5572 case 5: return \"mov %0, %1\";
5573 default: gcc_unreachable ();
5575 /* The stack pointer can end up being taken as an index register.
5576 Catch this case here and deal with it. */
5577 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5578 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5579 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5582 ops[0] = operands[0];
5583 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5585 output_asm_insn (\"mov %0, %1\", ops);
5587 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5590 return \"ldrh %0, %1\";
5592 [(set_attr "length" "2,4,2,2,2,2")
5593 (set_attr "type" "*,load1,store1,*,*,*")]
5597 (define_expand "movhi_bytes"
5598 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5600 (zero_extend:SI (match_dup 6)))
5601 (set (match_operand:SI 0 "" "")
5602 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5607 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5609 mem1 = change_address (operands[1], QImode, addr);
5610 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5611 operands[0] = gen_lowpart (SImode, operands[0]);
5613 operands[2] = gen_reg_rtx (SImode);
5614 operands[3] = gen_reg_rtx (SImode);
5617 if (BYTES_BIG_ENDIAN)
5619 operands[4] = operands[2];
5620 operands[5] = operands[3];
5624 operands[4] = operands[3];
5625 operands[5] = operands[2];
5630 (define_expand "movhi_bigend"
5632 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5635 (ashiftrt:SI (match_dup 2) (const_int 16)))
5636 (set (match_operand:HI 0 "s_register_operand" "")
5640 operands[2] = gen_reg_rtx (SImode);
5641 operands[3] = gen_reg_rtx (SImode);
5642 operands[4] = gen_lowpart (HImode, operands[3]);
5646 ;; Pattern to recognize insn generated default case above
5647 (define_insn "*movhi_insn_arch4"
5648 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5649 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5652 && (GET_CODE (operands[1]) != CONST_INT
5653 || const_ok_for_arm (INTVAL (operands[1]))
5654 || const_ok_for_arm (~INTVAL (operands[1])))"
5656 mov%?\\t%0, %1\\t%@ movhi
5657 mvn%?\\t%0, #%B1\\t%@ movhi
5658 str%(h%)\\t%1, %0\\t%@ movhi
5659 ldr%(h%)\\t%0, %1\\t%@ movhi"
5660 [(set_attr "type" "*,*,store1,load1")
5661 (set_attr "predicable" "yes")
5662 (set_attr "pool_range" "*,*,*,256")
5663 (set_attr "neg_pool_range" "*,*,*,244")]
5666 (define_insn "*movhi_bytes"
5667 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5668 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5671 mov%?\\t%0, %1\\t%@ movhi
5672 mvn%?\\t%0, #%B1\\t%@ movhi"
5673 [(set_attr "predicable" "yes")]
5676 (define_expand "thumb_movhi_clobber"
5677 [(set (match_operand:HI 0 "memory_operand" "")
5678 (match_operand:HI 1 "register_operand" ""))
5679 (clobber (match_operand:DI 2 "register_operand" ""))]
5682 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5683 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5685 emit_insn (gen_movhi (operands[0], operands[1]));
5688 /* XXX Fixme, need to handle other cases here as well. */
5693 ;; We use a DImode scratch because we may occasionally need an additional
5694 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5695 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5696 (define_expand "reload_outhi"
5697 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5698 (match_operand:HI 1 "s_register_operand" "r")
5699 (match_operand:DI 2 "s_register_operand" "=&l")])]
5702 arm_reload_out_hi (operands);
5704 thumb_reload_out_hi (operands);
5709 (define_expand "reload_inhi"
5710 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5711 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5712 (match_operand:DI 2 "s_register_operand" "=&r")])]
5716 arm_reload_in_hi (operands);
5718 thumb_reload_out_hi (operands);
5722 (define_expand "movqi"
5723 [(set (match_operand:QI 0 "general_operand" "")
5724 (match_operand:QI 1 "general_operand" ""))]
5727 /* Everything except mem = const or mem = mem can be done easily */
5729 if (can_create_pseudo_p ())
5731 if (GET_CODE (operands[1]) == CONST_INT)
5733 rtx reg = gen_reg_rtx (SImode);
5735 /* For thumb we want an unsigned immediate, then we are more likely
5736 to be able to use a movs insn. */
5738 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5740 emit_insn (gen_movsi (reg, operands[1]));
5741 operands[1] = gen_lowpart (QImode, reg);
5746 /* ??? We shouldn't really get invalid addresses here, but this can
5747 happen if we are passed a SP (never OK for HImode/QImode) or
5748 virtual register (also rejected as illegitimate for HImode/QImode)
5749 relative address. */
5750 /* ??? This should perhaps be fixed elsewhere, for instance, in
5751 fixup_stack_1, by checking for other kinds of invalid addresses,
5752 e.g. a bare reference to a virtual register. This may confuse the
5753 alpha though, which must handle this case differently. */
5754 if (GET_CODE (operands[0]) == MEM
5755 && !memory_address_p (GET_MODE (operands[0]),
5756 XEXP (operands[0], 0)))
5758 = replace_equiv_address (operands[0],
5759 copy_to_reg (XEXP (operands[0], 0)));
5760 if (GET_CODE (operands[1]) == MEM
5761 && !memory_address_p (GET_MODE (operands[1]),
5762 XEXP (operands[1], 0)))
5764 = replace_equiv_address (operands[1],
5765 copy_to_reg (XEXP (operands[1], 0)));
5768 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5770 rtx reg = gen_reg_rtx (SImode);
5772 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5773 operands[1] = gen_lowpart (QImode, reg);
5776 if (GET_CODE (operands[0]) == MEM)
5777 operands[1] = force_reg (QImode, operands[1]);
5779 else if (TARGET_THUMB
5780 && GET_CODE (operands[1]) == CONST_INT
5781 && !satisfies_constraint_I (operands[1]))
5783 /* Handle loading a large integer during reload. */
5785 /* Writing a constant to memory needs a scratch, which should
5786 be handled with SECONDARY_RELOADs. */
5787 gcc_assert (GET_CODE (operands[0]) == REG);
5789 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5790 emit_insn (gen_movsi (operands[0], operands[1]));
5797 (define_insn "*arm_movqi_insn"
5798 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5799 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5801 && ( register_operand (operands[0], QImode)
5802 || register_operand (operands[1], QImode))"
5808 [(set_attr "type" "*,*,load1,store1")
5809 (set_attr "predicable" "yes")]
5812 (define_insn "*thumb1_movqi_insn"
5813 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5814 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5816 && ( register_operand (operands[0], QImode)
5817 || register_operand (operands[1], QImode))"
5825 [(set_attr "length" "2")
5826 (set_attr "type" "*,load1,store1,*,*,*")
5827 (set_attr "pool_range" "*,32,*,*,*,*")]
5831 (define_expand "movhf"
5832 [(set (match_operand:HF 0 "general_operand" "")
5833 (match_operand:HF 1 "general_operand" ""))]
5838 if (GET_CODE (operands[0]) == MEM)
5839 operands[1] = force_reg (HFmode, operands[1]);
5841 else /* TARGET_THUMB1 */
5843 if (can_create_pseudo_p ())
5845 if (GET_CODE (operands[0]) != REG)
5846 operands[1] = force_reg (HFmode, operands[1]);
5852 (define_insn "*arm32_movhf"
5853 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5854 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5855 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5856 && ( s_register_operand (operands[0], HFmode)
5857 || s_register_operand (operands[1], HFmode))"
5859 switch (which_alternative)
5861 case 0: /* ARM register from memory */
5862 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5863 case 1: /* memory from ARM register */
5864 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5865 case 2: /* ARM register from ARM register */
5866 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5867 case 3: /* ARM register from constant */
5873 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5874 bits = real_to_target (NULL, &r, HFmode);
5875 ops[0] = operands[0];
5876 ops[1] = GEN_INT (bits);
5877 ops[2] = GEN_INT (bits & 0xff00);
5878 ops[3] = GEN_INT (bits & 0x00ff);
5880 if (arm_arch_thumb2)
5881 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5883 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5890 [(set_attr "conds" "unconditional")
5891 (set_attr "type" "load1,store1,*,*")
5892 (set_attr "length" "4,4,4,8")
5893 (set_attr "predicable" "yes")
5897 (define_insn "*thumb1_movhf"
5898 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
5899 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
5901 && ( s_register_operand (operands[0], HFmode)
5902 || s_register_operand (operands[1], HFmode))"
5904 switch (which_alternative)
5909 gcc_assert (GET_CODE(operands[1]) == MEM);
5910 addr = XEXP (operands[1], 0);
5911 if (GET_CODE (addr) == LABEL_REF
5912 || (GET_CODE (addr) == CONST
5913 && GET_CODE (XEXP (addr, 0)) == PLUS
5914 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
5915 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
5917 /* Constant pool entry. */
5918 return \"ldr\\t%0, %1\";
5920 return \"ldrh\\t%0, %1\";
5922 case 2: return \"strh\\t%1, %0\";
5923 default: return \"mov\\t%0, %1\";
5926 [(set_attr "length" "2")
5927 (set_attr "type" "*,load1,store1,*,*")
5928 (set_attr "pool_range" "*,1020,*,*,*")]
5931 (define_expand "movsf"
5932 [(set (match_operand:SF 0 "general_operand" "")
5933 (match_operand:SF 1 "general_operand" ""))]
5938 if (GET_CODE (operands[0]) == MEM)
5939 operands[1] = force_reg (SFmode, operands[1]);
5941 else /* TARGET_THUMB1 */
5943 if (can_create_pseudo_p ())
5945 if (GET_CODE (operands[0]) != REG)
5946 operands[1] = force_reg (SFmode, operands[1]);
5952 ;; Transform a floating-point move of a constant into a core register into
5953 ;; an SImode operation.
5955 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5956 (match_operand:SF 1 "immediate_operand" ""))]
5959 && GET_CODE (operands[1]) == CONST_DOUBLE"
5960 [(set (match_dup 2) (match_dup 3))]
5962 operands[2] = gen_lowpart (SImode, operands[0]);
5963 operands[3] = gen_lowpart (SImode, operands[1]);
5964 if (operands[2] == 0 || operands[3] == 0)
5969 (define_insn "*arm_movsf_soft_insn"
5970 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5971 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5973 && TARGET_SOFT_FLOAT
5974 && (GET_CODE (operands[0]) != MEM
5975 || register_operand (operands[1], SFmode))"
5978 ldr%?\\t%0, %1\\t%@ float
5979 str%?\\t%1, %0\\t%@ float"
5980 [(set_attr "length" "4,4,4")
5981 (set_attr "predicable" "yes")
5982 (set_attr "type" "*,load1,store1")
5983 (set_attr "pool_range" "*,4096,*")
5984 (set_attr "neg_pool_range" "*,4084,*")]
5987 ;;; ??? This should have alternatives for constants.
5988 (define_insn "*thumb1_movsf_insn"
5989 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5990 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5992 && ( register_operand (operands[0], SFmode)
5993 || register_operand (operands[1], SFmode))"
6002 [(set_attr "length" "2")
6003 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6004 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6007 (define_expand "movdf"
6008 [(set (match_operand:DF 0 "general_operand" "")
6009 (match_operand:DF 1 "general_operand" ""))]
6014 if (GET_CODE (operands[0]) == MEM)
6015 operands[1] = force_reg (DFmode, operands[1]);
6017 else /* TARGET_THUMB */
6019 if (can_create_pseudo_p ())
6021 if (GET_CODE (operands[0]) != REG)
6022 operands[1] = force_reg (DFmode, operands[1]);
6028 ;; Reloading a df mode value stored in integer regs to memory can require a
6030 (define_expand "reload_outdf"
6031 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6032 (match_operand:DF 1 "s_register_operand" "r")
6033 (match_operand:SI 2 "s_register_operand" "=&r")]
6037 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6040 operands[2] = XEXP (operands[0], 0);
6041 else if (code == POST_INC || code == PRE_DEC)
6043 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6044 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6045 emit_insn (gen_movdi (operands[0], operands[1]));
6048 else if (code == PRE_INC)
6050 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6052 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6055 else if (code == POST_DEC)
6056 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6058 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6059 XEXP (XEXP (operands[0], 0), 1)));
6061 emit_insn (gen_rtx_SET (VOIDmode,
6062 replace_equiv_address (operands[0], operands[2]),
6065 if (code == POST_DEC)
6066 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6072 (define_insn "*movdf_soft_insn"
6073 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6074 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6075 "TARGET_ARM && TARGET_SOFT_FLOAT
6076 && ( register_operand (operands[0], DFmode)
6077 || register_operand (operands[1], DFmode))"
6079 switch (which_alternative)
6086 return output_move_double (operands);
6089 [(set_attr "length" "8,12,16,8,8")
6090 (set_attr "type" "*,*,*,load2,store2")
6091 (set_attr "pool_range" "1020")
6092 (set_attr "neg_pool_range" "1008")]
6095 ;;; ??? This should have alternatives for constants.
6096 ;;; ??? This was originally identical to the movdi_insn pattern.
6097 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6098 ;;; thumb_reorg with a memory reference.
6099 (define_insn "*thumb_movdf_insn"
6100 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6101 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6103 && ( register_operand (operands[0], DFmode)
6104 || register_operand (operands[1], DFmode))"
6106 switch (which_alternative)
6110 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6111 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6112 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6114 return \"ldmia\\t%1, {%0, %H0}\";
6116 return \"stmia\\t%0, {%1, %H1}\";
6118 return thumb_load_double_from_address (operands);
6120 operands[2] = gen_rtx_MEM (SImode,
6121 plus_constant (XEXP (operands[0], 0), 4));
6122 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6125 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6126 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6127 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6130 [(set_attr "length" "4,2,2,6,4,4")
6131 (set_attr "type" "*,load2,store2,load2,store2,*")
6132 (set_attr "pool_range" "*,*,*,1020,*,*")]
6135 (define_expand "movxf"
6136 [(set (match_operand:XF 0 "general_operand" "")
6137 (match_operand:XF 1 "general_operand" ""))]
6138 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6140 if (GET_CODE (operands[0]) == MEM)
6141 operands[1] = force_reg (XFmode, operands[1]);
6147 ;; load- and store-multiple insns
6148 ;; The arm can load/store any set of registers, provided that they are in
6149 ;; ascending order; but that is beyond GCC so stick with what it knows.
6151 (define_expand "load_multiple"
6152 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6153 (match_operand:SI 1 "" ""))
6154 (use (match_operand:SI 2 "" ""))])]
6157 HOST_WIDE_INT offset = 0;
6159 /* Support only fixed point registers. */
6160 if (GET_CODE (operands[2]) != CONST_INT
6161 || INTVAL (operands[2]) > 14
6162 || INTVAL (operands[2]) < 2
6163 || GET_CODE (operands[1]) != MEM
6164 || GET_CODE (operands[0]) != REG
6165 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6166 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6170 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6171 force_reg (SImode, XEXP (operands[1], 0)),
6172 TRUE, FALSE, operands[1], &offset);
6175 ;; Load multiple with write-back
6177 (define_insn "*ldmsi_postinc4"
6178 [(match_parallel 0 "load_multiple_operation"
6179 [(set (match_operand:SI 1 "s_register_operand" "=r")
6180 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6182 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6183 (mem:SI (match_dup 2)))
6184 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6185 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6186 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6187 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6188 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6189 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6190 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6191 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6192 [(set_attr "type" "load4")
6193 (set_attr "predicable" "yes")]
6196 (define_insn "*ldmsi_postinc4_thumb1"
6197 [(match_parallel 0 "load_multiple_operation"
6198 [(set (match_operand:SI 1 "s_register_operand" "=l")
6199 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6201 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6202 (mem:SI (match_dup 2)))
6203 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6204 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6205 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6206 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6207 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6208 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6209 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6210 "ldmia\\t%1!, {%3, %4, %5, %6}"
6211 [(set_attr "type" "load4")]
6214 (define_insn "*ldmsi_postinc3"
6215 [(match_parallel 0 "load_multiple_operation"
6216 [(set (match_operand:SI 1 "s_register_operand" "=r")
6217 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6219 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6220 (mem:SI (match_dup 2)))
6221 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6222 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6223 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6224 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6225 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6226 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6227 [(set_attr "type" "load3")
6228 (set_attr "predicable" "yes")]
6231 (define_insn "*ldmsi_postinc2"
6232 [(match_parallel 0 "load_multiple_operation"
6233 [(set (match_operand:SI 1 "s_register_operand" "=r")
6234 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6236 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6237 (mem:SI (match_dup 2)))
6238 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6239 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6240 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6241 "ldm%(ia%)\\t%1!, {%3, %4}"
6242 [(set_attr "type" "load2")
6243 (set_attr "predicable" "yes")]
6246 ;; Ordinary load multiple
6248 (define_insn "*ldmsi4"
6249 [(match_parallel 0 "load_multiple_operation"
6250 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6251 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6252 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6253 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6254 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6255 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6256 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6257 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6258 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6259 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6260 [(set_attr "type" "load4")
6261 (set_attr "predicable" "yes")]
6264 (define_insn "*ldmsi3"
6265 [(match_parallel 0 "load_multiple_operation"
6266 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6267 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6268 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6269 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6270 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6271 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6272 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6273 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6274 [(set_attr "type" "load3")
6275 (set_attr "predicable" "yes")]
6278 (define_insn "*ldmsi2"
6279 [(match_parallel 0 "load_multiple_operation"
6280 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6281 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6282 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6283 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6284 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6285 "ldm%(ia%)\\t%1, {%2, %3}"
6286 [(set_attr "type" "load2")
6287 (set_attr "predicable" "yes")]
6290 (define_expand "store_multiple"
6291 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6292 (match_operand:SI 1 "" ""))
6293 (use (match_operand:SI 2 "" ""))])]
6296 HOST_WIDE_INT offset = 0;
6298 /* Support only fixed point registers. */
6299 if (GET_CODE (operands[2]) != CONST_INT
6300 || INTVAL (operands[2]) > 14
6301 || INTVAL (operands[2]) < 2
6302 || GET_CODE (operands[1]) != REG
6303 || GET_CODE (operands[0]) != MEM
6304 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6305 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6309 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6310 force_reg (SImode, XEXP (operands[0], 0)),
6311 TRUE, FALSE, operands[0], &offset);
6314 ;; Store multiple with write-back
6316 (define_insn "*stmsi_postinc4"
6317 [(match_parallel 0 "store_multiple_operation"
6318 [(set (match_operand:SI 1 "s_register_operand" "=r")
6319 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6321 (set (mem:SI (match_dup 2))
6322 (match_operand:SI 3 "arm_hard_register_operand" ""))
6323 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6324 (match_operand:SI 4 "arm_hard_register_operand" ""))
6325 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6326 (match_operand:SI 5 "arm_hard_register_operand" ""))
6327 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6328 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6329 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6330 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6331 [(set_attr "predicable" "yes")
6332 (set_attr "type" "store4")]
6335 (define_insn "*stmsi_postinc4_thumb1"
6336 [(match_parallel 0 "store_multiple_operation"
6337 [(set (match_operand:SI 1 "s_register_operand" "=l")
6338 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6340 (set (mem:SI (match_dup 2))
6341 (match_operand:SI 3 "arm_hard_register_operand" ""))
6342 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6343 (match_operand:SI 4 "arm_hard_register_operand" ""))
6344 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6345 (match_operand:SI 5 "arm_hard_register_operand" ""))
6346 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6347 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6348 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6349 "stmia\\t%1!, {%3, %4, %5, %6}"
6350 [(set_attr "type" "store4")]
6353 (define_insn "*stmsi_postinc3"
6354 [(match_parallel 0 "store_multiple_operation"
6355 [(set (match_operand:SI 1 "s_register_operand" "=r")
6356 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6358 (set (mem:SI (match_dup 2))
6359 (match_operand:SI 3 "arm_hard_register_operand" ""))
6360 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6361 (match_operand:SI 4 "arm_hard_register_operand" ""))
6362 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6363 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6364 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6365 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6366 [(set_attr "predicable" "yes")
6367 (set_attr "type" "store3")]
6370 (define_insn "*stmsi_postinc2"
6371 [(match_parallel 0 "store_multiple_operation"
6372 [(set (match_operand:SI 1 "s_register_operand" "=r")
6373 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6375 (set (mem:SI (match_dup 2))
6376 (match_operand:SI 3 "arm_hard_register_operand" ""))
6377 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6378 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6379 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6380 "stm%(ia%)\\t%1!, {%3, %4}"
6381 [(set_attr "predicable" "yes")
6382 (set_attr "type" "store2")]
6385 ;; Ordinary store multiple
6387 (define_insn "*stmsi4"
6388 [(match_parallel 0 "store_multiple_operation"
6389 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6390 (match_operand:SI 2 "arm_hard_register_operand" ""))
6391 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6392 (match_operand:SI 3 "arm_hard_register_operand" ""))
6393 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6394 (match_operand:SI 4 "arm_hard_register_operand" ""))
6395 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6396 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6397 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6398 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6399 [(set_attr "predicable" "yes")
6400 (set_attr "type" "store4")]
6403 (define_insn "*stmsi3"
6404 [(match_parallel 0 "store_multiple_operation"
6405 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6406 (match_operand:SI 2 "arm_hard_register_operand" ""))
6407 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6408 (match_operand:SI 3 "arm_hard_register_operand" ""))
6409 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6410 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6411 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6412 "stm%(ia%)\\t%1, {%2, %3, %4}"
6413 [(set_attr "predicable" "yes")
6414 (set_attr "type" "store3")]
6417 (define_insn "*stmsi2"
6418 [(match_parallel 0 "store_multiple_operation"
6419 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6420 (match_operand:SI 2 "arm_hard_register_operand" ""))
6421 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6422 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6423 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6424 "stm%(ia%)\\t%1, {%2, %3}"
6425 [(set_attr "predicable" "yes")
6426 (set_attr "type" "store2")]
6429 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6430 ;; We could let this apply for blocks of less than this, but it clobbers so
6431 ;; many registers that there is then probably a better way.
6433 (define_expand "movmemqi"
6434 [(match_operand:BLK 0 "general_operand" "")
6435 (match_operand:BLK 1 "general_operand" "")
6436 (match_operand:SI 2 "const_int_operand" "")
6437 (match_operand:SI 3 "const_int_operand" "")]
6442 if (arm_gen_movmemqi (operands))
6446 else /* TARGET_THUMB1 */
6448 if ( INTVAL (operands[3]) != 4
6449 || INTVAL (operands[2]) > 48)
6452 thumb_expand_movmemqi (operands);
6458 ;; Thumb block-move insns
6460 (define_insn "movmem12b"
6461 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6462 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6463 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6464 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6465 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6466 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6467 (set (match_operand:SI 0 "register_operand" "=l")
6468 (plus:SI (match_dup 2) (const_int 12)))
6469 (set (match_operand:SI 1 "register_operand" "=l")
6470 (plus:SI (match_dup 3) (const_int 12)))
6471 (clobber (match_scratch:SI 4 "=&l"))
6472 (clobber (match_scratch:SI 5 "=&l"))
6473 (clobber (match_scratch:SI 6 "=&l"))]
6475 "* return thumb_output_move_mem_multiple (3, operands);"
6476 [(set_attr "length" "4")
6477 ; This isn't entirely accurate... It loads as well, but in terms of
6478 ; scheduling the following insn it is better to consider it as a store
6479 (set_attr "type" "store3")]
6482 (define_insn "movmem8b"
6483 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6484 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6485 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6486 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6487 (set (match_operand:SI 0 "register_operand" "=l")
6488 (plus:SI (match_dup 2) (const_int 8)))
6489 (set (match_operand:SI 1 "register_operand" "=l")
6490 (plus:SI (match_dup 3) (const_int 8)))
6491 (clobber (match_scratch:SI 4 "=&l"))
6492 (clobber (match_scratch:SI 5 "=&l"))]
6494 "* return thumb_output_move_mem_multiple (2, operands);"
6495 [(set_attr "length" "4")
6496 ; This isn't entirely accurate... It loads as well, but in terms of
6497 ; scheduling the following insn it is better to consider it as a store
6498 (set_attr "type" "store2")]
6503 ;; Compare & branch insns
6504 ;; The range calculations are based as follows:
6505 ;; For forward branches, the address calculation returns the address of
6506 ;; the next instruction. This is 2 beyond the branch instruction.
6507 ;; For backward branches, the address calculation returns the address of
6508 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6509 ;; instruction for the shortest sequence, and 4 before the branch instruction
6510 ;; if we have to jump around an unconditional branch.
6511 ;; To the basic branch range the PC offset must be added (this is +4).
6512 ;; So for forward branches we have
6513 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6514 ;; And for backward branches we have
6515 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6517 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6518 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6520 (define_expand "cbranchsi4"
6521 [(set (pc) (if_then_else
6522 (match_operator 0 "arm_comparison_operator"
6523 [(match_operand:SI 1 "s_register_operand" "")
6524 (match_operand:SI 2 "nonmemory_operand" "")])
6525 (label_ref (match_operand 3 "" ""))
6527 "TARGET_THUMB1 || TARGET_32BIT"
6531 if (!arm_add_operand (operands[2], SImode))
6532 operands[2] = force_reg (SImode, operands[2]);
6533 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6537 if (thumb1_cmpneg_operand (operands[2], SImode))
6539 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6540 operands[3], operands[0]));
6543 if (!thumb1_cmp_operand (operands[2], SImode))
6544 operands[2] = force_reg (SImode, operands[2]);
6547 ;; A pattern to recognize a special situation and optimize for it.
6548 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6549 ;; due to the available addressing modes. Hence, convert a signed comparison
6550 ;; with zero into an unsigned comparison with 127 if possible.
6551 (define_expand "cbranchqi4"
6552 [(set (pc) (if_then_else
6553 (match_operator 0 "lt_ge_comparison_operator"
6554 [(match_operand:QI 1 "memory_operand" "")
6555 (match_operand:QI 2 "const0_operand" "")])
6556 (label_ref (match_operand 3 "" ""))
6561 xops[1] = gen_reg_rtx (SImode);
6562 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6563 xops[2] = GEN_INT (127);
6564 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6565 VOIDmode, xops[1], xops[2]);
6566 xops[3] = operands[3];
6567 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6571 (define_expand "cbranchsf4"
6572 [(set (pc) (if_then_else
6573 (match_operator 0 "arm_comparison_operator"
6574 [(match_operand:SF 1 "s_register_operand" "")
6575 (match_operand:SF 2 "arm_float_compare_operand" "")])
6576 (label_ref (match_operand 3 "" ""))
6578 "TARGET_32BIT && TARGET_HARD_FLOAT"
6579 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6580 operands[3])); DONE;"
6583 (define_expand "cbranchdf4"
6584 [(set (pc) (if_then_else
6585 (match_operator 0 "arm_comparison_operator"
6586 [(match_operand:DF 1 "s_register_operand" "")
6587 (match_operand:DF 2 "arm_float_compare_operand" "")])
6588 (label_ref (match_operand 3 "" ""))
6590 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6591 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6592 operands[3])); DONE;"
6595 ;; this uses the Cirrus DI compare instruction
6596 (define_expand "cbranchdi4"
6597 [(set (pc) (if_then_else
6598 (match_operator 0 "arm_comparison_operator"
6599 [(match_operand:DI 1 "cirrus_fp_register" "")
6600 (match_operand:DI 2 "cirrus_fp_register" "")])
6601 (label_ref (match_operand 3 "" ""))
6603 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6604 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6605 operands[3])); DONE;"
6608 (define_insn "cbranchsi4_insn"
6609 [(set (pc) (if_then_else
6610 (match_operator 0 "arm_comparison_operator"
6611 [(match_operand:SI 1 "s_register_operand" "l,*h")
6612 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6613 (label_ref (match_operand 3 "" ""))
6617 rtx t = prev_nonnote_insn (insn);
6620 && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
6622 t = XEXP (SET_SRC (PATTERN (t)), 0);
6623 if (!rtx_equal_p (XEXP (t, 0), operands[1])
6624 || !rtx_equal_p (XEXP (t, 1), operands[2]))
6630 output_asm_insn (\"cmp\\t%1, %2\", operands);
6632 switch (get_attr_length (insn))
6634 case 4: return \"b%d0\\t%l3\";
6635 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6636 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6639 [(set (attr "far_jump")
6641 (eq_attr "length" "8")
6642 (const_string "yes")
6643 (const_string "no")))
6644 (set (attr "length")
6646 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6647 (le (minus (match_dup 3) (pc)) (const_int 256)))
6650 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6651 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6656 (define_insn "cbranchsi4_scratch"
6657 [(set (pc) (if_then_else
6658 (match_operator 4 "arm_comparison_operator"
6659 [(match_operand:SI 1 "s_register_operand" "l,0")
6660 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6661 (label_ref (match_operand 3 "" ""))
6663 (clobber (match_scratch:SI 0 "=l,l"))]
6666 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6668 switch (get_attr_length (insn))
6670 case 4: return \"b%d4\\t%l3\";
6671 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6672 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6675 [(set (attr "far_jump")
6677 (eq_attr "length" "8")
6678 (const_string "yes")
6679 (const_string "no")))
6680 (set (attr "length")
6682 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6683 (le (minus (match_dup 3) (pc)) (const_int 256)))
6686 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6687 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6692 (define_insn "*movsi_cbranchsi4"
6695 (match_operator 3 "arm_comparison_operator"
6696 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6698 (label_ref (match_operand 2 "" ""))
6700 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6704 if (which_alternative == 0)
6705 output_asm_insn (\"cmp\t%0, #0\", operands);
6706 else if (which_alternative == 1)
6707 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6710 output_asm_insn (\"cmp\t%1, #0\", operands);
6711 if (which_alternative == 2)
6712 output_asm_insn (\"mov\t%0, %1\", operands);
6714 output_asm_insn (\"str\t%1, %0\", operands);
6716 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6718 case 4: return \"b%d3\\t%l2\";
6719 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6720 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6723 [(set (attr "far_jump")
6725 (ior (and (gt (symbol_ref ("which_alternative"))
6727 (eq_attr "length" "8"))
6728 (eq_attr "length" "10"))
6729 (const_string "yes")
6730 (const_string "no")))
6731 (set (attr "length")
6733 (le (symbol_ref ("which_alternative"))
6736 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6737 (le (minus (match_dup 2) (pc)) (const_int 256)))
6740 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6741 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6745 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6746 (le (minus (match_dup 2) (pc)) (const_int 256)))
6749 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6750 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6756 [(set (match_operand:SI 0 "low_register_operand" "")
6757 (match_operand:SI 1 "low_register_operand" ""))
6759 (if_then_else (match_operator 2 "arm_comparison_operator"
6760 [(match_dup 1) (const_int 0)])
6761 (label_ref (match_operand 3 "" ""))
6766 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6767 (label_ref (match_dup 3))
6769 (set (match_dup 0) (match_dup 1))])]
6773 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6774 ;; merge cases like this because the op1 is a hard register in
6775 ;; CLASS_LIKELY_SPILLED_P.
6777 [(set (match_operand:SI 0 "low_register_operand" "")
6778 (match_operand:SI 1 "low_register_operand" ""))
6780 (if_then_else (match_operator 2 "arm_comparison_operator"
6781 [(match_dup 0) (const_int 0)])
6782 (label_ref (match_operand 3 "" ""))
6787 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6788 (label_ref (match_dup 3))
6790 (set (match_dup 0) (match_dup 1))])]
6794 (define_insn "*negated_cbranchsi4"
6797 (match_operator 0 "equality_operator"
6798 [(match_operand:SI 1 "s_register_operand" "l")
6799 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6800 (label_ref (match_operand 3 "" ""))
6804 output_asm_insn (\"cmn\\t%1, %2\", operands);
6805 switch (get_attr_length (insn))
6807 case 4: return \"b%d0\\t%l3\";
6808 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6809 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6812 [(set (attr "far_jump")
6814 (eq_attr "length" "8")
6815 (const_string "yes")
6816 (const_string "no")))
6817 (set (attr "length")
6819 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6820 (le (minus (match_dup 3) (pc)) (const_int 256)))
6823 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6824 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6829 (define_insn "*tbit_cbranch"
6832 (match_operator 0 "equality_operator"
6833 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6835 (match_operand:SI 2 "const_int_operand" "i"))
6837 (label_ref (match_operand 3 "" ""))
6839 (clobber (match_scratch:SI 4 "=l"))]
6844 op[0] = operands[4];
6845 op[1] = operands[1];
6846 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6848 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6849 switch (get_attr_length (insn))
6851 case 4: return \"b%d0\\t%l3\";
6852 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6853 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6856 [(set (attr "far_jump")
6858 (eq_attr "length" "8")
6859 (const_string "yes")
6860 (const_string "no")))
6861 (set (attr "length")
6863 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6864 (le (minus (match_dup 3) (pc)) (const_int 256)))
6867 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6868 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6873 (define_insn "*tlobits_cbranch"
6876 (match_operator 0 "equality_operator"
6877 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6878 (match_operand:SI 2 "const_int_operand" "i")
6881 (label_ref (match_operand 3 "" ""))
6883 (clobber (match_scratch:SI 4 "=l"))]
6888 op[0] = operands[4];
6889 op[1] = operands[1];
6890 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6892 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6893 switch (get_attr_length (insn))
6895 case 4: return \"b%d0\\t%l3\";
6896 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6897 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6900 [(set (attr "far_jump")
6902 (eq_attr "length" "8")
6903 (const_string "yes")
6904 (const_string "no")))
6905 (set (attr "length")
6907 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6908 (le (minus (match_dup 3) (pc)) (const_int 256)))
6911 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6912 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6917 (define_insn "*tstsi3_cbranch"
6920 (match_operator 3 "equality_operator"
6921 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6922 (match_operand:SI 1 "s_register_operand" "l"))
6924 (label_ref (match_operand 2 "" ""))
6929 output_asm_insn (\"tst\\t%0, %1\", operands);
6930 switch (get_attr_length (insn))
6932 case 4: return \"b%d3\\t%l2\";
6933 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6934 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6937 [(set (attr "far_jump")
6939 (eq_attr "length" "8")
6940 (const_string "yes")
6941 (const_string "no")))
6942 (set (attr "length")
6944 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6945 (le (minus (match_dup 2) (pc)) (const_int 256)))
6948 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6949 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6954 (define_insn "*andsi3_cbranch"
6957 (match_operator 5 "equality_operator"
6958 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6959 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6961 (label_ref (match_operand 4 "" ""))
6963 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6964 (and:SI (match_dup 2) (match_dup 3)))
6965 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6969 if (which_alternative == 0)
6970 output_asm_insn (\"and\\t%0, %3\", operands);
6971 else if (which_alternative == 1)
6973 output_asm_insn (\"and\\t%1, %3\", operands);
6974 output_asm_insn (\"mov\\t%0, %1\", operands);
6978 output_asm_insn (\"and\\t%1, %3\", operands);
6979 output_asm_insn (\"str\\t%1, %0\", operands);
6982 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6984 case 4: return \"b%d5\\t%l4\";
6985 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6986 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6989 [(set (attr "far_jump")
6991 (ior (and (eq (symbol_ref ("which_alternative"))
6993 (eq_attr "length" "8"))
6994 (eq_attr "length" "10"))
6995 (const_string "yes")
6996 (const_string "no")))
6997 (set (attr "length")
6999 (eq (symbol_ref ("which_alternative"))
7002 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7003 (le (minus (match_dup 4) (pc)) (const_int 256)))
7006 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7007 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7011 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7012 (le (minus (match_dup 4) (pc)) (const_int 256)))
7015 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7016 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7021 (define_insn "*orrsi3_cbranch_scratch"
7024 (match_operator 4 "equality_operator"
7025 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7026 (match_operand:SI 2 "s_register_operand" "l"))
7028 (label_ref (match_operand 3 "" ""))
7030 (clobber (match_scratch:SI 0 "=l"))]
7034 output_asm_insn (\"orr\\t%0, %2\", operands);
7035 switch (get_attr_length (insn))
7037 case 4: return \"b%d4\\t%l3\";
7038 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7039 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7042 [(set (attr "far_jump")
7044 (eq_attr "length" "8")
7045 (const_string "yes")
7046 (const_string "no")))
7047 (set (attr "length")
7049 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7050 (le (minus (match_dup 3) (pc)) (const_int 256)))
7053 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7054 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7059 (define_insn "*orrsi3_cbranch"
7062 (match_operator 5 "equality_operator"
7063 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7064 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7066 (label_ref (match_operand 4 "" ""))
7068 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7069 (ior:SI (match_dup 2) (match_dup 3)))
7070 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7074 if (which_alternative == 0)
7075 output_asm_insn (\"orr\\t%0, %3\", operands);
7076 else if (which_alternative == 1)
7078 output_asm_insn (\"orr\\t%1, %3\", operands);
7079 output_asm_insn (\"mov\\t%0, %1\", operands);
7083 output_asm_insn (\"orr\\t%1, %3\", operands);
7084 output_asm_insn (\"str\\t%1, %0\", operands);
7087 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7089 case 4: return \"b%d5\\t%l4\";
7090 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7091 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7094 [(set (attr "far_jump")
7096 (ior (and (eq (symbol_ref ("which_alternative"))
7098 (eq_attr "length" "8"))
7099 (eq_attr "length" "10"))
7100 (const_string "yes")
7101 (const_string "no")))
7102 (set (attr "length")
7104 (eq (symbol_ref ("which_alternative"))
7107 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7108 (le (minus (match_dup 4) (pc)) (const_int 256)))
7111 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7112 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7116 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7117 (le (minus (match_dup 4) (pc)) (const_int 256)))
7120 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7121 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7126 (define_insn "*xorsi3_cbranch_scratch"
7129 (match_operator 4 "equality_operator"
7130 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7131 (match_operand:SI 2 "s_register_operand" "l"))
7133 (label_ref (match_operand 3 "" ""))
7135 (clobber (match_scratch:SI 0 "=l"))]
7139 output_asm_insn (\"eor\\t%0, %2\", operands);
7140 switch (get_attr_length (insn))
7142 case 4: return \"b%d4\\t%l3\";
7143 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7144 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7147 [(set (attr "far_jump")
7149 (eq_attr "length" "8")
7150 (const_string "yes")
7151 (const_string "no")))
7152 (set (attr "length")
7154 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7155 (le (minus (match_dup 3) (pc)) (const_int 256)))
7158 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7159 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7164 (define_insn "*xorsi3_cbranch"
7167 (match_operator 5 "equality_operator"
7168 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7169 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7171 (label_ref (match_operand 4 "" ""))
7173 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7174 (xor:SI (match_dup 2) (match_dup 3)))
7175 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7179 if (which_alternative == 0)
7180 output_asm_insn (\"eor\\t%0, %3\", operands);
7181 else if (which_alternative == 1)
7183 output_asm_insn (\"eor\\t%1, %3\", operands);
7184 output_asm_insn (\"mov\\t%0, %1\", operands);
7188 output_asm_insn (\"eor\\t%1, %3\", operands);
7189 output_asm_insn (\"str\\t%1, %0\", operands);
7192 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7194 case 4: return \"b%d5\\t%l4\";
7195 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7196 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7199 [(set (attr "far_jump")
7201 (ior (and (eq (symbol_ref ("which_alternative"))
7203 (eq_attr "length" "8"))
7204 (eq_attr "length" "10"))
7205 (const_string "yes")
7206 (const_string "no")))
7207 (set (attr "length")
7209 (eq (symbol_ref ("which_alternative"))
7212 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7213 (le (minus (match_dup 4) (pc)) (const_int 256)))
7216 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7217 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7221 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7222 (le (minus (match_dup 4) (pc)) (const_int 256)))
7225 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7226 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7231 (define_insn "*bicsi3_cbranch_scratch"
7234 (match_operator 4 "equality_operator"
7235 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7236 (match_operand:SI 1 "s_register_operand" "0"))
7238 (label_ref (match_operand 3 "" ""))
7240 (clobber (match_scratch:SI 0 "=l"))]
7244 output_asm_insn (\"bic\\t%0, %2\", operands);
7245 switch (get_attr_length (insn))
7247 case 4: return \"b%d4\\t%l3\";
7248 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7249 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7252 [(set (attr "far_jump")
7254 (eq_attr "length" "8")
7255 (const_string "yes")
7256 (const_string "no")))
7257 (set (attr "length")
7259 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7260 (le (minus (match_dup 3) (pc)) (const_int 256)))
7263 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7264 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7269 (define_insn "*bicsi3_cbranch"
7272 (match_operator 5 "equality_operator"
7273 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7274 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7276 (label_ref (match_operand 4 "" ""))
7278 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7279 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7280 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7284 if (which_alternative == 0)
7285 output_asm_insn (\"bic\\t%0, %3\", operands);
7286 else if (which_alternative <= 2)
7288 output_asm_insn (\"bic\\t%1, %3\", operands);
7289 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7290 conditions again, since we're only testing for equality. */
7291 output_asm_insn (\"mov\\t%0, %1\", operands);
7295 output_asm_insn (\"bic\\t%1, %3\", operands);
7296 output_asm_insn (\"str\\t%1, %0\", operands);
7299 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7301 case 4: return \"b%d5\\t%l4\";
7302 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7303 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7306 [(set (attr "far_jump")
7308 (ior (and (eq (symbol_ref ("which_alternative"))
7310 (eq_attr "length" "8"))
7311 (eq_attr "length" "10"))
7312 (const_string "yes")
7313 (const_string "no")))
7314 (set (attr "length")
7316 (eq (symbol_ref ("which_alternative"))
7319 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7320 (le (minus (match_dup 4) (pc)) (const_int 256)))
7323 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7324 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7328 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7329 (le (minus (match_dup 4) (pc)) (const_int 256)))
7332 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7333 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7338 (define_insn "*cbranchne_decr1"
7340 (if_then_else (match_operator 3 "equality_operator"
7341 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7343 (label_ref (match_operand 4 "" ""))
7345 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7346 (plus:SI (match_dup 2) (const_int -1)))
7347 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7352 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7354 VOIDmode, operands[2], const1_rtx);
7355 cond[1] = operands[4];
7357 if (which_alternative == 0)
7358 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7359 else if (which_alternative == 1)
7361 /* We must provide an alternative for a hi reg because reload
7362 cannot handle output reloads on a jump instruction, but we
7363 can't subtract into that. Fortunately a mov from lo to hi
7364 does not clobber the condition codes. */
7365 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7366 output_asm_insn (\"mov\\t%0, %1\", operands);
7370 /* Similarly, but the target is memory. */
7371 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7372 output_asm_insn (\"str\\t%1, %0\", operands);
7375 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7378 output_asm_insn (\"b%d0\\t%l1\", cond);
7381 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7382 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7384 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7385 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7389 [(set (attr "far_jump")
7391 (ior (and (eq (symbol_ref ("which_alternative"))
7393 (eq_attr "length" "8"))
7394 (eq_attr "length" "10"))
7395 (const_string "yes")
7396 (const_string "no")))
7397 (set_attr_alternative "length"
7401 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7402 (le (minus (match_dup 4) (pc)) (const_int 256)))
7405 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7406 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7411 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7412 (le (minus (match_dup 4) (pc)) (const_int 256)))
7415 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7416 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7421 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7422 (le (minus (match_dup 4) (pc)) (const_int 256)))
7425 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7426 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7431 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7432 (le (minus (match_dup 4) (pc)) (const_int 256)))
7435 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7436 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7441 (define_insn "*addsi3_cbranch"
7444 (match_operator 4 "arm_comparison_operator"
7446 (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
7447 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
7449 (label_ref (match_operand 5 "" ""))
7452 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7453 (plus:SI (match_dup 2) (match_dup 3)))
7454 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7456 && (GET_CODE (operands[4]) == EQ
7457 || GET_CODE (operands[4]) == NE
7458 || GET_CODE (operands[4]) == GE
7459 || GET_CODE (operands[4]) == LT)"
7464 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7465 cond[1] = operands[2];
7466 cond[2] = operands[3];
7468 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7469 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7471 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7473 if (which_alternative >= 2
7474 && which_alternative < 4)
7475 output_asm_insn (\"mov\\t%0, %1\", operands);
7476 else if (which_alternative >= 4)
7477 output_asm_insn (\"str\\t%1, %0\", operands);
7479 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7482 return \"b%d4\\t%l5\";
7484 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7486 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7490 [(set (attr "far_jump")
7492 (ior (and (lt (symbol_ref ("which_alternative"))
7494 (eq_attr "length" "8"))
7495 (eq_attr "length" "10"))
7496 (const_string "yes")
7497 (const_string "no")))
7498 (set (attr "length")
7500 (lt (symbol_ref ("which_alternative"))
7503 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7504 (le (minus (match_dup 5) (pc)) (const_int 256)))
7507 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7508 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7512 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7513 (le (minus (match_dup 5) (pc)) (const_int 256)))
7516 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7517 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7522 (define_insn "*addsi3_cbranch_scratch"
7525 (match_operator 3 "arm_comparison_operator"
7527 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7528 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7530 (label_ref (match_operand 4 "" ""))
7532 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7534 && (GET_CODE (operands[3]) == EQ
7535 || GET_CODE (operands[3]) == NE
7536 || GET_CODE (operands[3]) == GE
7537 || GET_CODE (operands[3]) == LT)"
7540 switch (which_alternative)
7543 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7546 output_asm_insn (\"cmn\t%1, %2\", operands);
7549 if (INTVAL (operands[2]) < 0)
7550 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7552 output_asm_insn (\"add\t%0, %1, %2\", operands);
7555 if (INTVAL (operands[2]) < 0)
7556 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7558 output_asm_insn (\"add\t%0, %0, %2\", operands);
7562 switch (get_attr_length (insn))
7565 return \"b%d3\\t%l4\";
7567 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7569 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7573 [(set (attr "far_jump")
7575 (eq_attr "length" "8")
7576 (const_string "yes")
7577 (const_string "no")))
7578 (set (attr "length")
7580 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7581 (le (minus (match_dup 4) (pc)) (const_int 256)))
7584 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7585 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7590 (define_insn "*subsi3_cbranch"
7593 (match_operator 4 "arm_comparison_operator"
7595 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7596 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7598 (label_ref (match_operand 5 "" ""))
7600 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7601 (minus:SI (match_dup 2) (match_dup 3)))
7602 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7604 && (GET_CODE (operands[4]) == EQ
7605 || GET_CODE (operands[4]) == NE
7606 || GET_CODE (operands[4]) == GE
7607 || GET_CODE (operands[4]) == LT)"
7610 if (which_alternative == 0)
7611 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7612 else if (which_alternative == 1)
7614 /* We must provide an alternative for a hi reg because reload
7615 cannot handle output reloads on a jump instruction, but we
7616 can't subtract into that. Fortunately a mov from lo to hi
7617 does not clobber the condition codes. */
7618 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7619 output_asm_insn (\"mov\\t%0, %1\", operands);
7623 /* Similarly, but the target is memory. */
7624 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7625 output_asm_insn (\"str\\t%1, %0\", operands);
7628 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7631 return \"b%d4\\t%l5\";
7633 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7635 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7639 [(set (attr "far_jump")
7641 (ior (and (eq (symbol_ref ("which_alternative"))
7643 (eq_attr "length" "8"))
7644 (eq_attr "length" "10"))
7645 (const_string "yes")
7646 (const_string "no")))
7647 (set (attr "length")
7649 (eq (symbol_ref ("which_alternative"))
7652 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7653 (le (minus (match_dup 5) (pc)) (const_int 256)))
7656 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7657 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7661 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7662 (le (minus (match_dup 5) (pc)) (const_int 256)))
7665 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7666 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7671 (define_insn "*subsi3_cbranch_scratch"
7674 (match_operator 0 "arm_comparison_operator"
7675 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7676 (match_operand:SI 2 "nonmemory_operand" "l"))
7678 (label_ref (match_operand 3 "" ""))
7681 && (GET_CODE (operands[0]) == EQ
7682 || GET_CODE (operands[0]) == NE
7683 || GET_CODE (operands[0]) == GE
7684 || GET_CODE (operands[0]) == LT)"
7686 output_asm_insn (\"cmp\\t%1, %2\", operands);
7687 switch (get_attr_length (insn))
7689 case 4: return \"b%d0\\t%l3\";
7690 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7691 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7694 [(set (attr "far_jump")
7696 (eq_attr "length" "8")
7697 (const_string "yes")
7698 (const_string "no")))
7699 (set (attr "length")
7701 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7702 (le (minus (match_dup 3) (pc)) (const_int 256)))
7705 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7706 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7711 ;; Comparison and test insns
7713 (define_insn "*arm_cmpsi_insn"
7714 [(set (reg:CC CC_REGNUM)
7715 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7716 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7721 [(set_attr "conds" "set")]
7724 (define_insn "*arm_cmpsi_shiftsi"
7725 [(set (reg:CC CC_REGNUM)
7726 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7727 (match_operator:SI 3 "shift_operator"
7728 [(match_operand:SI 1 "s_register_operand" "r")
7729 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7732 [(set_attr "conds" "set")
7733 (set_attr "shift" "1")
7734 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7735 (const_string "alu_shift")
7736 (const_string "alu_shift_reg")))]
7739 (define_insn "*arm_cmpsi_shiftsi_swp"
7740 [(set (reg:CC_SWP CC_REGNUM)
7741 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7742 [(match_operand:SI 1 "s_register_operand" "r")
7743 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7744 (match_operand:SI 0 "s_register_operand" "r")))]
7747 [(set_attr "conds" "set")
7748 (set_attr "shift" "1")
7749 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7750 (const_string "alu_shift")
7751 (const_string "alu_shift_reg")))]
7754 (define_insn "*arm_cmpsi_negshiftsi_si"
7755 [(set (reg:CC_Z CC_REGNUM)
7757 (neg:SI (match_operator:SI 1 "shift_operator"
7758 [(match_operand:SI 2 "s_register_operand" "r")
7759 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7760 (match_operand:SI 0 "s_register_operand" "r")))]
7763 [(set_attr "conds" "set")
7764 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7765 (const_string "alu_shift")
7766 (const_string "alu_shift_reg")))]
7769 ;; Cirrus SF compare instruction
7770 (define_insn "*cirrus_cmpsf"
7771 [(set (reg:CCFP CC_REGNUM)
7772 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7773 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7774 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7775 "cfcmps%?\\tr15, %V0, %V1"
7776 [(set_attr "type" "mav_farith")
7777 (set_attr "cirrus" "compare")]
7780 ;; Cirrus DF compare instruction
7781 (define_insn "*cirrus_cmpdf"
7782 [(set (reg:CCFP CC_REGNUM)
7783 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7784 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7785 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7786 "cfcmpd%?\\tr15, %V0, %V1"
7787 [(set_attr "type" "mav_farith")
7788 (set_attr "cirrus" "compare")]
7791 (define_insn "*cirrus_cmpdi"
7792 [(set (reg:CC CC_REGNUM)
7793 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7794 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7795 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7796 "cfcmp64%?\\tr15, %V0, %V1"
7797 [(set_attr "type" "mav_farith")
7798 (set_attr "cirrus" "compare")]
7801 ; This insn allows redundant compares to be removed by cse, nothing should
7802 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7803 ; is deleted later on. The match_dup will match the mode here, so that
7804 ; mode changes of the condition codes aren't lost by this even though we don't
7805 ; specify what they are.
7807 (define_insn "*deleted_compare"
7808 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7810 "\\t%@ deleted compare"
7811 [(set_attr "conds" "set")
7812 (set_attr "length" "0")]
7816 ;; Conditional branch insns
7818 (define_expand "cbranch_cc"
7820 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7821 (match_operand 2 "" "")])
7822 (label_ref (match_operand 3 "" ""))
7825 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7826 operands[1], operands[2]);
7827 operands[2] = const0_rtx;"
7831 ;; Patterns to match conditional branch insns.
7834 (define_insn "*arm_cond_branch"
7836 (if_then_else (match_operator 1 "arm_comparison_operator"
7837 [(match_operand 2 "cc_register" "") (const_int 0)])
7838 (label_ref (match_operand 0 "" ""))
7842 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7844 arm_ccfsm_state += 2;
7847 return \"b%d1\\t%l0\";
7849 [(set_attr "conds" "use")
7850 (set_attr "type" "branch")]
7853 (define_insn "*arm_cond_branch_reversed"
7855 (if_then_else (match_operator 1 "arm_comparison_operator"
7856 [(match_operand 2 "cc_register" "") (const_int 0)])
7858 (label_ref (match_operand 0 "" ""))))]
7861 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7863 arm_ccfsm_state += 2;
7866 return \"b%D1\\t%l0\";
7868 [(set_attr "conds" "use")
7869 (set_attr "type" "branch")]
7876 (define_expand "cstore_cc"
7877 [(set (match_operand:SI 0 "s_register_operand" "")
7878 (match_operator:SI 1 "" [(match_operand 2 "" "")
7879 (match_operand 3 "" "")]))]
7881 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7882 operands[2], operands[3]);
7883 operands[3] = const0_rtx;"
7886 (define_insn "*mov_scc"
7887 [(set (match_operand:SI 0 "s_register_operand" "=r")
7888 (match_operator:SI 1 "arm_comparison_operator"
7889 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7891 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7892 [(set_attr "conds" "use")
7893 (set_attr "length" "8")]
7896 (define_insn "*mov_negscc"
7897 [(set (match_operand:SI 0 "s_register_operand" "=r")
7898 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7899 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7901 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7902 [(set_attr "conds" "use")
7903 (set_attr "length" "8")]
7906 (define_insn "*mov_notscc"
7907 [(set (match_operand:SI 0 "s_register_operand" "=r")
7908 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7909 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7911 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7912 [(set_attr "conds" "use")
7913 (set_attr "length" "8")]
7916 (define_expand "cstoresi4"
7917 [(set (match_operand:SI 0 "s_register_operand" "")
7918 (match_operator:SI 1 "arm_comparison_operator"
7919 [(match_operand:SI 2 "s_register_operand" "")
7920 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7921 "TARGET_32BIT || TARGET_THUMB1"
7923 rtx op3, scratch, scratch2;
7927 if (!arm_add_operand (operands[3], SImode))
7928 operands[3] = force_reg (SImode, operands[3]);
7929 emit_insn (gen_cstore_cc (operands[0], operands[1],
7930 operands[2], operands[3]));
7934 if (operands[3] == const0_rtx)
7936 switch (GET_CODE (operands[1]))
7939 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7943 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7947 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7948 NULL_RTX, 0, OPTAB_WIDEN);
7949 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7950 NULL_RTX, 0, OPTAB_WIDEN);
7951 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7952 operands[0], 1, OPTAB_WIDEN);
7956 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7958 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7959 NULL_RTX, 1, OPTAB_WIDEN);
7963 scratch = expand_binop (SImode, ashr_optab, operands[2],
7964 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7965 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7966 NULL_RTX, 0, OPTAB_WIDEN);
7967 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7971 /* LT is handled by generic code. No need for unsigned with 0. */
7978 switch (GET_CODE (operands[1]))
7981 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7982 NULL_RTX, 0, OPTAB_WIDEN);
7983 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7987 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7988 NULL_RTX, 0, OPTAB_WIDEN);
7989 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7993 op3 = force_reg (SImode, operands[3]);
7995 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7996 NULL_RTX, 1, OPTAB_WIDEN);
7997 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7998 NULL_RTX, 0, OPTAB_WIDEN);
7999 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8005 if (!thumb1_cmp_operand (op3, SImode))
8006 op3 = force_reg (SImode, op3);
8007 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8008 NULL_RTX, 0, OPTAB_WIDEN);
8009 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8010 NULL_RTX, 1, OPTAB_WIDEN);
8011 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8016 op3 = force_reg (SImode, operands[3]);
8017 scratch = force_reg (SImode, const0_rtx);
8018 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8024 if (!thumb1_cmp_operand (op3, SImode))
8025 op3 = force_reg (SImode, op3);
8026 scratch = force_reg (SImode, const0_rtx);
8027 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8033 if (!thumb1_cmp_operand (op3, SImode))
8034 op3 = force_reg (SImode, op3);
8035 scratch = gen_reg_rtx (SImode);
8036 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8040 op3 = force_reg (SImode, operands[3]);
8041 scratch = gen_reg_rtx (SImode);
8042 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8045 /* No good sequences for GT, LT. */
8052 (define_expand "cstoresf4"
8053 [(set (match_operand:SI 0 "s_register_operand" "")
8054 (match_operator:SI 1 "arm_comparison_operator"
8055 [(match_operand:SF 2 "s_register_operand" "")
8056 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8057 "TARGET_32BIT && TARGET_HARD_FLOAT"
8058 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8059 operands[2], operands[3])); DONE;"
8062 (define_expand "cstoredf4"
8063 [(set (match_operand:SI 0 "s_register_operand" "")
8064 (match_operator:SI 1 "arm_comparison_operator"
8065 [(match_operand:DF 2 "s_register_operand" "")
8066 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8067 "TARGET_32BIT && TARGET_HARD_FLOAT"
8068 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8069 operands[2], operands[3])); DONE;"
8072 ;; this uses the Cirrus DI compare instruction
8073 (define_expand "cstoredi4"
8074 [(set (match_operand:SI 0 "s_register_operand" "")
8075 (match_operator:SI 1 "arm_comparison_operator"
8076 [(match_operand:DI 2 "cirrus_fp_register" "")
8077 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8078 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8079 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8080 operands[2], operands[3])); DONE;"
8084 (define_expand "cstoresi_eq0_thumb1"
8086 [(set (match_operand:SI 0 "s_register_operand" "")
8087 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8089 (clobber (match_dup:SI 2))])]
8091 "operands[2] = gen_reg_rtx (SImode);"
8094 (define_expand "cstoresi_ne0_thumb1"
8096 [(set (match_operand:SI 0 "s_register_operand" "")
8097 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8099 (clobber (match_dup:SI 2))])]
8101 "operands[2] = gen_reg_rtx (SImode);"
8104 (define_insn "*cstoresi_eq0_thumb1_insn"
8105 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8106 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8108 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8111 neg\\t%0, %1\;adc\\t%0, %0, %1
8112 neg\\t%2, %1\;adc\\t%0, %1, %2"
8113 [(set_attr "length" "4")]
8116 (define_insn "*cstoresi_ne0_thumb1_insn"
8117 [(set (match_operand:SI 0 "s_register_operand" "=l")
8118 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8120 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8122 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8123 [(set_attr "length" "4")]
8126 ;; Used as part of the expansion of thumb ltu and gtu sequences
8127 (define_insn "cstoresi_nltu_thumb1"
8128 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8129 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8130 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8132 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8133 [(set_attr "length" "4")]
8136 (define_insn_and_split "cstoresi_ltu_thumb1"
8137 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8138 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8139 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8144 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8145 (set (match_dup 0) (neg:SI (match_dup 3)))]
8146 "operands[3] = gen_reg_rtx (SImode);"
8147 [(set_attr "length" "4")]
8150 ;; Used as part of the expansion of thumb les sequence.
8151 (define_insn "thumb1_addsi3_addgeu"
8152 [(set (match_operand:SI 0 "s_register_operand" "=l")
8153 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8154 (match_operand:SI 2 "s_register_operand" "l"))
8155 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8156 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8158 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8159 [(set_attr "length" "4")]
8163 ;; Conditional move insns
8165 (define_expand "movsicc"
8166 [(set (match_operand:SI 0 "s_register_operand" "")
8167 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8168 (match_operand:SI 2 "arm_not_operand" "")
8169 (match_operand:SI 3 "arm_not_operand" "")))]
8173 enum rtx_code code = GET_CODE (operands[1]);
8176 if (code == UNEQ || code == LTGT)
8179 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8180 XEXP (operands[1], 1));
8181 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8185 (define_expand "movsfcc"
8186 [(set (match_operand:SF 0 "s_register_operand" "")
8187 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8188 (match_operand:SF 2 "s_register_operand" "")
8189 (match_operand:SF 3 "nonmemory_operand" "")))]
8190 "TARGET_32BIT && TARGET_HARD_FLOAT"
8193 enum rtx_code code = GET_CODE (operands[1]);
8196 if (code == UNEQ || code == LTGT)
8199 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8200 Otherwise, ensure it is a valid FP add operand */
8201 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8202 || (!arm_float_add_operand (operands[3], SFmode)))
8203 operands[3] = force_reg (SFmode, operands[3]);
8205 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8206 XEXP (operands[1], 1));
8207 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8211 (define_expand "movdfcc"
8212 [(set (match_operand:DF 0 "s_register_operand" "")
8213 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8214 (match_operand:DF 2 "s_register_operand" "")
8215 (match_operand:DF 3 "arm_float_add_operand" "")))]
8216 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8219 enum rtx_code code = GET_CODE (operands[1]);
8222 if (code == UNEQ || code == LTGT)
8225 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8226 XEXP (operands[1], 1));
8227 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8231 (define_insn "*movsicc_insn"
8232 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8234 (match_operator 3 "arm_comparison_operator"
8235 [(match_operand 4 "cc_register" "") (const_int 0)])
8236 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8237 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8244 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8245 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8246 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8247 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8248 [(set_attr "length" "4,4,4,4,8,8,8,8")
8249 (set_attr "conds" "use")]
8252 (define_insn "*movsfcc_soft_insn"
8253 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8254 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8255 [(match_operand 4 "cc_register" "") (const_int 0)])
8256 (match_operand:SF 1 "s_register_operand" "0,r")
8257 (match_operand:SF 2 "s_register_operand" "r,0")))]
8258 "TARGET_ARM && TARGET_SOFT_FLOAT"
8262 [(set_attr "conds" "use")]
8266 ;; Jump and linkage insns
8268 (define_expand "jump"
8270 (label_ref (match_operand 0 "" "")))]
8275 (define_insn "*arm_jump"
8277 (label_ref (match_operand 0 "" "")))]
8281 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8283 arm_ccfsm_state += 2;
8286 return \"b%?\\t%l0\";
8289 [(set_attr "predicable" "yes")]
8292 (define_insn "*thumb_jump"
8294 (label_ref (match_operand 0 "" "")))]
8297 if (get_attr_length (insn) == 2)
8299 return \"bl\\t%l0\\t%@ far jump\";
8301 [(set (attr "far_jump")
8303 (eq_attr "length" "4")
8304 (const_string "yes")
8305 (const_string "no")))
8306 (set (attr "length")
8308 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8309 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8314 (define_expand "call"
8315 [(parallel [(call (match_operand 0 "memory_operand" "")
8316 (match_operand 1 "general_operand" ""))
8317 (use (match_operand 2 "" ""))
8318 (clobber (reg:SI LR_REGNUM))])]
8324 /* In an untyped call, we can get NULL for operand 2. */
8325 if (operands[2] == NULL_RTX)
8326 operands[2] = const0_rtx;
8328 /* Decide if we should generate indirect calls by loading the
8329 32-bit address of the callee into a register before performing the
8331 callee = XEXP (operands[0], 0);
8332 if (GET_CODE (callee) == SYMBOL_REF
8333 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8335 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8337 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8338 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8343 (define_expand "call_internal"
8344 [(parallel [(call (match_operand 0 "memory_operand" "")
8345 (match_operand 1 "general_operand" ""))
8346 (use (match_operand 2 "" ""))
8347 (clobber (reg:SI LR_REGNUM))])])
8349 (define_insn "*call_reg_armv5"
8350 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8351 (match_operand 1 "" ""))
8352 (use (match_operand 2 "" ""))
8353 (clobber (reg:SI LR_REGNUM))]
8354 "TARGET_ARM && arm_arch5"
8356 [(set_attr "type" "call")]
8359 (define_insn "*call_reg_arm"
8360 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8361 (match_operand 1 "" ""))
8362 (use (match_operand 2 "" ""))
8363 (clobber (reg:SI LR_REGNUM))]
8364 "TARGET_ARM && !arm_arch5"
8366 return output_call (operands);
8368 ;; length is worst case, normally it is only two
8369 [(set_attr "length" "12")
8370 (set_attr "type" "call")]
8374 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8375 ;; considered a function call by the branch predictor of some cores (PR40887).
8376 ;; Falls back to blx rN (*call_reg_armv5).
8378 (define_insn "*call_mem"
8379 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8380 (match_operand 1 "" ""))
8381 (use (match_operand 2 "" ""))
8382 (clobber (reg:SI LR_REGNUM))]
8383 "TARGET_ARM && !arm_arch5"
8385 return output_call_mem (operands);
8387 [(set_attr "length" "12")
8388 (set_attr "type" "call")]
8391 (define_insn "*call_reg_thumb1_v5"
8392 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8393 (match_operand 1 "" ""))
8394 (use (match_operand 2 "" ""))
8395 (clobber (reg:SI LR_REGNUM))]
8396 "TARGET_THUMB1 && arm_arch5"
8398 [(set_attr "length" "2")
8399 (set_attr "type" "call")]
8402 (define_insn "*call_reg_thumb1"
8403 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8404 (match_operand 1 "" ""))
8405 (use (match_operand 2 "" ""))
8406 (clobber (reg:SI LR_REGNUM))]
8407 "TARGET_THUMB1 && !arm_arch5"
8410 if (!TARGET_CALLER_INTERWORKING)
8411 return thumb_call_via_reg (operands[0]);
8412 else if (operands[1] == const0_rtx)
8413 return \"bl\\t%__interwork_call_via_%0\";
8414 else if (frame_pointer_needed)
8415 return \"bl\\t%__interwork_r7_call_via_%0\";
8417 return \"bl\\t%__interwork_r11_call_via_%0\";
8419 [(set_attr "type" "call")]
8422 (define_expand "call_value"
8423 [(parallel [(set (match_operand 0 "" "")
8424 (call (match_operand 1 "memory_operand" "")
8425 (match_operand 2 "general_operand" "")))
8426 (use (match_operand 3 "" ""))
8427 (clobber (reg:SI LR_REGNUM))])]
8433 /* In an untyped call, we can get NULL for operand 2. */
8434 if (operands[3] == 0)
8435 operands[3] = const0_rtx;
8437 /* Decide if we should generate indirect calls by loading the
8438 32-bit address of the callee into a register before performing the
8440 callee = XEXP (operands[1], 0);
8441 if (GET_CODE (callee) == SYMBOL_REF
8442 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8444 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8446 pat = gen_call_value_internal (operands[0], operands[1],
8447 operands[2], operands[3]);
8448 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8453 (define_expand "call_value_internal"
8454 [(parallel [(set (match_operand 0 "" "")
8455 (call (match_operand 1 "memory_operand" "")
8456 (match_operand 2 "general_operand" "")))
8457 (use (match_operand 3 "" ""))
8458 (clobber (reg:SI LR_REGNUM))])])
8460 (define_insn "*call_value_reg_armv5"
8461 [(set (match_operand 0 "" "")
8462 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8463 (match_operand 2 "" "")))
8464 (use (match_operand 3 "" ""))
8465 (clobber (reg:SI LR_REGNUM))]
8466 "TARGET_ARM && arm_arch5"
8468 [(set_attr "type" "call")]
8471 (define_insn "*call_value_reg_arm"
8472 [(set (match_operand 0 "" "")
8473 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8474 (match_operand 2 "" "")))
8475 (use (match_operand 3 "" ""))
8476 (clobber (reg:SI LR_REGNUM))]
8477 "TARGET_ARM && !arm_arch5"
8479 return output_call (&operands[1]);
8481 [(set_attr "length" "12")
8482 (set_attr "type" "call")]
8485 ;; Note: see *call_mem
8487 (define_insn "*call_value_mem"
8488 [(set (match_operand 0 "" "")
8489 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8490 (match_operand 2 "" "")))
8491 (use (match_operand 3 "" ""))
8492 (clobber (reg:SI LR_REGNUM))]
8493 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8495 return output_call_mem (&operands[1]);
8497 [(set_attr "length" "12")
8498 (set_attr "type" "call")]
8501 (define_insn "*call_value_reg_thumb1_v5"
8502 [(set (match_operand 0 "" "")
8503 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8504 (match_operand 2 "" "")))
8505 (use (match_operand 3 "" ""))
8506 (clobber (reg:SI LR_REGNUM))]
8507 "TARGET_THUMB1 && arm_arch5"
8509 [(set_attr "length" "2")
8510 (set_attr "type" "call")]
8513 (define_insn "*call_value_reg_thumb1"
8514 [(set (match_operand 0 "" "")
8515 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8516 (match_operand 2 "" "")))
8517 (use (match_operand 3 "" ""))
8518 (clobber (reg:SI LR_REGNUM))]
8519 "TARGET_THUMB1 && !arm_arch5"
8522 if (!TARGET_CALLER_INTERWORKING)
8523 return thumb_call_via_reg (operands[1]);
8524 else if (operands[2] == const0_rtx)
8525 return \"bl\\t%__interwork_call_via_%1\";
8526 else if (frame_pointer_needed)
8527 return \"bl\\t%__interwork_r7_call_via_%1\";
8529 return \"bl\\t%__interwork_r11_call_via_%1\";
8531 [(set_attr "type" "call")]
8534 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8535 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8537 (define_insn "*call_symbol"
8538 [(call (mem:SI (match_operand:SI 0 "" ""))
8539 (match_operand 1 "" ""))
8540 (use (match_operand 2 "" ""))
8541 (clobber (reg:SI LR_REGNUM))]
8543 && (GET_CODE (operands[0]) == SYMBOL_REF)
8544 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8547 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8549 [(set_attr "type" "call")]
8552 (define_insn "*call_value_symbol"
8553 [(set (match_operand 0 "" "")
8554 (call (mem:SI (match_operand:SI 1 "" ""))
8555 (match_operand:SI 2 "" "")))
8556 (use (match_operand 3 "" ""))
8557 (clobber (reg:SI LR_REGNUM))]
8559 && (GET_CODE (operands[1]) == SYMBOL_REF)
8560 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8563 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8565 [(set_attr "type" "call")]
8568 (define_insn "*call_insn"
8569 [(call (mem:SI (match_operand:SI 0 "" ""))
8570 (match_operand:SI 1 "" ""))
8571 (use (match_operand 2 "" ""))
8572 (clobber (reg:SI LR_REGNUM))]
8574 && GET_CODE (operands[0]) == SYMBOL_REF
8575 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8577 [(set_attr "length" "4")
8578 (set_attr "type" "call")]
8581 (define_insn "*call_value_insn"
8582 [(set (match_operand 0 "" "")
8583 (call (mem:SI (match_operand 1 "" ""))
8584 (match_operand 2 "" "")))
8585 (use (match_operand 3 "" ""))
8586 (clobber (reg:SI LR_REGNUM))]
8588 && GET_CODE (operands[1]) == SYMBOL_REF
8589 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8591 [(set_attr "length" "4")
8592 (set_attr "type" "call")]
8595 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8596 (define_expand "sibcall"
8597 [(parallel [(call (match_operand 0 "memory_operand" "")
8598 (match_operand 1 "general_operand" ""))
8600 (use (match_operand 2 "" ""))])]
8604 if (operands[2] == NULL_RTX)
8605 operands[2] = const0_rtx;
8609 (define_expand "sibcall_value"
8610 [(parallel [(set (match_operand 0 "" "")
8611 (call (match_operand 1 "memory_operand" "")
8612 (match_operand 2 "general_operand" "")))
8614 (use (match_operand 3 "" ""))])]
8618 if (operands[3] == NULL_RTX)
8619 operands[3] = const0_rtx;
8623 (define_insn "*sibcall_insn"
8624 [(call (mem:SI (match_operand:SI 0 "" "X"))
8625 (match_operand 1 "" ""))
8627 (use (match_operand 2 "" ""))]
8628 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8630 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8632 [(set_attr "type" "call")]
8635 (define_insn "*sibcall_value_insn"
8636 [(set (match_operand 0 "" "")
8637 (call (mem:SI (match_operand:SI 1 "" "X"))
8638 (match_operand 2 "" "")))
8640 (use (match_operand 3 "" ""))]
8641 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8643 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8645 [(set_attr "type" "call")]
8648 (define_expand "return"
8650 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8653 ;; Often the return insn will be the same as loading from memory, so set attr
8654 (define_insn "*arm_return"
8656 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8659 if (arm_ccfsm_state == 2)
8661 arm_ccfsm_state += 2;
8664 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8666 [(set_attr "type" "load1")
8667 (set_attr "length" "12")
8668 (set_attr "predicable" "yes")]
8671 (define_insn "*cond_return"
8673 (if_then_else (match_operator 0 "arm_comparison_operator"
8674 [(match_operand 1 "cc_register" "") (const_int 0)])
8677 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8680 if (arm_ccfsm_state == 2)
8682 arm_ccfsm_state += 2;
8685 return output_return_instruction (operands[0], TRUE, FALSE);
8687 [(set_attr "conds" "use")
8688 (set_attr "length" "12")
8689 (set_attr "type" "load1")]
8692 (define_insn "*cond_return_inverted"
8694 (if_then_else (match_operator 0 "arm_comparison_operator"
8695 [(match_operand 1 "cc_register" "") (const_int 0)])
8698 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8701 if (arm_ccfsm_state == 2)
8703 arm_ccfsm_state += 2;
8706 return output_return_instruction (operands[0], TRUE, TRUE);
8708 [(set_attr "conds" "use")
8709 (set_attr "length" "12")
8710 (set_attr "type" "load1")]
8713 ;; Generate a sequence of instructions to determine if the processor is
8714 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8717 (define_expand "return_addr_mask"
8719 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8721 (set (match_operand:SI 0 "s_register_operand" "")
8722 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8724 (const_int 67108860)))] ; 0x03fffffc
8727 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8730 (define_insn "*check_arch2"
8731 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8732 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8735 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8736 [(set_attr "length" "8")
8737 (set_attr "conds" "set")]
8740 ;; Call subroutine returning any type.
8742 (define_expand "untyped_call"
8743 [(parallel [(call (match_operand 0 "" "")
8745 (match_operand 1 "" "")
8746 (match_operand 2 "" "")])]
8751 rtx par = gen_rtx_PARALLEL (VOIDmode,
8752 rtvec_alloc (XVECLEN (operands[2], 0)));
8753 rtx addr = gen_reg_rtx (Pmode);
8757 emit_move_insn (addr, XEXP (operands[1], 0));
8758 mem = change_address (operands[1], BLKmode, addr);
8760 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8762 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8764 /* Default code only uses r0 as a return value, but we could
8765 be using anything up to 4 registers. */
8766 if (REGNO (src) == R0_REGNUM)
8767 src = gen_rtx_REG (TImode, R0_REGNUM);
8769 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8771 size += GET_MODE_SIZE (GET_MODE (src));
8774 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8779 for (i = 0; i < XVECLEN (par, 0); i++)
8781 HOST_WIDE_INT offset = 0;
8782 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8785 emit_move_insn (addr, plus_constant (addr, size));
8787 mem = change_address (mem, GET_MODE (reg), NULL);
8788 if (REGNO (reg) == R0_REGNUM)
8790 /* On thumb we have to use a write-back instruction. */
8791 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8792 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8793 size = TARGET_ARM ? 16 : 0;
8797 emit_move_insn (mem, reg);
8798 size = GET_MODE_SIZE (GET_MODE (reg));
8802 /* The optimizer does not know that the call sets the function value
8803 registers we stored in the result block. We avoid problems by
8804 claiming that all hard registers are used and clobbered at this
8806 emit_insn (gen_blockage ());
8812 (define_expand "untyped_return"
8813 [(match_operand:BLK 0 "memory_operand" "")
8814 (match_operand 1 "" "")]
8819 rtx addr = gen_reg_rtx (Pmode);
8823 emit_move_insn (addr, XEXP (operands[0], 0));
8824 mem = change_address (operands[0], BLKmode, addr);
8826 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8828 HOST_WIDE_INT offset = 0;
8829 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8832 emit_move_insn (addr, plus_constant (addr, size));
8834 mem = change_address (mem, GET_MODE (reg), NULL);
8835 if (REGNO (reg) == R0_REGNUM)
8837 /* On thumb we have to use a write-back instruction. */
8838 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8839 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8840 size = TARGET_ARM ? 16 : 0;
8844 emit_move_insn (reg, mem);
8845 size = GET_MODE_SIZE (GET_MODE (reg));
8849 /* Emit USE insns before the return. */
8850 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8851 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8853 /* Construct the return. */
8854 expand_naked_return ();
8860 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8861 ;; all of memory. This blocks insns from being moved across this point.
8863 (define_insn "blockage"
8864 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8867 [(set_attr "length" "0")
8868 (set_attr "type" "block")]
8871 (define_expand "casesi"
8872 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8873 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8874 (match_operand:SI 2 "const_int_operand" "") ; total range
8875 (match_operand:SI 3 "" "") ; table label
8876 (match_operand:SI 4 "" "")] ; Out of range label
8877 "TARGET_32BIT || optimize_size || flag_pic"
8880 enum insn_code code;
8881 if (operands[1] != const0_rtx)
8883 rtx reg = gen_reg_rtx (SImode);
8885 emit_insn (gen_addsi3 (reg, operands[0],
8886 GEN_INT (-INTVAL (operands[1]))));
8891 code = CODE_FOR_arm_casesi_internal;
8892 else if (TARGET_THUMB1)
8893 code = CODE_FOR_thumb1_casesi_internal_pic;
8895 code = CODE_FOR_thumb2_casesi_internal_pic;
8897 code = CODE_FOR_thumb2_casesi_internal;
8899 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8900 operands[2] = force_reg (SImode, operands[2]);
8902 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8903 operands[3], operands[4]));
8908 ;; The USE in this pattern is needed to tell flow analysis that this is
8909 ;; a CASESI insn. It has no other purpose.
8910 (define_insn "arm_casesi_internal"
8911 [(parallel [(set (pc)
8913 (leu (match_operand:SI 0 "s_register_operand" "r")
8914 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8915 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8916 (label_ref (match_operand 2 "" ""))))
8917 (label_ref (match_operand 3 "" ""))))
8918 (clobber (reg:CC CC_REGNUM))
8919 (use (label_ref (match_dup 2)))])]
8923 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8924 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8926 [(set_attr "conds" "clob")
8927 (set_attr "length" "12")]
8930 (define_expand "thumb1_casesi_internal_pic"
8931 [(match_operand:SI 0 "s_register_operand" "")
8932 (match_operand:SI 1 "thumb1_cmp_operand" "")
8933 (match_operand 2 "" "")
8934 (match_operand 3 "" "")]
8938 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8939 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8941 reg0 = gen_rtx_REG (SImode, 0);
8942 emit_move_insn (reg0, operands[0]);
8943 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8948 (define_insn "thumb1_casesi_dispatch"
8949 [(parallel [(set (pc) (unspec [(reg:SI 0)
8950 (label_ref (match_operand 0 "" ""))
8951 ;; (label_ref (match_operand 1 "" ""))
8953 UNSPEC_THUMB1_CASESI))
8954 (clobber (reg:SI IP_REGNUM))
8955 (clobber (reg:SI LR_REGNUM))])]
8957 "* return thumb1_output_casesi(operands);"
8958 [(set_attr "length" "4")]
8961 (define_expand "indirect_jump"
8963 (match_operand:SI 0 "s_register_operand" ""))]
8966 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8967 address and use bx. */
8971 tmp = gen_reg_rtx (SImode);
8972 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8978 ;; NB Never uses BX.
8979 (define_insn "*arm_indirect_jump"
8981 (match_operand:SI 0 "s_register_operand" "r"))]
8983 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8984 [(set_attr "predicable" "yes")]
8987 (define_insn "*load_indirect_jump"
8989 (match_operand:SI 0 "memory_operand" "m"))]
8991 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8992 [(set_attr "type" "load1")
8993 (set_attr "pool_range" "4096")
8994 (set_attr "neg_pool_range" "4084")
8995 (set_attr "predicable" "yes")]
8998 ;; NB Never uses BX.
8999 (define_insn "*thumb1_indirect_jump"
9001 (match_operand:SI 0 "register_operand" "l*r"))]
9004 [(set_attr "conds" "clob")
9005 (set_attr "length" "2")]
9015 if (TARGET_UNIFIED_ASM)
9018 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9019 return \"mov\\tr8, r8\";
9021 [(set (attr "length")
9022 (if_then_else (eq_attr "is_thumb" "yes")
9028 ;; Patterns to allow combination of arithmetic, cond code and shifts
9030 (define_insn "*arith_shiftsi"
9031 [(set (match_operand:SI 0 "s_register_operand" "=r")
9032 (match_operator:SI 1 "shiftable_operator"
9033 [(match_operator:SI 3 "shift_operator"
9034 [(match_operand:SI 4 "s_register_operand" "r")
9035 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9036 (match_operand:SI 2 "s_register_operand" "r")]))]
9038 "%i1%?\\t%0, %2, %4%S3"
9039 [(set_attr "predicable" "yes")
9040 (set_attr "shift" "4")
9041 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9042 (const_string "alu_shift")
9043 (const_string "alu_shift_reg")))]
9047 [(set (match_operand:SI 0 "s_register_operand" "")
9048 (match_operator:SI 1 "shiftable_operator"
9049 [(match_operator:SI 2 "shiftable_operator"
9050 [(match_operator:SI 3 "shift_operator"
9051 [(match_operand:SI 4 "s_register_operand" "")
9052 (match_operand:SI 5 "reg_or_int_operand" "")])
9053 (match_operand:SI 6 "s_register_operand" "")])
9054 (match_operand:SI 7 "arm_rhs_operand" "")]))
9055 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9058 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9061 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9064 (define_insn "*arith_shiftsi_compare0"
9065 [(set (reg:CC_NOOV CC_REGNUM)
9066 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9067 [(match_operator:SI 3 "shift_operator"
9068 [(match_operand:SI 4 "s_register_operand" "r")
9069 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9070 (match_operand:SI 2 "s_register_operand" "r")])
9072 (set (match_operand:SI 0 "s_register_operand" "=r")
9073 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9076 "%i1%.\\t%0, %2, %4%S3"
9077 [(set_attr "conds" "set")
9078 (set_attr "shift" "4")
9079 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9080 (const_string "alu_shift")
9081 (const_string "alu_shift_reg")))]
9084 (define_insn "*arith_shiftsi_compare0_scratch"
9085 [(set (reg:CC_NOOV CC_REGNUM)
9086 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9087 [(match_operator:SI 3 "shift_operator"
9088 [(match_operand:SI 4 "s_register_operand" "r")
9089 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9090 (match_operand:SI 2 "s_register_operand" "r")])
9092 (clobber (match_scratch:SI 0 "=r"))]
9094 "%i1%.\\t%0, %2, %4%S3"
9095 [(set_attr "conds" "set")
9096 (set_attr "shift" "4")
9097 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9098 (const_string "alu_shift")
9099 (const_string "alu_shift_reg")))]
9102 (define_insn "*sub_shiftsi"
9103 [(set (match_operand:SI 0 "s_register_operand" "=r")
9104 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9105 (match_operator:SI 2 "shift_operator"
9106 [(match_operand:SI 3 "s_register_operand" "r")
9107 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9109 "sub%?\\t%0, %1, %3%S2"
9110 [(set_attr "predicable" "yes")
9111 (set_attr "shift" "3")
9112 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9113 (const_string "alu_shift")
9114 (const_string "alu_shift_reg")))]
9117 (define_insn "*sub_shiftsi_compare0"
9118 [(set (reg:CC_NOOV CC_REGNUM)
9120 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9121 (match_operator:SI 2 "shift_operator"
9122 [(match_operand:SI 3 "s_register_operand" "r")
9123 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9125 (set (match_operand:SI 0 "s_register_operand" "=r")
9126 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9129 "sub%.\\t%0, %1, %3%S2"
9130 [(set_attr "conds" "set")
9131 (set_attr "shift" "3")
9132 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9133 (const_string "alu_shift")
9134 (const_string "alu_shift_reg")))]
9137 (define_insn "*sub_shiftsi_compare0_scratch"
9138 [(set (reg:CC_NOOV CC_REGNUM)
9140 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9141 (match_operator:SI 2 "shift_operator"
9142 [(match_operand:SI 3 "s_register_operand" "r")
9143 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9145 (clobber (match_scratch:SI 0 "=r"))]
9147 "sub%.\\t%0, %1, %3%S2"
9148 [(set_attr "conds" "set")
9149 (set_attr "shift" "3")
9150 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9151 (const_string "alu_shift")
9152 (const_string "alu_shift_reg")))]
9157 (define_insn "*and_scc"
9158 [(set (match_operand:SI 0 "s_register_operand" "=r")
9159 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9160 [(match_operand 3 "cc_register" "") (const_int 0)])
9161 (match_operand:SI 2 "s_register_operand" "r")))]
9163 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9164 [(set_attr "conds" "use")
9165 (set_attr "length" "8")]
9168 (define_insn "*ior_scc"
9169 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9170 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9171 [(match_operand 3 "cc_register" "") (const_int 0)])
9172 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9176 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9177 [(set_attr "conds" "use")
9178 (set_attr "length" "4,8")]
9181 ; A series of splitters for the compare_scc pattern below. Note that
9182 ; order is important.
9184 [(set (match_operand:SI 0 "s_register_operand" "")
9185 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9187 (clobber (reg:CC CC_REGNUM))]
9188 "TARGET_32BIT && reload_completed"
9189 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9192 [(set (match_operand:SI 0 "s_register_operand" "")
9193 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9195 (clobber (reg:CC CC_REGNUM))]
9196 "TARGET_32BIT && reload_completed"
9197 [(set (match_dup 0) (not:SI (match_dup 1)))
9198 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9201 [(set (match_operand:SI 0 "s_register_operand" "")
9202 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9204 (clobber (reg:CC CC_REGNUM))]
9205 "TARGET_32BIT && reload_completed"
9207 [(set (reg:CC_NOTB CC_REGNUM)
9208 (compare:CC_NOTB (const_int 1) (match_dup 1)))
9210 (minus:SI (const_int 1) (match_dup 1)))])
9211 (cond_exec (ltu:CC_NOTB (reg:CC_NOTB CC_REGNUM) (const_int 0))
9212 (set (match_dup 0) (const_int 0)))])
9215 [(set (match_operand:SI 0 "s_register_operand" "")
9216 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9217 (match_operand:SI 2 "const_int_operand" "")))
9218 (clobber (reg:CC CC_REGNUM))]
9219 "TARGET_32BIT && reload_completed"
9221 [(set (reg:CC CC_REGNUM)
9222 (compare:CC (match_dup 1) (match_dup 2)))
9223 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9224 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9225 (set (match_dup 0) (const_int 1)))]
9227 operands[3] = GEN_INT (-INTVAL (operands[2]));
9231 [(set (match_operand:SI 0 "s_register_operand" "")
9232 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9233 (match_operand:SI 2 "arm_add_operand" "")))
9234 (clobber (reg:CC CC_REGNUM))]
9235 "TARGET_32BIT && reload_completed"
9237 [(set (reg:CC_NOOV CC_REGNUM)
9238 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9240 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9241 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9242 (set (match_dup 0) (const_int 1)))])
9244 (define_insn_and_split "*compare_scc"
9245 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9246 (match_operator:SI 1 "arm_comparison_operator"
9247 [(match_operand:SI 2 "s_register_operand" "r,r")
9248 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9249 (clobber (reg:CC CC_REGNUM))]
9252 "&& reload_completed"
9253 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9254 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9255 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9258 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9259 operands[2], operands[3]);
9260 enum rtx_code rc = GET_CODE (operands[1]);
9262 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9264 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9265 if (mode == CCFPmode || mode == CCFPEmode)
9266 rc = reverse_condition_maybe_unordered (rc);
9268 rc = reverse_condition (rc);
9269 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9272 (define_insn "*cond_move"
9273 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9274 (if_then_else:SI (match_operator 3 "equality_operator"
9275 [(match_operator 4 "arm_comparison_operator"
9276 [(match_operand 5 "cc_register" "") (const_int 0)])
9278 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9279 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9282 if (GET_CODE (operands[3]) == NE)
9284 if (which_alternative != 1)
9285 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9286 if (which_alternative != 0)
9287 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9290 if (which_alternative != 0)
9291 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9292 if (which_alternative != 1)
9293 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9296 [(set_attr "conds" "use")
9297 (set_attr "length" "4,4,8")]
9300 (define_insn "*cond_arith"
9301 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9302 (match_operator:SI 5 "shiftable_operator"
9303 [(match_operator:SI 4 "arm_comparison_operator"
9304 [(match_operand:SI 2 "s_register_operand" "r,r")
9305 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9306 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9307 (clobber (reg:CC CC_REGNUM))]
9310 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9311 return \"%i5\\t%0, %1, %2, lsr #31\";
9313 output_asm_insn (\"cmp\\t%2, %3\", operands);
9314 if (GET_CODE (operands[5]) == AND)
9315 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9316 else if (GET_CODE (operands[5]) == MINUS)
9317 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9318 else if (which_alternative != 0)
9319 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9320 return \"%i5%d4\\t%0, %1, #1\";
9322 [(set_attr "conds" "clob")
9323 (set_attr "length" "12")]
9326 (define_insn "*cond_sub"
9327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9328 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9329 (match_operator:SI 4 "arm_comparison_operator"
9330 [(match_operand:SI 2 "s_register_operand" "r,r")
9331 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9332 (clobber (reg:CC CC_REGNUM))]
9335 output_asm_insn (\"cmp\\t%2, %3\", operands);
9336 if (which_alternative != 0)
9337 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9338 return \"sub%d4\\t%0, %1, #1\";
9340 [(set_attr "conds" "clob")
9341 (set_attr "length" "8,12")]
9344 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9345 (define_insn "*cmp_ite0"
9346 [(set (match_operand 6 "dominant_cc_register" "")
9349 (match_operator 4 "arm_comparison_operator"
9350 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9351 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9352 (match_operator:SI 5 "arm_comparison_operator"
9353 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9354 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9360 static const char * const opcodes[4][2] =
9362 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9363 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9364 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9365 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9366 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9367 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9368 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9369 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9372 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9374 return opcodes[which_alternative][swap];
9376 [(set_attr "conds" "set")
9377 (set_attr "length" "8")]
9380 (define_insn "*cmp_ite1"
9381 [(set (match_operand 6 "dominant_cc_register" "")
9384 (match_operator 4 "arm_comparison_operator"
9385 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9386 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9387 (match_operator:SI 5 "arm_comparison_operator"
9388 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9389 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9395 static const char * const opcodes[4][2] =
9397 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9398 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9399 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9400 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9401 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9402 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9403 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9404 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9407 comparison_dominates_p (GET_CODE (operands[5]),
9408 reverse_condition (GET_CODE (operands[4])));
9410 return opcodes[which_alternative][swap];
9412 [(set_attr "conds" "set")
9413 (set_attr "length" "8")]
9416 (define_insn "*cmp_and"
9417 [(set (match_operand 6 "dominant_cc_register" "")
9420 (match_operator 4 "arm_comparison_operator"
9421 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9422 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9423 (match_operator:SI 5 "arm_comparison_operator"
9424 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9425 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9430 static const char *const opcodes[4][2] =
9432 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9433 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9434 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9435 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9436 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9437 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9438 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9439 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9442 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9444 return opcodes[which_alternative][swap];
9446 [(set_attr "conds" "set")
9447 (set_attr "predicable" "no")
9448 (set_attr "length" "8")]
9451 (define_insn "*cmp_ior"
9452 [(set (match_operand 6 "dominant_cc_register" "")
9455 (match_operator 4 "arm_comparison_operator"
9456 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9457 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9458 (match_operator:SI 5 "arm_comparison_operator"
9459 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9460 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9465 static const char *const opcodes[4][2] =
9467 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9468 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9469 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9470 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9471 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9472 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9473 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9474 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9477 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9479 return opcodes[which_alternative][swap];
9482 [(set_attr "conds" "set")
9483 (set_attr "length" "8")]
9486 (define_insn_and_split "*ior_scc_scc"
9487 [(set (match_operand:SI 0 "s_register_operand" "=r")
9488 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9489 [(match_operand:SI 1 "s_register_operand" "r")
9490 (match_operand:SI 2 "arm_add_operand" "rIL")])
9491 (match_operator:SI 6 "arm_comparison_operator"
9492 [(match_operand:SI 4 "s_register_operand" "r")
9493 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9494 (clobber (reg:CC CC_REGNUM))]
9496 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9499 "TARGET_ARM && reload_completed"
9503 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9504 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9506 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9508 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9511 [(set_attr "conds" "clob")
9512 (set_attr "length" "16")])
9514 ; If the above pattern is followed by a CMP insn, then the compare is
9515 ; redundant, since we can rework the conditional instruction that follows.
9516 (define_insn_and_split "*ior_scc_scc_cmp"
9517 [(set (match_operand 0 "dominant_cc_register" "")
9518 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9519 [(match_operand:SI 1 "s_register_operand" "r")
9520 (match_operand:SI 2 "arm_add_operand" "rIL")])
9521 (match_operator:SI 6 "arm_comparison_operator"
9522 [(match_operand:SI 4 "s_register_operand" "r")
9523 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9525 (set (match_operand:SI 7 "s_register_operand" "=r")
9526 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9527 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9530 "TARGET_ARM && reload_completed"
9534 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9535 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9537 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9539 [(set_attr "conds" "set")
9540 (set_attr "length" "16")])
9542 (define_insn_and_split "*and_scc_scc"
9543 [(set (match_operand:SI 0 "s_register_operand" "=r")
9544 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9545 [(match_operand:SI 1 "s_register_operand" "r")
9546 (match_operand:SI 2 "arm_add_operand" "rIL")])
9547 (match_operator:SI 6 "arm_comparison_operator"
9548 [(match_operand:SI 4 "s_register_operand" "r")
9549 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9550 (clobber (reg:CC CC_REGNUM))]
9552 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9555 "TARGET_ARM && reload_completed
9556 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9561 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9562 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9564 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9566 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9569 [(set_attr "conds" "clob")
9570 (set_attr "length" "16")])
9572 ; If the above pattern is followed by a CMP insn, then the compare is
9573 ; redundant, since we can rework the conditional instruction that follows.
9574 (define_insn_and_split "*and_scc_scc_cmp"
9575 [(set (match_operand 0 "dominant_cc_register" "")
9576 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9577 [(match_operand:SI 1 "s_register_operand" "r")
9578 (match_operand:SI 2 "arm_add_operand" "rIL")])
9579 (match_operator:SI 6 "arm_comparison_operator"
9580 [(match_operand:SI 4 "s_register_operand" "r")
9581 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9583 (set (match_operand:SI 7 "s_register_operand" "=r")
9584 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9585 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9588 "TARGET_ARM && reload_completed"
9592 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9593 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9595 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9597 [(set_attr "conds" "set")
9598 (set_attr "length" "16")])
9600 ;; If there is no dominance in the comparison, then we can still save an
9601 ;; instruction in the AND case, since we can know that the second compare
9602 ;; need only zero the value if false (if true, then the value is already
9604 (define_insn_and_split "*and_scc_scc_nodom"
9605 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9606 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9607 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9608 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9609 (match_operator:SI 6 "arm_comparison_operator"
9610 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9611 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9612 (clobber (reg:CC CC_REGNUM))]
9614 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9617 "TARGET_ARM && reload_completed"
9618 [(parallel [(set (match_dup 0)
9619 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9620 (clobber (reg:CC CC_REGNUM))])
9621 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9623 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9626 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9627 operands[4], operands[5]),
9629 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9631 [(set_attr "conds" "clob")
9632 (set_attr "length" "20")])
9635 [(set (reg:CC_NOOV CC_REGNUM)
9636 (compare:CC_NOOV (ior:SI
9637 (and:SI (match_operand:SI 0 "s_register_operand" "")
9639 (match_operator:SI 1 "arm_comparison_operator"
9640 [(match_operand:SI 2 "s_register_operand" "")
9641 (match_operand:SI 3 "arm_add_operand" "")]))
9643 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9646 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9648 (set (reg:CC_NOOV CC_REGNUM)
9649 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9654 [(set (reg:CC_NOOV CC_REGNUM)
9655 (compare:CC_NOOV (ior:SI
9656 (match_operator:SI 1 "arm_comparison_operator"
9657 [(match_operand:SI 2 "s_register_operand" "")
9658 (match_operand:SI 3 "arm_add_operand" "")])
9659 (and:SI (match_operand:SI 0 "s_register_operand" "")
9662 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9665 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9667 (set (reg:CC_NOOV CC_REGNUM)
9668 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9671 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9673 (define_insn "*negscc"
9674 [(set (match_operand:SI 0 "s_register_operand" "=r")
9675 (neg:SI (match_operator 3 "arm_comparison_operator"
9676 [(match_operand:SI 1 "s_register_operand" "r")
9677 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9678 (clobber (reg:CC CC_REGNUM))]
9681 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9682 return \"mov\\t%0, %1, asr #31\";
9684 if (GET_CODE (operands[3]) == NE)
9685 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9687 output_asm_insn (\"cmp\\t%1, %2\", operands);
9688 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9689 return \"mvn%d3\\t%0, #0\";
9691 [(set_attr "conds" "clob")
9692 (set_attr "length" "12")]
9695 (define_insn "movcond"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9698 (match_operator 5 "arm_comparison_operator"
9699 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9700 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9701 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9702 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9703 (clobber (reg:CC CC_REGNUM))]
9706 if (GET_CODE (operands[5]) == LT
9707 && (operands[4] == const0_rtx))
9709 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9711 if (operands[2] == const0_rtx)
9712 return \"and\\t%0, %1, %3, asr #31\";
9713 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9715 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9717 if (operands[1] == const0_rtx)
9718 return \"bic\\t%0, %2, %3, asr #31\";
9719 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9721 /* The only case that falls through to here is when both ops 1 & 2
9725 if (GET_CODE (operands[5]) == GE
9726 && (operands[4] == const0_rtx))
9728 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9730 if (operands[2] == const0_rtx)
9731 return \"bic\\t%0, %1, %3, asr #31\";
9732 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9734 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9736 if (operands[1] == const0_rtx)
9737 return \"and\\t%0, %2, %3, asr #31\";
9738 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9740 /* The only case that falls through to here is when both ops 1 & 2
9743 if (GET_CODE (operands[4]) == CONST_INT
9744 && !const_ok_for_arm (INTVAL (operands[4])))
9745 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9747 output_asm_insn (\"cmp\\t%3, %4\", operands);
9748 if (which_alternative != 0)
9749 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9750 if (which_alternative != 1)
9751 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9754 [(set_attr "conds" "clob")
9755 (set_attr "length" "8,8,12")]
9758 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9760 (define_insn "*ifcompare_plus_move"
9761 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9762 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9763 [(match_operand:SI 4 "s_register_operand" "r,r")
9764 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9766 (match_operand:SI 2 "s_register_operand" "r,r")
9767 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9768 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9769 (clobber (reg:CC CC_REGNUM))]
9772 [(set_attr "conds" "clob")
9773 (set_attr "length" "8,12")]
9776 (define_insn "*if_plus_move"
9777 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9779 (match_operator 4 "arm_comparison_operator"
9780 [(match_operand 5 "cc_register" "") (const_int 0)])
9782 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9783 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9784 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9788 sub%d4\\t%0, %2, #%n3
9789 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9790 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9791 [(set_attr "conds" "use")
9792 (set_attr "length" "4,4,8,8")
9793 (set_attr "type" "*,*,*,*")]
9796 (define_insn "*ifcompare_move_plus"
9797 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9798 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9799 [(match_operand:SI 4 "s_register_operand" "r,r")
9800 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9801 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9803 (match_operand:SI 2 "s_register_operand" "r,r")
9804 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9805 (clobber (reg:CC CC_REGNUM))]
9808 [(set_attr "conds" "clob")
9809 (set_attr "length" "8,12")]
9812 (define_insn "*if_move_plus"
9813 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9815 (match_operator 4 "arm_comparison_operator"
9816 [(match_operand 5 "cc_register" "") (const_int 0)])
9817 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9819 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9820 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9824 sub%D4\\t%0, %2, #%n3
9825 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9826 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9827 [(set_attr "conds" "use")
9828 (set_attr "length" "4,4,8,8")
9829 (set_attr "type" "*,*,*,*")]
9832 (define_insn "*ifcompare_arith_arith"
9833 [(set (match_operand:SI 0 "s_register_operand" "=r")
9834 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9835 [(match_operand:SI 5 "s_register_operand" "r")
9836 (match_operand:SI 6 "arm_add_operand" "rIL")])
9837 (match_operator:SI 8 "shiftable_operator"
9838 [(match_operand:SI 1 "s_register_operand" "r")
9839 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9840 (match_operator:SI 7 "shiftable_operator"
9841 [(match_operand:SI 3 "s_register_operand" "r")
9842 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9843 (clobber (reg:CC CC_REGNUM))]
9846 [(set_attr "conds" "clob")
9847 (set_attr "length" "12")]
9850 (define_insn "*if_arith_arith"
9851 [(set (match_operand:SI 0 "s_register_operand" "=r")
9852 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9853 [(match_operand 8 "cc_register" "") (const_int 0)])
9854 (match_operator:SI 6 "shiftable_operator"
9855 [(match_operand:SI 1 "s_register_operand" "r")
9856 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9857 (match_operator:SI 7 "shiftable_operator"
9858 [(match_operand:SI 3 "s_register_operand" "r")
9859 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9861 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9862 [(set_attr "conds" "use")
9863 (set_attr "length" "8")]
9866 (define_insn "*ifcompare_arith_move"
9867 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9868 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9869 [(match_operand:SI 2 "s_register_operand" "r,r")
9870 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9871 (match_operator:SI 7 "shiftable_operator"
9872 [(match_operand:SI 4 "s_register_operand" "r,r")
9873 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9874 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9875 (clobber (reg:CC CC_REGNUM))]
9878 /* If we have an operation where (op x 0) is the identity operation and
9879 the conditional operator is LT or GE and we are comparing against zero and
9880 everything is in registers then we can do this in two instructions. */
9881 if (operands[3] == const0_rtx
9882 && GET_CODE (operands[7]) != AND
9883 && GET_CODE (operands[5]) == REG
9884 && GET_CODE (operands[1]) == REG
9885 && REGNO (operands[1]) == REGNO (operands[4])
9886 && REGNO (operands[4]) != REGNO (operands[0]))
9888 if (GET_CODE (operands[6]) == LT)
9889 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9890 else if (GET_CODE (operands[6]) == GE)
9891 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9893 if (GET_CODE (operands[3]) == CONST_INT
9894 && !const_ok_for_arm (INTVAL (operands[3])))
9895 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9897 output_asm_insn (\"cmp\\t%2, %3\", operands);
9898 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9899 if (which_alternative != 0)
9900 return \"mov%D6\\t%0, %1\";
9903 [(set_attr "conds" "clob")
9904 (set_attr "length" "8,12")]
9907 (define_insn "*if_arith_move"
9908 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9909 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9910 [(match_operand 6 "cc_register" "") (const_int 0)])
9911 (match_operator:SI 5 "shiftable_operator"
9912 [(match_operand:SI 2 "s_register_operand" "r,r")
9913 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9914 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9918 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9919 [(set_attr "conds" "use")
9920 (set_attr "length" "4,8")
9921 (set_attr "type" "*,*")]
9924 (define_insn "*ifcompare_move_arith"
9925 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9926 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9927 [(match_operand:SI 4 "s_register_operand" "r,r")
9928 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9929 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9930 (match_operator:SI 7 "shiftable_operator"
9931 [(match_operand:SI 2 "s_register_operand" "r,r")
9932 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9933 (clobber (reg:CC CC_REGNUM))]
9936 /* If we have an operation where (op x 0) is the identity operation and
9937 the conditional operator is LT or GE and we are comparing against zero and
9938 everything is in registers then we can do this in two instructions */
9939 if (operands[5] == const0_rtx
9940 && GET_CODE (operands[7]) != AND
9941 && GET_CODE (operands[3]) == REG
9942 && GET_CODE (operands[1]) == REG
9943 && REGNO (operands[1]) == REGNO (operands[2])
9944 && REGNO (operands[2]) != REGNO (operands[0]))
9946 if (GET_CODE (operands[6]) == GE)
9947 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9948 else if (GET_CODE (operands[6]) == LT)
9949 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9952 if (GET_CODE (operands[5]) == CONST_INT
9953 && !const_ok_for_arm (INTVAL (operands[5])))
9954 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9956 output_asm_insn (\"cmp\\t%4, %5\", operands);
9958 if (which_alternative != 0)
9959 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9960 return \"%I7%D6\\t%0, %2, %3\";
9962 [(set_attr "conds" "clob")
9963 (set_attr "length" "8,12")]
9966 (define_insn "*if_move_arith"
9967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9969 (match_operator 4 "arm_comparison_operator"
9970 [(match_operand 6 "cc_register" "") (const_int 0)])
9971 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9972 (match_operator:SI 5 "shiftable_operator"
9973 [(match_operand:SI 2 "s_register_operand" "r,r")
9974 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9978 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9979 [(set_attr "conds" "use")
9980 (set_attr "length" "4,8")
9981 (set_attr "type" "*,*")]
9984 (define_insn "*ifcompare_move_not"
9985 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9987 (match_operator 5 "arm_comparison_operator"
9988 [(match_operand:SI 3 "s_register_operand" "r,r")
9989 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9990 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9992 (match_operand:SI 2 "s_register_operand" "r,r"))))
9993 (clobber (reg:CC CC_REGNUM))]
9996 [(set_attr "conds" "clob")
9997 (set_attr "length" "8,12")]
10000 (define_insn "*if_move_not"
10001 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10003 (match_operator 4 "arm_comparison_operator"
10004 [(match_operand 3 "cc_register" "") (const_int 0)])
10005 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10006 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10010 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10011 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10012 [(set_attr "conds" "use")
10013 (set_attr "length" "4,8,8")]
10016 (define_insn "*ifcompare_not_move"
10017 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10019 (match_operator 5 "arm_comparison_operator"
10020 [(match_operand:SI 3 "s_register_operand" "r,r")
10021 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10023 (match_operand:SI 2 "s_register_operand" "r,r"))
10024 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10025 (clobber (reg:CC CC_REGNUM))]
10028 [(set_attr "conds" "clob")
10029 (set_attr "length" "8,12")]
10032 (define_insn "*if_not_move"
10033 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10035 (match_operator 4 "arm_comparison_operator"
10036 [(match_operand 3 "cc_register" "") (const_int 0)])
10037 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10038 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10042 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10043 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10044 [(set_attr "conds" "use")
10045 (set_attr "length" "4,8,8")]
10048 (define_insn "*ifcompare_shift_move"
10049 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10051 (match_operator 6 "arm_comparison_operator"
10052 [(match_operand:SI 4 "s_register_operand" "r,r")
10053 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10054 (match_operator:SI 7 "shift_operator"
10055 [(match_operand:SI 2 "s_register_operand" "r,r")
10056 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10057 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10058 (clobber (reg:CC CC_REGNUM))]
10061 [(set_attr "conds" "clob")
10062 (set_attr "length" "8,12")]
10065 (define_insn "*if_shift_move"
10066 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10068 (match_operator 5 "arm_comparison_operator"
10069 [(match_operand 6 "cc_register" "") (const_int 0)])
10070 (match_operator:SI 4 "shift_operator"
10071 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10072 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10073 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10077 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10078 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10079 [(set_attr "conds" "use")
10080 (set_attr "shift" "2")
10081 (set_attr "length" "4,8,8")
10082 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10083 (const_string "alu_shift")
10084 (const_string "alu_shift_reg")))]
10087 (define_insn "*ifcompare_move_shift"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (match_operator 6 "arm_comparison_operator"
10091 [(match_operand:SI 4 "s_register_operand" "r,r")
10092 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10093 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10094 (match_operator:SI 7 "shift_operator"
10095 [(match_operand:SI 2 "s_register_operand" "r,r")
10096 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10097 (clobber (reg:CC CC_REGNUM))]
10100 [(set_attr "conds" "clob")
10101 (set_attr "length" "8,12")]
10104 (define_insn "*if_move_shift"
10105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10107 (match_operator 5 "arm_comparison_operator"
10108 [(match_operand 6 "cc_register" "") (const_int 0)])
10109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10110 (match_operator:SI 4 "shift_operator"
10111 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10112 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10116 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10117 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10118 [(set_attr "conds" "use")
10119 (set_attr "shift" "2")
10120 (set_attr "length" "4,8,8")
10121 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10122 (const_string "alu_shift")
10123 (const_string "alu_shift_reg")))]
10126 (define_insn "*ifcompare_shift_shift"
10127 [(set (match_operand:SI 0 "s_register_operand" "=r")
10129 (match_operator 7 "arm_comparison_operator"
10130 [(match_operand:SI 5 "s_register_operand" "r")
10131 (match_operand:SI 6 "arm_add_operand" "rIL")])
10132 (match_operator:SI 8 "shift_operator"
10133 [(match_operand:SI 1 "s_register_operand" "r")
10134 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10135 (match_operator:SI 9 "shift_operator"
10136 [(match_operand:SI 3 "s_register_operand" "r")
10137 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10138 (clobber (reg:CC CC_REGNUM))]
10141 [(set_attr "conds" "clob")
10142 (set_attr "length" "12")]
10145 (define_insn "*if_shift_shift"
10146 [(set (match_operand:SI 0 "s_register_operand" "=r")
10148 (match_operator 5 "arm_comparison_operator"
10149 [(match_operand 8 "cc_register" "") (const_int 0)])
10150 (match_operator:SI 6 "shift_operator"
10151 [(match_operand:SI 1 "s_register_operand" "r")
10152 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10153 (match_operator:SI 7 "shift_operator"
10154 [(match_operand:SI 3 "s_register_operand" "r")
10155 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10157 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10158 [(set_attr "conds" "use")
10159 (set_attr "shift" "1")
10160 (set_attr "length" "8")
10161 (set (attr "type") (if_then_else
10162 (and (match_operand 2 "const_int_operand" "")
10163 (match_operand 4 "const_int_operand" ""))
10164 (const_string "alu_shift")
10165 (const_string "alu_shift_reg")))]
10168 (define_insn "*ifcompare_not_arith"
10169 [(set (match_operand:SI 0 "s_register_operand" "=r")
10171 (match_operator 6 "arm_comparison_operator"
10172 [(match_operand:SI 4 "s_register_operand" "r")
10173 (match_operand:SI 5 "arm_add_operand" "rIL")])
10174 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10175 (match_operator:SI 7 "shiftable_operator"
10176 [(match_operand:SI 2 "s_register_operand" "r")
10177 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10178 (clobber (reg:CC CC_REGNUM))]
10181 [(set_attr "conds" "clob")
10182 (set_attr "length" "12")]
10185 (define_insn "*if_not_arith"
10186 [(set (match_operand:SI 0 "s_register_operand" "=r")
10188 (match_operator 5 "arm_comparison_operator"
10189 [(match_operand 4 "cc_register" "") (const_int 0)])
10190 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10191 (match_operator:SI 6 "shiftable_operator"
10192 [(match_operand:SI 2 "s_register_operand" "r")
10193 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10195 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10196 [(set_attr "conds" "use")
10197 (set_attr "length" "8")]
10200 (define_insn "*ifcompare_arith_not"
10201 [(set (match_operand:SI 0 "s_register_operand" "=r")
10203 (match_operator 6 "arm_comparison_operator"
10204 [(match_operand:SI 4 "s_register_operand" "r")
10205 (match_operand:SI 5 "arm_add_operand" "rIL")])
10206 (match_operator:SI 7 "shiftable_operator"
10207 [(match_operand:SI 2 "s_register_operand" "r")
10208 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10209 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10210 (clobber (reg:CC CC_REGNUM))]
10213 [(set_attr "conds" "clob")
10214 (set_attr "length" "12")]
10217 (define_insn "*if_arith_not"
10218 [(set (match_operand:SI 0 "s_register_operand" "=r")
10220 (match_operator 5 "arm_comparison_operator"
10221 [(match_operand 4 "cc_register" "") (const_int 0)])
10222 (match_operator:SI 6 "shiftable_operator"
10223 [(match_operand:SI 2 "s_register_operand" "r")
10224 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10225 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10227 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10228 [(set_attr "conds" "use")
10229 (set_attr "length" "8")]
10232 (define_insn "*ifcompare_neg_move"
10233 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10235 (match_operator 5 "arm_comparison_operator"
10236 [(match_operand:SI 3 "s_register_operand" "r,r")
10237 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10238 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10239 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10240 (clobber (reg:CC CC_REGNUM))]
10243 [(set_attr "conds" "clob")
10244 (set_attr "length" "8,12")]
10247 (define_insn "*if_neg_move"
10248 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10250 (match_operator 4 "arm_comparison_operator"
10251 [(match_operand 3 "cc_register" "") (const_int 0)])
10252 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10253 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10256 rsb%d4\\t%0, %2, #0
10257 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10258 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10259 [(set_attr "conds" "use")
10260 (set_attr "length" "4,8,8")]
10263 (define_insn "*ifcompare_move_neg"
10264 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10266 (match_operator 5 "arm_comparison_operator"
10267 [(match_operand:SI 3 "s_register_operand" "r,r")
10268 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10269 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10270 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10271 (clobber (reg:CC CC_REGNUM))]
10274 [(set_attr "conds" "clob")
10275 (set_attr "length" "8,12")]
10278 (define_insn "*if_move_neg"
10279 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10281 (match_operator 4 "arm_comparison_operator"
10282 [(match_operand 3 "cc_register" "") (const_int 0)])
10283 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10284 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10287 rsb%D4\\t%0, %2, #0
10288 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10289 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10290 [(set_attr "conds" "use")
10291 (set_attr "length" "4,8,8")]
10294 (define_insn "*arith_adjacentmem"
10295 [(set (match_operand:SI 0 "s_register_operand" "=r")
10296 (match_operator:SI 1 "shiftable_operator"
10297 [(match_operand:SI 2 "memory_operand" "m")
10298 (match_operand:SI 3 "memory_operand" "m")]))
10299 (clobber (match_scratch:SI 4 "=r"))]
10300 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10306 HOST_WIDE_INT val1 = 0, val2 = 0;
10308 if (REGNO (operands[0]) > REGNO (operands[4]))
10310 ldm[1] = operands[4];
10311 ldm[2] = operands[0];
10315 ldm[1] = operands[0];
10316 ldm[2] = operands[4];
10319 base_reg = XEXP (operands[2], 0);
10321 if (!REG_P (base_reg))
10323 val1 = INTVAL (XEXP (base_reg, 1));
10324 base_reg = XEXP (base_reg, 0);
10327 if (!REG_P (XEXP (operands[3], 0)))
10328 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10330 arith[0] = operands[0];
10331 arith[3] = operands[1];
10345 if (val1 !=0 && val2 != 0)
10349 if (val1 == 4 || val2 == 4)
10350 /* Other val must be 8, since we know they are adjacent and neither
10352 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10353 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10355 ldm[0] = ops[0] = operands[4];
10357 ops[2] = GEN_INT (val1);
10358 output_add_immediate (ops);
10360 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10362 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10366 /* Offset is out of range for a single add, so use two ldr. */
10369 ops[2] = GEN_INT (val1);
10370 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10372 ops[2] = GEN_INT (val2);
10373 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10376 else if (val1 != 0)
10379 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10381 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10386 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10388 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10390 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10393 [(set_attr "length" "12")
10394 (set_attr "predicable" "yes")
10395 (set_attr "type" "load1")]
10398 ; This pattern is never tried by combine, so do it as a peephole
10401 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10402 (match_operand:SI 1 "arm_general_register_operand" ""))
10403 (set (reg:CC CC_REGNUM)
10404 (compare:CC (match_dup 1) (const_int 0)))]
10406 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10407 (set (match_dup 0) (match_dup 1))])]
10411 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10412 ; reversed, check that the memory references aren't volatile.
10415 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10416 (match_operand:SI 4 "memory_operand" "m"))
10417 (set (match_operand:SI 1 "s_register_operand" "=rk")
10418 (match_operand:SI 5 "memory_operand" "m"))
10419 (set (match_operand:SI 2 "s_register_operand" "=rk")
10420 (match_operand:SI 6 "memory_operand" "m"))
10421 (set (match_operand:SI 3 "s_register_operand" "=rk")
10422 (match_operand:SI 7 "memory_operand" "m"))]
10423 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10425 return emit_ldm_seq (operands, 4);
10430 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10431 (match_operand:SI 3 "memory_operand" "m"))
10432 (set (match_operand:SI 1 "s_register_operand" "=rk")
10433 (match_operand:SI 4 "memory_operand" "m"))
10434 (set (match_operand:SI 2 "s_register_operand" "=rk")
10435 (match_operand:SI 5 "memory_operand" "m"))]
10436 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10438 return emit_ldm_seq (operands, 3);
10443 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10444 (match_operand:SI 2 "memory_operand" "m"))
10445 (set (match_operand:SI 1 "s_register_operand" "=rk")
10446 (match_operand:SI 3 "memory_operand" "m"))]
10447 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10449 return emit_ldm_seq (operands, 2);
10454 [(set (match_operand:SI 4 "memory_operand" "=m")
10455 (match_operand:SI 0 "s_register_operand" "rk"))
10456 (set (match_operand:SI 5 "memory_operand" "=m")
10457 (match_operand:SI 1 "s_register_operand" "rk"))
10458 (set (match_operand:SI 6 "memory_operand" "=m")
10459 (match_operand:SI 2 "s_register_operand" "rk"))
10460 (set (match_operand:SI 7 "memory_operand" "=m")
10461 (match_operand:SI 3 "s_register_operand" "rk"))]
10462 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10464 return emit_stm_seq (operands, 4);
10469 [(set (match_operand:SI 3 "memory_operand" "=m")
10470 (match_operand:SI 0 "s_register_operand" "rk"))
10471 (set (match_operand:SI 4 "memory_operand" "=m")
10472 (match_operand:SI 1 "s_register_operand" "rk"))
10473 (set (match_operand:SI 5 "memory_operand" "=m")
10474 (match_operand:SI 2 "s_register_operand" "rk"))]
10475 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10477 return emit_stm_seq (operands, 3);
10482 [(set (match_operand:SI 2 "memory_operand" "=m")
10483 (match_operand:SI 0 "s_register_operand" "rk"))
10484 (set (match_operand:SI 3 "memory_operand" "=m")
10485 (match_operand:SI 1 "s_register_operand" "rk"))]
10486 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10488 return emit_stm_seq (operands, 2);
10493 [(set (match_operand:SI 0 "s_register_operand" "")
10494 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10496 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10497 [(match_operand:SI 3 "s_register_operand" "")
10498 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10499 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10501 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10502 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10507 ;; This split can be used because CC_Z mode implies that the following
10508 ;; branch will be an equality, or an unsigned inequality, so the sign
10509 ;; extension is not needed.
10512 [(set (reg:CC_Z CC_REGNUM)
10514 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10516 (match_operand 1 "const_int_operand" "")))
10517 (clobber (match_scratch:SI 2 ""))]
10519 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10520 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10521 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10522 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10524 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10527 ;; ??? Check the patterns above for Thumb-2 usefulness
10529 (define_expand "prologue"
10530 [(clobber (const_int 0))]
10533 arm_expand_prologue ();
10535 thumb1_expand_prologue ();
10540 (define_expand "epilogue"
10541 [(clobber (const_int 0))]
10544 if (crtl->calls_eh_return)
10545 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10547 thumb1_expand_epilogue ();
10548 else if (USE_RETURN_INSN (FALSE))
10550 emit_jump_insn (gen_return ());
10553 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10555 gen_rtx_RETURN (VOIDmode)),
10556 VUNSPEC_EPILOGUE));
10561 ;; Note - although unspec_volatile's USE all hard registers,
10562 ;; USEs are ignored after relaod has completed. Thus we need
10563 ;; to add an unspec of the link register to ensure that flow
10564 ;; does not think that it is unused by the sibcall branch that
10565 ;; will replace the standard function epilogue.
10566 (define_insn "sibcall_epilogue"
10567 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10568 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10571 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10572 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10573 return arm_output_epilogue (next_nonnote_insn (insn));
10575 ;; Length is absolute worst case
10576 [(set_attr "length" "44")
10577 (set_attr "type" "block")
10578 ;; We don't clobber the conditions, but the potential length of this
10579 ;; operation is sufficient to make conditionalizing the sequence
10580 ;; unlikely to be profitable.
10581 (set_attr "conds" "clob")]
10584 (define_insn "*epilogue_insns"
10585 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10589 return arm_output_epilogue (NULL);
10590 else /* TARGET_THUMB1 */
10591 return thumb_unexpanded_epilogue ();
10593 ; Length is absolute worst case
10594 [(set_attr "length" "44")
10595 (set_attr "type" "block")
10596 ;; We don't clobber the conditions, but the potential length of this
10597 ;; operation is sufficient to make conditionalizing the sequence
10598 ;; unlikely to be profitable.
10599 (set_attr "conds" "clob")]
10602 (define_expand "eh_epilogue"
10603 [(use (match_operand:SI 0 "register_operand" ""))
10604 (use (match_operand:SI 1 "register_operand" ""))
10605 (use (match_operand:SI 2 "register_operand" ""))]
10609 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10610 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10612 rtx ra = gen_rtx_REG (Pmode, 2);
10614 emit_move_insn (ra, operands[2]);
10617 /* This is a hack -- we may have crystalized the function type too
10619 cfun->machine->func_type = 0;
10623 ;; This split is only used during output to reduce the number of patterns
10624 ;; that need assembler instructions adding to them. We allowed the setting
10625 ;; of the conditions to be implicit during rtl generation so that
10626 ;; the conditional compare patterns would work. However this conflicts to
10627 ;; some extent with the conditional data operations, so we have to split them
10630 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10631 ;; conditional execution sufficient?
10634 [(set (match_operand:SI 0 "s_register_operand" "")
10635 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10636 [(match_operand 2 "" "") (match_operand 3 "" "")])
10638 (match_operand 4 "" "")))
10639 (clobber (reg:CC CC_REGNUM))]
10640 "TARGET_ARM && reload_completed"
10641 [(set (match_dup 5) (match_dup 6))
10642 (cond_exec (match_dup 7)
10643 (set (match_dup 0) (match_dup 4)))]
10646 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10647 operands[2], operands[3]);
10648 enum rtx_code rc = GET_CODE (operands[1]);
10650 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10651 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10652 if (mode == CCFPmode || mode == CCFPEmode)
10653 rc = reverse_condition_maybe_unordered (rc);
10655 rc = reverse_condition (rc);
10657 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10662 [(set (match_operand:SI 0 "s_register_operand" "")
10663 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10664 [(match_operand 2 "" "") (match_operand 3 "" "")])
10665 (match_operand 4 "" "")
10667 (clobber (reg:CC CC_REGNUM))]
10668 "TARGET_ARM && reload_completed"
10669 [(set (match_dup 5) (match_dup 6))
10670 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10671 (set (match_dup 0) (match_dup 4)))]
10674 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10675 operands[2], operands[3]);
10677 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10678 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10683 [(set (match_operand:SI 0 "s_register_operand" "")
10684 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10685 [(match_operand 2 "" "") (match_operand 3 "" "")])
10686 (match_operand 4 "" "")
10687 (match_operand 5 "" "")))
10688 (clobber (reg:CC CC_REGNUM))]
10689 "TARGET_ARM && reload_completed"
10690 [(set (match_dup 6) (match_dup 7))
10691 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10692 (set (match_dup 0) (match_dup 4)))
10693 (cond_exec (match_dup 8)
10694 (set (match_dup 0) (match_dup 5)))]
10697 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10698 operands[2], operands[3]);
10699 enum rtx_code rc = GET_CODE (operands[1]);
10701 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10702 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10703 if (mode == CCFPmode || mode == CCFPEmode)
10704 rc = reverse_condition_maybe_unordered (rc);
10706 rc = reverse_condition (rc);
10708 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10713 [(set (match_operand:SI 0 "s_register_operand" "")
10714 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10715 [(match_operand:SI 2 "s_register_operand" "")
10716 (match_operand:SI 3 "arm_add_operand" "")])
10717 (match_operand:SI 4 "arm_rhs_operand" "")
10719 (match_operand:SI 5 "s_register_operand" ""))))
10720 (clobber (reg:CC CC_REGNUM))]
10721 "TARGET_ARM && reload_completed"
10722 [(set (match_dup 6) (match_dup 7))
10723 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10724 (set (match_dup 0) (match_dup 4)))
10725 (cond_exec (match_dup 8)
10726 (set (match_dup 0) (not:SI (match_dup 5))))]
10729 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10730 operands[2], operands[3]);
10731 enum rtx_code rc = GET_CODE (operands[1]);
10733 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10734 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10735 if (mode == CCFPmode || mode == CCFPEmode)
10736 rc = reverse_condition_maybe_unordered (rc);
10738 rc = reverse_condition (rc);
10740 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10744 (define_insn "*cond_move_not"
10745 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10746 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10747 [(match_operand 3 "cc_register" "") (const_int 0)])
10748 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10750 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10754 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10755 [(set_attr "conds" "use")
10756 (set_attr "length" "4,8")]
10759 ;; The next two patterns occur when an AND operation is followed by a
10760 ;; scc insn sequence
10762 (define_insn "*sign_extract_onebit"
10763 [(set (match_operand:SI 0 "s_register_operand" "=r")
10764 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10766 (match_operand:SI 2 "const_int_operand" "n")))
10767 (clobber (reg:CC CC_REGNUM))]
10770 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10771 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10772 return \"mvnne\\t%0, #0\";
10774 [(set_attr "conds" "clob")
10775 (set_attr "length" "8")]
10778 (define_insn "*not_signextract_onebit"
10779 [(set (match_operand:SI 0 "s_register_operand" "=r")
10781 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10783 (match_operand:SI 2 "const_int_operand" "n"))))
10784 (clobber (reg:CC CC_REGNUM))]
10787 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10788 output_asm_insn (\"tst\\t%1, %2\", operands);
10789 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10790 return \"movne\\t%0, #0\";
10792 [(set_attr "conds" "clob")
10793 (set_attr "length" "12")]
10795 ;; ??? The above patterns need auditing for Thumb-2
10797 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10798 ;; expressions. For simplicity, the first register is also in the unspec
10800 (define_insn "*push_multi"
10801 [(match_parallel 2 "multi_register_push"
10802 [(set (match_operand:BLK 0 "memory_operand" "=m")
10803 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10804 UNSPEC_PUSH_MULT))])]
10808 int num_saves = XVECLEN (operands[2], 0);
10810 /* For the StrongARM at least it is faster to
10811 use STR to store only a single register.
10812 In Thumb mode always use push, and the assembler will pick
10813 something appropriate. */
10814 if (num_saves == 1 && TARGET_ARM)
10815 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10822 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10824 strcpy (pattern, \"push\\t{%1\");
10826 for (i = 1; i < num_saves; i++)
10828 strcat (pattern, \", %|\");
10830 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10833 strcat (pattern, \"}\");
10834 output_asm_insn (pattern, operands);
10839 [(set_attr "type" "store4")]
10842 (define_insn "stack_tie"
10843 [(set (mem:BLK (scratch))
10844 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10845 (match_operand:SI 1 "s_register_operand" "rk")]
10849 [(set_attr "length" "0")]
10852 ;; Similarly for the floating point registers
10853 (define_insn "*push_fp_multi"
10854 [(match_parallel 2 "multi_register_push"
10855 [(set (match_operand:BLK 0 "memory_operand" "=m")
10856 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10857 UNSPEC_PUSH_MULT))])]
10858 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10863 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10864 output_asm_insn (pattern, operands);
10867 [(set_attr "type" "f_store")]
10870 ;; Special patterns for dealing with the constant pool
10872 (define_insn "align_4"
10873 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10876 assemble_align (32);
10881 (define_insn "align_8"
10882 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10885 assemble_align (64);
10890 (define_insn "consttable_end"
10891 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10894 making_const_table = FALSE;
10899 (define_insn "consttable_1"
10900 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10903 making_const_table = TRUE;
10904 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10905 assemble_zeros (3);
10908 [(set_attr "length" "4")]
10911 (define_insn "consttable_2"
10912 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10915 making_const_table = TRUE;
10916 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10917 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10918 assemble_zeros (2);
10921 [(set_attr "length" "4")]
10924 (define_insn "consttable_4"
10925 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10929 rtx x = operands[0];
10930 making_const_table = TRUE;
10931 switch (GET_MODE_CLASS (GET_MODE (x)))
10934 if (GET_MODE (x) == HFmode)
10935 arm_emit_fp16_const (x);
10939 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10940 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10944 /* XXX: Sometimes gcc does something really dumb and ends up with
10945 a HIGH in a constant pool entry, usually because it's trying to
10946 load into a VFP register. We know this will always be used in
10947 combination with a LO_SUM which ignores the high bits, so just
10948 strip off the HIGH. */
10949 if (GET_CODE (x) == HIGH)
10951 assemble_integer (x, 4, BITS_PER_WORD, 1);
10952 mark_symbol_refs_as_used (x);
10957 [(set_attr "length" "4")]
10960 (define_insn "consttable_8"
10961 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10965 making_const_table = TRUE;
10966 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10971 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10972 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10976 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10981 [(set_attr "length" "8")]
10984 (define_insn "consttable_16"
10985 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10989 making_const_table = TRUE;
10990 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10995 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10996 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11000 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11005 [(set_attr "length" "16")]
11008 ;; Miscellaneous Thumb patterns
11010 (define_expand "tablejump"
11011 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11012 (use (label_ref (match_operand 1 "" "")))])]
11017 /* Hopefully, CSE will eliminate this copy. */
11018 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11019 rtx reg2 = gen_reg_rtx (SImode);
11021 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11022 operands[0] = reg2;
11027 ;; NB never uses BX.
11028 (define_insn "*thumb1_tablejump"
11029 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11030 (use (label_ref (match_operand 1 "" "")))]
11033 [(set_attr "length" "2")]
11036 ;; V5 Instructions,
11038 (define_insn "clzsi2"
11039 [(set (match_operand:SI 0 "s_register_operand" "=r")
11040 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11041 "TARGET_32BIT && arm_arch5"
11043 [(set_attr "predicable" "yes")
11044 (set_attr "insn" "clz")])
11046 (define_insn "rbitsi2"
11047 [(set (match_operand:SI 0 "s_register_operand" "=r")
11048 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11049 "TARGET_32BIT && arm_arch_thumb2"
11051 [(set_attr "predicable" "yes")
11052 (set_attr "insn" "clz")])
11054 (define_expand "ctzsi2"
11055 [(set (match_operand:SI 0 "s_register_operand" "")
11056 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11057 "TARGET_32BIT && arm_arch_thumb2"
11060 rtx tmp = gen_reg_rtx (SImode);
11061 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11062 emit_insn (gen_clzsi2 (operands[0], tmp));
11068 ;; V5E instructions.
11070 (define_insn "prefetch"
11071 [(prefetch (match_operand:SI 0 "address_operand" "p")
11072 (match_operand:SI 1 "" "")
11073 (match_operand:SI 2 "" ""))]
11074 "TARGET_32BIT && arm_arch5e"
11077 ;; General predication pattern
11080 [(match_operator 0 "arm_comparison_operator"
11081 [(match_operand 1 "cc_register" "")
11087 (define_insn "prologue_use"
11088 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11090 "%@ %0 needed for prologue"
11091 [(set_attr "length" "0")]
11095 ;; Patterns for exception handling
11097 (define_expand "eh_return"
11098 [(use (match_operand 0 "general_operand" ""))]
11103 emit_insn (gen_arm_eh_return (operands[0]));
11105 emit_insn (gen_thumb_eh_return (operands[0]));
11110 ;; We can't expand this before we know where the link register is stored.
11111 (define_insn_and_split "arm_eh_return"
11112 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11114 (clobber (match_scratch:SI 1 "=&r"))]
11117 "&& reload_completed"
11121 arm_set_return_address (operands[0], operands[1]);
11126 (define_insn_and_split "thumb_eh_return"
11127 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11129 (clobber (match_scratch:SI 1 "=&l"))]
11132 "&& reload_completed"
11136 thumb_set_return_address (operands[0], operands[1]);
11144 (define_insn "load_tp_hard"
11145 [(set (match_operand:SI 0 "register_operand" "=r")
11146 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11148 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11149 [(set_attr "predicable" "yes")]
11152 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11153 (define_insn "load_tp_soft"
11154 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11155 (clobber (reg:SI LR_REGNUM))
11156 (clobber (reg:SI IP_REGNUM))
11157 (clobber (reg:CC CC_REGNUM))]
11159 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11160 [(set_attr "conds" "clob")]
11163 (define_insn "*arm_movtas_ze"
11164 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11167 (match_operand:SI 1 "const_int_operand" ""))]
11170 [(set_attr "predicable" "yes")
11171 (set_attr "length" "4")]
11174 (define_insn "arm_rev"
11175 [(set (match_operand:SI 0 "s_register_operand" "=r")
11176 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11177 "TARGET_EITHER && arm_arch6"
11179 [(set (attr "length")
11180 (if_then_else (eq_attr "is_thumb" "yes")
11185 (define_expand "arm_legacy_rev"
11186 [(set (match_operand:SI 2 "s_register_operand" "")
11187 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11191 (lshiftrt:SI (match_dup 2)
11193 (set (match_operand:SI 3 "s_register_operand" "")
11194 (rotatert:SI (match_dup 1)
11197 (and:SI (match_dup 2)
11198 (const_int -65281)))
11199 (set (match_operand:SI 0 "s_register_operand" "")
11200 (xor:SI (match_dup 3)
11206 ;; Reuse temporaries to keep register pressure down.
11207 (define_expand "thumb_legacy_rev"
11208 [(set (match_operand:SI 2 "s_register_operand" "")
11209 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11211 (set (match_operand:SI 3 "s_register_operand" "")
11212 (lshiftrt:SI (match_dup 1)
11215 (ior:SI (match_dup 3)
11217 (set (match_operand:SI 4 "s_register_operand" "")
11219 (set (match_operand:SI 5 "s_register_operand" "")
11220 (rotatert:SI (match_dup 1)
11223 (ashift:SI (match_dup 5)
11226 (lshiftrt:SI (match_dup 5)
11229 (ior:SI (match_dup 5)
11232 (rotatert:SI (match_dup 5)
11234 (set (match_operand:SI 0 "s_register_operand" "")
11235 (ior:SI (match_dup 5)
11241 (define_expand "bswapsi2"
11242 [(set (match_operand:SI 0 "s_register_operand" "=r")
11243 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11248 if (!optimize_size)
11250 rtx op2 = gen_reg_rtx (SImode);
11251 rtx op3 = gen_reg_rtx (SImode);
11255 rtx op4 = gen_reg_rtx (SImode);
11256 rtx op5 = gen_reg_rtx (SImode);
11258 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11259 op2, op3, op4, op5));
11263 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11275 ;; Load the FPA co-processor patterns
11277 ;; Load the Maverick co-processor patterns
11278 (include "cirrus.md")
11279 ;; Vector bits common to IWMMXT and Neon
11280 (include "vec-common.md")
11281 ;; Load the Intel Wireless Multimedia Extension patterns
11282 (include "iwmmxt.md")
11283 ;; Load the VFP co-processor patterns
11285 ;; Thumb-2 patterns
11286 (include "thumb2.md")
11288 (include "neon.md")