1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec enumerators for Neon are defined in neon.md.
56 (define_c_enum "unspec" [
57 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 UNSPEC_PUSH_MULT ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
81 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
94 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
96 UNSPEC_STACK_ALIGN ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
101 ; given symbolic address.
102 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
103 UNSPEC_RBIT ; rbit operation.
104 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
105 ; another symbolic address.
106 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
109 ;; UNSPEC_VOLATILE Usage:
111 (define_c_enum "unspecv" [
112 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
114 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
118 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
122 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
124 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
126 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
128 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
130 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
132 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
133 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
134 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
135 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
136 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
137 VUNSPEC_WCMP_GT ; Used by the iWMMXt WCMPGT instructions
138 VUNSPEC_EH_RETURN ; Used to override the return address for exception
140 VUNSPEC_SYNC_COMPARE_AND_SWAP ; Represent an atomic compare swap.
141 VUNSPEC_SYNC_LOCK ; Represent a sync_lock_test_and_set.
142 VUNSPEC_SYNC_OP ; Represent a sync_<op>
143 VUNSPEC_SYNC_NEW_OP ; Represent a sync_new_<op>
144 VUNSPEC_SYNC_OLD_OP ; Represent a sync_old_<op>
147 ;;---------------------------------------------------------------------------
150 ;; Processor type. This is created automatically from arm-cores.def.
151 (include "arm-tune.md")
153 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
154 ; generating ARM code. This is used to control the length of some insn
155 ; patterns that share the same RTL in both ARM and Thumb code.
156 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
158 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
159 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
161 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
162 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
164 ;; Operand number of an input operand that is shifted. Zero if the
165 ;; given instruction does not shift one of its input operands.
166 (define_attr "shift" "" (const_int 0))
168 ; Floating Point Unit. If we only have floating point emulation, then there
169 ; is no point in scheduling the floating point insns. (Well, for best
170 ; performance we should try and group them together).
171 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
172 (const (symbol_ref "arm_fpu_attr")))
174 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
180 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
181 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
182 (const_string "none"))
184 ; LENGTH of an instruction (in bytes)
185 (define_attr "length" ""
186 (cond [(not (eq_attr "sync_memory" "none"))
187 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
190 ; The architecture which supports the instruction (or alternative).
191 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
192 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
193 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
194 ; arm_arch6. This attribute is used to compute attribute "enabled",
195 ; use type "any" to enable an alternative in all cases.
196 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
197 (const_string "any"))
199 (define_attr "arch_enabled" "no,yes"
200 (cond [(eq_attr "arch" "any")
203 (and (eq_attr "arch" "a")
204 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
207 (and (eq_attr "arch" "t")
208 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
211 (and (eq_attr "arch" "t1")
212 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
215 (and (eq_attr "arch" "t2")
216 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
219 (and (eq_attr "arch" "32")
220 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
223 (and (eq_attr "arch" "v6")
224 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
227 (and (eq_attr "arch" "nov6")
228 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
231 (and (eq_attr "arch" "onlya8")
232 (eq_attr "tune" "cortexa8"))
235 (and (eq_attr "arch" "nota8")
236 (not (eq_attr "tune" "cortexa8")))
237 (const_string "yes")]
238 (const_string "no")))
240 ; Allows an insn to disable certain alternatives for reasons other than
242 (define_attr "insn_enabled" "no,yes"
243 (const_string "yes"))
245 ; Enable all alternatives that are both arch_enabled and insn_enabled.
246 (define_attr "enabled" "no,yes"
247 (if_then_else (eq_attr "insn_enabled" "yes")
248 (if_then_else (eq_attr "arch_enabled" "yes")
251 (const_string "no")))
253 ; POOL_RANGE is how far away from a constant pool entry that this insn
254 ; can be placed. If the distance is zero, then this insn will never
255 ; reference the pool.
256 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
257 ; before its address.
258 (define_attr "arm_pool_range" "" (const_int 0))
259 (define_attr "thumb2_pool_range" "" (const_int 0))
260 (define_attr "arm_neg_pool_range" "" (const_int 0))
261 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Resolve POOL_RANGE for the current mode: the Thumb-2 range when
; generating Thumb code, otherwise the ARM range.
263 (define_attr "pool_range" ""
264 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
265 (attr "arm_pool_range")))
; Resolve NEG_POOL_RANGE for the current mode: the Thumb-2 range when
; generating Thumb code, otherwise the ARM range.
266 (define_attr "neg_pool_range" ""
267 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
268 (attr "arm_neg_pool_range")))
270 ; An assembler sequence may clobber the condition codes without us knowing.
271 ; If such an insn references the pool, then we have no way of knowing how,
272 ; so use the most conservative value for pool_range.
273 (define_asm_attributes
274 [(set_attr "conds" "clob")
275 (set_attr "length" "4")
276 (set_attr "pool_range" "250")])
278 ;; The instruction used to implement a particular pattern. This
279 ;; information is used by pipeline descriptions to provide accurate
280 ;; scheduling information.
283 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
284 (const_string "other"))
286 ; TYPE attribute is used to detect floating point instructions which, if
287 ; running on a co-processor can run in parallel with other, basic instructions
288 ; If write-buffer scheduling is enabled then it can also be used in the
289 ; scheduling of writes.
291 ; Classification of each insn
292 ; Note: vfp.md has different meanings for some of these, and some further
293 ; types as well. See that file for details.
294 ; alu any alu instruction that doesn't hit memory or fp
295 ; regs or have a shifted source operand
296 ; alu_shift any data instruction that doesn't hit memory or fp
297 ; regs, but has a source operand shifted by a constant
298 ; alu_shift_reg any data instruction that doesn't hit memory or fp
299 ; regs, but has a source operand shifted by a register value
300 ; mult a multiply instruction
301 ; block blockage insn, this blocks all functional units
302 ; float a floating point arithmetic operation (subject to expansion)
303 ; fdivd DFmode floating point division
304 ; fdivs SFmode floating point division
305 ; fmul Floating point multiply
306 ; ffmul Fast floating point multiply
307 ; farith Floating point arithmetic (4 cycle)
308 ; ffarith Fast floating point arithmetic (2 cycle)
309 ; float_em a floating point arithmetic operation that is normally emulated
310 ; even on a machine with an fpa.
311 ; f_fpa_load a floating point load from memory. Only for the FPA.
312 ; f_fpa_store a floating point store to memory. Only for the FPA.
313 ; f_load[sd] A single/double load from memory. Used for VFP unit.
314 ; f_store[sd] A single/double store to memory. Used for VFP unit.
315 ; f_flag a transfer of co-processor flags to the CPSR
316 ; f_mem_r a transfer of a floating point register to a real reg via mem
317 ; r_mem_f the reverse of f_mem_r
318 ; f_2_r fast transfer float to arm (no memory needed)
319 ; r_2_f fast transfer arm to float
320 ; f_cvt convert floating<->integral
322 ; call a subroutine call
323 ; load_byte load byte(s) from memory to arm registers
324 ; load1 load 1 word from memory to arm registers
325 ; load2 load 2 words from memory to arm registers
326 ; load3 load 3 words from memory to arm registers
327 ; load4 load 4 words from memory to arm registers
328 ; store store 1 word to memory from arm registers
329 ; store2 store 2 words
330 ; store3 store 3 words
331 ; store4 store 4 (or more) words
332 ; Additions for Cirrus Maverick co-processor:
333 ; mav_farith Floating point arithmetic (4 cycle)
334 ; mav_dmult Double multiplies (7 cycle)
338 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
340 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
341 (const_string "mult")
342 (const_string "alu")))
344 ; Load scheduling, set from the arm_ld_sched variable
345 ; initialized by arm_option_override()
346 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
348 ;; Classification of NEON instructions for scheduling purposes.
349 ;; Do not set this attribute and the "type" attribute together in
350 ;; any one instruction pattern.
351 (define_attr "neon_type"
362 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
363 neon_mul_qqq_8_16_32_ddd_32,\
364 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
365 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
367 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
368 neon_mla_qqq_32_qqd_32_scalar,\
369 neon_mul_ddd_16_scalar_32_16_long_scalar,\
370 neon_mul_qqd_32_scalar,\
371 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
376 neon_vqshl_vrshl_vqrshl_qqq,\
378 neon_fp_vadd_ddd_vabs_dd,\
379 neon_fp_vadd_qqq_vabs_qq,\
385 neon_fp_vmla_ddd_scalar,\
386 neon_fp_vmla_qqq_scalar,\
387 neon_fp_vrecps_vrsqrts_ddd,\
388 neon_fp_vrecps_vrsqrts_qqq,\
396 neon_vld2_2_regs_vld1_vld2_all_lanes,\
399 neon_vst1_1_2_regs_vst2_2_regs,\
401 neon_vst2_4_regs_vst3_vst4,\
403 neon_vld1_vld2_lane,\
404 neon_vld3_vld4_lane,\
405 neon_vst1_vst2_lane,\
406 neon_vst3_vst4_lane,\
407 neon_vld3_vld4_all_lanes,\
415 (const_string "none"))
417 ; condition codes: this one is used by final_prescan_insn to speed up
418 ; conditionalizing instructions. It saves having to scan the rtl to see if
419 ; it uses or alters the condition codes.
421 ; USE means that the condition codes are used by the insn in the process of
422 ; outputting code, this means (at present) that we can't use the insn in
425 ; SET means that the purpose of the insn is to set the condition codes in a
426 ; well defined manner.
428 ; CLOB means that the condition codes are altered in an undefined manner, if
429 ; they are altered at all
431 ; UNCONDITIONAL means the instruction can not be conditionally executed and
432 ; that the instruction does not use or alter the condition codes.
434 ; NOCOND means that the instruction does not use or alter the condition
435 ; codes but can be converted into a conditionally executed instruction.
437 (define_attr "conds" "use,set,clob,unconditional,nocond"
439 (ior (eq_attr "is_thumb1" "yes")
440 (eq_attr "type" "call"))
441 (const_string "clob")
442 (if_then_else (eq_attr "neon_type" "none")
443 (const_string "nocond")
444 (const_string "unconditional"))))
446 ; Predicable means that the insn can be conditionally executed based on
447 ; an automatically added predicate (additional patterns are generated by
448 ; gen...). We default to 'no' because no Thumb patterns match this rule
449 ; and not all ARM patterns do.
450 (define_attr "predicable" "no,yes" (const_string "no"))
452 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
453 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
454 ; suffer blockages enough to warrant modelling this (and it can adversely
455 ; affect the schedule).
456 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
458 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
459 ; to stall the processor. Used with model_wbuf above.
460 (define_attr "write_conflict" "no,yes"
461 (if_then_else (eq_attr "type"
462 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
464 (const_string "no")))
466 ; Classify the insns into those that take one cycle and those that take more
467 ; than one on the main cpu execution unit.
468 (define_attr "core_cycles" "single,multi"
469 (if_then_else (eq_attr "type"
470 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
471 (const_string "single")
472 (const_string "multi")))
474 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
475 ;; distant label. Only applicable to Thumb code.
476 (define_attr "far_jump" "yes,no" (const_string "no"))
479 ;; The number of machine instructions this pattern expands to.
480 ;; Used for Thumb-2 conditional execution.
481 (define_attr "ce_count" "" (const_int 1))
483 ;;---------------------------------------------------------------------------
486 (include "iterators.md")
488 ;;---------------------------------------------------------------------------
491 (include "predicates.md")
492 (include "constraints.md")
494 ;;---------------------------------------------------------------------------
495 ;; Pipeline descriptions
497 (define_attr "tune_cortexr4" "yes,no"
499 (eq_attr "tune" "cortexr4,cortexr4f")
501 (const_string "no"))))
503 ;; True if the generic scheduling description should be used.
505 (define_attr "generic_sched" "yes,no"
507 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
508 (eq_attr "tune_cortexr4" "yes"))
510 (const_string "yes"))))
512 (define_attr "generic_vfp" "yes,no"
514 (and (eq_attr "fpu" "vfp")
515 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
516 (eq_attr "tune_cortexr4" "no"))
518 (const_string "no"))))
520 (include "arm-generic.md")
521 (include "arm926ejs.md")
522 (include "arm1020e.md")
523 (include "arm1026ejs.md")
524 (include "arm1136jfs.md")
526 (include "fa606te.md")
527 (include "fa626te.md")
528 (include "fmp626.md")
529 (include "fa726te.md")
530 (include "cortex-a5.md")
531 (include "cortex-a8.md")
532 (include "cortex-a9.md")
533 (include "cortex-r4.md")
534 (include "cortex-r4f.md")
535 (include "cortex-m4.md")
536 (include "cortex-m4-fpu.md")
540 ;;---------------------------------------------------------------------------
545 ;; Note: For DImode insns, there is normally no reason why operands should
546 ;; not be in the same register, what we don't want is for something being
547 ;; written to partially overlap something that is an input.
548 ;; Cirrus 64-bit additions should not be split because we have a native
549 ;; 64-bit addition instruction.
551 (define_expand "adddi3"
553 [(set (match_operand:DI 0 "s_register_operand" "")
554 (plus:DI (match_operand:DI 1 "s_register_operand" "")
555 (match_operand:DI 2 "s_register_operand" "")))
556 (clobber (reg:CC CC_REGNUM))])]
559 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
561 if (!cirrus_fp_register (operands[0], DImode))
562 operands[0] = force_reg (DImode, operands[0]);
563 if (!cirrus_fp_register (operands[1], DImode))
564 operands[1] = force_reg (DImode, operands[1]);
565 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
571 if (GET_CODE (operands[1]) != REG)
572 operands[1] = force_reg (DImode, operands[1]);
573 if (GET_CODE (operands[2]) != REG)
574 operands[2] = force_reg (DImode, operands[2]);
579 (define_insn "*thumb1_adddi3"
580 [(set (match_operand:DI 0 "register_operand" "=l")
581 (plus:DI (match_operand:DI 1 "register_operand" "%0")
582 (match_operand:DI 2 "register_operand" "l")))
583 (clobber (reg:CC CC_REGNUM))
586 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
587 [(set_attr "length" "4")]
590 (define_insn_and_split "*arm_adddi3"
591 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
592 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
593 (match_operand:DI 2 "s_register_operand" "r, 0")))
594 (clobber (reg:CC CC_REGNUM))]
595 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
597 "TARGET_32BIT && reload_completed
598 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
599 [(parallel [(set (reg:CC_C CC_REGNUM)
600 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
602 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
603 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
604 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
607 operands[3] = gen_highpart (SImode, operands[0]);
608 operands[0] = gen_lowpart (SImode, operands[0]);
609 operands[4] = gen_highpart (SImode, operands[1]);
610 operands[1] = gen_lowpart (SImode, operands[1]);
611 operands[5] = gen_highpart (SImode, operands[2]);
612 operands[2] = gen_lowpart (SImode, operands[2]);
614 [(set_attr "conds" "clob")
615 (set_attr "length" "8")]
618 (define_insn_and_split "*adddi_sesidi_di"
619 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
620 (plus:DI (sign_extend:DI
621 (match_operand:SI 2 "s_register_operand" "r,r"))
622 (match_operand:DI 1 "s_register_operand" "0,r")))
623 (clobber (reg:CC CC_REGNUM))]
624 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
626 "TARGET_32BIT && reload_completed"
627 [(parallel [(set (reg:CC_C CC_REGNUM)
628 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
630 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
631 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
634 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
637 operands[3] = gen_highpart (SImode, operands[0]);
638 operands[0] = gen_lowpart (SImode, operands[0]);
639 operands[4] = gen_highpart (SImode, operands[1]);
640 operands[1] = gen_lowpart (SImode, operands[1]);
641 operands[2] = gen_lowpart (SImode, operands[2]);
643 [(set_attr "conds" "clob")
644 (set_attr "length" "8")]
647 (define_insn_and_split "*adddi_zesidi_di"
648 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
649 (plus:DI (zero_extend:DI
650 (match_operand:SI 2 "s_register_operand" "r,r"))
651 (match_operand:DI 1 "s_register_operand" "0,r")))
652 (clobber (reg:CC CC_REGNUM))]
653 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
655 "TARGET_32BIT && reload_completed"
656 [(parallel [(set (reg:CC_C CC_REGNUM)
657 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
659 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
660 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
661 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
664 operands[3] = gen_highpart (SImode, operands[0]);
665 operands[0] = gen_lowpart (SImode, operands[0]);
666 operands[4] = gen_highpart (SImode, operands[1]);
667 operands[1] = gen_lowpart (SImode, operands[1]);
668 operands[2] = gen_lowpart (SImode, operands[2]);
670 [(set_attr "conds" "clob")
671 (set_attr "length" "8")]
674 (define_expand "addsi3"
675 [(set (match_operand:SI 0 "s_register_operand" "")
676 (plus:SI (match_operand:SI 1 "s_register_operand" "")
677 (match_operand:SI 2 "reg_or_int_operand" "")))]
680 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
682 arm_split_constant (PLUS, SImode, NULL_RTX,
683 INTVAL (operands[2]), operands[0], operands[1],
684 optimize && can_create_pseudo_p ());
690 ; If there is a scratch available, this will be faster than synthesizing the
693 [(match_scratch:SI 3 "r")
694 (set (match_operand:SI 0 "arm_general_register_operand" "")
695 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
696 (match_operand:SI 2 "const_int_operand" "")))]
698 !(const_ok_for_arm (INTVAL (operands[2]))
699 || const_ok_for_arm (-INTVAL (operands[2])))
700 && const_ok_for_arm (~INTVAL (operands[2]))"
701 [(set (match_dup 3) (match_dup 2))
702 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
706 ;; The r/r/k alternative is required when reloading the address
707 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
708 ;; put the duplicated register first, and not try the commutative version.
709 (define_insn_and_split "*arm_addsi3"
710 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
711 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
712 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
722 && GET_CODE (operands[2]) == CONST_INT
723 && !(const_ok_for_arm (INTVAL (operands[2]))
724 || const_ok_for_arm (-INTVAL (operands[2])))
725 && (reload_completed || !arm_eliminable_register (operands[1]))"
726 [(clobber (const_int 0))]
728 arm_split_constant (PLUS, SImode, curr_insn,
729 INTVAL (operands[2]), operands[0],
733 [(set_attr "length" "4,4,4,4,4,16")
734 (set_attr "predicable" "yes")]
737 (define_insn_and_split "*thumb1_addsi3"
738 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
739 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
740 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
743 static const char * const asms[] =
745 \"add\\t%0, %0, %2\",
746 \"sub\\t%0, %0, #%n2\",
747 \"add\\t%0, %1, %2\",
748 \"add\\t%0, %0, %2\",
749 \"add\\t%0, %0, %2\",
750 \"add\\t%0, %1, %2\",
751 \"add\\t%0, %1, %2\",
756 if ((which_alternative == 2 || which_alternative == 6)
757 && GET_CODE (operands[2]) == CONST_INT
758 && INTVAL (operands[2]) < 0)
759 return \"sub\\t%0, %1, #%n2\";
760 return asms[which_alternative];
762 "&& reload_completed && CONST_INT_P (operands[2])
763 && ((operands[1] != stack_pointer_rtx
764 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
765 || (operands[1] == stack_pointer_rtx
766 && INTVAL (operands[2]) > 1020))"
767 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
768 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
770 HOST_WIDE_INT offset = INTVAL (operands[2]);
771 if (operands[1] == stack_pointer_rtx)
777 else if (offset < -255)
780 operands[3] = GEN_INT (offset);
781 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
783 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
786 ;; Reloading and elimination of the frame pointer can
787 ;; sometimes cause this optimization to be missed.
789 [(set (match_operand:SI 0 "arm_general_register_operand" "")
790 (match_operand:SI 1 "const_int_operand" ""))
792 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
794 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
795 && (INTVAL (operands[1]) & 3) == 0"
796 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
800 (define_insn "*addsi3_compare0"
801 [(set (reg:CC_NOOV CC_REGNUM)
803 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
804 (match_operand:SI 2 "arm_add_operand" "rI,L"))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (match_dup 1) (match_dup 2)))]
811 sub%.\\t%0, %1, #%n2"
812 [(set_attr "conds" "set")]
815 (define_insn "*addsi3_compare0_scratch"
816 [(set (reg:CC_NOOV CC_REGNUM)
818 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
819 (match_operand:SI 1 "arm_add_operand" "rI,L"))
825 [(set_attr "conds" "set")]
828 (define_insn "*compare_negsi_si"
829 [(set (reg:CC_Z CC_REGNUM)
831 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
832 (match_operand:SI 1 "s_register_operand" "r")))]
835 [(set_attr "conds" "set")]
838 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
839 ;; addend is a constant.
840 (define_insn "*cmpsi2_addneg"
841 [(set (reg:CC CC_REGNUM)
843 (match_operand:SI 1 "s_register_operand" "r,r")
844 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
845 (set (match_operand:SI 0 "s_register_operand" "=r,r")
846 (plus:SI (match_dup 1)
847 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
848 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
851 sub%.\\t%0, %1, #%n3"
852 [(set_attr "conds" "set")]
855 ;; Convert the sequence
857 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
861 ;; bcs dest ((unsigned)rn >= 1)
862 ;; similarly for the beq variant using bcc.
863 ;; This is a common looping idiom (while (n--))
865 [(set (match_operand:SI 0 "arm_general_register_operand" "")
866 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
868 (set (match_operand 2 "cc_register" "")
869 (compare (match_dup 0) (const_int -1)))
871 (if_then_else (match_operator 3 "equality_operator"
872 [(match_dup 2) (const_int 0)])
873 (match_operand 4 "" "")
874 (match_operand 5 "" "")))]
875 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
879 (match_dup 1) (const_int 1)))
880 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
882 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
885 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
886 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
889 operands[2], const0_rtx);"
892 ;; The next four insns work because they compare the result with one of
893 ;; the operands, and we know that the use of the condition code is
894 ;; either GEU or LTU, so we can use the carry flag from the addition
895 ;; instead of doing the compare a second time.
896 (define_insn "*addsi3_compare_op1"
897 [(set (reg:CC_C CC_REGNUM)
899 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
900 (match_operand:SI 2 "arm_add_operand" "rI,L"))
902 (set (match_operand:SI 0 "s_register_operand" "=r,r")
903 (plus:SI (match_dup 1) (match_dup 2)))]
907 sub%.\\t%0, %1, #%n2"
908 [(set_attr "conds" "set")]
911 (define_insn "*addsi3_compare_op2"
912 [(set (reg:CC_C CC_REGNUM)
914 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
915 (match_operand:SI 2 "arm_add_operand" "rI,L"))
917 (set (match_operand:SI 0 "s_register_operand" "=r,r")
918 (plus:SI (match_dup 1) (match_dup 2)))]
922 sub%.\\t%0, %1, #%n2"
923 [(set_attr "conds" "set")]
926 (define_insn "*compare_addsi2_op0"
927 [(set (reg:CC_C CC_REGNUM)
929 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
930 (match_operand:SI 1 "arm_add_operand" "rI,L"))
936 [(set_attr "conds" "set")]
939 (define_insn "*compare_addsi2_op1"
940 [(set (reg:CC_C CC_REGNUM)
942 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
943 (match_operand:SI 1 "arm_add_operand" "rI,L"))
949 [(set_attr "conds" "set")]
;; Add-with-carry patterns: fold the C flag (LTU/GEU of CC_REGNUM) into
;; an addition so the carry from a prior add is reused via ADC.
;; NOTE(review): condition strings and some template lines are missing
;; from this dump — restore from upstream arm.md before building.
952 (define_insn "*addsi3_carryin_<optab>"
953 [(set (match_operand:SI 0 "s_register_operand" "=r")
954 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
955 (match_operand:SI 2 "arm_rhs_operand" "rI"))
956 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
959 [(set_attr "conds" "use")]
;; Canonicalisation variant: carry term appears first in the RTL.
962 (define_insn "*addsi3_carryin_alt2_<optab>"
963 [(set (match_operand:SI 0 "s_register_operand" "=r")
964 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
965 (match_operand:SI 1 "s_register_operand" "%r"))
966 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
969 [(set_attr "conds" "use")]
;; ADC where the second addend is a shifted register (%S2 prints the
;; shift given by operator 2 with amount operand 4).
972 (define_insn "*addsi3_carryin_shift_<optab>"
973 [(set (match_operand:SI 0 "s_register_operand" "=r")
975 (match_operator:SI 2 "shift_operator"
976 [(match_operand:SI 3 "s_register_operand" "r")
977 (match_operand:SI 4 "reg_or_int_operand" "rM")])
978 (match_operand:SI 1 "s_register_operand" "r"))
979 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
981 "adc%?\\t%0, %1, %3%S2"
982 [(set_attr "conds" "use")
;; Immediate shift counts are cheaper than register-specified shifts.
983 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
984 (const_string "alu_shift")
985 (const_string "alu_shift_reg")))]
;; incscc: op0 = op1 + (condition ? 1 : 0), condition read from a CC reg.
;; NOTE(review): expander condition/body lines are missing from this dump.
988 (define_expand "incscc"
989 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
990 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
991 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
992 (match_operand:SI 1 "s_register_operand" "0,?r")))]
;; ARM-state implementation: conditional ADD (plus a MOV when op0 != op1,
;; hence length 4 or 8 below).
997 (define_insn "*arm_incscc"
998 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
999 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1000 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1001 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1005 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "length" "4,8")]
1010 ; transform ((x << y) - 1) to ~(~(x-1) << y) where x is a constant.
1012 [(set (match_operand:SI 0 "s_register_operand" "")
1013 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1014 (match_operand:SI 2 "s_register_operand" ""))
1016 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1018 [(set (match_dup 3) (match_dup 1))
;; Rewrite the constant so MVN/shift can synthesise the value.
1019 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1021 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; FP addition expanders.  Maverick (Cirrus) FP cannot take an immediate
;; addend, so non-register operands are forced into registers there.
1024 (define_expand "addsf3"
1025 [(set (match_operand:SF 0 "s_register_operand" "")
1026 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1027 (match_operand:SF 2 "arm_float_add_operand" "")))]
1028 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; NOTE(review): the `if (TARGET_MAVERICK ...` guard line is missing here.
1031 && !cirrus_fp_register (operands[2], SFmode))
1032 operands[2] = force_reg (SFmode, operands[2]);
1035 (define_expand "adddf3"
1036 [(set (match_operand:DF 0 "s_register_operand" "")
1037 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1038 (match_operand:DF 2 "arm_float_add_operand" "")))]
;; VFP single-precision-only units cannot do DFmode arithmetic.
1039 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1042 && !cirrus_fp_register (operands[2], DFmode))
1043 operands[2] = force_reg (DFmode, operands[2]);
;; 64-bit subtraction.  The expander dispatches to Cirrus/Maverick when
;; possible; otherwise the generic two-insn SUBS/SBC sequence is used.
1046 (define_expand "subdi3"
1048 [(set (match_operand:DI 0 "s_register_operand" "")
1049 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1050 (match_operand:DI 2 "s_register_operand" "")))
1051 (clobber (reg:CC CC_REGNUM))])]
1054 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1056 && cirrus_fp_register (operands[0], DImode)
1057 && cirrus_fp_register (operands[1], DImode))
1059 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
;; Thumb-1 path: both inputs must live in registers.
1065 if (GET_CODE (operands[1]) != REG)
1066 operands[1] = force_reg (DImode, operands[1]);
1067 if (GET_CODE (operands[2]) != REG)
1068 operands[2] = force_reg (DImode, operands[2]);
;; Low words subtract with SUBS; high words consume the borrow via SBC.
1073 (define_insn "*arm_subdi3"
1074 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1075 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1076 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1077 (clobber (reg:CC CC_REGNUM))]
1078 "TARGET_32BIT && !TARGET_NEON"
1079 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1080 [(set_attr "conds" "clob")
1081 (set_attr "length" "8")]
1084 (define_insn "*thumb_subdi3"
1085 [(set (match_operand:DI 0 "register_operand" "=l")
1086 (minus:DI (match_operand:DI 1 "register_operand" "0")
1087 (match_operand:DI 2 "register_operand" "l")))
1088 (clobber (reg:CC CC_REGNUM))]
1090 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1091 [(set_attr "length" "4")]
;; DI minus zero-extended SI: high word only needs the borrow (#0).
1094 (define_insn "*subdi_di_zesidi"
1095 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1096 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1098 (match_operand:SI 2 "s_register_operand" "r,r"))))
1099 (clobber (reg:CC CC_REGNUM))]
1101 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1102 [(set_attr "conds" "clob")
1103 (set_attr "length" "8")]
;; DI minus sign-extended SI: high word subtracts the sign (op2 asr #31).
1106 (define_insn "*subdi_di_sesidi"
1107 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1108 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1110 (match_operand:SI 2 "s_register_operand" "r,r"))))
1111 (clobber (reg:CC CC_REGNUM))]
1113 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1114 [(set_attr "conds" "clob")
1115 (set_attr "length" "8")]
;; Reversed forms (extended SI minus DI) use RSBS/RSC.
1118 (define_insn "*subdi_zesidi_di"
1119 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1120 (minus:DI (zero_extend:DI
1121 (match_operand:SI 2 "s_register_operand" "r,r"))
1122 (match_operand:DI 1 "s_register_operand" "0,r")))
1123 (clobber (reg:CC CC_REGNUM))]
1125 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1126 [(set_attr "conds" "clob")
1127 (set_attr "length" "8")]
1130 (define_insn "*subdi_sesidi_di"
1131 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1132 (minus:DI (sign_extend:DI
1133 (match_operand:SI 2 "s_register_operand" "r,r"))
1134 (match_operand:DI 1 "s_register_operand" "0,r")))
1135 (clobber (reg:CC CC_REGNUM))]
1137 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1138 [(set_attr "conds" "clob")
1139 (set_attr "length" "8")]
;; Both operands zero-extended: SBC %1,%1 produces 0 or -1 (the borrow)
;; for the high word without needing an extra register.
1142 (define_insn "*subdi_zesidi_zesidi"
1143 [(set (match_operand:DI 0 "s_register_operand" "=r")
1144 (minus:DI (zero_extend:DI
1145 (match_operand:SI 1 "s_register_operand" "r"))
1147 (match_operand:SI 2 "s_register_operand" "r"))))
1148 (clobber (reg:CC CC_REGNUM))]
1150 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1151 [(set_attr "conds" "clob")
1152 (set_attr "length" "8")]
;; 32-bit subtraction.  A constant minuend is synthesised via
;; arm_split_constant on 32-bit targets, or forced to a register on Thumb-1.
1155 (define_expand "subsi3"
1156 [(set (match_operand:SI 0 "s_register_operand" "")
1157 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1158 (match_operand:SI 2 "s_register_operand" "")))]
1161 if (GET_CODE (operands[1]) == CONST_INT)
1165 arm_split_constant (MINUS, SImode, NULL_RTX,
1166 INTVAL (operands[1]), operands[0],
1167 operands[2], optimize && can_create_pseudo_p ());
1170 else /* TARGET_THUMB1 */
1171 operands[1] = force_reg (SImode, operands[1]);
1176 (define_insn "thumb1_subsi3_insn"
1177 [(set (match_operand:SI 0 "register_operand" "=l")
1178 (minus:SI (match_operand:SI 1 "register_operand" "l")
1179 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1182 [(set_attr "length" "2")
1183 (set_attr "conds" "set")])
1185 ; ??? Check Thumb-2 split length
;; Post-reload split: constants not encodable as ARM immediates are
;; expanded into an instruction sequence by arm_split_constant.
1186 (define_insn_and_split "*arm_subsi3_insn"
1187 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1188 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1189 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1197 "&& ((GET_CODE (operands[1]) == CONST_INT
1198 && !const_ok_for_arm (INTVAL (operands[1])))
1199 || (GET_CODE (operands[2]) == CONST_INT
1200 && !const_ok_for_arm (INTVAL (operands[2]))))"
1201 [(clobber (const_int 0))]
1203 arm_split_constant (MINUS, SImode, curr_insn,
1204 INTVAL (operands[1]), operands[0], operands[2], 0);
1207 [(set_attr "length" "4,4,4,16,16")
1208 (set_attr "predicable" "yes")]
;; Peephole: (const - reg) where ~const fits an immediate becomes
;; MVN into a scratch followed by a register subtract.
1212 [(match_scratch:SI 3 "r")
1213 (set (match_operand:SI 0 "arm_general_register_operand" "")
1214 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1215 (match_operand:SI 2 "arm_general_register_operand" "")))]
1217 && !const_ok_for_arm (INTVAL (operands[1]))
1218 && const_ok_for_arm (~INTVAL (operands[1]))"
1219 [(set (match_dup 3) (match_dup 1))
1220 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Flag-setting subtract variants; the CC_NOOV form ignores overflow.
;; NOTE(review): condition strings/templates are partly missing in this dump.
1224 (define_insn "*subsi3_compare0"
1225 [(set (reg:CC_NOOV CC_REGNUM)
1227 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1228 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1230 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1231 (minus:SI (match_dup 1) (match_dup 2)))]
1236 [(set_attr "conds" "set")]
;; Full CC compare of op1 against op2 while also producing the difference.
1239 (define_insn "*subsi3_compare"
1240 [(set (reg:CC CC_REGNUM)
1241 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1242 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1243 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1244 (minus:SI (match_dup 1) (match_dup 2)))]
1249 [(set_attr "conds" "set")]
;; decscc: op0 = op1 - (condition ? 1 : 0), condition from a CC register.
1252 (define_expand "decscc"
1253 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1254 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1255 (match_operator:SI 2 "arm_comparison_operator"
1256 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1261 (define_insn "*arm_decscc"
1262 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1263 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1264 (match_operator:SI 2 "arm_comparison_operator"
1265 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1269 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1270 [(set_attr "conds" "use")
1271 (set_attr "length" "*,8")]
;; FP subtraction expanders; Maverick needs register operands.
1274 (define_expand "subsf3"
1275 [(set (match_operand:SF 0 "s_register_operand" "")
1276 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1277 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1278 "TARGET_32BIT && TARGET_HARD_FLOAT"
1280 if (TARGET_MAVERICK)
1282 if (!cirrus_fp_register (operands[1], SFmode))
1283 operands[1] = force_reg (SFmode, operands[1]);
1284 if (!cirrus_fp_register (operands[2], SFmode))
1285 operands[2] = force_reg (SFmode, operands[2]);
1289 (define_expand "subdf3"
1290 [(set (match_operand:DF 0 "s_register_operand" "")
1291 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1292 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1293 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1295 if (TARGET_MAVERICK)
1297 if (!cirrus_fp_register (operands[1], DFmode))
1298 operands[1] = force_reg (DFmode, operands[1]);
1299 if (!cirrus_fp_register (operands[2], DFmode))
1300 operands[2] = force_reg (DFmode, operands[2]);
1305 ;; Multiplication insns
1307 (define_expand "mulsi3"
1308 [(set (match_operand:SI 0 "s_register_operand" "")
1309 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1310 (match_operand:SI 1 "s_register_operand" "")))]
1315 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-v6 MUL forbids Rd == Rm, hence the earlyclobber/tie trick above.
1316 (define_insn "*arm_mulsi3"
1317 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1318 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1319 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1320 "TARGET_32BIT && !arm_arch6"
1321 "mul%?\\t%0, %2, %1"
1322 [(set_attr "insn" "mul")
1323 (set_attr "predicable" "yes")]
;; v6 removed the Rd/Rm restriction, so no earlyclobber is needed.
1326 (define_insn "*arm_mulsi3_v6"
1327 [(set (match_operand:SI 0 "s_register_operand" "=r")
1328 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1329 (match_operand:SI 2 "s_register_operand" "r")))]
1330 "TARGET_32BIT && arm_arch6"
1331 "mul%?\\t%0, %1, %2"
1332 [(set_attr "insn" "mul")
1333 (set_attr "predicable" "yes")]
1336 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1337 ; 1 and 2 are the same, because reload will make operand 0 match
1338 ; operand 1 without realizing that this conflicts with operand 2. We fix
1339 ; this by adding another alternative to match this case, and then `reload'
1340 ; it ourselves. This alternative must come first.
1341 (define_insn "*thumb_mulsi3"
1342 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1343 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1344 (match_operand:SI 2 "register_operand" "l,l,l")))]
1345 "TARGET_THUMB1 && !arm_arch6"
1347 if (which_alternative < 2)
1348 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1350 return \"mul\\t%0, %2\";
1352 [(set_attr "length" "4,4,2")
1353 (set_attr "insn" "mul")]
1356 (define_insn "*thumb_mulsi3_v6"
1357 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1358 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1359 (match_operand:SI 2 "register_operand" "l,0,0")))]
1360 "TARGET_THUMB1 && arm_arch6"
;; NOTE(review): the multi-alternative template (lines 1361-1364) is
;; missing from this dump.
1365 [(set_attr "length" "2")
1366 (set_attr "insn" "mul")]
;; MULS variants: multiply and set condition codes (overflow ignored).
1369 (define_insn "*mulsi3_compare0"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1371 (compare:CC_NOOV (mult:SI
1372 (match_operand:SI 2 "s_register_operand" "r,r")
1373 (match_operand:SI 1 "s_register_operand" "%0,r"))
1375 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1376 (mult:SI (match_dup 2) (match_dup 1)))]
1377 "TARGET_ARM && !arm_arch6"
1378 "mul%.\\t%0, %2, %1"
1379 [(set_attr "conds" "set")
1380 (set_attr "insn" "muls")]
1383 (define_insn "*mulsi3_compare0_v6"
1384 [(set (reg:CC_NOOV CC_REGNUM)
1385 (compare:CC_NOOV (mult:SI
1386 (match_operand:SI 2 "s_register_operand" "r")
1387 (match_operand:SI 1 "s_register_operand" "r"))
1389 (set (match_operand:SI 0 "s_register_operand" "=r")
1390 (mult:SI (match_dup 2) (match_dup 1)))]
1391 "TARGET_ARM && arm_arch6 && optimize_size"
1392 "mul%.\\t%0, %2, %1"
1393 [(set_attr "conds" "set")
1394 (set_attr "insn" "muls")]
;; Flags-only forms: the product itself goes to a scratch register.
1397 (define_insn "*mulsi_compare0_scratch"
1398 [(set (reg:CC_NOOV CC_REGNUM)
1399 (compare:CC_NOOV (mult:SI
1400 (match_operand:SI 2 "s_register_operand" "r,r")
1401 (match_operand:SI 1 "s_register_operand" "%0,r"))
1403 (clobber (match_scratch:SI 0 "=&r,&r"))]
1404 "TARGET_ARM && !arm_arch6"
1405 "mul%.\\t%0, %2, %1"
1406 [(set_attr "conds" "set")
1407 (set_attr "insn" "muls")]
1410 (define_insn "*mulsi_compare0_scratch_v6"
1411 [(set (reg:CC_NOOV CC_REGNUM)
1412 (compare:CC_NOOV (mult:SI
1413 (match_operand:SI 2 "s_register_operand" "r")
1414 (match_operand:SI 1 "s_register_operand" "r"))
1416 (clobber (match_scratch:SI 0 "=r"))]
1417 "TARGET_ARM && arm_arch6 && optimize_size"
1418 "mul%.\\t%0, %2, %1"
1419 [(set_attr "conds" "set")
1420 (set_attr "insn" "muls")]
1423 ;; Unnamed templates to match MLA instruction.
;; MLA: op0 = op2 * op1 + op3.  Pre-v6 forms need the earlyclobber.
1425 (define_insn "*mulsi3addsi"
1426 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1428 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1429 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1430 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1431 "TARGET_32BIT && !arm_arch6"
1432 "mla%?\\t%0, %2, %1, %3"
1433 [(set_attr "insn" "mla")
1434 (set_attr "predicable" "yes")]
1437 (define_insn "*mulsi3addsi_v6"
1438 [(set (match_operand:SI 0 "s_register_operand" "=r")
1440 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1441 (match_operand:SI 1 "s_register_operand" "r"))
1442 (match_operand:SI 3 "s_register_operand" "r")))]
1443 "TARGET_32BIT && arm_arch6"
1444 "mla%?\\t%0, %2, %1, %3"
1445 [(set_attr "insn" "mla")
1446 (set_attr "predicable" "yes")]
;; MLAS: multiply-accumulate setting flags (CC_NOOV).
1449 (define_insn "*mulsi3addsi_compare0"
1450 [(set (reg:CC_NOOV CC_REGNUM)
1453 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1454 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1455 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1457 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1458 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1460 "TARGET_ARM && arm_arch6"
1461 "mla%.\\t%0, %2, %1, %3"
1462 [(set_attr "conds" "set")
1463 (set_attr "insn" "mlas")]
1466 (define_insn "*mulsi3addsi_compare0_v6"
1467 [(set (reg:CC_NOOV CC_REGNUM)
1470 (match_operand:SI 2 "s_register_operand" "r")
1471 (match_operand:SI 1 "s_register_operand" "r"))
1472 (match_operand:SI 3 "s_register_operand" "r"))
1474 (set (match_operand:SI 0 "s_register_operand" "=r")
1475 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1477 "TARGET_ARM && arm_arch6 && optimize_size"
1478 "mla%.\\t%0, %2, %1, %3"
1479 [(set_attr "conds" "set")
1480 (set_attr "insn" "mlas")]
;; Flags-only MLAS forms with a scratch destination.
1483 (define_insn "*mulsi3addsi_compare0_scratch"
1484 [(set (reg:CC_NOOV CC_REGNUM)
1487 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1488 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1489 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1491 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1492 "TARGET_ARM && !arm_arch6"
1493 "mla%.\\t%0, %2, %1, %3"
1494 [(set_attr "conds" "set")
1495 (set_attr "insn" "mlas")]
1498 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1499 [(set (reg:CC_NOOV CC_REGNUM)
1502 (match_operand:SI 2 "s_register_operand" "r")
1503 (match_operand:SI 1 "s_register_operand" "r"))
1504 (match_operand:SI 3 "s_register_operand" "r"))
1506 (clobber (match_scratch:SI 0 "=r"))]
1507 "TARGET_ARM && arm_arch6 && optimize_size"
1508 "mla%.\\t%0, %2, %1, %3"
1509 [(set_attr "conds" "set")
1510 (set_attr "insn" "mlas")]
;; MLS (multiply-and-subtract): op0 = op3 - op2 * op1; Thumb-2 only.
1513 (define_insn "*mulsi3subsi"
1514 [(set (match_operand:SI 0 "s_register_operand" "=r")
1516 (match_operand:SI 3 "s_register_operand" "r")
1517 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1518 (match_operand:SI 1 "s_register_operand" "r"))))]
1519 "TARGET_32BIT && arm_arch_thumb2"
1520 "mls%?\\t%0, %2, %1, %3"
1521 [(set_attr "insn" "mla")
1522 (set_attr "predicable" "yes")]
;; 32x32+64 -> 64 multiply-accumulate (SMLAL) and the 32x32 -> 64
;; widening multiplies (SMULL/UMULL).  As elsewhere, the !arm_arch6
;; forms need earlyclobbered outputs; the v6 forms do not.
1525 (define_expand "maddsidi4"
1526 [(set (match_operand:DI 0 "s_register_operand" "")
1529 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1530 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1531 (match_operand:DI 3 "s_register_operand" "")))]
1532 "TARGET_32BIT && arm_arch3m"
1535 (define_insn "*mulsidi3adddi"
1536 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1539 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1540 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1541 (match_operand:DI 1 "s_register_operand" "0")))]
1542 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1543 "smlal%?\\t%Q0, %R0, %3, %2"
1544 [(set_attr "insn" "smlal")
1545 (set_attr "predicable" "yes")]
1548 (define_insn "*mulsidi3adddi_v6"
1549 [(set (match_operand:DI 0 "s_register_operand" "=r")
1552 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1553 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1554 (match_operand:DI 1 "s_register_operand" "0")))]
1555 "TARGET_32BIT && arm_arch6"
1556 "smlal%?\\t%Q0, %R0, %3, %2"
1557 [(set_attr "insn" "smlal")
1558 (set_attr "predicable" "yes")]
1561 ;; 32x32->64 widening multiply.
1562 ;; As with mulsi3, the only difference between the v3-5 and v6+
1563 ;; versions of these patterns is the requirement that the output not
1564 ;; overlap the inputs, but that still means we have to have a named
1565 ;; expander and two different starred insns.
1567 (define_expand "mulsidi3"
1568 [(set (match_operand:DI 0 "s_register_operand" "")
1570 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1571 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1572 "TARGET_32BIT && arm_arch3m"
1576 (define_insn "*mulsidi3_nov6"
1577 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1581 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1582 "smull%?\\t%Q0, %R0, %1, %2"
1583 [(set_attr "insn" "smull")
1584 (set_attr "predicable" "yes")]
1587 (define_insn "*mulsidi3_v6"
1588 [(set (match_operand:DI 0 "s_register_operand" "=r")
1590 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1591 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1592 "TARGET_32BIT && arm_arch6"
1593 "smull%?\\t%Q0, %R0, %1, %2"
1594 [(set_attr "insn" "smull")
1595 (set_attr "predicable" "yes")]
;; Unsigned counterparts of the above.
1598 (define_expand "umulsidi3"
1599 [(set (match_operand:DI 0 "s_register_operand" "")
1601 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1602 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1603 "TARGET_32BIT && arm_arch3m"
1607 (define_insn "*umulsidi3_nov6"
1608 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1610 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1611 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1612 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1613 "umull%?\\t%Q0, %R0, %1, %2"
1614 [(set_attr "insn" "umull")
1615 (set_attr "predicable" "yes")]
1618 (define_insn "*umulsidi3_v6"
1619 [(set (match_operand:DI 0 "s_register_operand" "=r")
1621 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1622 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1623 "TARGET_32BIT && arm_arch6"
1624 "umull%?\\t%Q0, %R0, %1, %2"
1625 [(set_attr "insn" "umull")
1626 (set_attr "predicable" "yes")]
;; Unsigned 32x32+64 -> 64 multiply-accumulate (UMLAL).
1629 (define_expand "umaddsidi4"
1630 [(set (match_operand:DI 0 "s_register_operand" "")
1633 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1634 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1635 (match_operand:DI 3 "s_register_operand" "")))]
1636 "TARGET_32BIT && arm_arch3m"
1639 (define_insn "*umulsidi3adddi"
1640 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1643 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1644 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1645 (match_operand:DI 1 "s_register_operand" "0")))]
1646 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1647 "umlal%?\\t%Q0, %R0, %3, %2"
1648 [(set_attr "insn" "umlal")
1649 (set_attr "predicable" "yes")]
1652 (define_insn "*umulsidi3adddi_v6"
1653 [(set (match_operand:DI 0 "s_register_operand" "=r")
1656 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1657 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1658 (match_operand:DI 1 "s_register_operand" "0")))]
1659 "TARGET_32BIT && arm_arch6"
1660 "umlal%?\\t%Q0, %R0, %3, %2"
1661 [(set_attr "insn" "umlal")
1662 (set_attr "predicable" "yes")]
;; High-part multiplies: SMULL/UMULL with the low word sent to a scratch
;; register, leaving only the top 32 bits of the 64-bit product.
1665 (define_expand "smulsi3_highpart"
1667 [(set (match_operand:SI 0 "s_register_operand" "")
1671 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1672 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1674 (clobber (match_scratch:SI 3 ""))])]
1675 "TARGET_32BIT && arm_arch3m"
1679 (define_insn "*smulsi3_highpart_nov6"
1680 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1684 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1685 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1687 (clobber (match_scratch:SI 3 "=&r,&r"))]
1688 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1689 "smull%?\\t%3, %0, %2, %1"
1690 [(set_attr "insn" "smull")
1691 (set_attr "predicable" "yes")]
1694 (define_insn "*smulsi3_highpart_v6"
1695 [(set (match_operand:SI 0 "s_register_operand" "=r")
1699 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1700 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1702 (clobber (match_scratch:SI 3 "=r"))]
1703 "TARGET_32BIT && arm_arch6"
1704 "smull%?\\t%3, %0, %2, %1"
1705 [(set_attr "insn" "smull")
1706 (set_attr "predicable" "yes")]
;; Unsigned high-part multiply.
1709 (define_expand "umulsi3_highpart"
1711 [(set (match_operand:SI 0 "s_register_operand" "")
1715 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1716 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1718 (clobber (match_scratch:SI 3 ""))])]
1719 "TARGET_32BIT && arm_arch3m"
1723 (define_insn "*umulsi3_highpart_nov6"
1724 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1728 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1729 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1731 (clobber (match_scratch:SI 3 "=&r,&r"))]
1732 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1733 "umull%?\\t%3, %0, %2, %1"
1734 [(set_attr "insn" "umull")
1735 (set_attr "predicable" "yes")]
1738 (define_insn "*umulsi3_highpart_v6"
1739 [(set (match_operand:SI 0 "s_register_operand" "=r")
1743 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1744 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1746 (clobber (match_scratch:SI 3 "=r"))]
1747 "TARGET_32BIT && arm_arch6"
1748 "umull%?\\t%3, %0, %2, %1"
1749 [(set_attr "insn" "umull")
1750 (set_attr "predicable" "yes")]
;; DSP halfword multiplies (SMULxy family): 16x16 -> 32 signed products,
;; selecting bottom (sign_extend) or top (ashiftrt 16) halves.
1753 (define_insn "mulhisi3"
1754 [(set (match_operand:SI 0 "s_register_operand" "=r")
1755 (mult:SI (sign_extend:SI
1756 (match_operand:HI 1 "s_register_operand" "%r"))
1758 (match_operand:HI 2 "s_register_operand" "r"))))]
1759 "TARGET_DSP_MULTIPLY"
1760 "smulbb%?\\t%0, %1, %2"
1761 [(set_attr "insn" "smulxy")
1762 (set_attr "predicable" "yes")]
;; Top half of op1 times bottom half of op2.
1765 (define_insn "*mulhisi3tb"
1766 [(set (match_operand:SI 0 "s_register_operand" "=r")
1767 (mult:SI (ashiftrt:SI
1768 (match_operand:SI 1 "s_register_operand" "r")
1771 (match_operand:HI 2 "s_register_operand" "r"))))]
1772 "TARGET_DSP_MULTIPLY"
1773 "smultb%?\\t%0, %1, %2"
1774 [(set_attr "insn" "smulxy")
1775 (set_attr "predicable" "yes")]
;; Bottom half of op1 times top half of op2.
1778 (define_insn "*mulhisi3bt"
1779 [(set (match_operand:SI 0 "s_register_operand" "=r")
1780 (mult:SI (sign_extend:SI
1781 (match_operand:HI 1 "s_register_operand" "r"))
1783 (match_operand:SI 2 "s_register_operand" "r")
1785 "TARGET_DSP_MULTIPLY"
1786 "smulbt%?\\t%0, %1, %2"
1787 [(set_attr "insn" "smulxy")
1788 (set_attr "predicable" "yes")]
;; Top halves of both operands.
1791 (define_insn "*mulhisi3tt"
1792 [(set (match_operand:SI 0 "s_register_operand" "=r")
1793 (mult:SI (ashiftrt:SI
1794 (match_operand:SI 1 "s_register_operand" "r")
1797 (match_operand:SI 2 "s_register_operand" "r")
1799 "TARGET_DSP_MULTIPLY"
1800 "smultt%?\\t%0, %1, %2"
1801 [(set_attr "insn" "smulxy")
1802 (set_attr "predicable" "yes")]
;; 16x16+32 -> 32 multiply-accumulate (SMLABB).
1805 (define_insn "maddhisi4"
1806 [(set (match_operand:SI 0 "s_register_operand" "=r")
1807 (plus:SI (mult:SI (sign_extend:SI
1808 (match_operand:HI 1 "s_register_operand" "r"))
1810 (match_operand:HI 2 "s_register_operand" "r")))
1811 (match_operand:SI 3 "s_register_operand" "r")))]
1812 "TARGET_DSP_MULTIPLY"
1813 "smlabb%?\\t%0, %1, %2, %3"
1814 [(set_attr "insn" "smlaxy")
1815 (set_attr "predicable" "yes")]
;; 16x16+64 -> 64 multiply-accumulate (SMLALBB).
1818 (define_insn "*maddhidi4"
1819 [(set (match_operand:DI 0 "s_register_operand" "=r")
1821 (mult:DI (sign_extend:DI
1822 (match_operand:HI 1 "s_register_operand" "r"))
1824 (match_operand:HI 2 "s_register_operand" "r")))
1825 (match_operand:DI 3 "s_register_operand" "0")))]
1826 "TARGET_DSP_MULTIPLY"
1827 "smlalbb%?\\t%Q0, %R0, %1, %2"
1828 [(set_attr "insn" "smlalxy")
1829 (set_attr "predicable" "yes")])
;; FP multiply/divide/mod expanders; the actual insns live in the
;; FPA/VFP/Cirrus sub-description files.
1831 (define_expand "mulsf3"
1832 [(set (match_operand:SF 0 "s_register_operand" "")
1833 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1834 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1835 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; NOTE(review): the `if (TARGET_MAVERICK ...` guard line is missing here.
1838 && !cirrus_fp_register (operands[2], SFmode))
1839 operands[2] = force_reg (SFmode, operands[2]);
1842 (define_expand "muldf3"
1843 [(set (match_operand:DF 0 "s_register_operand" "")
1844 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1845 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1846 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1849 && !cirrus_fp_register (operands[2], DFmode))
1850 operands[2] = force_reg (DFmode, operands[2]);
;; Division: only FPA and VFP provide hardware divide.
1855 (define_expand "divsf3"
1856 [(set (match_operand:SF 0 "s_register_operand" "")
1857 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1858 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1859 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1862 (define_expand "divdf3"
1863 [(set (match_operand:DF 0 "s_register_operand" "")
1864 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1865 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1866 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; Modulo: FPA only.
1871 (define_expand "modsf3"
1872 [(set (match_operand:SF 0 "s_register_operand" "")
1873 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1874 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1875 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1878 (define_expand "moddf3"
1879 [(set (match_operand:DF 0 "s_register_operand" "")
1880 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1881 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1882 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1885 ;; Boolean and,ior,xor insns
1887 ;; Split up double word logical operations
1889 ;; Split up simple DImode logical operations. Simply perform the logical
1890 ;; operation on the upper and lower halves of the registers.
;; Post-reload split: one DImode AND/IOR/XOR becomes two independent
;; SImode operations on the low and high halves.
1892 [(set (match_operand:DI 0 "s_register_operand" "")
1893 (match_operator:DI 6 "logical_binary_operator"
1894 [(match_operand:DI 1 "s_register_operand" "")
1895 (match_operand:DI 2 "s_register_operand" "")]))]
1896 "TARGET_32BIT && reload_completed
1897 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1898 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1899 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1900 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1903 operands[3] = gen_highpart (SImode, operands[0]);
1904 operands[0] = gen_lowpart (SImode, operands[0]);
1905 operands[4] = gen_highpart (SImode, operands[1]);
1906 operands[1] = gen_lowpart (SImode, operands[1]);
1907 operands[5] = gen_highpart (SImode, operands[2]);
1908 operands[2] = gen_lowpart (SImode, operands[2]);
;; Variant with a sign-extended SI operand: the high half uses
;; (op2 >> 31) as the second source.
1913 [(set (match_operand:DI 0 "s_register_operand" "")
1914 (match_operator:DI 6 "logical_binary_operator"
1915 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1916 (match_operand:DI 1 "s_register_operand" "")]))]
1917 "TARGET_32BIT && reload_completed"
1918 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1919 (set (match_dup 3) (match_op_dup:SI 6
1920 [(ashiftrt:SI (match_dup 2) (const_int 31))
1924 operands[3] = gen_highpart (SImode, operands[0]);
1925 operands[0] = gen_lowpart (SImode, operands[0]);
1926 operands[4] = gen_highpart (SImode, operands[1]);
1927 operands[1] = gen_lowpart (SImode, operands[1]);
1928 operands[5] = gen_highpart (SImode, operands[2]);
1929 operands[2] = gen_lowpart (SImode, operands[2]);
1933 ;; The zero extend of operand 2 means we can just copy the high part of
1934 ;; operand 1 into operand 0.
1936 [(set (match_operand:DI 0 "s_register_operand" "")
1938 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1939 (match_operand:DI 1 "s_register_operand" "")))]
1940 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1941 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1942 (set (match_dup 3) (match_dup 4))]
1945 operands[4] = gen_highpart (SImode, operands[1]);
1946 operands[3] = gen_highpart (SImode, operands[0]);
1947 operands[0] = gen_lowpart (SImode, operands[0]);
1948 operands[1] = gen_lowpart (SImode, operands[1]);
1952 ;; The zero extend of operand 2 means we can just copy the high part of
1953 ;; operand 1 into operand 0.
1955 [(set (match_operand:DI 0 "s_register_operand" "")
1957 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1958 (match_operand:DI 1 "s_register_operand" "")))]
1959 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1960 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1961 (set (match_dup 3) (match_dup 4))]
1964 operands[4] = gen_highpart (SImode, operands[1]);
1965 operands[3] = gen_highpart (SImode, operands[0]);
1966 operands[0] = gen_lowpart (SImode, operands[0]);
1967 operands[1] = gen_lowpart (SImode, operands[1]);
;; 64-bit AND.  The register/register insn splits elsewhere into two
;; SImode ANDs; these are the core-register fallbacks (no NEON/iWMMXt).
1971 (define_expand "anddi3"
1972 [(set (match_operand:DI 0 "s_register_operand" "")
1973 (and:DI (match_operand:DI 1 "s_register_operand" "")
1974 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1979 (define_insn "*anddi3_insn"
1980 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1981 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1982 (match_operand:DI 2 "s_register_operand" "r,r")))]
1983 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1985 [(set_attr "length" "8")]
;; AND with a zero-extended SI: the result's high word is always zero.
1988 (define_insn_and_split "*anddi_zesidi_di"
1989 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1990 (and:DI (zero_extend:DI
1991 (match_operand:SI 2 "s_register_operand" "r,r"))
1992 (match_operand:DI 1 "s_register_operand" "0,r")))]
1995 "TARGET_32BIT && reload_completed"
1996 ; The zero extend of operand 2 clears the high word of the output
1998 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1999 (set (match_dup 3) (const_int 0))]
2002 operands[3] = gen_highpart (SImode, operands[0]);
2003 operands[0] = gen_lowpart (SImode, operands[0]);
2004 operands[1] = gen_lowpart (SImode, operands[1]);
2006 [(set_attr "length" "8")]
;; AND with a sign-extended SI operand.
2009 (define_insn "*anddi_sesdi_di"
2010 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2011 (and:DI (sign_extend:DI
2012 (match_operand:SI 2 "s_register_operand" "r,r"))
2013 (match_operand:DI 1 "s_register_operand" "0,r")))]
2016 [(set_attr "length" "8")]
2019 (define_expand "andsi3"
2020 [(set (match_operand:SI 0 "s_register_operand" "")
2021 (and:SI (match_operand:SI 1 "s_register_operand" "")
2022 (match_operand:SI 2 "reg_or_int_operand" "")))]
2027 if (GET_CODE (operands[2]) == CONST_INT)
2029 if (INTVAL (operands[2]) == 255 && arm_arch6)
2031 operands[1] = convert_to_mode (QImode, operands[1], 1);
2032 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2036 arm_split_constant (AND, SImode, NULL_RTX,
2037 INTVAL (operands[2]), operands[0],
2039 optimize && can_create_pseudo_p ());
2044 else /* TARGET_THUMB1 */
2046 if (GET_CODE (operands[2]) != CONST_INT)
2048 rtx tmp = force_reg (SImode, operands[2]);
2049 if (rtx_equal_p (operands[0], operands[1]))
2053 operands[2] = operands[1];
2061 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2063 operands[2] = force_reg (SImode,
2064 GEN_INT (~INTVAL (operands[2])));
2066 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2071 for (i = 9; i <= 31; i++)
2073 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2075 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2079 else if ((((HOST_WIDE_INT) 1) << i) - 1
2080 == ~INTVAL (operands[2]))
2082 rtx shift = GEN_INT (i);
2083 rtx reg = gen_reg_rtx (SImode);
2085 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2086 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2092 operands[2] = force_reg (SImode, operands[2]);
2098 ; ??? Check split length for Thumb-2
2099 (define_insn_and_split "*arm_andsi3_insn"
2100 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2101 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2102 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2106 bic%?\\t%0, %1, #%B2
2109 && GET_CODE (operands[2]) == CONST_INT
2110 && !(const_ok_for_arm (INTVAL (operands[2]))
2111 || const_ok_for_arm (~INTVAL (operands[2])))"
2112 [(clobber (const_int 0))]
2114 arm_split_constant (AND, SImode, curr_insn,
2115 INTVAL (operands[2]), operands[0], operands[1], 0);
2118 [(set_attr "length" "4,4,16")
2119 (set_attr "predicable" "yes")]
2122 (define_insn "*thumb1_andsi3_insn"
2123 [(set (match_operand:SI 0 "register_operand" "=l")
2124 (and:SI (match_operand:SI 1 "register_operand" "%0")
2125 (match_operand:SI 2 "register_operand" "l")))]
2128 [(set_attr "length" "2")
2129 (set_attr "conds" "set")])
2131 (define_insn "*andsi3_compare0"
2132 [(set (reg:CC_NOOV CC_REGNUM)
2134 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2135 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2137 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2138 (and:SI (match_dup 1) (match_dup 2)))]
2142 bic%.\\t%0, %1, #%B2"
2143 [(set_attr "conds" "set")]
2146 (define_insn "*andsi3_compare0_scratch"
2147 [(set (reg:CC_NOOV CC_REGNUM)
2149 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2150 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2152 (clobber (match_scratch:SI 2 "=X,r"))]
2156 bic%.\\t%2, %0, #%B1"
2157 [(set_attr "conds" "set")]
2160 (define_insn "*zeroextractsi_compare0_scratch"
2161 [(set (reg:CC_NOOV CC_REGNUM)
2162 (compare:CC_NOOV (zero_extract:SI
2163 (match_operand:SI 0 "s_register_operand" "r")
2164 (match_operand 1 "const_int_operand" "n")
2165 (match_operand 2 "const_int_operand" "n"))
2168 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2169 && INTVAL (operands[1]) > 0
2170 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2171 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2173 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2174 << INTVAL (operands[2]));
2175 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2178 [(set_attr "conds" "set")]
2181 (define_insn_and_split "*ne_zeroextractsi"
2182 [(set (match_operand:SI 0 "s_register_operand" "=r")
2183 (ne:SI (zero_extract:SI
2184 (match_operand:SI 1 "s_register_operand" "r")
2185 (match_operand:SI 2 "const_int_operand" "n")
2186 (match_operand:SI 3 "const_int_operand" "n"))
2188 (clobber (reg:CC CC_REGNUM))]
2190 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2191 && INTVAL (operands[2]) > 0
2192 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2193 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2196 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2197 && INTVAL (operands[2]) > 0
2198 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2199 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2200 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2201 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2203 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2205 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2206 (match_dup 0) (const_int 1)))]
2208 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2209 << INTVAL (operands[3]));
2211 [(set_attr "conds" "clob")
2212 (set (attr "length")
2213 (if_then_else (eq_attr "is_thumb" "yes")
2218 (define_insn_and_split "*ne_zeroextractsi_shifted"
2219 [(set (match_operand:SI 0 "s_register_operand" "=r")
2220 (ne:SI (zero_extract:SI
2221 (match_operand:SI 1 "s_register_operand" "r")
2222 (match_operand:SI 2 "const_int_operand" "n")
2225 (clobber (reg:CC CC_REGNUM))]
2229 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2230 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2232 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2234 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2235 (match_dup 0) (const_int 1)))]
2237 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2239 [(set_attr "conds" "clob")
2240 (set_attr "length" "8")]
2243 (define_insn_and_split "*ite_ne_zeroextractsi"
2244 [(set (match_operand:SI 0 "s_register_operand" "=r")
2245 (if_then_else:SI (ne (zero_extract:SI
2246 (match_operand:SI 1 "s_register_operand" "r")
2247 (match_operand:SI 2 "const_int_operand" "n")
2248 (match_operand:SI 3 "const_int_operand" "n"))
2250 (match_operand:SI 4 "arm_not_operand" "rIK")
2252 (clobber (reg:CC CC_REGNUM))]
2254 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2255 && INTVAL (operands[2]) > 0
2256 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2257 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2258 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2261 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2262 && INTVAL (operands[2]) > 0
2263 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2264 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2265 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2266 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2267 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2269 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2271 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2272 (match_dup 0) (match_dup 4)))]
2274 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2275 << INTVAL (operands[3]));
2277 [(set_attr "conds" "clob")
2278 (set_attr "length" "8")]
2281 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2282 [(set (match_operand:SI 0 "s_register_operand" "=r")
2283 (if_then_else:SI (ne (zero_extract:SI
2284 (match_operand:SI 1 "s_register_operand" "r")
2285 (match_operand:SI 2 "const_int_operand" "n")
2288 (match_operand:SI 3 "arm_not_operand" "rIK")
2290 (clobber (reg:CC CC_REGNUM))]
2291 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2293 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2294 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2295 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2297 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2299 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2300 (match_dup 0) (match_dup 3)))]
2302 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2304 [(set_attr "conds" "clob")
2305 (set_attr "length" "8")]
2309 [(set (match_operand:SI 0 "s_register_operand" "")
2310 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2311 (match_operand:SI 2 "const_int_operand" "")
2312 (match_operand:SI 3 "const_int_operand" "")))
2313 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2315 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2316 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2318 HOST_WIDE_INT temp = INTVAL (operands[2]);
2320 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2321 operands[3] = GEN_INT (32 - temp);
2325 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
2327 [(set (match_operand:SI 0 "s_register_operand" "")
2328 (match_operator:SI 1 "shiftable_operator"
2329 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2330 (match_operand:SI 3 "const_int_operand" "")
2331 (match_operand:SI 4 "const_int_operand" ""))
2332 (match_operand:SI 5 "s_register_operand" "")]))
2333 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2335 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2338 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2341 HOST_WIDE_INT temp = INTVAL (operands[3]);
2343 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2344 operands[4] = GEN_INT (32 - temp);
2349 [(set (match_operand:SI 0 "s_register_operand" "")
2350 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2351 (match_operand:SI 2 "const_int_operand" "")
2352 (match_operand:SI 3 "const_int_operand" "")))]
2354 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2355 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2357 HOST_WIDE_INT temp = INTVAL (operands[2]);
2359 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2360 operands[3] = GEN_INT (32 - temp);
2365 [(set (match_operand:SI 0 "s_register_operand" "")
2366 (match_operator:SI 1 "shiftable_operator"
2367 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2368 (match_operand:SI 3 "const_int_operand" "")
2369 (match_operand:SI 4 "const_int_operand" ""))
2370 (match_operand:SI 5 "s_register_operand" "")]))
2371 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2373 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2376 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2379 HOST_WIDE_INT temp = INTVAL (operands[3]);
2381 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2382 operands[4] = GEN_INT (32 - temp);
2386 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2387 ;;; represented by the bitfield, then this will produce incorrect results.
2388 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2389 ;;; which have a real bit-field insert instruction, the truncation happens
2390 ;;; in the bit-field insert instruction itself. Since arm does not have a
2391 ;;; bit-field insert instruction, we would have to emit code here to truncate
2392 ;;; the value before we insert. This loses some of the advantage of having
2393 ;;; this insv pattern, so this pattern needs to be reevaluated.
2395 (define_expand "insv"
2396 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2397 (match_operand:SI 1 "general_operand" "")
2398 (match_operand:SI 2 "general_operand" ""))
2399 (match_operand:SI 3 "reg_or_int_operand" ""))]
2400 "TARGET_ARM || arm_arch_thumb2"
2403 int start_bit = INTVAL (operands[2]);
2404 int width = INTVAL (operands[1]);
2405 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2406 rtx target, subtarget;
2408 if (arm_arch_thumb2)
2410 bool use_bfi = TRUE;
2412 if (GET_CODE (operands[3]) == CONST_INT)
2414 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2418 emit_insn (gen_insv_zero (operands[0], operands[1],
2423 /* See if the set can be done with a single orr instruction. */
2424 if (val == mask && const_ok_for_arm (val << start_bit))
2430 if (GET_CODE (operands[3]) != REG)
2431 operands[3] = force_reg (SImode, operands[3]);
2433 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2439 target = copy_rtx (operands[0]);
2440 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2441 subreg as the final target. */
2442 if (GET_CODE (target) == SUBREG)
2444 subtarget = gen_reg_rtx (SImode);
2445 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2446 < GET_MODE_SIZE (SImode))
2447 target = SUBREG_REG (target);
2452 if (GET_CODE (operands[3]) == CONST_INT)
2454 /* Since we are inserting a known constant, we may be able to
2455 reduce the number of bits that we have to clear so that
2456 the mask becomes simple. */
2457 /* ??? This code does not check to see if the new mask is actually
2458 simpler. It may not be. */
2459 rtx op1 = gen_reg_rtx (SImode);
2460 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2461 start of this pattern. */
2462 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2463 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2465 emit_insn (gen_andsi3 (op1, operands[0],
2466 gen_int_mode (~mask2, SImode)));
2467 emit_insn (gen_iorsi3 (subtarget, op1,
2468 gen_int_mode (op3_value << start_bit, SImode)));
2470 else if (start_bit == 0
2471 && !(const_ok_for_arm (mask)
2472 || const_ok_for_arm (~mask)))
2474 /* A Trick, since we are setting the bottom bits in the word,
2475 we can shift operand[3] up, operand[0] down, OR them together
2476 and rotate the result back again. This takes 3 insns, and
2477 the third might be mergeable into another op. */
2478 /* The shift up copes with the possibility that operand[3] is
2479 wider than the bitfield. */
2480 rtx op0 = gen_reg_rtx (SImode);
2481 rtx op1 = gen_reg_rtx (SImode);
2483 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2484 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2485 emit_insn (gen_iorsi3 (op1, op1, op0));
2486 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2488 else if ((width + start_bit == 32)
2489 && !(const_ok_for_arm (mask)
2490 || const_ok_for_arm (~mask)))
2492 /* Similar trick, but slightly less efficient. */
2494 rtx op0 = gen_reg_rtx (SImode);
2495 rtx op1 = gen_reg_rtx (SImode);
2497 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2498 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2499 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2500 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2504 rtx op0 = gen_int_mode (mask, SImode);
2505 rtx op1 = gen_reg_rtx (SImode);
2506 rtx op2 = gen_reg_rtx (SImode);
2508 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2510 rtx tmp = gen_reg_rtx (SImode);
2512 emit_insn (gen_movsi (tmp, op0));
2516 /* Mask out any bits in operand[3] that are not needed. */
2517 emit_insn (gen_andsi3 (op1, operands[3], op0));
2519 if (GET_CODE (op0) == CONST_INT
2520 && (const_ok_for_arm (mask << start_bit)
2521 || const_ok_for_arm (~(mask << start_bit))))
2523 op0 = gen_int_mode (~(mask << start_bit), SImode);
2524 emit_insn (gen_andsi3 (op2, operands[0], op0));
2528 if (GET_CODE (op0) == CONST_INT)
2530 rtx tmp = gen_reg_rtx (SImode);
2532 emit_insn (gen_movsi (tmp, op0));
2537 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2539 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2543 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2545 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2548 if (subtarget != target)
2550 /* If TARGET is still a SUBREG, then it must be wider than a word,
2551 so we must be careful only to set the subword we were asked to. */
2552 if (GET_CODE (target) == SUBREG)
2553 emit_move_insn (target, subtarget);
2555 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Insert zero into a bit-field of operand 0 (width = operand 1,
;; start bit = operand 2); emitted by the "insv" expander above when
;; the inserted constant masks to zero.  NOTE(review): the condition
;; and output template lines are not visible in this elided listing --
;; presumably the Thumb-2 BFC (bit-field clear) instruction; confirm
;; against the full arm.md.
2562 (define_insn "insv_zero"
2563 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2564 (match_operand:SI 1 "const_int_operand" "M")
2565 (match_operand:SI 2 "const_int_operand" "M"))
2569 [(set_attr "length" "4")
2570 (set_attr "predicable" "yes")]
;; Thumb-2 bit-field insert: BFI copies the low bits of operand 3
;; into operand 0, starting at bit %2 with width %1 (both must be
;; const_ints).  Emitted by the "insv" expander above.
;; NOTE(review): the enabling-condition line is not visible in this
;; elided listing -- confirm it requires arm_arch_thumb2.
2573 (define_insn "insv_t2"
2574 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2575 (match_operand:SI 1 "const_int_operand" "M")
2576 (match_operand:SI 2 "const_int_operand" "M"))
2577 (match_operand:SI 3 "s_register_operand" "r"))]
2579 "bfi%?\t%0, %3, %2, %1"
2580 [(set_attr "length" "4")
2581 (set_attr "predicable" "yes")]
2584 ; constants for op 2 will never be given to these patterns.
2585 (define_insn_and_split "*anddi_notdi_di"
2586 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2587 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2588 (match_operand:DI 2 "s_register_operand" "r,0")))]
2591 "TARGET_32BIT && reload_completed
2592 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2593 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2594 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2595 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2598 operands[3] = gen_highpart (SImode, operands[0]);
2599 operands[0] = gen_lowpart (SImode, operands[0]);
2600 operands[4] = gen_highpart (SImode, operands[1]);
2601 operands[1] = gen_lowpart (SImode, operands[1]);
2602 operands[5] = gen_highpart (SImode, operands[2]);
2603 operands[2] = gen_lowpart (SImode, operands[2]);
2605 [(set_attr "length" "8")
2606 (set_attr "predicable" "yes")]
2609 (define_insn_and_split "*anddi_notzesidi_di"
2610 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2611 (and:DI (not:DI (zero_extend:DI
2612 (match_operand:SI 2 "s_register_operand" "r,r")))
2613 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2616 bic%?\\t%Q0, %Q1, %2
2618 ; (not (zero_extend ...)) allows us to just copy the high word from
2619 ; operand1 to operand0.
2622 && operands[0] != operands[1]"
2623 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2624 (set (match_dup 3) (match_dup 4))]
2627 operands[3] = gen_highpart (SImode, operands[0]);
2628 operands[0] = gen_lowpart (SImode, operands[0]);
2629 operands[4] = gen_highpart (SImode, operands[1]);
2630 operands[1] = gen_lowpart (SImode, operands[1]);
2632 [(set_attr "length" "4,8")
2633 (set_attr "predicable" "yes")]
2636 (define_insn_and_split "*anddi_notsesidi_di"
2637 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2638 (and:DI (not:DI (sign_extend:DI
2639 (match_operand:SI 2 "s_register_operand" "r,r")))
2640 (match_operand:DI 1 "s_register_operand" "0,r")))]
2643 "TARGET_32BIT && reload_completed"
2644 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2645 (set (match_dup 3) (and:SI (not:SI
2646 (ashiftrt:SI (match_dup 2) (const_int 31)))
2650 operands[3] = gen_highpart (SImode, operands[0]);
2651 operands[0] = gen_lowpart (SImode, operands[0]);
2652 operands[4] = gen_highpart (SImode, operands[1]);
2653 operands[1] = gen_lowpart (SImode, operands[1]);
2655 [(set_attr "length" "8")
2656 (set_attr "predicable" "yes")]
;; AND with a complemented operand collapses to a single BIC
;; (bit-clear): %0 = %1 & ~%2.  This is a named pattern because the
;; "insv" expander above emits it directly via gen_andsi_notsi_si.
2659 (define_insn "andsi_notsi_si"
2660 [(set (match_operand:SI 0 "s_register_operand" "=r")
2661 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2662 (match_operand:SI 1 "s_register_operand" "r")))]
2664 "bic%?\\t%0, %1, %2"
2665 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC is a two-operand destructive form: the AND source
;; (operand 2) is tied to the destination (constraint "0") and the
;; complemented value is operand 1.  Sets the condition codes
;; ("conds" "set"), as all Thumb-1 data-processing insns do.
;; NOTE(review): the output template line is not visible in this
;; elided listing.
2668 (define_insn "thumb1_bicsi3"
2669 [(set (match_operand:SI 0 "register_operand" "=l")
2670 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2671 (match_operand:SI 2 "register_operand" "0")))]
2674 [(set_attr "length" "2")
2675 (set_attr "conds" "set")])
;; BIC with a shifted second operand: %0 = %1 & ~(%2 <shift> %3),
;; where %S4 prints the shift operator and amount from operator 4.
;; The insn type is "alu_shift" when the shift count is an immediate
;; and "alu_shift_reg" when it comes from a register (which has a
;; different cost/latency on some cores).
2677 (define_insn "andsi_not_shiftsi_si"
2678 [(set (match_operand:SI 0 "s_register_operand" "=r")
2679 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2680 [(match_operand:SI 2 "s_register_operand" "r")
2681 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2682 (match_operand:SI 1 "s_register_operand" "r")))]
2684 "bic%?\\t%0, %1, %2%S4"
2685 [(set_attr "predicable" "yes")
2686 (set_attr "shift" "2")
2687 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2688 (const_string "alu_shift")
2689 (const_string "alu_shift_reg")))]
2692 (define_insn "*andsi_notsi_si_compare0"
2693 [(set (reg:CC_NOOV CC_REGNUM)
2695 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2696 (match_operand:SI 1 "s_register_operand" "r"))
2698 (set (match_operand:SI 0 "s_register_operand" "=r")
2699 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2701 "bic%.\\t%0, %1, %2"
2702 [(set_attr "conds" "set")]
2705 (define_insn "*andsi_notsi_si_compare0_scratch"
2706 [(set (reg:CC_NOOV CC_REGNUM)
2708 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2709 (match_operand:SI 1 "s_register_operand" "r"))
2711 (clobber (match_scratch:SI 0 "=r"))]
2713 "bic%.\\t%0, %1, %2"
2714 [(set_attr "conds" "set")]
2717 (define_expand "iordi3"
2718 [(set (match_operand:DI 0 "s_register_operand" "")
2719 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2720 (match_operand:DI 2 "neon_logic_op2" "")))]
2725 (define_insn "*iordi3_insn"
2726 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2727 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2728 (match_operand:DI 2 "s_register_operand" "r,r")))]
2729 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2731 [(set_attr "length" "8")
2732 (set_attr "predicable" "yes")]
;; IOR of a DImode value with a zero-extended SImode value: only the
;; low word needs a real ORR (%Q = low word of the DI register pair);
;; ORing zero into the high word is a no-op.  Alternative 1 (dest
;; tied to operand 1, length 4) needs just the ORR; alternative 2
;; (length 8) must also move the high word.  NOTE(review): the second
;; template line is not visible in this elided listing.
2735 (define_insn "*iordi_zesidi_di"
2736 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2737 (ior:DI (zero_extend:DI
2738 (match_operand:SI 2 "s_register_operand" "r,r"))
2739 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2742 orr%?\\t%Q0, %Q1, %2
2744 [(set_attr "length" "4,8")
2745 (set_attr "predicable" "yes")]
2748 (define_insn "*iordi_sesidi_di"
2749 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2750 (ior:DI (sign_extend:DI
2751 (match_operand:SI 2 "s_register_operand" "r,r"))
2752 (match_operand:DI 1 "s_register_operand" "0,r")))]
2755 [(set_attr "length" "8")
2756 (set_attr "predicable" "yes")]
2759 (define_expand "iorsi3"
2760 [(set (match_operand:SI 0 "s_register_operand" "")
2761 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2762 (match_operand:SI 2 "reg_or_int_operand" "")))]
2765 if (GET_CODE (operands[2]) == CONST_INT)
2769 arm_split_constant (IOR, SImode, NULL_RTX,
2770 INTVAL (operands[2]), operands[0], operands[1],
2771 optimize && can_create_pseudo_p ());
2774 else /* TARGET_THUMB1 */
2776 rtx tmp = force_reg (SImode, operands[2]);
2777 if (rtx_equal_p (operands[0], operands[1]))
2781 operands[2] = operands[1];
2789 (define_insn_and_split "*iorsi3_insn"
2790 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2791 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2792 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2796 orn%?\\t%0, %1, #%B2
2799 && GET_CODE (operands[2]) == CONST_INT
2800 && !(const_ok_for_arm (INTVAL (operands[2]))
2801 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2802 [(clobber (const_int 0))]
2804 arm_split_constant (IOR, SImode, curr_insn,
2805 INTVAL (operands[2]), operands[0], operands[1], 0);
2808 [(set_attr "length" "4,4,16")
2809 (set_attr "arch" "32,t2,32")
2810 (set_attr "predicable" "yes")])
2812 (define_insn "*thumb1_iorsi3_insn"
2813 [(set (match_operand:SI 0 "register_operand" "=l")
2814 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2815 (match_operand:SI 2 "register_operand" "l")))]
2818 [(set_attr "length" "2")
2819 (set_attr "conds" "set")])
2822 [(match_scratch:SI 3 "r")
2823 (set (match_operand:SI 0 "arm_general_register_operand" "")
2824 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2825 (match_operand:SI 2 "const_int_operand" "")))]
2827 && !const_ok_for_arm (INTVAL (operands[2]))
2828 && const_ok_for_arm (~INTVAL (operands[2]))"
2829 [(set (match_dup 3) (match_dup 2))
2830 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2834 (define_insn "*iorsi3_compare0"
2835 [(set (reg:CC_NOOV CC_REGNUM)
2836 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2837 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2839 (set (match_operand:SI 0 "s_register_operand" "=r")
2840 (ior:SI (match_dup 1) (match_dup 2)))]
2842 "orr%.\\t%0, %1, %2"
2843 [(set_attr "conds" "set")]
2846 (define_insn "*iorsi3_compare0_scratch"
2847 [(set (reg:CC_NOOV CC_REGNUM)
2848 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2849 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2851 (clobber (match_scratch:SI 0 "=r"))]
2853 "orr%.\\t%0, %1, %2"
2854 [(set_attr "conds" "set")]
2857 (define_expand "xordi3"
2858 [(set (match_operand:DI 0 "s_register_operand" "")
2859 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2860 (match_operand:DI 2 "s_register_operand" "")))]
2865 (define_insn "*xordi3_insn"
2866 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2867 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2868 (match_operand:DI 2 "s_register_operand" "r,r")))]
2869 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2871 [(set_attr "length" "8")
2872 (set_attr "predicable" "yes")]
;; XOR of a DImode value with a zero-extended SImode value: only the
;; low word needs a real EOR (%Q = low word of the DI register pair);
;; XORing zero into the high word is a no-op.  Alternative 1 (dest
;; tied to operand 1, length 4) needs just the EOR; alternative 2
;; (length 8) must also move the high word.  NOTE(review): the second
;; template line is not visible in this elided listing.
2875 (define_insn "*xordi_zesidi_di"
2876 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2877 (xor:DI (zero_extend:DI
2878 (match_operand:SI 2 "s_register_operand" "r,r"))
2879 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2882 eor%?\\t%Q0, %Q1, %2
2884 [(set_attr "length" "4,8")
2885 (set_attr "predicable" "yes")]
2888 (define_insn "*xordi_sesidi_di"
2889 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2890 (xor:DI (sign_extend:DI
2891 (match_operand:SI 2 "s_register_operand" "r,r"))
2892 (match_operand:DI 1 "s_register_operand" "0,r")))]
2895 [(set_attr "length" "8")
2896 (set_attr "predicable" "yes")]
2899 (define_expand "xorsi3"
2900 [(set (match_operand:SI 0 "s_register_operand" "")
2901 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2902 (match_operand:SI 2 "reg_or_int_operand" "")))]
2904 "if (GET_CODE (operands[2]) == CONST_INT)
2908 arm_split_constant (XOR, SImode, NULL_RTX,
2909 INTVAL (operands[2]), operands[0], operands[1],
2910 optimize && can_create_pseudo_p ());
2913 else /* TARGET_THUMB1 */
2915 rtx tmp = force_reg (SImode, operands[2]);
2916 if (rtx_equal_p (operands[0], operands[1]))
2920 operands[2] = operands[1];
2927 (define_insn "*arm_xorsi3"
2928 [(set (match_operand:SI 0 "s_register_operand" "=r")
2929 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2930 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2932 "eor%?\\t%0, %1, %2"
2933 [(set_attr "predicable" "yes")]
2936 (define_insn "*thumb1_xorsi3_insn"
2937 [(set (match_operand:SI 0 "register_operand" "=l")
2938 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2939 (match_operand:SI 2 "register_operand" "l")))]
2942 [(set_attr "length" "2")
2943 (set_attr "conds" "set")])
2945 (define_insn "*xorsi3_compare0"
2946 [(set (reg:CC_NOOV CC_REGNUM)
2947 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2948 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2950 (set (match_operand:SI 0 "s_register_operand" "=r")
2951 (xor:SI (match_dup 1) (match_dup 2)))]
2953 "eor%.\\t%0, %1, %2"
2954 [(set_attr "conds" "set")]
2957 (define_insn "*xorsi3_compare0_scratch"
2958 [(set (reg:CC_NOOV CC_REGNUM)
2959 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2960 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2964 [(set_attr "conds" "set")]
2967 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2968 ; (NOT D) we can sometimes merge the final NOT into one of the following
2972 [(set (match_operand:SI 0 "s_register_operand" "")
2973 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2974 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2975 (match_operand:SI 3 "arm_rhs_operand" "")))
2976 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2978 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2979 (not:SI (match_dup 3))))
2980 (set (match_dup 0) (not:SI (match_dup 4)))]
2984 (define_insn "*andsi_iorsi3_notsi"
2985 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2986 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2987 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2988 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2990 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2991 [(set_attr "length" "8")
2992 (set_attr "ce_count" "2")
2993 (set_attr "predicable" "yes")]
2996 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2997 ; insns are available?
2999 [(set (match_operand:SI 0 "s_register_operand" "")
3000 (match_operator:SI 1 "logical_binary_operator"
3001 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3002 (match_operand:SI 3 "const_int_operand" "")
3003 (match_operand:SI 4 "const_int_operand" ""))
3004 (match_operator:SI 9 "logical_binary_operator"
3005 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3006 (match_operand:SI 6 "const_int_operand" ""))
3007 (match_operand:SI 7 "s_register_operand" "")])]))
3008 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3010 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3011 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3014 [(ashift:SI (match_dup 2) (match_dup 4))
3018 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3021 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3025 [(set (match_operand:SI 0 "s_register_operand" "")
3026 (match_operator:SI 1 "logical_binary_operator"
3027 [(match_operator:SI 9 "logical_binary_operator"
3028 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3029 (match_operand:SI 6 "const_int_operand" ""))
3030 (match_operand:SI 7 "s_register_operand" "")])
3031 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3032 (match_operand:SI 3 "const_int_operand" "")
3033 (match_operand:SI 4 "const_int_operand" ""))]))
3034 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3036 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3037 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3040 [(ashift:SI (match_dup 2) (match_dup 4))
3044 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3047 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3051 [(set (match_operand:SI 0 "s_register_operand" "")
3052 (match_operator:SI 1 "logical_binary_operator"
3053 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3054 (match_operand:SI 3 "const_int_operand" "")
3055 (match_operand:SI 4 "const_int_operand" ""))
3056 (match_operator:SI 9 "logical_binary_operator"
3057 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3058 (match_operand:SI 6 "const_int_operand" ""))
3059 (match_operand:SI 7 "s_register_operand" "")])]))
3060 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3062 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3063 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3066 [(ashift:SI (match_dup 2) (match_dup 4))
3070 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3073 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3077 [(set (match_operand:SI 0 "s_register_operand" "")
3078 (match_operator:SI 1 "logical_binary_operator"
3079 [(match_operator:SI 9 "logical_binary_operator"
3080 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3081 (match_operand:SI 6 "const_int_operand" ""))
3082 (match_operand:SI 7 "s_register_operand" "")])
3083 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3084 (match_operand:SI 3 "const_int_operand" "")
3085 (match_operand:SI 4 "const_int_operand" ""))]))
3086 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3088 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3089 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3092 [(ashift:SI (match_dup 2) (match_dup 4))
3096 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3099 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3103 ;; Minimum and maximum insns
3105 (define_expand "smaxsi3"
3107 (set (match_operand:SI 0 "s_register_operand" "")
3108 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3109 (match_operand:SI 2 "arm_rhs_operand" "")))
3110 (clobber (reg:CC CC_REGNUM))])]
3113 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3115 /* No need for a clobber of the condition code register here. */
3116 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3117 gen_rtx_SMAX (SImode, operands[1],
;; smax (x, 0) without a branch or CC clobber: "x asr #31" is an
;; all-ones mask exactly when x is negative, so BIC clears x to 0
;; in that case and leaves it unchanged otherwise.  Predicable.
3123 (define_insn "*smax_0"
3124 [(set (match_operand:SI 0 "s_register_operand" "=r")
3125 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3128 "bic%?\\t%0, %1, %1, asr #31"
3129 [(set_attr "predicable" "yes")]
;; smax (x, -1): ORR with the sign mask "x asr #31" sets every bit
;; (giving -1) when x is negative and is a no-op when x >= 0.
3132 (define_insn "*smax_m1"
3133 [(set (match_operand:SI 0 "s_register_operand" "=r")
3134 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3137 "orr%?\\t%0, %1, %1, asr #31"
3138 [(set_attr "predicable" "yes")]
;; General signed max: compare, then conditional move(s).  When the
;; destination is tied to operand 1 (alternative 0) only the "movlt"
;; is needed (8 bytes); otherwise both movge and movlt are emitted
;; (12 bytes).  Clobbers the condition codes.
3141 (define_insn "*arm_smax_insn"
3142 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3143 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3144 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3145 (clobber (reg:CC CC_REGNUM))]
3148 cmp\\t%1, %2\;movlt\\t%0, %2
3149 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3150 [(set_attr "conds" "clob")
3151 (set_attr "length" "8,12")]
3154 (define_expand "sminsi3"
3156 (set (match_operand:SI 0 "s_register_operand" "")
3157 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3158 (match_operand:SI 2 "arm_rhs_operand" "")))
3159 (clobber (reg:CC CC_REGNUM))])]
3162 if (operands[2] == const0_rtx)
3164 /* No need for a clobber of the condition code register here. */
3165 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3166 gen_rtx_SMIN (SImode, operands[1],
;; smin (x, 0): AND with the sign mask "x asr #31" keeps x when it is
;; negative and yields 0 when x >= 0 — the dual of the *smax_0 BIC form.
3172 (define_insn "*smin_0"
3173 [(set (match_operand:SI 0 "s_register_operand" "=r")
3174 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3177 "and%?\\t%0, %1, %1, asr #31"
3178 [(set_attr "predicable" "yes")]
;; General signed min, mirror image of *arm_smax_insn: cmp followed by
;; movge (and movlt when the destination is not tied to operand 1).
3181 (define_insn "*arm_smin_insn"
3182 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3183 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3184 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3185 (clobber (reg:CC CC_REGNUM))]
3188 cmp\\t%1, %2\;movge\\t%0, %2
3189 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3190 [(set_attr "conds" "clob")
3191 (set_attr "length" "8,12")]
3194 (define_expand "umaxsi3"
3196 (set (match_operand:SI 0 "s_register_operand" "")
3197 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3198 (match_operand:SI 2 "arm_rhs_operand" "")))
3199 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max using the unsigned condition codes (cc = carry clear,
;; i.e. below; cs = carry set, i.e. above-or-equal).  Three alternatives:
;; dest tied to op1, dest tied to op2, or fully untied (longest form).
3204 (define_insn "*arm_umaxsi3"
3205 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3206 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3207 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3208 (clobber (reg:CC CC_REGNUM))]
3211 cmp\\t%1, %2\;movcc\\t%0, %2
3212 cmp\\t%1, %2\;movcs\\t%0, %1
3213 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3214 [(set_attr "conds" "clob")
3215 (set_attr "length" "8,8,12")]
3218 (define_expand "uminsi3"
3220 (set (match_operand:SI 0 "s_register_operand" "")
3221 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3222 (match_operand:SI 2 "arm_rhs_operand" "")))
3223 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min — same structure as *arm_umaxsi3 with the cs/cc
;; condition suffixes swapped.
3228 (define_insn "*arm_uminsi3"
3229 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3230 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3231 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3232 (clobber (reg:CC CC_REGNUM))]
3235 cmp\\t%1, %2\;movcs\\t%0, %2
3236 cmp\\t%1, %2\;movcc\\t%0, %1
3237 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3238 [(set_attr "conds" "clob")
3239 (set_attr "length" "8,8,12")]
3242 (define_insn "*store_minmaxsi"
3243 [(set (match_operand:SI 0 "memory_operand" "=m")
3244 (match_operator:SI 3 "minmax_operator"
3245 [(match_operand:SI 1 "s_register_operand" "r")
3246 (match_operand:SI 2 "s_register_operand" "r")]))
3247 (clobber (reg:CC CC_REGNUM))]
3250 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3251 operands[1], operands[2]);
3252 output_asm_insn (\"cmp\\t%1, %2\", operands);
3254 output_asm_insn (\"ite\t%d3\", operands);
3255 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3256 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3259 [(set_attr "conds" "clob")
3260 (set (attr "length")
3261 (if_then_else (eq_attr "is_thumb" "yes")
3264 (set_attr "type" "store1")]
3267 ; Reject the frame pointer in operand[1], since reloading this after
3268 ; it has been eliminated can cause carnage.
3269 (define_insn "*minmax_arithsi"
3270 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3271 (match_operator:SI 4 "shiftable_operator"
3272 [(match_operator:SI 5 "minmax_operator"
3273 [(match_operand:SI 2 "s_register_operand" "r,r")
3274 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3275 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3276 (clobber (reg:CC CC_REGNUM))]
3277 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3280 enum rtx_code code = GET_CODE (operands[4]);
3283 if (which_alternative != 0 || operands[3] != const0_rtx
3284 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3289 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3290 operands[2], operands[3]);
3291 output_asm_insn (\"cmp\\t%2, %3\", operands);
3295 output_asm_insn (\"ite\\t%d5\", operands);
3297 output_asm_insn (\"it\\t%d5\", operands);
3299 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3301 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3304 [(set_attr "conds" "clob")
3305 (set (attr "length")
3306 (if_then_else (eq_attr "is_thumb" "yes")
3312 ;; Shift and rotation insns
3314 (define_expand "ashldi3"
3315 [(set (match_operand:DI 0 "s_register_operand" "")
3316 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3317 (match_operand:SI 2 "reg_or_int_operand" "")))]
3320 if (GET_CODE (operands[2]) == CONST_INT)
3322 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3324 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3327 /* Ideally we shouldn't fail here if we could know that operands[1]
3328 ends up already living in an iwmmxt register. Otherwise it's
3329 cheaper to have the alternate code being generated than moving
3330 values to iwmmxt regs and back. */
3333 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DImode left shift by exactly one bit: "movs" shifts the low word
;; left and leaves its old bit 31 in the carry flag; "adc rH, rH, rH"
;; doubles the high word and adds the carried-in bit.  The "&r"
;; alternative earlyclobbers the destination to avoid partial overlap.
3338 (define_insn "arm_ashldi3_1bit"
3339 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3340 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3342 (clobber (reg:CC CC_REGNUM))]
3344 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3345 [(set_attr "conds" "clob")
3346 (set_attr "length" "8")]
3349 (define_expand "ashlsi3"
3350 [(set (match_operand:SI 0 "s_register_operand" "")
3351 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3352 (match_operand:SI 2 "arm_rhs_operand" "")))]
3355 if (GET_CODE (operands[2]) == CONST_INT
3356 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3358 emit_insn (gen_movsi (operands[0], const0_rtx));
3364 (define_insn "*thumb1_ashlsi3"
3365 [(set (match_operand:SI 0 "register_operand" "=l,l")
3366 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3367 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3370 [(set_attr "length" "2")
3371 (set_attr "conds" "set")])
3373 (define_expand "ashrdi3"
3374 [(set (match_operand:DI 0 "s_register_operand" "")
3375 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3376 (match_operand:SI 2 "reg_or_int_operand" "")))]
3379 if (GET_CODE (operands[2]) == CONST_INT)
3381 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3383 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3386 /* Ideally we shouldn't fail here if we could know that operands[1]
3387 ends up already living in an iwmmxt register. Otherwise it's
3388 cheaper to have the alternate code being generated than moving
3389 values to iwmmxt regs and back. */
3392 else if (!TARGET_REALLY_IWMMXT)
;; DImode arithmetic right shift by one: shift the high word with
;; "movs ... asr #1" (shifted-out bit lands in carry), then rotate the
;; carry into bit 31 of the low word with RRX.
3397 (define_insn "arm_ashrdi3_1bit"
3398 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3399 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3401 (clobber (reg:CC CC_REGNUM))]
3403 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3404 [(set_attr "conds" "clob")
3405 (set_attr "insn" "mov")
3406 (set_attr "length" "8")]
3409 (define_expand "ashrsi3"
3410 [(set (match_operand:SI 0 "s_register_operand" "")
3411 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3412 (match_operand:SI 2 "arm_rhs_operand" "")))]
3415 if (GET_CODE (operands[2]) == CONST_INT
3416 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3417 operands[2] = GEN_INT (31);
3421 (define_insn "*thumb1_ashrsi3"
3422 [(set (match_operand:SI 0 "register_operand" "=l,l")
3423 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3424 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3427 [(set_attr "length" "2")
3428 (set_attr "conds" "set")])
3430 (define_expand "lshrdi3"
3431 [(set (match_operand:DI 0 "s_register_operand" "")
3432 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3433 (match_operand:SI 2 "reg_or_int_operand" "")))]
3436 if (GET_CODE (operands[2]) == CONST_INT)
3438 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3440 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3443 /* Ideally we shouldn't fail here if we could know that operands[1]
3444 ends up already living in an iwmmxt register. Otherwise it's
3445 cheaper to have the alternate code being generated than moving
3446 values to iwmmxt regs and back. */
3449 else if (!TARGET_REALLY_IWMMXT)
;; DImode logical right shift by one — identical to the arithmetic
;; variant above except the high word uses LSR (zero fill) instead of ASR.
3454 (define_insn "arm_lshrdi3_1bit"
3455 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3456 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3458 (clobber (reg:CC CC_REGNUM))]
3460 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3461 [(set_attr "conds" "clob")
3462 (set_attr "insn" "mov")
3463 (set_attr "length" "8")]
3466 (define_expand "lshrsi3"
3467 [(set (match_operand:SI 0 "s_register_operand" "")
3468 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3469 (match_operand:SI 2 "arm_rhs_operand" "")))]
3472 if (GET_CODE (operands[2]) == CONST_INT
3473 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3475 emit_insn (gen_movsi (operands[0], const0_rtx));
3481 (define_insn "*thumb1_lshrsi3"
3482 [(set (match_operand:SI 0 "register_operand" "=l,l")
3483 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3484 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3487 [(set_attr "length" "2")
3488 (set_attr "conds" "set")])
3490 (define_expand "rotlsi3"
3491 [(set (match_operand:SI 0 "s_register_operand" "")
3492 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3493 (match_operand:SI 2 "reg_or_int_operand" "")))]
3496 if (GET_CODE (operands[2]) == CONST_INT)
3497 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3500 rtx reg = gen_reg_rtx (SImode);
3501 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3507 (define_expand "rotrsi3"
3508 [(set (match_operand:SI 0 "s_register_operand" "")
3509 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3510 (match_operand:SI 2 "arm_rhs_operand" "")))]
3515 if (GET_CODE (operands[2]) == CONST_INT
3516 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3517 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3519 else /* TARGET_THUMB1 */
3521 if (GET_CODE (operands [2]) == CONST_INT)
3522 operands [2] = force_reg (SImode, operands[2]);
3527 (define_insn "*thumb1_rotrsi3"
3528 [(set (match_operand:SI 0 "register_operand" "=l")
3529 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3530 (match_operand:SI 2 "register_operand" "l")))]
3533 [(set_attr "length" "2")]
3536 (define_insn "*arm_shiftsi3"
3537 [(set (match_operand:SI 0 "s_register_operand" "=r")
3538 (match_operator:SI 3 "shift_operator"
3539 [(match_operand:SI 1 "s_register_operand" "r")
3540 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3542 "* return arm_output_shift(operands, 0);"
3543 [(set_attr "predicable" "yes")
3544 (set_attr "shift" "1")
3545 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3546 (const_string "alu_shift")
3547 (const_string "alu_shift_reg")))]
3550 (define_insn "*shiftsi3_compare0"
3551 [(set (reg:CC_NOOV CC_REGNUM)
3552 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3553 [(match_operand:SI 1 "s_register_operand" "r")
3554 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3556 (set (match_operand:SI 0 "s_register_operand" "=r")
3557 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3559 "* return arm_output_shift(operands, 1);"
3560 [(set_attr "conds" "set")
3561 (set_attr "shift" "1")
3562 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3563 (const_string "alu_shift")
3564 (const_string "alu_shift_reg")))]
3567 (define_insn "*shiftsi3_compare0_scratch"
3568 [(set (reg:CC_NOOV CC_REGNUM)
3569 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3570 [(match_operand:SI 1 "s_register_operand" "r")
3571 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3573 (clobber (match_scratch:SI 0 "=r"))]
3575 "* return arm_output_shift(operands, 1);"
3576 [(set_attr "conds" "set")
3577 (set_attr "shift" "1")]
3580 (define_insn "*not_shiftsi"
3581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3582 (not:SI (match_operator:SI 3 "shift_operator"
3583 [(match_operand:SI 1 "s_register_operand" "r,r")
3584 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3587 [(set_attr "predicable" "yes")
3588 (set_attr "shift" "1")
3589 (set_attr "insn" "mvn")
3590 (set_attr "arch" "32,a")
3591 (set_attr "type" "alu_shift,alu_shift_reg")])
3593 (define_insn "*not_shiftsi_compare0"
3594 [(set (reg:CC_NOOV CC_REGNUM)
3596 (not:SI (match_operator:SI 3 "shift_operator"
3597 [(match_operand:SI 1 "s_register_operand" "r,r")
3598 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3600 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3601 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3604 [(set_attr "conds" "set")
3605 (set_attr "shift" "1")
3606 (set_attr "insn" "mvn")
3607 (set_attr "arch" "32,a")
3608 (set_attr "type" "alu_shift,alu_shift_reg")])
3610 (define_insn "*not_shiftsi_compare0_scratch"
3611 [(set (reg:CC_NOOV CC_REGNUM)
3613 (not:SI (match_operator:SI 3 "shift_operator"
3614 [(match_operand:SI 1 "s_register_operand" "r,r")
3615 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3617 (clobber (match_scratch:SI 0 "=r,r"))]
3620 [(set_attr "conds" "set")
3621 (set_attr "shift" "1")
3622 (set_attr "insn" "mvn")
3623 (set_attr "arch" "32,a")
3624 (set_attr "type" "alu_shift,alu_shift_reg")])
3626 ;; We don't really have extzv, but defining this using shifts helps
3627 ;; to reduce register pressure later on.
3629 (define_expand "extzv"
3631 (ashift:SI (match_operand:SI 1 "register_operand" "")
3632 (match_operand:SI 2 "const_int_operand" "")))
3633 (set (match_operand:SI 0 "register_operand" "")
3634 (lshiftrt:SI (match_dup 4)
3635 (match_operand:SI 3 "const_int_operand" "")))]
3636 "TARGET_THUMB1 || arm_arch_thumb2"
3639 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3640 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3642 if (arm_arch_thumb2)
3644 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3649 operands[3] = GEN_INT (rshift);
3653 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3657 operands[2] = GEN_INT (lshift);
3658 operands[4] = gen_reg_rtx (SImode);
3663 [(set (match_operand:SI 0 "s_register_operand" "=r")
3664 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3665 (match_operand:SI 2 "const_int_operand" "M")
3666 (match_operand:SI 3 "const_int_operand" "M")))]
3668 "sbfx%?\t%0, %1, %3, %2"
3669 [(set_attr "length" "4")
3670 (set_attr "predicable" "yes")]
3673 (define_insn "extzv_t2"
3674 [(set (match_operand:SI 0 "s_register_operand" "=r")
3675 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3676 (match_operand:SI 2 "const_int_operand" "M")
3677 (match_operand:SI 3 "const_int_operand" "M")))]
3679 "ubfx%?\t%0, %1, %3, %2"
3680 [(set_attr "length" "4")
3681 (set_attr "predicable" "yes")]
3685 ;; Unary arithmetic insns
3687 (define_expand "negdi2"
3689 [(set (match_operand:DI 0 "s_register_operand" "")
3690 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3691 (clobber (reg:CC CC_REGNUM))])]
3696 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3697 ;; The first alternative allows the common case of a *full* overlap.
;; Double-word negate: "rsbs" computes 0 - low (setting carry/borrow),
;; "rsc" completes 0 - high with the borrow propagated.
3698 (define_insn "*arm_negdi2"
3699 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3700 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3701 (clobber (reg:CC CC_REGNUM))]
3703 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3704 [(set_attr "conds" "clob")
3705 (set_attr "length" "8")]
;; Thumb-1 double-word negate (no RSC available): zero the high word,
;; negate the low word (sets borrow), then subtract-with-carry the
;; source high word from zero.  Earlyclobber ("=&l") keeps the
;; destination from aliasing the source mid-sequence.
3708 (define_insn "*thumb1_negdi2"
3709 [(set (match_operand:DI 0 "register_operand" "=&l")
3710 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3711 (clobber (reg:CC CC_REGNUM))]
3713 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3714 [(set_attr "length" "6")]
3717 (define_expand "negsi2"
3718 [(set (match_operand:SI 0 "s_register_operand" "")
3719 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; SImode negate as reverse-subtract from zero; predicable, no CC clobber.
3724 (define_insn "*arm_negsi2"
3725 [(set (match_operand:SI 0 "s_register_operand" "=r")
3726 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3728 "rsb%?\\t%0, %1, #0"
3729 [(set_attr "predicable" "yes")]
3732 (define_insn "*thumb1_negsi2"
3733 [(set (match_operand:SI 0 "register_operand" "=l")
3734 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3737 [(set_attr "length" "2")]
3740 (define_expand "negsf2"
3741 [(set (match_operand:SF 0 "s_register_operand" "")
3742 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3743 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3747 (define_expand "negdf2"
3748 [(set (match_operand:DF 0 "s_register_operand" "")
3749 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3750 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3753 ;; abssi2 doesn't really clobber the condition codes if a different register
3754 ;; is being set. To keep things simple, assume during rtl manipulations that
3755 ;; it does, but tell the final scan operator the truth. Similarly for
3758 (define_expand "abssi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "")
3761 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3762 (clobber (match_dup 2))])]
3766 operands[2] = gen_rtx_SCRATCH (SImode);
3768 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs(x).  Alternative 0 (dest tied to src): compare and conditionally
;; reverse-subtract.  Alternative 1: branch-free mask form —
;; eor/sub with "x asr #31" computes (x ^ mask) - mask, i.e. abs(x).
3771 (define_insn "*arm_abssi2"
3772 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3773 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3774 (clobber (reg:CC CC_REGNUM))]
3777 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3778 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3779 [(set_attr "conds" "clob,*")
3780 (set_attr "shift" "1")
3781 ;; predicable can't be set based on the variant, so left as no
3782 (set_attr "length" "8")]
3785 (define_insn_and_split "*thumb1_abssi2"
3786 [(set (match_operand:SI 0 "s_register_operand" "=l")
3787 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3788 (clobber (match_scratch:SI 2 "=&l"))]
3791 "TARGET_THUMB1 && reload_completed"
3792 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3793 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3794 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3796 [(set_attr "length" "6")]
;; -abs(x): same two strategies as *arm_abssi2 with the senses flipped
;; (rsbgt instead of rsblt; rsb instead of sub in the mask form).
3799 (define_insn "*arm_neg_abssi2"
3800 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3801 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3802 (clobber (reg:CC CC_REGNUM))]
3805 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3806 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3807 [(set_attr "conds" "clob,*")
3808 (set_attr "shift" "1")
3809 ;; predicable can't be set based on the variant, so left as no
3810 (set_attr "length" "8")]
3813 (define_insn_and_split "*thumb1_neg_abssi2"
3814 [(set (match_operand:SI 0 "s_register_operand" "=l")
3815 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3816 (clobber (match_scratch:SI 2 "=&l"))]
3819 "TARGET_THUMB1 && reload_completed"
3820 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3821 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3822 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3824 [(set_attr "length" "6")]
3827 (define_expand "abssf2"
3828 [(set (match_operand:SF 0 "s_register_operand" "")
3829 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3830 "TARGET_32BIT && TARGET_HARD_FLOAT"
3833 (define_expand "absdf2"
3834 [(set (match_operand:DF 0 "s_register_operand" "")
3835 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3836 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3839 (define_expand "sqrtsf2"
3840 [(set (match_operand:SF 0 "s_register_operand" "")
3841 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3842 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3845 (define_expand "sqrtdf2"
3846 [(set (match_operand:DF 0 "s_register_operand" "")
3847 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3848 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; DImode bitwise NOT.  After reload this splits into two independent
;; SImode NOTs; the low halves are processed first, so operands[0]/[1]
;; are rewritten to the lowparts after the highparts are captured in
;; operands[2]/[3].
3851 (define_insn_and_split "one_cmpldi2"
3852 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3853 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3856 "TARGET_32BIT && reload_completed"
3857 [(set (match_dup 0) (not:SI (match_dup 1)))
3858 (set (match_dup 2) (not:SI (match_dup 3)))]
3861 operands[2] = gen_highpart (SImode, operands[0]);
3862 operands[0] = gen_lowpart (SImode, operands[0]);
3863 operands[3] = gen_highpart (SImode, operands[1]);
3864 operands[1] = gen_lowpart (SImode, operands[1]);
3866 [(set_attr "length" "8")
3867 (set_attr "predicable" "yes")]
3870 (define_expand "one_cmplsi2"
3871 [(set (match_operand:SI 0 "s_register_operand" "")
3872 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3877 (define_insn "*arm_one_cmplsi2"
3878 [(set (match_operand:SI 0 "s_register_operand" "=r")
3879 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3882 [(set_attr "predicable" "yes")
3883 (set_attr "insn" "mvn")]
3886 (define_insn "*thumb1_one_cmplsi2"
3887 [(set (match_operand:SI 0 "register_operand" "=l")
3888 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3891 [(set_attr "length" "2")
3892 (set_attr "insn" "mvn")]
3895 (define_insn "*notsi_compare0"
3896 [(set (reg:CC_NOOV CC_REGNUM)
3897 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3899 (set (match_operand:SI 0 "s_register_operand" "=r")
3900 (not:SI (match_dup 1)))]
3903 [(set_attr "conds" "set")
3904 (set_attr "insn" "mvn")]
3907 (define_insn "*notsi_compare0_scratch"
3908 [(set (reg:CC_NOOV CC_REGNUM)
3909 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3911 (clobber (match_scratch:SI 0 "=r"))]
3914 [(set_attr "conds" "set")
3915 (set_attr "insn" "mvn")]
3918 ;; Fixed <--> Floating conversion insns
3920 (define_expand "floatsihf2"
3921 [(set (match_operand:HF 0 "general_operand" "")
3922 (float:HF (match_operand:SI 1 "general_operand" "")))]
3926 rtx op1 = gen_reg_rtx (SFmode);
3927 expand_float (op1, operands[1], 0);
3928 op1 = convert_to_mode (HFmode, op1, 0);
3929 emit_move_insn (operands[0], op1);
3934 (define_expand "floatdihf2"
3935 [(set (match_operand:HF 0 "general_operand" "")
3936 (float:HF (match_operand:DI 1 "general_operand" "")))]
3940 rtx op1 = gen_reg_rtx (SFmode);
3941 expand_float (op1, operands[1], 0);
3942 op1 = convert_to_mode (HFmode, op1, 0);
3943 emit_move_insn (operands[0], op1);
3948 (define_expand "floatsisf2"
3949 [(set (match_operand:SF 0 "s_register_operand" "")
3950 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3951 "TARGET_32BIT && TARGET_HARD_FLOAT"
3953 if (TARGET_MAVERICK)
3955 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3960 (define_expand "floatsidf2"
3961 [(set (match_operand:DF 0 "s_register_operand" "")
3962 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3963 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3965 if (TARGET_MAVERICK)
3967 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3972 (define_expand "fix_trunchfsi2"
3973 [(set (match_operand:SI 0 "general_operand" "")
3974 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3978 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3979 expand_fix (operands[0], op1, 0);
3984 (define_expand "fix_trunchfdi2"
3985 [(set (match_operand:DI 0 "general_operand" "")
3986 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3990 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3991 expand_fix (operands[0], op1, 0);
;; SFmode -> SImode float-to-int truncation.  On Maverick (Cirrus)
;; targets both operands must live in Cirrus FP registers; force each
;; operand that is not already in one into a fresh register.
;; BUG FIX: the second force_reg previously read operands[0] (the
;; SImode destination) instead of operands[1] (the SFmode source),
;; assigning a wrong-mode copy of the destination as the source.
3996 (define_expand "fix_truncsfsi2"
3997 [(set (match_operand:SI 0 "s_register_operand" "")
3998 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3999 "TARGET_32BIT && TARGET_HARD_FLOAT"
4001 if (TARGET_MAVERICK)
4003 if (!cirrus_fp_register (operands[0], SImode))
4004 operands[0] = force_reg (SImode, operands[0]);
4005 if (!cirrus_fp_register (operands[1], SFmode))
4006 operands[1] = force_reg (SFmode, operands[1]);
4007 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; DFmode -> SImode float-to-int truncation (Maverick path mirrors
;; fix_truncsfsi2 above).
;; BUG FIX: force_reg previously copied operands[0] (the SImode
;; destination) into a DFmode register and used it as the source;
;; it must force operands[1], the DFmode input, instead.
4012 (define_expand "fix_truncdfsi2"
4013 [(set (match_operand:SI 0 "s_register_operand" "")
4014 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4015 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4017 if (TARGET_MAVERICK)
4019 if (!cirrus_fp_register (operands[1], DFmode))
4020 operands[1] = force_reg (DFmode, operands[1]);
4021 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4028 (define_expand "truncdfsf2"
4029 [(set (match_operand:SF 0 "s_register_operand" "")
4031 (match_operand:DF 1 "s_register_operand" "")))]
4032 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4036 /* DFmode -> HFmode conversions have to go through SFmode. */
4037 (define_expand "truncdfhf2"
4038 [(set (match_operand:HF 0 "general_operand" "")
4040 (match_operand:DF 1 "general_operand" "")))]
4045 op1 = convert_to_mode (SFmode, operands[1], 0);
4046 op1 = convert_to_mode (HFmode, op1, 0);
4047 emit_move_insn (operands[0], op1);
4052 ;; Zero and sign extension instructions.
4054 (define_insn "zero_extend<mode>di2"
4055 [(set (match_operand:DI 0 "s_register_operand" "=r")
4056 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4057 "<qhs_extenddi_cstr>")))]
4058 "TARGET_32BIT <qhs_zextenddi_cond>"
4060 [(set_attr "length" "8")
4061 (set_attr "ce_count" "2")
4062 (set_attr "predicable" "yes")]
4065 (define_insn "extend<mode>di2"
4066 [(set (match_operand:DI 0 "s_register_operand" "=r")
4067 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4068 "<qhs_extenddi_cstr>")))]
4069 "TARGET_32BIT <qhs_sextenddi_cond>"
4071 [(set_attr "length" "8")
4072 (set_attr "ce_count" "2")
4073 (set_attr "shift" "1")
4074 (set_attr "predicable" "yes")]
4077 ;; Splits for all extensions to DImode
4079 [(set (match_operand:DI 0 "s_register_operand" "")
4080 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4082 [(set (match_dup 0) (match_dup 1))]
4084 rtx lo_part = gen_lowpart (SImode, operands[0]);
4085 enum machine_mode src_mode = GET_MODE (operands[1]);
4087 if (REG_P (operands[0])
4088 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4089 emit_clobber (operands[0]);
4090 if (!REG_P (lo_part) || src_mode != SImode
4091 || !rtx_equal_p (lo_part, operands[1]))
4093 if (src_mode == SImode)
4094 emit_move_insn (lo_part, operands[1]);
4096 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4097 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4098 operands[1] = lo_part;
4100 operands[0] = gen_highpart (SImode, operands[0]);
4101 operands[1] = const0_rtx;
4105 [(set (match_operand:DI 0 "s_register_operand" "")
4106 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4108 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4110 rtx lo_part = gen_lowpart (SImode, operands[0]);
4111 enum machine_mode src_mode = GET_MODE (operands[1]);
4113 if (REG_P (operands[0])
4114 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4115 emit_clobber (operands[0]);
4117 if (!REG_P (lo_part) || src_mode != SImode
4118 || !rtx_equal_p (lo_part, operands[1]))
4120 if (src_mode == SImode)
4121 emit_move_insn (lo_part, operands[1]);
4123 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4124 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4125 operands[1] = lo_part;
4127 operands[0] = gen_highpart (SImode, operands[0]);
4130 (define_expand "zero_extendhisi2"
4131 [(set (match_operand:SI 0 "s_register_operand" "")
4132 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4135 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4137 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4140 if (!arm_arch6 && !MEM_P (operands[1]))
4142 rtx t = gen_lowpart (SImode, operands[1]);
4143 rtx tmp = gen_reg_rtx (SImode);
4144 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4145 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4151 [(set (match_operand:SI 0 "s_register_operand" "")
4152 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4153 "!TARGET_THUMB2 && !arm_arch6"
4154 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4155 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4157 operands[2] = gen_lowpart (SImode, operands[1]);
4160 (define_insn "*thumb1_zero_extendhisi2"
4161 [(set (match_operand:SI 0 "register_operand" "=l,l")
4162 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4167 if (which_alternative == 0 && arm_arch6)
4168 return "uxth\t%0, %1";
4169 if (which_alternative == 0)
4172 mem = XEXP (operands[1], 0);
4174 if (GET_CODE (mem) == CONST)
4175 mem = XEXP (mem, 0);
4177 if (GET_CODE (mem) == PLUS)
4179 rtx a = XEXP (mem, 0);
4181 /* This can happen due to bugs in reload. */
4182 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4185 ops[0] = operands[0];
4188 output_asm_insn ("mov\t%0, %1", ops);
4190 XEXP (mem, 0) = operands[0];
4194 return "ldrh\t%0, %1";
4196 [(set_attr_alternative "length"
4197 [(if_then_else (eq_attr "is_arch6" "yes")
4198 (const_int 2) (const_int 4))
4200 (set_attr "type" "alu_shift,load_byte")]
4203 (define_insn "*arm_zero_extendhisi2"
4204 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4205 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4206 "TARGET_ARM && arm_arch4 && !arm_arch6"
4210 [(set_attr "type" "alu_shift,load_byte")
4211 (set_attr "predicable" "yes")]
4214 (define_insn "*arm_zero_extendhisi2_v6"
4215 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4216 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4217 "TARGET_ARM && arm_arch6"
4221 [(set_attr "type" "alu_shift,load_byte")
4222 (set_attr "predicable" "yes")]
4225 (define_insn "*arm_zero_extendhisi2addsi"
4226 [(set (match_operand:SI 0 "s_register_operand" "=r")
4227 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4228 (match_operand:SI 2 "s_register_operand" "r")))]
4230 "uxtah%?\\t%0, %2, %1"
4231 [(set_attr "type" "alu_shift")
4232 (set_attr "predicable" "yes")]
4235 (define_expand "zero_extendqisi2"
4236 [(set (match_operand:SI 0 "s_register_operand" "")
4237 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4240 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4242 emit_insn (gen_andsi3 (operands[0],
4243 gen_lowpart (SImode, operands[1]),
4247 if (!arm_arch6 && !MEM_P (operands[1]))
4249 rtx t = gen_lowpart (SImode, operands[1]);
4250 rtx tmp = gen_reg_rtx (SImode);
4251 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4252 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4258 [(set (match_operand:SI 0 "s_register_operand" "")
4259 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4261 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4262 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4264 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4267 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Thumb-1, pre-v6 byte zero-extension.  Register alternative costs 4
;; bytes (no uxtb before v6, hence presumably a shift pair); the memory
;; alternative is a 2-byte ldrb with a 32-byte literal-pool range.
;; NOTE(review): output template lines missing from this extract.
4272 (define_insn "*thumb1_zero_extendqisi2"
4273 [(set (match_operand:SI 0 "register_operand" "=l,l")
4274 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4275 "TARGET_THUMB1 && !arm_arch6"
4279 [(set_attr "length" "4,2")
4280 (set_attr "type" "alu_shift,load_byte")
4281 (set_attr "pool_range" "*,32")]
;; Thumb-1 on ARMv6+: both alternatives are 2-byte encodings (uxtb is
;; available for the register case).
;; NOTE(review): output template lines missing from this extract.
4284 (define_insn "*thumb1_zero_extendqisi2_v6"
4285 [(set (match_operand:SI 0 "register_operand" "=l,l")
4286 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4287 "TARGET_THUMB1 && arm_arch6"
4291 [(set_attr "length" "2")
4292 (set_attr "type" "alu_shift,load_byte")]
;; ARM-state, pre-v6 byte zero-extension.  The visible second alternative
;; is a predicated byte load (ldrb); the 8-byte first alternative (its
;; template line is missing from this extract) is the register fallback.
4295 (define_insn "*arm_zero_extendqisi2"
4296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4297 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4298 "TARGET_ARM && !arm_arch6"
4301 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4302 [(set_attr "length" "8,4")
4303 (set_attr "type" "alu_shift,load_byte")
4304 (set_attr "predicable" "yes")]
;; ARMv6+ byte zero-extension: memory alternative is ldrb as before;
;; the register alternative (template line missing from this extract,
;; presumably uxtb) replaces the pre-v6 two-instruction sequence.
4307 (define_insn "*arm_zero_extendqisi2_v6"
4308 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4309 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4310 "TARGET_ARM && arm_arch6"
4313 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4314 [(set_attr "type" "alu_shift,load_byte")
4315 (set_attr "predicable" "yes")]
;; Fused byte zero-extend-and-add via UXTAB: accumulator %2 first, byte
;; source %1 second.  Mirrors *arm_zero_extendhisi2addsi above.
;; NOTE(review): the insn condition line is missing from this extract
;; (UXTAB requires ARMv6) -- confirm against the full arm.md.
4318 (define_insn "*arm_zero_extendqisi2addsi"
4319 [(set (match_operand:SI 0 "s_register_operand" "=r")
4320 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4321 (match_operand:SI 2 "s_register_operand" "r")))]
4323 "uxtab%?\\t%0, %2, %1"
4324 [(set_attr "predicable" "yes")
4325 (set_attr "insn" "xtab")
4326 (set_attr "type" "alu_shift")]
4330 [(set (match_operand:SI 0 "s_register_operand" "")
4331 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4332 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4333 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4334 [(set (match_dup 2) (match_dup 1))
4335 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4340 [(set (match_operand:SI 0 "s_register_operand" "")
4341 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4342 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4343 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4344 [(set (match_dup 2) (match_dup 1))
4345 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4351 [(set (match_operand:SI 0 "s_register_operand" "")
4352 (ior_xor:SI (and:SI (ashift:SI
4353 (match_operand:SI 1 "s_register_operand" "")
4354 (match_operand:SI 2 "const_int_operand" ""))
4355 (match_operand:SI 3 "const_int_operand" ""))
4357 (match_operator 5 "subreg_lowpart_operator"
4358 [(match_operand:SI 4 "s_register_operand" "")]))))]
4360 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4361 == (GET_MODE_MASK (GET_MODE (operands[5]))
4362 & (GET_MODE_MASK (GET_MODE (operands[5]))
4363 << (INTVAL (operands[2])))))"
4364 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4366 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4367 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4370 (define_insn "*compareqi_eq0"
4371 [(set (reg:CC_Z CC_REGNUM)
4372 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4376 [(set_attr "conds" "set")]
;; Expander for halfword sign-extension.  Visible dispatch: Thumb-1 goes
;; through gen_thumb1_extendhisi2; pre-ARMv4 ARM with a memory source
;; (no ldrsh available) uses the extendhisi2_mem byte-assembly sequence;
;; pre-v6 register sources use an ashift-16 / arithmetic-shift-right-16
;; pair through a fresh pseudo.
;; NOTE(review): braces, DONE statements and some guard lines of this
;; expander are missing from this extract.
4379 (define_expand "extendhisi2"
4380 [(set (match_operand:SI 0 "s_register_operand" "")
4381 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4386 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4389 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4391 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4395 if (!arm_arch6 && !MEM_P (operands[1]))
4397 rtx t = gen_lowpart (SImode, operands[1]);
4398 rtx tmp = gen_reg_rtx (SImode);
4399 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4400 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4407 [(set (match_operand:SI 0 "register_operand" "")
4408 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4409 (clobber (match_scratch:SI 2 ""))])]
4411 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4412 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4414 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4417 ;; We used to have an early-clobber on the scratch register here.
4418 ;; However, there's a bug somewhere in reload which means that this
4419 ;; can be partially ignored during spill allocation if the memory
4420 ;; address also needs reloading; this causes us to die later on when
4421 ;; we try to verify the operands. Fortunately, we don't really need
4422 ;; the early-clobber: we can always use operand 0 if operand 2
4423 ;; overlaps the address.
4424 (define_insn "thumb1_extendhisi2"
4425 [(set (match_operand:SI 0 "register_operand" "=l,l")
4426 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4427 (clobber (match_scratch:SI 2 "=X,l"))]
4434 if (which_alternative == 0 && !arm_arch6)
4436 if (which_alternative == 0)
4437 return \"sxth\\t%0, %1\";
4439 mem = XEXP (operands[1], 0);
4441 /* This code used to try to use 'V', and fix the address only if it was
4442 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4443 range of QImode offsets, and offsettable_address_p does a QImode
4446 if (GET_CODE (mem) == CONST)
4447 mem = XEXP (mem, 0);
4449 if (GET_CODE (mem) == LABEL_REF)
4450 return \"ldr\\t%0, %1\";
4452 if (GET_CODE (mem) == PLUS)
4454 rtx a = XEXP (mem, 0);
4455 rtx b = XEXP (mem, 1);
4457 if (GET_CODE (a) == LABEL_REF
4458 && GET_CODE (b) == CONST_INT)
4459 return \"ldr\\t%0, %1\";
4461 if (GET_CODE (b) == REG)
4462 return \"ldrsh\\t%0, %1\";
4470 ops[2] = const0_rtx;
4473 gcc_assert (GET_CODE (ops[1]) == REG);
4475 ops[0] = operands[0];
4476 if (reg_mentioned_p (operands[2], ops[1]))
4479 ops[3] = operands[2];
4480 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4483 [(set_attr_alternative "length"
4484 [(if_then_else (eq_attr "is_arch6" "yes")
4485 (const_int 2) (const_int 4))
4487 (set_attr "type" "alu_shift,load_byte")
4488 (set_attr "pool_range" "*,1020")]
4491 ;; This pattern will only be used when ldsh is not available
4492 (define_expand "extendhisi2_mem"
4493 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4495 (zero_extend:SI (match_dup 7)))
4496 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4497 (set (match_operand:SI 0 "" "")
4498 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4503 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4505 mem1 = change_address (operands[1], QImode, addr);
4506 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4507 operands[0] = gen_lowpart (SImode, operands[0]);
4509 operands[2] = gen_reg_rtx (SImode);
4510 operands[3] = gen_reg_rtx (SImode);
4511 operands[6] = gen_reg_rtx (SImode);
4514 if (BYTES_BIG_ENDIAN)
4516 operands[4] = operands[2];
4517 operands[5] = operands[3];
4521 operands[4] = operands[3];
4522 operands[5] = operands[2];
4528 [(set (match_operand:SI 0 "register_operand" "")
4529 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4531 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4532 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4534 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM-state halfword sign-extension, ARMv4 (ldrsh available) but pre-v6.
;; Register alternative is 8 bytes (shift pair), memory alternative is a
;; 4-byte ldrsh with a 256-byte literal-pool range.
;; NOTE(review): output template lines missing from this extract.
4537 (define_insn "*arm_extendhisi2"
4538 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4539 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4540 "TARGET_ARM && arm_arch4 && !arm_arch6"
4544 [(set_attr "length" "8,4")
4545 (set_attr "type" "alu_shift,load_byte")
4546 (set_attr "predicable" "yes")
4547 (set_attr "pool_range" "*,256")
4548 (set_attr "neg_pool_range" "*,244")]
4551 ;; ??? Check Thumb-2 pool range
;; 32-bit (ARM or Thumb-2) v6+ halfword sign-extension; the register
;; alternative presumably uses sxth (template lines missing from this
;; extract).  Pool ranges are the ARM ldrsh limits; the ??? comment above
;; notes the Thumb-2 range was never re-checked.
4552 (define_insn "*arm_extendhisi2_v6"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4554 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4555 "TARGET_32BIT && arm_arch6"
4559 [(set_attr "type" "alu_shift,load_byte")
4560 (set_attr "predicable" "yes")
4561 (set_attr "pool_range" "*,256")
4562 (set_attr "neg_pool_range" "*,244")]
;; Fused halfword sign-extend-and-add via SXTAH (accumulator %2 first).
;; NOTE(review): condition line and attribute list are missing from this
;; extract (SXTAH requires ARMv6) -- confirm against the full arm.md.
4565 (define_insn "*arm_extendhisi2addsi"
4566 [(set (match_operand:SI 0 "s_register_operand" "=r")
4567 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4568 (match_operand:SI 2 "s_register_operand" "r")))]
4570 "sxtah%?\\t%0, %2, %1"
;; Expander for QI -> HI sign-extension.  On ARMv4 with a memory source
;; it emits a direct (sign_extend:HI mem) set (matched by
;; *arm_extendqihi_insn below, i.e. ldrsb); otherwise it forces the
;; source into a register and rewrites both operands as SImode lowparts
;; so the visible ashift/ashiftrt skeleton performs the extension in a
;; scratch SImode pseudo (operands[2]).
;; NOTE(review): shift amounts, braces and DONE lines of this expander
;; are missing from this extract.
4573 (define_expand "extendqihi2"
4575 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4577 (set (match_operand:HI 0 "s_register_operand" "")
4578 (ashiftrt:SI (match_dup 2)
4583 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4585 emit_insn (gen_rtx_SET (VOIDmode,
4587 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4590 if (!s_register_operand (operands[1], QImode))
4591 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4592 operands[0] = gen_lowpart (SImode, operands[0]);
4593 operands[1] = gen_lowpart (SImode, operands[1]);
4594 operands[2] = gen_reg_rtx (SImode);
;; Direct QI -> HI sign-extending load on ARMv4+: a single predicated
;; ldrsb.  The Uq constraint restricts the address to the limited
;; addressing modes of ldrsb (no shifted index), hence the reduced
;; 256/244-byte literal-pool ranges.
4598 (define_insn "*arm_extendqihi_insn"
4599 [(set (match_operand:HI 0 "s_register_operand" "=r")
4600 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4601 "TARGET_ARM && arm_arch4"
4602 "ldr%(sb%)\\t%0, %1"
4603 [(set_attr "type" "load_byte")
4604 (set_attr "predicable" "yes")
4605 (set_attr "pool_range" "256")
4606 (set_attr "neg_pool_range" "244")]
;; Expander for byte sign-extension.  Pre-ARMv4 (no ldrsb) memory sources
;; are first forced into a QImode register; pre-v6 register sources are
;; extended with an ashift-24 / arithmetic-shift-right-24 pair through a
;; fresh pseudo.  v6+ falls through to the sxtb/ldrsb insn patterns.
;; NOTE(review): braces and DONE lines are missing from this extract.
4609 (define_expand "extendqisi2"
4610 [(set (match_operand:SI 0 "s_register_operand" "")
4611 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4614 if (!arm_arch4 && MEM_P (operands[1]))
4615 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4617 if (!arm_arch6 && !MEM_P (operands[1]))
4619 rtx t = gen_lowpart (SImode, operands[1]);
4620 rtx tmp = gen_reg_rtx (SImode);
4621 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4622 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4628 [(set (match_operand:SI 0 "register_operand" "")
4629 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4631 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4632 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4634 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM-state byte sign-extension, ARMv4 (ldrsb) but pre-v6: 8-byte shift
;; pair for registers, 4-byte ldrsb (Uq-constrained address) for memory.
;; NOTE(review): output template lines missing from this extract.
4637 (define_insn "*arm_extendqisi"
4638 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4639 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4640 "TARGET_ARM && arm_arch4 && !arm_arch6"
4644 [(set_attr "length" "8,4")
4645 (set_attr "type" "alu_shift,load_byte")
4646 (set_attr "predicable" "yes")
4647 (set_attr "pool_range" "*,256")
4648 (set_attr "neg_pool_range" "*,244")]
;; ARMv6+ byte sign-extension; the register alternative presumably uses
;; sxtb (the sign_extend wrapper line and output template are missing
;; from this extract -- confirm against the full arm.md).
4651 (define_insn "*arm_extendqisi_v6"
4652 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4654 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4655 "TARGET_ARM && arm_arch6"
4659 [(set_attr "type" "alu_shift,load_byte")
4660 (set_attr "predicable" "yes")
4661 (set_attr "pool_range" "*,256")
4662 (set_attr "neg_pool_range" "*,244")]
;; Fused byte sign-extend-and-add via SXTAB (accumulator %2 first).
;; NOTE(review): the insn condition line is missing from this extract
;; (SXTAB requires ARMv6) -- confirm against the full arm.md.
4665 (define_insn "*arm_extendqisi2addsi"
4666 [(set (match_operand:SI 0 "s_register_operand" "=r")
4667 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4668 (match_operand:SI 2 "s_register_operand" "r")))]
4670 "sxtab%?\\t%0, %2, %1"
4671 [(set_attr "type" "alu_shift")
4672 (set_attr "insn" "xtab")
4673 (set_attr "predicable" "yes")]
4677 [(set (match_operand:SI 0 "register_operand" "")
4678 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4679 "TARGET_THUMB1 && reload_completed"
4680 [(set (match_dup 0) (match_dup 2))
4681 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4683 rtx addr = XEXP (operands[1], 0);
4685 if (GET_CODE (addr) == CONST)
4686 addr = XEXP (addr, 0);
4688 if (GET_CODE (addr) == PLUS
4689 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4690 /* No split necessary. */
4693 if (GET_CODE (addr) == PLUS
4694 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4697 if (reg_overlap_mentioned_p (operands[0], addr))
4699 rtx t = gen_lowpart (QImode, operands[0]);
4700 emit_move_insn (t, operands[1]);
4701 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4707 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4708 operands[2] = const0_rtx;
4710 else if (GET_CODE (addr) != PLUS)
4712 else if (REG_P (XEXP (addr, 0)))
4714 operands[2] = XEXP (addr, 1);
4715 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4719 operands[2] = XEXP (addr, 0);
4720 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4723 operands[3] = change_address (operands[1], QImode, addr);
4727 [(set (match_operand:SI 0 "register_operand" "")
4728 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4729 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4730 (set (match_operand:SI 3 "register_operand" "")
4731 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4733 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4734 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4735 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4736 && (peep2_reg_dead_p (3, operands[0])
4737 || rtx_equal_p (operands[0], operands[3]))
4738 && (peep2_reg_dead_p (3, operands[2])
4739 || rtx_equal_p (operands[2], operands[3]))"
4740 [(set (match_dup 2) (match_dup 1))
4741 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4743 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4744 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb-1 byte sign-extension, three alternatives: register (sxtb on v6,
;; otherwise a longer sequence whose template is not visible here),
;; reg+reg memory ('V', direct ldrsb), and general memory ('m', split at
;; reload by the peephole/split patterns above).  The C body picks the
;; template at output time from which_alternative and the address shape.
;; NOTE(review): much of the C body and the middle length alternative are
;; missing from this extract; the visible lines are kept unchanged.
4747 (define_insn "thumb1_extendqisi2"
4748 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4749 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4754 if (which_alternative == 0 && arm_arch6)
4755 return "sxtb\\t%0, %1";
4756 if (which_alternative == 0)
4759 addr = XEXP (operands[1], 0);
4760 if (GET_CODE (addr) == PLUS
4761 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4762 return "ldrsb\\t%0, %1";
4766 [(set_attr_alternative "length"
4767 [(if_then_else (eq_attr "is_arch6" "yes")
4768 (const_int 2) (const_int 4))
4770 (if_then_else (eq_attr "is_arch6" "yes")
4771 (const_int 4) (const_int 6))])
4772 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; SF -> DF float extension, available only with hard float and a double-
;; precision FPU (!TARGET_VFP_SINGLE); the actual instruction lives in the
;; FPU-specific .md files.
4775 (define_expand "extendsfdf2"
4776 [(set (match_operand:DF 0 "s_register_operand" "")
4777 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4778 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4782 /* HFmode -> DFmode conversions have to go through SFmode. */
;; HF -> DF conversion: no direct instruction exists, so convert through
;; SFmode in two convert_to_mode steps and move the result with movdf
;; (see the comment above this pattern).
;; NOTE(review): the condition string and closing lines of this expander
;; are missing from this extract.
4783 (define_expand "extendhfdf2"
4784 [(set (match_operand:DF 0 "general_operand" "")
4785 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4790 op1 = convert_to_mode (SFmode, operands[1], 0);
4791 op1 = convert_to_mode (DFmode, op1, 0);
4792 emit_insn (gen_movdf (operands[0], op1));
4797 ;; Move insns (including loads and stores)
4799 ;; XXX Just some ideas about movti.
4800 ;; I don't think these are a good idea on the arm, there just aren't enough
4802 ;;(define_expand "loadti"
4803 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4804 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4807 ;;(define_expand "storeti"
4808 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4809 ;; (match_operand:TI 1 "s_register_operand" ""))]
4812 ;;(define_expand "movti"
4813 ;; [(set (match_operand:TI 0 "general_operand" "")
4814 ;; (match_operand:TI 1 "general_operand" ""))]
4820 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4821 ;; operands[1] = copy_to_reg (operands[1]);
4822 ;; if (GET_CODE (operands[0]) == MEM)
4823 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4824 ;; else if (GET_CODE (operands[1]) == MEM)
4825 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4829 ;; emit_insn (insn);
4833 ;; Recognize garbage generated above.
4836 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4837 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4841 ;; register mem = (which_alternative < 3);
4842 ;; register const char *template;
4844 ;; operands[mem] = XEXP (operands[mem], 0);
4845 ;; switch (which_alternative)
4847 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4848 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4849 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4850 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4851 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4852 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4854 ;; output_asm_insn (template, operands);
;; DImode move expander.  Before reload (can_create_pseudo_p), a store to
;; a non-register destination forces the source into a register so mem-to-
;; mem and constant-to-mem moves never reach the movdi insn patterns.
;; NOTE(review): braces and some guard lines are missing from this
;; extract.
4858 (define_expand "movdi"
4859 [(set (match_operand:DI 0 "general_operand" "")
4860 (match_operand:DI 1 "general_operand" ""))]
4863 if (can_create_pseudo_p ())
4865 if (GET_CODE (operands[0]) != REG)
4866 operands[1] = force_reg (DImode, operands[1]);
;; Core DImode move for targets without VFP/Maverick DI support.
;; Alternatives: reg-reg and two classes of DI constants (Da/Db/Dc,
;; distinguished by how many insns they need -- lengths 8/12/16), plus
;; load (ldm/ldrd via output_move_double) and store.  One side must be a
;; register.  Pool ranges are given separately for ARM and Thumb-2.
;; NOTE(review): parts of the C body (the non-default switch cases) are
;; missing from this extract.
4871 (define_insn "*arm_movdi"
4872 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4873 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4875 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4877 && ( register_operand (operands[0], DImode)
4878 || register_operand (operands[1], DImode))"
4880 switch (which_alternative)
4887 return output_move_double (operands);
4890 [(set_attr "length" "8,12,16,8,8")
4891 (set_attr "type" "*,*,*,load2,store2")
4892 (set_attr "arm_pool_range" "*,*,*,1020,*")
4893 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4894 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4895 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4899 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4900 (match_operand:ANY64 1 "const_double_operand" ""))]
4903 && (arm_const_double_inline_cost (operands[1])
4904 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4907 arm_split_constant (SET, SImode, curr_insn,
4908 INTVAL (gen_lowpart (SImode, operands[1])),
4909 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4910 arm_split_constant (SET, SImode, curr_insn,
4911 INTVAL (gen_highpart_mode (SImode,
4912 GET_MODE (operands[0]),
4914 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4919 ; If optimizing for size, or if we have load delay slots, then
4920 ; we want to split the constant into two separate operations.
4921 ; In both cases this may split a trivial part into a single data op
4922 ; leaving a single complex constant to load. We can also get longer
4923 ; offsets in a LDR which means we get better chances of sharing the pool
4924 ; entries. Finally, we can normally do a better job of scheduling
4925 ; LDR instructions than we can with LDM.
4926 ; This pattern will only match if the one above did not.
4928 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4929 (match_operand:ANY64 1 "const_double_operand" ""))]
4930 "TARGET_ARM && reload_completed
4931 && arm_const_double_by_parts (operands[1])"
4932 [(set (match_dup 0) (match_dup 1))
4933 (set (match_dup 2) (match_dup 3))]
4935 operands[2] = gen_highpart (SImode, operands[0]);
4936 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4938 operands[0] = gen_lowpart (SImode, operands[0]);
4939 operands[1] = gen_lowpart (SImode, operands[1]);
4944 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4945 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4946 "TARGET_EITHER && reload_completed"
4947 [(set (match_dup 0) (match_dup 1))
4948 (set (match_dup 2) (match_dup 3))]
4950 operands[2] = gen_highpart (SImode, operands[0]);
4951 operands[3] = gen_highpart (SImode, operands[1]);
4952 operands[0] = gen_lowpart (SImode, operands[0]);
4953 operands[1] = gen_lowpart (SImode, operands[1]);
4955 /* Handle a partial overlap. */
4956 if (rtx_equal_p (operands[0], operands[3]))
4958 rtx tmp0 = operands[0];
4959 rtx tmp1 = operands[1];
4961 operands[0] = operands[2];
4962 operands[1] = operands[3];
4969 ;; We can't actually do base+index doubleword loads if the index and
4970 ;; destination overlap. Split here so that we at least have chance to
4973 [(set (match_operand:DI 0 "s_register_operand" "")
4974 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4975 (match_operand:SI 2 "s_register_operand" ""))))]
4977 && reg_overlap_mentioned_p (operands[0], operands[1])
4978 && reg_overlap_mentioned_p (operands[0], operands[2])"
4980 (plus:SI (match_dup 1)
4983 (mem:DI (match_dup 4)))]
4985 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4989 ;;; ??? This should have alternatives for constants.
4990 ;;; ??? This was originally identical to the movdf_insn pattern.
4991 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4992 ;;; thumb_reorg with a memory reference.
4993 (define_insn "*thumb1_movdi_insn"
4994 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4995 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4997 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4998 && ( register_operand (operands[0], DImode)
4999 || register_operand (operands[1], DImode))"
5002 switch (which_alternative)
5006 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5007 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5008 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5010 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5012 operands[1] = GEN_INT (- INTVAL (operands[1]));
5013 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5015 return \"ldmia\\t%1, {%0, %H0}\";
5017 return \"stmia\\t%0, {%1, %H1}\";
5019 return thumb_load_double_from_address (operands);
5021 operands[2] = gen_rtx_MEM (SImode,
5022 plus_constant (XEXP (operands[0], 0), 4));
5023 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5026 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5027 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5028 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5031 [(set_attr "length" "4,4,6,2,2,6,4,4")
5032 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5033 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5034 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5037 (define_expand "movsi"
5038 [(set (match_operand:SI 0 "general_operand" "")
5039 (match_operand:SI 1 "general_operand" ""))]
5043 rtx base, offset, tmp;
5047 /* Everything except mem = const or mem = mem can be done easily. */
5048 if (GET_CODE (operands[0]) == MEM)
5049 operands[1] = force_reg (SImode, operands[1]);
5050 if (arm_general_register_operand (operands[0], SImode)
5051 && GET_CODE (operands[1]) == CONST_INT
5052 && !(const_ok_for_arm (INTVAL (operands[1]))
5053 || const_ok_for_arm (~INTVAL (operands[1]))))
5055 arm_split_constant (SET, SImode, NULL_RTX,
5056 INTVAL (operands[1]), operands[0], NULL_RTX,
5057 optimize && can_create_pseudo_p ());
5061 if (TARGET_USE_MOVT && !target_word_relocations
5062 && GET_CODE (operands[1]) == SYMBOL_REF
5063 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5065 arm_emit_movpair (operands[0], operands[1]);
5069 else /* TARGET_THUMB1... */
5071 if (can_create_pseudo_p ())
5073 if (GET_CODE (operands[0]) != REG)
5074 operands[1] = force_reg (SImode, operands[1]);
5078 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5080 split_const (operands[1], &base, &offset);
5081 if (GET_CODE (base) == SYMBOL_REF
5082 && !offset_within_block_p (base, INTVAL (offset)))
5084 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5085 emit_move_insn (tmp, base);
5086 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5091 /* Recognize the case where operand[1] is a reference to thread-local
5092 data and load its address to a register. */
5093 if (arm_tls_referenced_p (operands[1]))
5095 rtx tmp = operands[1];
5098 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5100 addend = XEXP (XEXP (tmp, 0), 1);
5101 tmp = XEXP (XEXP (tmp, 0), 0);
5104 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5105 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5107 tmp = legitimize_tls_address (tmp,
5108 !can_create_pseudo_p () ? operands[0] : 0);
5111 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5112 tmp = force_operand (tmp, operands[0]);
5117 && (CONSTANT_P (operands[1])
5118 || symbol_mentioned_p (operands[1])
5119 || label_mentioned_p (operands[1])))
5120 operands[1] = legitimize_pic_address (operands[1], SImode,
5121 (!can_create_pseudo_p ()
5128 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5129 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5130 ;; so this does not matter.
;; MOVT: writes the upper 16 bits of %0 with #:upper16: of the constant,
;; keeping the low half (operand 1 is tied to operand 0 via "0").  As the
;; comment above notes, ARM's HIGH/LO_SUM are used "backwards" here.
;; NOTE(review): the insn condition line is missing from this extract
;; (MOVT requires a Thumb-2/v6t2-capable core) -- confirm in full arm.md.
5131 (define_insn "*arm_movt"
5132 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5133 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5134 (match_operand:SI 2 "general_operand" "i")))]
5136 "movt%?\t%0, #:upper16:%c2"
5137 [(set_attr "predicable" "yes")
5138 (set_attr "length" "4")]
;; Basic ARM-state SImode move (no IWMMXT, no VFP -- those have their own
;; patterns).  Alternatives: mov reg ('rk' allows sp), mov #imm (I),
;; mvn #imm (K), movw (j), literal/memory load, and store.  One side must
;; be a register.
;; NOTE(review): the output template lines are missing from this extract;
;; the insn attr (mov,mov,mvn,mov,*,*) reflects them.
5141 (define_insn "*arm_movsi_insn"
5142 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5143 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5144 "TARGET_ARM && ! TARGET_IWMMXT
5145 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5146 && ( register_operand (operands[0], SImode)
5147 || register_operand (operands[1], SImode))"
5155 [(set_attr "type" "*,*,*,*,load1,store1")
5156 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5157 (set_attr "predicable" "yes")
5158 (set_attr "pool_range" "*,*,*,*,4096,*")
5159 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5163 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5164 (match_operand:SI 1 "const_int_operand" ""))]
5166 && (!(const_ok_for_arm (INTVAL (operands[1]))
5167 || const_ok_for_arm (~INTVAL (operands[1]))))"
5168 [(clobber (const_int 0))]
5170 arm_split_constant (SET, SImode, NULL_RTX,
5171 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move, nine alternatives covering lo regs, small
;; immediates (I/J/K -- J and K are 4-byte synthesized sequences, see the
;; splits below), ldmia/stmia ('>'), memory, and hi-reg moves (*l*h*k).
;; The conds attr records which forms set the flags.
;; NOTE(review): condition string start and the output templates are
;; missing from this extract.
5176 (define_insn "*thumb1_movsi_insn"
5177 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5178 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5180 && ( register_operand (operands[0], SImode)
5181 || register_operand (operands[1], SImode))"
5192 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5193 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5194 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5195 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5198 [(set (match_operand:SI 0 "register_operand" "")
5199 (match_operand:SI 1 "const_int_operand" ""))]
5200 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5201 [(set (match_dup 2) (match_dup 1))
5202 (set (match_dup 0) (neg:SI (match_dup 2)))]
5205 operands[1] = GEN_INT (- INTVAL (operands[1]));
5206 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5211 [(set (match_operand:SI 0 "register_operand" "")
5212 (match_operand:SI 1 "const_int_operand" ""))]
5213 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5214 [(set (match_dup 2) (match_dup 1))
5215 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5218 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5219 unsigned HOST_WIDE_INT mask = 0xff;
5222 for (i = 0; i < 25; i++)
5223 if ((val & (mask << i)) == val)
5226 /* Don't split if the shift is zero. */
5230 operands[1] = GEN_INT (val >> i);
5231 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5232 operands[3] = GEN_INT (i);
5236 ;; When generating pic, we need to load the symbol offset into a register.
5237 ;; So that the optimizer does not confuse this with a normal symbol load
5238 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5239 ;; since that is the only type of relocation we can use.
5241 ;; Wrap calculation of the whole PIC address in a single pattern for the
5242 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5243 ;; a PIC address involves two loads from memory, so we want to CSE it
5244 ;; as often as possible.
5245 ;; This pattern will be split into one of the pic_load_addr_* patterns
5246 ;; and a move after GCSE optimizations.
5248 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5249 (define_expand "calculate_pic_address"
5250 [(set (match_operand:SI 0 "register_operand" "")
5251 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5252 (unspec:SI [(match_operand:SI 2 "" "")]
5257 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5259 [(set (match_operand:SI 0 "register_operand" "")
5260 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5261 (unspec:SI [(match_operand:SI 2 "" "")]
5264 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5265 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5266 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5269 ;; The rather odd constraints on the following are to force reload to leave
5270 ;; the insn alone, and to force the minipool generation pass to then move
5271 ;; the GOT symbol to memory.
5273 (define_insn "pic_load_addr_32bit"
5274 [(set (match_operand:SI 0 "s_register_operand" "=r")
5275 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5276 "TARGET_32BIT && flag_pic"
5278 [(set_attr "type" "load1")
5279 (set_attr "pool_range" "4096")
5280 (set (attr "neg_pool_range")
5281 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 counterpart of pic_load_addr_32bit: loads the GOT-relative
;; symbol value from the literal pool into a lo register.  The "mX"
;; constraint keeps reload away so minipool placement handles the symbol
;; (see the comment above pic_load_addr_32bit).
;; NOTE(review): the output template line is missing from this extract.
5286 (define_insn "pic_load_addr_thumb1"
5287 [(set (match_operand:SI 0 "s_register_operand" "=l")
5288 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5289 "TARGET_THUMB1 && flag_pic"
5291 [(set_attr "type" "load1")
5292 (set (attr "pool_range") (const_int 1024))]
;; Thumb PIC base fixup: emits the LPICn label (numbered by operand 2)
;; then "add %0, pc", folding the pc-relative offset ("dot plus four" in
;; Thumb) into the PIC address.  Operand 1 is tied to operand 0.
;; NOTE(review): the unspec body and condition lines are partly missing
;; from this extract.
5295 (define_insn "pic_add_dot_plus_four"
5296 [(set (match_operand:SI 0 "register_operand" "=r")
5297 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5299 (match_operand 2 "" "")]
5303 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5304 INTVAL (operands[2]));
5305 return \"add\\t%0, %|pc\";
5307 [(set_attr "length" "2")]
;; ARM-state PIC base fixup: emits the LPICn label then the predicable
;; "add %0, pc, %1" ("dot plus eight" reflects the ARM pc read-ahead).
;; NOTE(review): the unspec body and condition lines are partly missing
;; from this extract.
5310 (define_insn "pic_add_dot_plus_eight"
5311 [(set (match_operand:SI 0 "register_operand" "=r")
5312 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5314 (match_operand 2 "" "")]
5318 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5319 INTVAL (operands[2]));
5320 return \"add%?\\t%0, %|pc, %1\";
5322 [(set_attr "predicable" "yes")]
;; Fused form of pic_add_dot_plus_eight followed by a load, produced by
;; the peephole below: emits the LPICn label then a single pc-relative
;; "ldr %0, [pc, %1]".
;; NOTE(review): the unspec body and condition lines are partly missing
;; from this extract.
5325 (define_insn "tls_load_dot_plus_eight"
5326 [(set (match_operand:SI 0 "register_operand" "=r")
5327 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5329 (match_operand 2 "" "")]
5333 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5334 INTVAL (operands[2]));
5335 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5337 [(set_attr "predicable" "yes")]
5340 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5341 ;; followed by a load. These sequences can be crunched down to
5342 ;; tls_load_dot_plus_eight by a peephole.
5345 [(set (match_operand:SI 0 "register_operand" "")
5346 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5348 (match_operand 1 "" "")]
5350 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5351 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5353 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load a value at a PIC-offset displacement from the
;; base register with a single indexed ldr.  The UNSPEC_PIC_OFFSET
;; wrapper keeps the offset opaque to the optimizers.
5360 (define_insn "pic_offset_arm"
5361 [(set (match_operand:SI 0 "register_operand" "=r")
5362 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5363 (unspec:SI [(match_operand:SI 2 "" "X")]
5364 UNSPEC_PIC_OFFSET))))]
5365 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5366 "ldr%?\\t%0, [%1,%2]"
5367 [(set_attr "type" "load1")]
;; Landing pad for __builtin_setjmp: after a longjmp the PIC register may
;; be clobbered, so reload it here, using r3 (1UL << 3) as scratch since
;; set/longjmp already clobber it.
;; NOTE(review): the condition string and closing lines of this expander
;; are missing from this extract.
5370 (define_expand "builtin_setjmp_receiver"
5371 [(label_ref (match_operand 0 "" ""))]
5375 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5377 if (arm_pic_register != INVALID_REGNUM)
5378 arm_load_pic_register (1UL << 3);
5382 ;; If copying one reg to another we can set the condition codes according to
5383 ;; its value. Such a move is common after a return from subroutine and the
5384 ;; result is being tested against zero.
;; Register move that also sets the condition codes from the moved value
;; (see the comment above: common after a call whose result is tested
;; against zero).  Alternative 0 ties source to dest, avoiding the copy.
;; NOTE(review): the comparison's second arm, output templates, and part
;; of the parallel are missing from this extract.
5386 (define_insn "*movsi_compare0"
5387 [(set (reg:CC CC_REGNUM)
5388 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5390 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5396 [(set_attr "conds" "set")]
5399 ;; Subroutine to store a half word from a register into memory.
5400 ;; Operand 0 is the source register (HImode)
5401 ;; Operand 1 is the destination address in a register (SImode)
5403 ;; In both this routine and the next, we must be careful not to spill
5404 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5405 ;; can generate unrecognizable rtl.
;; The halfword is stored as two byte stores: low byte first, then the value
;; shifted right by 8 is stored at address + 1 (little-endian layout).
5407 (define_expand "storehi"
5408 [;; store the low byte
5409 (set (match_operand 1 "" "") (match_dup 3))
5410 ;; extract the high byte
5412 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5413 ;; store the high byte
5414 (set (match_dup 4) (match_dup 5))]
5418 rtx op1 = operands[1];
5419 rtx addr = XEXP (op1, 0);
5420 enum rtx_code code = GET_CODE (addr);
/* Force a complex address into a register so the byte stores below use a
   simple base (see the spill warning above).  */
5422 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5424 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
/* operands[3]/operands[1]: low-byte value/address;
   operands[5]/operands[4]: high-byte value/address.  */
5426 operands[4] = adjust_address (op1, QImode, 1);
5427 operands[1] = adjust_address (operands[1], QImode, 0);
5428 operands[3] = gen_lowpart (QImode, operands[0]);
5429 operands[0] = gen_lowpart (SImode, operands[0]);
5430 operands[2] = gen_reg_rtx (SImode);
5431 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: the byte order of the two stores is
;; swapped (high byte goes to address + 1's counterpart first).
5435 (define_expand "storehi_bigend"
5436 [(set (match_dup 4) (match_dup 3))
5438 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5439 (set (match_operand 1 "" "") (match_dup 5))]
5443 rtx op1 = operands[1];
5444 rtx addr = XEXP (op1, 0);
5445 enum rtx_code code = GET_CODE (addr);
/* As in storehi: keep reg+large_const addresses out of a separate PLUS.  */
5447 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5449 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5451 operands[4] = adjust_address (op1, QImode, 1);
5452 operands[1] = adjust_address (operands[1], QImode, 0);
5453 operands[3] = gen_lowpart (QImode, operands[0]);
5454 operands[0] = gen_lowpart (SImode, operands[0]);
5455 operands[2] = gen_reg_rtx (SImode);
5456 operands[5] = gen_lowpart (QImode, operands[2]);
5460 ;; Subroutine to store a half word integer constant into memory.
;; The constant is split into its two bytes, each moved into an SImode
;; register and stored with a byte store; order depends on endianness.
5461 (define_expand "storeinthi"
5462 [(set (match_operand 0 "" "")
5463 (match_operand 1 "" ""))
5464 (set (match_dup 3) (match_dup 2))]
5468 HOST_WIDE_INT value = INTVAL (operands[1]);
5469 rtx addr = XEXP (operands[0], 0);
5470 rtx op0 = operands[0];
5471 enum rtx_code code = GET_CODE (addr);
/* Keep reg+large_const addresses out of a separate PLUS (see storehi).  */
5473 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5475 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5477 operands[1] = gen_reg_rtx (SImode);
/* Split the constant into two byte values, ordered by endianness; when both
   bytes are equal, reuse the same register for the second store.  */
5478 if (BYTES_BIG_ENDIAN)
5480 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5481 if ((value & 255) == ((value >> 8) & 255))
5482 operands[2] = operands[1];
5485 operands[2] = gen_reg_rtx (SImode);
5486 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5491 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5492 if ((value & 255) == ((value >> 8) & 255))
5493 operands[2] = operands[1];
5496 operands[2] = gen_reg_rtx (SImode);
5497 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5501 operands[3] = adjust_address (op0, QImode, 1);
5502 operands[0] = adjust_address (operands[0], QImode, 0);
5503 operands[2] = gen_lowpart (QImode, operands[2]);
5504 operands[1] = gen_lowpart (QImode, operands[1]);
;; armv4 and later have halfword stores, so an HImode store is a single
;; instruction; just make sure the source value is in a register.
5508 (define_expand "storehi_single_op"
5509 [(set (match_operand:HI 0 "memory_operand" "")
5510 (match_operand:HI 1 "general_operand" ""))]
5511 "TARGET_32BIT && arm_arch4"
5513 if (!s_register_operand (operands[1], HImode))
5514 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; General HImode move expander.  Dispatches on target (ARM / Thumb-2 /
;; Thumb-1), on whether new pseudos may still be created, and on the operand
;; kinds (mem / reg / const).  Stores go through the storehi* expanders;
;; before armv4 (no ldrh/strh) loads fall back to byte operations or to a
;; widened aligned word load.
5518 (define_expand "movhi"
5519 [(set (match_operand:HI 0 "general_operand" "")
5520 (match_operand:HI 1 "general_operand" ""))]
5525 if (can_create_pseudo_p ())
5527 if (GET_CODE (operands[0]) == MEM)
5531 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5534 if (GET_CODE (operands[1]) == CONST_INT)
5535 emit_insn (gen_storeinthi (operands[0], operands[1]));
5538 if (GET_CODE (operands[1]) == MEM)
5539 operands[1] = force_reg (HImode, operands[1]);
5540 if (BYTES_BIG_ENDIAN)
5541 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5543 emit_insn (gen_storehi (operands[1], operands[0]));
5547 /* Sign extend a constant, and keep it in an SImode reg. */
5548 else if (GET_CODE (operands[1]) == CONST_INT)
5550 rtx reg = gen_reg_rtx (SImode);
5551 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5553 /* If the constant is already valid, leave it alone. */
5554 if (!const_ok_for_arm (val))
5556 /* If setting all the top bits will make the constant
5557 loadable in a single instruction, then set them.
5558 Otherwise, sign extend the number. */
5560 if (const_ok_for_arm (~(val | ~0xffff)))
5562 else if (val & 0x8000)
5566 emit_insn (gen_movsi (reg, GEN_INT (val)));
5567 operands[1] = gen_lowpart (HImode, reg);
5569 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5570 && GET_CODE (operands[1]) == MEM)
5572 rtx reg = gen_reg_rtx (SImode);
5574 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5575 operands[1] = gen_lowpart (HImode, reg);
5577 else if (!arm_arch4)
5579 if (GET_CODE (operands[1]) == MEM)
5582 rtx offset = const0_rtx;
5583 rtx reg = gen_reg_rtx (SImode);
/* If the address is a sufficiently aligned base (+ even constant offset),
   load the containing aligned word and shift the halfword into place.  */
5585 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5586 || (GET_CODE (base) == PLUS
5587 && (GET_CODE (offset = XEXP (base, 1))
5589 && ((INTVAL(offset) & 1) != 1)
5590 && GET_CODE (base = XEXP (base, 0)) == REG))
5591 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5595 new_rtx = widen_memory_access (operands[1], SImode,
5596 ((INTVAL (offset) & ~3)
5597 - INTVAL (offset)));
5598 emit_insn (gen_movsi (reg, new_rtx));
5599 if (((INTVAL (offset) & 2) != 0)
5600 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5602 rtx reg2 = gen_reg_rtx (SImode);
5604 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5609 emit_insn (gen_movhi_bytes (reg, operands[1]));
5611 operands[1] = gen_lowpart (HImode, reg);
5615 /* Handle loading a large integer during reload. */
5616 else if (GET_CODE (operands[1]) == CONST_INT
5617 && !const_ok_for_arm (INTVAL (operands[1]))
5618 && !const_ok_for_arm (~INTVAL (operands[1])))
5620 /* Writing a constant to memory needs a scratch, which should
5621 be handled with SECONDARY_RELOADs. */
5622 gcc_assert (GET_CODE (operands[0]) == REG);
5624 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5625 emit_insn (gen_movsi (operands[0], operands[1]));
5629 else if (TARGET_THUMB2)
5631 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5632 if (can_create_pseudo_p ())
5634 if (GET_CODE (operands[0]) != REG)
5635 operands[1] = force_reg (HImode, operands[1]);
5636 /* Zero extend a constant, and keep it in an SImode reg. */
5637 else if (GET_CODE (operands[1]) == CONST_INT)
5639 rtx reg = gen_reg_rtx (SImode);
5640 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5642 emit_insn (gen_movsi (reg, GEN_INT (val)));
5643 operands[1] = gen_lowpart (HImode, reg);
5647 else /* TARGET_THUMB1 */
5649 if (can_create_pseudo_p ())
5651 if (GET_CODE (operands[1]) == CONST_INT)
5653 rtx reg = gen_reg_rtx (SImode);
5655 emit_insn (gen_movsi (reg, operands[1]));
5656 operands[1] = gen_lowpart (HImode, reg);
5659 /* ??? We shouldn't really get invalid addresses here, but this can
5660 happen if we are passed a SP (never OK for HImode/QImode) or
5661 virtual register (also rejected as illegitimate for HImode/QImode)
5662 relative address. */
5663 /* ??? This should perhaps be fixed elsewhere, for instance, in
5664 fixup_stack_1, by checking for other kinds of invalid addresses,
5665 e.g. a bare reference to a virtual register. This may confuse the
5666 alpha though, which must handle this case differently. */
5667 if (GET_CODE (operands[0]) == MEM
5668 && !memory_address_p (GET_MODE (operands[0]),
5669 XEXP (operands[0], 0)))
5671 = replace_equiv_address (operands[0],
5672 copy_to_reg (XEXP (operands[0], 0)));
5674 if (GET_CODE (operands[1]) == MEM
5675 && !memory_address_p (GET_MODE (operands[1]),
5676 XEXP (operands[1], 0)))
5678 = replace_equiv_address (operands[1],
5679 copy_to_reg (XEXP (operands[1], 0)));
5681 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5683 rtx reg = gen_reg_rtx (SImode);
5685 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5686 operands[1] = gen_lowpart (HImode, reg);
5689 if (GET_CODE (operands[0]) == MEM)
5690 operands[1] = force_reg (HImode, operands[1]);
5692 else if (GET_CODE (operands[1]) == CONST_INT
5693 && !satisfies_constraint_I (operands[1]))
5695 /* Handle loading a large integer during reload. */
5697 /* Writing a constant to memory needs a scratch, which should
5698 be handled with SECONDARY_RELOADs. */
5699 gcc_assert (GET_CODE (operands[0]) == REG);
5701 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5702 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move insn.  Alternatives: reg copy (add #0), ldrh, strh,
;; moves to/from hi registers, and an immediate.  The load case falls through
;; to the C code after the switch, which also works around an SP-relative
;; address being used as the base (ldrh cannot take SP as an index base).
5709 (define_insn "*thumb1_movhi_insn"
5710 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5711 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5713 && ( register_operand (operands[0], HImode)
5714 || register_operand (operands[1], HImode))"
5716 switch (which_alternative)
5718 case 0: return \"add %0, %1, #0\";
5719 case 2: return \"strh %1, %0\";
5720 case 3: return \"mov %0, %1\";
5721 case 4: return \"mov %0, %1\";
5722 case 5: return \"mov %0, %1\";
5723 default: gcc_unreachable ();
5725 /* The stack pointer can end up being taken as an index register.
5726 Catch this case here and deal with it. */
5727 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5728 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5729 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5732 ops[0] = operands[0];
5733 ops[1] = XEXP (XEXP (operands[1], 0), 0);
/* Copy SP into the destination register and use it as the base instead.  */
5735 output_asm_insn (\"mov %0, %1\", ops);
5737 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5740 return \"ldrh %0, %1\";
5742 [(set_attr "length" "2,4,2,2,2,2")
5743 (set_attr "type" "*,load1,store1,*,*,*")
5744 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Load a halfword as two byte loads (for targets without ldrh): zero-extend
;; each byte, then OR the high byte shifted left by 8 into the result.
;; Which loaded byte is "high" depends on BYTES_BIG_ENDIAN (operands 4/5).
5747 (define_expand "movhi_bytes"
5748 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5750 (zero_extend:SI (match_dup 6)))
5751 (set (match_operand:SI 0 "" "")
5752 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
/* Force the address into a register so adjacent byte addresses are valid.  */
5757 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5759 mem1 = change_address (operands[1], QImode, addr);
5760 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5761 operands[0] = gen_lowpart (SImode, operands[0]);
5763 operands[2] = gen_reg_rtx (SImode);
5764 operands[3] = gen_reg_rtx (SImode);
5767 if (BYTES_BIG_ENDIAN)
5769 operands[4] = operands[2];
5770 operands[5] = operands[3];
5774 operands[4] = operands[3];
5775 operands[5] = operands[2];
;; Big-endian halfword load: load the containing word, rotate, then
;; arithmetic-shift right by 16 so the halfword lands in the low bits;
;; the final result is the HImode lowpart of that register.
5780 (define_expand "movhi_bigend"
5782 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5785 (ashiftrt:SI (match_dup 2) (const_int 16)))
5786 (set (match_operand:HI 0 "s_register_operand" "")
5790 operands[2] = gen_reg_rtx (SImode);
5791 operands[3] = gen_reg_rtx (SImode);
5792 operands[4] = gen_lowpart (HImode, operands[3]);
5796 ;; Pattern to recognize insn generated default case above
;; armv4+ HImode move: mov reg/imm, mvn of an inverted constant (K),
;; strh store, ldrh load (including literal-pool loads, hence pool_range).
5797 (define_insn "*movhi_insn_arch4"
5798 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5799 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
5802 && (register_operand (operands[0], HImode)
5803 || register_operand (operands[1], HImode))"
5805 mov%?\\t%0, %1\\t%@ movhi
5806 mvn%?\\t%0, #%B1\\t%@ movhi
5807 str%(h%)\\t%1, %0\\t%@ movhi
5808 ldr%(h%)\\t%0, %1\\t%@ movhi"
5809 [(set_attr "type" "*,*,store1,load1")
5810 (set_attr "predicable" "yes")
5811 (set_attr "insn" "mov,mvn,*,*")
5812 (set_attr "pool_range" "*,*,*,256")
5813 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate HImode moves (no memory alternatives): plain mov, or
;; mvn of the bitwise-inverted constant (constraint K).
5816 (define_insn "*movhi_bytes"
5817 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5818 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5821 mov%?\\t%0, %1\\t%@ movhi
5822 mvn%?\\t%0, #%B1\\t%@ movhi"
5823 [(set_attr "predicable" "yes")
5824 (set_attr "insn" "mov,mvn")]
;; HImode store with a DImode scratch clobber (reload helper).  Only the
;; easy case — already-valid address and a lo source register — is handled;
;; everything else is still unimplemented (see the XXX note).
5827 (define_expand "thumb_movhi_clobber"
5828 [(set (match_operand:HI 0 "memory_operand" "")
5829 (match_operand:HI 1 "register_operand" ""))
5830 (clobber (match_operand:DI 2 "register_operand" ""))]
5833 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5834 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5836 emit_insn (gen_movhi (operands[0], operands[1]));
5839 /* XXX Fixme, need to handle other cases here as well. */
5844 ;; We use a DImode scratch because we may occasionally need an additional
5845 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5846 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; Store an HImode value during reload, via the target-specific helpers.
5847 (define_expand "reload_outhi"
5848 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5849 (match_operand:HI 1 "s_register_operand" "r")
5850 (match_operand:DI 2 "s_register_operand" "=&l")])]
5853 arm_reload_out_hi (operands);
5855 thumb_reload_out_hi (operands);
;; Load an HImode value during reload (DImode scratch, as for reload_outhi).
5860 (define_expand "reload_inhi"
5861 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5862 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5863 (match_operand:DI 2 "s_register_operand" "=&r")])]
5867 arm_reload_in_hi (operands);
5869 thumb_reload_out_hi (operands);
;; General QImode move expander: constants are forced into registers (masked
;; to 8 bits for Thumb so a movs can be used), bad HImode/QImode addresses
;; are legitimized, and loads are done via zero-extension when optimizing.
5873 (define_expand "movqi"
5874 [(set (match_operand:QI 0 "general_operand" "")
5875 (match_operand:QI 1 "general_operand" ""))]
5878 /* Everything except mem = const or mem = mem can be done easily */
5880 if (can_create_pseudo_p ())
5882 if (GET_CODE (operands[1]) == CONST_INT)
5884 rtx reg = gen_reg_rtx (SImode);
5886 /* For thumb we want an unsigned immediate, then we are more likely
5887 to be able to use a movs insn. */
5889 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5891 emit_insn (gen_movsi (reg, operands[1]));
5892 operands[1] = gen_lowpart (QImode, reg);
5897 /* ??? We shouldn't really get invalid addresses here, but this can
5898 happen if we are passed a SP (never OK for HImode/QImode) or
5899 virtual register (also rejected as illegitimate for HImode/QImode)
5900 relative address. */
5901 /* ??? This should perhaps be fixed elsewhere, for instance, in
5902 fixup_stack_1, by checking for other kinds of invalid addresses,
5903 e.g. a bare reference to a virtual register. This may confuse the
5904 alpha though, which must handle this case differently. */
5905 if (GET_CODE (operands[0]) == MEM
5906 && !memory_address_p (GET_MODE (operands[0]),
5907 XEXP (operands[0], 0)))
5909 = replace_equiv_address (operands[0],
5910 copy_to_reg (XEXP (operands[0], 0)));
5911 if (GET_CODE (operands[1]) == MEM
5912 && !memory_address_p (GET_MODE (operands[1]),
5913 XEXP (operands[1], 0)))
5915 = replace_equiv_address (operands[1],
5916 copy_to_reg (XEXP (operands[1], 0)));
5919 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5921 rtx reg = gen_reg_rtx (SImode);
5923 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5924 operands[1] = gen_lowpart (QImode, reg);
5927 if (GET_CODE (operands[0]) == MEM)
5928 operands[1] = force_reg (QImode, operands[1]);
5930 else if (TARGET_THUMB
5931 && GET_CODE (operands[1]) == CONST_INT
5932 && !satisfies_constraint_I (operands[1]))
5934 /* Handle loading a large integer during reload. */
5936 /* Writing a constant to memory needs a scratch, which should
5937 be handled with SECONDARY_RELOADs. */
5938 gcc_assert (GET_CODE (operands[0]) == REG);
5940 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5941 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM/Thumb-2 QImode move.  Per the constraints and attrs: mov reg/imm,
;; mvn of an inverted constant (K), byte load, byte store.
;; (The output templates themselves are on lines not shown in this view.)
5948 (define_insn "*arm_movqi_insn"
5949 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5950 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5952 && ( register_operand (operands[0], QImode)
5953 || register_operand (operands[1], QImode))"
5959 [(set_attr "type" "*,*,load1,store1")
5960 (set_attr "insn" "mov,mvn,*,*")
5961 (set_attr "predicable" "yes")]
;; Thumb-1 QImode move.  Per the constraints and attrs: lo-reg copy, byte
;; load (with a 32-byte literal pool range), byte store, and hi/lo register
;; moves plus an immediate move.
5964 (define_insn "*thumb1_movqi_insn"
5965 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5966 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5968 && ( register_operand (operands[0], QImode)
5969 || register_operand (operands[1], QImode))"
5977 [(set_attr "length" "2")
5978 (set_attr "type" "*,load1,store1,*,*,*")
5979 (set_attr "insn" "*,*,*,mov,mov,mov")
5980 (set_attr "pool_range" "*,32,*,*,*,*")
5981 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (half-precision float) move expander: a store needs its source in
;; a register; Thumb-1 additionally forces the source into a register for
;; any non-register destination while pseudos are still available.
5984 (define_expand "movhf"
5985 [(set (match_operand:HF 0 "general_operand" "")
5986 (match_operand:HF 1 "general_operand" ""))]
5991 if (GET_CODE (operands[0]) == MEM)
5992 operands[1] = force_reg (HFmode, operands[1]);
5994 else /* TARGET_THUMB1 */
5996 if (can_create_pseudo_p ())
5998 if (GET_CODE (operands[0]) != REG)
5999 operands[1] = force_reg (HFmode, operands[1]);
;; __fp16 moves for 32-bit targets without hardware FP16 support:
;; halfword load/store, register copy, and constants materialized from the
;; 16-bit target image of the value (movw when Thumb-2-style immediates are
;; available, otherwise mov of the high byte + orr of the low byte).
6005 (define_insn "*arm32_movhf"
6006 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6007 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6008 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6009 && ( s_register_operand (operands[0], HFmode)
6010 || s_register_operand (operands[1], HFmode))"
6012 switch (which_alternative)
6014 case 0: /* ARM register from memory */
6015 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6016 case 1: /* memory from ARM register */
6017 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6018 case 2: /* ARM register from ARM register */
6019 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6020 case 3: /* ARM register from constant */
/* Convert the constant to its raw 16-bit representation, then split it
   into high/low bytes for the mov+orr fallback.  */
6026 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6027 bits = real_to_target (NULL, &r, HFmode);
6028 ops[0] = operands[0];
6029 ops[1] = GEN_INT (bits);
6030 ops[2] = GEN_INT (bits & 0xff00);
6031 ops[3] = GEN_INT (bits & 0x00ff);
6033 if (arm_arch_thumb2)
6034 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6036 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6043 [(set_attr "conds" "unconditional")
6044 (set_attr "type" "load1,store1,*,*")
6045 (set_attr "insn" "*,*,mov,mov")
6046 (set_attr "length" "4,4,4,8")
6047 (set_attr "predicable" "yes")]
;; Thumb-1 __fp16 move.  The load alternative distinguishes a constant-pool
;; reference (label-relative address: full-word ldr) from an ordinary
;; halfword load (ldrh); other alternatives are strh and register moves.
6050 (define_insn "*thumb1_movhf"
6051 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6052 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6054 && ( s_register_operand (operands[0], HFmode)
6055 || s_register_operand (operands[1], HFmode))"
6057 switch (which_alternative)
6062 gcc_assert (GET_CODE(operands[1]) == MEM);
6063 addr = XEXP (operands[1], 0);
6064 if (GET_CODE (addr) == LABEL_REF
6065 || (GET_CODE (addr) == CONST
6066 && GET_CODE (XEXP (addr, 0)) == PLUS
6067 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6068 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6070 /* Constant pool entry. */
6071 return \"ldr\\t%0, %1\";
6073 return \"ldrh\\t%0, %1\";
6075 case 2: return \"strh\\t%1, %0\";
6076 default: return \"mov\\t%0, %1\";
6079 [(set_attr "length" "2")
6080 (set_attr "type" "*,load1,store1,*,*")
6081 (set_attr "insn" "mov,*,*,mov,mov")
6082 (set_attr "pool_range" "*,1020,*,*,*")
6083 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander: stores need a register source; Thumb-1 forces the
;; source into a register for any non-register destination while pseudos
;; can still be created.
6085 (define_expand "movsf"
6086 [(set (match_operand:SF 0 "general_operand" "")
6087 (match_operand:SF 1 "general_operand" ""))]
6092 if (GET_CODE (operands[0]) == MEM)
6093 operands[1] = force_reg (SFmode, operands[1]);
6095 else /* TARGET_THUMB1 */
6097 if (can_create_pseudo_p ())
6099 if (GET_CODE (operands[0]) != REG)
6100 operands[1] = force_reg (SFmode, operands[1]);
6106 ;; Transform a floating-point move of a constant into a core register into
6107 ;; an SImode operation.
;; Operands 2 and 3 are the SImode lowparts of the destination register and
;; of the constant; the split is abandoned if either cannot be formed
;; (NOTE(review): the failing branch's code is on a line not shown here).
6109 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6110 (match_operand:SF 1 "immediate_operand" ""))]
6113 && GET_CODE (operands[1]) == CONST_DOUBLE"
6114 [(set (match_dup 2) (match_dup 3))]
6116 operands[2] = gen_lowpart (SImode, operands[0]);
6117 operands[3] = gen_lowpart (SImode, operands[1]);
6118 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move: register copy, literal-pool/word load, word store.
6123 (define_insn "*arm_movsf_soft_insn"
6124 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6125 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6127 && TARGET_SOFT_FLOAT
6128 && (GET_CODE (operands[0]) != MEM
6129 || register_operand (operands[1], SFmode))"
6132 ldr%?\\t%0, %1\\t%@ float
6133 str%?\\t%1, %0\\t%@ float"
6134 [(set_attr "predicable" "yes")
6135 (set_attr "type" "*,load1,store1")
6136 (set_attr "insn" "mov,*,*")
6137 (set_attr "pool_range" "*,4096,*")
6138 (set_attr "arm_neg_pool_range" "*,4084,*")
6139 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6142 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move; includes post-increment memory alternatives (">")
;; and a 1020-byte literal pool range for the load-with-constant case.
6143 (define_insn "*thumb1_movsf_insn"
6144 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6145 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6147 && ( register_operand (operands[0], SFmode)
6148 || register_operand (operands[1], SFmode))"
6157 [(set_attr "length" "2")
6158 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6159 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6160 (set_attr "insn" "*,*,*,*,*,mov,mov")
6161 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander; same shape as movsf/movhf: stores need a register
;; source, and Thumb forces the source into a register for non-register
;; destinations while pseudos are available.
6164 (define_expand "movdf"
6165 [(set (match_operand:DF 0 "general_operand" "")
6166 (match_operand:DF 1 "general_operand" ""))]
6171 if (GET_CODE (operands[0]) == MEM)
6172 operands[1] = force_reg (DFmode, operands[1]);
6174 else /* TARGET_THUMB */
6176 if (can_create_pseudo_p ())
6178 if (GET_CODE (operands[0]) != REG)
6179 operands[1] = force_reg (DFmode, operands[1]);
6185 ;; Reloading a df mode value stored in integer regs to memory can require a
;; scratch register (operand 2 below) when the address must be recomputed —
;; e.g. for pre-increment or a reg+reg sum.
6187 (define_expand "reload_outdf"
6188 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6189 (match_operand:DF 1 "s_register_operand" "r")
6190 (match_operand:SI 2 "s_register_operand" "=&r")]
6194 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6197 operands[2] = XEXP (operands[0], 0);
6198 else if (code == POST_INC || code == PRE_DEC)
/* movdi already handles these autoincrement forms: store as DImode.  */
6200 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6201 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6202 emit_insn (gen_movdi (operands[0], operands[1]));
6205 else if (code == PRE_INC)
6207 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6209 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6212 else if (code == POST_DEC)
6213 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6215 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6216 XEXP (XEXP (operands[0], 0), 1)));
/* Store through the (possibly recomputed) address in the scratch reg.  */
6218 emit_insn (gen_rtx_SET (VOIDmode,
6219 replace_equiv_address (operands[0], operands[2]),
6222 if (code == POST_DEC)
6223 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move: register-pair moves (constraints Da/Db/Dc select
;; increasingly expensive constant/register forms — lengths 8/12/16), plus
;; double-word load and store handled by output_move_double.
6229 (define_insn "*movdf_soft_insn"
6230 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6231 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6232 "TARGET_32BIT && TARGET_SOFT_FLOAT
6233 && ( register_operand (operands[0], DFmode)
6234 || register_operand (operands[1], DFmode))"
6236 switch (which_alternative)
6243 return output_move_double (operands);
6246 [(set_attr "length" "8,12,16,8,8")
6247 (set_attr "type" "*,*,*,load2,store2")
6248 (set_attr "pool_range" "*,*,*,1020,*")
6249 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6250 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6253 ;;; ??? This should have alternatives for constants.
6254 ;;; ??? This was originally identical to the movdi_insn pattern.
6255 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6256 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DFmode (register-pair) move.  Register copies order the two word
;; moves so the source pair is not clobbered before it is fully read.
6257 (define_insn "*thumb_movdf_insn"
6258 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6259 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6261 && ( register_operand (operands[0], DFmode)
6262 || register_operand (operands[1], DFmode))"
6264 switch (which_alternative)
/* Pick the copy order that avoids overwriting the source pair.  */
6268 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6269 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6270 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6272 return \"ldmia\\t%1, {%0, %H0}\";
6274 return \"stmia\\t%0, {%1, %H1}\";
6276 return thumb_load_double_from_address (operands);
6278 operands[2] = gen_rtx_MEM (SImode,
6279 plus_constant (XEXP (operands[0], 0), 4));
6280 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6283 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6284 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6285 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6288 [(set_attr "length" "4,2,2,6,4,4")
6289 (set_attr "type" "*,load2,store2,load2,store2,*")
6290 (set_attr "insn" "*,*,*,*,*,mov")
6291 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (FPA extended-precision) move expander: stores need the source
;; value in a register.
6294 (define_expand "movxf"
6295 [(set (match_operand:XF 0 "general_operand" "")
6296 (match_operand:XF 1 "general_operand" ""))]
6297 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6299 if (GET_CODE (operands[0]) == MEM)
6300 operands[1] = force_reg (XFmode, operands[1]);
6306 ;; load- and store-multiple insns
6307 ;; The arm can load/store any set of registers, provided that they are in
6308 ;; ascending order, but these expanders assume a contiguous set.
;; Expand an ldm of 2-14 consecutive core registers starting at operand 0;
;; anything outside that range (or a non-reg/non-mem operand) FAILs so the
;; generic code falls back to individual moves.
6310 (define_expand "load_multiple"
6311 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6312 (match_operand:SI 1 "" ""))
6313 (use (match_operand:SI 2 "" ""))])]
6316 HOST_WIDE_INT offset = 0;
6318 /* Support only fixed point registers. */
6319 if (GET_CODE (operands[2]) != CONST_INT
6320 || INTVAL (operands[2]) > 14
6321 || INTVAL (operands[2]) < 2
6322 || GET_CODE (operands[1]) != MEM
6323 || GET_CODE (operands[0]) != REG
6324 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6325 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6329 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6330 INTVAL (operands[2]),
6331 force_reg (SImode, XEXP (operands[1], 0)),
6332 FALSE, operands[1], &offset);
;; Expand an stm of 2-14 consecutive core registers starting at operand 1;
;; mirrors load_multiple above, FAILing outside the supported range.
6335 (define_expand "store_multiple"
6336 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6337 (match_operand:SI 1 "" ""))
6338 (use (match_operand:SI 2 "" ""))])]
6341 HOST_WIDE_INT offset = 0;
6343 /* Support only fixed point registers. */
6344 if (GET_CODE (operands[2]) != CONST_INT
6345 || INTVAL (operands[2]) > 14
6346 || INTVAL (operands[2]) < 2
6347 || GET_CODE (operands[1]) != REG
6348 || GET_CODE (operands[0]) != MEM
6349 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6350 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6354 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6355 INTVAL (operands[2]),
6356 force_reg (SImode, XEXP (operands[0], 0)),
6357 FALSE, operands[0], &offset);
6361 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6362 ;; We could let this apply for blocks of less than this, but it clobbers so
6363 ;; many registers that there is then probably a better way.
;; Block-copy expander: operand 2 is the byte count, operand 3 the alignment.
;; Thumb-1 only handles word-aligned copies of at most 48 bytes; everything
;; else FAILs to the generic by-pieces code.
6365 (define_expand "movmemqi"
6366 [(match_operand:BLK 0 "general_operand" "")
6367 (match_operand:BLK 1 "general_operand" "")
6368 (match_operand:SI 2 "const_int_operand" "")
6369 (match_operand:SI 3 "const_int_operand" "")]
6374 if (arm_gen_movmemqi (operands))
6378 else /* TARGET_THUMB1 */
6380 if ( INTVAL (operands[3]) != 4
6381 || INTVAL (operands[2]) > 48)
6384 thumb_expand_movmemqi (operands);
6390 ;; Thumb block-move insns
;; Copy 12 bytes (3 words) from [operand 3] to [operand 2], leaving both
;; pointers advanced by 12 in operands 0/1; needs three lo scratch registers.
6392 (define_insn "movmem12b"
6393 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6394 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6395 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6396 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6397 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6398 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6399 (set (match_operand:SI 0 "register_operand" "=l")
6400 (plus:SI (match_dup 2) (const_int 12)))
6401 (set (match_operand:SI 1 "register_operand" "=l")
6402 (plus:SI (match_dup 3) (const_int 12)))
6403 (clobber (match_scratch:SI 4 "=&l"))
6404 (clobber (match_scratch:SI 5 "=&l"))
6405 (clobber (match_scratch:SI 6 "=&l"))]
6407 "* return thumb_output_move_mem_multiple (3, operands);"
6408 [(set_attr "length" "4")
6409 ; This isn't entirely accurate... It loads as well, but in terms of
6410 ; scheduling the following insn it is better to consider it as a store
6411 (set_attr "type" "store3")]
;; Copy 8 bytes (2 words), advancing both pointers by 8; two lo scratches.
6414 (define_insn "movmem8b"
6415 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6416 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6417 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6418 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6419 (set (match_operand:SI 0 "register_operand" "=l")
6420 (plus:SI (match_dup 2) (const_int 8)))
6421 (set (match_operand:SI 1 "register_operand" "=l")
6422 (plus:SI (match_dup 3) (const_int 8)))
6423 (clobber (match_scratch:SI 4 "=&l"))
6424 (clobber (match_scratch:SI 5 "=&l"))]
6426 "* return thumb_output_move_mem_multiple (2, operands);"
6427 [(set_attr "length" "4")
6428 ; This isn't entirely accurate... It loads as well, but in terms of
6429 ; scheduling the following insn it is better to consider it as a store
6430 (set_attr "type" "store2")]
6435 ;; Compare & branch insns
6436 ;; The range calculations are based as follows:
6437 ;; For forward branches, the address calculation returns the address of
6438 ;; the next instruction. This is 2 beyond the branch instruction.
6439 ;; For backward branches, the address calculation returns the address of
6440 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6441 ;; instruction for the shortest sequence, and 4 before the branch instruction
6442 ;; if we have to jump around an unconditional branch.
6443 ;; To the basic branch range the PC offset must be added (this is +4).
6444 ;; So for forward branches we have
6445 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6446 ;; And for backward branches we have
6447 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6449 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6450 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode conditional branch expander.  32-bit targets go through
;; cbranch_cc; Thumb-1 uses the negated-compare form when possible, and
;; otherwise forces the comparison operand into a register.
6452 (define_expand "cbranchsi4"
6453 [(set (pc) (if_then_else
6454 (match_operator 0 "arm_comparison_operator"
6455 [(match_operand:SI 1 "s_register_operand" "")
6456 (match_operand:SI 2 "nonmemory_operand" "")])
6457 (label_ref (match_operand 3 "" ""))
6459 "TARGET_THUMB1 || TARGET_32BIT"
6463 if (!arm_add_operand (operands[2], SImode))
6464 operands[2] = force_reg (SImode, operands[2]);
6465 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6469 if (thumb1_cmpneg_operand (operands[2], SImode))
6471 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6472 operands[3], operands[0]));
6475 if (!thumb1_cmp_operand (operands[2], SImode))
6476 operands[2] = force_reg (SImode, operands[2]);
6479 ;; A pattern to recognize a special situation and optimize for it.
6480 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6481 ;; due to the available addressing modes. Hence, convert a signed comparison
6482 ;; with zero into an unsigned comparison with 127 if possible.
;; (x >= 0  becomes  (unsigned)x <= 127;  x < 0  becomes  (unsigned)x > 127.)
6483 (define_expand "cbranchqi4"
6484 [(set (pc) (if_then_else
6485 (match_operator 0 "lt_ge_comparison_operator"
6486 [(match_operand:QI 1 "memory_operand" "")
6487 (match_operand:QI 2 "const0_operand" "")])
6488 (label_ref (match_operand 3 "" ""))
6493 xops[1] = gen_reg_rtx (SImode);
6494 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6495 xops[2] = GEN_INT (127);
6496 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6497 VOIDmode, xops[1], xops[2]);
6498 xops[3] = operands[3];
6499 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode conditional branch: delegate directly to cbranch_cc.
6503 (define_expand "cbranchsf4"
6504 [(set (pc) (if_then_else
6505 (match_operator 0 "arm_comparison_operator"
6506 [(match_operand:SF 1 "s_register_operand" "")
6507 (match_operand:SF 2 "arm_float_compare_operand" "")])
6508 (label_ref (match_operand 3 "" ""))
6510 "TARGET_32BIT && TARGET_HARD_FLOAT"
6511 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6512 operands[3])); DONE;"
;; DFmode conditional branch: as cbranchsf4, but requires double-precision
;; hardware (excluded for single-precision-only VFP).
6515 (define_expand "cbranchdf4"
6516 [(set (pc) (if_then_else
6517 (match_operator 0 "arm_comparison_operator"
6518 [(match_operand:DF 1 "s_register_operand" "")
6519 (match_operand:DF 2 "arm_float_compare_operand" "")])
6520 (label_ref (match_operand 3 "" ""))
6522 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6523 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6524 operands[3])); DONE;"
;; cbranchdi4: DImode conditional branch.  Comparisons that
;; arm_gen_compare_reg cannot handle directly are flipped by swapping the
;; operands and reversing the code (e.g. GT -> LT with swapped operands).
6527 (define_expand "cbranchdi4"
6528 [(set (pc) (if_then_else
6529 (match_operator 0 "arm_comparison_operator"
6530 [(match_operand:DI 1 "cmpdi_operand" "")
6531 (match_operand:DI 2 "cmpdi_operand" "")])
6532 (label_ref (match_operand 3 "" ""))
6536 rtx swap = NULL_RTX;
6537 enum rtx_code code = GET_CODE (operands[0]);
6539 /* We should not have two constants. */
6540 gcc_assert (GET_MODE (operands[1]) == DImode
6541 || GET_MODE (operands[2]) == DImode);
6543 /* Flip unimplemented DImode comparisons to a form that
6544 arm_gen_compare_reg can handle. */
6548 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6550 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6552 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6554 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6559 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6562 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; cbranchsi4_insn: Thumb-1 compare-and-branch.  Tracks the last-emitted
;; compare in cfun->machine->thumb1_cc_* to elide redundant "cmp"s; the
;; length attribute selects short (4), long (6) or far (8, via bl) jump
;; sequences according to the branch displacement.
6568 (define_insn "cbranchsi4_insn"
6569 [(set (pc) (if_then_else
6570 (match_operator 0 "arm_comparison_operator"
6571 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6572 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6573 (label_ref (match_operand 3 "" ""))
6577 rtx t = cfun->machine->thumb1_cc_insn;
6580 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6581 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6583 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6585 if (!noov_comparison_operator (operands[0], VOIDmode))
6588 else if (cfun->machine->thumb1_cc_mode != CCmode)
6593 output_asm_insn ("cmp\t%1, %2", operands);
6594 cfun->machine->thumb1_cc_insn = insn;
6595 cfun->machine->thumb1_cc_op0 = operands[1];
6596 cfun->machine->thumb1_cc_op1 = operands[2];
6597 cfun->machine->thumb1_cc_mode = CCmode;
6600 /* Ensure we emit the right type of condition code on the jump. */
6601 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6604 switch (get_attr_length (insn))
6606 case 4: return \"b%d0\\t%l3\";
6607 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6608 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6611 [(set (attr "far_jump")
6613 (eq_attr "length" "8")
6614 (const_string "yes")
6615 (const_string "no")))
6616 (set (attr "length")
6618 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6619 (le (minus (match_dup 3) (pc)) (const_int 256)))
6622 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6623 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; cbranchsi4_scratch: Thumb-1 compare-and-branch where operand 2 is a
;; negatable constant (thumb1_cmpneg_operand); performs the compare as
;; "add scratch, %1, #-op2" so the flags are set without a cmp-reachable
;; immediate.  Same short/long/far jump length selection as above.
6628 (define_insn "cbranchsi4_scratch"
6629 [(set (pc) (if_then_else
6630 (match_operator 4 "arm_comparison_operator"
6631 [(match_operand:SI 1 "s_register_operand" "l,0")
6632 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6633 (label_ref (match_operand 3 "" ""))
6635 (clobber (match_scratch:SI 0 "=l,l"))]
6638 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6640 switch (get_attr_length (insn))
6642 case 4: return \"b%d4\\t%l3\";
6643 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6644 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6647 [(set (attr "far_jump")
6649 (eq_attr "length" "8")
6650 (const_string "yes")
6651 (const_string "no")))
6652 (set (attr "length")
6654 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6655 (le (minus (match_dup 3) (pc)) (const_int 256)))
6658 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6659 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6664 ;; Two peepholes to generate subtract of 0 instead of a move if the
6665 ;; condition codes will be useful.
;; First variant: the following branch compares the move's SOURCE (op 1)
;; against zero; replace the move by "sub %0, %1, #0" so the flags are set
;; for free, and retarget the compare at the destination register.
6667 [(set (match_operand:SI 0 "low_register_operand" "")
6668 (match_operand:SI 1 "low_register_operand" ""))
6670 (if_then_else (match_operator 2 "arm_comparison_operator"
6671 [(match_dup 1) (const_int 0)])
6672 (label_ref (match_operand 3 "" ""))
6675 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6677 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6678 (label_ref (match_dup 3))
6682 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6683 ;; merge cases like this because the op1 is a hard register in
6684 ;; arm_class_likely_spilled_p.
;; Second variant: same transformation, but the branch already compares the
;; move's DESTINATION (op 0) against zero.
6686 [(set (match_operand:SI 0 "low_register_operand" "")
6687 (match_operand:SI 1 "low_register_operand" ""))
6689 (if_then_else (match_operator 2 "arm_comparison_operator"
6690 [(match_dup 0) (const_int 0)])
6691 (label_ref (match_operand 3 "" ""))
6694 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6696 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6697 (label_ref (match_dup 3))
;; *negated_cbranchsi4: Thumb-1 equality branch on (reg == -reg), emitted as
;; "cmn %1, %2" (compare negated) followed by the usual short/long/far jump.
6701 (define_insn "*negated_cbranchsi4"
6704 (match_operator 0 "equality_operator"
6705 [(match_operand:SI 1 "s_register_operand" "l")
6706 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6707 (label_ref (match_operand 3 "" ""))
6711 output_asm_insn (\"cmn\\t%1, %2\", operands);
6712 switch (get_attr_length (insn))
6714 case 4: return \"b%d0\\t%l3\";
6715 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6716 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6719 [(set (attr "far_jump")
6721 (eq_attr "length" "8")
6722 (const_string "yes")
6723 (const_string "no")))
6724 (set (attr "length")
6726 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6727 (le (minus (match_dup 3) (pc)) (const_int 256)))
6730 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6731 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tbit_cbranch: branch on a single bit of a register.  Shifts the tested
;; bit into the sign position of a scratch ("lsl scratch, %1, 31-bitpos",
;; computed from operand 2) so the flags reflect that bit, then branches.
6736 (define_insn "*tbit_cbranch"
6739 (match_operator 0 "equality_operator"
6740 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6742 (match_operand:SI 2 "const_int_operand" "i"))
6744 (label_ref (match_operand 3 "" ""))
6746 (clobber (match_scratch:SI 4 "=l"))]
6751 op[0] = operands[4];
6752 op[1] = operands[1];
6753 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6755 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6756 switch (get_attr_length (insn))
6758 case 4: return \"b%d0\\t%l3\";
6759 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6760 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6763 [(set (attr "far_jump")
6765 (eq_attr "length" "8")
6766 (const_string "yes")
6767 (const_string "no")))
6768 (set (attr "length")
6770 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6771 (le (minus (match_dup 3) (pc)) (const_int 256)))
6774 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6775 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tlobits_cbranch: branch on the low N bits of a register.  Shifts the
;; tested field to the top of a scratch ("lsl scratch, %1, 32-N") so the
;; zero flag reflects whether any of those bits is set, then branches.
6780 (define_insn "*tlobits_cbranch"
6783 (match_operator 0 "equality_operator"
6784 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6785 (match_operand:SI 2 "const_int_operand" "i")
6788 (label_ref (match_operand 3 "" ""))
6790 (clobber (match_scratch:SI 4 "=l"))]
6795 op[0] = operands[4];
6796 op[1] = operands[1];
6797 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6799 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6800 switch (get_attr_length (insn))
6802 case 4: return \"b%d0\\t%l3\";
6803 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6804 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6807 [(set (attr "far_jump")
6809 (eq_attr "length" "8")
6810 (const_string "yes")
6811 (const_string "no")))
6812 (set (attr "length")
6814 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6815 (le (minus (match_dup 3) (pc)) (const_int 256)))
6818 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6819 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; *tstsi3_cbranch: branch on (a AND b) ==/!= 0, implemented with
;; "tst %0, %1" followed by the conditional branch.  The '%' in the first
;; constraint marks the AND operands as commutative.
6824 (define_insn "*tstsi3_cbranch"
6827 (match_operator 3 "equality_operator"
6828 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6829 (match_operand:SI 1 "s_register_operand" "l"))
6831 (label_ref (match_operand 2 "" ""))
6836 output_asm_insn (\"tst\\t%0, %1\", operands);
6837 switch (get_attr_length (insn))
6839 case 4: return \"b%d3\\t%l2\";
6840 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6841 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6844 [(set (attr "far_jump")
6846 (eq_attr "length" "8")
6847 (const_string "yes")
6848 (const_string "no")))
6849 (set (attr "length")
6851 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6852 (le (minus (match_dup 2) (pc)) (const_int 256)))
6855 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6856 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; *cbranchne_decr1: combined decrement-and-branch.  Subtracts 1 from
;; operand 2, stores the result in operand 0 (low reg, high reg via a move,
;; or memory via a store — see the alternatives and the comments in the C
;; body), and branches on the equality test.  Length tables below give the
;; per-alternative branch-range thresholds.
6861 (define_insn "*cbranchne_decr1"
6863 (if_then_else (match_operator 3 "equality_operator"
6864 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6866 (label_ref (match_operand 4 "" ""))
6868 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6869 (plus:SI (match_dup 2) (const_int -1)))
6870 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6875 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6877 VOIDmode, operands[2], const1_rtx);
6878 cond[1] = operands[4];
6880 if (which_alternative == 0)
6881 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6882 else if (which_alternative == 1)
6884 /* We must provide an alternative for a hi reg because reload
6885 cannot handle output reloads on a jump instruction, but we
6886 can't subtract into that. Fortunately a mov from lo to hi
6887 does not clobber the condition codes. */
6888 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6889 output_asm_insn (\"mov\\t%0, %1\", operands);
6893 /* Similarly, but the target is memory. */
6894 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6895 output_asm_insn (\"str\\t%1, %0\", operands);
6898 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6901 output_asm_insn (\"b%d0\\t%l1\", cond);
6904 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6905 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6907 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6908 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6912 [(set (attr "far_jump")
6914 (ior (and (eq (symbol_ref ("which_alternative"))
6916 (eq_attr "length" "8"))
6917 (eq_attr "length" "10"))
6918 (const_string "yes")
6919 (const_string "no")))
6920 (set_attr_alternative "length"
6924 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6925 (le (minus (match_dup 4) (pc)) (const_int 256)))
6928 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6929 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6934 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6935 (le (minus (match_dup 4) (pc)) (const_int 256)))
6938 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6939 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6944 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6945 (le (minus (match_dup 4) (pc)) (const_int 256)))
6948 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6949 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6954 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6955 (le (minus (match_dup 4) (pc)) (const_int 256)))
6958 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6959 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; *addsi3_cbranch: combined add (or sub for a negative constant) whose
;; result is both stored in operand 0 and compared against zero for the
;; branch.  Restricted to EQ/NE/GE/LT, the codes the Thumb-1 flag-setting
;; add/sub supports.  Alternatives cover low-reg, high-reg and memory
;; destinations (via mov/str), mirroring *cbranchne_decr1.
6964 (define_insn "*addsi3_cbranch"
6967 (match_operator 4 "arm_comparison_operator"
6969 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6970 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6972 (label_ref (match_operand 5 "" ""))
6975 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6976 (plus:SI (match_dup 2) (match_dup 3)))
6977 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6979 && (GET_CODE (operands[4]) == EQ
6980 || GET_CODE (operands[4]) == NE
6981 || GET_CODE (operands[4]) == GE
6982 || GET_CODE (operands[4]) == LT)"
6987 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6988 cond[1] = operands[2];
6989 cond[2] = operands[3];
6991 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6992 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6994 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6996 if (which_alternative >= 2
6997 && which_alternative < 4)
6998 output_asm_insn (\"mov\\t%0, %1\", operands);
6999 else if (which_alternative >= 4)
7000 output_asm_insn (\"str\\t%1, %0\", operands);
7002 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7005 return \"b%d4\\t%l5\";
7007 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7009 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7013 [(set (attr "far_jump")
7015 (ior (and (lt (symbol_ref ("which_alternative"))
7017 (eq_attr "length" "8"))
7018 (eq_attr "length" "10"))
7019 (const_string "yes")
7020 (const_string "no")))
7021 (set (attr "length")
7023 (lt (symbol_ref ("which_alternative"))
7026 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7027 (le (minus (match_dup 5) (pc)) (const_int 256)))
7030 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7031 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7035 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7036 (le (minus (match_dup 5) (pc)) (const_int 256)))
7039 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7040 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; *addsi3_cbranch_scratch: as *addsi3_cbranch but the sum itself is dead;
;; only the flags are wanted.  Alternatives use cmp/cmn against the
;; (negated) constant where possible, else add/sub into a scratch.
;; Restricted to EQ/NE/GE/LT for the same reason as above.
7045 (define_insn "*addsi3_cbranch_scratch"
7048 (match_operator 3 "arm_comparison_operator"
7050 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7051 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7053 (label_ref (match_operand 4 "" ""))
7055 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7057 && (GET_CODE (operands[3]) == EQ
7058 || GET_CODE (operands[3]) == NE
7059 || GET_CODE (operands[3]) == GE
7060 || GET_CODE (operands[3]) == LT)"
7063 switch (which_alternative)
7066 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7069 output_asm_insn (\"cmn\t%1, %2\", operands);
7072 if (INTVAL (operands[2]) < 0)
7073 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7075 output_asm_insn (\"add\t%0, %1, %2\", operands);
7078 if (INTVAL (operands[2]) < 0)
7079 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7081 output_asm_insn (\"add\t%0, %0, %2\", operands);
7085 switch (get_attr_length (insn))
7088 return \"b%d3\\t%l4\";
7090 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7092 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7096 [(set (attr "far_jump")
7098 (eq_attr "length" "8")
7099 (const_string "yes")
7100 (const_string "no")))
7101 (set (attr "length")
7103 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7104 (le (minus (match_dup 4) (pc)) (const_int 256)))
7107 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7108 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7114 ;; Comparison and test insns
;; *arm_cmpsi_insn: SImode compare setting CC.  The arch/length attributes
;; mark the first two alternatives as 2-byte Thumb-2 encodings.
7116 (define_insn "*arm_cmpsi_insn"
7117 [(set (reg:CC CC_REGNUM)
7118 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7119 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7126 [(set_attr "conds" "set")
7127 (set_attr "arch" "t2,t2,any,any")
7128 (set_attr "length" "2,2,4,4")]
;; *cmpsi_shiftsi: compare a register against a shifted register
;; (reg-shift-by-reg only on ARM state, per the arch attribute).
7131 (define_insn "*cmpsi_shiftsi"
7132 [(set (reg:CC CC_REGNUM)
7133 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7134 (match_operator:SI 3 "shift_operator"
7135 [(match_operand:SI 1 "s_register_operand" "r,r")
7136 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7139 [(set_attr "conds" "set")
7140 (set_attr "shift" "1")
7141 (set_attr "arch" "32,a")
7142 (set_attr "type" "alu_shift,alu_shift_reg")])
;; *cmpsi_shiftsi_swp: as *cmpsi_shiftsi with the operands swapped, using
;; CC_SWP mode so the condition is interpreted with reversed operand order.
7144 (define_insn "*cmpsi_shiftsi_swp"
7145 [(set (reg:CC_SWP CC_REGNUM)
7146 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7147 [(match_operand:SI 1 "s_register_operand" "r,r")
7148 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7149 (match_operand:SI 0 "s_register_operand" "r,r")))]
7152 [(set_attr "conds" "set")
7153 (set_attr "shift" "1")
7154 (set_attr "arch" "32,a")
7155 (set_attr "type" "alu_shift,alu_shift_reg")])
;; *arm_cmpsi_negshiftsi_si: compare a register with the negation of a
;; shifted register, setting only the Z flag (CC_Z mode).
7157 (define_insn "*arm_cmpsi_negshiftsi_si"
7158 [(set (reg:CC_Z CC_REGNUM)
7160 (neg:SI (match_operator:SI 1 "shift_operator"
7161 [(match_operand:SI 2 "s_register_operand" "r")
7162 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7163 (match_operand:SI 0 "s_register_operand" "r")))]
7166 [(set_attr "conds" "set")
7167 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7168 (const_string "alu_shift")
7169 (const_string "alu_shift_reg")))]
7172 ;; DImode comparisons. The generic code generates branches that
7173 ;; if-conversion cannot reduce to a conditional compare, so we do
;; *arm_cmpdi_insn: DImode signed compare as cmp-low/sbcs-high into a
;; scratch; CC_NCV mode exposes the N, C and V flags.  Not used when
;; Maverick (Cirrus) hard float provides its own DImode compare.
7176 (define_insn "*arm_cmpdi_insn"
7177 [(set (reg:CC_NCV CC_REGNUM)
7178 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7179 (match_operand:DI 1 "arm_di_operand" "rDi")))
7180 (clobber (match_scratch:SI 2 "=r"))]
7181 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7182 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7183 [(set_attr "conds" "set")
7184 (set_attr "length" "8")]
;; *arm_cmpdi_unsigned: DImode unsigned compare — compare the high words,
;; then the low words only if the high words were equal (cmpeq).
7187 (define_insn "*arm_cmpdi_unsigned"
7188 [(set (reg:CC_CZ CC_REGNUM)
7189 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7190 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7192 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7193 [(set_attr "conds" "set")
7194 (set_attr "length" "8")]
;; *arm_cmpdi_zero: DImode compare against zero as a flag-setting OR of the
;; two halves into a scratch (only Z is meaningful, hence CC_Z).
7197 (define_insn "*arm_cmpdi_zero"
7198 [(set (reg:CC_Z CC_REGNUM)
7199 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7201 (clobber (match_scratch:SI 1 "=r"))]
7203 "orr%.\\t%1, %Q0, %R0"
7204 [(set_attr "conds" "set")]
;; *thumb_cmpdi_zero: Thumb-1 version of the above; the Thumb "orr" always
;; sets flags, so no %.-suffix is needed and the insn is 2 bytes.
7207 (define_insn "*thumb_cmpdi_zero"
7208 [(set (reg:CC_Z CC_REGNUM)
7209 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7211 (clobber (match_scratch:SI 1 "=l"))]
7213 "orr\\t%1, %Q0, %R0"
7214 [(set_attr "conds" "set")
7215 (set_attr "length" "2")]
7218 ;; Cirrus SF compare instruction
;; Writes the FP comparison result directly to the CPSR via r15.
7219 (define_insn "*cirrus_cmpsf"
7220 [(set (reg:CCFP CC_REGNUM)
7221 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7222 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7223 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7224 "cfcmps%?\\tr15, %V0, %V1"
7225 [(set_attr "type" "mav_farith")
7226 (set_attr "cirrus" "compare")]
7229 ;; Cirrus DF compare instruction
;; DFmode counterpart of *cirrus_cmpsf.
7230 (define_insn "*cirrus_cmpdf"
7231 [(set (reg:CCFP CC_REGNUM)
7232 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7233 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7234 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7235 "cfcmpd%?\\tr15, %V0, %V1"
7236 [(set_attr "type" "mav_farith")
7237 (set_attr "cirrus" "compare")]
;; *cirrus_cmpdi: 64-bit integer compare performed in Cirrus (Maverick)
;; FP registers (cfcmp64), result written to the CPSR via r15.
7240 (define_insn "*cirrus_cmpdi"
7241 [(set (reg:CC CC_REGNUM)
7242 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7243 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7244 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7245 "cfcmp64%?\\tr15, %V0, %V1"
7246 [(set_attr "type" "mav_farith")
7247 (set_attr "cirrus" "compare")]
7250 ; This insn allows redundant compares to be removed by cse, nothing should
7251 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7252 ; is deleted later on. The match_dup will match the mode here, so that
7253 ; mode changes of the condition codes aren't lost by this even though we don't
7254 ; specify what they are.
;; Emits only an assembler comment if it ever survives to output.
7256 (define_insn "*deleted_compare"
7257 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7259 "\\t%@ deleted compare"
7260 [(set_attr "conds" "set")
7261 (set_attr "length" "0")]
7265 ;; Conditional branch insns
;; cbranch_cc: internal helper expander used by the cbranch<mode>4 patterns
;; above.  Emits the compare via arm_gen_compare_reg and rewrites the
;; branch as a test of the CC register against zero.
7267 (define_expand "cbranch_cc"
7269 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7270 (match_operand 2 "" "")])
7271 (label_ref (match_operand 3 "" ""))
7274 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7275 operands[1], operands[2]);
7276 operands[2] = const0_rtx;"
7280 ;; Patterns to match conditional branch insns.
;; *arm_cond_branch: branch on a condition already in the CC register.
;; The arm_ccfsm_state check cooperates with the ARM conditional-execution
;; state machine: when active, the branch is absorbed and nothing printed.
7283 (define_insn "*arm_cond_branch"
7285 (if_then_else (match_operator 1 "arm_comparison_operator"
7286 [(match_operand 2 "cc_register" "") (const_int 0)])
7287 (label_ref (match_operand 0 "" ""))
7291 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7293 arm_ccfsm_state += 2;
7296 return \"b%d1\\t%l0\";
7298 [(set_attr "conds" "use")
7299 (set_attr "type" "branch")
7300 (set (attr "length")
7302 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7303 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7304 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; *arm_cond_branch_reversed: as *arm_cond_branch but the label is in the
;; else-arm, so the inverted condition (%D1) is emitted.
7309 (define_insn "*arm_cond_branch_reversed"
7311 (if_then_else (match_operator 1 "arm_comparison_operator"
7312 [(match_operand 2 "cc_register" "") (const_int 0)])
7314 (label_ref (match_operand 0 "" ""))))]
7317 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7319 arm_ccfsm_state += 2;
7322 return \"b%D1\\t%l0\";
7324 [(set_attr "conds" "use")
7325 (set_attr "type" "branch")
7326 (set (attr "length")
7328 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7329 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7330 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; cstore_cc: internal helper expander for the cstore<mode>4 patterns.
;; Emits the compare with arm_gen_compare_reg and rewrites the store-flag
;; operation as a test of the CC register against zero.
7339 (define_expand "cstore_cc"
7340 [(set (match_operand:SI 0 "s_register_operand" "")
7341 (match_operator:SI 1 "" [(match_operand 2 "" "")
7342 (match_operand 3 "" "")]))]
7344 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7345 operands[2], operands[3]);
7346 operands[3] = const0_rtx;"
;; *mov_scc: store-flag — set operand 0 to 1 if the condition holds,
;; else 0, using a pair of conditional moves.
7349 (define_insn "*mov_scc"
7350 [(set (match_operand:SI 0 "s_register_operand" "=r")
7351 (match_operator:SI 1 "arm_comparison_operator"
7352 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7354 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7355 [(set_attr "conds" "use")
7356 (set_attr "insn" "mov")
7357 (set_attr "length" "8")]
;; *mov_negscc: negated store-flag — -1 (all ones, via mvn #0) if the
;; condition holds, else 0.
7360 (define_insn "*mov_negscc"
7361 [(set (match_operand:SI 0 "s_register_operand" "=r")
7362 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7363 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7365 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7366 [(set_attr "conds" "use")
7367 (set_attr "insn" "mov")
7368 (set_attr "length" "8")]
;; *mov_notscc: bitwise-not of the store-flag result — ~1 = -2 (mvn #1)
;; if the condition holds, else 0.
7371 (define_insn "*mov_notscc"
7372 [(set (match_operand:SI 0 "s_register_operand" "=r")
7373 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7374 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7376 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7377 [(set_attr "conds" "use")
7378 (set_attr "insn" "mov")
7379 (set_attr "length" "8")]
;; cstoresi4: SImode store-flag.  On 32-bit targets this just defers to
;; cstore_cc.  The Thumb-1 path open-codes each comparison: EQ/NE against
;; zero use the dedicated cstoresi_*0_thumb1 expanders; EQ/NE against a
;; nonzero value subtract first; LE/GE/LEU/GEU are built from shift/adc
;; sequences via thumb1_addsi3_addgeu; LTU/GTU use cstoresi_ltu_thumb1
;; (GTU by swapping operands).  GT/LT fall through to generic code.
7382 (define_expand "cstoresi4"
7383 [(set (match_operand:SI 0 "s_register_operand" "")
7384 (match_operator:SI 1 "arm_comparison_operator"
7385 [(match_operand:SI 2 "s_register_operand" "")
7386 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7387 "TARGET_32BIT || TARGET_THUMB1"
7389 rtx op3, scratch, scratch2;
7393 if (!arm_add_operand (operands[3], SImode))
7394 operands[3] = force_reg (SImode, operands[3]);
7395 emit_insn (gen_cstore_cc (operands[0], operands[1],
7396 operands[2], operands[3]));
7400 if (operands[3] == const0_rtx)
7402 switch (GET_CODE (operands[1]))
7405 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7409 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7413 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7414 NULL_RTX, 0, OPTAB_WIDEN);
7415 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7416 NULL_RTX, 0, OPTAB_WIDEN);
7417 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7418 operands[0], 1, OPTAB_WIDEN);
7422 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7424 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7425 NULL_RTX, 1, OPTAB_WIDEN);
7429 scratch = expand_binop (SImode, ashr_optab, operands[2],
7430 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7431 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7432 NULL_RTX, 0, OPTAB_WIDEN);
7433 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7437 /* LT is handled by generic code. No need for unsigned with 0. */
7444 switch (GET_CODE (operands[1]))
7447 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7448 NULL_RTX, 0, OPTAB_WIDEN);
7449 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7453 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7454 NULL_RTX, 0, OPTAB_WIDEN);
7455 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7459 op3 = force_reg (SImode, operands[3]);
7461 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7462 NULL_RTX, 1, OPTAB_WIDEN);
7463 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7464 NULL_RTX, 0, OPTAB_WIDEN);
7465 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7471 if (!thumb1_cmp_operand (op3, SImode))
7472 op3 = force_reg (SImode, op3);
7473 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7474 NULL_RTX, 0, OPTAB_WIDEN);
7475 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7476 NULL_RTX, 1, OPTAB_WIDEN);
7477 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7482 op3 = force_reg (SImode, operands[3]);
7483 scratch = force_reg (SImode, const0_rtx);
7484 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7490 if (!thumb1_cmp_operand (op3, SImode))
7491 op3 = force_reg (SImode, op3);
7492 scratch = force_reg (SImode, const0_rtx);
7493 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7499 if (!thumb1_cmp_operand (op3, SImode))
7500 op3 = force_reg (SImode, op3);
7501 scratch = gen_reg_rtx (SImode);
7502 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7506 op3 = force_reg (SImode, operands[3]);
7507 scratch = gen_reg_rtx (SImode);
7508 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7511 /* No good sequences for GT, LT. */
;; cstoresf4: SFmode store-flag for hard-float 32-bit targets; defers to
;; cstore_cc.
7518 (define_expand "cstoresf4"
7519 [(set (match_operand:SI 0 "s_register_operand" "")
7520 (match_operator:SI 1 "arm_comparison_operator"
7521 [(match_operand:SF 2 "s_register_operand" "")
7522 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7523 "TARGET_32BIT && TARGET_HARD_FLOAT"
7524 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7525 operands[2], operands[3])); DONE;"
;; cstoredf4: DFmode store-flag; excluded on single-precision-only VFP.
7528 (define_expand "cstoredf4"
7529 [(set (match_operand:SI 0 "s_register_operand" "")
7530 (match_operator:SI 1 "arm_comparison_operator"
7531 [(match_operand:DF 2 "s_register_operand" "")
7532 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7533 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7534 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7535 operands[2], operands[3])); DONE;"
;; cstoredi4: DImode store-flag.  Mirrors cbranchdi4: comparisons that
;; arm_gen_compare_reg cannot handle directly are flipped by swapping
;; operands and reversing the comparison code.
7538 (define_expand "cstoredi4"
7539 [(set (match_operand:SI 0 "s_register_operand" "")
7540 (match_operator:SI 1 "arm_comparison_operator"
7541 [(match_operand:DI 2 "cmpdi_operand" "")
7542 (match_operand:DI 3 "cmpdi_operand" "")]))]
7545 rtx swap = NULL_RTX;
7546 enum rtx_code code = GET_CODE (operands[1]);
7548 /* We should not have two constants. */
7549 gcc_assert (GET_MODE (operands[2]) == DImode
7550 || GET_MODE (operands[3]) == DImode);
7552 /* Flip unimplemented DImode comparisons to a form that
7553 arm_gen_compare_reg can handle. */
7557 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7559 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7561 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7563 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7568 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7571 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; cstoresi_eq0_thumb1: Thumb-1 (x == 0) store-flag; allocates the scratch
;; register clobbered by the matching insn pattern below.
7577 (define_expand "cstoresi_eq0_thumb1"
7579 [(set (match_operand:SI 0 "s_register_operand" "")
7580 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7582 (clobber (match_dup:SI 2))])]
7584 "operands[2] = gen_reg_rtx (SImode);"
;; cstoresi_ne0_thumb1: Thumb-1 (x != 0) store-flag; allocates the scratch
;; register clobbered by the matching insn pattern below.
7587 (define_expand "cstoresi_ne0_thumb1"
7589 [(set (match_operand:SI 0 "s_register_operand" "")
7590 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7592 (clobber (match_dup:SI 2))])]
7594 "operands[2] = gen_reg_rtx (SImode);"
;; *cstoresi_eq0_thumb1_insn: (x == 0) via neg/adc carry trick; the second
;; alternative uses the scratch when input and output overlap.
7597 (define_insn "*cstoresi_eq0_thumb1_insn"
7598 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7599 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7601 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7604 neg\\t%0, %1\;adc\\t%0, %0, %1
7605 neg\\t%2, %1\;adc\\t%0, %1, %2"
7606 [(set_attr "length" "4")]
;; *cstoresi_ne0_thumb1_insn: (x != 0) via sub #1 / sbc borrow trick.
7609 (define_insn "*cstoresi_ne0_thumb1_insn"
7610 [(set (match_operand:SI 0 "s_register_operand" "=l")
7611 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7613 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7615 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7616 [(set_attr "length" "4")]
7619 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; Produces -(a <u b): cmp sets the carry, sbc of a register with itself
;; then yields 0 or -1.
7620 (define_insn "cstoresi_nltu_thumb1"
7621 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7622 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7623 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7625 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7626 [(set_attr "length" "4")]
;; cstoresi_ltu_thumb1: (a <u b) as 0/1; split into the -(a <u b) pattern
;; above followed by a negation of that intermediate.
7629 (define_insn_and_split "cstoresi_ltu_thumb1"
7630 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7631 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7632 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7637 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7638 (set (match_dup 0) (neg:SI (match_dup 3)))]
7639 "operands[3] = gen_reg_rtx (SImode);"
7640 [(set_attr "length" "4")]
7643 ;; Used as part of the expansion of thumb les sequence.
;; Computes op1 + op2 + (op3 >=u op4) using cmp to set the carry and adc
;; to fold it into the sum.
7644 (define_insn "thumb1_addsi3_addgeu"
7645 [(set (match_operand:SI 0 "s_register_operand" "=l")
7646 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7647 (match_operand:SI 2 "s_register_operand" "l"))
7648 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7649 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7651 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7652 [(set_attr "length" "4")]
7656 ;; Conditional move insns
;; movsicc: SImode conditional move.  UNEQ/LTGT are rejected (FAIL path in
;; the elided branch); otherwise the compare is emitted and operand 1 is
;; rewritten as a test of the CC register against zero.
7658 (define_expand "movsicc"
7659 [(set (match_operand:SI 0 "s_register_operand" "")
7660 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7661 (match_operand:SI 2 "arm_not_operand" "")
7662 (match_operand:SI 3 "arm_not_operand" "")))]
7666 enum rtx_code code = GET_CODE (operands[1]);
7669 if (code == UNEQ || code == LTGT)
7672 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7673 XEXP (operands[1], 1));
7674 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movsfcc: SFmode conditional move.  Forces operand 3 into a register
;; unless FPA hard float can use it directly as an FP add operand.
7678 (define_expand "movsfcc"
7679 [(set (match_operand:SF 0 "s_register_operand" "")
7680 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7681 (match_operand:SF 2 "s_register_operand" "")
7682 (match_operand:SF 3 "nonmemory_operand" "")))]
7683 "TARGET_32BIT && TARGET_HARD_FLOAT"
7686 enum rtx_code code = GET_CODE (operands[1]);
7689 if (code == UNEQ || code == LTGT)
7692 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7693 Otherwise, ensure it is a valid FP add operand */
7694 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7695 || (!arm_float_add_operand (operands[3], SFmode)))
7696 operands[3] = force_reg (SFmode, operands[3]);
7698 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7699 XEXP (operands[1], 1));
7700 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movdfcc: DFmode conditional move for FPA or double-precision VFP.
7704 (define_expand "movdfcc"
7705 [(set (match_operand:DF 0 "s_register_operand" "")
7706 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7707 (match_operand:DF 2 "s_register_operand" "")
7708 (match_operand:DF 3 "arm_float_add_operand" "")))]
7709 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7712 enum rtx_code code = GET_CODE (operands[1]);
7715 if (code == UNEQ || code == LTGT)
7718 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7719 XEXP (operands[1], 1));
7720 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; *movsicc_insn: SImode conditional move using predicated mov/mvn pairs;
;; alternatives where one arm already equals the destination need a single
;; conditional instruction (length 4), the rest need two (length 8).
7724 (define_insn "*movsicc_insn"
7725 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7727 (match_operator 3 "arm_comparison_operator"
7728 [(match_operand 4 "cc_register" "") (const_int 0)])
7729 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7730 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7737 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7738 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7739 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7740 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7741 [(set_attr "length" "4,4,4,4,8,8,8,8")
7742 (set_attr "conds" "use")
7743 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
;; SFmode conditional move when floats live in core registers
;; (soft-float): one of the two source alternatives is tied to the
;; destination, so a single predicated MOV covers the other case.
7746 (define_insn "*movsfcc_soft_insn"
7747 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7748 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7749 [(match_operand 4 "cc_register" "") (const_int 0)])
7750 (match_operand:SF 1 "s_register_operand" "0,r")
7751 (match_operand:SF 2 "s_register_operand" "r,0")))]
7752 "TARGET_ARM && TARGET_SOFT_FLOAT"
7756 [(set_attr "conds" "use")
7757 (set_attr "insn" "mov")]
7761 ;; Jump and linkage insns
;; Unconditional jump to a label.
7763 (define_expand "jump"
7765 (label_ref (match_operand 0 "" "")))]
;; ARM/Thumb-2 branch.  The ccfsm state machine may have conditionalised
;; this insn; states 1/2 are bumped so final knows the branch was used.
7770 (define_insn "*arm_jump"
7772 (label_ref (match_operand 0 "" "")))]
7776 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7778 arm_ccfsm_state += 2;
7781 return \"b%?\\t%l0\";
7784 [(set_attr "predicable" "yes")
7785 (set (attr "length")
7787 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7788 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7789 (le (minus (match_dup 0) (pc)) (const_int 2048))))
;; Thumb-1 branch: short "b" when the target is within the signed
;; 11-bit range (-2044..2048), otherwise fall back to "bl" as a far jump.
7794 (define_insn "*thumb_jump"
7796 (label_ref (match_operand 0 "" "")))]
7799 if (get_attr_length (insn) == 2)
7801 return \"bl\\t%l0\\t%@ far jump\";
7803 [(set (attr "far_jump")
7805 (eq_attr "length" "4")
7806 (const_string "yes")
7807 (const_string "no")))
7808 (set (attr "length")
7810 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7811 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Subroutine call (no return value).  Forces long calls through a
;; register, then emits the real pattern via gen_call_internal.
7816 (define_expand "call"
7817 [(parallel [(call (match_operand 0 "memory_operand" "")
7818 (match_operand 1 "general_operand" ""))
7819 (use (match_operand 2 "" ""))
7820 (clobber (reg:SI LR_REGNUM))])]
7826 /* In an untyped call, we can get NULL for operand 2. */
7827 if (operands[2] == NULL_RTX)
7828 operands[2] = const0_rtx;
7830 /* Decide if we should generate indirect calls by loading the
7831 32-bit address of the callee into a register before performing the
7833 callee = XEXP (operands[0], 0);
7834 if (GET_CODE (callee) == SYMBOL_REF
7835 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7837 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7839 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7840 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Template matched by the *call_* insns below; never emitted directly.
7845 (define_expand "call_internal"
7846 [(parallel [(call (match_operand 0 "memory_operand" "")
7847 (match_operand 1 "general_operand" ""))
7848 (use (match_operand 2 "" ""))
7849 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call through a register on ARMv5+ (template elided here).
7851 (define_insn "*call_reg_armv5"
7852 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7853 (match_operand 1 "" ""))
7854 (use (match_operand 2 "" ""))
7855 (clobber (reg:SI LR_REGNUM))]
7856 "TARGET_ARM && arm_arch5"
7858 [(set_attr "type" "call")]
;; Pre-ARMv5 indirect call; output_call emits the mov lr/branch sequence.
7861 (define_insn "*call_reg_arm"
7862 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7863 (match_operand 1 "" ""))
7864 (use (match_operand 2 "" ""))
7865 (clobber (reg:SI LR_REGNUM))]
7866 "TARGET_ARM && !arm_arch5"
7868 return output_call (operands);
7870 ;; length is worst case, normally it is only two
7871 [(set_attr "length" "12")
7872 (set_attr "type" "call")]
7876 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7877 ;; considered a function call by the branch predictor of some cores (PR40887).
7878 ;; Falls back to blx rN (*call_reg_armv5).
7880 (define_insn "*call_mem"
7881 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7882 (match_operand 1 "" ""))
7883 (use (match_operand 2 "" ""))
7884 (clobber (reg:SI LR_REGNUM))]
7885 "TARGET_ARM && !arm_arch5"
7887 return output_call_mem (operands);
7889 [(set_attr "length" "12")
7890 (set_attr "type" "call")]
;; Thumb-1 indirect call on ARMv5+ (blx; 2-byte encoding).
7893 (define_insn "*call_reg_thumb1_v5"
7894 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7895 (match_operand 1 "" ""))
7896 (use (match_operand 2 "" ""))
7897 (clobber (reg:SI LR_REGNUM))]
7898 "TARGET_THUMB1 && arm_arch5"
7900 [(set_attr "length" "2")
7901 (set_attr "type" "call")]
;; Thumb-1 indirect call without blx: either go via a per-register
;; helper (thumb_call_via_reg) or bl to an interworking stub whose name
;; depends on whether a frame pointer is in use.
7904 (define_insn "*call_reg_thumb1"
7905 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7906 (match_operand 1 "" ""))
7907 (use (match_operand 2 "" ""))
7908 (clobber (reg:SI LR_REGNUM))]
7909 "TARGET_THUMB1 && !arm_arch5"
7912 if (!TARGET_CALLER_INTERWORKING)
7913 return thumb_call_via_reg (operands[0]);
7914 else if (operands[1] == const0_rtx)
7915 return \"bl\\t%__interwork_call_via_%0\";
7916 else if (frame_pointer_needed)
7917 return \"bl\\t%__interwork_r7_call_via_%0\";
7919 return \"bl\\t%__interwork_r11_call_via_%0\";
7921 [(set_attr "type" "call")]
;; Subroutine call returning a value in operand 0.  Mirrors the "call"
;; expander above, shifted by one operand.
7924 (define_expand "call_value"
7925 [(parallel [(set (match_operand 0 "" "")
7926 (call (match_operand 1 "memory_operand" "")
7927 (match_operand 2 "general_operand" "")))
7928 (use (match_operand 3 "" ""))
7929 (clobber (reg:SI LR_REGNUM))])]
7935 /* In an untyped call, we can get NULL for operand 3. */
7936 if (operands[3] == 0)
7937 operands[3] = const0_rtx;
7939 /* Decide if we should generate indirect calls by loading the
7940 32-bit address of the callee into a register before performing the
7942 callee = XEXP (operands[1], 0);
7943 if (GET_CODE (callee) == SYMBOL_REF
7944 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7946 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7948 pat = gen_call_value_internal (operands[0], operands[1],
7949 operands[2], operands[3]);
7950 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Template matched by the *call_value_* insns below.
7955 (define_expand "call_value_internal"
7956 [(parallel [(set (match_operand 0 "" "")
7957 (call (match_operand 1 "memory_operand" "")
7958 (match_operand 2 "general_operand" "")))
7959 (use (match_operand 3 "" ""))
7960 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning counterparts of the *call_reg_* / *call_mem insns
;; above; operand numbering is shifted by one for the result.
7962 (define_insn "*call_value_reg_armv5"
7963 [(set (match_operand 0 "" "")
7964 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7965 (match_operand 2 "" "")))
7966 (use (match_operand 3 "" ""))
7967 (clobber (reg:SI LR_REGNUM))]
7968 "TARGET_ARM && arm_arch5"
7970 [(set_attr "type" "call")]
7973 (define_insn "*call_value_reg_arm"
7974 [(set (match_operand 0 "" "")
7975 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7976 (match_operand 2 "" "")))
7977 (use (match_operand 3 "" ""))
7978 (clobber (reg:SI LR_REGNUM))]
7979 "TARGET_ARM && !arm_arch5"
7981 return output_call (&operands[1]);
7983 [(set_attr "length" "12")
7984 (set_attr "type" "call")]
7987 ;; Note: see *call_mem
7989 (define_insn "*call_value_mem"
7990 [(set (match_operand 0 "" "")
7991 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7992 (match_operand 2 "" "")))
7993 (use (match_operand 3 "" ""))
7994 (clobber (reg:SI LR_REGNUM))]
7995 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7997 return output_call_mem (&operands[1]);
7999 [(set_attr "length" "12")
8000 (set_attr "type" "call")]
8003 (define_insn "*call_value_reg_thumb1_v5"
8004 [(set (match_operand 0 "" "")
8005 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8006 (match_operand 2 "" "")))
8007 (use (match_operand 3 "" ""))
8008 (clobber (reg:SI LR_REGNUM))]
8009 "TARGET_THUMB1 && arm_arch5"
8011 [(set_attr "length" "2")
8012 (set_attr "type" "call")]
;; See *call_reg_thumb1: helper call or interworking stub, chosen the
;; same way but with operand indices shifted by one.
8015 (define_insn "*call_value_reg_thumb1"
8016 [(set (match_operand 0 "" "")
8017 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8018 (match_operand 2 "" "")))
8019 (use (match_operand 3 "" ""))
8020 (clobber (reg:SI LR_REGNUM))]
8021 "TARGET_THUMB1 && !arm_arch5"
8024 if (!TARGET_CALLER_INTERWORKING)
8025 return thumb_call_via_reg (operands[1]);
8026 else if (operands[2] == const0_rtx)
8027 return \"bl\\t%__interwork_call_via_%1\";
8028 else if (frame_pointer_needed)
8029 return \"bl\\t%__interwork_r7_call_via_%1\";
8031 return \"bl\\t%__interwork_r11_call_via_%1\";
8033 [(set_attr "type" "call")]
8036 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8037 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct "bl" to a symbol, with (PLT) annotation when PIC requires it.
;; Only for symbols that are not subject to the long-call convention.
8039 (define_insn "*call_symbol"
8040 [(call (mem:SI (match_operand:SI 0 "" ""))
8041 (match_operand 1 "" ""))
8042 (use (match_operand 2 "" ""))
8043 (clobber (reg:SI LR_REGNUM))]
8045 && (GET_CODE (operands[0]) == SYMBOL_REF)
8046 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8049 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8051 [(set_attr "type" "call")]
;; Value-returning variant of *call_symbol.
8054 (define_insn "*call_value_symbol"
8055 [(set (match_operand 0 "" "")
8056 (call (mem:SI (match_operand:SI 1 "" ""))
8057 (match_operand:SI 2 "" "")))
8058 (use (match_operand 3 "" ""))
8059 (clobber (reg:SI LR_REGNUM))]
8061 && (GET_CODE (operands[1]) == SYMBOL_REF)
8062 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8065 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8067 [(set_attr "type" "call")]
;; Direct call to a symbol (4-byte encoding; template elided here).
8070 (define_insn "*call_insn"
8071 [(call (mem:SI (match_operand:SI 0 "" ""))
8072 (match_operand:SI 1 "" ""))
8073 (use (match_operand 2 "" ""))
8074 (clobber (reg:SI LR_REGNUM))]
8076 && GET_CODE (operands[0]) == SYMBOL_REF
8077 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8079 [(set_attr "length" "4")
8080 (set_attr "type" "call")]
;; Value-returning variant of *call_insn.
8083 (define_insn "*call_value_insn"
8084 [(set (match_operand 0 "" "")
8085 (call (mem:SI (match_operand 1 "" ""))
8086 (match_operand 2 "" "")))
8087 (use (match_operand 3 "" ""))
8088 (clobber (reg:SI LR_REGNUM))]
8090 && GET_CODE (operands[1]) == SYMBOL_REF
8091 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8093 [(set_attr "length" "4")
8094 (set_attr "type" "call")]
8097 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail (sibling) call expanders: like call/call_value but without the
;; LR clobber; the untyped-call NULL use-operand is normalised here too.
8098 (define_expand "sibcall"
8099 [(parallel [(call (match_operand 0 "memory_operand" "")
8100 (match_operand 1 "general_operand" ""))
8102 (use (match_operand 2 "" ""))])]
8106 if (operands[2] == NULL_RTX)
8107 operands[2] = const0_rtx;
8111 (define_expand "sibcall_value"
8112 [(parallel [(set (match_operand 0 "" "")
8113 (call (match_operand 1 "memory_operand" "")
8114 (match_operand 2 "general_operand" "")))
8116 (use (match_operand 3 "" ""))])]
8120 if (operands[3] == NULL_RTX)
8121 operands[3] = const0_rtx;
;; Tail call emitted as a plain branch to the symbol (PLT-annotated
;; when needed); the callee returns directly to our caller.
8125 (define_insn "*sibcall_insn"
8126 [(call (mem:SI (match_operand:SI 0 "" "X"))
8127 (match_operand 1 "" ""))
8129 (use (match_operand 2 "" ""))]
8130 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8132 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8134 [(set_attr "type" "call")]
8137 (define_insn "*sibcall_value_insn"
8138 [(set (match_operand 0 "" "")
8139 (call (mem:SI (match_operand:SI 1 "" "X"))
8140 (match_operand 2 "" "")))
8142 (use (match_operand 3 "" ""))]
8143 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8145 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8147 [(set_attr "type" "call")]
;; Function-return patterns, usable only when a single-instruction
;; return is possible (USE_RETURN_INSN).
8150 (define_expand "return"
8152 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8155 ;; Often the return insn will be the same as loading from memory, so set attr
8156 (define_insn "*arm_return"
8158 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
;; ccfsm state 2 means this return was conditionalised away; record it.
8161 if (arm_ccfsm_state == 2)
8163 arm_ccfsm_state += 2;
8166 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8168 [(set_attr "type" "load1")
8169 (set_attr "length" "12")
8170 (set_attr "predicable" "yes")]
;; Return executed when the given condition holds.
8173 (define_insn "*cond_return"
8175 (if_then_else (match_operator 0 "arm_comparison_operator"
8176 [(match_operand 1 "cc_register" "") (const_int 0)])
8179 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8182 if (arm_ccfsm_state == 2)
8184 arm_ccfsm_state += 2;
8187 return output_return_instruction (operands[0], TRUE, FALSE);
8189 [(set_attr "conds" "use")
8190 (set_attr "length" "12")
8191 (set_attr "type" "load1")]
;; Return executed when the condition does NOT hold (inverted sense,
;; note the final TRUE argument to output_return_instruction).
8194 (define_insn "*cond_return_inverted"
8196 (if_then_else (match_operator 0 "arm_comparison_operator"
8197 [(match_operand 1 "cc_register" "") (const_int 0)])
8200 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8203 if (arm_ccfsm_state == 2)
8205 arm_ccfsm_state += 2;
8208 return output_return_instruction (operands[0], TRUE, TRUE);
8210 [(set_attr "conds" "use")
8211 (set_attr "length" "12")
8212 (set_attr "type" "load1")]
8215 ;; Generate a sequence of instructions to determine if the processor is
8216 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; UNSPEC_CHECK_ARCH sets the flags (see *check_arch2); the mask is
;; then either all-ones or 0x03fffffc (the 26-bit PC address field).
8219 (define_expand "return_addr_mask"
8221 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8223 (set (match_operand:SI 0 "s_register_operand" "")
8224 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8226 (const_int 67108860)))] ; 0x03fffffc
8229 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; teq r0,r0 sets Z; teq pc,pc clears Z only on 26-bit architectures
;; where reading the PC includes the flag bits.
8232 (define_insn "*check_arch2"
8233 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8234 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8237 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8238 [(set_attr "length" "8")
8239 (set_attr "conds" "set")]
8242 ;; Call subroutine returning any type.
;; Operand 0 is the function, operand 1 the result block, operand 2 a
;; parallel of (set)s naming each possible hard return register.  The
;; call is emitted, then every result register is stored into the block.
8244 (define_expand "untyped_call"
8245 [(parallel [(call (match_operand 0 "" "")
8247 (match_operand 1 "" "")
8248 (match_operand 2 "" "")])]
8253 rtx par = gen_rtx_PARALLEL (VOIDmode,
8254 rtvec_alloc (XVECLEN (operands[2], 0)));
8255 rtx addr = gen_reg_rtx (Pmode);
8259 emit_move_insn (addr, XEXP (operands[1], 0));
8260 mem = change_address (operands[1], BLKmode, addr);
8262 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8264 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8266 /* Default code only uses r0 as a return value, but we could
8267 be using anything up to 4 registers. */
8268 if (REGNO (src) == R0_REGNUM)
8269 src = gen_rtx_REG (TImode, R0_REGNUM);
8271 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8273 size += GET_MODE_SIZE (GET_MODE (src));
8276 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
;; Copy each returned register out to the result block in memory.
8281 for (i = 0; i < XVECLEN (par, 0); i++)
8283 HOST_WIDE_INT offset = 0;
8284 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8287 emit_move_insn (addr, plus_constant (addr, size));
8289 mem = change_address (mem, GET_MODE (reg), NULL);
8290 if (REGNO (reg) == R0_REGNUM)
8292 /* On thumb we have to use a write-back instruction. */
8293 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8294 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8295 size = TARGET_ARM ? 16 : 0;
8299 emit_move_insn (mem, reg);
8300 size = GET_MODE_SIZE (GET_MODE (reg));
8304 /* The optimizer does not know that the call sets the function value
8305 registers we stored in the result block. We avoid problems by
8306 claiming that all hard registers are used and clobbered at this
8308 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload every possible return register from
;; the result block (operand 0) as listed in operand 1, then return.
8314 (define_expand "untyped_return"
8315 [(match_operand:BLK 0 "memory_operand" "")
8316 (match_operand 1 "" "")]
8321 rtx addr = gen_reg_rtx (Pmode);
8325 emit_move_insn (addr, XEXP (operands[0], 0));
8326 mem = change_address (operands[0], BLKmode, addr);
8328 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8330 HOST_WIDE_INT offset = 0;
8331 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8334 emit_move_insn (addr, plus_constant (addr, size));
8336 mem = change_address (mem, GET_MODE (reg), NULL);
8337 if (REGNO (reg) == R0_REGNUM)
8339 /* On thumb we have to use a write-back instruction. */
8340 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8341 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8342 size = TARGET_ARM ? 16 : 0;
8346 emit_move_insn (reg, mem);
8347 size = GET_MODE_SIZE (GET_MODE (reg));
8351 /* Emit USE insns before the return. */
8352 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8353 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8355 /* Construct the return. */
8356 expand_naked_return ();
8362 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8363 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no code.
8365 (define_insn "blockage"
8366 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8369 [(set_attr "length" "0")
8370 (set_attr "type" "block")]
;; Switch-statement dispatch.  Bias the index by the lower bound if it
;; is non-zero, then pick the target-specific dispatch pattern.
8373 (define_expand "casesi"
8374 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8375 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8376 (match_operand:SI 2 "const_int_operand" "") ; total range
8377 (match_operand:SI 3 "" "") ; table label
8378 (match_operand:SI 4 "" "")] ; Out of range label
8379 "TARGET_32BIT || optimize_size || flag_pic"
8382 enum insn_code code;
8383 if (operands[1] != const0_rtx)
8385 rtx reg = gen_reg_rtx (SImode);
8387 emit_insn (gen_addsi3 (reg, operands[0],
8388 gen_int_mode (-INTVAL (operands[1]),
;; Select the dispatch pattern: ARM, Thumb-1 PIC, or Thumb-2 (PIC or not).
8394 code = CODE_FOR_arm_casesi_internal;
8395 else if (TARGET_THUMB1)
8396 code = CODE_FOR_thumb1_casesi_internal_pic;
8398 code = CODE_FOR_thumb2_casesi_internal_pic;
8400 code = CODE_FOR_thumb2_casesi_internal;
8402 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8403 operands[2] = force_reg (SImode, operands[2]);
8405 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8406 operands[3], operands[4]));
8411 ;; The USE in this pattern is needed to tell flow analysis that this is
8412 ;; a CASESI insn. It has no other purpose.
;; ARM dispatch: compare the index against the range, then either add
;; into the PC (branch table) or load the PC from the jump table;
;; out-of-range falls through to the branch to operand 3.
8413 (define_insn "arm_casesi_internal"
8414 [(parallel [(set (pc)
8416 (leu (match_operand:SI 0 "s_register_operand" "r")
8417 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8418 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8419 (label_ref (match_operand 2 "" ""))))
8420 (label_ref (match_operand 3 "" ""))))
8421 (clobber (reg:CC CC_REGNUM))
8422 (use (label_ref (match_dup 2)))])]
8426 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8427 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8429 [(set_attr "conds" "clob")
8430 (set_attr "length" "12")]
;; Thumb-1 PIC dispatch: branch out of range to operand 3, copy the
;; index into r0, then jump through the dispatch helper below.
8433 (define_expand "thumb1_casesi_internal_pic"
8434 [(match_operand:SI 0 "s_register_operand" "")
8435 (match_operand:SI 1 "thumb1_cmp_operand" "")
8436 (match_operand 2 "" "")
8437 (match_operand 3 "" "")]
8441 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8442 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8444 reg0 = gen_rtx_REG (SImode, 0);
8445 emit_move_insn (reg0, operands[0]);
8446 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; The actual Thumb-1 table jump; index is in r0, ip and lr clobbered.
8451 (define_insn "thumb1_casesi_dispatch"
8452 [(parallel [(set (pc) (unspec [(reg:SI 0)
8453 (label_ref (match_operand 0 "" ""))
8454 ;; (label_ref (match_operand 1 "" ""))
8456 UNSPEC_THUMB1_CASESI))
8457 (clobber (reg:SI IP_REGNUM))
8458 (clobber (reg:SI LR_REGNUM))])]
8460 "* return thumb1_output_casesi(operands);"
8461 [(set_attr "length" "4")]
;; Jump through a register.
8464 (define_expand "indirect_jump"
8466 (match_operand:SI 0 "s_register_operand" ""))]
8469 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8470 address and use bx. */
8474 tmp = gen_reg_rtx (SImode);
8475 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8481 ;; NB Never uses BX.
8482 (define_insn "*arm_indirect_jump"
8484 (match_operand:SI 0 "s_register_operand" "r"))]
8486 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8487 [(set_attr "predicable" "yes")]
;; Indirect jump loading the PC straight from memory.
8490 (define_insn "*load_indirect_jump"
8492 (match_operand:SI 0 "memory_operand" "m"))]
8494 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8495 [(set_attr "type" "load1")
8496 (set_attr "pool_range" "4096")
8497 (set_attr "neg_pool_range" "4084")
8498 (set_attr "predicable" "yes")]
8501 ;; NB Never uses BX.
8502 (define_insn "*thumb1_indirect_jump"
8504 (match_operand:SI 0 "register_operand" "l*r"))]
8507 [(set_attr "conds" "clob")
8508 (set_attr "length" "2")]
8518 if (TARGET_UNIFIED_ASM)
8521 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8522 return \"mov\\tr8, r8\";
8524 [(set (attr "length")
8525 (if_then_else (eq_attr "is_thumb" "yes")
8531 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Shiftable op combined with a shifted operand in one instruction
;; (e.g. add r0, r2, r4, lsl #n).  Alternative 1 allows a register
;; shift amount, which is ARM-state only (arch "a").
8533 (define_insn "*arith_shiftsi"
8534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8535 (match_operator:SI 1 "shiftable_operator"
8536 [(match_operator:SI 3 "shift_operator"
8537 [(match_operand:SI 4 "s_register_operand" "r,r")
8538 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8539 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8541 "%i1%?\\t%0, %2, %4%S3"
8542 [(set_attr "predicable" "yes")
8543 (set_attr "shift" "4")
8544 (set_attr "arch" "32,a")
8545 ;; We have to make sure to disable the second alternative if
8546 ;; the shift_operator is MULT, since otherwise the insn will
8547 ;; also match a multiply_accumulate pattern and validate_change
8548 ;; will allow a replacement of the constant with a register
8549 ;; despite the checks done in shift_operator.
8550 (set_attr_alternative "insn_enabled"
8551 [(const_string "yes")
8553 (match_operand:SI 3 "mult_operator" "")
8554 (const_string "no") (const_string "yes"))])
8555 (set_attr "type" "alu_shift,alu_shift_reg")])
8558 [(set (match_operand:SI 0 "s_register_operand" "")
8559 (match_operator:SI 1 "shiftable_operator"
8560 [(match_operator:SI 2 "shiftable_operator"
8561 [(match_operator:SI 3 "shift_operator"
8562 [(match_operand:SI 4 "s_register_operand" "")
8563 (match_operand:SI 5 "reg_or_int_operand" "")])
8564 (match_operand:SI 6 "s_register_operand" "")])
8565 (match_operand:SI 7 "arm_rhs_operand" "")]))
8566 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8569 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8572 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi but also setting the condition codes (the %. flag
;; suffix); the first form keeps the arithmetic result, the _scratch
;; form discards it and only wants the flags.
8575 (define_insn "*arith_shiftsi_compare0"
8576 [(set (reg:CC_NOOV CC_REGNUM)
8578 (match_operator:SI 1 "shiftable_operator"
8579 [(match_operator:SI 3 "shift_operator"
8580 [(match_operand:SI 4 "s_register_operand" "r,r")
8581 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8582 (match_operand:SI 2 "s_register_operand" "r,r")])
8584 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8585 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8588 "%i1%.\\t%0, %2, %4%S3"
8589 [(set_attr "conds" "set")
8590 (set_attr "shift" "4")
8591 (set_attr "arch" "32,a")
8592 (set_attr "type" "alu_shift,alu_shift_reg")])
8594 (define_insn "*arith_shiftsi_compare0_scratch"
8595 [(set (reg:CC_NOOV CC_REGNUM)
8597 (match_operator:SI 1 "shiftable_operator"
8598 [(match_operator:SI 3 "shift_operator"
8599 [(match_operand:SI 4 "s_register_operand" "r,r")
8600 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8601 (match_operand:SI 2 "s_register_operand" "r,r")])
8603 (clobber (match_scratch:SI 0 "=r,r"))]
8605 "%i1%.\\t%0, %2, %4%S3"
8606 [(set_attr "conds" "set")
8607 (set_attr "shift" "4")
8608 (set_attr "arch" "32,a")
8609 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Subtract with a shifted second operand (sub r0, r1, r3, <shift>),
;; plus flag-setting and flag-only (scratch) variants below.
8611 (define_insn "*sub_shiftsi"
8612 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8613 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8614 (match_operator:SI 2 "shift_operator"
8615 [(match_operand:SI 3 "s_register_operand" "r,r")
8616 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8618 "sub%?\\t%0, %1, %3%S2"
8619 [(set_attr "predicable" "yes")
8620 (set_attr "shift" "3")
8621 (set_attr "arch" "32,a")
8622 (set_attr "type" "alu_shift,alu_shift_reg")])
8624 (define_insn "*sub_shiftsi_compare0"
8625 [(set (reg:CC_NOOV CC_REGNUM)
8627 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8628 (match_operator:SI 2 "shift_operator"
8629 [(match_operand:SI 3 "s_register_operand" "r,r")
8630 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8632 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8633 (minus:SI (match_dup 1)
8634 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8636 "sub%.\\t%0, %1, %3%S2"
8637 [(set_attr "conds" "set")
8638 (set_attr "shift" "3")
8639 (set_attr "arch" "32,a")
8640 (set_attr "type" "alu_shift,alu_shift_reg")])
8642 (define_insn "*sub_shiftsi_compare0_scratch"
8643 [(set (reg:CC_NOOV CC_REGNUM)
8645 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8646 (match_operator:SI 2 "shift_operator"
8647 [(match_operand:SI 3 "s_register_operand" "r,r")
8648 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8650 (clobber (match_scratch:SI 0 "=r,r"))]
8652 "sub%.\\t%0, %1, %3%S2"
8653 [(set_attr "conds" "set")
8654 (set_attr "shift" "3")
8655 (set_attr "arch" "32,a")
8656 (set_attr "type" "alu_shift,alu_shift_reg")])
;; AND of a register with the 0/1 result of a stored comparison:
;; clear on the false condition, AND with 1 on the true one.
8659 (define_insn "*and_scc"
8660 [(set (match_operand:SI 0 "s_register_operand" "=r")
8661 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8662 [(match_operand 3 "cc_register" "") (const_int 0)])
8663 (match_operand:SI 2 "s_register_operand" "r")))]
8665 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8666 [(set_attr "conds" "use")
8667 (set_attr "insn" "mov")
8668 (set_attr "length" "8")]
;; OR of a register with a stored comparison; the first alternative
;; ties operand 1 to the destination so a single conditional ORR works.
8671 (define_insn "*ior_scc"
8672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8673 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8674 [(match_operand 3 "cc_register" "") (const_int 0)])
8675 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8679 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8680 [(set_attr "conds" "use")
8681 (set_attr "length" "4,8")]
8684 ; A series of splitters for the compare_scc pattern below. Note that
8685 ; order is important.
8687 [(set (match_operand:SI 0 "s_register_operand" "")
8688 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8690 (clobber (reg:CC CC_REGNUM))]
8691 "TARGET_32BIT && reload_completed"
8692 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8695 [(set (match_operand:SI 0 "s_register_operand" "")
8696 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8698 (clobber (reg:CC CC_REGNUM))]
8699 "TARGET_32BIT && reload_completed"
8700 [(set (match_dup 0) (not:SI (match_dup 1)))
8701 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8704 [(set (match_operand:SI 0 "s_register_operand" "")
8705 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8707 (clobber (reg:CC CC_REGNUM))]
8708 "TARGET_32BIT && reload_completed"
8710 [(set (reg:CC CC_REGNUM)
8711 (compare:CC (const_int 1) (match_dup 1)))
8713 (minus:SI (const_int 1) (match_dup 1)))])
8714 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8715 (set (match_dup 0) (const_int 0)))])
8718 [(set (match_operand:SI 0 "s_register_operand" "")
8719 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8720 (match_operand:SI 2 "const_int_operand" "")))
8721 (clobber (reg:CC CC_REGNUM))]
8722 "TARGET_32BIT && reload_completed"
8724 [(set (reg:CC CC_REGNUM)
8725 (compare:CC (match_dup 1) (match_dup 2)))
8726 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8727 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8728 (set (match_dup 0) (const_int 1)))]
8730 operands[3] = GEN_INT (-INTVAL (operands[2]));
8734 [(set (match_operand:SI 0 "s_register_operand" "")
8735 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8736 (match_operand:SI 2 "arm_add_operand" "")))
8737 (clobber (reg:CC CC_REGNUM))]
8738 "TARGET_32BIT && reload_completed"
8740 [(set (reg:CC_NOOV CC_REGNUM)
8741 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8743 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8744 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8745 (set (match_dup 0) (const_int 1)))])
;; Store the boolean result of a comparison: split after reload into a
;; compare followed by two conditional moves of 0 and 1.  Operand 4 is
;; the reversed condition, operand 5 the original (both built below).
8747 (define_insn_and_split "*compare_scc"
8748 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8749 (match_operator:SI 1 "arm_comparison_operator"
8750 [(match_operand:SI 2 "s_register_operand" "r,r")
8751 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8752 (clobber (reg:CC CC_REGNUM))]
8755 "&& reload_completed"
8756 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8757 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8758 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8761 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8762 operands[2], operands[3]);
8763 enum rtx_code rc = GET_CODE (operands[1]);
8765 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8767 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; FP comparisons must be reversed with unordered-awareness.
8768 if (mode == CCFPmode || mode == CCFPEmode)
8769 rc = reverse_condition_maybe_unordered (rc);
8771 rc = reverse_condition (rc);
8772 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8775 ;; Attempt to improve the sequence generated by the compare_scc splitters
8776 ;; not to use conditional execution.
8778 [(set (reg:CC CC_REGNUM)
8779 (compare:CC (match_operand:SI 1 "register_operand" "")
8780 (match_operand:SI 2 "arm_rhs_operand" "")))
8781 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8782 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8783 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8784 (set (match_dup 0) (const_int 1)))
8785 (match_scratch:SI 3 "r")]
8787 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8789 [(set (reg:CC CC_REGNUM)
8790 (compare:CC (const_int 0) (match_dup 3)))
8791 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8793 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8794 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
;; Conditional move where the condition is an equality test of a stored
;; comparison.  For NE the then/else arms keep their sense; for EQ the
;; %d4/%D4 condition suffixes are swapped.  Tied alternatives need only
;; one of the two conditional MOVs.
8796 (define_insn "*cond_move"
8797 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8798 (if_then_else:SI (match_operator 3 "equality_operator"
8799 [(match_operator 4 "arm_comparison_operator"
8800 [(match_operand 5 "cc_register" "") (const_int 0)])
8802 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8803 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8806 if (GET_CODE (operands[3]) == NE)
8808 if (which_alternative != 1)
8809 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8810 if (which_alternative != 0)
8811 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8814 if (which_alternative != 0)
8815 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8816 if (which_alternative != 1)
8817 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8820 [(set_attr "conds" "use")
8821 (set_attr "insn" "mov")
8822 (set_attr "length" "4,4,8")]
;; Shiftable operation whose first input is the 0/1 result of a
;; comparison.  LT against 0 is special-cased to use the sign bit
;; (lsr #31) without a compare; otherwise emit cmp plus conditional ops.
8825 (define_insn "*cond_arith"
8826 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8827 (match_operator:SI 5 "shiftable_operator"
8828 [(match_operator:SI 4 "arm_comparison_operator"
8829 [(match_operand:SI 2 "s_register_operand" "r,r")
8830 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8831 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8832 (clobber (reg:CC CC_REGNUM))]
8835 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8836 return \"%i5\\t%0, %1, %2, lsr #31\";
8838 output_asm_insn (\"cmp\\t%2, %3\", operands);
;; AND needs the false case zeroed; MINUS needs 0 - operand1.
8839 if (GET_CODE (operands[5]) == AND)
8840 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8841 else if (GET_CODE (operands[5]) == MINUS)
8842 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8843 else if (which_alternative != 0)
8844 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8845 return \"%i5%d4\\t%0, %1, #1\";
8847 [(set_attr "conds" "clob")
8848 (set_attr "length" "12")]
;; Subtract the 0/1 result of a comparison from a register: compare,
;; optionally copy operand 1 into place, then conditionally sub #1.
8851 (define_insn "*cond_sub"
8852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8853 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8854 (match_operator:SI 4 "arm_comparison_operator"
8855 [(match_operand:SI 2 "s_register_operand" "r,r")
8856 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8857 (clobber (reg:CC CC_REGNUM))]
8860 output_asm_insn (\"cmp\\t%2, %3\", operands);
8861 if (which_alternative != 0)
8862 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8863 return \"sub%d4\\t%0, %1, #1\";
8865 [(set_attr "conds" "clob")
8866 (set_attr "length" "8,12")]
8869 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; Combine two comparisons into one dominance CC mode: a compare
;; followed by a conditional compare.  The opcode table is indexed by
;; alternative (rI/L constraint combinations select cmp vs cmn with the
;; %n negated constant) and by whether the dominating comparison allows
;; the operand order to be swapped.
8870 (define_insn "*cmp_ite0"
8871 [(set (match_operand 6 "dominant_cc_register" "")
8874 (match_operator 4 "arm_comparison_operator"
8875 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8876 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8877 (match_operator:SI 5 "arm_comparison_operator"
8878 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8879 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8885 static const char * const opcodes[4][2] =
8887 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8888 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8889 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8890 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8891 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8892 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8893 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8894 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8897 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8899 return opcodes[which_alternative][swap];
8901 [(set_attr "conds" "set")
8902 (set_attr "length" "8")]
;; *cmp_ite1: like *cmp_ite0 but the if_then_else's else-arm is 1
;; (an OR of the two conditions).  Note the dominance check here uses
;; reverse_condition on operands[4], and the second column of the opcode
;; table uses the inverted condition suffix %D5 rather than %d5.
;; NOTE(review): lines 8907-8908, 8915-8919, 8921, 8930-8931, 8934, 8936
;; of the original are missing from this listing; the compare wrapper and
;; insn condition are not visible — verify against upstream arm.md.
8905 (define_insn "*cmp_ite1"
8906 [(set (match_operand 6 "dominant_cc_register" "")
8909 (match_operator 4 "arm_comparison_operator"
8910 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8911 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8912 (match_operator:SI 5 "arm_comparison_operator"
8913 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8914 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8920 static const char * const opcodes[4][2] =
8922 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8923 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8924 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8925 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8926 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8927 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8928 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8929 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8932 comparison_dominates_p (GET_CODE (operands[5]),
8933 reverse_condition (GET_CODE (operands[4])));
8935 return opcodes[which_alternative][swap];
8937 [(set_attr "conds" "set")
8938 (set_attr "length" "8")]
;; *cmp_and: set a dominant CC register from (and cond4 cond5).
;; Uses the same cmp/cmn opcode table and dominance-driven "swap"
;; selection as *cmp_ite0 above.  Marked non-predicable.
;; NOTE(review): original lines 8943-8944, 8951-8954, 8956, 8965-8966,
;; 8968, 8970 are missing from this listing (compare wrapper, insn
;; condition, "int swap =") — verify against upstream arm.md.
8941 (define_insn "*cmp_and"
8942 [(set (match_operand 6 "dominant_cc_register" "")
8945 (match_operator 4 "arm_comparison_operator"
8946 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8947 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8948 (match_operator:SI 5 "arm_comparison_operator"
8949 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8950 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8955 static const char *const opcodes[4][2] =
8957 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8958 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8959 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8960 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8961 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8962 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8963 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8964 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8967 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8969 return opcodes[which_alternative][swap];
8971 [(set_attr "conds" "set")
8972 (set_attr "predicable" "no")
8973 (set_attr "length" "8")]
;; *cmp_ior: set a dominant CC register from (ior cond4 cond5).
;; The opcode table executes the second comparison only when the first
;; one FAILED (inverted suffixes %D4/%D5), since for an OR the result is
;; already known when the first comparison succeeds.
;; NOTE(review): original lines 8978-8979, 8986-8989, 8991, 9000-9001,
;; 9003, 9005-9006 are missing from this listing — verify against
;; upstream arm.md.
8976 (define_insn "*cmp_ior"
8977 [(set (match_operand 6 "dominant_cc_register" "")
8980 (match_operator 4 "arm_comparison_operator"
8981 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8982 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8983 (match_operator:SI 5 "arm_comparison_operator"
8984 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8985 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8990 static const char *const opcodes[4][2] =
8992 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8993 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8994 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8995 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8996 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8997 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8998 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8999 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9002 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9004 return opcodes[which_alternative][swap];
9007 [(set_attr "conds" "set")
9008 (set_attr "length" "8")]
;; *ior_scc_scc: reg0 = (cmp1 result) | (cmp2 result), as an
;; insn-and-split.  After reload it splits into a dominant-CC-mode
;; compare of the two conditions (via arm_select_dominance_cc_mode with
;; DOM_CC_X_OR_Y) followed by setting operand 0 from (ne CC 0).
;; Clobbers CC.  Length 16, conds clobbered.
;; NOTE(review): original lines 9020, 9022-9023, 9025-9027, 9030, 9032,
;; 9034-9035 are missing from this listing (insn condition, split
;; pattern head, preparation-statement tail) — verify upstream.
9011 (define_insn_and_split "*ior_scc_scc"
9012 [(set (match_operand:SI 0 "s_register_operand" "=r")
9013 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9014 [(match_operand:SI 1 "s_register_operand" "r")
9015 (match_operand:SI 2 "arm_add_operand" "rIL")])
9016 (match_operator:SI 6 "arm_comparison_operator"
9017 [(match_operand:SI 4 "s_register_operand" "r")
9018 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9019 (clobber (reg:CC CC_REGNUM))]
9021 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9024 "TARGET_ARM && reload_completed"
9028 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9029 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9031 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9033 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9036 [(set_attr "conds" "clob")
9037 (set_attr "length" "16")])
9039 ; If the above pattern is followed by a CMP insn, then the compare is
9040 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: same as *ior_scc_scc but the combined condition is
;; also compared (the dominant CC register, operand 0, holds the result
;; of the OR'd comparisons).  Splits after reload into the dominant
;; compare plus (set operand7 (ne CC 0)).
;; NOTE(review): original lines 9049, 9053-9054, 9056-9058, 9061, 9063
;; are missing from this listing — verify against upstream arm.md.
9041 (define_insn_and_split "*ior_scc_scc_cmp"
9042 [(set (match_operand 0 "dominant_cc_register" "")
9043 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9044 [(match_operand:SI 1 "s_register_operand" "r")
9045 (match_operand:SI 2 "arm_add_operand" "rIL")])
9046 (match_operator:SI 6 "arm_comparison_operator"
9047 [(match_operand:SI 4 "s_register_operand" "r")
9048 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9050 (set (match_operand:SI 7 "s_register_operand" "=r")
9051 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9052 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9055 "TARGET_ARM && reload_completed"
9059 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9060 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9062 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9064 [(set_attr "conds" "set")
9065 (set_attr "length" "16")])
;; *and_scc_scc: reg0 = (cmp1 result) & (cmp2 result).  Mirror image of
;; *ior_scc_scc using DOM_CC_X_AND_Y; splits after reload into a
;; dominant-CC compare followed by (set operand0 (ne CC 0)).
;; NOTE(review): original lines 9076, 9078-9079, 9082-9085, 9088, 9090,
;; 9092-9093 are missing from this listing — the insn condition and the
;; split pattern head are not fully visible; verify upstream.
9067 (define_insn_and_split "*and_scc_scc"
9068 [(set (match_operand:SI 0 "s_register_operand" "=r")
9069 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9070 [(match_operand:SI 1 "s_register_operand" "r")
9071 (match_operand:SI 2 "arm_add_operand" "rIL")])
9072 (match_operator:SI 6 "arm_comparison_operator"
9073 [(match_operand:SI 4 "s_register_operand" "r")
9074 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9075 (clobber (reg:CC CC_REGNUM))]
9077 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9080 "TARGET_ARM && reload_completed
9081 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9086 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9087 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9089 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9091 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9094 [(set_attr "conds" "clob")
9095 (set_attr "length" "16")])
9097 ; If the above pattern is followed by a CMP insn, then the compare is
9098 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: AND-of-comparisons whose result is itself compared;
;; the dominant CC register (operand 0) absorbs the redundant compare.
;; Splits after reload into the dominant compare plus
;; (set operand7 (ne CC 0)).
;; NOTE(review): original lines 9107, 9111-9112, 9114-9116, 9119, 9121
;; are missing from this listing — verify against upstream arm.md.
9099 (define_insn_and_split "*and_scc_scc_cmp"
9100 [(set (match_operand 0 "dominant_cc_register" "")
9101 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9102 [(match_operand:SI 1 "s_register_operand" "r")
9103 (match_operand:SI 2 "arm_add_operand" "rIL")])
9104 (match_operator:SI 6 "arm_comparison_operator"
9105 [(match_operand:SI 4 "s_register_operand" "r")
9106 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9108 (set (match_operand:SI 7 "s_register_operand" "=r")
9109 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9110 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9113 "TARGET_ARM && reload_completed"
9117 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9118 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9120 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9122 [(set_attr "conds" "set")
9123 (set_attr "length" "16")])
9125 ;; If there is no dominance in the comparison, then we can still save an
9126 ;; instruction in the AND case, since we can know that the second compare
9127 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: AND of two comparisons with NO dominance relation
;; (the insn condition — partially missing here — requires
;; arm_select_dominance_cc_mode to fail for DOM_CC_X_AND_Y).  Splits
;; after reload into: (1) scc of the first comparison into operand 0
;; (clobbering CC), (2) the second comparison into a fresh CC register
;; (operand 7, mode chosen by SELECT_CC_MODE), (3) a conditional that
;; zeroes operand 0 when the second condition is false.  Early-clobber
;; "&r" on operand 0 keeps it distinct from the still-live inputs.
;; NOTE(review): original lines 9128, 9138, 9140-9141, 9147, 9149-9150,
;; 9153, 9155 are missing from this listing — verify upstream.
9129 (define_insn_and_split "*and_scc_scc_nodom"
9130 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9131 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9132 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9133 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9134 (match_operator:SI 6 "arm_comparison_operator"
9135 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9136 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9137 (clobber (reg:CC CC_REGNUM))]
9139 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9142 "TARGET_ARM && reload_completed"
9143 [(parallel [(set (match_dup 0)
9144 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9145 (clobber (reg:CC CC_REGNUM))])
9146 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9148 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9151 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9152 operands[4], operands[5]),
9154 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9156 [(set_attr "conds" "clob")
9157 (set_attr "length" "20")])
;; Two anonymous define_splits (their "(define_split" header lines,
;; original 9159 and 9178, are MISSING from this listing).  Both rewrite
;; a CC_NOOV compare of (ior (and reg ...) (comparison ...)) — in either
;; operand order — using a scratch register (operand 4): first compute
;; the IOR of the comparison result into the scratch, then compare
;; (and scratch 1) against zero in CC_NOOV mode.
;; NOTE(review): original lines 9163, 9167, 9169-9170, 9172, 9175-9178,
;; 9185-9186, 9188-9189, 9191, 9194-9195 are missing — the split
;; conditions and several pattern fragments are not visible here;
;; verify against upstream arm.md before editing.
9160 [(set (reg:CC_NOOV CC_REGNUM)
9161 (compare:CC_NOOV (ior:SI
9162 (and:SI (match_operand:SI 0 "s_register_operand" "")
9164 (match_operator:SI 1 "arm_comparison_operator"
9165 [(match_operand:SI 2 "s_register_operand" "")
9166 (match_operand:SI 3 "arm_add_operand" "")]))
9168 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9171 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9173 (set (reg:CC_NOOV CC_REGNUM)
9174 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Second split: same transformation with the comparison as the first
;; IOR arm and the (and reg ...) as the second.
9179 [(set (reg:CC_NOOV CC_REGNUM)
9180 (compare:CC_NOOV (ior:SI
9181 (match_operator:SI 1 "arm_comparison_operator"
9182 [(match_operand:SI 2 "s_register_operand" "")
9183 (match_operand:SI 3 "arm_add_operand" "")])
9184 (and:SI (match_operand:SI 0 "s_register_operand" "")
9187 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9190 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9192 (set (reg:CC_NOOV CC_REGNUM)
9193 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9196 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: operand 0 = -(comparison(1,2) ? 1 : 0), i.e. 0 or -1.
;; Fast paths: LT against 0 is a single "mov %0, %1, asr #31"
;; (sign-bit broadcast); NE uses "subs ... ; mvnne %0, #0".  The general
;; case is cmp + conditional mov #0 / mvn #0.  Clobbers CC; length 12.
;; NOTE(review): original lines 9204-9205, 9208, 9211, 9215, 9218 are
;; missing from this listing (insn condition, template delimiters) —
;; verify against upstream arm.md.
9198 (define_insn "*negscc"
9199 [(set (match_operand:SI 0 "s_register_operand" "=r")
9200 (neg:SI (match_operator 3 "arm_comparison_operator"
9201 [(match_operand:SI 1 "s_register_operand" "r")
9202 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9203 (clobber (reg:CC CC_REGNUM))]
9206 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9207 return \"mov\\t%0, %1, asr #31\";
9209 if (GET_CODE (operands[3]) == NE)
9210 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9212 output_asm_insn (\"cmp\\t%1, %2\", operands);
9213 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9214 return \"mvn%d3\\t%0, #0\";
9216 [(set_attr "conds" "clob")
9217 (set_attr "length" "12")]
;; movcond: general conditional move, operand 0 = cond(3,4) ? op1 : op2.
;; Special-cases LT/GE comparisons against zero where one source is a
;; register: uses "asr #31"/"asr #32" sign-mask tricks (and/ands/bic/bics
;; plus one conditional mov) to do it in one or two instructions.
;; The general path emits cmn (when operand 4 is a negatable constant
;; not directly encodable) or cmp, then up to two conditional movs —
;; alternative 0 has op1 tied to the destination, alternative 1 has op2
;; tied, alternative 2 needs both movs.  Clobbers CC.
;; NOTE(review): original lines 9222, 9229-9230, 9233, 9235, 9239, 9241,
;; 9245, 9247-9249, 9252, 9254, 9258, 9260, 9264, 9266-9267, 9271,
;; 9277-9278, 9281 are missing from this listing — in particular the
;; if_then_else wrapper, insn condition, fall-through returns and brace
;; lines are not visible; verify against upstream arm.md.
9220 (define_insn "movcond"
9221 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9223 (match_operator 5 "arm_comparison_operator"
9224 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9225 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9226 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9227 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9228 (clobber (reg:CC CC_REGNUM))]
9231 if (GET_CODE (operands[5]) == LT
9232 && (operands[4] == const0_rtx))
9234 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9236 if (operands[2] == const0_rtx)
9237 return \"and\\t%0, %1, %3, asr #31\";
9238 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9240 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9242 if (operands[1] == const0_rtx)
9243 return \"bic\\t%0, %2, %3, asr #31\";
9244 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9246 /* The only case that falls through to here is when both ops 1 & 2
9250 if (GET_CODE (operands[5]) == GE
9251 && (operands[4] == const0_rtx))
9253 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9255 if (operands[2] == const0_rtx)
9256 return \"bic\\t%0, %1, %3, asr #31\";
9257 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9259 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9261 if (operands[1] == const0_rtx)
9262 return \"and\\t%0, %2, %3, asr #31\";
9263 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9265 /* The only case that falls through to here is when both ops 1 & 2
9268 if (GET_CODE (operands[4]) == CONST_INT
9269 && !const_ok_for_arm (INTVAL (operands[4])))
9270 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9272 output_asm_insn (\"cmp\\t%3, %4\", operands);
9273 if (which_alternative != 0)
9274 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9275 if (which_alternative != 1)
9276 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9279 [(set_attr "conds" "clob")
9280 (set_attr "length" "8,8,12")]
9283 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: if_then_else of (plus reg, addend) vs a move,
;; with an explicit comparison (clobbers CC).  Its output template and
;; insn condition are not visible in this listing (original lines
;; 9290, 9295-9296, 9299 missing) — presumably it is matched/rewritten
;; after the comparison is available in a CC register; verify upstream.
9285 (define_insn "*ifcompare_plus_move"
9286 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9287 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9288 [(match_operand:SI 4 "s_register_operand" "r,r")
9289 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9291 (match_operand:SI 2 "s_register_operand" "r,r")
9292 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9293 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9294 (clobber (reg:CC CC_REGNUM))]
9297 [(set_attr "conds" "clob")
9298 (set_attr "length" "8,12")]
;; *if_plus_move: same selection but the condition is already in a CC
;; register (operand 5), so only conditional add/sub (and, for the
;; untied alternatives, a conditional mov of operand 1) are emitted.
;; Visible alternatives: sub%d4 with #%n3 for negated constants, and
;; two-insn forms ending in mov%D4.  conds "use", lengths 4,4,8,8.
;; NOTE(review): original lines 9303, 9306, 9310-9312, 9319 are missing
;; from this listing — verify against upstream arm.md.
9301 (define_insn "*if_plus_move"
9302 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9304 (match_operator 4 "arm_comparison_operator"
9305 [(match_operand 5 "cc_register" "") (const_int 0)])
9307 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9308 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9309 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9313 sub%d4\\t%0, %2, #%n3
9314 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9315 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9316 [(set_attr "conds" "use")
9317 (set_attr "length" "4,4,8,8")
9318 (set_attr "type" "*,*,*,*")]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the move in
;; the then-arm and the (plus ...) in the else-arm; explicit comparison,
;; clobbers CC.  Output template / insn condition not visible in this
;; listing (original lines 9327, 9331-9332, 9335 missing) — verify
;; against upstream arm.md.
9321 (define_insn "*ifcompare_move_plus"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9323 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9324 [(match_operand:SI 4 "s_register_operand" "r,r")
9325 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9326 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9328 (match_operand:SI 2 "s_register_operand" "r,r")
9329 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9330 (clobber (reg:CC CC_REGNUM))]
9333 [(set_attr "conds" "clob")
9334 (set_attr "length" "8,12")]
;; *if_move_plus: CC already set; the add/sub happens on the INVERSE
;; condition (%D4) and the mov of operand 1 on the direct one (%d4),
;; since the plus is the else-arm.  conds "use", lengths 4,4,8,8.
;; NOTE(review): original lines 9339, 9343, 9346-9348, 9355 are missing
;; from this listing — verify against upstream arm.md.
9337 (define_insn "*if_move_plus"
9338 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9340 (match_operator 4 "arm_comparison_operator"
9341 [(match_operand 5 "cc_register" "") (const_int 0)])
9342 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9344 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9345 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9349 sub%D4\\t%0, %2, #%n3
9350 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9351 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9352 [(set_attr "conds" "use")
9353 (set_attr "length" "4,4,8,8")
9354 (set_attr "type" "*,*,*,*")]
;; *ifcompare_arith_arith: if_then_else selecting between two shiftable
;; binary operations (operators 8 and 7) with an explicit comparison
;; (operator 9); clobbers CC, length 12.  Output template / insn
;; condition not visible in this listing (original lines 9369-9370,
;; 9373 missing) — verify against upstream arm.md.
9357 (define_insn "*ifcompare_arith_arith"
9358 [(set (match_operand:SI 0 "s_register_operand" "=r")
9359 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9360 [(match_operand:SI 5 "s_register_operand" "r")
9361 (match_operand:SI 6 "arm_add_operand" "rIL")])
9362 (match_operator:SI 8 "shiftable_operator"
9363 [(match_operand:SI 1 "s_register_operand" "r")
9364 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9365 (match_operator:SI 7 "shiftable_operator"
9366 [(match_operand:SI 3 "s_register_operand" "r")
9367 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9368 (clobber (reg:CC CC_REGNUM))]
9371 [(set_attr "conds" "clob")
9372 (set_attr "length" "12")]
;; *if_arith_arith: CC already set (operand 8); emits the then-arm op
;; predicated on %d5 and the else-arm op on %D5 — two conditionally
;; executed data-processing instructions, 8 bytes, conds "use".
;; (%I6/%I7 expand to the operator's mnemonic.)
9375 (define_insn "*if_arith_arith"
9376 [(set (match_operand:SI 0 "s_register_operand" "=r")
9377 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9378 [(match_operand 8 "cc_register" "") (const_int 0)])
9379 (match_operator:SI 6 "shiftable_operator"
9380 [(match_operand:SI 1 "s_register_operand" "r")
9381 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9382 (match_operator:SI 7 "shiftable_operator"
9383 [(match_operand:SI 3 "s_register_operand" "r")
9384 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9386 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9387 [(set_attr "conds" "use")
9388 (set_attr "length" "8")]
;; *ifcompare_arith_move: if cond(2,3) then (op7 op4 op5) else op1, with
;; explicit compare.  Fast path: when comparing against zero with LT/GE,
;; op7 has an identity at 0 (not AND), and op1 is the same register as
;; op4 (distinct from the destination), a two-insn and/bic sign-mask
;; sequence is used.  General path: cmn/cmp, the conditional arithmetic
;; op (%I7%d6), and for the untied alternative a conditional mov%D6.
;; Clobbers CC; lengths 8,12.
;; NOTE(review): original lines 9401-9402, 9412, 9417, 9421, 9426-9427,
;; 9430 are missing from this listing (insn condition, braces, final
;; return) — verify against upstream arm.md.
9391 (define_insn "*ifcompare_arith_move"
9392 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9393 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9394 [(match_operand:SI 2 "s_register_operand" "r,r")
9395 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9396 (match_operator:SI 7 "shiftable_operator"
9397 [(match_operand:SI 4 "s_register_operand" "r,r")
9398 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9399 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9400 (clobber (reg:CC CC_REGNUM))]
9403 /* If we have an operation where (op x 0) is the identity operation and
9404 the conditional operator is LT or GE and we are comparing against zero and
9405 everything is in registers then we can do this in two instructions. */
9406 if (operands[3] == const0_rtx
9407 && GET_CODE (operands[7]) != AND
9408 && GET_CODE (operands[5]) == REG
9409 && GET_CODE (operands[1]) == REG
9410 && REGNO (operands[1]) == REGNO (operands[4])
9411 && REGNO (operands[4]) != REGNO (operands[0]))
9413 if (GET_CODE (operands[6]) == LT)
9414 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9415 else if (GET_CODE (operands[6]) == GE)
9416 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9418 if (GET_CODE (operands[3]) == CONST_INT
9419 && !const_ok_for_arm (INTVAL (operands[3])))
9420 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9422 output_asm_insn (\"cmp\\t%2, %3\", operands);
9423 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9424 if (which_alternative != 0)
9425 return \"mov%D6\\t%0, %1\";
9428 [(set_attr "conds" "clob")
9429 (set_attr "length" "8,12")]
;; *if_arith_move: CC already set (operand 6); conditional arithmetic op
;; on %d4, plus a conditional mov%D4 of operand 1 for the untied
;; alternative.  conds "use", lengths 4,8.
;; NOTE(review): original lines 9440-9442 are missing (insn condition
;; and first template alternative) — verify against upstream arm.md.
9432 (define_insn "*if_arith_move"
9433 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9434 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9435 [(match_operand 6 "cc_register" "") (const_int 0)])
9436 (match_operator:SI 5 "shiftable_operator"
9437 [(match_operand:SI 2 "s_register_operand" "r,r")
9438 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9439 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9443 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9444 [(set_attr "conds" "use")
9445 (set_attr "length" "4,8")
9446 (set_attr "type" "*,*")]
;; *ifcompare_move_arith: mirror of *ifcompare_arith_move — the move is
;; the then-arm, the shiftable op the else-arm.  The LT/GE-vs-zero fast
;; path is correspondingly inverted (GE -> and-mask, LT -> bic-mask).
;; General path: cmn/cmp, optional mov%d6 for the untied alternative,
;; then the arithmetic op predicated on the INVERSE condition (%D6).
;; Clobbers CC; lengths 8,12.
;; NOTE(review): original lines 9459-9460, 9470, 9475-9476, 9480, 9482,
;; 9486, 9489 are missing from this listing — verify upstream.
9449 (define_insn "*ifcompare_move_arith"
9450 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9451 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9452 [(match_operand:SI 4 "s_register_operand" "r,r")
9453 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9454 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9455 (match_operator:SI 7 "shiftable_operator"
9456 [(match_operand:SI 2 "s_register_operand" "r,r")
9457 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9458 (clobber (reg:CC CC_REGNUM))]
9461 /* If we have an operation where (op x 0) is the identity operation and
9462 the conditional operator is LT or GE and we are comparing against zero and
9463 everything is in registers then we can do this in two instructions */
9464 if (operands[5] == const0_rtx
9465 && GET_CODE (operands[7]) != AND
9466 && GET_CODE (operands[3]) == REG
9467 && GET_CODE (operands[1]) == REG
9468 && REGNO (operands[1]) == REGNO (operands[2])
9469 && REGNO (operands[2]) != REGNO (operands[0]))
9471 if (GET_CODE (operands[6]) == GE)
9472 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9473 else if (GET_CODE (operands[6]) == LT)
9474 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9477 if (GET_CODE (operands[5]) == CONST_INT
9478 && !const_ok_for_arm (INTVAL (operands[5])))
9479 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9481 output_asm_insn (\"cmp\\t%4, %5\", operands);
9483 if (which_alternative != 0)
9484 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9485 return \"%I7%D6\\t%0, %2, %3\";
9487 [(set_attr "conds" "clob")
9488 (set_attr "length" "8,12")]
;; *if_move_arith: CC already set; else-arm arithmetic op on %D4 plus,
;; for the untied alternative, mov%d4 of operand 1.  conds "use".
;; NOTE(review): original lines 9493, 9500-9502, 9507 are missing
;; (if_then_else head line, insn condition, first alternative).
9491 (define_insn "*if_move_arith"
9492 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9494 (match_operator 4 "arm_comparison_operator"
9495 [(match_operand 6 "cc_register" "") (const_int 0)])
9496 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9497 (match_operator:SI 5 "shiftable_operator"
9498 [(match_operand:SI 2 "s_register_operand" "r,r")
9499 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9503 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9504 [(set_attr "conds" "use")
9505 (set_attr "length" "4,8")
9506 (set_attr "type" "*,*")]
;; Four patterns selecting between a value and a bitwise NOT (mvn):
;; *ifcompare_move_not / *if_move_not have the NOT in the else-arm;
;; *ifcompare_not_move / *if_not_move have it in the then-arm.
;; The ifcompare_* forms carry the explicit comparison and clobber CC
;; (their templates/conditions are not visible in this listing —
;; original lines 9511, 9516, 9519-9520, 9523 missing); the if_* forms
;; consume an existing CC register and emit mvn / mov / mvn #%B1
;; conditionally (%B1 = bitwise complement of constant operand 1).
9509 (define_insn "*ifcompare_move_not"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9512 (match_operator 5 "arm_comparison_operator"
9513 [(match_operand:SI 3 "s_register_operand" "r,r")
9514 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9515 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9517 (match_operand:SI 2 "s_register_operand" "r,r"))))
9518 (clobber (reg:CC CC_REGNUM))]
9521 [(set_attr "conds" "clob")
9522 (set_attr "length" "8,12")]
;; *if_move_not: NOT on the inverse condition (%D4); first (missing)
;; alternative is presumably the single mvn%D4 — original lines 9527,
;; 9532-9534 are not visible here; verify upstream.
9525 (define_insn "*if_move_not"
9526 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9528 (match_operator 4 "arm_comparison_operator"
9529 [(match_operand 3 "cc_register" "") (const_int 0)])
9530 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9531 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9535 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9536 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9537 [(set_attr "conds" "use")
9538 (set_attr "insn" "mvn")
9539 (set_attr "length" "4,8,8")]
;; *ifcompare_not_move: explicit compare, NOT in the then-arm; template
;; and condition not visible (original lines 9544, 9548, 9552-9553,
;; 9556 missing).
9542 (define_insn "*ifcompare_not_move"
9543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9545 (match_operator 5 "arm_comparison_operator"
9546 [(match_operand:SI 3 "s_register_operand" "r,r")
9547 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9549 (match_operand:SI 2 "s_register_operand" "r,r"))
9550 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9551 (clobber (reg:CC CC_REGNUM))]
9554 [(set_attr "conds" "clob")
9555 (set_attr "length" "8,12")]
;; *if_not_move: NOT on the direct condition (%d4), move of operand 1 on
;; the inverse (%D4); original lines 9560, 9565-9567 missing.
9558 (define_insn "*if_not_move"
9559 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9561 (match_operator 4 "arm_comparison_operator"
9562 [(match_operand 3 "cc_register" "") (const_int 0)])
9563 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9564 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9568 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9569 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9570 [(set_attr "conds" "use")
9571 (set_attr "insn" "mvn")
9572 (set_attr "length" "4,8,8")]
;; Four patterns selecting between a shifted register and a value:
;; *ifcompare_shift_move / *if_shift_move (shift in the then-arm) and
;; *ifcompare_move_shift / *if_move_shift (shift in the else-arm).
;; The ifcompare_* forms carry the comparison and clobber CC (templates
;; and conditions not visible in this listing — original lines 9577,
;; 9586-9587, 9590 and 9617, 9626-9627, 9630 missing); the if_* forms
;; consume a CC register, emitting a conditional "mov ... %2%S4"
;; (%S4 = the shift operator/amount) and a conditional mov/mvn of
;; operand 1.  The "type" attribute distinguishes constant shifts
;; (alu_shift) from register shifts (alu_shift_reg).
9575 (define_insn "*ifcompare_shift_move"
9576 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9578 (match_operator 6 "arm_comparison_operator"
9579 [(match_operand:SI 4 "s_register_operand" "r,r")
9580 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9581 (match_operator:SI 7 "shift_operator"
9582 [(match_operand:SI 2 "s_register_operand" "r,r")
9583 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9584 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9585 (clobber (reg:CC CC_REGNUM))]
9588 [(set_attr "conds" "clob")
9589 (set_attr "length" "8,12")]
;; *if_shift_move: shift on %d5, mov/mvn of operand 1 on %D5.
;; Original lines 9594, 9601-9603 are missing (if_then_else head, insn
;; condition, first template alternative) — verify upstream.
9592 (define_insn "*if_shift_move"
9593 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9595 (match_operator 5 "arm_comparison_operator"
9596 [(match_operand 6 "cc_register" "") (const_int 0)])
9597 (match_operator:SI 4 "shift_operator"
9598 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9599 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9600 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9604 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9605 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9606 [(set_attr "conds" "use")
9607 (set_attr "shift" "2")
9608 (set_attr "length" "4,8,8")
9609 (set_attr "insn" "mov")
9610 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9611 (const_string "alu_shift")
9612 (const_string "alu_shift_reg")))]
;; *ifcompare_move_shift: explicit compare, shift in the else-arm.
9615 (define_insn "*ifcompare_move_shift"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9618 (match_operator 6 "arm_comparison_operator"
9619 [(match_operand:SI 4 "s_register_operand" "r,r")
9620 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9621 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9622 (match_operator:SI 7 "shift_operator"
9623 [(match_operand:SI 2 "s_register_operand" "r,r")
9624 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9625 (clobber (reg:CC CC_REGNUM))]
9628 [(set_attr "conds" "clob")
9629 (set_attr "length" "8,12")]
;; *if_move_shift: shift on %D5, mov/mvn of operand 1 on %d5.
;; Original lines 9634, 9641-9643 are missing — verify upstream.
9632 (define_insn "*if_move_shift"
9633 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9635 (match_operator 5 "arm_comparison_operator"
9636 [(match_operand 6 "cc_register" "") (const_int 0)])
9637 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9638 (match_operator:SI 4 "shift_operator"
9639 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9640 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9644 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9645 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9646 [(set_attr "conds" "use")
9647 (set_attr "shift" "2")
9648 (set_attr "length" "4,8,8")
9649 (set_attr "insn" "mov")
9650 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9651 (const_string "alu_shift")
9652 (const_string "alu_shift_reg")))]
;; *ifcompare_shift_shift: if_then_else between two shifted registers
;; with an explicit comparison; clobbers CC, length 12.  Template and
;; insn condition not visible in this listing (original lines 9657,
;; 9668-9669, 9672 missing) — verify against upstream arm.md.
9655 (define_insn "*ifcompare_shift_shift"
9656 [(set (match_operand:SI 0 "s_register_operand" "=r")
9658 (match_operator 7 "arm_comparison_operator"
9659 [(match_operand:SI 5 "s_register_operand" "r")
9660 (match_operand:SI 6 "arm_add_operand" "rIL")])
9661 (match_operator:SI 8 "shift_operator"
9662 [(match_operand:SI 1 "s_register_operand" "r")
9663 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9664 (match_operator:SI 9 "shift_operator"
9665 [(match_operand:SI 3 "s_register_operand" "r")
9666 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9667 (clobber (reg:CC CC_REGNUM))]
9670 [(set_attr "conds" "clob")
9671 (set_attr "length" "12")]
;; *if_shift_shift: CC already set; two conditional shifted moves,
;; then-arm on %d5 and else-arm on %D5.  Type is alu_shift only when
;; BOTH shift amounts are constants.
9674 (define_insn "*if_shift_shift"
9675 [(set (match_operand:SI 0 "s_register_operand" "=r")
9677 (match_operator 5 "arm_comparison_operator"
9678 [(match_operand 8 "cc_register" "") (const_int 0)])
9679 (match_operator:SI 6 "shift_operator"
9680 [(match_operand:SI 1 "s_register_operand" "r")
9681 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9682 (match_operator:SI 7 "shift_operator"
9683 [(match_operand:SI 3 "s_register_operand" "r")
9684 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9686 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9687 [(set_attr "conds" "use")
9688 (set_attr "shift" "1")
9689 (set_attr "length" "8")
9690 (set_attr "insn" "mov")
9691 (set (attr "type") (if_then_else
9692 (and (match_operand 2 "const_int_operand" "")
9693 (match_operand 4 "const_int_operand" ""))
9694 (const_string "alu_shift")
9695 (const_string "alu_shift_reg")))]
;; Four patterns selecting between a bitwise NOT and a shiftable binary
;; op: *ifcompare_not_arith / *if_not_arith (NOT in the then-arm) and
;; *ifcompare_arith_not / *if_arith_not (NOT in the else-arm).
;; The ifcompare_* forms carry the comparison and clobber CC (templates
;; and insn conditions not visible here — original lines 9700,
;; 9709-9710, 9713 and 9733, 9742-9743, 9746 missing); the if_* forms
;; consume an existing CC register and emit mvn + the arithmetic op
;; under complementary condition suffixes, 8 bytes each.
9698 (define_insn "*ifcompare_not_arith"
9699 [(set (match_operand:SI 0 "s_register_operand" "=r")
9701 (match_operator 6 "arm_comparison_operator"
9702 [(match_operand:SI 4 "s_register_operand" "r")
9703 (match_operand:SI 5 "arm_add_operand" "rIL")])
9704 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9705 (match_operator:SI 7 "shiftable_operator"
9706 [(match_operand:SI 2 "s_register_operand" "r")
9707 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9708 (clobber (reg:CC CC_REGNUM))]
9711 [(set_attr "conds" "clob")
9712 (set_attr "length" "12")]
;; *if_not_arith: mvn on %d5, arithmetic op on %D5.
9715 (define_insn "*if_not_arith"
9716 [(set (match_operand:SI 0 "s_register_operand" "=r")
9718 (match_operator 5 "arm_comparison_operator"
9719 [(match_operand 4 "cc_register" "") (const_int 0)])
9720 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9721 (match_operator:SI 6 "shiftable_operator"
9722 [(match_operand:SI 2 "s_register_operand" "r")
9723 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9725 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9726 [(set_attr "conds" "use")
9727 (set_attr "insn" "mvn")
9728 (set_attr "length" "8")]
;; *ifcompare_arith_not: explicit compare, NOT in the else-arm.
9731 (define_insn "*ifcompare_arith_not"
9732 [(set (match_operand:SI 0 "s_register_operand" "=r")
9734 (match_operator 6 "arm_comparison_operator"
9735 [(match_operand:SI 4 "s_register_operand" "r")
9736 (match_operand:SI 5 "arm_add_operand" "rIL")])
9737 (match_operator:SI 7 "shiftable_operator"
9738 [(match_operand:SI 2 "s_register_operand" "r")
9739 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9740 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9741 (clobber (reg:CC CC_REGNUM))]
9744 [(set_attr "conds" "clob")
9745 (set_attr "length" "12")]
;; *if_arith_not: mvn on %D5, arithmetic op on %d5.
9748 (define_insn "*if_arith_not"
9749 [(set (match_operand:SI 0 "s_register_operand" "=r")
9751 (match_operator 5 "arm_comparison_operator"
9752 [(match_operand 4 "cc_register" "") (const_int 0)])
9753 (match_operator:SI 6 "shiftable_operator"
9754 [(match_operand:SI 2 "s_register_operand" "r")
9755 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9756 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9758 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9759 [(set_attr "conds" "use")
9760 (set_attr "insn" "mvn")
9761 (set_attr "length" "8")]
;; Four patterns selecting between a negation (rsb ...,#0) and a value:
;; *ifcompare_neg_move / *if_neg_move (neg in the then-arm) and
;; *ifcompare_move_neg / *if_move_neg (neg in the else-arm).
;; The ifcompare_* forms carry the comparison and clobber CC (their
;; templates/conditions are not visible in this listing — original
;; lines 9766, 9773-9774, 9777 and 9797, 9804-9805, 9808 missing);
;; the if_* forms consume an existing CC register, emitting mov/mvn of
;; operand 1 and "rsb ... #0" under complementary condition suffixes.
9764 (define_insn "*ifcompare_neg_move"
9765 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9767 (match_operator 5 "arm_comparison_operator"
9768 [(match_operand:SI 3 "s_register_operand" "r,r")
9769 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9770 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9771 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9772 (clobber (reg:CC CC_REGNUM))]
9775 [(set_attr "conds" "clob")
9776 (set_attr "length" "8,12")]
;; *if_neg_move: rsb on %d4, mov/mvn of operand 1 on %D4.  Original
;; lines 9781, 9786-9788 (if_then_else head, insn condition, first
;; alternative) are missing — verify upstream.
9779 (define_insn "*if_neg_move"
9780 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9782 (match_operator 4 "arm_comparison_operator"
9783 [(match_operand 3 "cc_register" "") (const_int 0)])
9784 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9785 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9789 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9790 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9791 [(set_attr "conds" "use")
9792 (set_attr "length" "4,8,8")]
;; *ifcompare_move_neg: explicit compare, negation in the else-arm.
9795 (define_insn "*ifcompare_move_neg"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9798 (match_operator 5 "arm_comparison_operator"
9799 [(match_operand:SI 3 "s_register_operand" "r,r")
9800 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9801 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9802 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9803 (clobber (reg:CC CC_REGNUM))]
9806 [(set_attr "conds" "clob")
9807 (set_attr "length" "8,12")]
;; *if_move_neg: rsb on %D4, mov/mvn of operand 1 on %d4.
9810 (define_insn "*if_move_neg"
9811 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9813 (match_operator 4 "arm_comparison_operator"
9814 [(match_operand 3 "cc_register" "") (const_int 0)])
9815 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9816 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9820 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9821 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9822 [(set_attr "conds" "use")
9823 (set_attr "length" "4,8,8")]
;; *arith_adjacentmem: apply a shiftable operator to two ADJACENT memory
;; words (adjacent_mem_locations guards the insn).  Loads both words
;; with a single ldm when the base+offset allows it — ldmib when the
;; lower offset is 4, ldmia/ldmda (after an add-immediate to the
;; scratch) otherwise, choosing operand order by register number so the
;; ldm register list is ascending — and falls back to two ldr's when
;; the offset cannot be reached by one add.  Finally emits the operator
;; (%I3) on the two loaded registers.  Uses scratch operand 4; length
;; 12, predicable, type load1.
;; NOTE(review): this listing is missing many original lines (9833-9837,
;; 9839, 9841, 9844-9846, 9849-9850, 9852, 9854, 9857-9858, 9861,
;; 9864-9876, 9878-9880, 9883, 9886, 9888, 9891, 9893, 9895-9897,
;; 9899-9900, 9903, 9906-9910, 9912, 9914-9917, 9919, 9921, 9923-9924),
;; including variable declarations and several control-flow branches —
;; verify the full function body against upstream arm.md.
9826 (define_insn "*arith_adjacentmem"
9827 [(set (match_operand:SI 0 "s_register_operand" "=r")
9828 (match_operator:SI 1 "shiftable_operator"
9829 [(match_operand:SI 2 "memory_operand" "m")
9830 (match_operand:SI 3 "memory_operand" "m")]))
9831 (clobber (match_scratch:SI 4 "=r"))]
9832 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9838 HOST_WIDE_INT val1 = 0, val2 = 0;
9840 if (REGNO (operands[0]) > REGNO (operands[4]))
9842 ldm[1] = operands[4];
9843 ldm[2] = operands[0];
9847 ldm[1] = operands[0];
9848 ldm[2] = operands[4];
9851 base_reg = XEXP (operands[2], 0);
9853 if (!REG_P (base_reg))
9855 val1 = INTVAL (XEXP (base_reg, 1));
9856 base_reg = XEXP (base_reg, 0);
9859 if (!REG_P (XEXP (operands[3], 0)))
9860 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9862 arith[0] = operands[0];
9863 arith[3] = operands[1];
9877 if (val1 !=0 && val2 != 0)
9881 if (val1 == 4 || val2 == 4)
9882 /* Other val must be 8, since we know they are adjacent and neither
9884 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9885 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9887 ldm[0] = ops[0] = operands[4];
9889 ops[2] = GEN_INT (val1);
9890 output_add_immediate (ops);
9892 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9894 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9898 /* Offset is out of range for a single add, so use two ldr. */
9901 ops[2] = GEN_INT (val1);
9902 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9904 ops[2] = GEN_INT (val2);
9905 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9911 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9913 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9918 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9920 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9922 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9925 [(set_attr "length" "12")
9926 (set_attr "predicable" "yes")
9927 (set_attr "type" "load1")]
9930 ; This pattern is never tried by combine, so do it as a peephole
;; NOTE(review): the define_peephole2 header and its enabling condition are
;; not visible in this excerpt; only the match and replacement vectors are.
;; Match: a plain register-to-register move whose source is then compared
;; against zero, setting the condition-code register.
9933 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9934 (match_operand:SI 1 "arm_general_register_operand" ""))
9935 (set (reg:CC CC_REGNUM)
9936 (compare:CC (match_dup 1) (const_int 0)))]
;; Replacement: a single parallel insn that performs the compare and the
;; move together, so one flag-setting move replaces the two-insn sequence.
9938 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9939 (set (match_dup 0) (match_dup 1))])]
9944 [(set (match_operand:SI 0 "s_register_operand" "")
9945 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9947 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9948 [(match_operand:SI 3 "s_register_operand" "")
9949 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9950 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9952 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9953 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9958 ;; This split can be used because CC_Z mode implies that the following
9959 ;; branch will be an equality, or an unsigned inequality, so the sign
9960 ;; extension is not needed.
9963 [(set (reg:CC_Z CC_REGNUM)
9965 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9967 (match_operand 1 "const_int_operand" "")))
9968 (clobber (match_scratch:SI 2 ""))]
9970 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9971 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9972 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9973 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9975 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9978 ;; ??? Check the patterns above for Thumb-2 usefulness
9980 (define_expand "prologue"
9981 [(clobber (const_int 0))]
9984 arm_expand_prologue ();
9986 thumb1_expand_prologue ();
9991 (define_expand "epilogue"
9992 [(clobber (const_int 0))]
9995 if (crtl->calls_eh_return)
9996 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9998 thumb1_expand_epilogue ();
9999 else if (USE_RETURN_INSN (FALSE))
10001 emit_jump_insn (gen_return ());
10004 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10006 gen_rtx_RETURN (VOIDmode)),
10007 VUNSPEC_EPILOGUE));
10012 ;; Note - although unspec_volatiles USE all hard registers,
10013 ;; USEs are ignored after reload has completed. Thus we need
10014 ;; to add an unspec of the link register to ensure that flow
10015 ;; does not think that it is unused by the sibcall branch that
10016 ;; will replace the standard function epilogue.
;; Epilogue emitted for a sibling (tail) call.  The UNSPEC_PROLOGUE_USE of
;; LR keeps the link register live (see the note preceding this insn); the
;; unspec_volatile marks the actual epilogue.
;; NOTE(review): the insn's enabling-condition line is not visible in this
;; excerpt.
10017 (define_insn "sibcall_epilogue"
10018 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10019 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10022 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
/* Cheap case: a single return instruction suffices.  */
10023 return output_return_instruction (const_true_rtx, FALSE, FALSE);
/* Otherwise emit the full epilogue sequence.  */
10024 return arm_output_epilogue (next_nonnote_insn (insn));
10026 ;; Length is absolute worst case
10027 [(set_attr "length" "44")
10028 (set_attr "type" "block")
10029 ;; We don't clobber the conditions, but the potential length of this
10030 ;; operation is sufficient to make conditionalizing the sequence
10031 ;; unlikely to be profitable.
10032 (set_attr "conds" "clob")]
;; Matches the epilogue unspec and prints the function epilogue at output
;; time: arm_output_epilogue for 32-bit targets, the Thumb-1 routine
;; otherwise.  NOTE(review): the enabling condition and the leading
;; "if (...)" line of the output code are not visible in this excerpt.
10035 (define_insn "*epilogue_insns"
10036 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10040 return arm_output_epilogue (NULL);
10041 else /* TARGET_THUMB1 */
10042 return thumb_unexpanded_epilogue ();
10044 ; Length is absolute worst case
10045 [(set_attr "length" "44")
10046 (set_attr "type" "block")
10047 ;; We don't clobber the conditions, but the potential length of this
10048 ;; operation is sufficient to make conditionalizing the sequence
10049 ;; unlikely to be profitable.
10050 (set_attr "conds" "clob")]
;; Epilogue for returning from an exception handler.  Operand 1 is recorded
;; as the epilogue's stack-pointer offset; operand 2 is the handler address,
;; which is copied into r2 if it is not already there.
;; NOTE(review): several interior lines (braces, trailing statements) are
;; not visible in this excerpt.
10053 (define_expand "eh_epilogue"
10054 [(use (match_operand:SI 0 "register_operand" ""))
10055 (use (match_operand:SI 1 "register_operand" ""))
10056 (use (match_operand:SI 2 "register_operand" ""))]
10060 cfun->machine->eh_epilogue_sp_ofs = operands[1];
/* Force the handler address into r2 when it is anywhere else.  */
10061 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10063 rtx ra = gen_rtx_REG (Pmode, 2);
10065 emit_move_insn (ra, operands[2]);
10068 /* This is a hack -- we may have crystalized the function type too
10070 cfun->machine->func_type = 0;
10074 ;; This split is only used during output to reduce the number of patterns
10075 ;; that need assembler instructions adding to them. We allowed the setting
10076 ;; of the conditions to be implicit during rtl generation so that
10077 ;; the conditional compare patterns would work. However this conflicts to
10078 ;; some extent with the conditional data operations, so we have to split them
10081 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10082 ;; conditional execution sufficient?
;; NOTE(review): the define_split header line and the then-arm of the
;; if_then_else are not visible in this excerpt.  After reload, this split
;; turns a conditional-select into: a compare that sets the CC register
;; (operands 5/6), then a conditionally-executed move of operand 4 guarded
;; by the REVERSED condition (operand 7, built below).
10085 [(set (match_operand:SI 0 "s_register_operand" "")
10086 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10087 [(match_operand 2 "" "") (match_operand 3 "" "")])
10089 (match_operand 4 "" "")))
10090 (clobber (reg:CC CC_REGNUM))]
10091 "TARGET_ARM && reload_completed"
10092 [(set (match_dup 5) (match_dup 6))
10093 (cond_exec (match_dup 7)
10094 (set (match_dup 0) (match_dup 4)))]
10097 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10098 operands[2], operands[3]);
10099 enum rtx_code rc = GET_CODE (operands[1]);
10101 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10102 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
/* FP compares must use the unordered-aware condition reversal.  */
10103 if (mode == CCFPmode || mode == CCFPEmode)
10104 rc = reverse_condition_maybe_unordered (rc);
10106 rc = reverse_condition (rc);
10108 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10113 [(set (match_operand:SI 0 "s_register_operand" "")
10114 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10115 [(match_operand 2 "" "") (match_operand 3 "" "")])
10116 (match_operand 4 "" "")
10118 (clobber (reg:CC CC_REGNUM))]
10119 "TARGET_ARM && reload_completed"
10120 [(set (match_dup 5) (match_dup 6))
10121 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10122 (set (match_dup 0) (match_dup 4)))]
10125 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10126 operands[2], operands[3]);
10128 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10129 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10134 [(set (match_operand:SI 0 "s_register_operand" "")
10135 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10136 [(match_operand 2 "" "") (match_operand 3 "" "")])
10137 (match_operand 4 "" "")
10138 (match_operand 5 "" "")))
10139 (clobber (reg:CC CC_REGNUM))]
10140 "TARGET_ARM && reload_completed"
10141 [(set (match_dup 6) (match_dup 7))
10142 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10143 (set (match_dup 0) (match_dup 4)))
10144 (cond_exec (match_dup 8)
10145 (set (match_dup 0) (match_dup 5)))]
10148 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10149 operands[2], operands[3]);
10150 enum rtx_code rc = GET_CODE (operands[1]);
10152 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10153 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10154 if (mode == CCFPmode || mode == CCFPEmode)
10155 rc = reverse_condition_maybe_unordered (rc);
10157 rc = reverse_condition (rc);
10159 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10164 [(set (match_operand:SI 0 "s_register_operand" "")
10165 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10166 [(match_operand:SI 2 "s_register_operand" "")
10167 (match_operand:SI 3 "arm_add_operand" "")])
10168 (match_operand:SI 4 "arm_rhs_operand" "")
10170 (match_operand:SI 5 "s_register_operand" ""))))
10171 (clobber (reg:CC CC_REGNUM))]
10172 "TARGET_ARM && reload_completed"
10173 [(set (match_dup 6) (match_dup 7))
10174 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10175 (set (match_dup 0) (match_dup 4)))
10176 (cond_exec (match_dup 8)
10177 (set (match_dup 0) (not:SI (match_dup 5))))]
10180 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10181 operands[2], operands[3]);
10182 enum rtx_code rc = GET_CODE (operands[1]);
10184 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10185 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10186 if (mode == CCFPmode || mode == CCFPEmode)
10187 rc = reverse_condition_maybe_unordered (rc);
10189 rc = reverse_condition (rc);
10191 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select using an already-computed CC result (operand 3):
;; %d4 moves operand 1 when the condition holds, %D4 writes the bitwise
;; NOT of operand 2 otherwise (MVN in the template below).
;; NOTE(review): the else-arm's (not:SI ...) wrapper line and the first
;; alternative of the output template are not visible in this excerpt.
10195 (define_insn "*cond_move_not"
10196 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10197 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10198 [(match_operand 3 "cc_register" "") (const_int 0)])
10199 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10201 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10205 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10206 [(set_attr "conds" "use")
10207 (set_attr "insn" "mvn")
10208 (set_attr "length" "4,8")]
10211 ;; The next two patterns occur when an AND operation is followed by a
10212 ;; scc insn sequence
;; Sign-extract of a single bit (bit position in operand 2): result is -1
;; when the bit is set, 0 otherwise.  ANDS with the computed one-bit mask
;; sets the flags (and leaves 0 in %0 when clear); MVNNE then writes ~0 on
;; the bit-set path.  NOTE(review): the field-width operand line and the
;; enabling condition are not visible in this excerpt.
10214 (define_insn "*sign_extract_onebit"
10215 [(set (match_operand:SI 0 "s_register_operand" "=r")
10216 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10218 (match_operand:SI 2 "const_int_operand" "n")))
10219 (clobber (reg:CC CC_REGNUM))]
10222 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10223 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10224 return \"mvnne\\t%0, #0\";
10226 [(set_attr "conds" "clob")
10227 (set_attr "length" "8")]
;; Inverted form of the one-bit sign-extract: result is 0 when the selected
;; bit is set and -1 when it is clear.  TST sets the flags without writing
;; %1; MVNEQ materializes -1 on the bit-clear path and MOVNE writes 0 on
;; the bit-set path.  NOTE(review): the (not:SI ...) wrapper line, the
;; field-width operand and the enabling condition are not visible here.
10230 (define_insn "*not_signextract_onebit"
10231 [(set (match_operand:SI 0 "s_register_operand" "=r")
10233 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10235 (match_operand:SI 2 "const_int_operand" "n"))))
10236 (clobber (reg:CC CC_REGNUM))]
10239 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10240 output_asm_insn (\"tst\\t%1, %2\", operands);
10241 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10242 return \"movne\\t%0, #0\";
10244 [(set_attr "conds" "clob")
10245 (set_attr "length" "12")]
10247 ;; ??? The above patterns need auditing for Thumb-2
10249 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10250 ;; expressions. For simplicity, the first register is also in the unspec
10252 ;; To avoid the usage of GNU extension, the length attribute is computed
10253 ;; in a C function arm_attr_length_push_multi.
10254 (define_insn "*push_multi"
10255 [(match_parallel 2 "multi_register_push"
10256 [(set (match_operand:BLK 0 "memory_operand" "=m")
10257 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10258 UNSPEC_PUSH_MULT))])]
10262 int num_saves = XVECLEN (operands[2], 0);
10264 /* For the StrongARM at least it is faster to
10265 use STR to store only a single register.
10266 In Thumb mode always use push, and the assembler will pick
10267 something appropriate. */
10268 if (num_saves == 1 && TARGET_ARM)
10269 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10276 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10278 strcpy (pattern, \"push\\t{%1\");
10280 for (i = 1; i < num_saves; i++)
10282 strcat (pattern, \", %|\");
10284 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10287 strcat (pattern, \"}\");
10288 output_asm_insn (pattern, operands);
10293 [(set_attr "type" "store4")
10294 (set (attr "length")
10295 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length insn whose pattern sets a blockage memory reference from an
;; unspec over operands 0 and 1 -- presumably a scheduling barrier tying
;; stack accesses to stack-pointer registers (note the "rk" constraints);
;; NOTE(review): the unspec name, condition and (empty) output template are
;; not visible in this excerpt -- confirm against the full file.
10298 (define_insn "stack_tie"
10299 [(set (mem:BLK (scratch))
10300 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10301 (match_operand:SI 1 "s_register_operand" "rk")]
10305 [(set_attr "length" "0")]
10308 ;; Similarly for the floating point registers
10309 (define_insn "*push_fp_multi"
10310 [(match_parallel 2 "multi_register_push"
10311 [(set (match_operand:BLK 0 "memory_operand" "=m")
10312 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10313 UNSPEC_PUSH_MULT))])]
10314 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10319 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10320 output_asm_insn (pattern, operands);
10323 [(set_attr "type" "f_fpa_store")]
10326 ;; Special patterns for dealing with the constant pool
10328 (define_insn "align_4"
10329 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10332 assemble_align (32);
10337 (define_insn "align_8"
10338 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10341 assemble_align (64);
10346 (define_insn "consttable_end"
10347 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10350 making_const_table = FALSE;
10355 (define_insn "consttable_1"
10356 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10359 making_const_table = TRUE;
10360 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10361 assemble_zeros (3);
10364 [(set_attr "length" "4")]
10367 (define_insn "consttable_2"
10368 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10371 making_const_table = TRUE;
10372 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10373 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10374 assemble_zeros (2);
10377 [(set_attr "length" "4")]
;; Emit a 4-byte constant-pool entry for operand 0.  Float-mode constants
;; are written with assemble_real (HFmode via a dedicated helper); anything
;; else goes through assemble_integer.  NOTE(review): the enabling
;; condition, braces and case labels of the switch are not visible in this
;; excerpt.
10380 (define_insn "consttable_4"
10381 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10385 rtx x = operands[0];
10386 making_const_table = TRUE;
10387 switch (GET_MODE_CLASS (GET_MODE (x)))
/* Half-precision floats are emitted by a dedicated helper.  */
10390 if (GET_MODE (x) == HFmode)
10391 arm_emit_fp16_const (x);
10395 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10396 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10400 /* XXX: Sometimes gcc does something really dumb and ends up with
10401 a HIGH in a constant pool entry, usually because it's trying to
10402 load into a VFP register. We know this will always be used in
10403 combination with a LO_SUM which ignores the high bits, so just
10404 strip off the HIGH. */
10405 if (GET_CODE (x) == HIGH)
10407 assemble_integer (x, 4, BITS_PER_WORD, 1);
10408 mark_symbol_refs_as_used (x);
10413 [(set_attr "length" "4")]
10416 (define_insn "consttable_8"
10417 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10421 making_const_table = TRUE;
10422 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10427 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10428 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10432 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10437 [(set_attr "length" "8")]
10440 (define_insn "consttable_16"
10441 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10445 making_const_table = TRUE;
10446 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10451 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10452 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10456 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10461 [(set_attr "length" "16")]
10464 ;; Miscellaneous Thumb patterns
;; Expand an indirect jump through a dispatch table.  In the path visible
;; below the jump target is formed by adding the address of the table label
;; (operand 1, copied into a register) to the index value in operand 0,
;; i.e. the table holds label-relative values.  NOTE(review): the expander
;; condition and the guard selecting this path are not visible here.
10466 (define_expand "tablejump"
10467 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10468 (use (label_ref (match_operand 1 "" "")))])]
10473 /* Hopefully, CSE will eliminate this copy. */
10474 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10475 rtx reg2 = gen_reg_rtx (SImode);
10477 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10478 operands[0] = reg2;
10483 ;; NB never uses BX.
10484 (define_insn "*thumb1_tablejump"
10485 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10486 (use (label_ref (match_operand 1 "" "")))]
10489 [(set_attr "length" "2")]
10492 ;; V5 instructions.
10494 (define_insn "clzsi2"
10495 [(set (match_operand:SI 0 "s_register_operand" "=r")
10496 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10497 "TARGET_32BIT && arm_arch5"
10499 [(set_attr "predicable" "yes")
10500 (set_attr "insn" "clz")])
10502 (define_insn "rbitsi2"
10503 [(set (match_operand:SI 0 "s_register_operand" "=r")
10504 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10505 "TARGET_32BIT && arm_arch_thumb2"
10507 [(set_attr "predicable" "yes")
10508 (set_attr "insn" "clz")])
;; Count trailing zeros (Thumb-2 only): reverse the bit order with RBIT
;; into a scratch register, then count leading zeros of the result.
10510 (define_expand "ctzsi2"
10511 [(set (match_operand:SI 0 "s_register_operand" "")
10512 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10513 "TARGET_32BIT && arm_arch_thumb2"
10516 rtx tmp = gen_reg_rtx (SImode);
10517 emit_insn (gen_rbitsi2 (tmp, operands[1]))
10518 emit_insn (gen_clzsi2 (operands[0], tmp));
10524 ;; V5E instructions.
10526 (define_insn "prefetch"
10527 [(prefetch (match_operand:SI 0 "address_operand" "p")
10528 (match_operand:SI 1 "" "")
10529 (match_operand:SI 2 "" ""))]
10530 "TARGET_32BIT && arm_arch5e"
10533 ;; General predication pattern
10536 [(match_operator 0 "arm_comparison_operator"
10537 [(match_operand 1 "cc_register" "")
;; Zero-length marker insn that keeps operand 0 live for the prologue /
;; epilogue machinery.  The output template emits only an assembler
;; comment (%@), no code.
10543 (define_insn "prologue_use"
10544 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10546 "%@ %0 needed for prologue"
10547 [(set_attr "length" "0")]
10551 ;; Patterns for exception handling
;; Expand eh_return by dispatching to the ARM- or Thumb-specific insn that
;; installs the EH handler address (operand 0).  NOTE(review): the guard
;; selecting between the two emit_insn calls is not visible in this
;; excerpt.
10553 (define_expand "eh_return"
10554 [(use (match_operand 0 "general_operand" ""))]
10559 emit_insn (gen_arm_eh_return (operands[0]));
10561 emit_insn (gen_thumb_eh_return (operands[0]));
10566 ;; We can't expand this before we know where the link register is stored.
10567 (define_insn_and_split "arm_eh_return"
10568 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10570 (clobber (match_scratch:SI 1 "=&r"))]
10573 "&& reload_completed"
10577 arm_set_return_address (operands[0], operands[1]);
10582 (define_insn_and_split "thumb_eh_return"
10583 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10585 (clobber (match_scratch:SI 1 "=&l"))]
10588 "&& reload_completed"
10592 thumb_set_return_address (operands[0], operands[1]);
;; Hardware TLS read: load the thread pointer from coprocessor register
;; CP15 c13, c0, 3 into operand 0 via MRC.  NOTE(review): the enabling
;; condition line is not visible in this excerpt.
10600 (define_insn "load_tp_hard"
10601 [(set (match_operand:SI 0 "register_operand" "=r")
10602 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10604 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10605 [(set_attr "predicable" "yes")]
10608 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software TLS read: call the __aeabi_read_tp helper, which returns the
;; thread pointer in r0 (hard reg 0).  The call clobbers LR, IP and the
;; condition codes, as declared below.  NOTE(review): the enabling
;; condition line is not visible in this excerpt.
10609 (define_insn "load_tp_soft"
10610 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10611 (clobber (reg:SI LR_REGNUM))
10612 (clobber (reg:SI IP_REGNUM))
10613 (clobber (reg:CC CC_REGNUM))]
10615 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10616 [(set_attr "conds" "clob")]
10619 ;; We only care about the lower 16 bits of the constant
10620 ;; being inserted into the upper 16 bits of the register.
10621 (define_insn "*arm_movtas_ze"
10622 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10625 (match_operand:SI 1 "const_int_operand" ""))]
10628 [(set_attr "predicable" "yes")
10629 (set_attr "length" "4")]
10632 (define_insn "*arm_rev"
10633 [(set (match_operand:SI 0 "s_register_operand" "=r")
10634 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10635 "TARGET_32BIT && arm_arch6"
10637 [(set_attr "predicable" "yes")
10638 (set_attr "length" "4")]
;; Thumb-1 byte-swap (bswap) of a 32-bit value, low registers only; the
;; length of 2 shows this is a 16-bit encoding.  NOTE(review): the output
;; template line is not visible in this excerpt.
10641 (define_insn "*thumb1_rev"
10642 [(set (match_operand:SI 0 "s_register_operand" "=l")
10643 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10644 "TARGET_THUMB1 && arm_arch6"
10646 [(set_attr "length" "2")]
10649 (define_expand "arm_legacy_rev"
10650 [(set (match_operand:SI 2 "s_register_operand" "")
10651 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10655 (lshiftrt:SI (match_dup 2)
10657 (set (match_operand:SI 3 "s_register_operand" "")
10658 (rotatert:SI (match_dup 1)
10661 (and:SI (match_dup 2)
10662 (const_int -65281)))
10663 (set (match_operand:SI 0 "s_register_operand" "")
10664 (xor:SI (match_dup 3)
10670 ;; Reuse temporaries to keep register pressure down.
10671 (define_expand "thumb_legacy_rev"
10672 [(set (match_operand:SI 2 "s_register_operand" "")
10673 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10675 (set (match_operand:SI 3 "s_register_operand" "")
10676 (lshiftrt:SI (match_dup 1)
10679 (ior:SI (match_dup 3)
10681 (set (match_operand:SI 4 "s_register_operand" "")
10683 (set (match_operand:SI 5 "s_register_operand" "")
10684 (rotatert:SI (match_dup 1)
10687 (ashift:SI (match_dup 5)
10690 (lshiftrt:SI (match_dup 5)
10693 (ior:SI (match_dup 5)
10696 (rotatert:SI (match_dup 5)
10698 (set (match_operand:SI 0 "s_register_operand" "")
10699 (ior:SI (match_dup 5)
;; Expand byte-swap of an SImode value.  With arm_arch6 the single REV
;; pattern can match directly; otherwise the multi-insn legacy sequences
;; are used, and those are only worthwhile when not optimizing for size
;; (see the condition).  NOTE(review): the guards selecting between the
;; Thumb and ARM fallbacks, and the tail of the ARM call, are not visible
;; in this excerpt.
10705 (define_expand "bswapsi2"
10706 [(set (match_operand:SI 0 "s_register_operand" "=r")
10707 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10708 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10712 rtx op2 = gen_reg_rtx (SImode);
10713 rtx op3 = gen_reg_rtx (SImode);
/* The Thumb fallback needs two extra temporaries (op4, op5).  */
10717 rtx op4 = gen_reg_rtx (SImode);
10718 rtx op5 = gen_reg_rtx (SImode);
10720 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10721 op2, op3, op4, op5));
10725 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10734 ;; Load the load/store multiple patterns
10735 (include "ldmstm.md")
10736 ;; Load the FPA co-processor patterns
10738 ;; Load the Maverick co-processor patterns
10739 (include "cirrus.md")
10740 ;; Vector bits common to IWMMXT and Neon
10741 (include "vec-common.md")
10742 ;; Load the Intel Wireless Multimedia Extension patterns
10743 (include "iwmmxt.md")
10744 ;; Load the VFP co-processor patterns
10746 ;; Thumb-2 patterns
10747 (include "thumb2.md")
10749 (include "neon.md")
10750 ;; Synchronization Primitives
10751 (include "sync.md")