1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
45 ;; 3rd operand to select_dominance_cc_mode
52 ;; conditional compare combination
63 ;; Note: sin and cos are no-longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
66 (define_c_enum "unspec" [
67 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 UNSPEC_PUSH_MULT ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
77 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
81 ; The last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
90 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
91 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
104 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
107 ; correctly for PIC usage.
108 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
109 ; given symbolic address.
110 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
111 UNSPEC_RBIT ; rbit operation.
112 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
113 ; another symbolic address.
114 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
115 UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
116 ; unaligned locations, on architectures which support
118 UNSPEC_UNALIGNED_STORE ; Same for str/strh.
121 ;; UNSPEC_VOLATILE Usage:
123 (define_c_enum "unspecv" [
124 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
126 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
127 ; instruction epilogue sequence that isn't expanded
128 ; into normal RTL. Used for both normal and sibcall
130 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
131 ; modes from arm to thumb.
132 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
133 ; for inlined constants.
134 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
136 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
138 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
140 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
142 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
144 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
146 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
147 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
148 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
149 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
150 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
151 VUNSPEC_WCMP_GT ; Used by the iwMMXT WCMPGT instructions
152 VUNSPEC_EH_RETURN ; Use to override the return address for exception
154 VUNSPEC_SYNC_COMPARE_AND_SWAP ; Represent an atomic compare swap.
155 VUNSPEC_SYNC_LOCK ; Represent a sync_lock_test_and_set.
156 VUNSPEC_SYNC_OP ; Represent a sync_<op>
157 VUNSPEC_SYNC_NEW_OP ; Represent a sync_new_<op>
158 VUNSPEC_SYNC_OLD_OP ; Represent a sync_old_<op>
161 ;;---------------------------------------------------------------------------
164 ;; Processor type. This is created automatically from arm-cores.def.
165 (include "arm-tune.md")
167 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
168 ; generating ARM code. This is used to control the length of some insn
169 ; patterns that share the same RTL in both ARM and Thumb code.
170 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
172 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
173 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
175 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
176 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
178 ;; Operand number of an input operand that is shifted. Zero if the
179 ;; given instruction does not shift one of its input operands.
180 (define_attr "shift" "" (const_int 0))
182 ; Floating Point Unit. If we only have floating point emulation, then there
183 ; is no point in scheduling the floating point insns. (Well, for best
184 ; performance we should try and group them together).
185 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
186 (const (symbol_ref "arm_fpu_attr")))
; Operand numbers for the synchronization (atomic) insn patterns: each
; attribute records which operand plays the given role in the insn, or
; "none" when the insn has no such operand. Sync insns with a non-"none"
; sync_memory get their length from arm_sync_loop_insns -- see the
; "length" attribute below. (NOTE(review): the exact consumers live in
; the sync expansion code in arm.c -- confirm there.)
188 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
189 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
190 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
191 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
192 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
193 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
194 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
195 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
196 (const_string "none"))
198 ; LENGTH of an instruction (in bytes)
199 (define_attr "length" ""
200 (cond [(not (eq_attr "sync_memory" "none"))
201 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
204 ; The architecture which supports the instruction (or alternative).
205 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
206 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
207 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
208 ; arm_arch6. This attribute is used to compute attribute "enabled",
209 ; use type "any" to enable an alternative in all cases.
; "onlya8" restricts an alternative to the Cortex-A8 tuning and "nota8"
; excludes it from that tuning -- see the "arch_enabled" computation.
210 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
211 (const_string "any"))
213 (define_attr "arch_enabled" "no,yes"
214 (cond [(eq_attr "arch" "any")
217 (and (eq_attr "arch" "a")
218 (match_test "TARGET_ARM"))
221 (and (eq_attr "arch" "t")
222 (match_test "TARGET_THUMB"))
225 (and (eq_attr "arch" "t1")
226 (match_test "TARGET_THUMB1"))
229 (and (eq_attr "arch" "t2")
230 (match_test "TARGET_THUMB2"))
233 (and (eq_attr "arch" "32")
234 (match_test "TARGET_32BIT"))
237 (and (eq_attr "arch" "v6")
238 (match_test "TARGET_32BIT && arm_arch6"))
241 (and (eq_attr "arch" "nov6")
242 (match_test "TARGET_32BIT && !arm_arch6"))
245 (and (eq_attr "arch" "onlya8")
246 (eq_attr "tune" "cortexa8"))
249 (and (eq_attr "arch" "nota8")
250 (not (eq_attr "tune" "cortexa8")))
251 (const_string "yes")]
252 (const_string "no")))
254 ; Allows an insn to disable certain alternatives for reasons other than
; architecture support (the "arch" attribute above).
256 (define_attr "insn_enabled" "no,yes"
257 (const_string "yes"))
259 ; Enable all alternatives that are both arch_enabled and insn_enabled.
260 (define_attr "enabled" "no,yes"
261 (if_then_else (eq_attr "insn_enabled" "yes")
262 (if_then_else (eq_attr "arch_enabled" "yes")
265 (const_string "no")))
267 ; POOL_RANGE is how far away from a constant pool entry that this insn
268 ; can be placed. If the distance is zero, then this insn will never
269 ; reference the pool.
270 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
271 ; before its address. It is set to <max_range> - (8 + <data_size>).
272 (define_attr "arm_pool_range" "" (const_int 0))
273 (define_attr "thumb2_pool_range" "" (const_int 0))
274 (define_attr "arm_neg_pool_range" "" (const_int 0))
275 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; pool_range/neg_pool_range select the Thumb-2 variant of the range when
; generating Thumb code (is_thumb == "yes"), and the ARM variant otherwise.
277 (define_attr "pool_range" ""
278 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
279 (attr "arm_pool_range")))
280 (define_attr "neg_pool_range" ""
281 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
282 (attr "arm_neg_pool_range")))
284 ; An assembler sequence may clobber the condition codes without us knowing.
285 ; If such an insn references the pool, then we have no way of knowing how,
286 ; so use the most conservative value for pool_range.
287 (define_asm_attributes
288 [(set_attr "conds" "clob")
289 (set_attr "length" "4")
290 (set_attr "pool_range" "250")])
292 ;; The instruction used to implement a particular pattern. This
293 ;; information is used by pipeline descriptions to provide accurate
294 ;; scheduling information.
297 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
298 (const_string "other"))
300 ; TYPE attribute is used to detect floating point instructions which, if
301 ; running on a co-processor can run in parallel with other, basic instructions
302 ; If write-buffer scheduling is enabled then it can also be used in the
303 ; scheduling of writes.
305 ; Classification of each insn
306 ; Note: vfp.md has different meanings for some of these, and some further
307 ; types as well. See that file for details.
308 ; alu any alu instruction that doesn't hit memory or fp
309 ; regs or have a shifted source operand
310 ; alu_shift any data instruction that doesn't hit memory or fp
311 ; regs, but has a source operand shifted by a constant
312 ; alu_shift_reg any data instruction that doesn't hit memory or fp
313 ; regs, but has a source operand shifted by a register value
314 ; mult a multiply instruction
315 ; block blockage insn, this blocks all functional units
316 ; float a floating point arithmetic operation (subject to expansion)
317 ; fdivd DFmode floating point division
318 ; fdivs SFmode floating point division
319 ; fmul Floating point multiply
320 ; ffmul Fast floating point multiply
321 ; farith Floating point arithmetic (4 cycle)
322 ; ffarith Fast floating point arithmetic (2 cycle)
323 ; float_em a floating point arithmetic operation that is normally emulated
324 ; even on a machine with an fpa.
325 ; f_fpa_load a floating point load from memory. Only for the FPA.
326 ; f_fpa_store a floating point store to memory. Only for the FPA.
327 ; f_load[sd] A single/double load from memory. Used for VFP unit.
328 ; f_store[sd] A single/double store to memory. Used for VFP unit.
329 ; f_flag a transfer of co-processor flags to the CPSR
330 ; f_mem_r a transfer of a floating point register to a real reg via mem
331 ; r_mem_f the reverse of f_mem_r
332 ; f_2_r fast transfer float to arm (no memory needed)
333 ; r_2_f fast transfer arm to float
334 ; f_cvt convert floating<->integral
336 ; call a subroutine call
337 ; load_byte load byte(s) from memory to arm registers
338 ; load1 load 1 word from memory to arm registers
339 ; load2 load 2 words from memory to arm registers
340 ; load3 load 3 words from memory to arm registers
341 ; load4 load 4 words from memory to arm registers
342 ; store store 1 word to memory from arm registers
343 ; store2 store 2 words
344 ; store3 store 3 words
345 ; store4 store 4 (or more) words
346 ; Additions for Cirrus Maverick co-processor:
347 ; mav_farith Floating point arithmetic (4 cycle)
348 ; mav_dmult Double multiplies (7 cycle)
352 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
354 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
355 (const_string "mult")
356 (const_string "alu")))
358 ; Is this an (integer side) multiply with a 64-bit result?
359 (define_attr "mul64" "no,yes"
361 (eq_attr "insn" "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
363 (const_string "no")))
365 ; Load scheduling, set from the arm_ld_sched variable
366 ; initialized by arm_option_override()
; "yes" when the tuned-for CPU benefits from scheduling a load away from
; the first use of its result -- presumably; confirm against arm.c.
367 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
369 ;; Classification of NEON instructions for scheduling purposes.
370 ;; Do not set this attribute and the "type" attribute together in
371 ;; any one instruction pattern.
372 (define_attr "neon_type"
383 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
384 neon_mul_qqq_8_16_32_ddd_32,\
385 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
386 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
388 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
389 neon_mla_qqq_32_qqd_32_scalar,\
390 neon_mul_ddd_16_scalar_32_16_long_scalar,\
391 neon_mul_qqd_32_scalar,\
392 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
397 neon_vqshl_vrshl_vqrshl_qqq,\
399 neon_fp_vadd_ddd_vabs_dd,\
400 neon_fp_vadd_qqq_vabs_qq,\
406 neon_fp_vmla_ddd_scalar,\
407 neon_fp_vmla_qqq_scalar,\
408 neon_fp_vrecps_vrsqrts_ddd,\
409 neon_fp_vrecps_vrsqrts_qqq,\
417 neon_vld2_2_regs_vld1_vld2_all_lanes,\
420 neon_vst1_1_2_regs_vst2_2_regs,\
422 neon_vst2_4_regs_vst3_vst4,\
424 neon_vld1_vld2_lane,\
425 neon_vld3_vld4_lane,\
426 neon_vst1_vst2_lane,\
427 neon_vst3_vst4_lane,\
428 neon_vld3_vld4_all_lanes,\
436 (const_string "none"))
438 ; condition codes: this one is used by final_prescan_insn to speed up
439 ; conditionalizing instructions. It saves having to scan the rtl to see if
440 ; it uses or alters the condition codes.
442 ; USE means that the condition codes are used by the insn in the process of
443 ; outputting code, this means (at present) that we can't use the insn in
446 ; SET means that the purpose of the insn is to set the condition codes in a
447 ; well defined manner.
449 ; CLOB means that the condition codes are altered in an undefined manner, if
450 ; they are altered at all
452 ; UNCONDITIONAL means the instruction can not be conditionally executed and
453 ; that the instruction does not use or alter the condition codes.
455 ; NOCOND means that the instruction does not use or alter the condition
456 ; codes but can be converted into a conditionally executed instruction.
458 (define_attr "conds" "use,set,clob,unconditional,nocond"
460 (ior (eq_attr "is_thumb1" "yes")
461 (eq_attr "type" "call"))
462 (const_string "clob")
463 (if_then_else (eq_attr "neon_type" "none")
464 (const_string "nocond")
465 (const_string "unconditional"))))
467 ; Predicable means that the insn can be conditionally executed based on
468 ; an automatically added predicate (additional patterns are generated by
469 ; gen...). We default to 'no' because no Thumb patterns match this rule
470 ; and not all ARM patterns do.
; Individual patterns opt in with (set_attr "predicable" "yes").
471 (define_attr "predicable" "no,yes" (const_string "no"))
473 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
474 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
475 ; suffer blockages enough to warrant modelling this (and it can adversely
476 ; affect the schedule).
477 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
479 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
480 ; to stall the processor. Used with model_wbuf above.
481 (define_attr "write_conflict" "no,yes"
482 (if_then_else (eq_attr "type"
483 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
485 (const_string "no")))
487 ; Classify the insns into those that take one cycle and those that take more
488 ; than one on the main cpu execution unit.
; NOTE(review): keep the list below in sync with the values declared for
; the "type" attribute.
489 (define_attr "core_cycles" "single,multi"
490 (if_then_else (eq_attr "type"
491 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
492 (const_string "single")
493 (const_string "multi")))
495 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
496 ;; distant label. Only applicable to Thumb code.
497 (define_attr "far_jump" "yes,no" (const_string "no"))
500 ;; The number of machine instructions this pattern expands to.
501 ;; Used for Thumb-2 conditional execution.
502 (define_attr "ce_count" "" (const_int 1))
504 ;;---------------------------------------------------------------------------
507 (include "iterators.md")
509 ;;---------------------------------------------------------------------------
512 (include "predicates.md")
513 (include "constraints.md")
515 ;;---------------------------------------------------------------------------
516 ;; Pipeline descriptions
518 (define_attr "tune_cortexr4" "yes,no"
520 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
522 (const_string "no"))))
524 ;; True if the generic scheduling description should be used.
526 (define_attr "generic_sched" "yes,no"
528 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexa15,cortexm4")
529 (eq_attr "tune_cortexr4" "yes"))
531 (const_string "yes"))))
533 (define_attr "generic_vfp" "yes,no"
535 (and (eq_attr "fpu" "vfp")
536 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
537 (eq_attr "tune_cortexr4" "no"))
539 (const_string "no"))))
541 (include "arm-generic.md")
542 (include "arm926ejs.md")
543 (include "arm1020e.md")
544 (include "arm1026ejs.md")
545 (include "arm1136jfs.md")
547 (include "fa606te.md")
548 (include "fa626te.md")
549 (include "fmp626.md")
550 (include "fa726te.md")
551 (include "cortex-a5.md")
552 (include "cortex-a8.md")
553 (include "cortex-a9.md")
554 (include "cortex-a15.md")
555 (include "cortex-r4.md")
556 (include "cortex-r4f.md")
557 (include "cortex-m4.md")
558 (include "cortex-m4-fpu.md")
562 ;;---------------------------------------------------------------------------
567 ;; Note: For DImode insns, there is normally no reason why operands should
568 ;; not be in the same register, what we don't want is for something being
569 ;; written to partially overlap something that is an input.
570 ;; Cirrus 64bit additions should not be split because we have a native
571 ;; 64bit addition instruction.
573 (define_expand "adddi3"
575 [(set (match_operand:DI 0 "s_register_operand" "")
576 (plus:DI (match_operand:DI 1 "s_register_operand" "")
577 (match_operand:DI 2 "s_register_operand" "")))
578 (clobber (reg:CC CC_REGNUM))])]
581 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
583 if (!cirrus_fp_register (operands[0], DImode))
584 operands[0] = force_reg (DImode, operands[0]);
585 if (!cirrus_fp_register (operands[1], DImode))
586 operands[1] = force_reg (DImode, operands[1]);
587 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
593 if (GET_CODE (operands[1]) != REG)
594 operands[1] = force_reg (DImode, operands[1]);
595 if (GET_CODE (operands[2]) != REG)
596 operands[2] = force_reg (DImode, operands[2]);
601 (define_insn "*thumb1_adddi3"
602 [(set (match_operand:DI 0 "register_operand" "=l")
603 (plus:DI (match_operand:DI 1 "register_operand" "%0")
604 (match_operand:DI 2 "register_operand" "l")))
605 (clobber (reg:CC CC_REGNUM))
608 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
609 [(set_attr "length" "4")]
612 (define_insn_and_split "*arm_adddi3"
613 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
614 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
615 (match_operand:DI 2 "s_register_operand" "r, 0")))
616 (clobber (reg:CC CC_REGNUM))]
617 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
619 "TARGET_32BIT && reload_completed
620 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
621 [(parallel [(set (reg:CC_C CC_REGNUM)
622 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
624 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
625 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
626 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
629 operands[3] = gen_highpart (SImode, operands[0]);
630 operands[0] = gen_lowpart (SImode, operands[0]);
631 operands[4] = gen_highpart (SImode, operands[1]);
632 operands[1] = gen_lowpart (SImode, operands[1]);
633 operands[5] = gen_highpart (SImode, operands[2]);
634 operands[2] = gen_lowpart (SImode, operands[2]);
636 [(set_attr "conds" "clob")
637 (set_attr "length" "8")]
640 (define_insn_and_split "*adddi_sesidi_di"
641 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
642 (plus:DI (sign_extend:DI
643 (match_operand:SI 2 "s_register_operand" "r,r"))
644 (match_operand:DI 1 "s_register_operand" "0,r")))
645 (clobber (reg:CC CC_REGNUM))]
646 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
648 "TARGET_32BIT && reload_completed"
649 [(parallel [(set (reg:CC_C CC_REGNUM)
650 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
652 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
653 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
656 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
659 operands[3] = gen_highpart (SImode, operands[0]);
660 operands[0] = gen_lowpart (SImode, operands[0]);
661 operands[4] = gen_highpart (SImode, operands[1]);
662 operands[1] = gen_lowpart (SImode, operands[1]);
663 operands[2] = gen_lowpart (SImode, operands[2]);
665 [(set_attr "conds" "clob")
666 (set_attr "length" "8")]
669 (define_insn_and_split "*adddi_zesidi_di"
670 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
671 (plus:DI (zero_extend:DI
672 (match_operand:SI 2 "s_register_operand" "r,r"))
673 (match_operand:DI 1 "s_register_operand" "0,r")))
674 (clobber (reg:CC CC_REGNUM))]
675 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
677 "TARGET_32BIT && reload_completed"
678 [(parallel [(set (reg:CC_C CC_REGNUM)
679 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
681 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
682 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
683 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
686 operands[3] = gen_highpart (SImode, operands[0]);
687 operands[0] = gen_lowpart (SImode, operands[0]);
688 operands[4] = gen_highpart (SImode, operands[1]);
689 operands[1] = gen_lowpart (SImode, operands[1]);
690 operands[2] = gen_lowpart (SImode, operands[2]);
692 [(set_attr "conds" "clob")
693 (set_attr "length" "8")]
696 (define_expand "addsi3"
697 [(set (match_operand:SI 0 "s_register_operand" "")
698 (plus:SI (match_operand:SI 1 "s_register_operand" "")
699 (match_operand:SI 2 "reg_or_int_operand" "")))]
702 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
704 arm_split_constant (PLUS, SImode, NULL_RTX,
705 INTVAL (operands[2]), operands[0], operands[1],
706 optimize && can_create_pseudo_p ());
712 ; If there is a scratch available, this will be faster than synthesizing the
715 [(match_scratch:SI 3 "r")
716 (set (match_operand:SI 0 "arm_general_register_operand" "")
717 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
718 (match_operand:SI 2 "const_int_operand" "")))]
720 !(const_ok_for_arm (INTVAL (operands[2]))
721 || const_ok_for_arm (-INTVAL (operands[2])))
722 && const_ok_for_arm (~INTVAL (operands[2]))"
723 [(set (match_dup 3) (match_dup 2))
724 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
728 ;; The r/r/k alternative is required when reloading the address
729 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
730 ;; put the duplicated register first, and not try the commutative version.
731 (define_insn_and_split "*arm_addsi3"
732 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
733 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
734 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
744 subw%?\\t%0, %1, #%n2
745 subw%?\\t%0, %1, #%n2
748 && GET_CODE (operands[2]) == CONST_INT
749 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
750 && (reload_completed || !arm_eliminable_register (operands[1]))"
751 [(clobber (const_int 0))]
753 arm_split_constant (PLUS, SImode, curr_insn,
754 INTVAL (operands[2]), operands[0],
758 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
759 (set_attr "predicable" "yes")
760 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
763 (define_insn_and_split "*thumb1_addsi3"
764 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
765 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
766 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
769 static const char * const asms[] =
771 \"add\\t%0, %0, %2\",
772 \"sub\\t%0, %0, #%n2\",
773 \"add\\t%0, %1, %2\",
774 \"add\\t%0, %0, %2\",
775 \"add\\t%0, %0, %2\",
776 \"add\\t%0, %1, %2\",
777 \"add\\t%0, %1, %2\",
782 if ((which_alternative == 2 || which_alternative == 6)
783 && GET_CODE (operands[2]) == CONST_INT
784 && INTVAL (operands[2]) < 0)
785 return \"sub\\t%0, %1, #%n2\";
786 return asms[which_alternative];
788 "&& reload_completed && CONST_INT_P (operands[2])
789 && ((operands[1] != stack_pointer_rtx
790 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
791 || (operands[1] == stack_pointer_rtx
792 && INTVAL (operands[2]) > 1020))"
793 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
794 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
796 HOST_WIDE_INT offset = INTVAL (operands[2]);
797 if (operands[1] == stack_pointer_rtx)
803 else if (offset < -255)
806 operands[3] = GEN_INT (offset);
807 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
809 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
812 ;; Reloading and elimination of the frame pointer can
813 ;; sometimes cause this optimization to be missed.
815 [(set (match_operand:SI 0 "arm_general_register_operand" "")
816 (match_operand:SI 1 "const_int_operand" ""))
818 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
820 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
821 && (INTVAL (operands[1]) & 3) == 0"
822 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
826 (define_insn "addsi3_compare0"
827 [(set (reg:CC_NOOV CC_REGNUM)
829 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
830 (match_operand:SI 2 "arm_add_operand" "rI,L"))
832 (set (match_operand:SI 0 "s_register_operand" "=r,r")
833 (plus:SI (match_dup 1) (match_dup 2)))]
837 sub%.\\t%0, %1, #%n2"
838 [(set_attr "conds" "set")]
841 (define_insn "*addsi3_compare0_scratch"
842 [(set (reg:CC_NOOV CC_REGNUM)
844 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
845 (match_operand:SI 1 "arm_add_operand" "rI,L"))
851 [(set_attr "conds" "set")]
854 (define_insn "*compare_negsi_si"
855 [(set (reg:CC_Z CC_REGNUM)
857 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
858 (match_operand:SI 1 "s_register_operand" "r")))]
861 [(set_attr "conds" "set")]
864 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
865 ;; addend is a constant.
866 (define_insn "*cmpsi2_addneg"
867 [(set (reg:CC CC_REGNUM)
869 (match_operand:SI 1 "s_register_operand" "r,r")
870 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
871 (set (match_operand:SI 0 "s_register_operand" "=r,r")
872 (plus:SI (match_dup 1)
873 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
874 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
877 sub%.\\t%0, %1, #%n3"
878 [(set_attr "conds" "set")]
881 ;; Convert the sequence
883 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
887 ;; bcs dest ((unsigned)rn >= 1)
888 ;; similarly for the beq variant using bcc.
889 ;; This is a common looping idiom (while (n--))
891 [(set (match_operand:SI 0 "arm_general_register_operand" "")
892 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
894 (set (match_operand 2 "cc_register" "")
895 (compare (match_dup 0) (const_int -1)))
897 (if_then_else (match_operator 3 "equality_operator"
898 [(match_dup 2) (const_int 0)])
899 (match_operand 4 "" "")
900 (match_operand 5 "" "")))]
901 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
905 (match_dup 1) (const_int 1)))
906 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
908 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
911 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
912 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
915 operands[2], const0_rtx);"
918 ;; The next four insns work because they compare the result with one of
919 ;; the operands, and we know that the use of the condition code is
920 ;; either GEU or LTU, so we can use the carry flag from the addition
921 ;; instead of doing the compare a second time.
922 (define_insn "*addsi3_compare_op1"
923 [(set (reg:CC_C CC_REGNUM)
925 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
926 (match_operand:SI 2 "arm_add_operand" "rI,L"))
928 (set (match_operand:SI 0 "s_register_operand" "=r,r")
929 (plus:SI (match_dup 1) (match_dup 2)))]
933 sub%.\\t%0, %1, #%n2"
934 [(set_attr "conds" "set")]
937 (define_insn "*addsi3_compare_op2"
938 [(set (reg:CC_C CC_REGNUM)
940 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
941 (match_operand:SI 2 "arm_add_operand" "rI,L"))
943 (set (match_operand:SI 0 "s_register_operand" "=r,r")
944 (plus:SI (match_dup 1) (match_dup 2)))]
948 sub%.\\t%0, %1, #%n2"
949 [(set_attr "conds" "set")]
952 (define_insn "*compare_addsi2_op0"
953 [(set (reg:CC_C CC_REGNUM)
955 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
956 (match_operand:SI 1 "arm_add_operand" "rI,L"))
962 [(set_attr "conds" "set")]
965 (define_insn "*compare_addsi2_op1"
966 [(set (reg:CC_C CC_REGNUM)
968 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
969 (match_operand:SI 1 "arm_add_operand" "rI,L"))
975 [(set_attr "conds" "set")]
978 (define_insn "*addsi3_carryin_<optab>"
979 [(set (match_operand:SI 0 "s_register_operand" "=r")
980 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
981 (match_operand:SI 2 "arm_rhs_operand" "rI"))
982 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
985 [(set_attr "conds" "use")]
988 (define_insn "*addsi3_carryin_alt2_<optab>"
989 [(set (match_operand:SI 0 "s_register_operand" "=r")
990 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
991 (match_operand:SI 1 "s_register_operand" "%r"))
992 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
995 [(set_attr "conds" "use")]
998 (define_insn "*addsi3_carryin_shift_<optab>"
999 [(set (match_operand:SI 0 "s_register_operand" "=r")
1001 (match_operator:SI 2 "shift_operator"
1002 [(match_operand:SI 3 "s_register_operand" "r")
1003 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1004 (match_operand:SI 1 "s_register_operand" "r"))
1005 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1007 "adc%?\\t%0, %1, %3%S2"
1008 [(set_attr "conds" "use")
1009 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1010 (const_string "alu_shift")
1011 (const_string "alu_shift_reg")))]
1014 (define_insn "*addsi3_carryin_clobercc_<optab>"
1015 [(set (match_operand:SI 0 "s_register_operand" "=r")
1016 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1017 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1018 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1019 (clobber (reg:CC CC_REGNUM))]
1021 "adc%.\\t%0, %1, %2"
1022 [(set_attr "conds" "set")]
1025 (define_expand "incscc"
1026 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1027 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1028 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1029 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1034 (define_insn "*arm_incscc"
1035 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1036 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1037 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1038 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1042 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1043 [(set_attr "conds" "use")
1044 (set_attr "length" "4,8")]
1047 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
1049 [(set (match_operand:SI 0 "s_register_operand" "")
1050 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1051 (match_operand:SI 2 "s_register_operand" ""))
1053 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1055 [(set (match_dup 3) (match_dup 1))
1056 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1058 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1061 (define_expand "addsf3"
1062 [(set (match_operand:SF 0 "s_register_operand" "")
1063 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1064 (match_operand:SF 2 "arm_float_add_operand" "")))]
1065 "TARGET_32BIT && TARGET_HARD_FLOAT"
1068 && !cirrus_fp_register (operands[2], SFmode))
1069 operands[2] = force_reg (SFmode, operands[2]);
1072 (define_expand "adddf3"
1073 [(set (match_operand:DF 0 "s_register_operand" "")
1074 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1075 (match_operand:DF 2 "arm_float_add_operand" "")))]
1076 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1079 && !cirrus_fp_register (operands[2], DFmode))
1080 operands[2] = force_reg (DFmode, operands[2]);
1083 (define_expand "subdi3"
1085 [(set (match_operand:DI 0 "s_register_operand" "")
1086 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1087 (match_operand:DI 2 "s_register_operand" "")))
1088 (clobber (reg:CC CC_REGNUM))])]
1091 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1093 && cirrus_fp_register (operands[0], DImode)
1094 && cirrus_fp_register (operands[1], DImode))
1096 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1102 if (GET_CODE (operands[1]) != REG)
1103 operands[1] = force_reg (DImode, operands[1]);
1104 if (GET_CODE (operands[2]) != REG)
1105 operands[2] = force_reg (DImode, operands[2]);
;; 64-bit subtraction on 32-bit ARM/Thumb-2 cores: SUBS subtracts the low
;; words and sets the carry (borrow) flag, then SBC subtracts the high words
;; with borrow.  The condition codes are clobbered, hence the CC clobber and
;; conds "clob".  Earlyclobber ("=&r") keeps the output from overlapping an
;; input except via the explicit "0" matching alternatives.
1110 (define_insn "*arm_subdi3"
1111 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1112 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1113 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1114 (clobber (reg:CC CC_REGNUM))]
1115 "TARGET_32BIT && !TARGET_NEON"
1116 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1117 [(set_attr "conds" "clob")
1118 (set_attr "length" "8")]
;; Thumb-1 64-bit subtraction.  Operand 0 must be the same register pair as
;; operand 1 (constraint "0"), and all operands must be low registers ("l").
;; SUB then SBC with borrow, two 16-bit instructions (length 4).
;; NOTE(review): the insn condition line is not visible in this chunk —
;; presumably "TARGET_THUMB1"; confirm against the full file.
1121 (define_insn "*thumb_subdi3"
1122 [(set (match_operand:DI 0 "register_operand" "=l")
1123 (minus:DI (match_operand:DI 1 "register_operand" "0")
1124 (match_operand:DI 2 "register_operand" "l")))
1125 (clobber (reg:CC CC_REGNUM))]
1127 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1128 [(set_attr "length" "4")]
1131 (define_insn "*subdi_di_zesidi"
1132 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1133 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1135 (match_operand:SI 2 "s_register_operand" "r,r"))))
1136 (clobber (reg:CC CC_REGNUM))]
1138 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1139 [(set_attr "conds" "clob")
1140 (set_attr "length" "8")]
1143 (define_insn "*subdi_di_sesidi"
1144 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1145 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1147 (match_operand:SI 2 "s_register_operand" "r,r"))))
1148 (clobber (reg:CC CC_REGNUM))]
1150 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1151 [(set_attr "conds" "clob")
1152 (set_attr "length" "8")]
1155 (define_insn "*subdi_zesidi_di"
1156 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1157 (minus:DI (zero_extend:DI
1158 (match_operand:SI 2 "s_register_operand" "r,r"))
1159 (match_operand:DI 1 "s_register_operand" "0,r")))
1160 (clobber (reg:CC CC_REGNUM))]
1162 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1163 [(set_attr "conds" "clob")
1164 (set_attr "length" "8")]
1167 (define_insn "*subdi_sesidi_di"
1168 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1169 (minus:DI (sign_extend:DI
1170 (match_operand:SI 2 "s_register_operand" "r,r"))
1171 (match_operand:DI 1 "s_register_operand" "0,r")))
1172 (clobber (reg:CC CC_REGNUM))]
1174 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1175 [(set_attr "conds" "clob")
1176 (set_attr "length" "8")]
1179 (define_insn "*subdi_zesidi_zesidi"
1180 [(set (match_operand:DI 0 "s_register_operand" "=r")
1181 (minus:DI (zero_extend:DI
1182 (match_operand:SI 1 "s_register_operand" "r"))
1184 (match_operand:SI 2 "s_register_operand" "r"))))
1185 (clobber (reg:CC CC_REGNUM))]
1187 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1188 [(set_attr "conds" "clob")
1189 (set_attr "length" "8")]
1192 (define_expand "subsi3"
1193 [(set (match_operand:SI 0 "s_register_operand" "")
1194 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1195 (match_operand:SI 2 "s_register_operand" "")))]
1198 if (GET_CODE (operands[1]) == CONST_INT)
1202 arm_split_constant (MINUS, SImode, NULL_RTX,
1203 INTVAL (operands[1]), operands[0],
1204 operands[2], optimize && can_create_pseudo_p ());
1207 else /* TARGET_THUMB1 */
1208 operands[1] = force_reg (SImode, operands[1]);
1213 (define_insn "thumb1_subsi3_insn"
1214 [(set (match_operand:SI 0 "register_operand" "=l")
1215 (minus:SI (match_operand:SI 1 "register_operand" "l")
1216 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1219 [(set_attr "length" "2")
1220 (set_attr "conds" "set")])
1222 ; ??? Check Thumb-2 split length
1223 (define_insn_and_split "*arm_subsi3_insn"
1224 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r")
1225 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n")
1226 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r")))]
1233 "&& (GET_CODE (operands[1]) == CONST_INT
1234 && !const_ok_for_arm (INTVAL (operands[1])))"
1235 [(clobber (const_int 0))]
1237 arm_split_constant (MINUS, SImode, curr_insn,
1238 INTVAL (operands[1]), operands[0], operands[2], 0);
1241 [(set_attr "length" "4,4,4,16")
1242 (set_attr "predicable" "yes")]
1246 [(match_scratch:SI 3 "r")
1247 (set (match_operand:SI 0 "arm_general_register_operand" "")
1248 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1249 (match_operand:SI 2 "arm_general_register_operand" "")))]
1251 && !const_ok_for_arm (INTVAL (operands[1]))
1252 && const_ok_for_arm (~INTVAL (operands[1]))"
1253 [(set (match_dup 3) (match_dup 1))
1254 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1258 (define_insn "*subsi3_compare0"
1259 [(set (reg:CC_NOOV CC_REGNUM)
1261 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1262 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1264 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1265 (minus:SI (match_dup 1) (match_dup 2)))]
1270 [(set_attr "conds" "set")]
1273 (define_insn "*subsi3_compare"
1274 [(set (reg:CC CC_REGNUM)
1275 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1276 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1277 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1278 (minus:SI (match_dup 1) (match_dup 2)))]
1283 [(set_attr "conds" "set")]
1286 (define_expand "decscc"
1287 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1288 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1289 (match_operator:SI 2 "arm_comparison_operator"
1290 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1295 (define_insn "*arm_decscc"
1296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1297 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1298 (match_operator:SI 2 "arm_comparison_operator"
1299 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1303 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1304 [(set_attr "conds" "use")
1305 (set_attr "length" "*,8")]
1308 (define_expand "subsf3"
1309 [(set (match_operand:SF 0 "s_register_operand" "")
1310 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1311 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1312 "TARGET_32BIT && TARGET_HARD_FLOAT"
1314 if (TARGET_MAVERICK)
1316 if (!cirrus_fp_register (operands[1], SFmode))
1317 operands[1] = force_reg (SFmode, operands[1]);
1318 if (!cirrus_fp_register (operands[2], SFmode))
1319 operands[2] = force_reg (SFmode, operands[2]);
1323 (define_expand "subdf3"
1324 [(set (match_operand:DF 0 "s_register_operand" "")
1325 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1326 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1327 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1329 if (TARGET_MAVERICK)
1331 if (!cirrus_fp_register (operands[1], DFmode))
1332 operands[1] = force_reg (DFmode, operands[1]);
1333 if (!cirrus_fp_register (operands[2], DFmode))
1334 operands[2] = force_reg (DFmode, operands[2]);
1339 ;; Multiplication insns
1341 (define_expand "mulsi3"
1342 [(set (match_operand:SI 0 "s_register_operand" "")
1343 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1344 (match_operand:SI 1 "s_register_operand" "")))]
1349 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same.
;; 32x32->32 multiply for pre-v6 cores.  Pre-v6 MUL requires Rd != Rm, so
;; the destination is earlyclobbered ("=&r") and the first alternative ties
;; operand 1 to operand 0 ("%0") to let one input share the output register.
;; Note the operands in the output template are deliberately ordered
;; "%0, %2, %1" so the tied operand ends up as Rs, not Rm.
1350 (define_insn "*arm_mulsi3"
1351 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1352 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1353 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1354 "TARGET_32BIT && !arm_arch6"
1355 "mul%?\\t%0, %2, %1"
1356 [(set_attr "insn" "mul")
1357 (set_attr "predicable" "yes")]
;; 32x32->32 multiply for ARMv6 and later.  v6 lifts the Rd != Rm
;; restriction (see the comment above the widening-multiply patterns), so
;; no earlyclobber or operand-tying trick is needed here.
1360 (define_insn "*arm_mulsi3_v6"
1361 [(set (match_operand:SI 0 "s_register_operand" "=r")
1362 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1363 (match_operand:SI 2 "s_register_operand" "r")))]
1364 "TARGET_32BIT && arm_arch6"
1365 "mul%?\\t%0, %1, %2"
1366 [(set_attr "insn" "mul")
1367 (set_attr "predicable" "yes")]
1370 ; Unfortunately, with Thumb the '&'/'0' trick can fail when operands
1371 ; 1 and 2 are the same, because reload will make operand 0 match
1372 ; operand 1 without realizing that this conflicts with operand 2. We fix
1373 ; this by adding another alternative to match this case, and then `reload'
1374 ; it ourselves. This alternative must come first.
1375 (define_insn "*thumb_mulsi3"
1376 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1377 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1378 (match_operand:SI 2 "register_operand" "l,l,l")))]
1379 "TARGET_THUMB1 && !arm_arch6"
1381 if (which_alternative < 2)
1382 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1384 return \"mul\\t%0, %2\";
1386 [(set_attr "length" "4,4,2")
1387 (set_attr "insn" "mul")]
1390 (define_insn "*thumb_mulsi3_v6"
1391 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1392 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1393 (match_operand:SI 2 "register_operand" "l,0,0")))]
1394 "TARGET_THUMB1 && arm_arch6"
1399 [(set_attr "length" "2")
1400 (set_attr "insn" "mul")]
1403 (define_insn "*mulsi3_compare0"
1404 [(set (reg:CC_NOOV CC_REGNUM)
1405 (compare:CC_NOOV (mult:SI
1406 (match_operand:SI 2 "s_register_operand" "r,r")
1407 (match_operand:SI 1 "s_register_operand" "%0,r"))
1409 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1410 (mult:SI (match_dup 2) (match_dup 1)))]
1411 "TARGET_ARM && !arm_arch6"
1412 "mul%.\\t%0, %2, %1"
1413 [(set_attr "conds" "set")
1414 (set_attr "insn" "muls")]
1417 (define_insn "*mulsi3_compare0_v6"
1418 [(set (reg:CC_NOOV CC_REGNUM)
1419 (compare:CC_NOOV (mult:SI
1420 (match_operand:SI 2 "s_register_operand" "r")
1421 (match_operand:SI 1 "s_register_operand" "r"))
1423 (set (match_operand:SI 0 "s_register_operand" "=r")
1424 (mult:SI (match_dup 2) (match_dup 1)))]
1425 "TARGET_ARM && arm_arch6 && optimize_size"
1426 "mul%.\\t%0, %2, %1"
1427 [(set_attr "conds" "set")
1428 (set_attr "insn" "muls")]
1431 (define_insn "*mulsi_compare0_scratch"
1432 [(set (reg:CC_NOOV CC_REGNUM)
1433 (compare:CC_NOOV (mult:SI
1434 (match_operand:SI 2 "s_register_operand" "r,r")
1435 (match_operand:SI 1 "s_register_operand" "%0,r"))
1437 (clobber (match_scratch:SI 0 "=&r,&r"))]
1438 "TARGET_ARM && !arm_arch6"
1439 "mul%.\\t%0, %2, %1"
1440 [(set_attr "conds" "set")
1441 (set_attr "insn" "muls")]
1444 (define_insn "*mulsi_compare0_scratch_v6"
1445 [(set (reg:CC_NOOV CC_REGNUM)
1446 (compare:CC_NOOV (mult:SI
1447 (match_operand:SI 2 "s_register_operand" "r")
1448 (match_operand:SI 1 "s_register_operand" "r"))
1450 (clobber (match_scratch:SI 0 "=r"))]
1451 "TARGET_ARM && arm_arch6 && optimize_size"
1452 "mul%.\\t%0, %2, %1"
1453 [(set_attr "conds" "set")
1454 (set_attr "insn" "muls")]
1457 ;; Unnamed templates to match MLA instruction.
1459 (define_insn "*mulsi3addsi"
1460 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1462 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1463 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1464 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1465 "TARGET_32BIT && !arm_arch6"
1466 "mla%?\\t%0, %2, %1, %3"
1467 [(set_attr "insn" "mla")
1468 (set_attr "predicable" "yes")]
1471 (define_insn "*mulsi3addsi_v6"
1472 [(set (match_operand:SI 0 "s_register_operand" "=r")
1474 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1475 (match_operand:SI 1 "s_register_operand" "r"))
1476 (match_operand:SI 3 "s_register_operand" "r")))]
1477 "TARGET_32BIT && arm_arch6"
1478 "mla%?\\t%0, %2, %1, %3"
1479 [(set_attr "insn" "mla")
1480 (set_attr "predicable" "yes")]
1483 (define_insn "*mulsi3addsi_compare0"
1484 [(set (reg:CC_NOOV CC_REGNUM)
1487 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1488 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1489 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1491 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1492 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1494 "TARGET_ARM && arm_arch6"
1495 "mla%.\\t%0, %2, %1, %3"
1496 [(set_attr "conds" "set")
1497 (set_attr "insn" "mlas")]
1500 (define_insn "*mulsi3addsi_compare0_v6"
1501 [(set (reg:CC_NOOV CC_REGNUM)
1504 (match_operand:SI 2 "s_register_operand" "r")
1505 (match_operand:SI 1 "s_register_operand" "r"))
1506 (match_operand:SI 3 "s_register_operand" "r"))
1508 (set (match_operand:SI 0 "s_register_operand" "=r")
1509 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1511 "TARGET_ARM && arm_arch6 && optimize_size"
1512 "mla%.\\t%0, %2, %1, %3"
1513 [(set_attr "conds" "set")
1514 (set_attr "insn" "mlas")]
1517 (define_insn "*mulsi3addsi_compare0_scratch"
1518 [(set (reg:CC_NOOV CC_REGNUM)
1521 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1522 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1523 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1525 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1526 "TARGET_ARM && !arm_arch6"
1527 "mla%.\\t%0, %2, %1, %3"
1528 [(set_attr "conds" "set")
1529 (set_attr "insn" "mlas")]
1532 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1533 [(set (reg:CC_NOOV CC_REGNUM)
1536 (match_operand:SI 2 "s_register_operand" "r")
1537 (match_operand:SI 1 "s_register_operand" "r"))
1538 (match_operand:SI 3 "s_register_operand" "r"))
1540 (clobber (match_scratch:SI 0 "=r"))]
1541 "TARGET_ARM && arm_arch6 && optimize_size"
1542 "mla%.\\t%0, %2, %1, %3"
1543 [(set_attr "conds" "set")
1544 (set_attr "insn" "mlas")]
1547 (define_insn "*mulsi3subsi"
1548 [(set (match_operand:SI 0 "s_register_operand" "=r")
1550 (match_operand:SI 3 "s_register_operand" "r")
1551 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1552 (match_operand:SI 1 "s_register_operand" "r"))))]
1553 "TARGET_32BIT && arm_arch_thumb2"
1554 "mls%?\\t%0, %2, %1, %3"
1555 [(set_attr "insn" "mla")
1556 (set_attr "predicable" "yes")]
1559 (define_expand "maddsidi4"
1560 [(set (match_operand:DI 0 "s_register_operand" "")
1563 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1564 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1565 (match_operand:DI 3 "s_register_operand" "")))]
1566 "TARGET_32BIT && arm_arch3m"
1569 (define_insn "*mulsidi3adddi"
1570 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1573 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1574 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1575 (match_operand:DI 1 "s_register_operand" "0")))]
1576 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1577 "smlal%?\\t%Q0, %R0, %3, %2"
1578 [(set_attr "insn" "smlal")
1579 (set_attr "predicable" "yes")]
1582 (define_insn "*mulsidi3adddi_v6"
1583 [(set (match_operand:DI 0 "s_register_operand" "=r")
1586 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1587 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1588 (match_operand:DI 1 "s_register_operand" "0")))]
1589 "TARGET_32BIT && arm_arch6"
1590 "smlal%?\\t%Q0, %R0, %3, %2"
1591 [(set_attr "insn" "smlal")
1592 (set_attr "predicable" "yes")]
1595 ;; 32x32->64 widening multiply.
1596 ;; As with mulsi3, the only difference between the v3-5 and v6+
1597 ;; versions of these patterns is the requirement that the output not
1598 ;; overlap the inputs, but that still means we have to have a named
1599 ;; expander and two different starred insns.
1601 (define_expand "mulsidi3"
1602 [(set (match_operand:DI 0 "s_register_operand" "")
1604 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1605 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1606 "TARGET_32BIT && arm_arch3m"
;; 32x32->64 signed widening multiply (SMULL) for v3M..v5 cores, where the
;; 64-bit destination must not overlap the inputs — hence the earlyclobber
;; "=&r".  "%r" on operand 1 marks the multiplication as commutative.
;; %Q0/%R0 select the low/high words of the DI destination.
1610 (define_insn "*mulsidi3_nov6"
1611 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1613 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1614 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1615 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1616 "smull%?\\t%Q0, %R0, %1, %2"
1617 [(set_attr "insn" "smull")
1618 (set_attr "predicable" "yes")]
1621 (define_insn "*mulsidi3_v6"
1622 [(set (match_operand:DI 0 "s_register_operand" "=r")
1624 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1625 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1626 "TARGET_32BIT && arm_arch6"
1627 "smull%?\\t%Q0, %R0, %1, %2"
1628 [(set_attr "insn" "smull")
1629 (set_attr "predicable" "yes")]
1632 (define_expand "umulsidi3"
1633 [(set (match_operand:DI 0 "s_register_operand" "")
1635 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1636 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1637 "TARGET_32BIT && arm_arch3m"
1641 (define_insn "*umulsidi3_nov6"
1642 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1644 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1645 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1646 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1647 "umull%?\\t%Q0, %R0, %1, %2"
1648 [(set_attr "insn" "umull")
1649 (set_attr "predicable" "yes")]
1652 (define_insn "*umulsidi3_v6"
1653 [(set (match_operand:DI 0 "s_register_operand" "=r")
1655 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1656 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1657 "TARGET_32BIT && arm_arch6"
1658 "umull%?\\t%Q0, %R0, %1, %2"
1659 [(set_attr "insn" "umull")
1660 (set_attr "predicable" "yes")]
1663 (define_expand "umaddsidi4"
1664 [(set (match_operand:DI 0 "s_register_operand" "")
1667 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1668 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1669 (match_operand:DI 3 "s_register_operand" "")))]
1670 "TARGET_32BIT && arm_arch3m"
1673 (define_insn "*umulsidi3adddi"
1674 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1677 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1678 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1679 (match_operand:DI 1 "s_register_operand" "0")))]
1680 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1681 "umlal%?\\t%Q0, %R0, %3, %2"
1682 [(set_attr "insn" "umlal")
1683 (set_attr "predicable" "yes")]
1686 (define_insn "*umulsidi3adddi_v6"
1687 [(set (match_operand:DI 0 "s_register_operand" "=r")
1690 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1691 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1692 (match_operand:DI 1 "s_register_operand" "0")))]
1693 "TARGET_32BIT && arm_arch6"
1694 "umlal%?\\t%Q0, %R0, %3, %2"
1695 [(set_attr "insn" "umlal")
1696 (set_attr "predicable" "yes")]
1699 (define_expand "smulsi3_highpart"
1701 [(set (match_operand:SI 0 "s_register_operand" "")
1705 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1706 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1708 (clobber (match_scratch:SI 3 ""))])]
1709 "TARGET_32BIT && arm_arch3m"
1713 (define_insn "*smulsi3_highpart_nov6"
1714 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1718 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1719 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1721 (clobber (match_scratch:SI 3 "=&r,&r"))]
1722 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1723 "smull%?\\t%3, %0, %2, %1"
1724 [(set_attr "insn" "smull")
1725 (set_attr "predicable" "yes")]
1728 (define_insn "*smulsi3_highpart_v6"
1729 [(set (match_operand:SI 0 "s_register_operand" "=r")
1733 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1734 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1736 (clobber (match_scratch:SI 3 "=r"))]
1737 "TARGET_32BIT && arm_arch6"
1738 "smull%?\\t%3, %0, %2, %1"
1739 [(set_attr "insn" "smull")
1740 (set_attr "predicable" "yes")]
1743 (define_expand "umulsi3_highpart"
1745 [(set (match_operand:SI 0 "s_register_operand" "")
1749 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1750 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1752 (clobber (match_scratch:SI 3 ""))])]
1753 "TARGET_32BIT && arm_arch3m"
1757 (define_insn "*umulsi3_highpart_nov6"
1758 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1762 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1763 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1765 (clobber (match_scratch:SI 3 "=&r,&r"))]
1766 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1767 "umull%?\\t%3, %0, %2, %1"
1768 [(set_attr "insn" "umull")
1769 (set_attr "predicable" "yes")]
1772 (define_insn "*umulsi3_highpart_v6"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1777 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1778 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1780 (clobber (match_scratch:SI 3 "=r"))]
1781 "TARGET_32BIT && arm_arch6"
1782 "umull%?\\t%3, %0, %2, %1"
1783 [(set_attr "insn" "umull")
1784 (set_attr "predicable" "yes")]
;; 16x16->32 signed multiply using the DSP-extension SMULBB instruction
;; (bottom halfword of each operand).  "%r" on operand 1 marks the multiply
;; as commutative.  Only available when TARGET_DSP_MULTIPLY holds.
1787 (define_insn "mulhisi3"
1788 [(set (match_operand:SI 0 "s_register_operand" "=r")
1789 (mult:SI (sign_extend:SI
1790 (match_operand:HI 1 "s_register_operand" "%r"))
1792 (match_operand:HI 2 "s_register_operand" "r"))))]
1793 "TARGET_DSP_MULTIPLY"
1794 "smulbb%?\\t%0, %1, %2"
1795 [(set_attr "insn" "smulxy")
1796 (set_attr "predicable" "yes")]
1799 (define_insn "*mulhisi3tb"
1800 [(set (match_operand:SI 0 "s_register_operand" "=r")
1801 (mult:SI (ashiftrt:SI
1802 (match_operand:SI 1 "s_register_operand" "r")
1805 (match_operand:HI 2 "s_register_operand" "r"))))]
1806 "TARGET_DSP_MULTIPLY"
1807 "smultb%?\\t%0, %1, %2"
1808 [(set_attr "insn" "smulxy")
1809 (set_attr "predicable" "yes")]
1812 (define_insn "*mulhisi3bt"
1813 [(set (match_operand:SI 0 "s_register_operand" "=r")
1814 (mult:SI (sign_extend:SI
1815 (match_operand:HI 1 "s_register_operand" "r"))
1817 (match_operand:SI 2 "s_register_operand" "r")
1819 "TARGET_DSP_MULTIPLY"
1820 "smulbt%?\\t%0, %1, %2"
1821 [(set_attr "insn" "smulxy")
1822 (set_attr "predicable" "yes")]
1825 (define_insn "*mulhisi3tt"
1826 [(set (match_operand:SI 0 "s_register_operand" "=r")
1827 (mult:SI (ashiftrt:SI
1828 (match_operand:SI 1 "s_register_operand" "r")
1831 (match_operand:SI 2 "s_register_operand" "r")
1833 "TARGET_DSP_MULTIPLY"
1834 "smultt%?\\t%0, %1, %2"
1835 [(set_attr "insn" "smulxy")
1836 (set_attr "predicable" "yes")]
1839 (define_insn "maddhisi4"
1840 [(set (match_operand:SI 0 "s_register_operand" "=r")
1841 (plus:SI (mult:SI (sign_extend:SI
1842 (match_operand:HI 1 "s_register_operand" "r"))
1844 (match_operand:HI 2 "s_register_operand" "r")))
1845 (match_operand:SI 3 "s_register_operand" "r")))]
1846 "TARGET_DSP_MULTIPLY"
1847 "smlabb%?\\t%0, %1, %2, %3"
1848 [(set_attr "insn" "smlaxy")
1849 (set_attr "predicable" "yes")]
1852 ;; Note: there is no maddhisi4ibt because this pattern is the canonical form.
1853 (define_insn "*maddhisi4tb"
1854 [(set (match_operand:SI 0 "s_register_operand" "=r")
1855 (plus:SI (mult:SI (ashiftrt:SI
1856 (match_operand:SI 1 "s_register_operand" "r")
1859 (match_operand:HI 2 "s_register_operand" "r")))
1860 (match_operand:SI 3 "s_register_operand" "r")))]
1861 "TARGET_DSP_MULTIPLY"
1862 "smlatb%?\\t%0, %1, %2, %3"
1863 [(set_attr "insn" "smlaxy")
1864 (set_attr "predicable" "yes")]
1867 (define_insn "*maddhisi4tt"
1868 [(set (match_operand:SI 0 "s_register_operand" "=r")
1869 (plus:SI (mult:SI (ashiftrt:SI
1870 (match_operand:SI 1 "s_register_operand" "r")
1873 (match_operand:SI 2 "s_register_operand" "r")
1875 (match_operand:SI 3 "s_register_operand" "r")))]
1876 "TARGET_DSP_MULTIPLY"
1877 "smlatt%?\\t%0, %1, %2, %3"
1878 [(set_attr "insn" "smlaxy")
1879 (set_attr "predicable" "yes")]
1882 (define_insn "maddhidi4"
1883 [(set (match_operand:DI 0 "s_register_operand" "=r")
1885 (mult:DI (sign_extend:DI
1886 (match_operand:HI 1 "s_register_operand" "r"))
1888 (match_operand:HI 2 "s_register_operand" "r")))
1889 (match_operand:DI 3 "s_register_operand" "0")))]
1890 "TARGET_DSP_MULTIPLY"
1891 "smlalbb%?\\t%Q0, %R0, %1, %2"
1892 [(set_attr "insn" "smlalxy")
1893 (set_attr "predicable" "yes")])
1895 ;; Note: there is no maddhidi4ibt because this pattern is the canonical form.
1896 (define_insn "*maddhidi4tb"
1897 [(set (match_operand:DI 0 "s_register_operand" "=r")
1899 (mult:DI (sign_extend:DI
1901 (match_operand:SI 1 "s_register_operand" "r")
1904 (match_operand:HI 2 "s_register_operand" "r")))
1905 (match_operand:DI 3 "s_register_operand" "0")))]
1906 "TARGET_DSP_MULTIPLY"
1907 "smlaltb%?\\t%Q0, %R0, %1, %2"
1908 [(set_attr "insn" "smlalxy")
1909 (set_attr "predicable" "yes")])
1911 (define_insn "*maddhidi4tt"
1912 [(set (match_operand:DI 0 "s_register_operand" "=r")
1914 (mult:DI (sign_extend:DI
1916 (match_operand:SI 1 "s_register_operand" "r")
1920 (match_operand:SI 2 "s_register_operand" "r")
1922 (match_operand:DI 3 "s_register_operand" "0")))]
1923 "TARGET_DSP_MULTIPLY"
1924 "smlaltt%?\\t%Q0, %R0, %1, %2"
1925 [(set_attr "insn" "smlalxy")
1926 (set_attr "predicable" "yes")])
1928 (define_expand "mulsf3"
1929 [(set (match_operand:SF 0 "s_register_operand" "")
1930 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1931 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1932 "TARGET_32BIT && TARGET_HARD_FLOAT"
1935 && !cirrus_fp_register (operands[2], SFmode))
1936 operands[2] = force_reg (SFmode, operands[2]);
1939 (define_expand "muldf3"
1940 [(set (match_operand:DF 0 "s_register_operand" "")
1941 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1942 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1943 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1946 && !cirrus_fp_register (operands[2], DFmode))
1947 operands[2] = force_reg (DFmode, operands[2]);
1952 (define_expand "divsf3"
1953 [(set (match_operand:SF 0 "s_register_operand" "")
1954 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1955 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1956 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1959 (define_expand "divdf3"
1960 [(set (match_operand:DF 0 "s_register_operand" "")
1961 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1962 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1963 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1968 (define_expand "modsf3"
1969 [(set (match_operand:SF 0 "s_register_operand" "")
1970 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1971 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1972 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1975 (define_expand "moddf3"
1976 [(set (match_operand:DF 0 "s_register_operand" "")
1977 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1978 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1979 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1982 ;; Boolean and,ior,xor insns
1984 ;; Split up double word logical operations
1986 ;; Split up simple DImode logical operations. Simply perform the logical
1987 ;; operation on the upper and lower halves of the registers.
1989 [(set (match_operand:DI 0 "s_register_operand" "")
1990 (match_operator:DI 6 "logical_binary_operator"
1991 [(match_operand:DI 1 "s_register_operand" "")
1992 (match_operand:DI 2 "s_register_operand" "")]))]
1993 "TARGET_32BIT && reload_completed
1994 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1995 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1996 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1997 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2000 operands[3] = gen_highpart (SImode, operands[0]);
2001 operands[0] = gen_lowpart (SImode, operands[0]);
2002 operands[4] = gen_highpart (SImode, operands[1]);
2003 operands[1] = gen_lowpart (SImode, operands[1]);
2004 operands[5] = gen_highpart (SImode, operands[2]);
2005 operands[2] = gen_lowpart (SImode, operands[2]);
2010 [(set (match_operand:DI 0 "s_register_operand" "")
2011 (match_operator:DI 6 "logical_binary_operator"
2012 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2013 (match_operand:DI 1 "s_register_operand" "")]))]
2014 "TARGET_32BIT && reload_completed"
2015 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2016 (set (match_dup 3) (match_op_dup:SI 6
2017 [(ashiftrt:SI (match_dup 2) (const_int 31))
2021 operands[3] = gen_highpart (SImode, operands[0]);
2022 operands[0] = gen_lowpart (SImode, operands[0]);
2023 operands[4] = gen_highpart (SImode, operands[1]);
2024 operands[1] = gen_lowpart (SImode, operands[1]);
2025 operands[5] = gen_highpart (SImode, operands[2]);
2026 operands[2] = gen_lowpart (SImode, operands[2]);
2030 ;; The zero extend of operand 2 means we can just copy the high part of
2031 ;; operand1 into operand0.
2033 [(set (match_operand:DI 0 "s_register_operand" "")
2035 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2036 (match_operand:DI 1 "s_register_operand" "")))]
2037 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2038 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2039 (set (match_dup 3) (match_dup 4))]
2042 operands[4] = gen_highpart (SImode, operands[1]);
2043 operands[3] = gen_highpart (SImode, operands[0]);
2044 operands[0] = gen_lowpart (SImode, operands[0]);
2045 operands[1] = gen_lowpart (SImode, operands[1]);
2049 ;; The zero extend of operand 2 means we can just copy the high part of
2050 ;; operand1 into operand0.
2052 [(set (match_operand:DI 0 "s_register_operand" "")
2054 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2055 (match_operand:DI 1 "s_register_operand" "")))]
2056 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2057 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2058 (set (match_dup 3) (match_dup 4))]
2061 operands[4] = gen_highpart (SImode, operands[1]);
2062 operands[3] = gen_highpart (SImode, operands[0]);
2063 operands[0] = gen_lowpart (SImode, operands[0]);
2064 operands[1] = gen_lowpart (SImode, operands[1]);
2068 (define_expand "anddi3"
2069 [(set (match_operand:DI 0 "s_register_operand" "")
2070 (and:DI (match_operand:DI 1 "s_register_operand" "")
2071 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; 64-bit AND of two core-register pairs, done as two 32-bit ANDs
;; ("length" "8"); alternative 1 ties operand 1 to the destination.
;; NOTE(review): the output-template line (original line 2081) is
;; elided from this extract — verify against the full arm.md.
2076 (define_insn "*anddi3_insn"
2077 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2078 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2079 (match_operand:DI 2 "s_register_operand" "r,r")))]
2080 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2082 [(set_attr "length" "8")]
;; 64-bit AND where the second operand is a zero-extended SImode value.
;; After reload this splits into one 32-bit AND of the low words plus a
;; store of zero to the high word of the destination: the zero-extension
;; guarantees the high half of the result is always zero.
;; NOTE(review): the insn condition and output-template lines (original
;; lines 2090-2091, 2094, 2097-2098) are elided from this extract.
2085 (define_insn_and_split "*anddi_zesidi_di"
2086 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2087 (and:DI (zero_extend:DI
2088 (match_operand:SI 2 "s_register_operand" "r,r"))
2089 (match_operand:DI 1 "s_register_operand" "0,r")))]
2092 "TARGET_32BIT && reload_completed"
2093 ; The zero extend of operand 2 clears the high word of the output
2095 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2096 (set (match_dup 3) (const_int 0))]
2099 operands[3] = gen_highpart (SImode, operands[0]);
2100 operands[0] = gen_lowpart (SImode, operands[0]);
2101 operands[1] = gen_lowpart (SImode, operands[1]);
2103 [(set_attr "length" "8")]
2106 (define_insn "*anddi_sesdi_di"
2107 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2108 (and:DI (sign_extend:DI
2109 (match_operand:SI 2 "s_register_operand" "r,r"))
2110 (match_operand:DI 1 "s_register_operand" "0,r")))]
2113 [(set_attr "length" "8")]
2116 (define_expand "andsi3"
2117 [(set (match_operand:SI 0 "s_register_operand" "")
2118 (and:SI (match_operand:SI 1 "s_register_operand" "")
2119 (match_operand:SI 2 "reg_or_int_operand" "")))]
2124 if (GET_CODE (operands[2]) == CONST_INT)
2126 if (INTVAL (operands[2]) == 255 && arm_arch6)
2128 operands[1] = convert_to_mode (QImode, operands[1], 1);
2129 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2133 arm_split_constant (AND, SImode, NULL_RTX,
2134 INTVAL (operands[2]), operands[0],
2136 optimize && can_create_pseudo_p ());
2141 else /* TARGET_THUMB1 */
2143 if (GET_CODE (operands[2]) != CONST_INT)
2145 rtx tmp = force_reg (SImode, operands[2]);
2146 if (rtx_equal_p (operands[0], operands[1]))
2150 operands[2] = operands[1];
2158 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2160 operands[2] = force_reg (SImode,
2161 GEN_INT (~INTVAL (operands[2])));
2163 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2168 for (i = 9; i <= 31; i++)
2170 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2172 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2176 else if ((((HOST_WIDE_INT) 1) << i) - 1
2177 == ~INTVAL (operands[2]))
2179 rtx shift = GEN_INT (i);
2180 rtx reg = gen_reg_rtx (SImode);
2182 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2183 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2189 operands[2] = force_reg (SImode, operands[2]);
2195 ; ??? Check split length for Thumb-2
2196 (define_insn_and_split "*arm_andsi3_insn"
2197 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2198 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2199 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2203 bic%?\\t%0, %1, #%B2
2206 && GET_CODE (operands[2]) == CONST_INT
2207 && !(const_ok_for_arm (INTVAL (operands[2]))
2208 || const_ok_for_arm (~INTVAL (operands[2])))"
2209 [(clobber (const_int 0))]
2211 arm_split_constant (AND, SImode, curr_insn,
2212 INTVAL (operands[2]), operands[0], operands[1], 0);
2215 [(set_attr "length" "4,4,16")
2216 (set_attr "predicable" "yes")]
2219 (define_insn "*thumb1_andsi3_insn"
2220 [(set (match_operand:SI 0 "register_operand" "=l")
2221 (and:SI (match_operand:SI 1 "register_operand" "%0")
2222 (match_operand:SI 2 "register_operand" "l")))]
2225 [(set_attr "length" "2")
2226 (set_attr "conds" "set")])
2228 (define_insn "*andsi3_compare0"
2229 [(set (reg:CC_NOOV CC_REGNUM)
2231 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2232 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2234 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2235 (and:SI (match_dup 1) (match_dup 2)))]
2239 bic%.\\t%0, %1, #%B2"
2240 [(set_attr "conds" "set")]
2243 (define_insn "*andsi3_compare0_scratch"
2244 [(set (reg:CC_NOOV CC_REGNUM)
2246 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2247 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2249 (clobber (match_scratch:SI 2 "=X,r"))]
2253 bic%.\\t%2, %0, #%B1"
2254 [(set_attr "conds" "set")]
2257 (define_insn "*zeroextractsi_compare0_scratch"
2258 [(set (reg:CC_NOOV CC_REGNUM)
2259 (compare:CC_NOOV (zero_extract:SI
2260 (match_operand:SI 0 "s_register_operand" "r")
2261 (match_operand 1 "const_int_operand" "n")
2262 (match_operand 2 "const_int_operand" "n"))
2265 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2266 && INTVAL (operands[1]) > 0
2267 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2268 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2270 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2271 << INTVAL (operands[2]));
2272 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2275 [(set_attr "conds" "set")]
2278 (define_insn_and_split "*ne_zeroextractsi"
2279 [(set (match_operand:SI 0 "s_register_operand" "=r")
2280 (ne:SI (zero_extract:SI
2281 (match_operand:SI 1 "s_register_operand" "r")
2282 (match_operand:SI 2 "const_int_operand" "n")
2283 (match_operand:SI 3 "const_int_operand" "n"))
2285 (clobber (reg:CC CC_REGNUM))]
2287 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2288 && INTVAL (operands[2]) > 0
2289 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2290 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2293 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2294 && INTVAL (operands[2]) > 0
2295 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2296 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2297 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2298 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2300 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2302 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2303 (match_dup 0) (const_int 1)))]
2305 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2306 << INTVAL (operands[3]));
2308 [(set_attr "conds" "clob")
2309 (set (attr "length")
2310 (if_then_else (eq_attr "is_thumb" "yes")
2315 (define_insn_and_split "*ne_zeroextractsi_shifted"
2316 [(set (match_operand:SI 0 "s_register_operand" "=r")
2317 (ne:SI (zero_extract:SI
2318 (match_operand:SI 1 "s_register_operand" "r")
2319 (match_operand:SI 2 "const_int_operand" "n")
2322 (clobber (reg:CC CC_REGNUM))]
2326 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2327 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2329 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2331 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2332 (match_dup 0) (const_int 1)))]
2334 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2336 [(set_attr "conds" "clob")
2337 (set_attr "length" "8")]
2340 (define_insn_and_split "*ite_ne_zeroextractsi"
2341 [(set (match_operand:SI 0 "s_register_operand" "=r")
2342 (if_then_else:SI (ne (zero_extract:SI
2343 (match_operand:SI 1 "s_register_operand" "r")
2344 (match_operand:SI 2 "const_int_operand" "n")
2345 (match_operand:SI 3 "const_int_operand" "n"))
2347 (match_operand:SI 4 "arm_not_operand" "rIK")
2349 (clobber (reg:CC CC_REGNUM))]
2351 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2352 && INTVAL (operands[2]) > 0
2353 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2354 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2355 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2358 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2359 && INTVAL (operands[2]) > 0
2360 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2361 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2362 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2363 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2364 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2366 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2368 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2369 (match_dup 0) (match_dup 4)))]
2371 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2372 << INTVAL (operands[3]));
2374 [(set_attr "conds" "clob")
2375 (set_attr "length" "8")]
2378 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2379 [(set (match_operand:SI 0 "s_register_operand" "=r")
2380 (if_then_else:SI (ne (zero_extract:SI
2381 (match_operand:SI 1 "s_register_operand" "r")
2382 (match_operand:SI 2 "const_int_operand" "n")
2385 (match_operand:SI 3 "arm_not_operand" "rIK")
2387 (clobber (reg:CC CC_REGNUM))]
2388 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2390 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2391 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2392 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2394 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2396 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2397 (match_dup 0) (match_dup 3)))]
2399 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2401 [(set_attr "conds" "clob")
2402 (set_attr "length" "8")]
2406 [(set (match_operand:SI 0 "s_register_operand" "")
2407 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2408 (match_operand:SI 2 "const_int_operand" "")
2409 (match_operand:SI 3 "const_int_operand" "")))
2410 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2412 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2413 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2415 HOST_WIDE_INT temp = INTVAL (operands[2]);
2417 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2418 operands[3] = GEN_INT (32 - temp);
2422 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
2424 [(set (match_operand:SI 0 "s_register_operand" "")
2425 (match_operator:SI 1 "shiftable_operator"
2426 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2427 (match_operand:SI 3 "const_int_operand" "")
2428 (match_operand:SI 4 "const_int_operand" ""))
2429 (match_operand:SI 5 "s_register_operand" "")]))
2430 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2432 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2435 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2438 HOST_WIDE_INT temp = INTVAL (operands[3]);
2440 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2441 operands[4] = GEN_INT (32 - temp);
2446 [(set (match_operand:SI 0 "s_register_operand" "")
2447 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2448 (match_operand:SI 2 "const_int_operand" "")
2449 (match_operand:SI 3 "const_int_operand" "")))]
2451 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2452 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2454 HOST_WIDE_INT temp = INTVAL (operands[2]);
2456 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2457 operands[3] = GEN_INT (32 - temp);
2462 [(set (match_operand:SI 0 "s_register_operand" "")
2463 (match_operator:SI 1 "shiftable_operator"
2464 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2465 (match_operand:SI 3 "const_int_operand" "")
2466 (match_operand:SI 4 "const_int_operand" ""))
2467 (match_operand:SI 5 "s_register_operand" "")]))
2468 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2470 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2473 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2476 HOST_WIDE_INT temp = INTVAL (operands[3]);
2478 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2479 operands[4] = GEN_INT (32 - temp);
2483 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2484 ;;; represented by the bitfield, then this will produce incorrect results.
2485 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2486 ;;; which have a real bit-field insert instruction, the truncation happens
2487 ;;; in the bit-field insert instruction itself. Since arm does not have a
2488 ;;; bit-field insert instruction, we would have to emit code here to truncate
2489 ;;; the value before we insert. This loses some of the advantage of having
2490 ;;; this insv pattern, so this pattern needs to be reevaluated.
2492 (define_expand "insv"
2493 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2494 (match_operand 1 "general_operand" "")
2495 (match_operand 2 "general_operand" ""))
2496 (match_operand 3 "reg_or_int_operand" ""))]
2497 "TARGET_ARM || arm_arch_thumb2"
2500 int start_bit = INTVAL (operands[2]);
2501 int width = INTVAL (operands[1]);
2502 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2503 rtx target, subtarget;
2505 if (arm_arch_thumb2)
2507 if (unaligned_access && MEM_P (operands[0])
2508 && s_register_operand (operands[3], GET_MODE (operands[3]))
2509 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2513 if (BYTES_BIG_ENDIAN)
2514 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2519 base_addr = adjust_address (operands[0], SImode,
2520 start_bit / BITS_PER_UNIT);
2521 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2525 rtx tmp = gen_reg_rtx (HImode);
2527 base_addr = adjust_address (operands[0], HImode,
2528 start_bit / BITS_PER_UNIT);
2529 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2530 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2534 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2536 bool use_bfi = TRUE;
2538 if (GET_CODE (operands[3]) == CONST_INT)
2540 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2544 emit_insn (gen_insv_zero (operands[0], operands[1],
2549 /* See if the set can be done with a single orr instruction. */
2550 if (val == mask && const_ok_for_arm (val << start_bit))
2556 if (GET_CODE (operands[3]) != REG)
2557 operands[3] = force_reg (SImode, operands[3]);
2559 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2568 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2571 target = copy_rtx (operands[0]);
2572 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2573 subreg as the final target. */
2574 if (GET_CODE (target) == SUBREG)
2576 subtarget = gen_reg_rtx (SImode);
2577 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2578 < GET_MODE_SIZE (SImode))
2579 target = SUBREG_REG (target);
2584 if (GET_CODE (operands[3]) == CONST_INT)
2586 /* Since we are inserting a known constant, we may be able to
2587 reduce the number of bits that we have to clear so that
2588 the mask becomes simple. */
2589 /* ??? This code does not check to see if the new mask is actually
2590 simpler. It may not be. */
2591 rtx op1 = gen_reg_rtx (SImode);
2592 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2593 start of this pattern. */
2594 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2595 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2597 emit_insn (gen_andsi3 (op1, operands[0],
2598 gen_int_mode (~mask2, SImode)));
2599 emit_insn (gen_iorsi3 (subtarget, op1,
2600 gen_int_mode (op3_value << start_bit, SImode)));
2602 else if (start_bit == 0
2603 && !(const_ok_for_arm (mask)
2604 || const_ok_for_arm (~mask)))
2606 /* A Trick, since we are setting the bottom bits in the word,
2607 we can shift operand[3] up, operand[0] down, OR them together
2608 and rotate the result back again. This takes 3 insns, and
2609 the third might be mergeable into another op. */
2610 /* The shift up copes with the possibility that operand[3] is
2611 wider than the bitfield. */
2612 rtx op0 = gen_reg_rtx (SImode);
2613 rtx op1 = gen_reg_rtx (SImode);
2615 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2616 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2617 emit_insn (gen_iorsi3 (op1, op1, op0));
2618 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2620 else if ((width + start_bit == 32)
2621 && !(const_ok_for_arm (mask)
2622 || const_ok_for_arm (~mask)))
2624 /* Similar trick, but slightly less efficient. */
2626 rtx op0 = gen_reg_rtx (SImode);
2627 rtx op1 = gen_reg_rtx (SImode);
2629 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2630 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2631 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2632 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2636 rtx op0 = gen_int_mode (mask, SImode);
2637 rtx op1 = gen_reg_rtx (SImode);
2638 rtx op2 = gen_reg_rtx (SImode);
2640 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2642 rtx tmp = gen_reg_rtx (SImode);
2644 emit_insn (gen_movsi (tmp, op0));
2648 /* Mask out any bits in operand[3] that are not needed. */
2649 emit_insn (gen_andsi3 (op1, operands[3], op0));
2651 if (GET_CODE (op0) == CONST_INT
2652 && (const_ok_for_arm (mask << start_bit)
2653 || const_ok_for_arm (~(mask << start_bit))))
2655 op0 = gen_int_mode (~(mask << start_bit), SImode);
2656 emit_insn (gen_andsi3 (op2, operands[0], op0));
2660 if (GET_CODE (op0) == CONST_INT)
2662 rtx tmp = gen_reg_rtx (SImode);
2664 emit_insn (gen_movsi (tmp, op0));
2669 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2671 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2675 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2677 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2680 if (subtarget != target)
2682 /* If TARGET is still a SUBREG, then it must be wider than a word,
2683 so we must be careful only to set the subword we were asked to. */
2684 if (GET_CODE (target) == SUBREG)
2685 emit_move_insn (target, subtarget);
2687 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield in operand 0: width in operand 1, start bit in
;; operand 2 (both constants accepted by constraint "M").  Emitted by
;; the insv expander when the inserted value is a constant zero —
;; presumably maps to the Thumb-2 BFC instruction; TODO confirm, as the
;; source operand, condition and template lines (original 2698-2700)
;; are elided from this extract.
2694 (define_insn "insv_zero"
2695 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2696 (match_operand:SI 1 "const_int_operand" "M")
2697 (match_operand:SI 2 "const_int_operand" "M"))
2701 [(set_attr "length" "4")
2702 (set_attr "predicable" "yes")]
;; Bitfield insert: copy the low bits of register operand 3 into
;; operand 0 at bit position %2 with width %1, using the Thumb-2 BFI
;; instruction (template "bfi%?\t%0, %3, %2, %1").  Predicable, 4 bytes.
;; NOTE(review): the insn condition line (original 2710) is elided
;; from this extract.
2705 (define_insn "insv_t2"
2706 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2707 (match_operand:SI 1 "const_int_operand" "M")
2708 (match_operand:SI 2 "const_int_operand" "M"))
2709 (match_operand:SI 3 "s_register_operand" "r"))]
2711 "bfi%?\t%0, %3, %2, %1"
2712 [(set_attr "length" "4")
2713 (set_attr "predicable" "yes")]
2716 ; constants for op 2 will never be given to these patterns.
2717 (define_insn_and_split "*anddi_notdi_di"
2718 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2719 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2720 (match_operand:DI 2 "s_register_operand" "r,0")))]
2723 "TARGET_32BIT && reload_completed
2724 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2725 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2726 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2727 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2730 operands[3] = gen_highpart (SImode, operands[0]);
2731 operands[0] = gen_lowpart (SImode, operands[0]);
2732 operands[4] = gen_highpart (SImode, operands[1]);
2733 operands[1] = gen_lowpart (SImode, operands[1]);
2734 operands[5] = gen_highpart (SImode, operands[2]);
2735 operands[2] = gen_lowpart (SImode, operands[2]);
2737 [(set_attr "length" "8")
2738 (set_attr "predicable" "yes")]
2741 (define_insn_and_split "*anddi_notzesidi_di"
2742 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2743 (and:DI (not:DI (zero_extend:DI
2744 (match_operand:SI 2 "s_register_operand" "r,r")))
2745 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2748 bic%?\\t%Q0, %Q1, %2
2750 ; (not (zero_extend ...)) allows us to just copy the high word from
2751 ; operand1 to operand0.
2754 && operands[0] != operands[1]"
2755 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2756 (set (match_dup 3) (match_dup 4))]
2759 operands[3] = gen_highpart (SImode, operands[0]);
2760 operands[0] = gen_lowpart (SImode, operands[0]);
2761 operands[4] = gen_highpart (SImode, operands[1]);
2762 operands[1] = gen_lowpart (SImode, operands[1]);
2764 [(set_attr "length" "4,8")
2765 (set_attr "predicable" "yes")]
2768 (define_insn_and_split "*anddi_notsesidi_di"
2769 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2770 (and:DI (not:DI (sign_extend:DI
2771 (match_operand:SI 2 "s_register_operand" "r,r")))
2772 (match_operand:DI 1 "s_register_operand" "0,r")))]
2775 "TARGET_32BIT && reload_completed"
2776 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2777 (set (match_dup 3) (and:SI (not:SI
2778 (ashiftrt:SI (match_dup 2) (const_int 31)))
2782 operands[3] = gen_highpart (SImode, operands[0]);
2783 operands[0] = gen_lowpart (SImode, operands[0]);
2784 operands[4] = gen_highpart (SImode, operands[1]);
2785 operands[1] = gen_lowpart (SImode, operands[1]);
2787 [(set_attr "length" "8")
2788 (set_attr "predicable" "yes")]
;; AND-with-complement: op0 = op1 & ~op2, a single BIC (bit clear)
;; instruction.  Note the operand numbering: the complemented input is
;; operand 2, matching BIC's second source operand.
;; NOTE(review): the insn condition line (original 2795) is elided
;; from this extract.
2791 (define_insn "andsi_notsi_si"
2792 [(set (match_operand:SI 0 "s_register_operand" "=r")
2793 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2794 (match_operand:SI 1 "s_register_operand" "r")))]
2796 "bic%?\\t%0, %1, %2"
2797 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC: op0 = ~op1 & op2.  Operand 2 is tied to the destination
;; (constraint "0") because the 16-bit Thumb BIC encoding is two-address;
;; sets the condition codes ("conds" "set"), 2-byte encoding.
;; NOTE(review): the condition and template lines (original 2804-2805)
;; are elided from this extract.
2800 (define_insn "thumb1_bicsi3"
2801 [(set (match_operand:SI 0 "register_operand" "=l")
2802 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2803 (match_operand:SI 2 "register_operand" "0")))]
2806 [(set_attr "length" "2")
2807 (set_attr "conds" "set")])
2809 (define_insn "andsi_not_shiftsi_si"
2810 [(set (match_operand:SI 0 "s_register_operand" "=r")
2811 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2812 [(match_operand:SI 2 "s_register_operand" "r")
2813 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2814 (match_operand:SI 1 "s_register_operand" "r")))]
2816 "bic%?\\t%0, %1, %2%S4"
2817 [(set_attr "predicable" "yes")
2818 (set_attr "shift" "2")
2819 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2820 (const_string "alu_shift")
2821 (const_string "alu_shift_reg")))]
2824 (define_insn "*andsi_notsi_si_compare0"
2825 [(set (reg:CC_NOOV CC_REGNUM)
2827 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2828 (match_operand:SI 1 "s_register_operand" "r"))
2830 (set (match_operand:SI 0 "s_register_operand" "=r")
2831 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2833 "bic%.\\t%0, %1, %2"
2834 [(set_attr "conds" "set")]
2837 (define_insn "*andsi_notsi_si_compare0_scratch"
2838 [(set (reg:CC_NOOV CC_REGNUM)
2840 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2841 (match_operand:SI 1 "s_register_operand" "r"))
2843 (clobber (match_scratch:SI 0 "=r"))]
2845 "bic%.\\t%0, %1, %2"
2846 [(set_attr "conds" "set")]
2849 (define_expand "iordi3"
2850 [(set (match_operand:DI 0 "s_register_operand" "")
2851 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2852 (match_operand:DI 2 "neon_logic_op2" "")))]
;; 64-bit inclusive OR of two core-register pairs, as two 32-bit ORRs
;; ("length" "8"); excluded when iWMMXt or Neon would handle DImode.
;; NOTE(review): the output-template line (original 2862) is elided
;; from this extract.
2857 (define_insn "*iordi3_insn"
2858 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2859 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2860 (match_operand:DI 2 "s_register_operand" "r,r")))]
2861 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2863 [(set_attr "length" "8")
2864 (set_attr "predicable" "yes")]
2867 (define_insn "*iordi_zesidi_di"
2868 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2869 (ior:DI (zero_extend:DI
2870 (match_operand:SI 2 "s_register_operand" "r,r"))
2871 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2874 orr%?\\t%Q0, %Q1, %2
2876 [(set_attr "length" "4,8")
2877 (set_attr "predicable" "yes")]
2880 (define_insn "*iordi_sesidi_di"
2881 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2882 (ior:DI (sign_extend:DI
2883 (match_operand:SI 2 "s_register_operand" "r,r"))
2884 (match_operand:DI 1 "s_register_operand" "0,r")))]
2887 [(set_attr "length" "8")
2888 (set_attr "predicable" "yes")]
2891 (define_expand "iorsi3"
2892 [(set (match_operand:SI 0 "s_register_operand" "")
2893 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2894 (match_operand:SI 2 "reg_or_int_operand" "")))]
2897 if (GET_CODE (operands[2]) == CONST_INT)
2901 arm_split_constant (IOR, SImode, NULL_RTX,
2902 INTVAL (operands[2]), operands[0], operands[1],
2903 optimize && can_create_pseudo_p ());
2906 else /* TARGET_THUMB1 */
2908 rtx tmp = force_reg (SImode, operands[2]);
2909 if (rtx_equal_p (operands[0], operands[1]))
2913 operands[2] = operands[1];
2921 (define_insn_and_split "*iorsi3_insn"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2923 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2924 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2928 orn%?\\t%0, %1, #%B2
2931 && GET_CODE (operands[2]) == CONST_INT
2932 && !(const_ok_for_arm (INTVAL (operands[2]))
2933 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2934 [(clobber (const_int 0))]
2936 arm_split_constant (IOR, SImode, curr_insn,
2937 INTVAL (operands[2]), operands[0], operands[1], 0);
2940 [(set_attr "length" "4,4,16")
2941 (set_attr "arch" "32,t2,32")
2942 (set_attr "predicable" "yes")])
2944 (define_insn "*thumb1_iorsi3_insn"
2945 [(set (match_operand:SI 0 "register_operand" "=l")
2946 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2947 (match_operand:SI 2 "register_operand" "l")))]
2950 [(set_attr "length" "2")
2951 (set_attr "conds" "set")])
2954 [(match_scratch:SI 3 "r")
2955 (set (match_operand:SI 0 "arm_general_register_operand" "")
2956 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2957 (match_operand:SI 2 "const_int_operand" "")))]
2959 && !const_ok_for_arm (INTVAL (operands[2]))
2960 && const_ok_for_arm (~INTVAL (operands[2]))"
2961 [(set (match_dup 3) (match_dup 2))
2962 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2966 (define_insn "*iorsi3_compare0"
2967 [(set (reg:CC_NOOV CC_REGNUM)
2968 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2969 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2971 (set (match_operand:SI 0 "s_register_operand" "=r")
2972 (ior:SI (match_dup 1) (match_dup 2)))]
2974 "orr%.\\t%0, %1, %2"
2975 [(set_attr "conds" "set")]
2978 (define_insn "*iorsi3_compare0_scratch"
2979 [(set (reg:CC_NOOV CC_REGNUM)
2980 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2981 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2983 (clobber (match_scratch:SI 0 "=r"))]
2985 "orr%.\\t%0, %1, %2"
2986 [(set_attr "conds" "set")]
2989 (define_expand "xordi3"
2990 [(set (match_operand:DI 0 "s_register_operand" "")
2991 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2992 (match_operand:DI 2 "s_register_operand" "")))]
;; 64-bit exclusive OR of two core-register pairs, as two 32-bit EORs
;; ("length" "8"); excluded when iWMMXt or Neon would handle DImode.
;; NOTE(review): the output-template line (original 3002) is elided
;; from this extract.
2997 (define_insn "*xordi3_insn"
2998 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2999 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
3000 (match_operand:DI 2 "s_register_operand" "r,r")))]
3001 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
3003 [(set_attr "length" "8")
3004 (set_attr "predicable" "yes")]
3007 (define_insn "*xordi_zesidi_di"
3008 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3009 (xor:DI (zero_extend:DI
3010 (match_operand:SI 2 "s_register_operand" "r,r"))
3011 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3014 eor%?\\t%Q0, %Q1, %2
3016 [(set_attr "length" "4,8")
3017 (set_attr "predicable" "yes")]
3020 (define_insn "*xordi_sesidi_di"
3021 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3022 (xor:DI (sign_extend:DI
3023 (match_operand:SI 2 "s_register_operand" "r,r"))
3024 (match_operand:DI 1 "s_register_operand" "0,r")))]
3027 [(set_attr "length" "8")
3028 (set_attr "predicable" "yes")]
3031 (define_expand "xorsi3"
3032 [(set (match_operand:SI 0 "s_register_operand" "")
3033 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3034 (match_operand:SI 2 "reg_or_int_operand" "")))]
3036 "if (GET_CODE (operands[2]) == CONST_INT)
3040 arm_split_constant (XOR, SImode, NULL_RTX,
3041 INTVAL (operands[2]), operands[0], operands[1],
3042 optimize && can_create_pseudo_p ());
3045 else /* TARGET_THUMB1 */
3047 rtx tmp = force_reg (SImode, operands[2]);
3048 if (rtx_equal_p (operands[0], operands[1]))
3052 operands[2] = operands[1];
;; 32-bit exclusive OR: op0 = op1 ^ op2, a single EOR instruction.
;; Operand 2 may be a register or an immediate valid for ARM data
;; processing (constraint "rI").  Predicable.
;; NOTE(review): the insn condition line (original 3063) is elided
;; from this extract.
3059 (define_insn "*arm_xorsi3"
3060 [(set (match_operand:SI 0 "s_register_operand" "=r")
3061 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3062 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
3064 "eor%?\\t%0, %1, %2"
3065 [(set_attr "predicable" "yes")]
;; Thumb-1 EOR: two-address 16-bit encoding, so operand 1 is tied to the
;; destination ("%0"); sets the condition codes, 2-byte encoding.
;; NOTE(review): the condition and template lines (original 3072-3073)
;; are elided from this extract.
3068 (define_insn "*thumb1_xorsi3_insn"
3069 [(set (match_operand:SI 0 "register_operand" "=l")
3070 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3071 (match_operand:SI 2 "register_operand" "l")))]
3074 [(set_attr "length" "2")
3075 (set_attr "conds" "set")])
3077 (define_insn "*xorsi3_compare0"
3078 [(set (reg:CC_NOOV CC_REGNUM)
3079 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3080 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3082 (set (match_operand:SI 0 "s_register_operand" "=r")
3083 (xor:SI (match_dup 1) (match_dup 2)))]
3085 "eor%.\\t%0, %1, %2"
3086 [(set_attr "conds" "set")]
3089 (define_insn "*xorsi3_compare0_scratch"
3090 [(set (reg:CC_NOOV CC_REGNUM)
3091 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3092 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3096 [(set_attr "conds" "set")]
3099 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3100 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; Splitter implementing the transformation described above: the
;; De Morgan rewrite (~A & ~B) | C  ==  ~((A | B) & ~C).  The
;; intermediate (A | B) & ~C goes into the clobbered scratch
;; (operand 4); the final NOT of it may later combine with a
;; following instruction.
3104 [(set (match_operand:SI 0 "s_register_operand" "")
3105 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3106 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3107 (match_operand:SI 3 "arm_rhs_operand" "")))
3108 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3110 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3111 (not:SI (match_dup 3))))
3112 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C computed directly as ORR then BIC (two instructions,
;; length 8, both conditionally executable — ce_count 2).  The
;; earlyclobber "&r" destination keeps %0 distinct from inputs still
;; read by the second (BIC) instruction.
3116 (define_insn "*andsi_iorsi3_notsi"
3117 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3118 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3119 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3120 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3122 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3123 [(set_attr "length" "8")
3124 (set_attr "ce_count" "2")
3125 (set_attr "predicable" "yes")]
3128 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3129 ; insns are available?
;; The four splitters below handle a logical operation that combines a
;; bit-field extract (zero_extract or sign_extract) with a matching
;; logical op on a shifted register, rewriting the extract as an
;; explicit shift pair through the scratch (operand 8).  All four
;; require the outer and inner logical operators to be identical
;; (GET_CODE (operands[1]) == GET_CODE (operands[9])) and the extract
;; width (operand 3) plus the shift count (operand 6) to sum to 32.
;; The condition-string and output-pattern lines are partly not
;; visible in this excerpt, so the residual details are as per the
;; preparation statement: operand 4 becomes the left-shift count
;; 32 - (width + bitpos).
;;
;; Variant 1: zero_extract on the left, inner op uses lshiftrt.
3131 [(set (match_operand:SI 0 "s_register_operand" "")
3132 (match_operator:SI 1 "logical_binary_operator"
3133 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3134 (match_operand:SI 3 "const_int_operand" "")
3135 (match_operand:SI 4 "const_int_operand" ""))
3136 (match_operator:SI 9 "logical_binary_operator"
3137 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3138 (match_operand:SI 6 "const_int_operand" ""))
3139 (match_operand:SI 7 "s_register_operand" "")])]))
3140 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3142 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3143 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3146 [(ashift:SI (match_dup 2) (match_dup 4))
3150 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3153 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 2: mirror image of variant 1 — zero_extract on the right.
3157 [(set (match_operand:SI 0 "s_register_operand" "")
3158 (match_operator:SI 1 "logical_binary_operator"
3159 [(match_operator:SI 9 "logical_binary_operator"
3160 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3161 (match_operand:SI 6 "const_int_operand" ""))
3162 (match_operand:SI 7 "s_register_operand" "")])
3163 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3164 (match_operand:SI 3 "const_int_operand" "")
3165 (match_operand:SI 4 "const_int_operand" ""))]))
3166 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3168 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3169 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3172 [(ashift:SI (match_dup 2) (match_dup 4))
3176 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3179 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 3: as variant 1 but signed — sign_extract with ashiftrt.
3183 [(set (match_operand:SI 0 "s_register_operand" "")
3184 (match_operator:SI 1 "logical_binary_operator"
3185 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3186 (match_operand:SI 3 "const_int_operand" "")
3187 (match_operand:SI 4 "const_int_operand" ""))
3188 (match_operator:SI 9 "logical_binary_operator"
3189 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3190 (match_operand:SI 6 "const_int_operand" ""))
3191 (match_operand:SI 7 "s_register_operand" "")])]))
3192 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3194 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3195 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3198 [(ashift:SI (match_dup 2) (match_dup 4))
3202 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3205 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 4: mirror image of variant 3 — sign_extract on the right.
3209 [(set (match_operand:SI 0 "s_register_operand" "")
3210 (match_operator:SI 1 "logical_binary_operator"
3211 [(match_operator:SI 9 "logical_binary_operator"
3212 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3213 (match_operand:SI 6 "const_int_operand" ""))
3214 (match_operand:SI 7 "s_register_operand" "")])
3215 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3216 (match_operand:SI 3 "const_int_operand" "")
3217 (match_operand:SI 4 "const_int_operand" ""))]))
3218 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3220 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3221 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3224 [(ashift:SI (match_dup 2) (match_dup 4))
3228 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3231 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3235 ;; Minimum and maximum insns
;; smax expander.  max(x, 0) and max(x, -1) each have a single-insn
;; implementation below that does not touch the flags, so for those
;; two special cases a plain SET is emitted instead of the
;; CC-clobbering parallel.
3237 (define_expand "smaxsi3"
3239 (set (match_operand:SI 0 "s_register_operand" "")
3240 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3241 (match_operand:SI 2 "arm_rhs_operand" "")))
3242 (clobber (reg:CC CC_REGNUM))])]
3245 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3247 /* No need for a clobber of the condition code register here. */
3248 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3249 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): BIC with (x asr #31).  The mask is all-ones when x is
;; negative (clearing the result to 0) and zero otherwise (keeping x).
3255 (define_insn "*smax_0"
3256 [(set (match_operand:SI 0 "s_register_operand" "=r")
3257 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3260 "bic%?\\t%0, %1, %1, asr #31"
3261 [(set_attr "predicable" "yes")]
;; max(x, -1): ORR with (x asr #31) — gives -1 when x is negative,
;; x unchanged otherwise.
3264 (define_insn "*smax_m1"
3265 [(set (match_operand:SI 0 "s_register_operand" "=r")
3266 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3269 "orr%?\\t%0, %1, %1, asr #31"
3270 [(set_attr "predicable" "yes")]
;; General signed max: CMP then conditional move(s); clobbers the
;; flags.  Alternative 0 ties operand 1 to the destination, so a
;; single MOVLT suffices (length 8); alternative 1 needs both MOVGE
;; and MOVLT (length 12).
3273 (define_insn "*arm_smax_insn"
3274 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3275 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3276 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3277 (clobber (reg:CC CC_REGNUM))]
3280 cmp\\t%1, %2\;movlt\\t%0, %2
3281 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3282 [(set_attr "conds" "clob")
3283 (set_attr "length" "8,12")]
;; smin expander — same structure as smaxsi3; only min(x, 0) has a
;; flag-free single-insn form (*smin_0).
3286 (define_expand "sminsi3"
3288 (set (match_operand:SI 0 "s_register_operand" "")
3289 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3290 (match_operand:SI 2 "arm_rhs_operand" "")))
3291 (clobber (reg:CC CC_REGNUM))])]
3294 if (operands[2] == const0_rtx)
3296 /* No need for a clobber of the condition code register here. */
3297 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3298 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): AND with (x asr #31) — x when negative, 0 otherwise.
3304 (define_insn "*smin_0"
3305 [(set (match_operand:SI 0 "s_register_operand" "=r")
3306 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3309 "and%?\\t%0, %1, %1, asr #31"
3310 [(set_attr "predicable" "yes")]
;; General signed min: CMP plus MOVGE/MOVLT, mirroring *arm_smax_insn.
3313 (define_insn "*arm_smin_insn"
3314 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3315 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3316 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3317 (clobber (reg:CC CC_REGNUM))]
3320 cmp\\t%1, %2\;movge\\t%0, %2
3321 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3322 [(set_attr "conds" "clob")
3323 (set_attr "length" "8,12")]
;; Unsigned max expander: always goes through the CC-clobbering insn.
3326 (define_expand "umaxsi3"
3328 (set (match_operand:SI 0 "s_register_operand" "")
3329 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3330 (match_operand:SI 2 "arm_rhs_operand" "")))
3331 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: CMP then MOVCC/MOVCS (unsigned condition codes).
;; Three alternatives: op1 tied to dest, op2 tied to dest, or neither
;; (the last needing both conditional moves, length 12).
3336 (define_insn "*arm_umaxsi3"
3337 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3338 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3339 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3340 (clobber (reg:CC CC_REGNUM))]
3343 cmp\\t%1, %2\;movcc\\t%0, %2
3344 cmp\\t%1, %2\;movcs\\t%0, %1
3345 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3346 [(set_attr "conds" "clob")
3347 (set_attr "length" "8,8,12")]
;; Unsigned min expander.
3350 (define_expand "uminsi3"
3352 (set (match_operand:SI 0 "s_register_operand" "")
3353 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3354 (match_operand:SI 2 "arm_rhs_operand" "")))
3355 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: mirror of *arm_umaxsi3 with the conditions swapped.
3360 (define_insn "*arm_uminsi3"
3361 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3362 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3363 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3364 (clobber (reg:CC CC_REGNUM))]
3367 cmp\\t%1, %2\;movcs\\t%0, %2
3368 cmp\\t%1, %2\;movcc\\t%0, %1
3369 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3370 [(set_attr "conds" "clob")
3371 (set_attr "length" "8,8,12")]
;; Store a min/max result straight to memory without materializing it
;; in a register: CMP, then two conditionally-executed stores (with an
;; ITE block emitted first on Thumb-2).  Operand 3 is rebuilt as a
;; two-operand comparison so the %d3/%D3 condition printers work.
3374 (define_insn "*store_minmaxsi"
3375 [(set (match_operand:SI 0 "memory_operand" "=m")
3376 (match_operator:SI 3 "minmax_operator"
3377 [(match_operand:SI 1 "s_register_operand" "r")
3378 (match_operand:SI 2 "s_register_operand" "r")]))
3379 (clobber (reg:CC CC_REGNUM))]
3382 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3383 operands[1], operands[2]);
3384 output_asm_insn (\"cmp\\t%1, %2\", operands);
3386 output_asm_insn (\"ite\t%d3\", operands);
3387 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3388 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3391 [(set_attr "conds" "clob")
3392 (set (attr "length")
3393 (if_then_else (eq_attr "is_thumb" "yes")
3396 (set_attr "type" "store1")]
3399 ; Reject the frame pointer in operand[1], since reloading this after
3400 ; it has been eliminated can cause carnage.
;; Combine a min/max with a shiftable arithmetic/logical operator
;; (operand 4) in one sequence: CMP on the min/max inputs, then
;; predicated ops.  The which_alternative test detects the case where
;; operand 1 is already in %0 and the op against 0 with PLUS/IOR/XOR
;; needs only part of the general sequence (the fast-path output lines
;; are not fully visible in this excerpt).  Thumb-2 gets ITE/IT blocks
;; before the predicated instructions.
3401 (define_insn "*minmax_arithsi"
3402 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3403 (match_operator:SI 4 "shiftable_operator"
3404 [(match_operator:SI 5 "minmax_operator"
3405 [(match_operand:SI 2 "s_register_operand" "r,r")
3406 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3407 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3408 (clobber (reg:CC CC_REGNUM))]
3409 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3412 enum rtx_code code = GET_CODE (operands[4]);
3415 if (which_alternative != 0 || operands[3] != const0_rtx
3416 || (code != PLUS && code != IOR && code != XOR))
3421 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3422 operands[2], operands[3]);
3423 output_asm_insn (\"cmp\\t%2, %3\", operands);
3427 output_asm_insn (\"ite\\t%d5\", operands);
3429 output_asm_insn (\"it\\t%d5\", operands);
3431 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3433 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3436 [(set_attr "conds" "clob")
3437 (set (attr "length")
3438 (if_then_else (eq_attr "is_thumb" "yes")
3444 ;; Shift and rotation insns
;; DImode shift-left expander.  A constant shift by exactly 1 uses the
;; dedicated carry-chain insn below; otherwise, for non-iwmmxt /
;; non-Maverick configurations the generic expansion is taken (the
;; FAIL/fall-through lines are not fully visible in this excerpt).
3446 (define_expand "ashldi3"
3447 [(set (match_operand:DI 0 "s_register_operand" "")
3448 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3449 (match_operand:SI 2 "reg_or_int_operand" "")))]
3452 if (GET_CODE (operands[2]) == CONST_INT
3454 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3456 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3459 /* Ideally we shouldn't fail here if we could know that operands[1]
3460 ends up already living in an iwmmxt register. Otherwise it's
3461 cheaper to have the alternate code being generated than moving
3462 values to iwmmxt regs and back. */
3465 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DI << 1: shift the low word (MOVS sets the carry to the bit shifted
;; out), then ADC doubles the high word and adds the carried-over bit.
3470 (define_insn "arm_ashldi3_1bit"
3471 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3472 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3474 (clobber (reg:CC CC_REGNUM))]
3476 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3477 [(set_attr "conds" "clob")
3478 (set_attr "length" "8")]
;; SImode shift-left expander: a constant count above 31 yields 0
;; directly (the hardware shifter semantics differ, so normalize here).
3481 (define_expand "ashlsi3"
3482 [(set (match_operand:SI 0 "s_register_operand" "")
3483 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3484 (match_operand:SI 2 "arm_rhs_operand" "")))]
3487 if (GET_CODE (operands[2]) == CONST_INT
3488 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3490 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 shift-left: immediate (N) or register count; flag-setting.
3496 (define_insn "*thumb1_ashlsi3"
3497 [(set (match_operand:SI 0 "register_operand" "=l,l")
3498 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3499 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3502 [(set_attr "length" "2")
3503 (set_attr "conds" "set")])
;; DImode arithmetic shift-right expander, structured like ashldi3.
3505 (define_expand "ashrdi3"
3506 [(set (match_operand:DI 0 "s_register_operand" "")
3507 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3508 (match_operand:SI 2 "reg_or_int_operand" "")))]
3511 if (GET_CODE (operands[2]) == CONST_INT)
3513 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3515 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3518 /* Ideally we shouldn't fail here if we could know that operands[1]
3519 ends up already living in an iwmmxt register. Otherwise it's
3520 cheaper to have the alternate code being generated than moving
3521 values to iwmmxt regs and back. */
3524 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (arithmetic): MOVS shifts the high word right (carry gets
;; the bit shifted out), then RRX rotates that carry into the top of
;; the low word.
3529 (define_insn "arm_ashrdi3_1bit"
3530 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3531 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3533 (clobber (reg:CC CC_REGNUM))]
3535 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3536 [(set_attr "conds" "clob")
3537 (set_attr "insn" "mov")
3538 (set_attr "length" "8")]
;; SImode arithmetic shift-right: counts above 31 are clamped to 31
;; (an arithmetic shift saturates to the sign bit).
3541 (define_expand "ashrsi3"
3542 [(set (match_operand:SI 0 "s_register_operand" "")
3543 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3544 (match_operand:SI 2 "arm_rhs_operand" "")))]
3547 if (GET_CODE (operands[2]) == CONST_INT
3548 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3549 operands[2] = GEN_INT (31);
;; Thumb-1 arithmetic shift-right.
3553 (define_insn "*thumb1_ashrsi3"
3554 [(set (match_operand:SI 0 "register_operand" "=l,l")
3555 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3556 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3559 [(set_attr "length" "2")
3560 (set_attr "conds" "set")])
;; DImode logical shift-right expander, structured like ashrdi3.
3562 (define_expand "lshrdi3"
3563 [(set (match_operand:DI 0 "s_register_operand" "")
3564 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3565 (match_operand:SI 2 "reg_or_int_operand" "")))]
3568 if (GET_CODE (operands[2]) == CONST_INT)
3570 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3572 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3575 /* Ideally we shouldn't fail here if we could know that operands[1]
3576 ends up already living in an iwmmxt register. Otherwise it's
3577 cheaper to have the alternate code being generated than moving
3578 values to iwmmxt regs and back. */
3581 else if (!TARGET_REALLY_IWMMXT)
;; DI >> 1 (logical): as the arithmetic version but using LSR on the
;; high word, again passing the carry into the low word via RRX.
3586 (define_insn "arm_lshrdi3_1bit"
3587 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3588 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3590 (clobber (reg:CC CC_REGNUM))]
3592 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3593 [(set_attr "conds" "clob")
3594 (set_attr "insn" "mov")
3595 (set_attr "length" "8")]
;; SImode logical shift-right: counts above 31 give 0 directly.
3598 (define_expand "lshrsi3"
3599 [(set (match_operand:SI 0 "s_register_operand" "")
3600 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3601 (match_operand:SI 2 "arm_rhs_operand" "")))]
3604 if (GET_CODE (operands[2]) == CONST_INT
3605 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3607 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 logical shift-right.
3613 (define_insn "*thumb1_lshrsi3"
3614 [(set (match_operand:SI 0 "register_operand" "=l,l")
3615 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3616 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3619 [(set_attr "length" "2")
3620 (set_attr "conds" "set")])
;; Rotate-left has no ARM instruction; implement it as rotate-right by
;; (32 - n) mod 32.  For a register count a temporary holds 32 - n.
3622 (define_expand "rotlsi3"
3623 [(set (match_operand:SI 0 "s_register_operand" "")
3624 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3625 (match_operand:SI 2 "reg_or_int_operand" "")))]
3628 if (GET_CODE (operands[2]) == CONST_INT)
3629 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3632 rtx reg = gen_reg_rtx (SImode);
3633 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate-right expander: constant counts are reduced mod 32; Thumb-1
;; only accepts a register count, so constants are forced into a reg.
3639 (define_expand "rotrsi3"
3640 [(set (match_operand:SI 0 "s_register_operand" "")
3641 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3642 (match_operand:SI 2 "arm_rhs_operand" "")))]
3647 if (GET_CODE (operands[2]) == CONST_INT
3648 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3649 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3651 else /* TARGET_THUMB1 */
3653 if (GET_CODE (operands [2]) == CONST_INT)
3654 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 rotate-right: two-address, register count only.
3659 (define_insn "*thumb1_rotrsi3"
3660 [(set (match_operand:SI 0 "register_operand" "=l")
3661 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3662 (match_operand:SI 2 "register_operand" "l")))]
3665 [(set_attr "length" "2")]
;; Generic 32-bit shift insn: any shift_operator with an immediate (M)
;; or register count; assembly produced by arm_output_shift.  The type
;; attribute distinguishes constant-count (alu_shift) from
;; register-count (alu_shift_reg) scheduling.
3668 (define_insn "*arm_shiftsi3"
3669 [(set (match_operand:SI 0 "s_register_operand" "=r")
3670 (match_operator:SI 3 "shift_operator"
3671 [(match_operand:SI 1 "s_register_operand" "r")
3672 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3674 "* return arm_output_shift(operands, 0);"
3675 [(set_attr "predicable" "yes")
3676 (set_attr "shift" "1")
3677 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3678 (const_string "alu_shift")
3679 (const_string "alu_shift_reg")))]
;; Flag-setting shift: result written and CC_NOOV comparison against 0
;; recorded (arm_output_shift called with flag-setting = 1).
3682 (define_insn "*shiftsi3_compare0"
3683 [(set (reg:CC_NOOV CC_REGNUM)
3684 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3685 [(match_operand:SI 1 "s_register_operand" "r")
3686 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3688 (set (match_operand:SI 0 "s_register_operand" "=r")
3689 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3691 "* return arm_output_shift(operands, 1);"
3692 [(set_attr "conds" "set")
3693 (set_attr "shift" "1")
3694 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3695 (const_string "alu_shift")
3696 (const_string "alu_shift_reg")))]
;; As above but only the flags are needed; the shifted value goes to a
;; scratch register.
3699 (define_insn "*shiftsi3_compare0_scratch"
3700 [(set (reg:CC_NOOV CC_REGNUM)
3701 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3702 [(match_operand:SI 1 "s_register_operand" "r")
3703 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3705 (clobber (match_scratch:SI 0 "=r"))]
3707 "* return arm_output_shift(operands, 1);"
3708 [(set_attr "conds" "set")
3709 (set_attr "shift" "1")]
;; MVN of a shifted operand (single instruction: MVN with shifter
;; operand).  Second alternative (register shift count) is ARM-state
;; only ("arch" 32,a).
3712 (define_insn "*not_shiftsi"
3713 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3714 (not:SI (match_operator:SI 3 "shift_operator"
3715 [(match_operand:SI 1 "s_register_operand" "r,r")
3716 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3719 [(set_attr "predicable" "yes")
3720 (set_attr "shift" "1")
3721 (set_attr "insn" "mvn")
3722 (set_attr "arch" "32,a")
3723 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVN-of-shift that also sets the flags (MVNS), keeping the result.
3725 (define_insn "*not_shiftsi_compare0"
3726 [(set (reg:CC_NOOV CC_REGNUM)
3728 (not:SI (match_operator:SI 3 "shift_operator"
3729 [(match_operand:SI 1 "s_register_operand" "r,r")
3730 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3732 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3733 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3736 [(set_attr "conds" "set")
3737 (set_attr "shift" "1")
3738 (set_attr "insn" "mvn")
3739 (set_attr "arch" "32,a")
3740 (set_attr "type" "alu_shift,alu_shift_reg")])
;; MVN-of-shift for the flags only; the value lands in a scratch.
3742 (define_insn "*not_shiftsi_compare0_scratch"
3743 [(set (reg:CC_NOOV CC_REGNUM)
3745 (not:SI (match_operator:SI 3 "shift_operator"
3746 [(match_operand:SI 1 "s_register_operand" "r,r")
3747 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3749 (clobber (match_scratch:SI 0 "=r,r"))]
3752 [(set_attr "conds" "set")
3753 (set_attr "shift" "1")
3754 (set_attr "insn" "mvn")
3755 (set_attr "arch" "32,a")
3756 (set_attr "type" "alu_shift,alu_shift_reg")])
3758 ;; We don't really have extzv, but defining this using shifts helps
3759 ;; to reduce register pressure later on.
;; Zero-extract expander.  Strategy, in order:
;;  - Thumb-2 with unaligned_access: 16/32-bit byte-aligned fields in
;;    memory become unaligned loads (with a big-endian bitpos fixup and
;;    care for paradoxical subreg destinations);
;;  - Thumb-2 register source: UBFX via extzv_t2;
;;  - otherwise (Thumb-1): shift-left then logical-shift-right through
;;    extzv_t1, or a single LSR when the field reaches bit 31.
3761 (define_expand "extzv"
3762 [(set (match_operand 0 "s_register_operand" "")
3763 (zero_extract (match_operand 1 "nonimmediate_operand" "")
3764 (match_operand 2 "const_int_operand" "")
3765 (match_operand 3 "const_int_operand" "")))]
3766 "TARGET_THUMB1 || arm_arch_thumb2"
3769 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3770 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3772 if (arm_arch_thumb2)
3774 HOST_WIDE_INT width = INTVAL (operands[2]);
3775 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3777 if (unaligned_access && MEM_P (operands[1])
3778 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3782 if (BYTES_BIG_ENDIAN)
3783 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3788 base_addr = adjust_address (operands[1], SImode,
3789 bitpos / BITS_PER_UNIT);
3790 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3794 rtx dest = operands[0];
3795 rtx tmp = gen_reg_rtx (SImode);
3797 /* We may get a paradoxical subreg here. Strip it off. */
3798 if (GET_CODE (dest) == SUBREG
3799 && GET_MODE (dest) == SImode
3800 && GET_MODE (SUBREG_REG (dest)) == HImode)
3801 dest = SUBREG_REG (dest);
3803 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3806 base_addr = adjust_address (operands[1], HImode,
3807 bitpos / BITS_PER_UNIT);
3808 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3809 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3813 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3815 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3823 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3826 operands[3] = GEN_INT (rshift);
3830 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3834 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3835 operands[3], gen_reg_rtx (SImode)));
3840 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; extzv_t1: position the field at the top with an ASHIFT into the
;; scratch (operand 4), then LSHIFTRT down to bit 0.
3842 (define_expand "extzv_t1"
3843 [(set (match_operand:SI 4 "s_register_operand" "")
3844 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3845 (match_operand:SI 2 "const_int_operand" "")))
3846 (set (match_operand:SI 0 "s_register_operand" "")
3847 (lshiftrt:SI (match_dup 4)
3848 (match_operand:SI 3 "const_int_operand" "")))]
;; Sign-extract expander — same memory fast-path shape as extzv (with
;; a signed 16-bit unaligned load), else SBFX through extv_regsi for
;; SImode register operands.
3852 (define_expand "extv"
3853 [(set (match_operand 0 "s_register_operand" "")
3854 (sign_extract (match_operand 1 "nonimmediate_operand" "")
3855 (match_operand 2 "const_int_operand" "")
3856 (match_operand 3 "const_int_operand" "")))]
3859 HOST_WIDE_INT width = INTVAL (operands[2]);
3860 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3862 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3863 && (bitpos % BITS_PER_UNIT) == 0)
3867 if (BYTES_BIG_ENDIAN)
3868 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3872 base_addr = adjust_address (operands[1], SImode,
3873 bitpos / BITS_PER_UNIT);
3874 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3878 rtx dest = operands[0];
3879 rtx tmp = gen_reg_rtx (SImode);
3881 /* We may get a paradoxical subreg here. Strip it off. */
3882 if (GET_CODE (dest) == SUBREG
3883 && GET_MODE (dest) == SImode
3884 && GET_MODE (SUBREG_REG (dest)) == HImode)
3885 dest = SUBREG_REG (dest);
3887 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3890 base_addr = adjust_address (operands[1], HImode,
3891 bitpos / BITS_PER_UNIT);
3892 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3893 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3898 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3900 else if (GET_MODE (operands[0]) == SImode
3901 && GET_MODE (operands[1]) == SImode)
3903 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3911 ; Helper to expand register forms of extv with the proper modes.
3913 (define_expand "extv_regsi"
3914 [(set (match_operand:SI 0 "s_register_operand" "")
3915 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
3916 (match_operand 2 "const_int_operand" "")
3917 (match_operand 3 "const_int_operand" "")))]
3922 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; The UNSPEC wrappers stop the optimizers from merging these accesses
;; into forms that would require alignment.  Alternative 0 ("Uw"/"l")
;; is the 16-bit Thumb-2 encoding; alternative 1 the generic 32-bit one.
;; Word load, any alignment.
3924 (define_insn "unaligned_loadsi"
3925 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3926 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
3927 UNSPEC_UNALIGNED_LOAD))]
3928 "unaligned_access && TARGET_32BIT"
3929 "ldr%?\t%0, %1\t@ unaligned"
3930 [(set_attr "arch" "t2,any")
3931 (set_attr "length" "2,4")
3932 (set_attr "predicable" "yes")
3933 (set_attr "type" "load1")])
;; Halfword load, sign-extended (LDRSH), any alignment.
3935 (define_insn "unaligned_loadhis"
3936 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3938 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3939 UNSPEC_UNALIGNED_LOAD)))]
3940 "unaligned_access && TARGET_32BIT"
3941 "ldr%(sh%)\t%0, %1\t@ unaligned"
3942 [(set_attr "arch" "t2,any")
3943 (set_attr "length" "2,4")
3944 (set_attr "predicable" "yes")
3945 (set_attr "type" "load_byte")])
;; Halfword load, zero-extended (LDRH), any alignment.
3947 (define_insn "unaligned_loadhiu"
3948 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3950 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3951 UNSPEC_UNALIGNED_LOAD)))]
3952 "unaligned_access && TARGET_32BIT"
3953 "ldr%(h%)\t%0, %1\t@ unaligned"
3954 [(set_attr "arch" "t2,any")
3955 (set_attr "length" "2,4")
3956 (set_attr "predicable" "yes")
3957 (set_attr "type" "load_byte")])
;; Word store, any alignment.
3959 (define_insn "unaligned_storesi"
3960 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
3961 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
3962 UNSPEC_UNALIGNED_STORE))]
3963 "unaligned_access && TARGET_32BIT"
3964 "str%?\t%1, %0\t@ unaligned"
3965 [(set_attr "arch" "t2,any")
3966 (set_attr "length" "2,4")
3967 (set_attr "predicable" "yes")
3968 (set_attr "type" "store1")])
;; Halfword store (STRH), any alignment.
3970 (define_insn "unaligned_storehi"
3971 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
3972 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
3973 UNSPEC_UNALIGNED_STORE))]
3974 "unaligned_access && TARGET_32BIT"
3975 "str%(h%)\t%1, %0\t@ unaligned"
3976 [(set_attr "arch" "t2,any")
3977 (set_attr "length" "2,4")
3978 (set_attr "predicable" "yes")
3979 (set_attr "type" "store1")])
;; Signed bit-field extract: SBFX dest, src, lsb (%3), width (%2).
3981 (define_insn "*extv_reg"
3982 [(set (match_operand:SI 0 "s_register_operand" "=r")
3983 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3984 (match_operand:SI 2 "const_int_operand" "M")
3985 (match_operand:SI 3 "const_int_operand" "M")))]
3987 "sbfx%?\t%0, %1, %3, %2"
3988 [(set_attr "length" "4")
3989 (set_attr "predicable" "yes")]
;; Unsigned bit-field extract: UBFX dest, src, lsb (%3), width (%2).
3992 (define_insn "extzv_t2"
3993 [(set (match_operand:SI 0 "s_register_operand" "=r")
3994 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3995 (match_operand:SI 2 "const_int_operand" "M")
3996 (match_operand:SI 3 "const_int_operand" "M")))]
3998 "ubfx%?\t%0, %1, %3, %2"
3999 [(set_attr "length" "4")
4000 (set_attr "predicable" "yes")]
4004 ;; Division instructions
;; Signed hardware divide (SDIV); condition line not visible here —
;; presumably guarded by the integer-divide capability test.
4005 (define_insn "divsi3"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r")
4007 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4008 (match_operand:SI 2 "s_register_operand" "r")))]
4010 "sdiv%?\t%0, %1, %2"
4011 [(set_attr "predicable" "yes")
4012 (set_attr "insn" "sdiv")]
;; Unsigned hardware divide (UDIV).
4015 (define_insn "udivsi3"
4016 [(set (match_operand:SI 0 "s_register_operand" "=r")
4017 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4018 (match_operand:SI 2 "s_register_operand" "r")))]
4020 "udiv%?\t%0, %1, %2"
4021 [(set_attr "predicable" "yes")
4022 (set_attr "insn" "udiv")]
4026 ;; Unary arithmetic insns
;; DImode negate expander: parallel SET + CC clobber, matched by the
;; per-arch insns below.
4028 (define_expand "negdi2"
4030 [(set (match_operand:DI 0 "s_register_operand" "")
4031 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4032 (clobber (reg:CC CC_REGNUM))])]
4037 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4038 ;; The first alternative allows the common case of a *full* overlap.
;; ARM DI negate: RSBS 0 - low word (sets borrow), RSC 0 - high word
;; with the borrow propagated.
4039 (define_insn "*arm_negdi2"
4040 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4041 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4042 (clobber (reg:CC CC_REGNUM))]
4044 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4045 [(set_attr "conds" "clob")
4046 (set_attr "length" "8")]
;; Thumb-1 DI negate: zero the high word, NEG the low word (sets the
;; borrow), then SBC subtracts the high word with borrow.  Earlyclobber
;; because %R0 is written before %Q1/%R1 are fully consumed.
4049 (define_insn "*thumb1_negdi2"
4050 [(set (match_operand:DI 0 "register_operand" "=&l")
4051 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4052 (clobber (reg:CC CC_REGNUM))]
4054 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4055 [(set_attr "length" "6")]
;; SImode negate expander (no CC clobber needed — RSB suffices).
4058 (define_expand "negsi2"
4059 [(set (match_operand:SI 0 "s_register_operand" "")
4060 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; -x as reverse-subtract from zero.
4065 (define_insn "*arm_negsi2"
4066 [(set (match_operand:SI 0 "s_register_operand" "=r")
4067 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4069 "rsb%?\\t%0, %1, #0"
4070 [(set_attr "predicable" "yes")]
;; Thumb-1 NEG (16-bit encoding).
4073 (define_insn "*thumb1_negsi2"
4074 [(set (match_operand:SI 0 "register_operand" "=l")
4075 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4078 [(set_attr "length" "2")]
;; FP negate expanders — handled by the FPA/VFP patterns elsewhere.
4081 (define_expand "negsf2"
4082 [(set (match_operand:SF 0 "s_register_operand" "")
4083 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4084 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4088 (define_expand "negdf2"
4089 [(set (match_operand:DF 0 "s_register_operand" "")
4090 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4091 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4094 ;; abssi2 doesn't really clobber the condition codes if a different register
4095 ;; is being set. To keep things simple, assume during rtl manipulations that
4096 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: operand 2 is either a harmless SCRATCH or the real
;; CC register, per the comment above (selection logic partly elided).
4099 (define_expand "abssi2"
4101 [(set (match_operand:SI 0 "s_register_operand" "")
4102 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4103 (clobber (match_dup 2))])]
4107 operands[2] = gen_rtx_SCRATCH (SImode);
4109 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs(x): either CMP + conditional RSB (clobbers flags, in-place), or
;; the branch-free EOR/SUB with (x asr #31) — flip the bits when
;; negative, then subtract the sign mask (two's-complement abs).
4112 (define_insn "*arm_abssi2"
4113 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4114 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4115 (clobber (reg:CC CC_REGNUM))]
4118 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4119 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4120 [(set_attr "conds" "clob,*")
4121 (set_attr "shift" "1")
4122 ;; predicable can't be set based on the variant, so left as no
4123 (set_attr "length" "8")]
;; Thumb-1 abs, split after reload into the classic three-insn
;; branch-free sequence: s = x >> 31; d = x + s; d = d ^ s.
4126 (define_insn_and_split "*thumb1_abssi2"
4127 [(set (match_operand:SI 0 "s_register_operand" "=l")
4128 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4129 (clobber (match_scratch:SI 2 "=&l"))]
4132 "TARGET_THUMB1 && reload_completed"
4133 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4134 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4135 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4137 [(set_attr "length" "6")]
;; -abs(x): mirrors *arm_abssi2 with the conditions/ops inverted
;; (RSBGT, and RSB instead of SUB in the branch-free form).
4140 (define_insn "*arm_neg_abssi2"
4141 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4142 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4143 (clobber (reg:CC CC_REGNUM))]
4146 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4147 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4148 [(set_attr "conds" "clob,*")
4149 (set_attr "shift" "1")
4150 ;; predicable can't be set based on the variant, so left as no
4151 (set_attr "length" "8")]
;; Thumb-1 -abs(x): as *thumb1_abssi2 but with MINUS reversed
;; (d = s - x) before the XOR.
4154 (define_insn_and_split "*thumb1_neg_abssi2"
4155 [(set (match_operand:SI 0 "s_register_operand" "=l")
4156 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4157 (clobber (match_scratch:SI 2 "=&l"))]
4160 "TARGET_THUMB1 && reload_completed"
4161 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4162 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4163 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4165 [(set_attr "length" "6")]
;; FP abs and sqrt expanders — matched by FPA/VFP patterns elsewhere.
4168 (define_expand "abssf2"
4169 [(set (match_operand:SF 0 "s_register_operand" "")
4170 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4171 "TARGET_32BIT && TARGET_HARD_FLOAT"
4174 (define_expand "absdf2"
4175 [(set (match_operand:DF 0 "s_register_operand" "")
4176 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4177 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4180 (define_expand "sqrtsf2"
4181 [(set (match_operand:SF 0 "s_register_operand" "")
4182 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4183 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4186 (define_expand "sqrtdf2"
4187 [(set (match_operand:DF 0 "s_register_operand" "")
4188 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4189 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
;; One's complement (NOT) patterns.
;;
;; DImode NOT: emitted as a single insn, then split after reload into
;; two SImode NOTs on the low and high halves.  The earlyclobber
;; (=&r) prevents the output overlapping a not-yet-read input half.
4192 (define_insn_and_split "one_cmpldi2"
4193 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
4194 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
4197 "TARGET_32BIT && reload_completed"
4198 [(set (match_dup 0) (not:SI (match_dup 1)))
4199 (set (match_dup 2) (not:SI (match_dup 3)))]
;; Rewrite operands 0/1 as the low halves and create 2/3 as the highs.
4202 operands[2] = gen_highpart (SImode, operands[0]);
4203 operands[0] = gen_lowpart (SImode, operands[0]);
4204 operands[3] = gen_highpart (SImode, operands[1]);
4205 operands[1] = gen_lowpart (SImode, operands[1]);
4207 [(set_attr "length" "8")
4208 (set_attr "predicable" "yes")]
;; SImode NOT: generic expander plus ARM/Thumb-2 and Thumb-1 insns
;; (both emit MVN per the "insn" attribute).
4211 (define_expand "one_cmplsi2"
4212 [(set (match_operand:SI 0 "s_register_operand" "")
4213 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4218 (define_insn "*arm_one_cmplsi2"
4219 [(set (match_operand:SI 0 "s_register_operand" "=r")
4220 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4223 [(set_attr "predicable" "yes")
4224 (set_attr "insn" "mvn")]
4227 (define_insn "*thumb1_one_cmplsi2"
4228 [(set (match_operand:SI 0 "register_operand" "=l")
4229 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4232 [(set_attr "length" "2")
4233 (set_attr "insn" "mvn")]
;; MVN variants that also set the condition codes (MVNS), with and
;; without a live destination register.
4236 (define_insn "*notsi_compare0"
4237 [(set (reg:CC_NOOV CC_REGNUM)
4238 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4240 (set (match_operand:SI 0 "s_register_operand" "=r")
4241 (not:SI (match_dup 1)))]
4244 [(set_attr "conds" "set")
4245 (set_attr "insn" "mvn")]
4248 (define_insn "*notsi_compare0_scratch"
4249 [(set (reg:CC_NOOV CC_REGNUM)
4250 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4252 (clobber (match_scratch:SI 0 "=r"))]
4255 [(set_attr "conds" "set")
4256 (set_attr "insn" "mvn")]
4259 ;; Fixed <--> Floating conversion insns
;; SImode/DImode -> HFmode float conversions have no direct insn:
;; they are expanded by first converting to SFmode, then narrowing
;; the SF result to HF and moving it into the destination.
4261 (define_expand "floatsihf2"
4262 [(set (match_operand:HF 0 "general_operand" "")
4263 (float:HF (match_operand:SI 1 "general_operand" "")))]
4267 rtx op1 = gen_reg_rtx (SFmode);
4268 expand_float (op1, operands[1], 0);
4269 op1 = convert_to_mode (HFmode, op1, 0);
4270 emit_move_insn (operands[0], op1);
4275 (define_expand "floatdihf2"
4276 [(set (match_operand:HF 0 "general_operand" "")
4277 (float:HF (match_operand:DI 1 "general_operand" "")))]
4281 rtx op1 = gen_reg_rtx (SFmode);
4282 expand_float (op1, operands[1], 0);
4283 op1 = convert_to_mode (HFmode, op1, 0);
4284 emit_move_insn (operands[0], op1);
;; int -> SF/DF: Maverick (Cirrus) gets its own expansion path; other
;; hard-float configurations fall through to the generic patterns.
4289 (define_expand "floatsisf2"
4290 [(set (match_operand:SF 0 "s_register_operand" "")
4291 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4292 "TARGET_32BIT && TARGET_HARD_FLOAT"
4294 if (TARGET_MAVERICK)
4296 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
4301 (define_expand "floatsidf2"
4302 [(set (match_operand:DF 0 "s_register_operand" "")
4303 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4304 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4306 if (TARGET_MAVERICK)
4308 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; HF -> int truncations also go through SFmode.
4313 (define_expand "fix_trunchfsi2"
4314 [(set (match_operand:SI 0 "general_operand" "")
4315 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4319 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4320 expand_fix (operands[0], op1, 0);
4325 (define_expand "fix_trunchfdi2"
4326 [(set (match_operand:DI 0 "general_operand" "")
4327 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4331 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4332 expand_fix (operands[0], op1, 0);
;; SF/DF -> SI truncation; Maverick operands must live in Cirrus FP
;; registers, so force them into registers first.
4337 (define_expand "fix_truncsfsi2"
4338 [(set (match_operand:SI 0 "s_register_operand" "")
4339 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4340 "TARGET_32BIT && TARGET_HARD_FLOAT"
4342 if (TARGET_MAVERICK)
4344 if (!cirrus_fp_register (operands[0], SImode))
4345 operands[0] = force_reg (SImode, operands[0]);
4346 if (!cirrus_fp_register (operands[1], SFmode))
;; NOTE(review): force_reg is handed operands[0] (the SImode result)
;; while assigning operands[1] (the SFmode source) -- this looks like a
;; copy-paste bug and should presumably read operands[1]; confirm
;; against upstream GCC before relying on the Maverick path.
4347 operands[1] = force_reg (SFmode, operands[0]);
4348 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4353 (define_expand "fix_truncdfsi2"
4354 [(set (match_operand:SI 0 "s_register_operand" "")
4355 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4356 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4358 if (TARGET_MAVERICK)
4360 if (!cirrus_fp_register (operands[1], DFmode))
;; NOTE(review): same suspected operands[0]/operands[1] mix-up as in
;; fix_truncsfsi2 above -- verify.
4361 operands[1] = force_reg (DFmode, operands[0]);
4362 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF narrowing (no direct insn when VFP is single-only).
4369 (define_expand "truncdfsf2"
4370 [(set (match_operand:SF 0 "s_register_operand" "")
4372 (match_operand:DF 1 "s_register_operand" "")))]
4373 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4377 /* DFmode -> HFmode conversions have to go through SFmode. */
4378 (define_expand "truncdfhf2"
4379 [(set (match_operand:HF 0 "general_operand" "")
4381 (match_operand:DF 1 "general_operand" "")))]
4386 op1 = convert_to_mode (SFmode, operands[1], 0);
4387 op1 = convert_to_mode (HFmode, op1, 0);
4388 emit_move_insn (operands[0], op1);
4393 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI extensions, parameterized over the QHSI mode iterator;
;; predicates/constraints/conditions come from per-mode attributes.
;; Each is 8 bytes (two SI ops) until split below.
4395 (define_insn "zero_extend<mode>di2"
4396 [(set (match_operand:DI 0 "s_register_operand" "=r")
4397 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4398 "<qhs_zextenddi_cstr>")))]
4399 "TARGET_32BIT <qhs_zextenddi_cond>"
4401 [(set_attr "length" "8")
4402 (set_attr "ce_count" "2")
4403 (set_attr "predicable" "yes")]
4406 (define_insn "extend<mode>di2"
4407 [(set (match_operand:DI 0 "s_register_operand" "=r")
4408 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4409 "<qhs_extenddi_cstr>")))]
4410 "TARGET_32BIT <qhs_sextenddi_cond>"
4412 [(set_attr "length" "8")
4413 (set_attr "ce_count" "2")
4414 (set_attr "shift" "1")
4415 (set_attr "predicable" "yes")]
4418 ;; Splits for all extensions to DImode
;; Zero extension: extend (or move) into the low word, then clear the
;; high word.  The clobber of the full DI reg tells the optimizers the
;; old value is dead when there is no input/output overlap.
4420 [(set (match_operand:DI 0 "s_register_operand" "")
4421 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4423 [(set (match_dup 0) (match_dup 1))]
4425 rtx lo_part = gen_lowpart (SImode, operands[0]);
4426 enum machine_mode src_mode = GET_MODE (operands[1]);
4428 if (REG_P (operands[0])
4429 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4430 emit_clobber (operands[0]);
4431 if (!REG_P (lo_part) || src_mode != SImode
4432 || !rtx_equal_p (lo_part, operands[1]))
4434 if (src_mode == SImode)
4435 emit_move_insn (lo_part, operands[1]);
4437 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4438 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4439 operands[1] = lo_part;
;; Remaining pattern sets the high word to zero.
4441 operands[0] = gen_highpart (SImode, operands[0]);
4442 operands[1] = const0_rtx;
;; Sign extension: same shape, but the high word becomes the low word
;; shifted arithmetically right by 31 (i.e. a sign-bit broadcast).
4446 [(set (match_operand:DI 0 "s_register_operand" "")
4447 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4449 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4451 rtx lo_part = gen_lowpart (SImode, operands[0]);
4452 enum machine_mode src_mode = GET_MODE (operands[1]);
4454 if (REG_P (operands[0])
4455 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4456 emit_clobber (operands[0]);
4458 if (!REG_P (lo_part) || src_mode != SImode
4459 || !rtx_equal_p (lo_part, operands[1]))
4461 if (src_mode == SImode)
4462 emit_move_insn (lo_part, operands[1]);
4464 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4465 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4466 operands[1] = lo_part;
4468 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.
;; Pre-ARMv4 cores have no ldrh, so memory sources go through
;; movhi_bytes; pre-ARMv6 register sources are done as a shift-left /
;; logical-shift-right pair (no uxth available).
4471 (define_expand "zero_extendhisi2"
4472 [(set (match_operand:SI 0 "s_register_operand" "")
4473 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4476 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4478 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4481 if (!arm_arch6 && !MEM_P (operands[1]))
4483 rtx t = gen_lowpart (SImode, operands[1]);
4484 rtx tmp = gen_reg_rtx (SImode);
4485 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4486 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Post-reload split of a reg-to-reg HI zero-extend into the same
;; lsl #16 / lsr #16 pair for pre-v6, non-Thumb-2 targets.
4492 [(set (match_operand:SI 0 "s_register_operand" "")
4493 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4494 "!TARGET_THUMB2 && !arm_arch6"
4495 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4496 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4498 operands[2] = gen_lowpart (SImode, operands[1]);
;; Thumb-1 insn: uxth for v6 register sources, ldrh for memory.  The
;; PLUS/SP fix-up below rewrites an SP-based address into the
;; destination register first (ldrh cannot address off SP).
4501 (define_insn "*thumb1_zero_extendhisi2"
4502 [(set (match_operand:SI 0 "register_operand" "=l,l")
4503 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4508 if (which_alternative == 0 && arm_arch6)
4509 return "uxth\t%0, %1";
4510 if (which_alternative == 0)
4513 mem = XEXP (operands[1], 0);
4515 if (GET_CODE (mem) == CONST)
4516 mem = XEXP (mem, 0);
4518 if (GET_CODE (mem) == PLUS)
4520 rtx a = XEXP (mem, 0);
4522 /* This can happen due to bugs in reload. */
4523 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4526 ops[0] = operands[0];
4529 output_asm_insn ("mov\t%0, %1", ops);
4531 XEXP (mem, 0) = operands[0];
4535 return "ldrh\t%0, %1";
4537 [(set_attr_alternative "length"
4538 [(if_then_else (eq_attr "is_arch6" "yes")
4539 (const_int 2) (const_int 4))
4541 (set_attr "type" "alu_shift,load_byte")]
;; ARM-mode insns: v4 (no uxth; reg alternative is emitted as two
;; insns, length 8) and v6 (uxth available).
4544 (define_insn "*arm_zero_extendhisi2"
4545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4546 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4547 "TARGET_ARM && arm_arch4 && !arm_arch6"
4551 [(set_attr "type" "alu_shift,load_byte")
4552 (set_attr "predicable" "yes")]
4555 (define_insn "*arm_zero_extendhisi2_v6"
4556 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4557 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4558 "TARGET_ARM && arm_arch6"
4562 [(set_attr "type" "alu_shift,load_byte")
4563 (set_attr "predicable" "yes")]
;; Fused extend-and-add: uxtah %0, %2, %1.
4566 (define_insn "*arm_zero_extendhisi2addsi"
4567 [(set (match_operand:SI 0 "s_register_operand" "=r")
4568 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4569 (match_operand:SI 2 "s_register_operand" "r")))]
4571 "uxtah%?\\t%0, %2, %1"
4572 [(set_attr "type" "alu_shift")
4573 (set_attr "predicable" "yes")]
;; QI -> SI zero extension.
;; ARM register sources pre-v6 are done with AND #255; Thumb-1 pre-v6
;; uses a lsl #24 / lsr #24 pair; memory uses ldrb everywhere.
4576 (define_expand "zero_extendqisi2"
4577 [(set (match_operand:SI 0 "s_register_operand" "")
4578 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4581 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4583 emit_insn (gen_andsi3 (operands[0],
4584 gen_lowpart (SImode, operands[1]),
4588 if (!arm_arch6 && !MEM_P (operands[1]))
4590 rtx t = gen_lowpart (SImode, operands[1]);
4591 rtx tmp = gen_reg_rtx (SImode);
4592 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4593 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Post-reload split of reg-to-reg QI zero-extend into shifts (with an
;; AND #255 fallback path visible below).
4599 [(set (match_operand:SI 0 "s_register_operand" "")
4600 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4602 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4603 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4605 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4608 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Thumb-1 insns: pre-v6 (shift pair for regs, ldrb for mem) and v6
;; (uxtb available, length 2 for both alternatives).
4613 (define_insn "*thumb1_zero_extendqisi2"
4614 [(set (match_operand:SI 0 "register_operand" "=l,l")
4615 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4616 "TARGET_THUMB1 && !arm_arch6"
4620 [(set_attr "length" "4,2")
4621 (set_attr "type" "alu_shift,load_byte")
4622 (set_attr "pool_range" "*,32")]
4625 (define_insn "*thumb1_zero_extendqisi2_v6"
4626 [(set (match_operand:SI 0 "register_operand" "=l,l")
4627 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4628 "TARGET_THUMB1 && arm_arch6"
4632 [(set_attr "length" "2")
4633 (set_attr "type" "alu_shift,load_byte")]
;; ARM-mode insns: pre-v6 (two-insn reg alternative, ldrb for mem) and
;; v6 (uxtb).
4636 (define_insn "*arm_zero_extendqisi2"
4637 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4638 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4639 "TARGET_ARM && !arm_arch6"
4642 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4643 [(set_attr "length" "8,4")
4644 (set_attr "type" "alu_shift,load_byte")
4645 (set_attr "predicable" "yes")]
4648 (define_insn "*arm_zero_extendqisi2_v6"
4649 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4650 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4651 "TARGET_ARM && arm_arch6"
4654 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4655 [(set_attr "type" "alu_shift,load_byte")
4656 (set_attr "predicable" "yes")]
;; Fused extend-and-add: uxtab %0, %2, %1.
4659 (define_insn "*arm_zero_extendqisi2addsi"
4660 [(set (match_operand:SI 0 "s_register_operand" "=r")
4661 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4662 (match_operand:SI 2 "s_register_operand" "r")))]
4664 "uxtab%?\\t%0, %2, %1"
4665 [(set_attr "predicable" "yes")
4666 (set_attr "insn" "xtab")
4667 (set_attr "type" "alu_shift")]
;; Splits turning a zero-extend of the low byte of an SI value (taken
;; as a subreg) into move + AND #255.  Byte 0 is the low byte on
;; little-endian; byte 3 on big-endian -- hence the two variants.
4671 [(set (match_operand:SI 0 "s_register_operand" "")
4672 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4673 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4674 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4675 [(set (match_dup 2) (match_dup 1))
4676 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4681 [(set (match_operand:SI 0 "s_register_operand" "")
4682 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4683 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4684 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4685 [(set (match_dup 2) (match_dup 1))
4686 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Reassociate (X op (shifted-and-masked Y)) with a zero-extend so the
;; extend happens last; the condition checks operand 3 is exactly the
;; mode mask shifted into place.
4692 [(set (match_operand:SI 0 "s_register_operand" "")
4693 (ior_xor:SI (and:SI (ashift:SI
4694 (match_operand:SI 1 "s_register_operand" "")
4695 (match_operand:SI 2 "const_int_operand" ""))
4696 (match_operand:SI 3 "const_int_operand" ""))
4698 (match_operator 5 "subreg_lowpart_operator"
4699 [(match_operand:SI 4 "s_register_operand" "")]))))]
4701 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4702 == (GET_MODE_MASK (GET_MODE (operands[5]))
4703 & (GET_MODE_MASK (GET_MODE (operands[5]))
4704 << (INTVAL (operands[2])))))"
4705 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4707 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4708 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QI register against zero (sets only the Z flag).
4711 (define_insn "*compareqi_eq0"
4712 [(set (reg:CC_Z CC_REGNUM)
4713 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4717 [(set_attr "conds" "set")]
;; HI -> SI sign extension.
;; Thumb-1 uses its own clobber-carrying insn; pre-v4 ARM memory
;; sources go through extendhisi2_mem; pre-v6 register sources are a
;; lsl #16 / asr #16 pair.
4720 (define_expand "extendhisi2"
4721 [(set (match_operand:SI 0 "s_register_operand" "")
4722 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4727 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4730 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4732 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4736 if (!arm_arch6 && !MEM_P (operands[1]))
4738 rtx t = gen_lowpart (SImode, operands[1]);
4739 rtx tmp = gen_reg_rtx (SImode);
4740 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4741 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split of the clobber-carrying reg-to-reg form into the shift pair.
4748 [(set (match_operand:SI 0 "register_operand" "")
4749 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4750 (clobber (match_scratch:SI 2 ""))])]
4752 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4753 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4755 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4758 ;; We used to have an early-clobber on the scratch register here.
4759 ;; However, there's a bug somewhere in reload which means that this
4760 ;; can be partially ignored during spill allocation if the memory
4761 ;; address also needs reloading; this causes us to die later on when
4762 ;; we try to verify the operands. Fortunately, we don't really need
4763 ;; the early-clobber: we can always use operand 0 if operand 2
4764 ;; overlaps the address.
;; Thumb-1 insn: sxth for v6 regs; for memory, ldrsh only supports
;; reg+reg addressing, so other address shapes are materialized into
;; a register (operand 2, or operand 0 when they overlap) first.
4765 (define_insn "thumb1_extendhisi2"
4766 [(set (match_operand:SI 0 "register_operand" "=l,l")
4767 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4768 (clobber (match_scratch:SI 2 "=X,l"))]
4775 if (which_alternative == 0 && !arm_arch6)
4777 if (which_alternative == 0)
4778 return \"sxth\\t%0, %1\";
4780 mem = XEXP (operands[1], 0);
4782 /* This code used to try to use 'V', and fix the address only if it was
4783 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4784 range of QImode offsets, and offsettable_address_p does a QImode
4787 if (GET_CODE (mem) == CONST)
4788 mem = XEXP (mem, 0);
4790 if (GET_CODE (mem) == LABEL_REF)
4791 return \"ldr\\t%0, %1\";
4793 if (GET_CODE (mem) == PLUS)
4795 rtx a = XEXP (mem, 0);
4796 rtx b = XEXP (mem, 1);
4798 if (GET_CODE (a) == LABEL_REF
4799 && GET_CODE (b) == CONST_INT)
4800 return \"ldr\\t%0, %1\";
4802 if (GET_CODE (b) == REG)
4803 return \"ldrsh\\t%0, %1\";
4811 ops[2] = const0_rtx;
4814 gcc_assert (GET_CODE (ops[1]) == REG);
4816 ops[0] = operands[0];
4817 if (reg_mentioned_p (operands[2], ops[1]))
4820 ops[3] = operands[2];
4821 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4824 [(set_attr_alternative "length"
4825 [(if_then_else (eq_attr "is_arch6" "yes")
4826 (const_int 2) (const_int 4))
4828 (set_attr "type" "alu_shift,load_byte")
4829 (set_attr "pool_range" "*,1020")]
4832 ;; This pattern will only be used when ldsh is not available
;; Synthesize a signed halfword load from two byte loads: the two
;; bytes are loaded separately, the sign-carrying byte is shifted up
;; and arithmetically shifted back, then OR-ed with the other byte.
;; Endianness decides which loaded byte is the high one (4/5 swap).
4833 (define_expand "extendhisi2_mem"
4834 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4836 (zero_extend:SI (match_dup 7)))
4837 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4838 (set (match_operand:SI 0 "" "")
4839 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4844 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4846 mem1 = change_address (operands[1], QImode, addr);
4847 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4848 operands[0] = gen_lowpart (SImode, operands[0]);
4850 operands[2] = gen_reg_rtx (SImode);
4851 operands[3] = gen_reg_rtx (SImode);
4852 operands[6] = gen_reg_rtx (SImode);
4855 if (BYTES_BIG_ENDIAN)
4857 operands[4] = operands[2];
4858 operands[5] = operands[3];
4862 operands[4] = operands[3];
4863 operands[5] = operands[2];
;; Split of a reg-to-reg HI sign-extend into lsl #16 / asr #16.
4869 [(set (match_operand:SI 0 "register_operand" "")
4870 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4872 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4873 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4875 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM-mode insns: v4 (two-insn reg alternative) and v6 (sxth).
4878 (define_insn "*arm_extendhisi2"
4879 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4880 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4881 "TARGET_ARM && arm_arch4 && !arm_arch6"
4885 [(set_attr "length" "8,4")
4886 (set_attr "type" "alu_shift,load_byte")
4887 (set_attr "predicable" "yes")
4888 (set_attr "pool_range" "*,256")
4889 (set_attr "neg_pool_range" "*,244")]
4892 ;; ??? Check Thumb-2 pool range
4893 (define_insn "*arm_extendhisi2_v6"
4894 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4895 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4896 "TARGET_32BIT && arm_arch6"
4900 [(set_attr "type" "alu_shift,load_byte")
4901 (set_attr "predicable" "yes")
4902 (set_attr "pool_range" "*,256")
4903 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtah %0, %2, %1.
4906 (define_insn "*arm_extendhisi2addsi"
4907 [(set (match_operand:SI 0 "s_register_operand" "=r")
4908 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4909 (match_operand:SI 2 "s_register_operand" "r")))]
4911 "sxtah%?\\t%0, %2, %1"
;; QI -> HI sign extension: done in SImode as lsl #24 / asr shifts on
;; lowparts; v4 memory sources get the direct ldrsb insn below.
4914 (define_expand "extendqihi2"
4916 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4918 (set (match_operand:HI 0 "s_register_operand" "")
4919 (ashiftrt:SI (match_dup 2)
4924 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4926 emit_insn (gen_rtx_SET (VOIDmode,
4928 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4931 if (!s_register_operand (operands[1], QImode))
4932 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4933 operands[0] = gen_lowpart (SImode, operands[0]);
4934 operands[1] = gen_lowpart (SImode, operands[1]);
4935 operands[2] = gen_reg_rtx (SImode);
;; Direct ldrsb for QI -> HI from memory ('Uq' restricts to addresses
;; valid for ldrsb).
4939 (define_insn "*arm_extendqihi_insn"
4940 [(set (match_operand:HI 0 "s_register_operand" "=r")
4941 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4942 "TARGET_ARM && arm_arch4"
4943 "ldr%(sb%)\\t%0, %1"
4944 [(set_attr "type" "load_byte")
4945 (set_attr "predicable" "yes")
4946 (set_attr "pool_range" "256")
4947 (set_attr "neg_pool_range" "244")]
;; QI -> SI sign extension: pre-v4 memory is forced to a register;
;; pre-v6 registers use the lsl #24 / asr #24 pair.
4950 (define_expand "extendqisi2"
4951 [(set (match_operand:SI 0 "s_register_operand" "")
4952 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4955 if (!arm_arch4 && MEM_P (operands[1]))
4956 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4958 if (!arm_arch6 && !MEM_P (operands[1]))
4960 rtx t = gen_lowpart (SImode, operands[1]);
4961 rtx tmp = gen_reg_rtx (SImode);
4962 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4963 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split of the reg-to-reg QI sign-extend into the shift pair.
4969 [(set (match_operand:SI 0 "register_operand" "")
4970 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4972 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4973 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4975 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM-mode insns: v4 (two-insn reg alternative) and v6 (sxtb).
4978 (define_insn "*arm_extendqisi"
4979 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4980 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4981 "TARGET_ARM && arm_arch4 && !arm_arch6"
4985 [(set_attr "length" "8,4")
4986 (set_attr "type" "alu_shift,load_byte")
4987 (set_attr "predicable" "yes")
4988 (set_attr "pool_range" "*,256")
4989 (set_attr "neg_pool_range" "*,244")]
4992 (define_insn "*arm_extendqisi_v6"
4993 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4995 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4996 "TARGET_ARM && arm_arch6"
5000 [(set_attr "type" "alu_shift,load_byte")
5001 (set_attr "predicable" "yes")
5002 (set_attr "pool_range" "*,256")
5003 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-and-add: sxtab %0, %2, %1.
5006 (define_insn "*arm_extendqisi2addsi"
5007 [(set (match_operand:SI 0 "s_register_operand" "=r")
5008 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5009 (match_operand:SI 2 "s_register_operand" "r")))]
5011 "sxtab%?\\t%0, %2, %1"
5012 [(set_attr "type" "alu_shift")
5013 (set_attr "insn" "xtab")
5014 (set_attr "predicable" "yes")]
;; Thumb-1 QI sign-extend from memory: ldrsb only takes [reg, reg]
;; addresses, so this split rewrites other address shapes into
;; reg + reg form, using operand 0 itself as the extra base register.
5018 [(set (match_operand:SI 0 "register_operand" "")
5019 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5020 "TARGET_THUMB1 && reload_completed"
5021 [(set (match_dup 0) (match_dup 2))
5022 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5024 rtx addr = XEXP (operands[1], 0);
5026 if (GET_CODE (addr) == CONST)
5027 addr = XEXP (addr, 0);
5029 if (GET_CODE (addr) == PLUS
5030 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5031 /* No split necessary. */
5034 if (GET_CODE (addr) == PLUS
5035 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
;; If the destination overlaps the address we cannot use it as a
;; base; load the byte then extend in-register instead.
5038 if (reg_overlap_mentioned_p (operands[0], addr))
5040 rtx t = gen_lowpart (QImode, operands[0]);
5041 emit_move_insn (t, operands[1]);
5042 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5048 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5049 operands[2] = const0_rtx;
5051 else if (GET_CODE (addr) != PLUS)
5053 else if (REG_P (XEXP (addr, 0)))
5055 operands[2] = XEXP (addr, 1);
5056 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5060 operands[2] = XEXP (addr, 0);
5061 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5064 operands[3] = change_address (operands[1], QImode, addr);
;; peephole2: recombine the split sequence above with a preceding
;; base-register adjustment when the registers die afterwards.
5068 [(set (match_operand:SI 0 "register_operand" "")
5069 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5070 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5071 (set (match_operand:SI 3 "register_operand" "")
5072 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5074 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5075 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5076 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5077 && (peep2_reg_dead_p (3, operands[0])
5078 || rtx_equal_p (operands[0], operands[3]))
5079 && (peep2_reg_dead_p (3, operands[2])
5080 || rtx_equal_p (operands[2], operands[3]))"
5081 [(set (match_dup 2) (match_dup 1))
5082 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5084 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5085 operands[4] = change_address (operands[4], QImode, addr);
;; The insn itself: sxtb for v6 regs, ldrsb when the address is
;; already [reg, reg]; otherwise the longer synthesized form.
5088 (define_insn "thumb1_extendqisi2"
5089 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5090 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5095 if (which_alternative == 0 && arm_arch6)
5096 return "sxtb\\t%0, %1";
5097 if (which_alternative == 0)
5100 addr = XEXP (operands[1], 0);
5101 if (GET_CODE (addr) == PLUS
5102 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5103 return "ldrsb\\t%0, %1";
5107 [(set_attr_alternative "length"
5108 [(if_then_else (eq_attr "is_arch6" "yes")
5109 (const_int 2) (const_int 4))
5111 (if_then_else (eq_attr "is_arch6" "yes")
5112 (const_int 4) (const_int 6))])
5113 (set_attr "type" "alu_shift,load_byte,load_byte")]
;; SF -> DF widening (requires double-capable FP hardware).
5116 (define_expand "extendsfdf2"
5117 [(set (match_operand:DF 0 "s_register_operand" "")
5118 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5119 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5123 /* HFmode -> DFmode conversions have to go through SFmode. */
5124 (define_expand "extendhfdf2"
5125 [(set (match_operand:DF 0 "general_operand" "")
5126 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
;; Widen HF to SF, then SF to DF, then move into the destination.
5131 op1 = convert_to_mode (SFmode, operands[1], 0);
5132 op1 = convert_to_mode (DFmode, op1, 0);
5133 emit_insn (gen_movdf (operands[0], op1));
5138 ;; Move insns (including loads and stores)
5140 ;; XXX Just some ideas about movti.
5141 ;; I don't think these are a good idea on the arm, there just aren't enough
5143 ;;(define_expand "loadti"
5144 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5145 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5148 ;;(define_expand "storeti"
5149 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5150 ;; (match_operand:TI 1 "s_register_operand" ""))]
5153 ;;(define_expand "movti"
5154 ;; [(set (match_operand:TI 0 "general_operand" "")
5155 ;; (match_operand:TI 1 "general_operand" ""))]
5161 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
5162 ;; operands[1] = copy_to_reg (operands[1]);
5163 ;; if (GET_CODE (operands[0]) == MEM)
5164 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5165 ;; else if (GET_CODE (operands[1]) == MEM)
5166 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5170 ;; emit_insn (insn);
5174 ;; Recognize garbage generated above.
5177 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5178 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5182 ;; register mem = (which_alternative < 3);
5183 ;; register const char *template;
5185 ;; operands[mem] = XEXP (operands[mem], 0);
5186 ;; switch (which_alternative)
5188 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5189 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5190 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5191 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5192 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5193 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5195 ;; output_asm_insn (template, operands);
;; DImode moves.
;; Expander: before reload, force mem = mem / mem = const into a
;; register so only reg destinations or reg sources reach the insns.
5199 (define_expand "movdi"
5200 [(set (match_operand:DI 0 "general_operand" "")
5201 (match_operand:DI 1 "general_operand" ""))]
5204 if (can_create_pseudo_p ())
5206 if (GET_CODE (operands[0]) != REG)
5207 operands[1] = force_reg (DImode, operands[1]);
;; Core ARM DI move; excluded when Maverick/VFP provide their own
;; patterns.  Lengths vary with which constant class (Da/Db/Dc) or
;; memory alternative is used; output_move_double emits the sequence.
5212 (define_insn "*arm_movdi"
5213 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5214 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5216 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
5218 && ( register_operand (operands[0], DImode)
5219 || register_operand (operands[1], DImode))"
5221 switch (which_alternative)
5228 return output_move_double (operands, true, NULL);
5231 [(set_attr "length" "8,12,16,8,8")
5232 (set_attr "type" "*,*,*,load2,store2")
5233 (set_attr "arm_pool_range" "*,*,*,1020,*")
5234 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5235 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5236 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit constant move into two arm_split_constant calls
;; (one per word) when inlining the constant is cheap enough.
5240 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5241 (match_operand:ANY64 1 "const_double_operand" ""))]
5244 && (arm_const_double_inline_cost (operands[1])
5245 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5248 arm_split_constant (SET, SImode, curr_insn,
5249 INTVAL (gen_lowpart (SImode, operands[1])),
5250 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5251 arm_split_constant (SET, SImode, curr_insn,
5252 INTVAL (gen_highpart_mode (SImode,
5253 GET_MODE (operands[0]),
5255 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5260 ; If optimizing for size, or if we have load delay slots, then
5261 ; we want to split the constant into two separate operations.
5262 ; In both cases this may split a trivial part into a single data op
5263 ; leaving a single complex constant to load. We can also get longer
5264 ; offsets in a LDR which means we get better chances of sharing the pool
5265 ; entries. Finally, we can normally do a better job of scheduling
5266 ; LDR instructions than we can with LDM.
5267 ; This pattern will only match if the one above did not.
5269 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5270 (match_operand:ANY64 1 "const_double_operand" ""))]
5271 "TARGET_ARM && reload_completed
5272 && arm_const_double_by_parts (operands[1])"
5273 [(set (match_dup 0) (match_dup 1))
5274 (set (match_dup 2) (match_dup 3))]
5276 operands[2] = gen_highpart (SImode, operands[0]);
5277 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5279 operands[0] = gen_lowpart (SImode, operands[0]);
5280 operands[1] = gen_lowpart (SImode, operands[1]);
;; Post-reload reg-to-reg 64-bit move split into two SI moves; if the
;; low-word destination is the high-word source, swap the move order
;; so neither half is clobbered before it is read.
5285 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5286 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5287 "TARGET_EITHER && reload_completed"
5288 [(set (match_dup 0) (match_dup 1))
5289 (set (match_dup 2) (match_dup 3))]
5291 operands[2] = gen_highpart (SImode, operands[0]);
5292 operands[3] = gen_highpart (SImode, operands[1]);
5293 operands[0] = gen_lowpart (SImode, operands[0]);
5294 operands[1] = gen_lowpart (SImode, operands[1]);
5296 /* Handle a partial overlap. */
5297 if (rtx_equal_p (operands[0], operands[3]))
5299 rtx tmp0 = operands[0];
5300 rtx tmp1 = operands[1];
5302 operands[0] = operands[2];
5303 operands[1] = operands[3];
5310 ;; We can't actually do base+index doubleword loads if the index and
5311 ;; destination overlap. Split here so that we at least have chance to
;; Compute base+index into the destination's first register, then do
;; the doubleword load from that register.
5314 [(set (match_operand:DI 0 "s_register_operand" "")
5315 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5316 (match_operand:SI 2 "s_register_operand" ""))))]
5318 && reg_overlap_mentioned_p (operands[0], operands[1])
5319 && reg_overlap_mentioned_p (operands[0], operands[2])"
5321 (plus:SI (match_dup 1)
5324 (mem:DI (match_dup 4)))]
5326 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5330 ;;; ??? This should have alternatives for constants.
5331 ;;; ??? This was originally identical to the movdf_insn pattern.
5332 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5333 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DI move.  The alternatives cover reg-reg (add/mov pairs,
;; ordered to survive overlap), small immediates, negative immediates
;; (mov/neg/asr), ldmia/stmia, general memory, and hi-reg moves.
5334 (define_insn "*thumb1_movdi_insn"
5335 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5336 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5338 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5339 && ( register_operand (operands[0], DImode)
5340 || register_operand (operands[1], DImode))"
5343 switch (which_alternative)
5347 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5348 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5349 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5351 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5353 operands[1] = GEN_INT (- INTVAL (operands[1]));
5354 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5356 return \"ldmia\\t%1, {%0, %H0}\";
5358 return \"stmia\\t%0, {%1, %H1}\";
5360 return thumb_load_double_from_address (operands);
5362 operands[2] = gen_rtx_MEM (SImode,
5363 plus_constant (XEXP (operands[0], 0), 4));
5364 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5367 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5368 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5369 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5372 [(set_attr "length" "4,4,6,2,2,6,4,4")
5373 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5374 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5375 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles, in order: forcing mem destinations'
;; sources into registers; splitting awkward constants via
;; arm_split_constant; movw/movt pairs for symbols (TARGET_USE_MOVT);
;; Thumb-1 legitimization; section-anchor offsets; TLS references; and
;; PIC address legitimization.
;; NOTE(review): the extract ends mid-expander (closing lines are not
;; visible here) -- consult the full file for the remainder.
5378 (define_expand "movsi"
5379 [(set (match_operand:SI 0 "general_operand" "")
5380 (match_operand:SI 1 "general_operand" ""))]
5384 rtx base, offset, tmp;
5388 /* Everything except mem = const or mem = mem can be done easily. */
5389 if (GET_CODE (operands[0]) == MEM)
5390 operands[1] = force_reg (SImode, operands[1]);
;; Constants not encodable directly (nor as MVN) are synthesized.
5391 if (arm_general_register_operand (operands[0], SImode)
5392 && GET_CODE (operands[1]) == CONST_INT
5393 && !(const_ok_for_arm (INTVAL (operands[1]))
5394 || const_ok_for_arm (~INTVAL (operands[1]))))
5396 arm_split_constant (SET, SImode, NULL_RTX,
5397 INTVAL (operands[1]), operands[0], NULL_RTX,
5398 optimize && can_create_pseudo_p ());
;; Symbols load as a movw/movt pair when allowed (no PIC, no TLS).
5402 if (TARGET_USE_MOVT && !target_word_relocations
5403 && GET_CODE (operands[1]) == SYMBOL_REF
5404 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5406 arm_emit_movpair (operands[0], operands[1]);
5410 else /* TARGET_THUMB1... */
5412 if (can_create_pseudo_p ())
5414 if (GET_CODE (operands[0]) != REG)
5415 operands[1] = force_reg (SImode, operands[1]);
;; With section anchors, symbol+offset must stay within the symbol's
;; block; otherwise compute symbol then add the offset separately.
5419 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5421 split_const (operands[1], &base, &offset);
5422 if (GET_CODE (base) == SYMBOL_REF
5423 && !offset_within_block_p (base, INTVAL (offset)))
5425 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5426 emit_move_insn (tmp, base);
5427 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5432 /* Recognize the case where operand[1] is a reference to thread-local
5433 data and load its address to a register. */
5434 if (arm_tls_referenced_p (operands[1]))
5436 rtx tmp = operands[1];
;; Peel off a constant addend before legitimizing the TLS symbol.
5439 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5441 addend = XEXP (XEXP (tmp, 0), 1);
5442 tmp = XEXP (XEXP (tmp, 0), 0);
5445 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5446 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5448 tmp = legitimize_tls_address (tmp,
5449 !can_create_pseudo_p () ? operands[0] : 0);
5452 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5453 tmp = force_operand (tmp, operands[0]);
;; PIC: any constant / symbolic source goes through the PIC
;; legitimizer.
5458 && (CONSTANT_P (operands[1])
5459 || symbol_mentioned_p (operands[1])
5460 || label_mentioned_p (operands[1])))
5461 operands[1] = legitimize_pic_address (operands[1], SImode,
5462 (!can_create_pseudo_p ()
5469 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5470 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5471 ;; so this does not matter.
;; movt: write the upper 16 bits of operand 0 (tied to operand 1 via the
;; "0" constraint) from the high half of constant operand 2.  Expressed
;; as lo_sum because of the backwards HIGH/LO_SUM convention noted above.
5472 (define_insn "*arm_movt"
5473 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5474 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5475 (match_operand:SI 2 "general_operand" "i")))]
5477 "movt%?\t%0, #:upper16:%c2"
5478 [(set_attr "predicable" "yes")
5479 (set_attr "length" "4")]
;; ARM-state SImode move for non-IWMMXT, non-VFP configurations.
;; Alternatives: mov reg/imm, mvn of inverted imm, movw (j), literal-pool
;; load, and str — reflected in the insn/type/pool_range attributes.
5482 (define_insn "*arm_movsi_insn"
5483 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5484 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5485 "TARGET_ARM && ! TARGET_IWMMXT
5486 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5487 && ( register_operand (operands[0], SImode)
5488 || register_operand (operands[1], SImode))"
5496 [(set_attr "type" "*,*,*,*,load1,store1")
5497 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5498 (set_attr "predicable" "yes")
5499 (set_attr "pool_range" "*,*,*,*,4096,*")
5500 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split an SImode constant that neither mov nor mvn can encode into a
;; multi-insn sequence via arm_split_constant; the (clobber (const_int 0))
;; pattern is discarded because the C body emits everything itself.
5504 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5505 (match_operand:SI 1 "const_int_operand" ""))]
5507 && (!(const_ok_for_arm (INTVAL (operands[1]))
5508 || const_ok_for_arm (~INTVAL (operands[1]))))"
5509 [(clobber (const_int 0))]
5511 arm_split_constant (SET, SImode, NULL_RTX,
5512 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move; nine alternatives covering lo-reg moves, small
;; immediates (I), negatable (J) and shiftable (K) constants, ldmia/stmia,
;; literal-pool loads, str, and hi/lo register moves (*l*h*k).
;; Condition-code effects per alternative are given by the "conds" attr.
5517 (define_insn "*thumb1_movsi_insn"
5518 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5519 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5521 && ( register_operand (operands[0], SImode)
5522 || register_operand (operands[1], SImode))"
5533 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5534 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5535 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5536 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Thumb-1: load a J-class (negative) constant as "mov positive; neg".
;; Operand 1 is negated here; operand 2 is a scratch (or operand 0 itself
;; when no pseudo can be created).
5539 [(set (match_operand:SI 0 "register_operand" "")
5540 (match_operand:SI 1 "const_int_operand" ""))]
5541 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5542 [(set (match_dup 2) (match_dup 1))
5543 (set (match_dup 0) (neg:SI (match_dup 2)))]
5546 operands[1] = GEN_INT (- INTVAL (operands[1]));
5547 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; Thumb-1: load a K-class constant (an 8-bit value shifted left) as
;; "mov small; lsl #i".  The loop finds the smallest shift i such that
;; the value is an 8-bit mask shifted by i; zero-shift cases are skipped.
5552 [(set (match_operand:SI 0 "register_operand" "")
5553 (match_operand:SI 1 "const_int_operand" ""))]
5554 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5555 [(set (match_dup 2) (match_dup 1))
5556 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5559 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5560 unsigned HOST_WIDE_INT mask = 0xff;
5563 for (i = 0; i < 25; i++)
5564 if ((val & (mask << i)) == val)
5567 /* Don't split if the shift is zero. */
5571 operands[1] = GEN_INT (val >> i);
5572 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5573 operands[3] = GEN_INT (i);
5577 ;; When generating pic, we need to load the symbol offset into a register.
5578 ;; So that the optimizer does not confuse this with a normal symbol load
5579 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5580 ;; since that is the only type of relocation we can use.
5582 ;; Wrap calculation of the whole PIC address in a single pattern for the
5583 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5584 ;; a PIC address involves two loads from memory, so we want to CSE it
5585 ;; as often as possible.
5586 ;; This pattern will be split into one of the pic_load_addr_* patterns
5587 ;; and a move after GCSE optimizations.
5589 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; Whole-PIC-address computation kept as one pattern (see comment above)
;; so GCSE/PRE can CSE it; split later into pic_load_addr_* plus a load.
5590 (define_expand "calculate_pic_address"
5591 [(set (match_operand:SI 0 "register_operand" "")
5592 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5593 (unspec:SI [(match_operand:SI 2 "" "")]
5598 ;; Split calculate_pic_address into pic_load_addr_* and a move.
;; Split body for calculate_pic_address: first load the UNSPEC_PIC_SYM
;; offset into scratch operand 3, then load mem[pic_base + offset].
5600 [(set (match_operand:SI 0 "register_operand" "")
5601 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5602 (unspec:SI [(match_operand:SI 2 "" "")]
5605 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5606 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5607 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5610 ;; The rather odd constraints on the following are to force reload to leave
5611 ;; the insn alone, and to force the minipool generation pass to then move
5612 ;; the GOT symbol to memory.
;; Load a PIC symbol address from the literal pool (32-bit ISAs).  The
;; "mX" constraint (see comment above) keeps reload away and lets the
;; minipool pass place the GOT symbol in memory.
5614 (define_insn "pic_load_addr_32bit"
5615 [(set (match_operand:SI 0 "s_register_operand" "=r")
5616 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5617 "TARGET_32BIT && flag_pic"
5619 [(set_attr "type" "load1")
5620 (set_attr "pool_range" "4096")
5621 (set (attr "neg_pool_range")
5622 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 counterpart of pic_load_addr_32bit: lo-reg destination and a
;; 1024-byte literal-pool range.
5627 (define_insn "pic_load_addr_thumb1"
5628 [(set (match_operand:SI 0 "s_register_operand" "=l")
5629 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5630 "TARGET_THUMB1 && flag_pic"
5632 [(set_attr "type" "load1")
5633 (set (attr "pool_range") (const_int 1024))]
;; Emit the "LPICn:" label (numbered by operand 2) then add pc to the
;; tied input/output register — the Thumb pc-relative PIC base fixup.
5636 (define_insn "pic_add_dot_plus_four"
5637 [(set (match_operand:SI 0 "register_operand" "=r")
5638 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5640 (match_operand 2 "" "")]
5644 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5645 INTVAL (operands[2]));
5646 return \"add\\t%0, %|pc\";
5648 [(set_attr "length" "2")]
;; ARM-state PIC base fixup: emit the "LPICn:" label, then a predicable
;; "add %0, pc, %1".
5651 (define_insn "pic_add_dot_plus_eight"
5652 [(set (match_operand:SI 0 "register_operand" "=r")
5653 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5655 (match_operand 2 "" "")]
5659 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5660 INTVAL (operands[2]));
5661 return \"add%?\\t%0, %|pc, %1\";
5663 [(set_attr "predicable" "yes")]
;; Fused form of pic_add_dot_plus_eight followed by a load (see peephole
;; comment below): emit the "LPICn:" label then "ldr %0, [pc, %1]".
5666 (define_insn "tls_load_dot_plus_eight"
5667 [(set (match_operand:SI 0 "register_operand" "=r")
5668 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5670 (match_operand 2 "" "")]
5674 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5675 INTVAL (operands[2]));
5676 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5678 [(set_attr "predicable" "yes")]
5681 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5682 ;; followed by a load. These sequences can be crunched down to
5683 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole body: when the intermediate address register (operand 0) dies
;; after the load, collapse pic_add_dot_plus_eight + load into
;; tls_load_dot_plus_eight (ARM state only).
5686 [(set (match_operand:SI 0 "register_operand" "")
5687 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5689 (match_operand 1 "" "")]
5691 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5692 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5694 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load mem[base + UNSPEC_PIC_OFFSET sym] with a single
;; indexed ldr (ARM state).
5701 (define_insn "pic_offset_arm"
5702 [(set (match_operand:SI 0 "register_operand" "=r")
5703 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5704 (unspec:SI [(match_operand:SI 2 "" "X")]
5705 UNSPEC_PIC_OFFSET))))]
5706 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5707 "ldr%?\\t%0, [%1,%2]"
5708 [(set_attr "type" "load1")]
;; After a builtin longjmp the PIC register must be reloaded; r3 (scratch
;; mask 1UL << 3) is free because set/longjmp clobber it anyway.
5711 (define_expand "builtin_setjmp_receiver"
5712 [(label_ref (match_operand 0 "" ""))]
5716 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5718 if (arm_pic_register != INVALID_REGNUM)
5719 arm_load_pic_register (1UL << 3);
5723 ;; If copying one reg to another we can set the condition codes according to
5724 ;; its value. Such a move is common after a return from subroutine and the
5725 ;; result is being tested against zero.
;; Combined move + compare-against-zero: copy operand 1 to operand 0 and
;; set CC from its value in a single flag-setting instruction.
5727 (define_insn "*movsi_compare0"
5728 [(set (reg:CC CC_REGNUM)
5729 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5731 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5737 [(set_attr "conds" "set")]
5740 ;; Subroutine to store a half word from a register into memory.
5741 ;; Operand 0 is the source register (HImode)
5742 ;; Operand 1 is the destination address in a register (SImode)
5744 ;; In both this routine and the next, we must be careful not to spill
5745 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5746 ;; can generate unrecognizable rtl.
;; Store an HImode register as two QImode stores (little-endian order:
;; low byte at offset 0, high byte — extracted by ashiftrt 8 — at
;; offset 1).  A reg+non-constant-offset address is forced to a register
;; first to avoid the spill hazard described above.
5748 (define_expand "storehi"
5749 [;; store the low byte
5750 (set (match_operand 1 "" "") (match_dup 3))
5751 ;; extract the high byte
5753 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5754 ;; store the high byte
5755 (set (match_dup 4) (match_dup 5))]
5759 rtx op1 = operands[1];
5760 rtx addr = XEXP (op1, 0);
5761 enum rtx_code code = GET_CODE (addr);
5763 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5765 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5767 operands[4] = adjust_address (op1, QImode, 1);
5768 operands[1] = adjust_address (operands[1], QImode, 0);
5769 operands[3] = gen_lowpart (QImode, operands[0]);
5770 operands[0] = gen_lowpart (SImode, operands[0]);
5771 operands[2] = gen_reg_rtx (SImode);
5772 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart of storehi: the byte order of the two QImode
;; stores is swapped (high byte goes to the lower address).
5776 (define_expand "storehi_bigend"
5777 [(set (match_dup 4) (match_dup 3))
5779 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5780 (set (match_operand 1 "" "") (match_dup 5))]
5784 rtx op1 = operands[1];
5785 rtx addr = XEXP (op1, 0);
5786 enum rtx_code code = GET_CODE (addr);
5788 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5790 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5792 operands[4] = adjust_address (op1, QImode, 1);
5793 operands[1] = adjust_address (operands[1], QImode, 0);
5794 operands[3] = gen_lowpart (QImode, operands[0]);
5795 operands[0] = gen_lowpart (SImode, operands[0]);
5796 operands[2] = gen_reg_rtx (SImode);
5797 operands[5] = gen_lowpart (QImode, operands[2]);
5801 ;; Subroutine to store a half word integer constant into memory.
;; Store an HImode constant as two byte stores.  The low and high bytes
;; of the constant are loaded into registers (shared when equal), with
;; the order chosen by BYTES_BIG_ENDIAN; awkward addresses are forced to
;; a register first.
5802 (define_expand "storeinthi"
5803 [(set (match_operand 0 "" "")
5804 (match_operand 1 "" ""))
5805 (set (match_dup 3) (match_dup 2))]
5809 HOST_WIDE_INT value = INTVAL (operands[1]);
5810 rtx addr = XEXP (operands[0], 0);
5811 rtx op0 = operands[0];
5812 enum rtx_code code = GET_CODE (addr);
5814 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5816 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5818 operands[1] = gen_reg_rtx (SImode);
5819 if (BYTES_BIG_ENDIAN)
5821 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5822 if ((value & 255) == ((value >> 8) & 255))
5823 operands[2] = operands[1];
5826 operands[2] = gen_reg_rtx (SImode);
5827 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5832 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5833 if ((value & 255) == ((value >> 8) & 255))
5834 operands[2] = operands[1];
5837 operands[2] = gen_reg_rtx (SImode);
5838 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5842 operands[3] = adjust_address (op0, QImode, 1);
5843 operands[0] = adjust_address (operands[0], QImode, 0);
5844 operands[2] = gen_lowpart (QImode, operands[2]);
5845 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction HImode store (strh), available from ARMv4 on
;; 32-bit targets; the source is forced into a register if needed.
5849 (define_expand "storehi_single_op"
5850 [(set (match_operand:HI 0 "memory_operand" "")
5851 (match_operand:HI 1 "general_operand" ""))]
5852 "TARGET_32BIT && arm_arch4"
5854 if (!s_register_operand (operands[1], HImode))
5855 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; HImode move expander.  Visible duties: route stores through
;; storehi_single_op / storeinthi / storehi(_bigend) as appropriate;
;; sign-extend constants into SImode registers (choosing the encodable
;; form); use zero_extendhisi2 for loads on ARMv4+; synthesize halfword
;; loads from word loads on pre-ARMv4 when alignment allows, otherwise
;; fall back to movhi_bytes; plus Thumb-2 and Thumb-1 variants and the
;; large-constant-during-reload case.  Several dispatch lines are
;; missing from this listing — do not infer control flow across gaps.
5859 (define_expand "movhi"
5860 [(set (match_operand:HI 0 "general_operand" "")
5861 (match_operand:HI 1 "general_operand" ""))]
5866 if (can_create_pseudo_p ())
5868 if (GET_CODE (operands[0]) == MEM)
5872 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5875 if (GET_CODE (operands[1]) == CONST_INT)
5876 emit_insn (gen_storeinthi (operands[0], operands[1]));
5879 if (GET_CODE (operands[1]) == MEM)
5880 operands[1] = force_reg (HImode, operands[1]);
5881 if (BYTES_BIG_ENDIAN)
5882 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5884 emit_insn (gen_storehi (operands[1], operands[0]));
5888 /* Sign extend a constant, and keep it in an SImode reg. */
5889 else if (GET_CODE (operands[1]) == CONST_INT)
5891 rtx reg = gen_reg_rtx (SImode);
5892 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5894 /* If the constant is already valid, leave it alone. */
5895 if (!const_ok_for_arm (val))
5897 /* If setting all the top bits will make the constant
5898 loadable in a single instruction, then set them.
5899 Otherwise, sign extend the number. */
5901 if (const_ok_for_arm (~(val | ~0xffff)))
5903 else if (val & 0x8000)
5907 emit_insn (gen_movsi (reg, GEN_INT (val)));
5908 operands[1] = gen_lowpart (HImode, reg);
5910 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5911 && GET_CODE (operands[1]) == MEM)
5913 rtx reg = gen_reg_rtx (SImode);
5915 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5916 operands[1] = gen_lowpart (HImode, reg);
5918 else if (!arm_arch4)
5920 if (GET_CODE (operands[1]) == MEM)
5923 rtx offset = const0_rtx;
5924 rtx reg = gen_reg_rtx (SImode);
5926 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5927 || (GET_CODE (base) == PLUS
5928 && (GET_CODE (offset = XEXP (base, 1))
5930 && ((INTVAL(offset) & 1) != 1)
5931 && GET_CODE (base = XEXP (base, 0)) == REG))
5932 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5936 new_rtx = widen_memory_access (operands[1], SImode,
5937 ((INTVAL (offset) & ~3)
5938 - INTVAL (offset)));
5939 emit_insn (gen_movsi (reg, new_rtx));
5940 if (((INTVAL (offset) & 2) != 0)
5941 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5943 rtx reg2 = gen_reg_rtx (SImode);
5945 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5950 emit_insn (gen_movhi_bytes (reg, operands[1]));
5952 operands[1] = gen_lowpart (HImode, reg);
5956 /* Handle loading a large integer during reload. */
5957 else if (GET_CODE (operands[1]) == CONST_INT
5958 && !const_ok_for_arm (INTVAL (operands[1]))
5959 && !const_ok_for_arm (~INTVAL (operands[1])))
5961 /* Writing a constant to memory needs a scratch, which should
5962 be handled with SECONDARY_RELOADs. */
5963 gcc_assert (GET_CODE (operands[0]) == REG);
5965 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5966 emit_insn (gen_movsi (operands[0], operands[1]));
5970 else if (TARGET_THUMB2)
5972 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5973 if (can_create_pseudo_p ())
5975 if (GET_CODE (operands[0]) != REG)
5976 operands[1] = force_reg (HImode, operands[1]);
5977 /* Zero extend a constant, and keep it in an SImode reg. */
5978 else if (GET_CODE (operands[1]) == CONST_INT)
5980 rtx reg = gen_reg_rtx (SImode);
5981 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5983 emit_insn (gen_movsi (reg, GEN_INT (val)));
5984 operands[1] = gen_lowpart (HImode, reg);
5988 else /* TARGET_THUMB1 */
5990 if (can_create_pseudo_p ())
5992 if (GET_CODE (operands[1]) == CONST_INT)
5994 rtx reg = gen_reg_rtx (SImode);
5996 emit_insn (gen_movsi (reg, operands[1]));
5997 operands[1] = gen_lowpart (HImode, reg);
6000 /* ??? We shouldn't really get invalid addresses here, but this can
6001 happen if we are passed a SP (never OK for HImode/QImode) or
6002 virtual register (also rejected as illegitimate for HImode/QImode)
6003 relative address. */
6004 /* ??? This should perhaps be fixed elsewhere, for instance, in
6005 fixup_stack_1, by checking for other kinds of invalid addresses,
6006 e.g. a bare reference to a virtual register. This may confuse the
6007 alpha though, which must handle this case differently. */
6008 if (GET_CODE (operands[0]) == MEM
6009 && !memory_address_p (GET_MODE (operands[0]),
6010 XEXP (operands[0], 0)))
6012 = replace_equiv_address (operands[0],
6013 copy_to_reg (XEXP (operands[0], 0)));
6015 if (GET_CODE (operands[1]) == MEM
6016 && !memory_address_p (GET_MODE (operands[1]),
6017 XEXP (operands[1], 0)))
6019 = replace_equiv_address (operands[1],
6020 copy_to_reg (XEXP (operands[1], 0)));
6022 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6024 rtx reg = gen_reg_rtx (SImode);
6026 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6027 operands[1] = gen_lowpart (HImode, reg);
6030 if (GET_CODE (operands[0]) == MEM)
6031 operands[1] = force_reg (HImode, operands[1]);
6033 else if (GET_CODE (operands[1]) == CONST_INT
6034 && !satisfies_constraint_I (operands[1]))
6036 /* Handle loading a large integer during reload. */
6038 /* Writing a constant to memory needs a scratch, which should
6039 be handled with SECONDARY_RELOADs. */
6040 gcc_assert (GET_CODE (operands[0]) == REG);
6042 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6043 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move.  The load alternative rewrites an SP-based index
;; address (strh/ldrh cannot use SP as index) by first copying SP into
;; the destination register and re-basing the address on it.
6050 (define_insn "*thumb1_movhi_insn"
6051 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6052 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6054 && ( register_operand (operands[0], HImode)
6055 || register_operand (operands[1], HImode))"
6057 switch (which_alternative)
6059 case 0: return \"add %0, %1, #0\";
6060 case 2: return \"strh %1, %0\";
6061 case 3: return \"mov %0, %1\";
6062 case 4: return \"mov %0, %1\";
6063 case 5: return \"mov %0, %1\";
6064 default: gcc_unreachable ();
6066 /* The stack pointer can end up being taken as an index register.
6067 Catch this case here and deal with it. */
6068 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6069 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
6070 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM
6073 ops[0] = operands[0];
6074 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6076 output_asm_insn (\"mov %0, %1\", ops);
6078 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6081 return \"ldrh %0, %1\";
6083 [(set_attr "length" "2,4,2,2,2,2")
6084 (set_attr "type" "*,load1,store1,*,*,*")
6085 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; Pre-ARMv4 halfword load: fetch the two bytes separately, zero-extend
;; each, and combine as (high << 8) | low; operands 4/5 select which
;; byte is the high half depending on BYTES_BIG_ENDIAN.
6088 (define_expand "movhi_bytes"
6089 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6091 (zero_extend:SI (match_dup 6)))
6092 (set (match_operand:SI 0 "" "")
6093 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6098 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6100 mem1 = change_address (operands[1], QImode, addr);
6101 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
6102 operands[0] = gen_lowpart (SImode, operands[0]);
6104 operands[2] = gen_reg_rtx (SImode);
6105 operands[3] = gen_reg_rtx (SImode);
6108 if (BYTES_BIG_ENDIAN)
6110 operands[4] = operands[2];
6111 operands[5] = operands[3];
6115 operands[4] = operands[3];
6116 operands[5] = operands[2];
;; Big-endian HImode load: rotate the containing word, arithmetic-shift
;; right by 16, and take the low half as the HImode result.
6121 (define_expand "movhi_bigend"
6123 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6126 (ashiftrt:SI (match_dup 2) (const_int 16)))
6127 (set (match_operand:HI 0 "s_register_operand" "")
6131 operands[2] = gen_reg_rtx (SImode);
6132 operands[3] = gen_reg_rtx (SImode);
6133 operands[4] = gen_lowpart (HImode, operands[3]);
6137 ;; Pattern to recognize insn generated default case above
;; ARMv4+ HImode move: mov/mvn for registers and constants, strh/ldrh
;; for memory (the %(h%) spelling handles unified/divided syntax).
6138 (define_insn "*movhi_insn_arch4"
6139 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6140 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6143 && (register_operand (operands[0], HImode)
6144 || register_operand (operands[1], HImode))"
6146 mov%?\\t%0, %1\\t%@ movhi
6147 mvn%?\\t%0, #%B1\\t%@ movhi
6148 str%(h%)\\t%1, %0\\t%@ movhi
6149 ldr%(h%)\\t%0, %1\\t%@ movhi"
6150 [(set_attr "type" "*,*,store1,load1")
6151 (set_attr "predicable" "yes")
6152 (set_attr "insn" "mov,mvn,*,*")
6153 (set_attr "pool_range" "*,*,*,256")
6154 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate-only HImode move (mov or mvn); memory cases are
;; handled elsewhere (movhi_bytes expander on pre-ARMv4).
6157 (define_insn "*movhi_bytes"
6158 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
6159 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
6162 mov%?\\t%0, %1\\t%@ movhi
6163 mvn%?\\t%0, #%B1\\t%@ movhi"
6164 [(set_attr "predicable" "yes")
6165 (set_attr "insn" "mov,mvn")]
;; Thumb HImode store with a DImode scratch clobber; currently only the
;; easy case (valid address, lo source reg) is implemented — see the
;; XXX note for the remaining cases.
6168 (define_expand "thumb_movhi_clobber"
6169 [(set (match_operand:HI 0 "memory_operand" "")
6170 (match_operand:HI 1 "register_operand" ""))
6171 (clobber (match_operand:DI 2 "register_operand" ""))]
6174 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6175 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6177 emit_insn (gen_movhi (operands[0], operands[1]));
6180 /* XXX Fixme, need to handle other cases here as well. */
6185 ;; We use a DImode scratch because we may occasionally need an additional
6186 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6187 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; Reload helper for HImode stores; a DImode scratch provides the extra
;; temporary needed for non-offsettable addresses (see comment above).
;; Dispatches to arm_reload_out_hi or thumb_reload_out_hi.
6188 (define_expand "reload_outhi"
6189 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6190 (match_operand:HI 1 "s_register_operand" "r")
6191 (match_operand:DI 2 "s_register_operand" "=&l")])]
6194 arm_reload_out_hi (operands);
6196 thumb_reload_out_hi (operands);
;; Reload helper for HImode loads, mirroring reload_outhi.
;; NOTE(review): the Thumb path calls thumb_reload_out_hi, not a
;; *_in_hi routine — present in the visible text; confirm intended.
6201 (define_expand "reload_inhi"
6202 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6203 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6204 (match_operand:DI 2 "s_register_operand" "=&r")])]
6208 arm_reload_in_hi (operands);
6210 thumb_reload_out_hi (operands);
;; QImode move expander: load constants through an SImode register
;; (masked to 8 bits for Thumb so a movs encoding is likely), repair
;; illegitimate SP/virtual-register-relative addresses, prefer
;; zero_extendqisi2 for optimized loads, force mem destinations to take
;; a register source, and handle large constants during reload.
6214 (define_expand "movqi"
6215 [(set (match_operand:QI 0 "general_operand" "")
6216 (match_operand:QI 1 "general_operand" ""))]
6219 /* Everything except mem = const or mem = mem can be done easily */
6221 if (can_create_pseudo_p ())
6223 if (GET_CODE (operands[1]) == CONST_INT)
6225 rtx reg = gen_reg_rtx (SImode);
6227 /* For thumb we want an unsigned immediate, then we are more likely
6228 to be able to use a movs insn. */
6230 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6232 emit_insn (gen_movsi (reg, operands[1]));
6233 operands[1] = gen_lowpart (QImode, reg);
6238 /* ??? We shouldn't really get invalid addresses here, but this can
6239 happen if we are passed a SP (never OK for HImode/QImode) or
6240 virtual register (also rejected as illegitimate for HImode/QImode)
6241 relative address. */
6242 /* ??? This should perhaps be fixed elsewhere, for instance, in
6243 fixup_stack_1, by checking for other kinds of invalid addresses,
6244 e.g. a bare reference to a virtual register. This may confuse the
6245 alpha though, which must handle this case differently. */
6246 if (GET_CODE (operands[0]) == MEM
6247 && !memory_address_p (GET_MODE (operands[0]),
6248 XEXP (operands[0], 0)))
6250 = replace_equiv_address (operands[0],
6251 copy_to_reg (XEXP (operands[0], 0)));
6252 if (GET_CODE (operands[1]) == MEM
6253 && !memory_address_p (GET_MODE (operands[1]),
6254 XEXP (operands[1], 0)))
6256 = replace_equiv_address (operands[1],
6257 copy_to_reg (XEXP (operands[1], 0)));
6260 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6262 rtx reg = gen_reg_rtx (SImode);
6264 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6265 operands[1] = gen_lowpart (QImode, reg);
6268 if (GET_CODE (operands[0]) == MEM)
6269 operands[1] = force_reg (QImode, operands[1]);
6271 else if (TARGET_THUMB
6272 && GET_CODE (operands[1]) == CONST_INT
6273 && !satisfies_constraint_I (operands[1]))
6275 /* Handle loading a large integer during reload. */
6277 /* Writing a constant to memory needs a scratch, which should
6278 be handled with SECONDARY_RELOADs. */
6279 gcc_assert (GET_CODE (operands[0]) == REG);
6281 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6282 emit_insn (gen_movsi (operands[0], operands[1]));
;; 32-bit QImode move; Uu alternatives are Thumb-2-only (arch attr "t2")
;; 16-bit encodings, the rest are 4-byte ARM/Thumb-2 forms.
6289 (define_insn "*arm_movqi_insn"
6290 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6291 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6293 && ( register_operand (operands[0], QImode)
6294 || register_operand (operands[1], QImode))"
6302 [(set_attr "type" "*,*,load1,store1,load1,store1")
6303 (set_attr "insn" "mov,mvn,*,*,*,*")
6304 (set_attr "predicable" "yes")
6305 (set_attr "arch" "any,any,t2,t2,any,any")
6306 (set_attr "length" "4,4,2,2,4,4")]
;; Thumb-1 QImode move: lo-reg copy, literal load, strb/ldrb, hi/lo
;; register moves, and small immediate; all 2-byte encodings.
6309 (define_insn "*thumb1_movqi_insn"
6310 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6311 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6313 && ( register_operand (operands[0], QImode)
6314 || register_operand (operands[1], QImode))"
6322 [(set_attr "length" "2")
6323 (set_attr "type" "*,load1,store1,*,*,*")
6324 (set_attr "insn" "*,*,*,mov,mov,mov")
6325 (set_attr "pool_range" "*,32,*,*,*,*")
6326 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; HFmode (__fp16) move expander: force mem destinations to take a
;; register source; on Thumb-1, additionally force non-register
;; destinations while pseudos can still be created.
6329 (define_expand "movhf"
6330 [(set (match_operand:HF 0 "general_operand" "")
6331 (match_operand:HF 1 "general_operand" ""))]
6336 if (GET_CODE (operands[0]) == MEM)
6337 operands[1] = force_reg (HFmode, operands[1]);
6339 else /* TARGET_THUMB1 */
6341 if (can_create_pseudo_p ())
6343 if (GET_CODE (operands[0]) != REG)
6344 operands[1] = force_reg (HFmode, operands[1]);
;; 32-bit HFmode move without hardware FP16: ldrh/strh/mov, plus a
;; constant alternative that materializes the raw 16-bit pattern either
;; with movw (Thumb-2-capable cores) or mov+orr of the two byte halves.
6350 (define_insn "*arm32_movhf"
6351 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6352 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6353 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6354 && ( s_register_operand (operands[0], HFmode)
6355 || s_register_operand (operands[1], HFmode))"
6357 switch (which_alternative)
6359 case 0: /* ARM register from memory */
6360 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6361 case 1: /* memory from ARM register */
6362 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6363 case 2: /* ARM register from ARM register */
6364 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6365 case 3: /* ARM register from constant */
6371 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6372 bits = real_to_target (NULL, &r, HFmode);
6373 ops[0] = operands[0];
6374 ops[1] = GEN_INT (bits);
6375 ops[2] = GEN_INT (bits & 0xff00);
6376 ops[3] = GEN_INT (bits & 0x00ff);
6378 if (arm_arch_thumb2)
6379 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6381 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6388 [(set_attr "conds" "unconditional")
6389 (set_attr "type" "load1,store1,*,*")
6390 (set_attr "insn" "*,*,mov,mov")
6391 (set_attr "length" "4,4,4,8")
6392 (set_attr "predicable" "yes")]
;; Thumb-1 HFmode move.  The load alternative distinguishes a
;; constant-pool address (label_ref, possibly plus offset), which needs
;; a word ldr, from an ordinary address, which uses ldrh.
6395 (define_insn "*thumb1_movhf"
6396 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6397 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6399 && ( s_register_operand (operands[0], HFmode)
6400 || s_register_operand (operands[1], HFmode))"
6402 switch (which_alternative)
6407 gcc_assert (GET_CODE(operands[1]) == MEM);
6408 addr = XEXP (operands[1], 0);
6409 if (GET_CODE (addr) == LABEL_REF
6410 || (GET_CODE (addr) == CONST
6411 && GET_CODE (XEXP (addr, 0)) == PLUS
6412 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6413 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6415 /* Constant pool entry. */
6416 return \"ldr\\t%0, %1\";
6418 return \"ldrh\\t%0, %1\";
6420 case 2: return \"strh\\t%1, %0\";
6421 default: return \"mov\\t%0, %1\";
6424 [(set_attr "length" "2")
6425 (set_attr "type" "*,load1,store1,*,*")
6426 (set_attr "insn" "mov,*,*,mov,mov")
6427 (set_attr "pool_range" "*,1020,*,*,*")
6428 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; SFmode move expander, same shape as movhf: force mem destinations to
;; take a register source, stricter on Thumb-1 while pseudos exist.
6430 (define_expand "movsf"
6431 [(set (match_operand:SF 0 "general_operand" "")
6432 (match_operand:SF 1 "general_operand" ""))]
6437 if (GET_CODE (operands[0]) == MEM)
6438 operands[1] = force_reg (SFmode, operands[1]);
6440 else /* TARGET_THUMB1 */
6442 if (can_create_pseudo_p ())
6444 if (GET_CODE (operands[0]) != REG)
6445 operands[1] = force_reg (SFmode, operands[1]);
6451 ;; Transform a floating-point move of a constant into a core register into
6452 ;; an SImode operation.
;; Split a CONST_DOUBLE SF move into a core register into an SImode move
;; of the bit pattern; FAIL if either lowpart cannot be formed.
6454 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6455 (match_operand:SF 1 "immediate_operand" ""))]
6458 && GET_CODE (operands[1]) == CONST_DOUBLE"
6459 [(set (match_dup 2) (match_dup 3))]
6461 operands[2] = gen_lowpart (SImode, operands[0]);
6462 operands[3] = gen_lowpart (SImode, operands[1]);
6463 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move through core registers: mov, literal-pool ldr,
;; or str; pool ranges differ between ARM and Thumb-2 (see *_neg_pool_range).
6468 (define_insn "*arm_movsf_soft_insn"
6469 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6470 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6472 && TARGET_SOFT_FLOAT
6473 && (GET_CODE (operands[0]) != MEM
6474 || register_operand (operands[1], SFmode))"
6477 ldr%?\\t%0, %1\\t%@ float
6478 str%?\\t%1, %0\\t%@ float"
6479 [(set_attr "predicable" "yes")
6480 (set_attr "type" "*,load1,store1")
6481 (set_attr "insn" "mov,*,*")
6482 (set_attr "pool_range" "*,4096,*")
6483 (set_attr "arm_neg_pool_range" "*,4084,*")
6484 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6487 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move through core registers; all 2-byte encodings,
;; with a 1020-byte literal-pool range for the mF alternative.
6488 (define_insn "*thumb1_movsf_insn"
6489 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6490 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6492 && ( register_operand (operands[0], SFmode)
6493 || register_operand (operands[1], SFmode))"
6502 [(set_attr "length" "2")
6503 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6504 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6505 (set_attr "insn" "*,*,*,*,*,mov,mov")
6506 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; DFmode move expander, same shape as movsf/movhf: force mem
;; destinations to take a register source; stricter on Thumb.
6509 (define_expand "movdf"
6510 [(set (match_operand:DF 0 "general_operand" "")
6511 (match_operand:DF 1 "general_operand" ""))]
6516 if (GET_CODE (operands[0]) == MEM)
6517 operands[1] = force_reg (DFmode, operands[1]);
6519 else /* TARGET_THUMB */
6521 if (can_create_pseudo_p ())
6523 if (GET_CODE (operands[0]) != REG)
6524 operands[1] = force_reg (DFmode, operands[1]);
6530 ;; Reloading a df mode value stored in integer regs to memory can require a
;; Reload helper for storing an integer-register DF value to memory.
;; POST_INC/PRE_DEC addresses fall through to a DImode move; PRE_INC is
;; turned into an explicit add of 8 first; other address forms compute
;; the effective address into scratch operand 2, store through it, and
;; undo the adjustment afterwards for POST_DEC.
6532 (define_expand "reload_outdf"
6533 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6534 (match_operand:DF 1 "s_register_operand" "r")
6535 (match_operand:SI 2 "s_register_operand" "=&r")]
6539 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6542 operands[2] = XEXP (operands[0], 0);
6543 else if (code == POST_INC || code == PRE_DEC)
6545 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6546 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6547 emit_insn (gen_movdi (operands[0], operands[1]));
6550 else if (code == PRE_INC)
6552 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6554 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6557 else if (code == POST_DEC)
6558 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6560 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6561 XEXP (XEXP (operands[0], 0), 1)));
6563 emit_insn (gen_rtx_SET (VOIDmode,
6564 replace_equiv_address (operands[0], operands[2]),
6567 if (code == POST_DEC)
6568 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DF move for 32-bit targets: the value lives in a pair of
;; core registers.  Register/constant alternatives have lengths 8-16;
;; the memory alternatives use output_move_double and allow literal-pool
;; loads (range 1020).  NOTE(review): some output-template lines are
;; elided in this extract.
6574 (define_insn "*movdf_soft_insn"
6575 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6576 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6577 "TARGET_32BIT && TARGET_SOFT_FLOAT
6578 && ( register_operand (operands[0], DFmode)
6579 || register_operand (operands[1], DFmode))"
6581 switch (which_alternative)
6588 return output_move_double (operands, true, NULL);
6591 [(set_attr "length" "8,12,16,8,8")
6592 (set_attr "type" "*,*,*,load2,store2")
6593 (set_attr "pool_range" "*,*,*,1020,*")
6594 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6595 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6598 ;;; ??? This should have alternatives for constants.
6599 ;;; ??? This was originally identical to the movdi_insn pattern.
6600 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6601 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DF move.  Register-pair moves are emitted in an order chosen
;; so the halves do not overwrite each other when source and destination
;; overlap; memory alternatives use ldmia/stmia or an explicit two-word
;; str sequence.  NOTE(review): the insn condition and a few template
;; lines are elided in this extract.
6602 (define_insn "*thumb_movdf_insn"
6603 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6604 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6606 && ( register_operand (operands[0], DFmode)
6607 || register_operand (operands[1], DFmode))"
6609 switch (which_alternative)
;; Choose the copy order so the first half written is not a half still
;; needed as a source.
6613 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6614 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6615 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6617 return \"ldmia\\t%1, {%0, %H0}\";
6619 return \"stmia\\t%0, {%1, %H1}\";
6621 return thumb_load_double_from_address (operands);
;; Store: build an address for the second word (offset 4) and emit two strs.
6623 operands[2] = gen_rtx_MEM (SImode,
6624 plus_constant (XEXP (operands[0], 0), 4));
6625 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6628 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6629 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6630 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6633 [(set_attr "length" "4,2,2,6,4,4")
6634 (set_attr "type" "*,load2,store2,load2,store2,*")
6635 (set_attr "insn" "*,*,*,*,*,mov")
6636 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XF-mode (extended-precision FPA) move expander; only enabled for the
;; legacy FPA coprocessor.  Stores to memory force the source into a
;; register first.
6639 (define_expand "movxf"
6640 [(set (match_operand:XF 0 "general_operand" "")
6641 (match_operand:XF 1 "general_operand" ""))]
6642 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6644 if (GET_CODE (operands[0]) == MEM)
6645 operands[1] = force_reg (XFmode, operands[1]);
6651 ;; load- and store-multiple insns
6652 ;; The arm can load/store any set of registers, provided that they are in
6653 ;; ascending order, but these expanders assume a contiguous set.
;; load_multiple expander: load INTVAL(operands[2]) consecutive core
;; registers starting at operands[0] from memory operands[1].  Bails out
;; (FAIL path elided in this extract) unless the request is 2..14 fixed
;; core registers that fit below LAST_ARM_REGNUM.
6655 (define_expand "load_multiple"
6656 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6657 (match_operand:SI 1 "" ""))
6658 (use (match_operand:SI 2 "" ""))])]
6661 HOST_WIDE_INT offset = 0;
6663 /* Support only fixed point registers. */
6664 if (GET_CODE (operands[2]) != CONST_INT
6665 || INTVAL (operands[2]) > 14
6666 || INTVAL (operands[2]) < 2
6667 || GET_CODE (operands[1]) != MEM
6668 || GET_CODE (operands[0]) != REG
6669 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6670 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
;; Build the parallel of loads; the base address is forced into a register.
6674 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6675 INTVAL (operands[2]),
6676 force_reg (SImode, XEXP (operands[1], 0)),
6677 FALSE, operands[1], &offset);
;; store_multiple expander: mirror image of load_multiple — store
;; INTVAL(operands[2]) consecutive core registers starting at
;; operands[1] into memory operands[0], with the same 2..14 contiguous
;; core-register restrictions.
6680 (define_expand "store_multiple"
6681 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6682 (match_operand:SI 1 "" ""))
6683 (use (match_operand:SI 2 "" ""))])]
6686 HOST_WIDE_INT offset = 0;
6688 /* Support only fixed point registers. */
6689 if (GET_CODE (operands[2]) != CONST_INT
6690 || INTVAL (operands[2]) > 14
6691 || INTVAL (operands[2]) < 2
6692 || GET_CODE (operands[1]) != REG
6693 || GET_CODE (operands[0]) != MEM
6694 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6695 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
;; Build the parallel of stores; the base address is forced into a register.
6699 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6700 INTVAL (operands[2]),
6701 force_reg (SImode, XEXP (operands[0], 0)),
6702 FALSE, operands[0], &offset);
6706 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6707 ;; We could let this apply for blocks of less than this, but it clobbers so
6708 ;; many registers that there is then probably a better way.
;; Block-move (memcpy) expander.  On 32-bit targets it defers to
;; arm_gen_movmemqi; on Thumb-1 it only handles word-aligned blocks of
;; at most 48 bytes via thumb_expand_movmemqi.  NOTE(review): the
;; FAIL/DONE control lines are elided in this extract.
6710 (define_expand "movmemqi"
6711 [(match_operand:BLK 0 "general_operand" "")
6712 (match_operand:BLK 1 "general_operand" "")
6713 (match_operand:SI 2 "const_int_operand" "")
6714 (match_operand:SI 3 "const_int_operand" "")]
6719 if (arm_gen_movmemqi (operands))
6723 else /* TARGET_THUMB1 */
;; Thumb-1: require 4-byte alignment and a size small enough to be
;; worth expanding inline.
6725 if ( INTVAL (operands[3]) != 4
6726 || INTVAL (operands[2]) > 48)
6729 thumb_expand_movmemqi (operands);
6735 ;; Thumb block-move insns
;; Thumb block move of 12 bytes (three words): copies three SImode words
;; from *operands[3] to *operands[2] and leaves both pointers advanced
;; by 12 in operands[0]/operands[1].  Needs three low-reg scratches.
6737 (define_insn "movmem12b"
6738 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6739 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6740 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6741 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6742 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6743 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6744 (set (match_operand:SI 0 "register_operand" "=l")
6745 (plus:SI (match_dup 2) (const_int 12)))
6746 (set (match_operand:SI 1 "register_operand" "=l")
6747 (plus:SI (match_dup 3) (const_int 12)))
6748 (clobber (match_scratch:SI 4 "=&l"))
6749 (clobber (match_scratch:SI 5 "=&l"))
6750 (clobber (match_scratch:SI 6 "=&l"))]
6752 "* return thumb_output_move_mem_multiple (3, operands);"
6753 [(set_attr "length" "4")
6754 ; This isn't entirely accurate... It loads as well, but in terms of
6755 ; scheduling the following insn it is better to consider it as a store
6756 (set_attr "type" "store3")]
;; Thumb block move of 8 bytes (two words): same shape as movmem12b but
;; copies two words and advances both pointers by 8; two low-reg
;; scratches.
6759 (define_insn "movmem8b"
6760 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6761 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6762 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6763 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6764 (set (match_operand:SI 0 "register_operand" "=l")
6765 (plus:SI (match_dup 2) (const_int 8)))
6766 (set (match_operand:SI 1 "register_operand" "=l")
6767 (plus:SI (match_dup 3) (const_int 8)))
6768 (clobber (match_scratch:SI 4 "=&l"))
6769 (clobber (match_scratch:SI 5 "=&l"))]
6771 "* return thumb_output_move_mem_multiple (2, operands);"
6772 [(set_attr "length" "4")
6773 ; This isn't entirely accurate... It loads as well, but in terms of
6774 ; scheduling the following insn it is better to consider it as a store
6775 (set_attr "type" "store2")]
6780 ;; Compare & branch insns
6781 ;; The range calculations are based as follows:
6782 ;; For forward branches, the address calculation returns the address of
6783 ;; the next instruction. This is 2 beyond the branch instruction.
6784 ;; For backward branches, the address calculation returns the address of
6785 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6786 ;; instruction for the shortest sequence, and 4 before the branch instruction
6787 ;; if we have to jump around an unconditional branch.
6788 ;; To the basic branch range the PC offset must be added (this is +4).
6789 ;; So for forward branches we have
6790 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6791 ;; And for backward branches we have
6792 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6794 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6795 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch expander.  32-bit targets go through
;; cbranch_cc (legitimizing operand 2 as an arm_add_operand); Thumb-1
;; either uses the cmn-based cbranchsi4_scratch for negatable constants
;; or forces operand 2 into a register.  NOTE(review): the branch
;; between the 32-bit and Thumb-1 paths is partly elided here.
6797 (define_expand "cbranchsi4"
6798 [(set (pc) (if_then_else
6799 (match_operator 0 "expandable_comparison_operator"
6800 [(match_operand:SI 1 "s_register_operand" "")
6801 (match_operand:SI 2 "nonmemory_operand" "")])
6802 (label_ref (match_operand 3 "" ""))
6804 "TARGET_THUMB1 || TARGET_32BIT"
6808 if (!arm_add_operand (operands[2], SImode))
6809 operands[2] = force_reg (SImode, operands[2]);
6810 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; Thumb-1: a negatable constant can use the scratch (cmn) pattern.
6814 if (thumb1_cmpneg_operand (operands[2], SImode))
6816 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6817 operands[3], operands[0]));
6820 if (!thumb1_cmp_operand (operands[2], SImode))
6821 operands[2] = force_reg (SImode, operands[2]);
6824 ;; A pattern to recognize a special situation and optimize for it.
6825 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6826 ;; due to the available addressing modes. Hence, convert a signed comparison
6827 ;; with zero into an unsigned comparison with 127 if possible.
;; QImode compare-with-zero branch: rewrite a signed GE/LT comparison of
;; a memory byte against 0 as an unsigned LEU/GTU comparison of its
;; zero-extension against 127, then re-dispatch through cbranchsi4.
6828 (define_expand "cbranchqi4"
6829 [(set (pc) (if_then_else
6830 (match_operator 0 "lt_ge_comparison_operator"
6831 [(match_operand:QI 1 "memory_operand" "")
6832 (match_operand:QI 2 "const0_operand" "")])
6833 (label_ref (match_operand 3 "" ""))
6838 xops[1] = gen_reg_rtx (SImode);
6839 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6840 xops[2] = GEN_INT (127);
;; GE 0 on the signed byte == LEU 127 on the zero-extended value.
6841 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6842 VOIDmode, xops[1], xops[2]);
6843 xops[3] = operands[3];
6844 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch: hard-float only; delegate to cbranch_cc.
6848 (define_expand "cbranchsf4"
6849 [(set (pc) (if_then_else
6850 (match_operator 0 "expandable_comparison_operator"
6851 [(match_operand:SF 1 "s_register_operand" "")
6852 (match_operand:SF 2 "arm_float_compare_operand" "")])
6853 (label_ref (match_operand 3 "" ""))
6855 "TARGET_32BIT && TARGET_HARD_FLOAT"
6856 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6857 operands[3])); DONE;"
;; DFmode compare-and-branch: as cbranchsf4, but excluded on
;; single-precision-only VFP units.
6860 (define_expand "cbranchdf4"
6861 [(set (pc) (if_then_else
6862 (match_operator 0 "expandable_comparison_operator"
6863 [(match_operand:DF 1 "s_register_operand" "")
6864 (match_operand:DF 2 "arm_float_compare_operand" "")])
6865 (label_ref (match_operand 3 "" ""))
6867 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6868 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6869 operands[3])); DONE;"
;; DImode compare-and-branch.  Comparisons arm_gen_compare_reg cannot
;; handle directly (GT/LE/GTU/LEU — the code labels are elided in this
;; extract) are flipped by swapping the operands and inverting the
;; condition; the result goes through cbranch_cc.
6872 (define_expand "cbranchdi4"
6873 [(set (pc) (if_then_else
6874 (match_operator 0 "expandable_comparison_operator"
6875 [(match_operand:DI 1 "cmpdi_operand" "")
6876 (match_operand:DI 2 "cmpdi_operand" "")])
6877 (label_ref (match_operand 3 "" ""))
6881 rtx swap = NULL_RTX;
6882 enum rtx_code code = GET_CODE (operands[0]);
6884 /* We should not have two constants. */
6885 gcc_assert (GET_MODE (operands[1]) == DImode
6886 || GET_MODE (operands[2]) == DImode);
6888 /* Flip unimplemented DImode comparisons to a form that
6889 arm_gen_compare_reg can handle. */
6893 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6895 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6897 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6899 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
;; Emit with operands swapped when we built a flipped comparison,
;; otherwise branch on the original comparison.
6904 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6907 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; Thumb-1 compare-and-branch.  Tracks the last comparison in
;; cfun->machine->thumb1_cc_* so a cmp identical to the previous one can
;; be elided; otherwise emits cmp and records it.  Branch length is
;; 4/6/8 depending on target distance (see the range comments above the
;; cbranch patterns); length 8 is a far jump via bl.
6913 (define_insn "cbranchsi4_insn"
6914 [(set (pc) (if_then_else
6915 (match_operator 0 "arm_comparison_operator"
6916 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6917 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6918 (label_ref (match_operand 3 "" ""))
6922 rtx t = cfun->machine->thumb1_cc_insn;
;; Decide whether the flags already hold this comparison's result.
6925 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6926 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6928 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6930 if (!noov_comparison_operator (operands[0], VOIDmode))
6933 else if (cfun->machine->thumb1_cc_mode != CCmode)
;; Must compare: emit cmp and remember it for later insns.
6938 output_asm_insn ("cmp\t%1, %2", operands);
6939 cfun->machine->thumb1_cc_insn = insn;
6940 cfun->machine->thumb1_cc_op0 = operands[1];
6941 cfun->machine->thumb1_cc_op1 = operands[2];
6942 cfun->machine->thumb1_cc_mode = CCmode;
6945 /* Ensure we emit the right type of condition code on the jump. */
6946 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
;; Pick the branch form by the computed length attribute.
6949 switch (get_attr_length (insn))
6951 case 4: return \"b%d0\\t%l3\";
6952 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6953 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6956 [(set (attr "far_jump")
6958 (eq_attr "length" "8")
6959 (const_string "yes")
6960 (const_string "no")))
6961 (set (attr "length")
6963 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6964 (le (minus (match_dup 3) (pc)) (const_int 256)))
6967 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6968 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 compare-and-branch against a negatable constant: computes
;; reg + (-const) with an add into a scratch (setting the flags), then
;; branches.  Length/far_jump selection mirrors cbranchsi4_insn.
6973 (define_insn "cbranchsi4_scratch"
6974 [(set (pc) (if_then_else
6975 (match_operator 4 "arm_comparison_operator"
6976 [(match_operand:SI 1 "s_register_operand" "l,0")
6977 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6978 (label_ref (match_operand 3 "" ""))
6980 (clobber (match_scratch:SI 0 "=l,l"))]
;; %n2 prints the negated constant, so this is effectively cmp %1, %2.
6983 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6985 switch (get_attr_length (insn))
6987 case 4: return \"b%d4\\t%l3\";
6988 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6989 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6992 [(set (attr "far_jump")
6994 (eq_attr "length" "8")
6995 (const_string "yes")
6996 (const_string "no")))
6997 (set (attr "length")
6999 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7000 (le (minus (match_dup 3) (pc)) (const_int 256)))
7003 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7004 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7009 ;; Two peepholes to generate subtract of 0 instead of a move if the
7010 ;; condition codes will be useful.
;; Peephole: replace a low-reg copy followed by a branch on the source
;; with "subs rd, rs, #0", which both copies and sets the flags.
;; NOTE(review): the (define_peephole2 ... header line is elided in this
;; extract — see the full file.
7012 [(set (match_operand:SI 0 "low_register_operand" "")
7013 (match_operand:SI 1 "low_register_operand" ""))
7015 (if_then_else (match_operator 2 "arm_comparison_operator"
7016 [(match_dup 1) (const_int 0)])
7017 (label_ref (match_operand 3 "" ""))
7020 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7022 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7023 (label_ref (match_dup 3))
7027 ;; Sigh! This variant shouldn't be needed, but combine often fails to
7028 ;; merge cases like this because the op1 is a hard register in
7029 ;; arm_class_likely_spilled_p.
;; Peephole variant: as above, but the branch tests the copy's
;; destination rather than its source (see the comment above about
;; combine and hard registers).  NOTE(review): the (define_peephole2 ...
;; header line is elided in this extract.
7031 [(set (match_operand:SI 0 "low_register_operand" "")
7032 (match_operand:SI 1 "low_register_operand" ""))
7034 (if_then_else (match_operator 2 "arm_comparison_operator"
7035 [(match_dup 0) (const_int 0)])
7036 (label_ref (match_operand 3 "" ""))
7039 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7041 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7042 (label_ref (match_dup 3))
;; Thumb-1 equality branch on reg == -reg: emit cmn (compare negative)
;; instead of materializing the negation.  Length/far_jump selection as
;; in the other cbranch patterns.
7046 (define_insn "*negated_cbranchsi4"
7049 (match_operator 0 "equality_operator"
7050 [(match_operand:SI 1 "s_register_operand" "l")
7051 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7052 (label_ref (match_operand 3 "" ""))
7056 output_asm_insn (\"cmn\\t%1, %2\", operands);
7057 switch (get_attr_length (insn))
7059 case 4: return \"b%d0\\t%l3\";
7060 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7061 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7064 [(set (attr "far_jump")
7066 (eq_attr "length" "8")
7067 (const_string "yes")
7068 (const_string "no")))
7069 (set (attr "length")
7071 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7072 (le (minus (match_dup 3) (pc)) (const_int 256)))
7075 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7076 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single bit of a register: shift the tested bit into the
;; sign position with lsl into a scratch (setting N), then branch on
;; the equality condition.  Bit number is operand 2.
7081 (define_insn "*tbit_cbranch"
7084 (match_operator 0 "equality_operator"
7085 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7087 (match_operand:SI 2 "const_int_operand" "i"))
7089 (label_ref (match_operand 3 "" ""))
7091 (clobber (match_scratch:SI 4 "=l"))]
7096 op[0] = operands[4];
7097 op[1] = operands[1];
;; Shift count that moves bit <operand 2> into bit 31.
7098 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7100 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7101 switch (get_attr_length (insn))
7103 case 4: return \"b%d0\\t%l3\";
7104 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7105 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7108 [(set (attr "far_jump")
7110 (eq_attr "length" "8")
7111 (const_string "yes")
7112 (const_string "no")))
7113 (set (attr "length")
7115 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7116 (le (minus (match_dup 3) (pc)) (const_int 256)))
7119 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7120 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low <operand 2> bits of a register: shift them to the
;; top with lsl into a scratch so the Z flag reflects whether they are
;; all zero, then branch on the equality condition.
7125 (define_insn "*tlobits_cbranch"
7128 (match_operator 0 "equality_operator"
7129 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7130 (match_operand:SI 2 "const_int_operand" "i")
7133 (label_ref (match_operand 3 "" ""))
7135 (clobber (match_scratch:SI 4 "=l"))]
7140 op[0] = operands[4];
7141 op[1] = operands[1];
;; Shift count that discards everything but the low <operand 2> bits.
7142 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7144 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7145 switch (get_attr_length (insn))
7147 case 4: return \"b%d0\\t%l3\";
7148 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7149 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7152 [(set (attr "far_jump")
7154 (eq_attr "length" "8")
7155 (const_string "yes")
7156 (const_string "no")))
7157 (set (attr "length")
7159 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7160 (le (minus (match_dup 3) (pc)) (const_int 256)))
7163 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7164 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (a AND b) ==/!= 0 using tst, which sets flags without a
;; destination register.  Operands are commutative ("%l").
7169 (define_insn "*tstsi3_cbranch"
7172 (match_operator 3 "equality_operator"
7173 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7174 (match_operand:SI 1 "s_register_operand" "l"))
7176 (label_ref (match_operand 2 "" ""))
7181 output_asm_insn (\"tst\\t%0, %1\", operands);
7182 switch (get_attr_length (insn))
7184 case 4: return \"b%d3\\t%l2\";
7185 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7186 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7189 [(set (attr "far_jump")
7191 (eq_attr "length" "8")
7192 (const_string "yes")
7193 (const_string "no")))
7194 (set (attr "length")
7196 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7197 (le (minus (match_dup 2) (pc)) (const_int 256)))
7200 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7201 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Decrement-and-branch: operand 0 = operand 2 - 1, branching when the
;; original value compared (ne/eq) against 1.  Alternatives handle a
;; low-reg destination directly, a high-reg destination via sub+mov
;; (mov lo->hi does not clobber the flags), and a memory destination
;; via sub+str; those need an extra scratch and +2 bytes of length.
7206 (define_insn "*cbranchne_decr1"
7208 (if_then_else (match_operator 3 "equality_operator"
7209 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7211 (label_ref (match_operand 4 "" ""))
7213 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7214 (plus:SI (match_dup 2) (const_int -1)))
7215 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
;; Build the branch condition: (orig ==/!= 1) after the decrement.
7220 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7222 VOIDmode, operands[2], const1_rtx);
7223 cond[1] = operands[4];
7225 if (which_alternative == 0)
7226 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7227 else if (which_alternative == 1)
7229 /* We must provide an alternative for a hi reg because reload
7230 cannot handle output reloads on a jump instruction, but we
7231 can't subtract into that. Fortunately a mov from lo to hi
7232 does not clobber the condition codes. */
7233 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7234 output_asm_insn (\"mov\\t%0, %1\", operands);
7238 /* Similarly, but the target is memory. */
7239 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7240 output_asm_insn (\"str\\t%1, %0\", operands);
;; Non-first alternatives carry 2 extra bytes; normalize before the
;; length switch.
7243 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7246 output_asm_insn (\"b%d0\\t%l1\", cond);
7249 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7250 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7252 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7253 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7257 [(set (attr "far_jump")
7259 (ior (and (eq (symbol_ref ("which_alternative"))
7261 (eq_attr "length" "8"))
7262 (eq_attr "length" "10"))
7263 (const_string "yes")
7264 (const_string "no")))
7265 (set_attr_alternative "length"
7269 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7270 (le (minus (match_dup 4) (pc)) (const_int 256)))
7273 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7274 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7279 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7280 (le (minus (match_dup 4) (pc)) (const_int 256)))
7283 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7284 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7289 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7290 (le (minus (match_dup 4) (pc)) (const_int 256)))
7293 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7294 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7299 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7300 (le (minus (match_dup 4) (pc)) (const_int 256)))
7303 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7304 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Add-and-branch: operand 0 = operand 2 + operand 3, branching on the
;; result (EQ/NE/GE/LT only — those are the conditions the flags from
;; add/sub encode directly).  Negative constants use sub with %n.
;; High-reg and memory destinations need an extra mov/str (+2 bytes),
;; mirroring *cbranchne_decr1.
7309 (define_insn "*addsi3_cbranch"
7312 (match_operator 4 "arm_comparison_operator"
7314 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7315 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7317 (label_ref (match_operand 5 "" ""))
7320 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7321 (plus:SI (match_dup 2) (match_dup 3)))
7322 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7324 && (GET_CODE (operands[4]) == EQ
7325 || GET_CODE (operands[4]) == NE
7326 || GET_CODE (operands[4]) == GE
7327 || GET_CODE (operands[4]) == LT)"
;; For the first two alternatives the sum goes straight to operand 0;
;; otherwise it goes via the scratch (operand 1).
7332 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7333 cond[1] = operands[2];
7334 cond[2] = operands[3];
7336 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7337 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7339 output_asm_insn (\"add\\t%0, %1, %2\", cond);
;; Copy the scratch to a high-reg or memory destination.
7341 if (which_alternative >= 2
7342 && which_alternative < 4)
7343 output_asm_insn (\"mov\\t%0, %1\", operands);
7344 else if (which_alternative >= 4)
7345 output_asm_insn (\"str\\t%1, %0\", operands);
7347 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7350 return \"b%d4\\t%l5\";
7352 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7354 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7358 [(set (attr "far_jump")
7360 (ior (and (lt (symbol_ref ("which_alternative"))
7362 (eq_attr "length" "8"))
7363 (eq_attr "length" "10"))
7364 (const_string "yes")
7365 (const_string "no")))
7366 (set (attr "length")
7368 (lt (symbol_ref ("which_alternative"))
7371 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7372 (le (minus (match_dup 5) (pc)) (const_int 256)))
7375 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7376 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7380 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7381 (le (minus (match_dup 5) (pc)) (const_int 256)))
7384 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7385 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Add-and-branch where only the flags matter (result discarded or kept
;; in a scratch).  Alternatives: cmp against the negated constant (%n),
;; cmn for a register addend, or an explicit add/sub into the scratch or
;; in place.  Same EQ/NE/GE/LT restriction as *addsi3_cbranch.
7390 (define_insn "*addsi3_cbranch_scratch"
7393 (match_operator 3 "arm_comparison_operator"
7395 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7396 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7398 (label_ref (match_operand 4 "" ""))
7400 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7402 && (GET_CODE (operands[3]) == EQ
7403 || GET_CODE (operands[3]) == NE
7404 || GET_CODE (operands[3]) == GE
7405 || GET_CODE (operands[3]) == LT)"
7408 switch (which_alternative)
;; reg + c compared to 0 == reg compared to -c.
7411 output_asm_insn (\"cmp\t%1, #%n2\", operands);
;; reg + reg compared to 0: cmn sets the same flags.
7414 output_asm_insn (\"cmn\t%1, %2\", operands);
7417 if (INTVAL (operands[2]) < 0)
7418 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7420 output_asm_insn (\"add\t%0, %1, %2\", operands);
7423 if (INTVAL (operands[2]) < 0)
7424 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7426 output_asm_insn (\"add\t%0, %0, %2\", operands);
7430 switch (get_attr_length (insn))
7433 return \"b%d3\\t%l4\";
7435 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7437 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7441 [(set (attr "far_jump")
7443 (eq_attr "length" "8")
7444 (const_string "yes")
7445 (const_string "no")))
7446 (set (attr "length")
7448 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7449 (le (minus (match_dup 4) (pc)) (const_int 256)))
7452 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7453 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7459 ;; Comparison and test insns
;; SImode compare.  First two alternatives are 16-bit Thumb-2 forms
;; (Py constraint / low regs); the others are 32-bit cmp/cmn forms.
;; NOTE(review): the output templates are elided in this extract.
7461 (define_insn "*arm_cmpsi_insn"
7462 [(set (reg:CC CC_REGNUM)
7463 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7464 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7471 [(set_attr "conds" "set")
7472 (set_attr "arch" "t2,t2,any,any")
7473 (set_attr "length" "2,2,4,4")]
;; Compare a register against a shifted register (cmp with shifted
;; operand 2).  Register-specified shift amounts are ARM-only ("a").
7476 (define_insn "*cmpsi_shiftsi"
7477 [(set (reg:CC CC_REGNUM)
7478 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7479 (match_operator:SI 3 "shift_operator"
7480 [(match_operand:SI 1 "s_register_operand" "r,r")
7481 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7484 [(set_attr "conds" "set")
7485 (set_attr "shift" "1")
7486 (set_attr "arch" "32,a")
7487 (set_attr "type" "alu_shift,alu_shift_reg")])
;; As *cmpsi_shiftsi but with the operands swapped; CC_SWP records that
;; the condition must be interpreted with the comparison reversed.
7489 (define_insn "*cmpsi_shiftsi_swp"
7490 [(set (reg:CC_SWP CC_REGNUM)
7491 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7492 [(match_operand:SI 1 "s_register_operand" "r,r")
7493 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7494 (match_operand:SI 0 "s_register_operand" "r,r")))]
7497 [(set_attr "conds" "set")
7498 (set_attr "shift" "1")
7499 (set_attr "arch" "32,a")
7500 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare a register against the negation of a shifted register — a
;; cmn with a shifted operand; only the Z flag (CC_Z) is meaningful.
7502 (define_insn "*arm_cmpsi_negshiftsi_si"
7503 [(set (reg:CC_Z CC_REGNUM)
7505 (neg:SI (match_operator:SI 1 "shift_operator"
7506 [(match_operand:SI 2 "s_register_operand" "r")
7507 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7508 (match_operand:SI 0 "s_register_operand" "r")))]
7511 [(set_attr "conds" "set")
;; Constant shift amounts are plain alu_shift; register shifts cost more.
7512 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7513 (const_string "alu_shift")
7514 (const_string "alu_shift_reg")))]
7517 ;; DImode comparisons. The generic code generates branches that
7518 ;; if-conversion cannot reduce to a conditional compare, so we do
;; Signed DImode compare: cmp on the low words, then sbcs on the high
;; words into a scratch; CC_NCV records which flags are valid.
7521 (define_insn "*arm_cmpdi_insn"
7522 [(set (reg:CC_NCV CC_REGNUM)
7523 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7524 (match_operand:DI 1 "arm_di_operand" "rDi")))
7525 (clobber (match_scratch:SI 2 "=r"))]
7526 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7527 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7528 [(set_attr "conds" "set")
7529 (set_attr "length" "8")]
;; Unsigned DImode compare: compare the high words first, then the low
;; words only if the high words were equal (cmpeq); CC_CZ flags.
7532 (define_insn "*arm_cmpdi_unsigned"
7533 [(set (reg:CC_CZ CC_REGNUM)
7534 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7535 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7537 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7538 [(set_attr "conds" "set")
7539 (set_attr "length" "8")]
;; DImode compare against zero: OR the two halves into a scratch with
;; flag-setting orr (%. adds the 's'); only Z is meaningful.
7542 (define_insn "*arm_cmpdi_zero"
7543 [(set (reg:CC_Z CC_REGNUM)
7544 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7546 (clobber (match_scratch:SI 1 "=r"))]
7548 "orr%.\\t%1, %Q0, %R0"
7549 [(set_attr "conds" "set")]
;; Thumb version of the DImode compare against zero: low-reg orr (which
;; always sets flags on Thumb-1), 2 bytes.
7552 (define_insn "*thumb_cmpdi_zero"
7553 [(set (reg:CC_Z CC_REGNUM)
7554 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7556 (clobber (match_scratch:SI 1 "=l"))]
7558 "orr\\t%1, %Q0, %R0"
7559 [(set_attr "conds" "set")
7560 (set_attr "length" "2")]
7563 ;; Cirrus SF compare instruction
;; Cirrus Maverick SF compare: cfcmps writes the flags via r15.
7564 (define_insn "*cirrus_cmpsf"
7565 [(set (reg:CCFP CC_REGNUM)
7566 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7567 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7568 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7569 "cfcmps%?\\tr15, %V0, %V1"
7570 [(set_attr "type" "mav_farith")
7571 (set_attr "cirrus" "compare")]
7574 ;; Cirrus DF compare instruction
;; Cirrus Maverick DF compare (cfcmpd), analogous to *cirrus_cmpsf.
7575 (define_insn "*cirrus_cmpdf"
7576 [(set (reg:CCFP CC_REGNUM)
7577 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7578 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7579 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7580 "cfcmpd%?\\tr15, %V0, %V1"
7581 [(set_attr "type" "mav_farith")
7582 (set_attr "cirrus" "compare")]
;; Cirrus Maverick 64-bit integer compare (cfcmp64) in MVF registers.
7585 (define_insn "*cirrus_cmpdi"
7586 [(set (reg:CC CC_REGNUM)
7587 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7588 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7589 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7590 "cfcmp64%?\\tr15, %V0, %V1"
7591 [(set_attr "type" "mav_farith")
7592 (set_attr "cirrus" "compare")]
7595 ; This insn allows redundant compares to be removed by cse, nothing should
7596 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7597 ; is deleted later on. The match_dup will match the mode here, so that
7598 ; mode changes of the condition codes aren't lost by this even though we don't
7599 ; specify what they are.
;; No-op self-set of the CC register (see the comment above): lets CSE
;; remove redundant compares; emits only an assembler comment, length 0.
7601 (define_insn "*deleted_compare"
7602 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7604 "\\t%@ deleted compare"
7605 [(set_attr "conds" "set")
7606 (set_attr "length" "0")]
7610 ;; Conditional branch insns
;; Generic compare-and-branch helper: emit the compare with
;; arm_gen_compare_reg and rewrite the branch to test the resulting CC
;; register against zero.
7612 (define_expand "cbranch_cc"
7614 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7615 (match_operand 2 "" "")])
7616 (label_ref (match_operand 3 "" ""))
7619 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7620 operands[1], operands[2]);
7621 operands[2] = const0_rtx;"
7625 ;; Patterns to match conditional branch insns.
;; Conditional branch on an already-computed CC register.  Cooperates
;; with the ccfsm conditional-execution state machine (states 1/2 mean
;; the branch is being converted to conditional execution).
7628 (define_insn "*arm_cond_branch"
7630 (if_then_else (match_operator 1 "arm_comparison_operator"
7631 [(match_operand 2 "cc_register" "") (const_int 0)])
7632 (label_ref (match_operand 0 "" ""))
7636 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7638 arm_ccfsm_state += 2;
7641 return \"b%d1\\t%l0\";
7643 [(set_attr "conds" "use")
7644 (set_attr "type" "branch")
;; Thumb-2 short-range conditional branches are 2 bytes.
7645 (set (attr "length")
7647 (and (match_test "TARGET_THUMB2")
7648 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7649 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As *arm_cond_branch but the label is on the else-arm, so the
;; condition is printed inverted (%D1).
7654 (define_insn "*arm_cond_branch_reversed"
7656 (if_then_else (match_operator 1 "arm_comparison_operator"
7657 [(match_operand 2 "cc_register" "") (const_int 0)])
7659 (label_ref (match_operand 0 "" ""))))]
7662 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7664 arm_ccfsm_state += 2;
7667 return \"b%D1\\t%l0\";
7669 [(set_attr "conds" "use")
7670 (set_attr "type" "branch")
7671 (set (attr "length")
7673 (and (match_test "TARGET_THUMB2")
7674 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7675 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Generic store-flag helper: emit the compare with arm_gen_compare_reg
;; and rewrite the scc operation to test the CC register against zero.
7684 (define_expand "cstore_cc"
7685 [(set (match_operand:SI 0 "s_register_operand" "")
7686 (match_operator:SI 1 "" [(match_operand 2 "" "")
7687 (match_operand 3 "" "")]))]
7689 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7690 operands[2], operands[3]);
7691 operands[3] = const0_rtx;"
;; Store-flag: conditionally set the destination to 0 or 1 with a
;; mov/mov pair predicated on opposite conditions.
7694 (define_insn "*mov_scc"
7695 [(set (match_operand:SI 0 "s_register_operand" "=r")
7696 (match_operator:SI 1 "arm_comparison_operator"
7697 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7699 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7700 [(set_attr "conds" "use")
7701 (set_attr "insn" "mov")
7702 (set_attr "length" "8")]
;; Negated store-flag: 0 or -1 (mvn #0 == all-ones when the condition
;; holds).
7705 (define_insn "*mov_negscc"
7706 [(set (match_operand:SI 0 "s_register_operand" "=r")
7707 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7708 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7710 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7711 [(set_attr "conds" "use")
7712 (set_attr "insn" "mov")
7713 (set_attr "length" "8")]
;; Bitwise-NOT store-flag: 0 or ~1 (mvn #1 == -2 when the condition
;; holds).
7716 (define_insn "*mov_notscc"
7717 [(set (match_operand:SI 0 "s_register_operand" "=r")
7718 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7719 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7721 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7722 [(set_attr "conds" "use")
7723 (set_attr "insn" "mov")
7724 (set_attr "length" "8")]
;; SImode store-flag expander.  On 32-bit targets it defers to cstore_cc;
;; the remaining code open-codes Thumb-1 sequences (add/ior/shift tricks,
;; neg/adc, sub/sbc, addgeu, ltu helpers) for the comparison codes that
;; have compact two/three-instruction forms, and FAILs on the rest.
7727 (define_expand "cstoresi4"
7728 [(set (match_operand:SI 0 "s_register_operand" "")
7729 (match_operator:SI 1 "expandable_comparison_operator"
7730 [(match_operand:SI 2 "s_register_operand" "")
7731 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7732 "TARGET_32BIT || TARGET_THUMB1"
7734 rtx op3, scratch, scratch2;
7738 if (!arm_add_operand (operands[3], SImode))
7739 operands[3] = force_reg (SImode, operands[3]);
7740 emit_insn (gen_cstore_cc (operands[0], operands[1],
7741 operands[2], operands[3]));
7745 if (operands[3] == const0_rtx)
7747 switch (GET_CODE (operands[1]))
7750 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7754 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7758 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7759 NULL_RTX, 0, OPTAB_WIDEN);
7760 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7761 NULL_RTX, 0, OPTAB_WIDEN);
7762 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7763 operands[0], 1, OPTAB_WIDEN);
7767 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7769 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7770 NULL_RTX, 1, OPTAB_WIDEN);
7774 scratch = expand_binop (SImode, ashr_optab, operands[2],
7775 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7776 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7777 NULL_RTX, 0, OPTAB_WIDEN);
7778 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7782 /* LT is handled by generic code.  No need for unsigned with 0.  */
7789 switch (GET_CODE (operands[1]))
7792 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7793 NULL_RTX, 0, OPTAB_WIDEN);
7794 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7798 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7799 NULL_RTX, 0, OPTAB_WIDEN);
7800 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7804 op3 = force_reg (SImode, operands[3]);
7806 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7807 NULL_RTX, 1, OPTAB_WIDEN);
7808 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7809 NULL_RTX, 0, OPTAB_WIDEN);
7810 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7816 if (!thumb1_cmp_operand (op3, SImode))
7817 op3 = force_reg (SImode, op3);
7818 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7819 NULL_RTX, 0, OPTAB_WIDEN);
7820 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7821 NULL_RTX, 1, OPTAB_WIDEN);
7822 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7827 op3 = force_reg (SImode, operands[3]);
7828 scratch = force_reg (SImode, const0_rtx);
7829 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7835 if (!thumb1_cmp_operand (op3, SImode))
7836 op3 = force_reg (SImode, op3);
7837 scratch = force_reg (SImode, const0_rtx);
7838 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7844 if (!thumb1_cmp_operand (op3, SImode))
7845 op3 = force_reg (SImode, op3);
7846 scratch = gen_reg_rtx (SImode);
7847 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7851 op3 = force_reg (SImode, operands[3]);
7852 scratch = gen_reg_rtx (SImode);
7853 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7856 /* No good sequences for GT, LT.  */
;; SFmode store-flag (hard float): route through cstore_cc.
7863 (define_expand "cstoresf4"
7864 [(set (match_operand:SI 0 "s_register_operand" "")
7865 (match_operator:SI 1 "expandable_comparison_operator"
7866 [(match_operand:SF 2 "s_register_operand" "")
7867 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7868 "TARGET_32BIT && TARGET_HARD_FLOAT"
7869 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7870 operands[2], operands[3])); DONE;"
;; DFmode store-flag; excluded when VFP only supports single precision.
7873 (define_expand "cstoredf4"
7874 [(set (match_operand:SI 0 "s_register_operand" "")
7875 (match_operator:SI 1 "expandable_comparison_operator"
7876 [(match_operand:DF 2 "s_register_operand" "")
7877 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7878 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7879 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7880 operands[2], operands[3])); DONE;"
;; DImode store-flag.  Comparisons arm_gen_compare_reg cannot handle
;; directly (GT/LE/GTU/LEU) are flipped by swapping the operands.
7883 (define_expand "cstoredi4"
7884 [(set (match_operand:SI 0 "s_register_operand" "")
7885 (match_operator:SI 1 "expandable_comparison_operator"
7886 [(match_operand:DI 2 "cmpdi_operand" "")
7887 (match_operand:DI 3 "cmpdi_operand" "")]))]
7890 rtx swap = NULL_RTX;
7891 enum rtx_code code = GET_CODE (operands[1]);
7893 /* We should not have two constants.  */
7894 gcc_assert (GET_MODE (operands[2]) == DImode
7895 || GET_MODE (operands[3]) == DImode);
7897 /* Flip unimplemented DImode comparisons to a form that
7898 arm_gen_compare_reg can handle.  */
7902 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7904 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7906 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7908 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7913 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7916 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Thumb-1 helper: operand 0 = (operand 1 == 0); allocates the scratch
;; register clobbered by the matching insn below.
7922 (define_expand "cstoresi_eq0_thumb1"
7924 [(set (match_operand:SI 0 "s_register_operand" "")
7925 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7927 (clobber (match_dup:SI 2))])]
7929 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 helper: operand 0 = (operand 1 != 0); likewise with a scratch.
7932 (define_expand "cstoresi_ne0_thumb1"
7934 [(set (match_operand:SI 0 "s_register_operand" "")
7935 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7937 (clobber (match_dup:SI 2))])]
7939 "operands[2] = gen_reg_rtx (SImode);"
;; EQ-to-zero via neg/adc: carry from the negation folds to 0/1.
7942 (define_insn "*cstoresi_eq0_thumb1_insn"
7943 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7944 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7946 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7949 neg\\t%0, %1\;adc\\t%0, %0, %1
7950 neg\\t%2, %1\;adc\\t%0, %1, %2"
7951 [(set_attr "length" "4")]
;; NE-to-zero via sub #1 / sbc.
7954 (define_insn "*cstoresi_ne0_thumb1_insn"
7955 [(set (match_operand:SI 0 "s_register_operand" "=l")
7956 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7958 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7960 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7961 [(set_attr "length" "4")]
7964 ;; Used as part of the expansion of thumb ltu and gtu sequences
7965 (define_insn "cstoresi_nltu_thumb1"
7966 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7967 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7968 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7970 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7971 [(set_attr "length" "4")]
;; LTU store-flag: split into the negated (nltu) form above followed by
;; a negation back to 0/1.
7974 (define_insn_and_split "cstoresi_ltu_thumb1"
7975 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7976 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7977 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7982 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7983 (set (match_dup 0) (neg:SI (match_dup 3)))]
7984 "operands[3] = gen_reg_rtx (SImode);"
7985 [(set_attr "length" "4")]
7988 ;; Used as part of the expansion of thumb les sequence.
;; cmp sets C = (%3 >= %4 unsigned); adc then adds it into %1 + %2.
7989 (define_insn "thumb1_addsi3_addgeu"
7990 [(set (match_operand:SI 0 "s_register_operand" "=l")
7991 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7992 (match_operand:SI 2 "s_register_operand" "l"))
7993 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7994 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7996 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7997 [(set_attr "length" "4")]
8001 ;; Conditional move insns
;; SImode conditional move: build the CC register with arm_gen_compare_reg
;; and rewrite operand 1 to test it against zero.
8003 (define_expand "movsicc"
8004 [(set (match_operand:SI 0 "s_register_operand" "")
8005 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8006 (match_operand:SI 2 "arm_not_operand" "")
8007 (match_operand:SI 3 "arm_not_operand" "")))]
8011 enum rtx_code code = GET_CODE (operands[1]);
8014 if (code == UNEQ || code == LTGT)
8017 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8018 XEXP (operands[1], 1));
8019 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move (hard float); forces operand 3 into a register
;; unless it is a valid FPA add operand.
8023 (define_expand "movsfcc"
8024 [(set (match_operand:SF 0 "s_register_operand" "")
8025 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8026 (match_operand:SF 2 "s_register_operand" "")
8027 (match_operand:SF 3 "nonmemory_operand" "")))]
8028 "TARGET_32BIT && TARGET_HARD_FLOAT"
8031 enum rtx_code code = GET_CODE (operands[1]);
8034 if (code == UNEQ || code == LTGT)
8037 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8038 Otherwise, ensure it is a valid FP add operand */
8039 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8040 || (!arm_float_add_operand (operands[3], SFmode)))
8041 operands[3] = force_reg (SFmode, operands[3]);
8043 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8044 XEXP (operands[1], 1));
8045 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move (FPA or double-precision VFP).
8049 (define_expand "movdfcc"
8050 [(set (match_operand:DF 0 "s_register_operand" "")
8051 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8052 (match_operand:DF 2 "s_register_operand" "")
8053 (match_operand:DF 3 "arm_float_add_operand" "")))]
8054 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8057 enum rtx_code code = GET_CODE (operands[1]);
8060 if (code == UNEQ || code == LTGT)
8063 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8064 XEXP (operands[1], 1));
8065 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Conditionally-executed SImode move.  Alternatives pair mov/mvn of
;; register or immediate forms in one or both arms of the if_then_else.
8069 (define_insn "*movsicc_insn"
8070 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8072 (match_operator 3 "arm_comparison_operator"
8073 [(match_operand 4 "cc_register" "") (const_int 0)])
8074 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8075 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8082 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8083 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8084 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8085 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8086 [(set_attr "length" "4,4,4,4,8,8,8,8")
8087 (set_attr "conds" "use")
8088 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
;; Soft-float SFmode conditional move: plain conditional register moves,
;; with either arm allowed to already be in the destination.
8091 (define_insn "*movsfcc_soft_insn"
8092 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8093 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8094 [(match_operand 4 "cc_register" "") (const_int 0)])
8095 (match_operand:SF 1 "s_register_operand" "0,r")
8096 (match_operand:SF 2 "s_register_operand" "r,0")))]
8097 "TARGET_ARM && TARGET_SOFT_FLOAT"
8101 [(set_attr "conds" "use")
8102 (set_attr "insn" "mov")]
8106 ;; Jump and linkage insns
;; Unconditional jump expander.
8108 (define_expand "jump"
8110 (label_ref (match_operand 0 "" "")))]
;; ARM/Thumb-2 unconditional branch; length shrinks to a short encoding on
;; Thumb-2 when the target is within range.
8115 (define_insn "*arm_jump"
8117 (label_ref (match_operand 0 "" "")))]
8121 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8123 arm_ccfsm_state += 2;
8126 return \"b%?\\t%l0\";
8129 [(set_attr "predicable" "yes")
8130 (set (attr "length")
8132 (and (match_test "TARGET_THUMB2")
8133 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8134 (le (minus (match_dup 0) (pc)) (const_int 2048))))
;; Thumb-1 branch; when the target is out of the 2-byte encoding's range a
;; BL is emitted as a far jump (tracked by the far_jump attribute).
8139 (define_insn "*thumb_jump"
8141 (label_ref (match_operand 0 "" "")))]
8144 if (get_attr_length (insn) == 2)
8146 return \"bl\\t%l0\\t%@ far jump\";
8148 [(set (attr "far_jump")
8150 (eq_attr "length" "4")
8151 (const_string "yes")
8152 (const_string "no")))
8153 (set (attr "length")
8155 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8156 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Call expander: long calls load the callee address into a register first,
;; then call_internal is emitted via arm_emit_call_insn.
8161 (define_expand "call"
8162 [(parallel [(call (match_operand 0 "memory_operand" "")
8163 (match_operand 1 "general_operand" ""))
8164 (use (match_operand 2 "" ""))
8165 (clobber (reg:SI LR_REGNUM))])]
8171 /* In an untyped call, we can get NULL for operand 2.  */
8172 if (operands[2] == NULL_RTX)
8173 operands[2] = const0_rtx;
8175 /* Decide if we should generate indirect calls by loading the
8176 32-bit address of the callee into a register before performing the
8178 callee = XEXP (operands[0], 0);
8179 if (GET_CODE (callee) == SYMBOL_REF
8180 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8182 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8184 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8185 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Pattern generated by the "call" expander above.
8190 (define_expand "call_internal"
8191 [(parallel [(call (match_operand 0 "memory_operand" "")
8192 (match_operand 1 "general_operand" ""))
8193 (use (match_operand 2 "" ""))
8194 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call for ARMv5+ (BLX).
8196 (define_insn "*call_reg_armv5"
8197 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8198 (match_operand 1 "" ""))
8199 (use (match_operand 2 "" ""))
8200 (clobber (reg:SI LR_REGNUM))]
8201 "TARGET_ARM && arm_arch5"
8203 [(set_attr "type" "call")]
;; Indirect call for pre-v5 ARM; output_call emits up to three insns.
8206 (define_insn "*call_reg_arm"
8207 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8208 (match_operand 1 "" ""))
8209 (use (match_operand 2 "" ""))
8210 (clobber (reg:SI LR_REGNUM))]
8211 "TARGET_ARM && !arm_arch5"
8213 return output_call (operands);
8215 ;; length is worst case, normally it is only two
8216 [(set_attr "length" "12")
8217 (set_attr "type" "call")]
8221 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8222 ;; considered a function call by the branch predictor of some cores (PR40887).
8223 ;; Falls back to blx rN (*call_reg_armv5).
8225 (define_insn "*call_mem"
8226 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8227 (match_operand 1 "" ""))
8228 (use (match_operand 2 "" ""))
8229 (clobber (reg:SI LR_REGNUM))]
8230 "TARGET_ARM && !arm_arch5"
8232 return output_call_mem (operands);
8234 [(set_attr "length" "12")
8235 (set_attr "type" "call")]
;; Thumb-1 indirect call on v5 (BLX).
8238 (define_insn "*call_reg_thumb1_v5"
8239 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8240 (match_operand 1 "" ""))
8241 (use (match_operand 2 "" ""))
8242 (clobber (reg:SI LR_REGNUM))]
8243 "TARGET_THUMB1 && arm_arch5"
8245 [(set_attr "length" "2")
8246 (set_attr "type" "call")]
;; Thumb-1 indirect call, pre-v5; may route through the interworking
;; call-via helpers when caller interworking is enabled.
8249 (define_insn "*call_reg_thumb1"
8250 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8251 (match_operand 1 "" ""))
8252 (use (match_operand 2 "" ""))
8253 (clobber (reg:SI LR_REGNUM))]
8254 "TARGET_THUMB1 && !arm_arch5"
8257 if (!TARGET_CALLER_INTERWORKING)
8258 return thumb_call_via_reg (operands[0]);
8259 else if (operands[1] == const0_rtx)
8260 return \"bl\\t%__interwork_call_via_%0\";
8261 else if (frame_pointer_needed)
8262 return \"bl\\t%__interwork_r7_call_via_%0\";
8264 return \"bl\\t%__interwork_r11_call_via_%0\";
8266 [(set_attr "type" "call")]
;; call_value expander; mirrors "call" above but with a value destination
;; in operand 0 and the callee in operand 1.
8269 (define_expand "call_value"
8270 [(parallel [(set (match_operand 0 "" "")
8271 (call (match_operand 1 "memory_operand" "")
8272 (match_operand 2 "general_operand" "")))
8273 (use (match_operand 3 "" ""))
8274 (clobber (reg:SI LR_REGNUM))])]
8280 /* In an untyped call, we can get NULL for operand 3.  */
8281 if (operands[3] == 0)
8282 operands[3] = const0_rtx;
8284 /* Decide if we should generate indirect calls by loading the
8285 32-bit address of the callee into a register before performing the
8287 callee = XEXP (operands[1], 0);
8288 if (GET_CODE (callee) == SYMBOL_REF
8289 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8291 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8293 pat = gen_call_value_internal (operands[0], operands[1],
8294 operands[2], operands[3]);
8295 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Pattern generated by the "call_value" expander above.
8300 (define_expand "call_value_internal"
8301 [(parallel [(set (match_operand 0 "" "")
8302 (call (match_operand 1 "memory_operand" "")
8303 (match_operand 2 "general_operand" "")))
8304 (use (match_operand 3 "" ""))
8305 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning indirect call, ARMv5+ (BLX).
8307 (define_insn "*call_value_reg_armv5"
8308 [(set (match_operand 0 "" "")
8309 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8310 (match_operand 2 "" "")))
8311 (use (match_operand 3 "" ""))
8312 (clobber (reg:SI LR_REGNUM))]
8313 "TARGET_ARM && arm_arch5"
8315 [(set_attr "type" "call")]
;; Value-returning indirect call, pre-v5 ARM.
8318 (define_insn "*call_value_reg_arm"
8319 [(set (match_operand 0 "" "")
8320 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8321 (match_operand 2 "" "")))
8322 (use (match_operand 3 "" ""))
8323 (clobber (reg:SI LR_REGNUM))]
8324 "TARGET_ARM && !arm_arch5"
8326 return output_call (&operands[1]);
8328 [(set_attr "length" "12")
8329 (set_attr "type" "call")]
8332 ;; Note: see *call_mem
8334 (define_insn "*call_value_mem"
8335 [(set (match_operand 0 "" "")
8336 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8337 (match_operand 2 "" "")))
8338 (use (match_operand 3 "" ""))
8339 (clobber (reg:SI LR_REGNUM))]
8340 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8342 return output_call_mem (&operands[1]);
8344 [(set_attr "length" "12")
8345 (set_attr "type" "call")]
;; Value-returning Thumb-1 indirect call, v5 (BLX).
8348 (define_insn "*call_value_reg_thumb1_v5"
8349 [(set (match_operand 0 "" "")
8350 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8351 (match_operand 2 "" "")))
8352 (use (match_operand 3 "" ""))
8353 (clobber (reg:SI LR_REGNUM))]
8354 "TARGET_THUMB1 && arm_arch5"
8356 [(set_attr "length" "2")
8357 (set_attr "type" "call")]
;; Value-returning Thumb-1 indirect call, pre-v5; may use the
;; interworking call-via helpers.
8360 (define_insn "*call_value_reg_thumb1"
8361 [(set (match_operand 0 "" "")
8362 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8363 (match_operand 2 "" "")))
8364 (use (match_operand 3 "" ""))
8365 (clobber (reg:SI LR_REGNUM))]
8366 "TARGET_THUMB1 && !arm_arch5"
8369 if (!TARGET_CALLER_INTERWORKING)
8370 return thumb_call_via_reg (operands[1]);
8371 else if (operands[2] == const0_rtx)
8372 return \"bl\\t%__interwork_call_via_%1\";
8373 else if (frame_pointer_needed)
8374 return \"bl\\t%__interwork_r7_call_via_%1\";
8376 return \"bl\\t%__interwork_r11_call_via_%1\";
8378 [(set_attr "type" "call")]
8381 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8382 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct BL to a symbol (possibly via the PLT); rejected for long calls.
8384 (define_insn "*call_symbol"
8385 [(call (mem:SI (match_operand:SI 0 "" ""))
8386 (match_operand 1 "" ""))
8387 (use (match_operand 2 "" ""))
8388 (clobber (reg:SI LR_REGNUM))]
8390 && (GET_CODE (operands[0]) == SYMBOL_REF)
8391 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8394 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8396 [(set_attr "type" "call")]
;; Likewise, returning a value.
8399 (define_insn "*call_value_symbol"
8400 [(set (match_operand 0 "" "")
8401 (call (mem:SI (match_operand:SI 1 "" ""))
8402 (match_operand:SI 2 "" "")))
8403 (use (match_operand 3 "" ""))
8404 (clobber (reg:SI LR_REGNUM))]
8406 && (GET_CODE (operands[1]) == SYMBOL_REF)
8407 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8410 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8412 [(set_attr "type" "call")]
;; Direct symbol call, fixed 4-byte BL encoding.
8415 (define_insn "*call_insn"
8416 [(call (mem:SI (match_operand:SI 0 "" ""))
8417 (match_operand:SI 1 "" ""))
8418 (use (match_operand 2 "" ""))
8419 (clobber (reg:SI LR_REGNUM))]
8421 && GET_CODE (operands[0]) == SYMBOL_REF
8422 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8424 [(set_attr "length" "4")
8425 (set_attr "type" "call")]
;; Likewise, returning a value.
8428 (define_insn "*call_value_insn"
8429 [(set (match_operand 0 "" "")
8430 (call (mem:SI (match_operand 1 "" ""))
8431 (match_operand 2 "" "")))
8432 (use (match_operand 3 "" ""))
8433 (clobber (reg:SI LR_REGNUM))]
8435 && GET_CODE (operands[1]) == SYMBOL_REF
8436 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8438 [(set_attr "length" "4")
8439 (set_attr "type" "call")]
8442 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail-call expander: like "call" but with no LR clobber.
8443 (define_expand "sibcall"
8444 [(parallel [(call (match_operand 0 "memory_operand" "")
8445 (match_operand 1 "general_operand" ""))
8447 (use (match_operand 2 "" ""))])]
8451 if (operands[2] == NULL_RTX)
8452 operands[2] = const0_rtx;
;; Value-returning tail-call expander.
8456 (define_expand "sibcall_value"
8457 [(parallel [(set (match_operand 0 "" "")
8458 (call (match_operand 1 "memory_operand" "")
8459 (match_operand 2 "general_operand" "")))
8461 (use (match_operand 3 "" ""))])]
8465 if (operands[3] == NULL_RTX)
8466 operands[3] = const0_rtx;
;; Tail call to a symbol: a plain branch (B), possibly via the PLT.
8470 (define_insn "*sibcall_insn"
8471 [(call (mem:SI (match_operand:SI 0 "" "X"))
8472 (match_operand 1 "" ""))
8474 (use (match_operand 2 "" ""))]
8475 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8477 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8479 [(set_attr "type" "call")]
;; Likewise, returning a value.
8482 (define_insn "*sibcall_value_insn"
8483 [(set (match_operand 0 "" "")
8484 (call (mem:SI (match_operand:SI 1 "" "X"))
8485 (match_operand 2 "" "")))
8487 (use (match_operand 3 "" ""))]
8488 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8490 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8492 [(set_attr "type" "call")]
;; Return expander; only available when a single-instruction epilogue is
;; possible (USE_RETURN_INSN).
8495 (define_expand "return"
8497 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8500 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional return; output_return_instruction emits the epilogue.
8501 (define_insn "*arm_return"
8503 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8506 if (arm_ccfsm_state == 2)
8508 arm_ccfsm_state += 2;
8511 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8513 [(set_attr "type" "load1")
8514 (set_attr "length" "12")
8515 (set_attr "predicable" "yes")]
;; Conditional return, taken when the comparison in operand 0 holds.
8518 (define_insn "*cond_return"
8520 (if_then_else (match_operator 0 "arm_comparison_operator"
8521 [(match_operand 1 "cc_register" "") (const_int 0)])
8524 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8527 if (arm_ccfsm_state == 2)
8529 arm_ccfsm_state += 2;
8532 return output_return_instruction (operands[0], TRUE, FALSE);
8534 [(set_attr "conds" "use")
8535 (set_attr "length" "12")
8536 (set_attr "type" "load1")]
;; Conditional return with the sense of the condition inverted.
8539 (define_insn "*cond_return_inverted"
8541 (if_then_else (match_operator 0 "arm_comparison_operator"
8542 [(match_operand 1 "cc_register" "") (const_int 0)])
8545 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8548 if (arm_ccfsm_state == 2)
8550 arm_ccfsm_state += 2;
8553 return output_return_instruction (operands[0], TRUE, TRUE);
8555 [(set_attr "conds" "use")
8556 (set_attr "length" "12")
8557 (set_attr "type" "load1")]
8560 ;; Generate a sequence of instructions to determine if the processor is
8561 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8564 (define_expand "return_addr_mask"
8566 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8568 (set (match_operand:SI 0 "s_register_operand" "")
8569 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8571 (const_int 67108860)))] ; 0x03fffffc
8574 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; teq pc, pc sets Z only in 32-bit mode, distinguishing the two modes.
8577 (define_insn "*check_arch2"
8578 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8579 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8582 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8583 [(set_attr "length" "8")
8584 (set_attr "conds" "set")]
8587 ;; Call subroutine returning any type.
;; Expand __builtin_apply: make the call, then store every possible return
;; register into the result block (r0-r3 via a store-multiple).
8589 (define_expand "untyped_call"
8590 [(parallel [(call (match_operand 0 "" "")
8592 (match_operand 1 "" "")
8593 (match_operand 2 "" "")])]
8598 rtx par = gen_rtx_PARALLEL (VOIDmode,
8599 rtvec_alloc (XVECLEN (operands[2], 0)));
8600 rtx addr = gen_reg_rtx (Pmode);
8604 emit_move_insn (addr, XEXP (operands[1], 0));
8605 mem = change_address (operands[1], BLKmode, addr);
8607 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8609 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8611 /* Default code only uses r0 as a return value, but we could
8612 be using anything up to 4 registers.  */
8613 if (REGNO (src) == R0_REGNUM)
8614 src = gen_rtx_REG (TImode, R0_REGNUM);
8616 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8618 size += GET_MODE_SIZE (GET_MODE (src));
8621 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8626 for (i = 0; i < XVECLEN (par, 0); i++)
8628 HOST_WIDE_INT offset = 0;
8629 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8632 emit_move_insn (addr, plus_constant (addr, size));
8634 mem = change_address (mem, GET_MODE (reg), NULL);
8635 if (REGNO (reg) == R0_REGNUM)
8637 /* On thumb we have to use a write-back instruction.  */
8638 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8639 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8640 size = TARGET_ARM ? 16 : 0;
8644 emit_move_insn (mem, reg);
8645 size = GET_MODE_SIZE (GET_MODE (reg));
8649 /* The optimizer does not know that the call sets the function value
8650 registers we stored in the result block.  We avoid problems by
8651 claiming that all hard registers are used and clobbered at this
8653 emit_insn (gen_blockage ());
;; Expand __builtin_return: reload every saved return register from the
;; result block, emit USEs for them, then perform a naked return.
8659 (define_expand "untyped_return"
8660 [(match_operand:BLK 0 "memory_operand" "")
8661 (match_operand 1 "" "")]
8666 rtx addr = gen_reg_rtx (Pmode);
8670 emit_move_insn (addr, XEXP (operands[0], 0));
8671 mem = change_address (operands[0], BLKmode, addr);
8673 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8675 HOST_WIDE_INT offset = 0;
8676 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8679 emit_move_insn (addr, plus_constant (addr, size));
8681 mem = change_address (mem, GET_MODE (reg), NULL);
8682 if (REGNO (reg) == R0_REGNUM)
8684 /* On thumb we have to use a write-back instruction.  */
8685 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8686 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8687 size = TARGET_ARM ? 16 : 0;
8691 emit_move_insn (reg, mem);
8692 size = GET_MODE_SIZE (GET_MODE (reg));
8696 /* Emit USE insns before the return.  */
8697 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8698 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8700 /* Construct the return.  */
8701 expand_naked_return ();
8707 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8708 ;; all of memory.  This blocks insns from being moved across this point.
;; Zero-length scheduling barrier.
8710 (define_insn "blockage"
8711 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8714 [(set_attr "length" "0")
8715 (set_attr "type" "block")]
;; Switch-statement dispatch: bias the index by the lower bound, then hand
;; off to the per-target casesi pattern chosen below.
8718 (define_expand "casesi"
8719 [(match_operand:SI 0 "s_register_operand" "")	; index to jump on
8720 (match_operand:SI 1 "const_int_operand" "")	; lower bound
8721 (match_operand:SI 2 "const_int_operand" "")	; total range
8722 (match_operand:SI 3 "" "")			; table label
8723 (match_operand:SI 4 "" "")]			; Out of range label
8724 "TARGET_32BIT || optimize_size || flag_pic"
8727 enum insn_code code;
8728 if (operands[1] != const0_rtx)
8730 rtx reg = gen_reg_rtx (SImode);
8732 emit_insn (gen_addsi3 (reg, operands[0],
8733 gen_int_mode (-INTVAL (operands[1]),
8739 code = CODE_FOR_arm_casesi_internal;
8740 else if (TARGET_THUMB1)
8741 code = CODE_FOR_thumb1_casesi_internal_pic;
8743 code = CODE_FOR_thumb2_casesi_internal_pic;
8745 code = CODE_FOR_thumb2_casesi_internal;
8747 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8748 operands[2] = force_reg (SImode, operands[2]);
8750 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8751 operands[3], operands[4]));
8756 ;; The USE in this pattern is needed to tell flow analysis that this is
8757 ;; a CASESI insn.  It has no other purpose.
;; Bounds check plus indexed jump through the dispatch table; the PIC form
;; adds table offsets to pc rather than loading absolute addresses.
8758 (define_insn "arm_casesi_internal"
8759 [(parallel [(set (pc)
8761 (leu (match_operand:SI 0 "s_register_operand" "r")
8762 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8763 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8764 (label_ref (match_operand 2 "" ""))))
8765 (label_ref (match_operand 3 "" ""))))
8766 (clobber (reg:CC CC_REGNUM))
8767 (use (label_ref (match_dup 2)))])]
8771 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8772 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8774 [(set_attr "conds" "clob")
8775 (set_attr "length" "12")]
;; Thumb-1 PIC casesi: range check and branch, then copy the index to r0
;; and dispatch via thumb1_casesi_dispatch.
8778 (define_expand "thumb1_casesi_internal_pic"
8779 [(match_operand:SI 0 "s_register_operand" "")
8780 (match_operand:SI 1 "thumb1_cmp_operand" "")
8781 (match_operand 2 "" "")
8782 (match_operand 3 "" "")]
8786 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8787 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8789 reg0 = gen_rtx_REG (SImode, 0);
8790 emit_move_insn (reg0, operands[0]);
8791 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Table dispatch emitted by thumb1_output_casesi; index arrives in r0.
8796 (define_insn "thumb1_casesi_dispatch"
8797 [(parallel [(set (pc) (unspec [(reg:SI 0)
8798 (label_ref (match_operand 0 "" ""))
8799 ;;		(label_ref (match_operand 1 "" ""))
8801 UNSPEC_THUMB1_CASESI))
8802 (clobber (reg:SI IP_REGNUM))
8803 (clobber (reg:SI LR_REGNUM))])]
8805 "* return thumb1_output_casesi(operands);"
8806 [(set_attr "length" "4")]
;; Indirect jump; Thumb-2 has no "mov pc, reg", so the low bit is set and
;; BX is used instead.
8809 (define_expand "indirect_jump"
8811 (match_operand:SI 0 "s_register_operand" ""))]
8814 /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
8815 address and use bx.  */
8819 tmp = gen_reg_rtx (SImode);
8820 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8826 ;; NB Never uses BX.
;; ARM-state indirect register jump via mov pc, reg.
8827 (define_insn "*arm_indirect_jump"
8829 (match_operand:SI 0 "s_register_operand" "r"))]
8831 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8832 [(set_attr "predicable" "yes")]
;; Indirect jump loading pc straight from memory.
8835 (define_insn "*load_indirect_jump"
8837 (match_operand:SI 0 "memory_operand" "m"))]
8839 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8840 [(set_attr "type" "load1")
8841 (set_attr "pool_range" "4096")
8842 (set_attr "neg_pool_range" "4084")
8843 (set_attr "predicable" "yes")]
8846 ;; NB Never uses BX.
;; Thumb-1 indirect register jump.
8847 (define_insn "*thumb1_indirect_jump"
8849 (match_operand:SI 0 "register_operand" "l*r"))]
8852 [(set_attr "conds" "clob")
8853 (set_attr "length" "2")]
8863 if (TARGET_UNIFIED_ASM)
8866 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8867 return \"mov\\tr8, r8\";
8869 [(set (attr "length")
8870 (if_then_else (eq_attr "is_thumb" "yes")
8876 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Shiftable-operator op with one operand pre-shifted, e.g. add r0, r2,
;; r4 LSL #n.  Alternatives split ARM/Thumb-2 and constant/register shift
;; amounts; insn_enabled disables the cases noted in the comments below.
8878 (define_insn "*arith_shiftsi"
8879 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8880 (match_operator:SI 1 "shiftable_operator"
8881 [(match_operator:SI 3 "shift_operator"
8882 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8883 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8884 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8886 "%i1%?\\t%0, %2, %4%S3"
8887 [(set_attr "predicable" "yes")
8888 (set_attr "shift" "4")
8889 (set_attr "arch" "a,t2,t2,a")
8890 ;; Thumb2 doesn't allow the stack pointer to be used for
8891 ;; operand1 for all operations other than add and sub. In this case
8892 ;; the minus operation is a candidate for an rsub and hence needs
8894 ;; We have to make sure to disable the fourth alternative if
8895 ;; the shift_operator is MULT, since otherwise the insn will
8896 ;; also match a multiply_accumulate pattern and validate_change
8897 ;; will allow a replacement of the constant with a register
8898 ;; despite the checks done in shift_operator.
8899 (set_attr_alternative "insn_enabled"
8900 [(const_string "yes")
8902 (match_operand:SI 1 "add_operator" "")
8903 (const_string "yes") (const_string "no"))
8904 (const_string "yes")
8906 (match_operand:SI 3 "mult_operator" "")
8907 (const_string "no") (const_string "yes"))])
8908 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
8911 [(set (match_operand:SI 0 "s_register_operand" "")
8912 (match_operator:SI 1 "shiftable_operator"
8913 [(match_operator:SI 2 "shiftable_operator"
8914 [(match_operator:SI 3 "shift_operator"
8915 [(match_operand:SI 4 "s_register_operand" "")
8916 (match_operand:SI 5 "reg_or_int_operand" "")])
8917 (match_operand:SI 6 "s_register_operand" "")])
8918 (match_operand:SI 7 "arm_rhs_operand" "")]))
8919 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8922 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8925 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi, but also setting the condition codes from the result.
8928 (define_insn "*arith_shiftsi_compare0"
8929 [(set (reg:CC_NOOV CC_REGNUM)
8931 (match_operator:SI 1 "shiftable_operator"
8932 [(match_operator:SI 3 "shift_operator"
8933 [(match_operand:SI 4 "s_register_operand" "r,r")
8934 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8935 (match_operand:SI 2 "s_register_operand" "r,r")])
8937 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8938 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8941 "%i1%.\\t%0, %2, %4%S3"
8942 [(set_attr "conds" "set")
8943 (set_attr "shift" "4")
8944 (set_attr "arch" "32,a")
8945 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Compare-only variant: the arithmetic result is discarded into a scratch.
8947 (define_insn "*arith_shiftsi_compare0_scratch"
8948 [(set (reg:CC_NOOV CC_REGNUM)
8950 (match_operator:SI 1 "shiftable_operator"
8951 [(match_operator:SI 3 "shift_operator"
8952 [(match_operand:SI 4 "s_register_operand" "r,r")
8953 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8954 (match_operand:SI 2 "s_register_operand" "r,r")])
8956 (clobber (match_scratch:SI 0 "=r,r"))]
8958 "%i1%.\\t%0, %2, %4%S3"
8959 [(set_attr "conds" "set")
8960 (set_attr "shift" "4")
8961 (set_attr "arch" "32,a")
8962 (set_attr "type" "alu_shift,alu_shift_reg")])
;; Subtract with a shifted second operand: sub r0, r1, r3 <shift>.
8964 (define_insn "*sub_shiftsi"
8965 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8966 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8967 (match_operator:SI 2 "shift_operator"
8968 [(match_operand:SI 3 "s_register_operand" "r,r")
8969 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8971 "sub%?\\t%0, %1, %3%S2"
8972 [(set_attr "predicable" "yes")
8973 (set_attr "shift" "3")
8974 (set_attr "arch" "32,a")
8975 (set_attr "type" "alu_shift,alu_shift_reg")])
;; As *sub_shiftsi, but also setting the condition codes.
8977 (define_insn "*sub_shiftsi_compare0"
8978 [(set (reg:CC_NOOV CC_REGNUM)
8980 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8981 (match_operator:SI 2 "shift_operator"
8982 [(match_operand:SI 3 "s_register_operand" "r,r")
8983 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8985 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8986 (minus:SI (match_dup 1)
8987 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8989 "sub%.\\t%0, %1, %3%S2"
8990 [(set_attr "conds" "set")
8991 (set_attr "shift" "3")
8992 (set_attr "arch" "32,a")
8993 (set_attr "type" "alu_shift,alu_shift_reg")])
8995 (define_insn "*sub_shiftsi_compare0_scratch"
8996 [(set (reg:CC_NOOV CC_REGNUM)
8998 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8999 (match_operator:SI 2 "shift_operator"
9000 [(match_operand:SI 3 "s_register_operand" "r,r")
9001 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9003 (clobber (match_scratch:SI 0 "=r,r"))]
9005 "sub%.\\t%0, %1, %3%S2"
9006 [(set_attr "conds" "set")
9007 (set_attr "shift" "3")
9008 (set_attr "arch" "32,a")
9009 (set_attr "type" "alu_shift,alu_shift_reg")])
;; AND of a store-condition-code value (comparison of an existing CC
;; register against 0) with a register: conditionally zero the result,
;; otherwise AND the register with #1.
9012 (define_insn "*and_scc"
9013 [(set (match_operand:SI 0 "s_register_operand" "=r")
9014 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9015 [(match_operand 3 "cc_register" "") (const_int 0)])
9016 (match_operand:SI 2 "s_register_operand" "r")))]
9018 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9019 [(set_attr "conds" "use")
9020 (set_attr "insn" "mov")
9021 (set_attr "length" "8")]
;; IOR of an scc value with a register: ORR in #1 when the condition holds;
;; the second alternative needs a leading conditional move because the
;; destination differs from operand 1.
9024 (define_insn "*ior_scc"
9025 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9026 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9027 [(match_operand 3 "cc_register" "") (const_int 0)])
9028 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9032 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9033 [(set_attr "conds" "use")
9034 (set_attr "length" "4,8")]
9037 ; A series of splitters for the compare_scc pattern below. Note that
9038 ; order is important.
;; LT against zero: the 0/1 result is just the sign bit, extracted with a
;; logical shift right by 31.
9040 [(set (match_operand:SI 0 "s_register_operand" "")
9041 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9043 (clobber (reg:CC CC_REGNUM))]
9044 "TARGET_32BIT && reload_completed"
9045 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; GE against zero: invert, then shift the (now inverted) sign bit down.
9048 [(set (match_operand:SI 0 "s_register_operand" "")
9049 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9051 (clobber (reg:CC CC_REGNUM))]
9052 "TARGET_32BIT && reload_completed"
9053 [(set (match_dup 0) (not:SI (match_dup 1)))
9054 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; EQ: compare 1 against the operand and compute 1 - x; if the subtraction
;; borrowed (ltu), x was neither 0 nor 1, so force the result to 0.
9057 [(set (match_operand:SI 0 "s_register_operand" "")
9058 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9060 (clobber (reg:CC CC_REGNUM))]
9061 "TARGET_32BIT && reload_completed"
9063 [(set (reg:CC CC_REGNUM)
9064 (compare:CC (const_int 1) (match_dup 1)))
9066 (minus:SI (const_int 1) (match_dup 1)))])
9067 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9068 (set (match_dup 0) (const_int 0)))])
;; NE against a constant: add the negated constant (operand 3, built in the
;; preparation code), setting the flags, then force 1 if the result was
;; non-zero.
9071 [(set (match_operand:SI 0 "s_register_operand" "")
9072 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9073 (match_operand:SI 2 "const_int_operand" "")))
9074 (clobber (reg:CC CC_REGNUM))]
9075 "TARGET_32BIT && reload_completed"
9077 [(set (reg:CC CC_REGNUM)
9078 (compare:CC (match_dup 1) (match_dup 2)))
9079 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9080 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9081 (set (match_dup 0) (const_int 1)))]
9083 operands[3] = GEN_INT (-INTVAL (operands[2]));
;; NE, general form: flag-setting subtract, then force 1 on a non-zero
;; difference.
9087 [(set (match_operand:SI 0 "s_register_operand" "")
9088 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9089 (match_operand:SI 2 "arm_add_operand" "")))
9090 (clobber (reg:CC CC_REGNUM))]
9091 "TARGET_32BIT && reload_completed"
9093 [(set (reg:CC_NOOV CC_REGNUM)
9094 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9096 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9097 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9098 (set (match_dup 0) (const_int 1)))])
;; Store the 0/1 result of comparing operands 2 and 3 into operand 0.
;; After reload this splits into a compare followed by two conditionally
;; executed sets; operands 4 and 5 are built below as the reversed and
;; direct forms of the comparison against the CC register.
9100 (define_insn_and_split "*compare_scc"
9101 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9102 (match_operator:SI 1 "arm_comparison_operator"
9103 [(match_operand:SI 2 "s_register_operand" "r,r")
9104 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9105 (clobber (reg:CC CC_REGNUM))]
9108 "&& reload_completed"
9109 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9110 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9111 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9114 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9115 operands[2], operands[3]);
9116 enum rtx_code rc = GET_CODE (operands[1]);
9118 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9120 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; Floating-point comparisons must be reversed with the unordered-aware
;; helper so NaN behaviour is preserved.
9121 if (mode == CCFPmode || mode == CCFPEmode)
9122 rc = reverse_condition_maybe_unordered (rc);
9124 rc = reverse_condition (rc);
9125 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9128 ;; Attempt to improve the sequence generated by the compare_scc splitters
9129 ;; not to use conditional execution.
;; The cmp plus two cond-exec sets are replaced with straight-line
;; arithmetic: subtract into the scratch (operand 3), reverse-subtract from
;; zero, then add the two with the carry from a geu test — yielding 0/1
;; without predication.
9131 [(set (reg:CC CC_REGNUM)
9132 (compare:CC (match_operand:SI 1 "register_operand" "")
9133 (match_operand:SI 2 "arm_rhs_operand" "")))
9134 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9135 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9136 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9137 (set (match_dup 0) (const_int 1)))
9138 (match_scratch:SI 3 "r")]
9141 [(set (reg:CC CC_REGNUM)
9142 (compare:CC (match_dup 1) (match_dup 2)))
9143 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9145 [(set (reg:CC CC_REGNUM)
9146 (compare:CC (const_int 0) (match_dup 3)))
9147 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9150 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9151 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9152 (clobber (reg:CC CC_REGNUM))])])
;; Conditional move selecting between two rhs operands under an equality
;; test of an already-computed condition; emits one or two conditional
;; MOVs depending on which operand is tied to the destination.
9154 (define_insn "*cond_move"
9155 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9156 (if_then_else:SI (match_operator 3 "equality_operator"
9157 [(match_operator 4 "arm_comparison_operator"
9158 [(match_operand 5 "cc_register" "") (const_int 0)])
9160 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9161 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9164 if (GET_CODE (operands[3]) == NE)
9166 if (which_alternative != 1)
9167 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9168 if (which_alternative != 0)
9169 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9172 if (which_alternative != 0)
9173 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9174 if (which_alternative != 1)
9175 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9178 [(set_attr "conds" "use")
9179 (set_attr "insn" "mov")
9180 (set_attr "length" "4,4,8")]
;; Apply a shiftable operator to operand 1 and the 0/1 result of a
;; comparison.  LT against zero is special-cased to use the sign bit
;; directly as a shifted operand (lsr #31); AND and MINUS need an extra
;; instruction for the false case.  Clobbers the condition codes.
9183 (define_insn "*cond_arith"
9184 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9185 (match_operator:SI 5 "shiftable_operator"
9186 [(match_operator:SI 4 "arm_comparison_operator"
9187 [(match_operand:SI 2 "s_register_operand" "r,r")
9188 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9189 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9190 (clobber (reg:CC CC_REGNUM))]
9193 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9194 return \"%i5\\t%0, %1, %2, lsr #31\";
9196 output_asm_insn (\"cmp\\t%2, %3\", operands);
9197 if (GET_CODE (operands[5]) == AND)
9198 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9199 else if (GET_CODE (operands[5]) == MINUS)
9200 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9201 else if (which_alternative != 0)
9202 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9203 return \"%i5%d4\\t%0, %1, #1\";
9205 [(set_attr "conds" "clob")
9206 (set_attr "length" "12")]
;; Subtract the 0/1 result of a comparison from operand 1:
;; cmp, then a conditional SUB of #1.  Clobbers the condition codes.
9209 (define_insn "*cond_sub"
9210 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9211 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9212 (match_operator:SI 4 "arm_comparison_operator"
9213 [(match_operand:SI 2 "s_register_operand" "r,r")
9214 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9215 (clobber (reg:CC CC_REGNUM))]
9218 output_asm_insn (\"cmp\\t%2, %3\", operands);
9219 if (which_alternative != 0)
9220 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9221 return \"sub%d4\\t%0, %1, #1\";
9223 [(set_attr "conds" "clob")
9224 (set_attr "length" "8,12")]
;; Combine two comparisons into a single dominant condition-code result.
;; Emits a compare followed by a conditionally executed second compare
;; (via an IT block on Thumb-2).  cmp_idx selects CMP vs CMN per
;; constraint alternative; 'swap' reverses the order when the second
;; comparison dominates the first.
9227 (define_insn "*cmp_ite0"
9228 [(set (match_operand 6 "dominant_cc_register" "")
9231 (match_operator 4 "arm_comparison_operator"
9232 [(match_operand:SI 0 "s_register_operand"
9233 "l,l,l,r,r,r,r,r,r")
9234 (match_operand:SI 1 "arm_add_operand"
9235 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9236 (match_operator:SI 5 "arm_comparison_operator"
9237 [(match_operand:SI 2 "s_register_operand"
9238 "l,r,r,l,l,r,r,r,r")
9239 (match_operand:SI 3 "arm_add_operand"
9240 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9246 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9248 {\"cmp%d5\\t%0, %1\",
9249 \"cmp%d4\\t%2, %3\"},
9250 {\"cmn%d5\\t%0, #%n1\",
9251 \"cmp%d4\\t%2, %3\"},
9252 {\"cmp%d5\\t%0, %1\",
9253 \"cmn%d4\\t%2, #%n3\"},
9254 {\"cmn%d5\\t%0, #%n1\",
9255 \"cmn%d4\\t%2, #%n3\"}
9257 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9262 \"cmn\\t%0, #%n1\"},
9263 {\"cmn\\t%2, #%n3\",
9265 {\"cmn\\t%2, #%n3\",
9268 static const char * const ite[2] =
9273 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9274 CMP_CMP, CMN_CMP, CMP_CMP,
9275 CMN_CMP, CMP_CMN, CMN_CMN};
9277 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9279 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9280 if (TARGET_THUMB2) {
9281 output_asm_insn (ite[swap], operands);
9283 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9286 [(set_attr "conds" "set")
9287 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9288 (set_attr_alternative "length"
9294 (if_then_else (eq_attr "is_thumb" "no")
9297 (if_then_else (eq_attr "is_thumb" "no")
9300 (if_then_else (eq_attr "is_thumb" "no")
9303 (if_then_else (eq_attr "is_thumb" "no")
;; As *cmp_ite0, but the dominance test uses the REVERSED form of the
;; first comparison (note reverse_condition below), so the second compare
;; is predicated on the false condition (%D5) in the cmp2 table.
9308 (define_insn "*cmp_ite1"
9309 [(set (match_operand 6 "dominant_cc_register" "")
9312 (match_operator 4 "arm_comparison_operator"
9313 [(match_operand:SI 0 "s_register_operand"
9314 "l,l,l,r,r,r,r,r,r")
9315 (match_operand:SI 1 "arm_add_operand"
9316 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9317 (match_operator:SI 5 "arm_comparison_operator"
9318 [(match_operand:SI 2 "s_register_operand"
9319 "l,r,r,l,l,r,r,r,r")
9320 (match_operand:SI 3 "arm_add_operand"
9321 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9327 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9331 {\"cmn\\t%0, #%n1\",
9334 \"cmn\\t%2, #%n3\"},
9335 {\"cmn\\t%0, #%n1\",
9338 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9340 {\"cmp%d4\\t%2, %3\",
9341 \"cmp%D5\\t%0, %1\"},
9342 {\"cmp%d4\\t%2, %3\",
9343 \"cmn%D5\\t%0, #%n1\"},
9344 {\"cmn%d4\\t%2, #%n3\",
9345 \"cmp%D5\\t%0, %1\"},
9346 {\"cmn%d4\\t%2, #%n3\",
9347 \"cmn%D5\\t%0, #%n1\"}
9349 static const char * const ite[2] =
9354 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9355 CMP_CMP, CMN_CMP, CMP_CMP,
9356 CMN_CMP, CMP_CMN, CMN_CMN};
9358 comparison_dominates_p (GET_CODE (operands[5]),
9359 reverse_condition (GET_CODE (operands[4])));
9361 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9362 if (TARGET_THUMB2) {
9363 output_asm_insn (ite[swap], operands);
9365 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9368 [(set_attr "conds" "set")
9369 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9370 (set_attr_alternative "length"
9376 (if_then_else (eq_attr "is_thumb" "no")
9379 (if_then_else (eq_attr "is_thumb" "no")
9382 (if_then_else (eq_attr "is_thumb" "no")
9385 (if_then_else (eq_attr "is_thumb" "no")
;; The AND of two comparisons: the second compare is executed only if the
;; first succeeded, producing a dominant CC result.  Same cmp/cmn table
;; scheme as *cmp_ite0.  Not predicable (it already consumes the IT slot
;; on Thumb-2).
9390 (define_insn "*cmp_and"
9391 [(set (match_operand 6 "dominant_cc_register" "")
9394 (match_operator 4 "arm_comparison_operator"
9395 [(match_operand:SI 0 "s_register_operand"
9396 "l,l,l,r,r,r,r,r,r")
9397 (match_operand:SI 1 "arm_add_operand"
9398 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9399 (match_operator:SI 5 "arm_comparison_operator"
9400 [(match_operand:SI 2 "s_register_operand"
9401 "l,r,r,l,l,r,r,r,r")
9402 (match_operand:SI 3 "arm_add_operand"
9403 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9408 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9410 {\"cmp%d5\\t%0, %1\",
9411 \"cmp%d4\\t%2, %3\"},
9412 {\"cmn%d5\\t%0, #%n1\",
9413 \"cmp%d4\\t%2, %3\"},
9414 {\"cmp%d5\\t%0, %1\",
9415 \"cmn%d4\\t%2, #%n3\"},
9416 {\"cmn%d5\\t%0, #%n1\",
9417 \"cmn%d4\\t%2, #%n3\"}
9419 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9424 \"cmn\\t%0, #%n1\"},
9425 {\"cmn\\t%2, #%n3\",
9427 {\"cmn\\t%2, #%n3\",
9430 static const char *const ite[2] =
9435 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9436 CMP_CMP, CMN_CMP, CMP_CMP,
9437 CMN_CMP, CMP_CMN, CMN_CMN};
9439 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9441 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9442 if (TARGET_THUMB2) {
9443 output_asm_insn (ite[swap], operands);
9445 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9448 [(set_attr "conds" "set")
9449 (set_attr "predicable" "no")
9450 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9451 (set_attr_alternative "length"
9457 (if_then_else (eq_attr "is_thumb" "no")
9460 (if_then_else (eq_attr "is_thumb" "no")
9463 (if_then_else (eq_attr "is_thumb" "no")
9466 (if_then_else (eq_attr "is_thumb" "no")
;; The IOR of two comparisons: the second compare executes on the FALSE
;; condition of the first (%D4/%D5 in the cmp2 table) — if the first
;; comparison already holds, the combined result is decided.
9471 (define_insn "*cmp_ior"
9472 [(set (match_operand 6 "dominant_cc_register" "")
9475 (match_operator 4 "arm_comparison_operator"
9476 [(match_operand:SI 0 "s_register_operand"
9477 "l,l,l,r,r,r,r,r,r")
9478 (match_operand:SI 1 "arm_add_operand"
9479 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9480 (match_operator:SI 5 "arm_comparison_operator"
9481 [(match_operand:SI 2 "s_register_operand"
9482 "l,r,r,l,l,r,r,r,r")
9483 (match_operand:SI 3 "arm_add_operand"
9484 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9489 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9493 {\"cmn\\t%0, #%n1\",
9496 \"cmn\\t%2, #%n3\"},
9497 {\"cmn\\t%0, #%n1\",
9500 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9502 {\"cmp%D4\\t%2, %3\",
9503 \"cmp%D5\\t%0, %1\"},
9504 {\"cmp%D4\\t%2, %3\",
9505 \"cmn%D5\\t%0, #%n1\"},
9506 {\"cmn%D4\\t%2, #%n3\",
9507 \"cmp%D5\\t%0, %1\"},
9508 {\"cmn%D4\\t%2, #%n3\",
9509 \"cmn%D5\\t%0, #%n1\"}
9511 static const char *const ite[2] =
9516 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9517 CMP_CMP, CMN_CMP, CMP_CMP,
9518 CMN_CMP, CMP_CMN, CMN_CMN};
9520 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9522 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9523 if (TARGET_THUMB2) {
9524 output_asm_insn (ite[swap], operands);
9526 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9530 [(set_attr "conds" "set")
9531 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9532 (set_attr_alternative "length"
9538 (if_then_else (eq_attr "is_thumb" "no")
9541 (if_then_else (eq_attr "is_thumb" "no")
9544 (if_then_else (eq_attr "is_thumb" "no")
9547 (if_then_else (eq_attr "is_thumb" "no")
;; OR of two scc values.  When a dominant CC mode exists for the pair of
;; comparisons, split into a single combined comparison (operand 7, the CC
;; register built in the preparation code) followed by one scc of its
;; result.
9552 (define_insn_and_split "*ior_scc_scc"
9553 [(set (match_operand:SI 0 "s_register_operand" "=r")
9554 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9555 [(match_operand:SI 1 "s_register_operand" "r")
9556 (match_operand:SI 2 "arm_add_operand" "rIL")])
9557 (match_operator:SI 6 "arm_comparison_operator"
9558 [(match_operand:SI 4 "s_register_operand" "r")
9559 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9560 (clobber (reg:CC CC_REGNUM))]
9562 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9565 "TARGET_32BIT && reload_completed"
9569 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9570 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9572 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9574 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9577 [(set_attr "conds" "clob")
9578 (set_attr "length" "16")])
9580 ; If the above pattern is followed by a CMP insn, then the compare is
9581 ; redundant, since we can rework the conditional instruction that follows.
;; Same split as *ior_scc_scc, but the combined comparison result is itself
;; consumed by a following compare, so the CC set is kept live (operand 0
;; is the dominant CC register).
9582 (define_insn_and_split "*ior_scc_scc_cmp"
9583 [(set (match_operand 0 "dominant_cc_register" "")
9584 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9585 [(match_operand:SI 1 "s_register_operand" "r")
9586 (match_operand:SI 2 "arm_add_operand" "rIL")])
9587 (match_operator:SI 6 "arm_comparison_operator"
9588 [(match_operand:SI 4 "s_register_operand" "r")
9589 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9591 (set (match_operand:SI 7 "s_register_operand" "=r")
9592 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9593 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9596 "TARGET_32BIT && reload_completed"
9600 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9601 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9603 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9605 [(set_attr "conds" "set")
9606 (set_attr "length" "16")])
;; AND of two scc values; the DOM_CC_X_AND_Y analogue of *ior_scc_scc.
9608 (define_insn_and_split "*and_scc_scc"
9609 [(set (match_operand:SI 0 "s_register_operand" "=r")
9610 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9611 [(match_operand:SI 1 "s_register_operand" "r")
9612 (match_operand:SI 2 "arm_add_operand" "rIL")])
9613 (match_operator:SI 6 "arm_comparison_operator"
9614 [(match_operand:SI 4 "s_register_operand" "r")
9615 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9616 (clobber (reg:CC CC_REGNUM))]
9618 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9621 "TARGET_32BIT && reload_completed
9622 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9627 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9628 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9630 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9632 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9635 [(set_attr "conds" "clob")
9636 (set_attr "length" "16")])
9638 ; If the above pattern is followed by a CMP insn, then the compare is
9639 ; redundant, since we can rework the conditional instruction that follows.
;; AND analogue of *ior_scc_scc_cmp: keep the combined CC result live for
;; the following conditional instruction.
9640 (define_insn_and_split "*and_scc_scc_cmp"
9641 [(set (match_operand 0 "dominant_cc_register" "")
9642 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9643 [(match_operand:SI 1 "s_register_operand" "r")
9644 (match_operand:SI 2 "arm_add_operand" "rIL")])
9645 (match_operator:SI 6 "arm_comparison_operator"
9646 [(match_operand:SI 4 "s_register_operand" "r")
9647 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9649 (set (match_operand:SI 7 "s_register_operand" "=r")
9650 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9651 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9654 "TARGET_32BIT && reload_completed"
9658 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9659 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9661 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9663 [(set_attr "conds" "set")
9664 (set_attr "length" "16")])
9666 ;; If there is no dominance in the comparison, then we can still save an
9667 ;; instruction in the AND case, since we can know that the second compare
9668 ;; need only zero the value if false (if true, then the value is already
;; Split: compute the first scc into operand 0 (earlyclobber), perform the
;; second comparison (operands 7/8 built in the preparation code), then
;; conditionally zero the result if the second comparison fails.
9670 (define_insn_and_split "*and_scc_scc_nodom"
9671 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9672 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9673 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9674 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9675 (match_operator:SI 6 "arm_comparison_operator"
9676 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9677 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9678 (clobber (reg:CC CC_REGNUM))]
9680 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9683 "TARGET_32BIT && reload_completed"
9684 [(parallel [(set (match_dup 0)
9685 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9686 (clobber (reg:CC CC_REGNUM))])
9687 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9689 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9692 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9693 operands[4], operands[5]),
9695 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9697 [(set_attr "conds" "clob")
9698 (set_attr "length" "20")])
;; Split for testing (x & 1) | scc against zero: materialize the IOR into
;; the scratch (operand 4), then compare its low bit (AND #1) in CC_NOOV
;; mode.
9701 [(set (reg:CC_NOOV CC_REGNUM)
9702 (compare:CC_NOOV (ior:SI
9703 (and:SI (match_operand:SI 0 "s_register_operand" "")
9705 (match_operator:SI 1 "arm_comparison_operator"
9706 [(match_operand:SI 2 "s_register_operand" "")
9707 (match_operand:SI 3 "arm_add_operand" "")]))
9709 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9712 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9714 (set (reg:CC_NOOV CC_REGNUM)
9715 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Same split with the IOR operands in the opposite order
;; (scc | (x & 1)).
9720 [(set (reg:CC_NOOV CC_REGNUM)
9721 (compare:CC_NOOV (ior:SI
9722 (match_operator:SI 1 "arm_comparison_operator"
9723 [(match_operand:SI 2 "s_register_operand" "")
9724 (match_operand:SI 3 "arm_add_operand" "")])
9725 (and:SI (match_operand:SI 0 "s_register_operand" "")
9728 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9731 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9733 (set (reg:CC_NOOV CC_REGNUM)
9734 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9737 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated scc: operand 0 becomes -1 if the comparison holds, else 0.
;; LT against zero is just an arithmetic shift of the sign bit; NE uses a
;; flag-setting subtract plus MVNNE; the general case is cmp + two
;; conditional moves.  Clobbers the condition codes.
9739 (define_insn "*negscc"
9740 [(set (match_operand:SI 0 "s_register_operand" "=r")
9741 (neg:SI (match_operator 3 "arm_comparison_operator"
9742 [(match_operand:SI 1 "s_register_operand" "r")
9743 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9744 (clobber (reg:CC CC_REGNUM))]
9747 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9748 return \"mov\\t%0, %1, asr #31\";
9750 if (GET_CODE (operands[3]) == NE)
9751 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9753 output_asm_insn (\"cmp\\t%1, %2\", operands);
9754 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9755 return \"mvn%d3\\t%0, #0\";
9757 [(set_attr "conds" "clob")
9758 (set_attr "length" "12")]
;; General conditional move with its own inline comparison (clobbers CC).
;; LT/GE against zero with a register arm get shorter and/bic-with-asr
;; sequences driven by the sign bit of operand 3; otherwise emit a cmp
;; (or cmn when the negated constant is encodable) plus one or two
;; conditional moves.
9761 (define_insn "movcond"
9762 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9764 (match_operator 5 "arm_comparison_operator"
9765 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9766 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9767 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9768 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9769 (clobber (reg:CC CC_REGNUM))]
9772 if (GET_CODE (operands[5]) == LT
9773 && (operands[4] == const0_rtx))
9775 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9777 if (operands[2] == const0_rtx)
9778 return \"and\\t%0, %1, %3, asr #31\";
9779 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9781 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9783 if (operands[1] == const0_rtx)
9784 return \"bic\\t%0, %2, %3, asr #31\";
9785 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9787 /* The only case that falls through to here is when both ops 1 & 2
9791 if (GET_CODE (operands[5]) == GE
9792 && (operands[4] == const0_rtx))
9794 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9796 if (operands[2] == const0_rtx)
9797 return \"bic\\t%0, %1, %3, asr #31\";
9798 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9800 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9802 if (operands[1] == const0_rtx)
9803 return \"and\\t%0, %2, %3, asr #31\";
9804 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9806 /* The only case that falls through to here is when both ops 1 & 2
9809 if (GET_CODE (operands[4]) == CONST_INT
9810 && !const_ok_for_arm (INTVAL (operands[4])))
9811 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9813 output_asm_insn (\"cmp\\t%3, %4\", operands);
9814 if (which_alternative != 0)
9815 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9816 if (which_alternative != 1)
9817 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9820 [(set_attr "conds" "clob")
9821 (set_attr "length" "8,8,12")]
9824 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; Conditional "add-then-else-move" with the comparison done inline (CC
;; clobbered); the *if_plus_move pattern below is the variant that reuses
;; an existing CC value.
9826 (define_insn "*ifcompare_plus_move"
9827 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9828 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9829 [(match_operand:SI 4 "s_register_operand" "r,r")
9830 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9832 (match_operand:SI 2 "s_register_operand" "r,r")
9833 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9834 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9835 (clobber (reg:CC CC_REGNUM))]
9838 [(set_attr "conds" "clob")
9839 (set_attr "length" "8,12")]
;; Conditional add (or sub of the negated constant) using an existing CC
;; value; the long alternatives append a conditional move for the
;; else-value when it is not tied to the destination.
9842 (define_insn "*if_plus_move"
9843 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9845 (match_operator 4 "arm_comparison_operator"
9846 [(match_operand 5 "cc_register" "") (const_int 0)])
9848 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9849 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9850 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9854 sub%d4\\t%0, %2, #%n3
9855 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9856 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9857 [(set_attr "conds" "use")
9858 (set_attr "length" "4,4,8,8")
9859 (set_attr "type" "*,*,*,*")]
;; Mirror of *ifcompare_plus_move with the plus in the else-arm; the
;; comparison is performed inline and the condition codes are clobbered.
9862 (define_insn "*ifcompare_move_plus"
9863 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9864 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9865 [(match_operand:SI 4 "s_register_operand" "r,r")
9866 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9867 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9869 (match_operand:SI 2 "s_register_operand" "r,r")
9870 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9871 (clobber (reg:CC CC_REGNUM))]
9874 [(set_attr "conds" "clob")
9875 (set_attr "length" "8,12")]
;; Conditional add/sub in the else-arm (executed on the inverse condition
;; %D4) using an existing CC value, with an optional conditional move for
;; the untied then-value.
9878 (define_insn "*if_move_plus"
9879 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9881 (match_operator 4 "arm_comparison_operator"
9882 [(match_operand 5 "cc_register" "") (const_int 0)])
9883 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9885 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9886 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9890 sub%D4\\t%0, %2, #%n3
9891 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9892 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9893 [(set_attr "conds" "use")
9894 (set_attr "length" "4,4,8,8")
9895 (set_attr "type" "*,*,*,*")]
;; Select between the results of two shiftable ALU operations, with the
;; comparison performed inline (condition codes clobbered).
9898 (define_insn "*ifcompare_arith_arith"
9899 [(set (match_operand:SI 0 "s_register_operand" "=r")
9900 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9901 [(match_operand:SI 5 "s_register_operand" "r")
9902 (match_operand:SI 6 "arm_add_operand" "rIL")])
9903 (match_operator:SI 8 "shiftable_operator"
9904 [(match_operand:SI 1 "s_register_operand" "r")
9905 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9906 (match_operator:SI 7 "shiftable_operator"
9907 [(match_operand:SI 3 "s_register_operand" "r")
9908 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9909 (clobber (reg:CC CC_REGNUM))]
9912 [(set_attr "conds" "clob")
9913 (set_attr "length" "12")]
;; Two conditionally executed ALU operations (one on each condition sense)
;; using an already-computed CC value.
9916 (define_insn "*if_arith_arith"
9917 [(set (match_operand:SI 0 "s_register_operand" "=r")
9918 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9919 [(match_operand 8 "cc_register" "") (const_int 0)])
9920 (match_operator:SI 6 "shiftable_operator"
9921 [(match_operand:SI 1 "s_register_operand" "r")
9922 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9923 (match_operator:SI 7 "shiftable_operator"
9924 [(match_operand:SI 3 "s_register_operand" "r")
9925 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9927 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9928 [(set_attr "conds" "use")
9929 (set_attr "length" "8")]
;; ALU result if the comparison holds, else a plain move, comparison done
;; inline.  When the op's identity is (op x 0), the condition is LT/GE
;; against zero and everything is in registers, a two-instruction
;; and/bic-with-asr sequence is used instead of cmp + conditional ops.
9932 (define_insn "*ifcompare_arith_move"
9933 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9934 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9935 [(match_operand:SI 2 "s_register_operand" "r,r")
9936 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9937 (match_operator:SI 7 "shiftable_operator"
9938 [(match_operand:SI 4 "s_register_operand" "r,r")
9939 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9940 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9941 (clobber (reg:CC CC_REGNUM))]
9944 /* If we have an operation where (op x 0) is the identity operation and
9945 the conditional operator is LT or GE and we are comparing against zero and
9946 everything is in registers then we can do this in two instructions. */
9947 if (operands[3] == const0_rtx
9948 && GET_CODE (operands[7]) != AND
9949 && GET_CODE (operands[5]) == REG
9950 && GET_CODE (operands[1]) == REG
9951 && REGNO (operands[1]) == REGNO (operands[4])
9952 && REGNO (operands[4]) != REGNO (operands[0]))
9954 if (GET_CODE (operands[6]) == LT)
9955 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9956 else if (GET_CODE (operands[6]) == GE)
9957 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9959 if (GET_CODE (operands[3]) == CONST_INT
9960 && !const_ok_for_arm (INTVAL (operands[3])))
9961 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9963 output_asm_insn (\"cmp\\t%2, %3\", operands);
9964 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9965 if (which_alternative != 0)
9966 return \"mov%D6\\t%0, %1\";
9969 [(set_attr "conds" "clob")
9970 (set_attr "length" "8,12")]
;; Conditional ALU operation, else a move, reusing an existing CC value;
;; the second alternative needs the extra conditional move because the
;; else-value is not tied to the destination.
9973 (define_insn "*if_arith_move"
9974 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9975 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9976 [(match_operand 6 "cc_register" "") (const_int 0)])
9977 (match_operator:SI 5 "shiftable_operator"
9978 [(match_operand:SI 2 "s_register_operand" "r,r")
9979 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9980 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9984 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9985 [(set_attr "conds" "use")
9986 (set_attr "length" "4,8")
9987 (set_attr "type" "*,*")]
;; Mirror of *ifcompare_arith_move: the ALU operation sits in the
;; else-arm, so GE/LT swap roles in the two-instruction shortcut.
;; Comparison is inline; condition codes are clobbered.
9990 (define_insn "*ifcompare_move_arith"
9991 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9992 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9993 [(match_operand:SI 4 "s_register_operand" "r,r")
9994 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9995 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9996 (match_operator:SI 7 "shiftable_operator"
9997 [(match_operand:SI 2 "s_register_operand" "r,r")
9998 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9999 (clobber (reg:CC CC_REGNUM))]
10002 /* If we have an operation where (op x 0) is the identity operation and
10003 the conditional operator is LT or GE and we are comparing against zero and
10004 everything is in registers then we can do this in two instructions.  */
10005 if (operands[5] == const0_rtx
10006 && GET_CODE (operands[7]) != AND
10007 && GET_CODE (operands[3]) == REG
10008 && GET_CODE (operands[1]) == REG
10009 && REGNO (operands[1]) == REGNO (operands[2])
10010 && REGNO (operands[2]) != REGNO (operands[0]))
10012 if (GET_CODE (operands[6]) == GE)
10013 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10014 else if (GET_CODE (operands[6]) == LT)
10015 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10018 if (GET_CODE (operands[5]) == CONST_INT
10019 && !const_ok_for_arm (INTVAL (operands[5])))
10020 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10022 output_asm_insn (\"cmp\\t%4, %5\", operands);
10024 if (which_alternative != 0)
10025 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10026 return \"%I7%D6\\t%0, %2, %3\";
10028 [(set_attr "conds" "clob")
10029 (set_attr "length" "8,12")]
;; Mirror image of *if_arith_move: the ALU result sits on the else-arm.
;; If condition %4 fails, %0 := %2 <op%5> %3 (note the inverted %D4
;; predicate on the ALU op); if it holds, %0 := %1 - free in
;; alternative 0 (tied operand), one extra conditional MOV in
;; alternative 1.  Uses the existing CC register contents.
10032 (define_insn "*if_move_arith"
10033 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10035 (match_operator 4 "arm_comparison_operator"
10036 [(match_operand 6 "cc_register" "") (const_int 0)])
10037 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10038 (match_operator:SI 5 "shiftable_operator"
10039 [(match_operand:SI 2 "s_register_operand" "r,r")
10040 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10043 %I5%D4\\t%0, %2, %3
10044 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10045 [(set_attr "conds" "use")
10046 (set_attr "length" "4,8")
10047 (set_attr "type" "*,*")]
;; Compare-and-select form: performs its own comparison of %3 against %4
;; (hence the CC_REGNUM clobber and "conds clob"), then selects between
;; %1 and the complement of %2 for %0.  Length 8 when %1 is tied to %0,
;; 12 when a separate move of %1 is required.
10050 (define_insn "*ifcompare_move_not"
10051 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10053 (match_operator 5 "arm_comparison_operator"
10054 [(match_operand:SI 3 "s_register_operand" "r,r")
10055 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10056 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10058 (match_operand:SI 2 "s_register_operand" "r,r"))))
10059 (clobber (reg:CC CC_REGNUM))]
10062 [(set_attr "conds" "clob")
10063 (set_attr "length" "8,12")]
;; Conditional move-or-complement using existing condition codes:
;; %0 := %1 if condition %4 holds, else %0 := ~%2 (MVN).
;; Alternative 0: %1 tied to %0, single conditional MVN (length 4).
;; Alternative 1: conditional MOV of %1 plus conditional MVN (length 8).
;; Alternative 2: %1 is a "K" constant (valid when inverted), loaded
;; with MVN of its bitwise complement (%B1) - also length 8.
10066 (define_insn "*if_move_not"
10067 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10069 (match_operator 4 "arm_comparison_operator"
10070 [(match_operand 3 "cc_register" "") (const_int 0)])
10071 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10072 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10076 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10077 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10078 [(set_attr "conds" "use")
10079 (set_attr "insn" "mvn")
10080 (set_attr "length" "4,8,8")]
;; As *ifcompare_move_not but with the arms swapped: does its own
;; compare of %3 against %4 (clobbering CC), selecting the complement
;; of %2 when the condition holds and %1 otherwise.
10083 (define_insn "*ifcompare_not_move"
10084 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10086 (match_operator 5 "arm_comparison_operator"
10087 [(match_operand:SI 3 "s_register_operand" "r,r")
10088 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10090 (match_operand:SI 2 "s_register_operand" "r,r"))
10091 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10092 (clobber (reg:CC CC_REGNUM))]
10095 [(set_attr "conds" "clob")
10096 (set_attr "length" "8,12")]
;; Dual of *if_move_not: %0 := ~%2 when condition %4 holds, else
;; %0 := %1 (note the %D4/%d4 predicates are swapped relative to
;; *if_move_not).  Same three alternatives: tied %1, register/I
;; constant %1 via MOV, or inverted "K" constant via MVN #%B1.
10099 (define_insn "*if_not_move"
10100 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10102 (match_operator 4 "arm_comparison_operator"
10103 [(match_operand 3 "cc_register" "") (const_int 0)])
10104 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10105 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10109 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10110 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10111 [(set_attr "conds" "use")
10112 (set_attr "insn" "mvn")
10113 (set_attr "length" "4,8,8")]
10116 (define_insn "*ifcompare_shift_move"
10117 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10119 (match_operator 6 "arm_comparison_operator"
10120 [(match_operand:SI 4 "s_register_operand" "r,r")
10121 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10122 (match_operator:SI 7 "shift_operator"
10123 [(match_operand:SI 2 "s_register_operand" "r,r")
10124 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10125 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10126 (clobber (reg:CC CC_REGNUM))]
10129 [(set_attr "conds" "clob")
10130 (set_attr "length" "8,12")]
10133 (define_insn "*if_shift_move"
10134 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10136 (match_operator 5 "arm_comparison_operator"
10137 [(match_operand 6 "cc_register" "") (const_int 0)])
10138 (match_operator:SI 4 "shift_operator"
10139 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10140 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10141 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10145 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10146 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10147 [(set_attr "conds" "use")
10148 (set_attr "shift" "2")
10149 (set_attr "length" "4,8,8")
10150 (set_attr "insn" "mov")
10151 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10152 (const_string "alu_shift")
10153 (const_string "alu_shift_reg")))]
10156 (define_insn "*ifcompare_move_shift"
10157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10159 (match_operator 6 "arm_comparison_operator"
10160 [(match_operand:SI 4 "s_register_operand" "r,r")
10161 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10162 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10163 (match_operator:SI 7 "shift_operator"
10164 [(match_operand:SI 2 "s_register_operand" "r,r")
10165 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10166 (clobber (reg:CC CC_REGNUM))]
10169 [(set_attr "conds" "clob")
10170 (set_attr "length" "8,12")]
10173 (define_insn "*if_move_shift"
10174 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10176 (match_operator 5 "arm_comparison_operator"
10177 [(match_operand 6 "cc_register" "") (const_int 0)])
10178 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10179 (match_operator:SI 4 "shift_operator"
10180 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10181 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10185 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10186 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10187 [(set_attr "conds" "use")
10188 (set_attr "shift" "2")
10189 (set_attr "length" "4,8,8")
10190 (set_attr "insn" "mov")
10191 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10192 (const_string "alu_shift")
10193 (const_string "alu_shift_reg")))]
10196 (define_insn "*ifcompare_shift_shift"
10197 [(set (match_operand:SI 0 "s_register_operand" "=r")
10199 (match_operator 7 "arm_comparison_operator"
10200 [(match_operand:SI 5 "s_register_operand" "r")
10201 (match_operand:SI 6 "arm_add_operand" "rIL")])
10202 (match_operator:SI 8 "shift_operator"
10203 [(match_operand:SI 1 "s_register_operand" "r")
10204 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10205 (match_operator:SI 9 "shift_operator"
10206 [(match_operand:SI 3 "s_register_operand" "r")
10207 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10208 (clobber (reg:CC CC_REGNUM))]
10211 [(set_attr "conds" "clob")
10212 (set_attr "length" "12")]
10215 (define_insn "*if_shift_shift"
10216 [(set (match_operand:SI 0 "s_register_operand" "=r")
10218 (match_operator 5 "arm_comparison_operator"
10219 [(match_operand 8 "cc_register" "") (const_int 0)])
10220 (match_operator:SI 6 "shift_operator"
10221 [(match_operand:SI 1 "s_register_operand" "r")
10222 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10223 (match_operator:SI 7 "shift_operator"
10224 [(match_operand:SI 3 "s_register_operand" "r")
10225 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10227 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10228 [(set_attr "conds" "use")
10229 (set_attr "shift" "1")
10230 (set_attr "length" "8")
10231 (set_attr "insn" "mov")
10232 (set (attr "type") (if_then_else
10233 (and (match_operand 2 "const_int_operand" "")
10234 (match_operand 4 "const_int_operand" ""))
10235 (const_string "alu_shift")
10236 (const_string "alu_shift_reg")))]
10239 (define_insn "*ifcompare_not_arith"
10240 [(set (match_operand:SI 0 "s_register_operand" "=r")
10242 (match_operator 6 "arm_comparison_operator"
10243 [(match_operand:SI 4 "s_register_operand" "r")
10244 (match_operand:SI 5 "arm_add_operand" "rIL")])
10245 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10246 (match_operator:SI 7 "shiftable_operator"
10247 [(match_operand:SI 2 "s_register_operand" "r")
10248 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10249 (clobber (reg:CC CC_REGNUM))]
10252 [(set_attr "conds" "clob")
10253 (set_attr "length" "12")]
10256 (define_insn "*if_not_arith"
10257 [(set (match_operand:SI 0 "s_register_operand" "=r")
10259 (match_operator 5 "arm_comparison_operator"
10260 [(match_operand 4 "cc_register" "") (const_int 0)])
10261 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10262 (match_operator:SI 6 "shiftable_operator"
10263 [(match_operand:SI 2 "s_register_operand" "r")
10264 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10266 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10267 [(set_attr "conds" "use")
10268 (set_attr "insn" "mvn")
10269 (set_attr "length" "8")]
10272 (define_insn "*ifcompare_arith_not"
10273 [(set (match_operand:SI 0 "s_register_operand" "=r")
10275 (match_operator 6 "arm_comparison_operator"
10276 [(match_operand:SI 4 "s_register_operand" "r")
10277 (match_operand:SI 5 "arm_add_operand" "rIL")])
10278 (match_operator:SI 7 "shiftable_operator"
10279 [(match_operand:SI 2 "s_register_operand" "r")
10280 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10281 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10282 (clobber (reg:CC CC_REGNUM))]
10285 [(set_attr "conds" "clob")
10286 (set_attr "length" "12")]
10289 (define_insn "*if_arith_not"
10290 [(set (match_operand:SI 0 "s_register_operand" "=r")
10292 (match_operator 5 "arm_comparison_operator"
10293 [(match_operand 4 "cc_register" "") (const_int 0)])
10294 (match_operator:SI 6 "shiftable_operator"
10295 [(match_operand:SI 2 "s_register_operand" "r")
10296 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10297 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10299 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10300 [(set_attr "conds" "use")
10301 (set_attr "insn" "mvn")
10302 (set_attr "length" "8")]
;; Compare-and-select negate: compares %3 against %4 itself (CC is
;; clobbered), then %0 := -%2 when the condition holds, else %0 := %1.
;; Length 8 with %1 tied to %0, 12 when %1 must be moved separately.
10305 (define_insn "*ifcompare_neg_move"
10306 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10308 (match_operator 5 "arm_comparison_operator"
10309 [(match_operand:SI 3 "s_register_operand" "r,r")
10310 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10311 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10312 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10313 (clobber (reg:CC CC_REGNUM))]
10316 [(set_attr "conds" "clob")
10317 (set_attr "length" "8,12")]
;; Conditional negate using existing condition codes: %0 := -%2 (via
;; RSB %2, #0) when condition %4 holds, else %0 := %1.  Three
;; alternatives: %1 tied to %0 (length 4); %1 moved conditionally;
;; %1 an inverted "K" constant loaded with MVN #%B1 (both length 8).
10320 (define_insn "*if_neg_move"
10321 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10323 (match_operator 4 "arm_comparison_operator"
10324 [(match_operand 3 "cc_register" "") (const_int 0)])
10325 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10326 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10329 rsb%d4\\t%0, %2, #0
10330 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10331 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10332 [(set_attr "conds" "use")
10333 (set_attr "length" "4,8,8")]
;; Arm-swapped variant of *ifcompare_neg_move: performs its own
;; comparison of %3 against %4 (CC clobbered); %0 := %1 when the
;; condition holds, else %0 := -%2.
10336 (define_insn "*ifcompare_move_neg"
10337 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10339 (match_operator 5 "arm_comparison_operator"
10340 [(match_operand:SI 3 "s_register_operand" "r,r")
10341 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10342 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10343 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10344 (clobber (reg:CC CC_REGNUM))]
10347 [(set_attr "conds" "clob")
10348 (set_attr "length" "8,12")]
;; Dual of *if_neg_move: %0 := %1 when condition %4 holds, else
;; %0 := -%2 via RSB with inverted predicate (%D4).  Same three
;; alternatives: tied %1, conditional MOV of %1, or MVN-loaded
;; inverted "K" constant.
10351 (define_insn "*if_move_neg"
10352 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10354 (match_operator 4 "arm_comparison_operator"
10355 [(match_operand 3 "cc_register" "") (const_int 0)])
10356 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10357 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10360 rsb%D4\\t%0, %2, #0
10361 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10362 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10363 [(set_attr "conds" "use")
10364 (set_attr "length" "4,8,8")]
10367 (define_insn "*arith_adjacentmem"
10368 [(set (match_operand:SI 0 "s_register_operand" "=r")
10369 (match_operator:SI 1 "shiftable_operator"
10370 [(match_operand:SI 2 "memory_operand" "m")
10371 (match_operand:SI 3 "memory_operand" "m")]))
10372 (clobber (match_scratch:SI 4 "=r"))]
10373 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10379 HOST_WIDE_INT val1 = 0, val2 = 0;
10381 if (REGNO (operands[0]) > REGNO (operands[4]))
10383 ldm[1] = operands[4];
10384 ldm[2] = operands[0];
10388 ldm[1] = operands[0];
10389 ldm[2] = operands[4];
10392 base_reg = XEXP (operands[2], 0);
10394 if (!REG_P (base_reg))
10396 val1 = INTVAL (XEXP (base_reg, 1));
10397 base_reg = XEXP (base_reg, 0);
10400 if (!REG_P (XEXP (operands[3], 0)))
10401 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10403 arith[0] = operands[0];
10404 arith[3] = operands[1];
10418 if (val1 !=0 && val2 != 0)
10422 if (val1 == 4 || val2 == 4)
10423 /* Other val must be 8, since we know they are adjacent and neither
10425 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10426 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10428 ldm[0] = ops[0] = operands[4];
10430 ops[2] = GEN_INT (val1);
10431 output_add_immediate (ops);
10433 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10435 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10439 /* Offset is out of range for a single add, so use two ldr. */
10442 ops[2] = GEN_INT (val1);
10443 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10445 ops[2] = GEN_INT (val2);
10446 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10449 else if (val1 != 0)
10452 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10454 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10459 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10461 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10463 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10466 [(set_attr "length" "12")
10467 (set_attr "predicable" "yes")
10468 (set_attr "type" "load1")]
10471 ; This pattern is never tried by combine, so do it as a peephole
10474 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10475 (match_operand:SI 1 "arm_general_register_operand" ""))
10476 (set (reg:CC CC_REGNUM)
10477 (compare:CC (match_dup 1) (const_int 0)))]
10479 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10480 (set (match_dup 0) (match_dup 1))])]
10485 [(set (match_operand:SI 0 "s_register_operand" "")
10486 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10488 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10489 [(match_operand:SI 3 "s_register_operand" "")
10490 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10491 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10493 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10494 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10499 ;; This split can be used because CC_Z mode implies that the following
10500 ;; branch will be an equality, or an unsigned inequality, so the sign
10501 ;; extension is not needed.
10504 [(set (reg:CC_Z CC_REGNUM)
10506 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10508 (match_operand 1 "const_int_operand" "")))
10509 (clobber (match_scratch:SI 2 ""))]
10511 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10512 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10513 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10514 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10516 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10519 ;; ??? Check the patterns above for Thumb-2 usefulness
10521 (define_expand "prologue"
10522 [(clobber (const_int 0))]
10525 arm_expand_prologue ();
10527 thumb1_expand_prologue ();
10532 (define_expand "epilogue"
10533 [(clobber (const_int 0))]
10536 if (crtl->calls_eh_return)
10537 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10539 thumb1_expand_epilogue ();
10540 else if (USE_RETURN_INSN (FALSE))
10542 emit_jump_insn (gen_return ());
10545 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10546 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10551 (define_insn "prologue_thumb1_interwork"
10552 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10554 "* return thumb1_output_interwork ();"
10555 [(set_attr "length" "8")]
10558 ;; Note - although unspec_volatile's USE all hard registers,
10559 ;; USEs are ignored after reload has completed. Thus we need
10560 ;; to add an unspec of the link register to ensure that flow
10561 ;; does not think that it is unused by the sibcall branch that
10562 ;; will replace the standard function epilogue.
10563 (define_insn "sibcall_epilogue"
10564 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10565 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10568 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10569 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10570 return arm_output_epilogue (next_nonnote_insn (insn));
10572 ;; Length is absolute worst case
10573 [(set_attr "length" "44")
10574 (set_attr "type" "block")
10575 ;; We don't clobber the conditions, but the potential length of this
10576 ;; operation is sufficient to make conditionalizing the sequence
10577 ;; unlikely to be profitable.
10578 (set_attr "conds" "clob")]
10581 (define_insn "*epilogue_insns"
10582 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10586 return arm_output_epilogue (NULL);
10587 else /* TARGET_THUMB1 */
10588 return thumb_unexpanded_epilogue ();
10590 ; Length is absolute worst case
10591 [(set_attr "length" "44")
10592 (set_attr "type" "block")
10593 ;; We don't clobber the conditions, but the potential length of this
10594 ;; operation is sufficient to make conditionalizing the sequence
10595 ;; unlikely to be profitable.
10596 (set_attr "conds" "clob")]
10599 (define_expand "eh_epilogue"
10600 [(use (match_operand:SI 0 "register_operand" ""))
10601 (use (match_operand:SI 1 "register_operand" ""))
10602 (use (match_operand:SI 2 "register_operand" ""))]
10606 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10607 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10609 rtx ra = gen_rtx_REG (Pmode, 2);
10611 emit_move_insn (ra, operands[2]);
10614 /* This is a hack -- we may have crystalized the function type too
10616 cfun->machine->func_type = 0;
10620 ;; This split is only used during output to reduce the number of patterns
10621 ;; that need assembler instructions adding to them. We allowed the setting
10622 ;; of the conditions to be implicit during rtl generation so that
10623 ;; the conditional compare patterns would work. However this conflicts to
10624 ;; some extent with the conditional data operations, so we have to split them
10627 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10628 ;; conditional execution sufficient?
10631 [(set (match_operand:SI 0 "s_register_operand" "")
10632 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10633 [(match_operand 2 "" "") (match_operand 3 "" "")])
10635 (match_operand 4 "" "")))
10636 (clobber (reg:CC CC_REGNUM))]
10637 "TARGET_ARM && reload_completed"
10638 [(set (match_dup 5) (match_dup 6))
10639 (cond_exec (match_dup 7)
10640 (set (match_dup 0) (match_dup 4)))]
10643 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10644 operands[2], operands[3]);
10645 enum rtx_code rc = GET_CODE (operands[1]);
10647 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10648 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10649 if (mode == CCFPmode || mode == CCFPEmode)
10650 rc = reverse_condition_maybe_unordered (rc);
10652 rc = reverse_condition (rc);
10654 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10659 [(set (match_operand:SI 0 "s_register_operand" "")
10660 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10661 [(match_operand 2 "" "") (match_operand 3 "" "")])
10662 (match_operand 4 "" "")
10664 (clobber (reg:CC CC_REGNUM))]
10665 "TARGET_ARM && reload_completed"
10666 [(set (match_dup 5) (match_dup 6))
10667 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10668 (set (match_dup 0) (match_dup 4)))]
10671 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10672 operands[2], operands[3]);
10674 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10675 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10680 [(set (match_operand:SI 0 "s_register_operand" "")
10681 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10682 [(match_operand 2 "" "") (match_operand 3 "" "")])
10683 (match_operand 4 "" "")
10684 (match_operand 5 "" "")))
10685 (clobber (reg:CC CC_REGNUM))]
10686 "TARGET_ARM && reload_completed"
10687 [(set (match_dup 6) (match_dup 7))
10688 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10689 (set (match_dup 0) (match_dup 4)))
10690 (cond_exec (match_dup 8)
10691 (set (match_dup 0) (match_dup 5)))]
10694 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10695 operands[2], operands[3]);
10696 enum rtx_code rc = GET_CODE (operands[1]);
10698 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10699 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10700 if (mode == CCFPmode || mode == CCFPEmode)
10701 rc = reverse_condition_maybe_unordered (rc);
10703 rc = reverse_condition (rc);
10705 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10710 [(set (match_operand:SI 0 "s_register_operand" "")
10711 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10712 [(match_operand:SI 2 "s_register_operand" "")
10713 (match_operand:SI 3 "arm_add_operand" "")])
10714 (match_operand:SI 4 "arm_rhs_operand" "")
10716 (match_operand:SI 5 "s_register_operand" ""))))
10717 (clobber (reg:CC CC_REGNUM))]
10718 "TARGET_ARM && reload_completed"
10719 [(set (match_dup 6) (match_dup 7))
10720 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10721 (set (match_dup 0) (match_dup 4)))
10722 (cond_exec (match_dup 8)
10723 (set (match_dup 0) (not:SI (match_dup 5))))]
10726 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10727 operands[2], operands[3]);
10728 enum rtx_code rc = GET_CODE (operands[1]);
10730 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10731 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10732 if (mode == CCFPmode || mode == CCFPEmode)
10733 rc = reverse_condition_maybe_unordered (rc);
10735 rc = reverse_condition (rc);
10737 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select between %1 and the complement of %2, using the
;; CC register already set by a prior instruction: when condition %4
;; holds %0 := %1, otherwise %0 := ~%2 (MVN with inverted predicate
;; %D4).  Alternative 0 ties %1 to %0 so only the MVN is emitted.
10741 (define_insn "*cond_move_not"
10742 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10743 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10744 [(match_operand 3 "cc_register" "") (const_int 0)])
10745 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10747 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10751 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10752 [(set_attr "conds" "use")
10753 (set_attr "insn" "mvn")
10754 (set_attr "length" "4,8")]
10757 ;; The next two patterns occur when an AND operation is followed by a
10758 ;; scc insn sequence
10760 (define_insn "*sign_extract_onebit"
10761 [(set (match_operand:SI 0 "s_register_operand" "=r")
10762 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10764 (match_operand:SI 2 "const_int_operand" "n")))
10765 (clobber (reg:CC CC_REGNUM))]
10768 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10769 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10770 return \"mvnne\\t%0, #0\";
10772 [(set_attr "conds" "clob")
10773 (set_attr "length" "8")]
10776 (define_insn "*not_signextract_onebit"
10777 [(set (match_operand:SI 0 "s_register_operand" "=r")
10779 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10781 (match_operand:SI 2 "const_int_operand" "n"))))
10782 (clobber (reg:CC CC_REGNUM))]
10785 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10786 output_asm_insn (\"tst\\t%1, %2\", operands);
10787 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10788 return \"movne\\t%0, #0\";
10790 [(set_attr "conds" "clob")
10791 (set_attr "length" "12")]
10793 ;; ??? The above patterns need auditing for Thumb-2
10795 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10796 ;; expressions. For simplicity, the first register is also in the unspec
10798 ;; To avoid the usage of GNU extension, the length attribute is computed
10799 ;; in a C function arm_attr_length_push_multi.
10800 (define_insn "*push_multi"
10801 [(match_parallel 2 "multi_register_push"
10802 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10803 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10804 UNSPEC_PUSH_MULT))])]
10808 int num_saves = XVECLEN (operands[2], 0);
10810 /* For the StrongARM at least it is faster to
10811 use STR to store only a single register.
10812 In Thumb mode always use push, and the assembler will pick
10813 something appropriate. */
10814 if (num_saves == 1 && TARGET_ARM)
10815 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10822 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10823 else if (TARGET_THUMB2)
10824 strcpy (pattern, \"push%?\\t{%1\");
10826 strcpy (pattern, \"push\\t{%1\");
10828 for (i = 1; i < num_saves; i++)
10830 strcat (pattern, \", %|\");
10832 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10835 strcat (pattern, \"}\");
10836 output_asm_insn (pattern, operands);
10841 [(set_attr "type" "store4")
10842 (set (attr "length")
10843 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Emits no code (length 0): a block-memory store fed by an unspec of
;; two registers.  NOTE(review): this appears to act purely as a
;; scheduling/dependence barrier tying stack memory to the given
;; registers so accesses are not reordered across it - confirm against
;; the callers that emit stack_tie.
10846 (define_insn "stack_tie"
10847 [(set (mem:BLK (scratch))
10848 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10849 (match_operand:SI 1 "s_register_operand" "rk")]
10853 [(set_attr "length" "0")]
10856 ;; Similarly for the floating point registers
10857 (define_insn "*push_fp_multi"
10858 [(match_parallel 2 "multi_register_push"
10859 [(set (match_operand:BLK 0 "memory_operand" "=m")
10860 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10861 UNSPEC_PUSH_MULT))])]
10862 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10867 sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10868 output_asm_insn (pattern, operands);
10871 [(set_attr "type" "f_fpa_store")]
10874 ;; Special patterns for dealing with the constant pool
10876 (define_insn "align_4"
10877 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10880 assemble_align (32);
10885 (define_insn "align_8"
10886 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10889 assemble_align (64);
10894 (define_insn "consttable_end"
10895 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10898 making_const_table = FALSE;
10903 (define_insn "consttable_1"
10904 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10907 making_const_table = TRUE;
10908 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10909 assemble_zeros (3);
10912 [(set_attr "length" "4")]
10915 (define_insn "consttable_2"
10916 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10919 making_const_table = TRUE;
10920 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10921 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10922 assemble_zeros (2);
10925 [(set_attr "length" "4")]
10928 (define_insn "consttable_4"
10929 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10933 rtx x = operands[0];
10934 making_const_table = TRUE;
10935 switch (GET_MODE_CLASS (GET_MODE (x)))
10938 if (GET_MODE (x) == HFmode)
10939 arm_emit_fp16_const (x);
10943 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10944 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10948 /* XXX: Sometimes gcc does something really dumb and ends up with
10949 a HIGH in a constant pool entry, usually because it's trying to
10950 load into a VFP register. We know this will always be used in
10951 combination with a LO_SUM which ignores the high bits, so just
10952 strip off the HIGH. */
10953 if (GET_CODE (x) == HIGH)
10955 assemble_integer (x, 4, BITS_PER_WORD, 1);
10956 mark_symbol_refs_as_used (x);
10961 [(set_attr "length" "4")]
10964 (define_insn "consttable_8"
10965 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10969 making_const_table = TRUE;
10970 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10975 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10976 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10980 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10985 [(set_attr "length" "8")]
10988 (define_insn "consttable_16"
10989 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10993 making_const_table = TRUE;
10994 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10999 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11000 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11004 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11009 [(set_attr "length" "16")]
11012 ;; Miscellaneous Thumb patterns
11014 (define_expand "tablejump"
11015 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11016 (use (label_ref (match_operand 1 "" "")))])]
11021 /* Hopefully, CSE will eliminate this copy. */
11022 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11023 rtx reg2 = gen_reg_rtx (SImode);
11025 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11026 operands[0] = reg2;
11031 ;; NB never uses BX.
11032 (define_insn "*thumb1_tablejump"
11033 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11034 (use (label_ref (match_operand 1 "" "")))]
11037 [(set_attr "length" "2")]
11040 ;; V5 Instructions,
;; Count leading zeros, %0 := clz(%1).  Available on 32-bit targets
;; from architecture v5 (arm_arch5); predicable.
11042 (define_insn "clzsi2"
11043 [(set (match_operand:SI 0 "s_register_operand" "=r")
11044 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11045 "TARGET_32BIT && arm_arch5"
11047 [(set_attr "predicable" "yes")
11048 (set_attr "insn" "clz")])
;; Bit-reverse %1 into %0 (UNSPEC_RBIT); requires Thumb-2-capable
;; architecture.  The "insn" attribute is deliberately "clz" -
;; presumably RBIT shares CLZ's scheduling characteristics; confirm
;; against the pipeline descriptions.
11050 (define_insn "rbitsi2"
11051 [(set (match_operand:SI 0 "s_register_operand" "=r")
11052 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11053 "TARGET_32BIT && arm_arch_thumb2"
11055 [(set_attr "predicable" "yes")
11056 (set_attr "insn" "clz")])
;; Count trailing zeros, synthesized as ctz(x) = clz(rbit(x)):
;; reverse the bits into a fresh pseudo, then count leading zeros.
11058 (define_expand "ctzsi2"
11059 [(set (match_operand:SI 0 "s_register_operand" "")
11060 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11061 "TARGET_32BIT && arm_arch_thumb2"
11064 rtx tmp = gen_reg_rtx (SImode);
11065 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11066 emit_insn (gen_clzsi2 (operands[0], tmp));
11072 ;; V5E instructions.
11074 (define_insn "prefetch"
11075 [(prefetch (match_operand:SI 0 "address_operand" "p")
11076 (match_operand:SI 1 "" "")
11077 (match_operand:SI 2 "" ""))]
11078 "TARGET_32BIT && arm_arch5e"
11081 ;; General predication pattern
11084 [(match_operator 0 "arm_comparison_operator"
11085 [(match_operand 1 "cc_register" "")
;; Zero-length marker insn: emits only an assembler comment ("%@ ...")
;; noting that register %0 is needed by the prologue.  Keeps the
;; register live without generating code.
11091 (define_insn "prologue_use"
11092 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11094 "%@ %0 needed for prologue"
11095 [(set_attr "length" "0")]
11099 ;; Patterns for exception handling
11101 (define_expand "eh_return"
11102 [(use (match_operand 0 "general_operand" ""))]
11107 emit_insn (gen_arm_eh_return (operands[0]));
11109 emit_insn (gen_thumb_eh_return (operands[0]));
11114 ;; We can't expand this before we know where the link register is stored.
11115 (define_insn_and_split "arm_eh_return"
11116 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11118 (clobber (match_scratch:SI 1 "=&r"))]
11121 "&& reload_completed"
11125 arm_set_return_address (operands[0], operands[1]);
11130 (define_insn_and_split "thumb_eh_return"
11131 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11133 (clobber (match_scratch:SI 1 "=&l"))]
11136 "&& reload_completed"
11140 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer directly from coprocessor 15, register
;; c13/c0/3 (the user read-only thread ID register) via MRC.
11148 (define_insn "load_tp_hard"
11149 [(set (match_operand:SI 0 "register_operand" "=r")
11150 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11152 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11153 [(set_attr "predicable" "yes")]
11156 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software thread-pointer read: calls the EABI helper
;; __aeabi_read_tp, which returns the thread pointer in r0 (hence the
;; hard-wired (reg:SI 0) destination).  The call clobbers LR, IP and
;; the condition codes, but - per the helper's special ABI - not r1-r3.
11157 (define_insn "load_tp_soft"
11158 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11159 (clobber (reg:SI LR_REGNUM))
11160 (clobber (reg:SI IP_REGNUM))
11161 (clobber (reg:CC CC_REGNUM))]
11163 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11164 [(set_attr "conds" "clob")]
11167 ;; tls descriptor call
;; TLS-descriptor resolver call: r0 carries the descriptor argument in and the
;; resolved offset out.  Operand 0 is the symbol ("X" accepts anything),
;; operand 1 an instance number used to emit a local "LPIC<n>" label before
;; the "bl sym(tlscall)" relocation.  Clobbers r1, LR and the condition codes.
;; NOTE(review): the insn condition and part of the output block (original
;; lines 11176-11177, 11181) are missing from this extracted view; code left
;; byte-identical.  Also note (reg:SI CC_REGNUM) here vs (reg:CC CC_REGNUM)
;; in load_tp_soft above — confirm against upstream before changing either.
11168 (define_insn "tlscall"
11169   [(set (reg:SI R0_REGNUM)
11170         (unspec:SI [(reg:SI R0_REGNUM)
11171                     (match_operand:SI 0 "" "X")
11172 	            (match_operand 1 "" "")] UNSPEC_TLS))
11173    (clobber (reg:SI R1_REGNUM))
11174    (clobber (reg:SI LR_REGNUM))
11175    (clobber (reg:SI CC_REGNUM))]
11178     targetm.asm_out.internal_label (asm_out_file, "LPIC",
11179 				    INTVAL (operands[1]));
11180     return "bl\\t%c0(tlscall)";
11182   [(set_attr "conds" "clob")
11183    (set_attr "length" "4")]
11188 ;; We only care about the lower 16 bits of the constant
11189 ;; being inserted into the upper 16 bits of the register.
;; Anonymous pattern matching a zero_extract store of a constant — presumably
;; emitted as a MOVT (set top halfword) instruction; TODO confirm against the
;; missing template line.  4 bytes, predicable.
;; NOTE(review): the extract width/position operands, insn condition and
;; output template (original lines 11192-11196) are missing from this
;; extracted view; code left byte-identical.
11190 (define_insn "*arm_movtas_ze"
11191   [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11194 	(match_operand:SI 1 "const_int_operand" ""))]
11197   [(set_attr "predicable" "yes")
11198    (set_attr "length" "4")]
;; Byte-swap (bswap:SI) for ARM/Thumb-2 state on ARMv6+ — presumably the REV
;; instruction (TODO confirm: the output-template line, original 11205, is
;; missing from this extracted view).  4 bytes, predicable.
11201 (define_insn "*arm_rev"
11202   [(set (match_operand:SI 0 "s_register_operand" "=r")
11203 	(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11204   "TARGET_32BIT && arm_arch6"
11206   [(set_attr "predicable" "yes")
11207    (set_attr "length" "4")]
;; Thumb-1 byte-swap on ARMv6+: low registers only ("l"), 2-byte encoding.
;; NOTE(review): the output-template line (original 11214) is missing from
;; this extracted view; code left byte-identical.
11210 (define_insn "*thumb1_rev"
11211   [(set (match_operand:SI 0 "s_register_operand" "=l")
11212 	(bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11213   "TARGET_THUMB1 && arm_arch6"
11215   [(set_attr "length" "2")]
;; Multi-insn byte-swap expansion for pre-ARMv6 ARM state (no REV insn):
;; the classic eor/rotate sequence using scratch operands 2 and 3, invoked
;; from bswapsi2 below.  The -65281 constant is the 0xFFFF00FF byte-clear
;; mask written as a signed value.
;; NOTE(review): roughly half the RTL lines of this expander (shift amounts,
;; intermediate sets, closing) are missing from this extracted view; code
;; left byte-identical — restore from upstream arm.md before editing.
11218 (define_expand "arm_legacy_rev"
11219   [(set (match_operand:SI 2 "s_register_operand" "")
11220 	(xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11224 	(lshiftrt:SI (match_dup 2)
11226    (set (match_operand:SI 3 "s_register_operand" "")
11227 	(rotatert:SI (match_dup 1)
11230 	(and:SI (match_dup 2)
11231 		(const_int -65281)))
11232    (set (match_operand:SI 0 "s_register_operand" "")
11233 	(xor:SI (match_dup 3)
11239 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap expansion for cores without REV: builds the swapped word
;; with shifts/rotates/ors through scratch operands 2-5, invoked from
;; bswapsi2 below.
;; NOTE(review): many interior RTL lines (shift counts, several set heads,
;; closing) are missing from this extracted view; code left byte-identical —
;; restore from upstream arm.md before editing.
11240 (define_expand "thumb_legacy_rev"
11241   [(set (match_operand:SI 2 "s_register_operand" "")
11242      (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11244    (set (match_operand:SI 3 "s_register_operand" "")
11245      (lshiftrt:SI (match_dup 1)
11248      (ior:SI (match_dup 3)
11250    (set (match_operand:SI 4 "s_register_operand" "")
11252    (set (match_operand:SI 5 "s_register_operand" "")
11253      (rotatert:SI (match_dup 1)
11256      (ashift:SI (match_dup 5)
11259      (lshiftrt:SI (match_dup 5)
11262      (ior:SI (match_dup 5)
11265      (rotatert:SI (match_dup 5)
11267    (set (match_operand:SI 0 "s_register_operand" "")
11268      (ior:SI (match_dup 5)
;; Standard-named bswapsi2 expander.  When the core lacks a native REV
;; (pre-ARMv6), falls back to the legacy multi-insn sequences above —
;; thumb_legacy_rev needs four scratch registers (op2-op5), arm_legacy_rev
;; two (op2, op3) — hence the "arm_arch6 || !optimize_size" guard: the
;; fallback is only worth emitting when not optimizing for size.
;; NOTE(review): the C-body scaffolding (if/else on arm_arch6 and
;; TARGET_THUMB, DONE, closing) is missing from this extracted view; code
;; left byte-identical.
11274 (define_expand "bswapsi2"
11275   [(set (match_operand:SI 0 "s_register_operand" "=r")
11276   	(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11277 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11281       rtx op2 = gen_reg_rtx (SImode);
11282       rtx op3 = gen_reg_rtx (SImode);
11286 	  rtx op4 = gen_reg_rtx (SImode);
11287 	  rtx op5 = gen_reg_rtx (SImode);
11289 	  emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11290 					   op2, op3, op4, op5));
11294 	  emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11303 ;; Load the load/store multiple patterns
11304 (include "ldmstm.md")
11305 ;; Load the FPA co-processor patterns
;; NOTE(review): the (include "fpa.md") line that should follow the comment
;; above (original line 11306) appears to have been lost in extraction —
;; confirm against upstream gcc/config/arm/arm.md.
11307 ;; Load the Maverick co-processor patterns
11308 (include "cirrus.md")
11309 ;; Vector bits common to IWMMXT and Neon
11310 (include "vec-common.md")
11311 ;; Load the Intel Wireless Multimedia Extension patterns
11312 (include "iwmmxt.md")
11313 ;; Load the VFP co-processor patterns
;; NOTE(review): the (include "vfp.md") line that should follow the comment
;; above (original line 11314) appears to have been lost in extraction —
;; confirm against upstream gcc/config/arm/arm.md.
11315 ;; Thumb-2 patterns
11316 (include "thumb2.md")
11318 (include "neon.md")
11319 ;; Synchronization Primitives
11320 (include "sync.md")
11321 ;; Fixed-point patterns
11322 (include "arm-fixed.md")